Use local pages for WebRTC telemetry tests.
This avoids hitting HTTPS errors.
BUG=chromium:702201
NOTRY=True
Review-Url: https://codereview.chromium.org/2761163003
Cr-Commit-Position: refs/heads/master@{#460078}
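
After this change the stories in webrtc_cases.py load pages checked in under tools/perf/page_sets/webrtc_cases/ instead of remote HTTPS URLs. A minimal sketch of the resulting story shape (the class and story name here are illustrative, not part of this CL):

  from telemetry.page import page as page_module

  class ExampleLocalWebrtcPage(page_module.Page):
    """Hypothetical story that loads one of the checked-in WebRTC pages."""

    def __init__(self, page_set):
      super(ExampleLocalWebrtcPage, self).__init__(
          # Telemetry serves file:// story URLs from the local checkout, so
          # no remote HTTPS endpoint (and none of its errors) is involved.
          url='file://webrtc_cases/constraints.html',
          page_set=page_set,
          name='example_local_story')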
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 9a56923..8ce0185b 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -26,6 +26,8 @@
r".*vulcanized.html$",
r".*crisper.js$",
r"tools[\\\/]md_browser[\\\/].*\.css$",
+ # Test pages for WebRTC telemetry tests.
+ r"tools[\\\/]perf[\\\/]page_sets[\\\/]webrtc_cases.*",
)
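
The new pattern puts the imported test pages under tools/perf/page_sets/webrtc_cases/ into the same exclusion list as the neighboring entries (which checks that list feeds is defined elsewhere in PRESUBMIT.py). A quick, purely illustrative check that the pattern matches those paths with either path separator:

  import re

  pattern = r"tools[\\\/]perf[\\\/]page_sets[\\\/]webrtc_cases.*"
  for path in ('tools/perf/page_sets/webrtc_cases/adapter.js',
               'tools\\perf\\page_sets\\webrtc_cases\\constraints.html'):
    assert re.match(pattern, path)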
diff --git a/tools/perf/page_sets/data/webrtc_audio_cases.json b/tools/perf/page_sets/data/webrtc_audio_cases.json
deleted file mode 100644
index 0bf2632..0000000
--- a/tools/perf/page_sets/data/webrtc_audio_cases.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "archives": {
- "audio_call_g722_10s": {
- "DEFAULT": "webrtc_audio_cases_001.wpr"
- },
- "audio_call_isac16k_10s": {
- "DEFAULT": "webrtc_audio_cases_001.wpr"
- },
- "audio_call_opus_10s": {
- "DEFAULT": "webrtc_audio_cases_001.wpr"
- },
- "audio_call_pcmu_10s": {
- "DEFAULT": "webrtc_audio_cases_001.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_audio_cases_001.wpr.sha1 b/tools/perf/page_sets/data/webrtc_audio_cases_001.wpr.sha1
deleted file mode 100644
index 1db4a8f..0000000
--- a/tools/perf/page_sets/data/webrtc_audio_cases_001.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3c4603628a18b154b1a3974e247ab19ce7c434e
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_datachannel_cases.json b/tools/perf/page_sets/data/webrtc_datachannel_cases.json
deleted file mode 100644
index 7681a19..0000000
--- a/tools/perf/page_sets/data/webrtc_datachannel_cases.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "archives": {
- "30s_datachannel_transfer": {
- "DEFAULT": "webrtc_datachannel_cases_001.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_datachannel_cases_001.wpr.sha1 b/tools/perf/page_sets/data/webrtc_datachannel_cases_001.wpr.sha1
deleted file mode 100644
index 2e4d7b52f..0000000
--- a/tools/perf/page_sets/data/webrtc_datachannel_cases_001.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-10ce827228dbf11155df76f1623e5d4d74846dc9
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_getusermedia_cases.json b/tools/perf/page_sets/data/webrtc_getusermedia_cases.json
deleted file mode 100644
index 73450d33..0000000
--- a/tools/perf/page_sets/data/webrtc_getusermedia_cases.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "archives": {
- "hd_local_stream_10s": {
- "DEFAULT": "webrtc_getusermedia_cases_001.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_getusermedia_cases_001.wpr.sha1 b/tools/perf/page_sets/data/webrtc_getusermedia_cases_001.wpr.sha1
deleted file mode 100644
index 4bef0ce0..0000000
--- a/tools/perf/page_sets/data/webrtc_getusermedia_cases_001.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ac30bfebcdedbcee7a4402e8f53b603adf4f7411
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_peerconnection_cases.json b/tools/perf/page_sets/data/webrtc_peerconnection_cases.json
deleted file mode 100644
index c7bff693..0000000
--- a/tools/perf/page_sets/data/webrtc_peerconnection_cases.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "archives": {
- "720p_call_45s": {
- "DEFAULT": "webrtc_peerconnection_cases_003.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_peerconnection_cases_003.wpr.sha1 b/tools/perf/page_sets/data/webrtc_peerconnection_cases_003.wpr.sha1
deleted file mode 100644
index ae94122..0000000
--- a/tools/perf/page_sets/data/webrtc_peerconnection_cases_003.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0bbac3b371f4dae1495699ac2ec394f1163d7cb9
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_smoothness_cases.json b/tools/perf/page_sets/data/webrtc_smoothness_cases.json
deleted file mode 100644
index d557394..0000000
--- a/tools/perf/page_sets/data/webrtc_smoothness_cases.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "archives": {
- "720p_call_45s": {
- "DEFAULT": "webrtc_smoothness_cases_001.wpr"
- },
- "canvas_capture_peer_connection": {
- "DEFAULT": "webrtc_smoothness_cases_001.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_smoothness_cases_001.wpr.sha1 b/tools/perf/page_sets/data/webrtc_smoothness_cases_001.wpr.sha1
deleted file mode 100644
index 6ec76ac..0000000
--- a/tools/perf/page_sets/data/webrtc_smoothness_cases_001.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c57b0b7001336cf5e62851757096bbbc46866b9f
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_stresstest_cases.json b/tools/perf/page_sets/data/webrtc_stresstest_cases.json
deleted file mode 100644
index e74ffe7..0000000
--- a/tools/perf/page_sets/data/webrtc_stresstest_cases.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "archives": {
- "multiple_peerconnections": {
- "DEFAULT": "webrtc_stresstest_cases_001.wpr"
- }
- },
- "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.",
- "platform_specific": true
-}
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_stresstest_cases_000.wpr.sha1 b/tools/perf/page_sets/data/webrtc_stresstest_cases_000.wpr.sha1
deleted file mode 100644
index 05423ffb..0000000
--- a/tools/perf/page_sets/data/webrtc_stresstest_cases_000.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a618ed5609bd3f2e4f0fc754a3c6457c82c75775
\ No newline at end of file
diff --git a/tools/perf/page_sets/data/webrtc_stresstest_cases_001.wpr.sha1 b/tools/perf/page_sets/data/webrtc_stresstest_cases_001.wpr.sha1
deleted file mode 100644
index e5d19566..0000000
--- a/tools/perf/page_sets/data/webrtc_stresstest_cases_001.wpr.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc4585717167caa2c3a014e9585b722d2d53cb0c
\ No newline at end of file
diff --git a/tools/perf/page_sets/update_webrtc_cases b/tools/perf/page_sets/update_webrtc_cases
new file mode 100755
index 0000000..4bed428
--- /dev/null
+++ b/tools/perf/page_sets/update_webrtc_cases
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import urllib2
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_DESTINATION_DIR = os.path.join(SCRIPT_DIR, 'webrtc_cases')
+
+WEBRTC_GITHUB_URL = 'https://github.com/webrtc/'
+TEST_PAGES_LOCATION_BY_REPO = {
+ 'test-pages': {
+ 'dirs': [
+ 'src/canvas-capture',
+ 'src/multiple-peerconnections',
+ ],
+ },
+ 'samples': {
+ 'dirs': [
+ 'src/content/datachannel/datatransfer',
+ 'src/content/getusermedia/resolution',
+ 'src/content/peerconnection/constraints',
+ 'src/content/peerconnection/audio',
+ ],
+ 'files': [
+ 'src/js/common.js',
+ ],
+ },
+ 'adapter': {
+ 'files': [
+ 'release/adapter.js',
+ ],
+ },
+}
+
+ADDED_SCRIPT_TAGS = (
+ '<script src="%s.js"></script>\n'
+ '<script src="adapter.js"></script>\n'
+ '<script src="common.js"></script>\n'
+ '</body></html>'
+)
+
+COPYRIGHT_NOTICE = [
+ 'Copyright 2017 The Chromium Authors. All rights reserved.\n',
+ 'Use of this source code is governed by a BSD-style license that can be\n',
+ 'found in the LICENSE file.\n',
+]
+
+COPYRIGHT_NOTICE_LENGTH = 8
+JS_COPYRIGHT_NOTICE = ' * '.join(['/*\n'] + COPYRIGHT_NOTICE) + ' */\n'
+HTML_COPYRIGHT_NOTICE = ' * '.join(['<!--\n'] + COPYRIGHT_NOTICE) + '-->\n'
+
+STRIPPED_TAGS_RE = ('( *<meta.*?>\n?| *<link.*?>\n?|'
+ ' *<script.*>.*?</script>\n?|</body>.*?</html>)')
+
+
+class TemporaryDirectory(object):
+ def __init__(self):
+ self._closed = False
+ self._name = None
+ self._name = tempfile.mkdtemp()
+ def __enter__(self):
+ return self._name
+ def __exit__(self, exc, value, tb):
+ if self._name and not self._closed:
+ shutil.rmtree(self._name)
+ self._closed = True
+
+
+def CopyJSFile(origin, destination, has_copyright=True):
+ contents = []
+ with open(origin) as input_file:
+ contents = input_file.readlines()
+
+ if has_copyright:
+ contents = contents[COPYRIGHT_NOTICE_LENGTH:]
+ contents = [JS_COPYRIGHT_NOTICE] + contents
+
+ with open(destination, 'w') as output_file:
+ output_file.writelines(contents)
+
+
+def CopyHTMLFile(test_name, origin, destination):
+ contents = ''
+ with open(origin) as input_file:
+ contents = input_file.read()
+
+ contents = re.sub(STRIPPED_TAGS_RE, '', contents,
+ flags=re.MULTILINE|re.DOTALL)
+ contents += ADDED_SCRIPT_TAGS % test_name
+
+ contents = [line + '\n' for line in contents.split('\n')]
+ contents = (contents[:1] + [HTML_COPYRIGHT_NOTICE] +
+ contents[COPYRIGHT_NOTICE_LENGTH:])
+
+ with open(destination, 'w') as output_file:
+ output_file.writelines(contents)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description=(
+ 'Update the WebRTC test pages.\n'
+ 'This script downloads the test pages from the WebRTC GitHub '
+ 'repository and copies them to the DESTINATION directory after '
+ 'processing them as follows: \n'
+ ' * Adds a copyright notice on top of the HTML and JS files.\n'
+ ' * Deletes the <meta> tags.\n'
+ ' * Discards the CSS files and corresponding link tags.\n'
+ ' * Discards the JS files and corresponding script tags except for '
+ 'main.js, adapter.js and common.js.\n'
+ ' * Renames the index.html and main.js files for each test to '
+ 'testname.html and testname.js.'))
+
+ parser.add_argument('-d', '--destination', default=DEFAULT_DESTINATION_DIR,
+ type=str, help='Where to save the WebRTC test pages.')
+
+ args = parser.parse_args()
+
+ if not os.path.isdir(args.destination):
+ os.makedirs(args.destination)
+
+ with TemporaryDirectory() as temp_dir:
+ for repo_name, test_dirs in TEST_PAGES_LOCATION_BY_REPO.items():
+ p = subprocess.Popen(['git', 'clone', WEBRTC_GITHUB_URL + repo_name],
+ cwd=temp_dir)
+ p.wait()
+
+ for test_dir in test_dirs.get('dirs', []):
+ test_dir = os.path.join(temp_dir, repo_name, test_dir)
+ test_name = os.path.basename(test_dir)
+
+ CopyJSFile(os.path.join(test_dir, 'js', 'main.js'),
+ os.path.join(args.destination, test_name + '.js'))
+ CopyHTMLFile(test_name, os.path.join(test_dir, 'index.html'),
+ os.path.join(args.destination, test_name + '.html'))
+
+ for test_file in test_dirs.get('files', []):
+ file_name = os.path.basename(test_file)
+ CopyJSFile(os.path.join(temp_dir, repo_name, test_file),
+ os.path.join(args.destination, file_name), False)
+
+
+if __name__ == '__main__':
+ main()
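
The helper above clones the upstream webrtc GitHub repositories, renames each test's index.html and js/main.js to <testname>.html and <testname>.js, and rewrites the HTML so the page only references the locally copied scripts (<testname>.js, adapter.js, common.js). It is run as tools/perf/page_sets/update_webrtc_cases, optionally with -d DESTINATION. A rough, standalone illustration of the tag rewrite done by CopyHTMLFile (the copyright-notice insertion is omitted), using a made-up snippet rather than a real upstream page:

  import re

  STRIPPED_TAGS_RE = ('( *<meta.*?>\n?| *<link.*?>\n?|'
                      ' *<script.*>.*?</script>\n?|</body>.*?</html>)')
  ADDED_SCRIPT_TAGS = (
      '<script src="%s.js"></script>\n'
      '<script src="adapter.js"></script>\n'
      '<script src="common.js"></script>\n'
      '</body></html>'
  )

  html = ('<html><head><meta charset="utf-8">\n'
          '<link rel="stylesheet" href="main.css">\n'
          '</head><body>\n'
          '<script src="js/main.js"></script>\n'
          '</body></html>')
  html = re.sub(STRIPPED_TAGS_RE, '', html, flags=re.MULTILINE | re.DOTALL)
  html += ADDED_SCRIPT_TAGS % 'constraints'
  # The meta/link/script tags are gone; the page now pulls in
  # constraints.js, adapter.js and common.js instead.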
diff --git a/tools/perf/page_sets/webrtc_cases.py b/tools/perf/page_sets/webrtc_cases.py
index 9ea37d8..5d8f3f5 100644
--- a/tools/perf/page_sets/webrtc_cases.py
+++ b/tools/perf/page_sets/webrtc_cases.py
@@ -7,14 +7,10 @@
from telemetry.page import page as page_module
-WEBRTC_TEST_PAGES_URL = 'https://test.webrtc.org/manual/'
-WEBRTC_GITHUB_SAMPLES_URL = 'https://webrtc.github.io/samples/src/content/'
-MEDIARECORDER_GITHUB_URL = 'https://rawgit.com/cricdecyan/mediarecorder/master/'
-
-
class WebrtcPage(page_module.Page):
def __init__(self, url, page_set, name):
+ assert url.startswith('file://webrtc_cases/')
super(WebrtcPage, self).__init__(
url=url, page_set=page_set, name=name)
@@ -28,7 +24,7 @@
def __init__(self, page_set):
super(Page1, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'getusermedia/resolution/',
+ url='file://webrtc_cases/resolution.html',
name='hd_local_stream_10s',
page_set=page_set)
@@ -42,7 +38,7 @@
def __init__(self, page_set):
super(Page2, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/constraints/',
+ url='file://webrtc_cases/constraints.html',
name='720p_call_45s',
page_set=page_set)
@@ -64,7 +60,7 @@
def __init__(self, page_set):
super(Page3, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'datachannel/datatransfer',
+ url='file://webrtc_cases/datatransfer.html',
name='30s_datachannel_transfer',
page_set=page_set)
@@ -81,7 +77,7 @@
def __init__(self, page_set):
super(Page4, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=OPUS',
+ url='file://webrtc_cases/audio.html?codec=OPUS',
name='audio_call_opus_10s',
page_set=page_set)
@@ -96,7 +92,7 @@
def __init__(self, page_set):
super(Page5, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=G722',
+ url='file://webrtc_cases/audio.html?codec=G722',
name='audio_call_g722_10s',
page_set=page_set)
@@ -111,7 +107,7 @@
def __init__(self, page_set):
super(Page6, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=PCMU',
+ url='file://webrtc_cases/audio.html?codec=PCMU',
name='audio_call_pcmu_10s',
page_set=page_set)
@@ -126,7 +122,7 @@
def __init__(self, page_set):
super(Page7, self).__init__(
- url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=ISAC_16K',
+ url='file://webrtc_cases/audio.html?codec=ISAC_16K',
name='audio_call_isac16k_10s',
page_set=page_set)
@@ -140,18 +136,15 @@
"""Why: Sets up a canvas capture stream connection to a peer connection."""
def __init__(self, page_set):
- canvas_capure_html = 'canvascapture/canvas_capture_peerconnection.html'
super(Page8, self).__init__(
- url=MEDIARECORDER_GITHUB_URL + canvas_capure_html,
+ url='file://webrtc_cases/canvas-capture.html',
name='canvas_capture_peer_connection',
page_set=page_set)
def RunPageInteractions(self, action_runner):
with action_runner.CreateInteraction('Action_Canvas_PeerConnection',
repeatable=False):
- action_runner.WaitForJavaScriptCondition('typeof draw !== "undefined"')
- action_runner.ExecuteJavaScript('draw();')
- action_runner.ExecuteJavaScript('doCanvasCaptureAndPeerConnection();')
+ action_runner.ClickElement('button[id="startButton"]')
action_runner.Wait(10)
@@ -160,7 +153,7 @@
def __init__(self, page_set):
super(Page9, self).__init__(
- url= WEBRTC_TEST_PAGES_URL + 'multiple-peerconnections/',
+ url='file://webrtc_cases/multiple-peerconnections.html',
name='multiple_peerconnections',
page_set=page_set)
@@ -181,7 +174,6 @@
def __init__(self):
super(WebrtcGetusermediaPageSet, self).__init__(
- archive_data_file='data/webrtc_getusermedia_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page1(self))
@@ -192,7 +184,6 @@
def __init__(self):
super(WebrtcStresstestPageSet, self).__init__(
- archive_data_file='data/webrtc_stresstest_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page9(self))
@@ -203,7 +194,6 @@
def __init__(self):
super(WebrtcPeerconnectionPageSet, self).__init__(
- archive_data_file='data/webrtc_peerconnection_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page2(self))
@@ -214,7 +204,6 @@
def __init__(self):
super(WebrtcDatachannelPageSet, self).__init__(
- archive_data_file='data/webrtc_datachannel_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page3(self))
@@ -225,7 +214,6 @@
def __init__(self):
super(WebrtcAudioPageSet, self).__init__(
- archive_data_file='data/webrtc_audio_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page4(self))
@@ -239,7 +227,6 @@
def __init__(self):
super(WebrtcRenderingPageSet, self).__init__(
- archive_data_file='data/webrtc_smoothness_cases.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
self.AddStory(Page2(self))
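
With the archive_data_file references removed, these story sets no longer depend on Web Page Replay archives, which is why the data/webrtc_*_cases.json and .wpr.sha1 files above are deleted. The new assert in WebrtcPage also makes any future story that points at a remote URL fail immediately; an illustrative sketch (the class and story name are made up for this note):

  class RemotePage(WebrtcPage):
    """Hypothetical story used only to show the new guard."""

    def __init__(self, page_set):
      super(RemotePage, self).__init__(
          url='https://webrtc.github.io/samples/',  # not file://webrtc_cases/...
          page_set=page_set,
          name='example_remote_story')

  # Instantiating RemotePage(...) raises AssertionError because
  # WebrtcPage.__init__ only accepts URLs starting with 'file://webrtc_cases/'.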
diff --git a/tools/perf/page_sets/webrtc_cases/adapter.js b/tools/perf/page_sets/webrtc_cases/adapter.js
new file mode 100644
index 0000000..3af94d04
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/adapter.js
@@ -0,0 +1,3035 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.adapter = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+ /* eslint-env node */
+'use strict';
+
+// SDP helpers.
+var SDPUtils = {};
+
+// Generate an alphanumeric identifier for cname or mids.
+// TODO: use UUIDs instead? https://gist.github.com/jed/982883
+SDPUtils.generateIdentifier = function() {
+ return Math.random().toString(36).substr(2, 10);
+};
+
+// The RTCP CNAME used by all peerconnections from the same JS.
+SDPUtils.localCName = SDPUtils.generateIdentifier();
+
+// Splits SDP into lines, dealing with both CRLF and LF.
+SDPUtils.splitLines = function(blob) {
+ return blob.trim().split('\n').map(function(line) {
+ return line.trim();
+ });
+};
+// Splits SDP into sessionpart and mediasections. Ensures CRLF.
+SDPUtils.splitSections = function(blob) {
+ var parts = blob.split('\nm=');
+ return parts.map(function(part, index) {
+ return (index > 0 ? 'm=' + part : part).trim() + '\r\n';
+ });
+};
+
+// Returns lines that start with a certain prefix.
+SDPUtils.matchPrefix = function(blob, prefix) {
+ return SDPUtils.splitLines(blob).filter(function(line) {
+ return line.indexOf(prefix) === 0;
+ });
+};
+
+// Parses an ICE candidate line. Sample input:
+// candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay raddr 8.8.8.8
+// rport 55996"
+SDPUtils.parseCandidate = function(line) {
+ var parts;
+ // Parse both variants.
+ if (line.indexOf('a=candidate:') === 0) {
+ parts = line.substring(12).split(' ');
+ } else {
+ parts = line.substring(10).split(' ');
+ }
+
+ var candidate = {
+ foundation: parts[0],
+ component: parts[1],
+ protocol: parts[2].toLowerCase(),
+ priority: parseInt(parts[3], 10),
+ ip: parts[4],
+ port: parseInt(parts[5], 10),
+ // skip parts[6] == 'typ'
+ type: parts[7]
+ };
+
+ for (var i = 8; i < parts.length; i += 2) {
+ switch (parts[i]) {
+ case 'raddr':
+ candidate.relatedAddress = parts[i + 1];
+ break;
+ case 'rport':
+ candidate.relatedPort = parseInt(parts[i + 1], 10);
+ break;
+ case 'tcptype':
+ candidate.tcpType = parts[i + 1];
+ break;
+ default: // Unknown extensions are silently ignored.
+ break;
+ }
+ }
+ return candidate;
+};
+
+// Translates a candidate object into SDP candidate attribute.
+SDPUtils.writeCandidate = function(candidate) {
+ var sdp = [];
+ sdp.push(candidate.foundation);
+ sdp.push(candidate.component);
+ sdp.push(candidate.protocol.toUpperCase());
+ sdp.push(candidate.priority);
+ sdp.push(candidate.ip);
+ sdp.push(candidate.port);
+
+ var type = candidate.type;
+ sdp.push('typ');
+ sdp.push(type);
+ if (type !== 'host' && candidate.relatedAddress &&
+ candidate.relatedPort) {
+ sdp.push('raddr');
+ sdp.push(candidate.relatedAddress); // was: relAddr
+ sdp.push('rport');
+ sdp.push(candidate.relatedPort); // was: relPort
+ }
+ if (candidate.tcpType && candidate.protocol.toLowerCase() === 'tcp') {
+ sdp.push('tcptype');
+ sdp.push(candidate.tcpType);
+ }
+ return 'candidate:' + sdp.join(' ');
+};
+
+// Parses an rtpmap line, returns RTCRtpCodecParameters. Sample input:
+// a=rtpmap:111 opus/48000/2
+SDPUtils.parseRtpMap = function(line) {
+ var parts = line.substr(9).split(' ');
+ var parsed = {
+ payloadType: parseInt(parts.shift(), 10) // was: id
+ };
+
+ parts = parts[0].split('/');
+
+ parsed.name = parts[0];
+ parsed.clockRate = parseInt(parts[1], 10); // was: clockrate
+ // was: channels
+ parsed.numChannels = parts.length === 3 ? parseInt(parts[2], 10) : 1;
+ return parsed;
+};
+
+// Generate an a=rtpmap line from RTCRtpCodecCapability or
+// RTCRtpCodecParameters.
+SDPUtils.writeRtpMap = function(codec) {
+ var pt = codec.payloadType;
+ if (codec.preferredPayloadType !== undefined) {
+ pt = codec.preferredPayloadType;
+ }
+ return 'a=rtpmap:' + pt + ' ' + codec.name + '/' + codec.clockRate +
+ (codec.numChannels !== 1 ? '/' + codec.numChannels : '') + '\r\n';
+};
+
+// Parses an a=extmap line (headerextension from RFC 5285). Sample input:
+// a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
+SDPUtils.parseExtmap = function(line) {
+ var parts = line.substr(9).split(' ');
+ return {
+ id: parseInt(parts[0], 10),
+ uri: parts[1]
+ };
+};
+
+// Generates a=extmap line from RTCRtpHeaderExtensionParameters or
+// RTCRtpHeaderExtension.
+SDPUtils.writeExtmap = function(headerExtension) {
+ return 'a=extmap:' + (headerExtension.id || headerExtension.preferredId) +
+ ' ' + headerExtension.uri + '\r\n';
+};
+
+// Parses an fmtp line, returns dictionary. Sample input:
+// a=fmtp:96 vbr=on;cng=on
+// Also deals with vbr=on; cng=on
+SDPUtils.parseFmtp = function(line) {
+ var parsed = {};
+ var kv;
+ var parts = line.substr(line.indexOf(' ') + 1).split(';');
+ for (var j = 0; j < parts.length; j++) {
+ kv = parts[j].trim().split('=');
+ parsed[kv[0].trim()] = kv[1];
+ }
+ return parsed;
+};
+
+// Generates an a=fmtp line from RTCRtpCodecCapability or RTCRtpCodecParameters.
+SDPUtils.writeFmtp = function(codec) {
+ var line = '';
+ var pt = codec.payloadType;
+ if (codec.preferredPayloadType !== undefined) {
+ pt = codec.preferredPayloadType;
+ }
+ if (codec.parameters && Object.keys(codec.parameters).length) {
+ var params = [];
+ Object.keys(codec.parameters).forEach(function(param) {
+ params.push(param + '=' + codec.parameters[param]);
+ });
+ line += 'a=fmtp:' + pt + ' ' + params.join(';') + '\r\n';
+ }
+ return line;
+};
+
+// Parses an rtcp-fb line, returns RTCPRtcpFeedback object. Sample input:
+// a=rtcp-fb:98 nack rpsi
+SDPUtils.parseRtcpFb = function(line) {
+ var parts = line.substr(line.indexOf(' ') + 1).split(' ');
+ return {
+ type: parts.shift(),
+ parameter: parts.join(' ')
+ };
+};
+// Generate a=rtcp-fb lines from RTCRtpCodecCapability or RTCRtpCodecParameters.
+SDPUtils.writeRtcpFb = function(codec) {
+ var lines = '';
+ var pt = codec.payloadType;
+ if (codec.preferredPayloadType !== undefined) {
+ pt = codec.preferredPayloadType;
+ }
+ if (codec.rtcpFeedback && codec.rtcpFeedback.length) {
+ // FIXME: special handling for trr-int?
+ codec.rtcpFeedback.forEach(function(fb) {
+ lines += 'a=rtcp-fb:' + pt + ' ' + fb.type +
+ (fb.parameter && fb.parameter.length ? ' ' + fb.parameter : '') +
+ '\r\n';
+ });
+ }
+ return lines;
+};
+
+// Parses an RFC 5576 ssrc media attribute. Sample input:
+// a=ssrc:3735928559 cname:something
+SDPUtils.parseSsrcMedia = function(line) {
+ var sp = line.indexOf(' ');
+ var parts = {
+ ssrc: parseInt(line.substr(7, sp - 7), 10)
+ };
+ var colon = line.indexOf(':', sp);
+ if (colon > -1) {
+ parts.attribute = line.substr(sp + 1, colon - sp - 1);
+ parts.value = line.substr(colon + 1);
+ } else {
+ parts.attribute = line.substr(sp + 1);
+ }
+ return parts;
+};
+
+// Extracts DTLS parameters from SDP media section or sessionpart.
+// FIXME: for consistency with other functions this should only
+// get the fingerprint line as input. See also getIceParameters.
+SDPUtils.getDtlsParameters = function(mediaSection, sessionpart) {
+ var lines = SDPUtils.splitLines(mediaSection);
+ // Search in session part, too.
+ lines = lines.concat(SDPUtils.splitLines(sessionpart));
+ var fpLine = lines.filter(function(line) {
+ return line.indexOf('a=fingerprint:') === 0;
+ })[0].substr(14);
+ // Note: a=setup line is ignored since we use the 'auto' role.
+ var dtlsParameters = {
+ role: 'auto',
+ fingerprints: [{
+ algorithm: fpLine.split(' ')[0],
+ value: fpLine.split(' ')[1]
+ }]
+ };
+ return dtlsParameters;
+};
+
+// Serializes DTLS parameters to SDP.
+SDPUtils.writeDtlsParameters = function(params, setupType) {
+ var sdp = 'a=setup:' + setupType + '\r\n';
+ params.fingerprints.forEach(function(fp) {
+ sdp += 'a=fingerprint:' + fp.algorithm + ' ' + fp.value + '\r\n';
+ });
+ return sdp;
+};
+// Parses ICE information from SDP media section or sessionpart.
+// FIXME: for consistency with other functions this should only
+// get the ice-ufrag and ice-pwd lines as input.
+SDPUtils.getIceParameters = function(mediaSection, sessionpart) {
+ var lines = SDPUtils.splitLines(mediaSection);
+ // Search in session part, too.
+ lines = lines.concat(SDPUtils.splitLines(sessionpart));
+ var iceParameters = {
+ usernameFragment: lines.filter(function(line) {
+ return line.indexOf('a=ice-ufrag:') === 0;
+ })[0].substr(12),
+ password: lines.filter(function(line) {
+ return line.indexOf('a=ice-pwd:') === 0;
+ })[0].substr(10)
+ };
+ return iceParameters;
+};
+
+// Serializes ICE parameters to SDP.
+SDPUtils.writeIceParameters = function(params) {
+ return 'a=ice-ufrag:' + params.usernameFragment + '\r\n' +
+ 'a=ice-pwd:' + params.password + '\r\n';
+};
+
+// Parses the SDP media section and returns RTCRtpParameters.
+SDPUtils.parseRtpParameters = function(mediaSection) {
+ var description = {
+ codecs: [],
+ headerExtensions: [],
+ fecMechanisms: [],
+ rtcp: []
+ };
+ var lines = SDPUtils.splitLines(mediaSection);
+ var mline = lines[0].split(' ');
+ for (var i = 3; i < mline.length; i++) { // find all codecs from mline[3..]
+ var pt = mline[i];
+ var rtpmapline = SDPUtils.matchPrefix(
+ mediaSection, 'a=rtpmap:' + pt + ' ')[0];
+ if (rtpmapline) {
+ var codec = SDPUtils.parseRtpMap(rtpmapline);
+ var fmtps = SDPUtils.matchPrefix(
+ mediaSection, 'a=fmtp:' + pt + ' ');
+ // Only the first a=fmtp:<pt> is considered.
+ codec.parameters = fmtps.length ? SDPUtils.parseFmtp(fmtps[0]) : {};
+ codec.rtcpFeedback = SDPUtils.matchPrefix(
+ mediaSection, 'a=rtcp-fb:' + pt + ' ')
+ .map(SDPUtils.parseRtcpFb);
+ description.codecs.push(codec);
+ // parse FEC mechanisms from rtpmap lines.
+ switch (codec.name.toUpperCase()) {
+ case 'RED':
+ case 'ULPFEC':
+ description.fecMechanisms.push(codec.name.toUpperCase());
+ break;
+ default: // only RED and ULPFEC are recognized as FEC mechanisms.
+ break;
+ }
+ }
+ }
+ SDPUtils.matchPrefix(mediaSection, 'a=extmap:').forEach(function(line) {
+ description.headerExtensions.push(SDPUtils.parseExtmap(line));
+ });
+ // FIXME: parse rtcp.
+ return description;
+};
+
+// Generates parts of the SDP media section describing the capabilities /
+// parameters.
+SDPUtils.writeRtpDescription = function(kind, caps) {
+ var sdp = '';
+
+ // Build the mline.
+ sdp += 'm=' + kind + ' ';
+ sdp += caps.codecs.length > 0 ? '9' : '0'; // reject if no codecs.
+ sdp += ' UDP/TLS/RTP/SAVPF ';
+ sdp += caps.codecs.map(function(codec) {
+ if (codec.preferredPayloadType !== undefined) {
+ return codec.preferredPayloadType;
+ }
+ return codec.payloadType;
+ }).join(' ') + '\r\n';
+
+ sdp += 'c=IN IP4 0.0.0.0\r\n';
+ sdp += 'a=rtcp:9 IN IP4 0.0.0.0\r\n';
+
+ // Add a=rtpmap lines for each codec. Also fmtp and rtcp-fb.
+ caps.codecs.forEach(function(codec) {
+ sdp += SDPUtils.writeRtpMap(codec);
+ sdp += SDPUtils.writeFmtp(codec);
+ sdp += SDPUtils.writeRtcpFb(codec);
+ });
+ var maxptime = 0;
+ caps.codecs.forEach(function(codec) {
+ if (codec.maxptime > maxptime) {
+ maxptime = codec.maxptime;
+ }
+ });
+ if (maxptime > 0) {
+ sdp += 'a=maxptime:' + maxptime + '\r\n';
+ }
+ sdp += 'a=rtcp-mux\r\n';
+
+ caps.headerExtensions.forEach(function(extension) {
+ sdp += SDPUtils.writeExtmap(extension);
+ });
+ // FIXME: write fecMechanisms.
+ return sdp;
+};
+
+// Parses the SDP media section and returns an array of
+// RTCRtpEncodingParameters.
+SDPUtils.parseRtpEncodingParameters = function(mediaSection) {
+ var encodingParameters = [];
+ var description = SDPUtils.parseRtpParameters(mediaSection);
+ var hasRed = description.fecMechanisms.indexOf('RED') !== -1;
+ var hasUlpfec = description.fecMechanisms.indexOf('ULPFEC') !== -1;
+
+ // filter a=ssrc:... cname:, ignore PlanB-msid
+ var ssrcs = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
+ .map(function(line) {
+ return SDPUtils.parseSsrcMedia(line);
+ })
+ .filter(function(parts) {
+ return parts.attribute === 'cname';
+ });
+ var primarySsrc = ssrcs.length > 0 && ssrcs[0].ssrc;
+ var secondarySsrc;
+
+ var flows = SDPUtils.matchPrefix(mediaSection, 'a=ssrc-group:FID')
+ .map(function(line) {
+ var parts = line.split(' ');
+ parts.shift();
+ return parts.map(function(part) {
+ return parseInt(part, 10);
+ });
+ });
+ if (flows.length > 0 && flows[0].length > 1 && flows[0][0] === primarySsrc) {
+ secondarySsrc = flows[0][1];
+ }
+
+ description.codecs.forEach(function(codec) {
+ if (codec.name.toUpperCase() === 'RTX' && codec.parameters.apt) {
+ var encParam = {
+ ssrc: primarySsrc,
+ codecPayloadType: parseInt(codec.parameters.apt, 10),
+ rtx: {
+ ssrc: secondarySsrc
+ }
+ };
+ encodingParameters.push(encParam);
+ if (hasRed) {
+ encParam = JSON.parse(JSON.stringify(encParam));
+ encParam.fec = {
+ ssrc: secondarySsrc,
+ mechanism: hasUlpfec ? 'red+ulpfec' : 'red'
+ };
+ encodingParameters.push(encParam);
+ }
+ }
+ });
+ if (encodingParameters.length === 0 && primarySsrc) {
+ encodingParameters.push({
+ ssrc: primarySsrc
+ });
+ }
+
+ // we support both b=AS and b=TIAS but interpret AS as TIAS.
+ var bandwidth = SDPUtils.matchPrefix(mediaSection, 'b=');
+ if (bandwidth.length) {
+ if (bandwidth[0].indexOf('b=TIAS:') === 0) {
+ bandwidth = parseInt(bandwidth[0].substr(7), 10);
+ } else if (bandwidth[0].indexOf('b=AS:') === 0) {
+ bandwidth = parseInt(bandwidth[0].substr(5), 10);
+ }
+ encodingParameters.forEach(function(params) {
+ params.maxBitrate = bandwidth;
+ });
+ }
+ return encodingParameters;
+};
+
+// parses http://draft.ortc.org/#rtcrtcpparameters*
+SDPUtils.parseRtcpParameters = function(mediaSection) {
+ var rtcpParameters = {};
+
+ var cname;
+ // Gets the first SSRC. Note that with RTX there might be multiple
+ // SSRCs.
+ var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
+ .map(function(line) {
+ return SDPUtils.parseSsrcMedia(line);
+ })
+ .filter(function(obj) {
+ return obj.attribute === 'cname';
+ })[0];
+ if (remoteSsrc) {
+ rtcpParameters.cname = remoteSsrc.value;
+ rtcpParameters.ssrc = remoteSsrc.ssrc;
+ }
+
+ // Edge uses the compound attribute instead of reducedSize
+ // compound is !reducedSize
+ var rsize = SDPUtils.matchPrefix(mediaSection, 'a=rtcp-rsize');
+ rtcpParameters.reducedSize = rsize.length > 0;
+ rtcpParameters.compound = rsize.length === 0;
+
+ // parses the rtcp-mux attribute.
+ // Note that Edge does not support unmuxed RTCP.
+ var mux = SDPUtils.matchPrefix(mediaSection, 'a=rtcp-mux');
+ rtcpParameters.mux = mux.length > 0;
+
+ return rtcpParameters;
+};
+
+// parses either a=msid: or a=ssrc:... msid lines and returns
+// the id of the MediaStream and MediaStreamTrack.
+SDPUtils.parseMsid = function(mediaSection) {
+ var parts;
+ var spec = SDPUtils.matchPrefix(mediaSection, 'a=msid:');
+ if (spec.length === 1) {
+ parts = spec[0].substr(7).split(' ');
+ return {stream: parts[0], track: parts[1]};
+ }
+ var planB = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
+ .map(function(line) {
+ return SDPUtils.parseSsrcMedia(line);
+ })
+ .filter(function(parts) {
+ return parts.attribute === 'msid';
+ });
+ if (planB.length > 0) {
+ parts = planB[0].value.split(' ');
+ return {stream: parts[0], track: parts[1]};
+ }
+};
+
+SDPUtils.writeSessionBoilerplate = function() {
+ // FIXME: sess-id should be an NTP timestamp.
+ return 'v=0\r\n' +
+ 'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n' +
+ 's=-\r\n' +
+ 't=0 0\r\n';
+};
+
+SDPUtils.writeMediaSection = function(transceiver, caps, type, stream) {
+ var sdp = SDPUtils.writeRtpDescription(transceiver.kind, caps);
+
+ // Map ICE parameters (ufrag, pwd) to SDP.
+ sdp += SDPUtils.writeIceParameters(
+ transceiver.iceGatherer.getLocalParameters());
+
+ // Map DTLS parameters to SDP.
+ sdp += SDPUtils.writeDtlsParameters(
+ transceiver.dtlsTransport.getLocalParameters(),
+ type === 'offer' ? 'actpass' : 'active');
+
+ sdp += 'a=mid:' + transceiver.mid + '\r\n';
+
+ if (transceiver.rtpSender && transceiver.rtpReceiver) {
+ sdp += 'a=sendrecv\r\n';
+ } else if (transceiver.rtpSender) {
+ sdp += 'a=sendonly\r\n';
+ } else if (transceiver.rtpReceiver) {
+ sdp += 'a=recvonly\r\n';
+ } else {
+ sdp += 'a=inactive\r\n';
+ }
+
+ if (transceiver.rtpSender) {
+ // spec.
+ var msid = 'msid:' + stream.id + ' ' +
+ transceiver.rtpSender.track.id + '\r\n';
+ sdp += 'a=' + msid;
+
+ // for Chrome.
+ sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
+ ' ' + msid;
+ if (transceiver.sendEncodingParameters[0].rtx) {
+ sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].rtx.ssrc +
+ ' ' + msid;
+ sdp += 'a=ssrc-group:FID ' +
+ transceiver.sendEncodingParameters[0].ssrc + ' ' +
+ transceiver.sendEncodingParameters[0].rtx.ssrc +
+ '\r\n';
+ }
+ }
+ // FIXME: this should be written by writeRtpDescription.
+ sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
+ ' cname:' + SDPUtils.localCName + '\r\n';
+ if (transceiver.rtpSender && transceiver.sendEncodingParameters[0].rtx) {
+ sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].rtx.ssrc +
+ ' cname:' + SDPUtils.localCName + '\r\n';
+ }
+ return sdp;
+};
+
+// Gets the direction from the mediaSection or the sessionpart.
+SDPUtils.getDirection = function(mediaSection, sessionpart) {
+ // Look for sendrecv, sendonly, recvonly, inactive, default to sendrecv.
+ var lines = SDPUtils.splitLines(mediaSection);
+ for (var i = 0; i < lines.length; i++) {
+ switch (lines[i]) {
+ case 'a=sendrecv':
+ case 'a=sendonly':
+ case 'a=recvonly':
+ case 'a=inactive':
+ return lines[i].substr(2);
+ default:
+ // FIXME: What should happen here?
+ }
+ }
+ if (sessionpart) {
+ return SDPUtils.getDirection(sessionpart);
+ }
+ return 'sendrecv';
+};
+
+SDPUtils.getKind = function(mediaSection) {
+ var lines = SDPUtils.splitLines(mediaSection);
+ var mline = lines[0].split(' ');
+ return mline[0].substr(2);
+};
+
+SDPUtils.isRejected = function(mediaSection) {
+ return mediaSection.split(' ', 2)[1] === '0';
+};
+
+// Expose public methods.
+module.exports = SDPUtils;
+
+},{}],2:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+
+'use strict';
+
+// Shimming starts here.
+(function() {
+ // Utils.
+ var utils = require('./utils');
+ var logging = utils.log;
+ var browserDetails = utils.browserDetails;
+ // Export to the adapter global object visible in the browser.
+ module.exports.browserDetails = browserDetails;
+ module.exports.extractVersion = utils.extractVersion;
+ module.exports.disableLog = utils.disableLog;
+
+ // Uncomment the line below if you want logging to occur, including logging
+ // for the switch statement below. Can also be turned on in the browser via
+ // adapter.disableLog(false), but then logging from the switch statement below
+ // will not appear.
+ // require('./utils').disableLog(false);
+
+ // Browser shims.
+ var chromeShim = require('./chrome/chrome_shim') || null;
+ var edgeShim = require('./edge/edge_shim') || null;
+ var firefoxShim = require('./firefox/firefox_shim') || null;
+ var safariShim = require('./safari/safari_shim') || null;
+
+ // Shim browser if found.
+ switch (browserDetails.browser) {
+ case 'chrome':
+ if (!chromeShim || !chromeShim.shimPeerConnection) {
+ logging('Chrome shim is not included in this adapter release.');
+ return;
+ }
+ logging('adapter.js shimming chrome.');
+ // Export to the adapter global object visible in the browser.
+ module.exports.browserShim = chromeShim;
+
+ chromeShim.shimGetUserMedia();
+ chromeShim.shimMediaStream();
+ utils.shimCreateObjectURL();
+ chromeShim.shimSourceObject();
+ chromeShim.shimPeerConnection();
+ chromeShim.shimOnTrack();
+ chromeShim.shimGetSendersWithDtmf();
+ break;
+ case 'firefox':
+ if (!firefoxShim || !firefoxShim.shimPeerConnection) {
+ logging('Firefox shim is not included in this adapter release.');
+ return;
+ }
+ logging('adapter.js shimming firefox.');
+ // Export to the adapter global object visible in the browser.
+ module.exports.browserShim = firefoxShim;
+
+ firefoxShim.shimGetUserMedia();
+ utils.shimCreateObjectURL();
+ firefoxShim.shimSourceObject();
+ firefoxShim.shimPeerConnection();
+ firefoxShim.shimOnTrack();
+ break;
+ case 'edge':
+ if (!edgeShim || !edgeShim.shimPeerConnection) {
+ logging('MS edge shim is not included in this adapter release.');
+ return;
+ }
+ logging('adapter.js shimming edge.');
+ // Export to the adapter global object visible in the browser.
+ module.exports.browserShim = edgeShim;
+
+ edgeShim.shimGetUserMedia();
+ utils.shimCreateObjectURL();
+ edgeShim.shimPeerConnection();
+ break;
+ case 'safari':
+ if (!safariShim) {
+ logging('Safari shim is not included in this adapter release.');
+ return;
+ }
+ logging('adapter.js shimming safari.');
+ // Export to the adapter global object visible in the browser.
+ module.exports.browserShim = safariShim;
+
+ safariShim.shimGetUserMedia();
+ break;
+ default:
+ logging('Unsupported browser!');
+ }
+})();
+
+},{"./chrome/chrome_shim":3,"./edge/edge_shim":5,"./firefox/firefox_shim":7,"./safari/safari_shim":9,"./utils":10}],3:[function(require,module,exports){
+
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+var logging = require('../utils.js').log;
+var browserDetails = require('../utils.js').browserDetails;
+
+var chromeShim = {
+ shimMediaStream: function() {
+ window.MediaStream = window.MediaStream || window.webkitMediaStream;
+ },
+
+ shimOnTrack: function() {
+ if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
+ window.RTCPeerConnection.prototype)) {
+ Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
+ get: function() {
+ return this._ontrack;
+ },
+ set: function(f) {
+ var self = this;
+ if (this._ontrack) {
+ this.removeEventListener('track', this._ontrack);
+ this.removeEventListener('addstream', this._ontrackpoly);
+ }
+ this.addEventListener('track', this._ontrack = f);
+ this.addEventListener('addstream', this._ontrackpoly = function(e) {
+ // onaddstream does not fire when a track is added to an existing
+ // stream. But stream.onaddtrack is implemented so we use that.
+ e.stream.addEventListener('addtrack', function(te) {
+ var event = new Event('track');
+ event.track = te.track;
+ event.receiver = {track: te.track};
+ event.streams = [e.stream];
+ self.dispatchEvent(event);
+ });
+ e.stream.getTracks().forEach(function(track) {
+ var event = new Event('track');
+ event.track = track;
+ event.receiver = {track: track};
+ event.streams = [e.stream];
+ this.dispatchEvent(event);
+ }.bind(this));
+ }.bind(this));
+ }
+ });
+ }
+ },
+
+ shimGetSendersWithDtmf: function() {
+ if (typeof window === 'object' && window.RTCPeerConnection &&
+ !('getSenders' in RTCPeerConnection.prototype) &&
+ 'createDTMFSender' in RTCPeerConnection.prototype) {
+ RTCPeerConnection.prototype.getSenders = function() {
+ return this._senders;
+ };
+ var origAddStream = RTCPeerConnection.prototype.addStream;
+ var origRemoveStream = RTCPeerConnection.prototype.removeStream;
+
+ RTCPeerConnection.prototype.addStream = function(stream) {
+ var pc = this;
+ pc._senders = pc._senders || [];
+ origAddStream.apply(pc, [stream]);
+ stream.getTracks().forEach(function(track) {
+ pc._senders.push({
+ track: track,
+ get dtmf() {
+ if (this._dtmf === undefined) {
+ if (track.kind === 'audio') {
+ this._dtmf = pc.createDTMFSender(track);
+ } else {
+ this._dtmf = null;
+ }
+ }
+ return this._dtmf;
+ }
+ });
+ });
+ };
+
+ RTCPeerConnection.prototype.removeStream = function(stream) {
+ var pc = this;
+ pc._senders = pc._senders || [];
+ origRemoveStream.apply(pc, [stream]);
+ stream.getTracks().forEach(function(track) {
+ var sender = pc._senders.find(function(s) {
+ return s.track === track;
+ });
+ if (sender) {
+ pc._senders.splice(pc._senders.indexOf(sender), 1); // remove sender
+ }
+ });
+ };
+ }
+ },
+
+ shimSourceObject: function() {
+ if (typeof window === 'object') {
+ if (window.HTMLMediaElement &&
+ !('srcObject' in window.HTMLMediaElement.prototype)) {
+ // Shim the srcObject property, once, when HTMLMediaElement is found.
+ Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
+ get: function() {
+ return this._srcObject;
+ },
+ set: function(stream) {
+ var self = this;
+ // Use _srcObject as a private property for this shim
+ this._srcObject = stream;
+ if (this.src) {
+ URL.revokeObjectURL(this.src);
+ }
+
+ if (!stream) {
+ this.src = '';
+ return undefined;
+ }
+ this.src = URL.createObjectURL(stream);
+ // We need to recreate the blob url when a track is added or
+ // removed. Doing it manually since we want to avoid a recursion.
+ stream.addEventListener('addtrack', function() {
+ if (self.src) {
+ URL.revokeObjectURL(self.src);
+ }
+ self.src = URL.createObjectURL(stream);
+ });
+ stream.addEventListener('removetrack', function() {
+ if (self.src) {
+ URL.revokeObjectURL(self.src);
+ }
+ self.src = URL.createObjectURL(stream);
+ });
+ }
+ });
+ }
+ }
+ },
+
+ shimPeerConnection: function() {
+ // The RTCPeerConnection object.
+ if (!window.RTCPeerConnection) {
+ window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+ // Translate iceTransportPolicy to iceTransports,
+ // see https://code.google.com/p/webrtc/issues/detail?id=4869
+ // this was fixed in M56 along with unprefixing RTCPeerConnection.
+ logging('PeerConnection');
+ if (pcConfig && pcConfig.iceTransportPolicy) {
+ pcConfig.iceTransports = pcConfig.iceTransportPolicy;
+ }
+
+ return new webkitRTCPeerConnection(pcConfig, pcConstraints);
+ };
+ window.RTCPeerConnection.prototype = webkitRTCPeerConnection.prototype;
+ // wrap static methods. Currently just generateCertificate.
+ if (webkitRTCPeerConnection.generateCertificate) {
+ Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+ get: function() {
+ return webkitRTCPeerConnection.generateCertificate;
+ }
+ });
+ }
+ }
+
+ var origGetStats = RTCPeerConnection.prototype.getStats;
+ RTCPeerConnection.prototype.getStats = function(selector,
+ successCallback, errorCallback) {
+ var self = this;
+ var args = arguments;
+
+ // If selector is a function then we are in the old style stats so just
+ // pass back the original getStats format to avoid breaking old users.
+ if (arguments.length > 0 && typeof selector === 'function') {
+ return origGetStats.apply(this, arguments);
+ }
+
+ // When spec-style getStats is supported, return those when called with
+ // either no arguments or the selector argument is null.
+ if (origGetStats.length === 0 && (arguments.length === 0 ||
+ typeof arguments[0] !== 'function')) {
+ return origGetStats.apply(this, []);
+ }
+
+ var fixChromeStats_ = function(response) {
+ var standardReport = {};
+ var reports = response.result();
+ reports.forEach(function(report) {
+ var standardStats = {
+ id: report.id,
+ timestamp: report.timestamp,
+ type: {
+ localcandidate: 'local-candidate',
+ remotecandidate: 'remote-candidate'
+ }[report.type] || report.type
+ };
+ report.names().forEach(function(name) {
+ standardStats[name] = report.stat(name);
+ });
+ standardReport[standardStats.id] = standardStats;
+ });
+
+ return standardReport;
+ };
+
+ // shim getStats with maplike support
+ var makeMapStats = function(stats) {
+ return new Map(Object.keys(stats).map(function(key) {
+ return[key, stats[key]];
+ }));
+ };
+
+ if (arguments.length >= 2) {
+ var successCallbackWrapper_ = function(response) {
+ args[1](makeMapStats(fixChromeStats_(response)));
+ };
+
+ return origGetStats.apply(this, [successCallbackWrapper_,
+ arguments[0]]);
+ }
+
+ // promise-support
+ return new Promise(function(resolve, reject) {
+ origGetStats.apply(self, [
+ function(response) {
+ resolve(makeMapStats(fixChromeStats_(response)));
+ }, reject]);
+ }).then(successCallback, errorCallback);
+ };
+
+ // add promise support -- natively available in Chrome 51
+ if (browserDetails.version < 51) {
+ ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+ .forEach(function(method) {
+ var nativeMethod = RTCPeerConnection.prototype[method];
+ RTCPeerConnection.prototype[method] = function() {
+ var args = arguments;
+ var self = this;
+ var promise = new Promise(function(resolve, reject) {
+ nativeMethod.apply(self, [args[0], resolve, reject]);
+ });
+ if (args.length < 2) {
+ return promise;
+ }
+ return promise.then(function() {
+ args[1].apply(null, []);
+ },
+ function(err) {
+ if (args.length >= 3) {
+ args[2].apply(null, [err]);
+ }
+ });
+ };
+ });
+ }
+
+ // promise support for createOffer and createAnswer. Available (without
+ // bugs) since M52: crbug/619289
+ if (browserDetails.version < 52) {
+ ['createOffer', 'createAnswer'].forEach(function(method) {
+ var nativeMethod = RTCPeerConnection.prototype[method];
+ RTCPeerConnection.prototype[method] = function() {
+ var self = this;
+ if (arguments.length < 1 || (arguments.length === 1 &&
+ typeof arguments[0] === 'object')) {
+ var opts = arguments.length === 1 ? arguments[0] : undefined;
+ return new Promise(function(resolve, reject) {
+ nativeMethod.apply(self, [resolve, reject, opts]);
+ });
+ }
+ return nativeMethod.apply(this, arguments);
+ };
+ });
+ }
+
+ // shim implicit creation of RTCSessionDescription/RTCIceCandidate
+ ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+ .forEach(function(method) {
+ var nativeMethod = RTCPeerConnection.prototype[method];
+ RTCPeerConnection.prototype[method] = function() {
+ arguments[0] = new ((method === 'addIceCandidate') ?
+ RTCIceCandidate : RTCSessionDescription)(arguments[0]);
+ return nativeMethod.apply(this, arguments);
+ };
+ });
+
+ // support for addIceCandidate(null or undefined)
+ var nativeAddIceCandidate =
+ RTCPeerConnection.prototype.addIceCandidate;
+ RTCPeerConnection.prototype.addIceCandidate = function() {
+ if (!arguments[0]) {
+ if (arguments[1]) {
+ arguments[1].apply(null);
+ }
+ return Promise.resolve();
+ }
+ return nativeAddIceCandidate.apply(this, arguments);
+ };
+ }
+};
+
+
+// Expose public methods.
+module.exports = {
+ shimMediaStream: chromeShim.shimMediaStream,
+ shimOnTrack: chromeShim.shimOnTrack,
+ shimGetSendersWithDtmf: chromeShim.shimGetSendersWithDtmf,
+ shimSourceObject: chromeShim.shimSourceObject,
+ shimPeerConnection: chromeShim.shimPeerConnection,
+ shimGetUserMedia: require('./getusermedia')
+};
+
+},{"../utils.js":10,"./getusermedia":4}],4:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+var logging = require('../utils.js').log;
+var browserDetails = require('../utils.js').browserDetails;
+
+// Expose public methods.
+module.exports = function() {
+ var constraintsToChrome_ = function(c) {
+ if (typeof c !== 'object' || c.mandatory || c.optional) {
+ return c;
+ }
+ var cc = {};
+ Object.keys(c).forEach(function(key) {
+ if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
+ return;
+ }
+ var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
+ if (r.exact !== undefined && typeof r.exact === 'number') {
+ r.min = r.max = r.exact;
+ }
+ var oldname_ = function(prefix, name) {
+ if (prefix) {
+ return prefix + name.charAt(0).toUpperCase() + name.slice(1);
+ }
+ return (name === 'deviceId') ? 'sourceId' : name;
+ };
+ if (r.ideal !== undefined) {
+ cc.optional = cc.optional || [];
+ var oc = {};
+ if (typeof r.ideal === 'number') {
+ oc[oldname_('min', key)] = r.ideal;
+ cc.optional.push(oc);
+ oc = {};
+ oc[oldname_('max', key)] = r.ideal;
+ cc.optional.push(oc);
+ } else {
+ oc[oldname_('', key)] = r.ideal;
+ cc.optional.push(oc);
+ }
+ }
+ if (r.exact !== undefined && typeof r.exact !== 'number') {
+ cc.mandatory = cc.mandatory || {};
+ cc.mandatory[oldname_('', key)] = r.exact;
+ } else {
+ ['min', 'max'].forEach(function(mix) {
+ if (r[mix] !== undefined) {
+ cc.mandatory = cc.mandatory || {};
+ cc.mandatory[oldname_(mix, key)] = r[mix];
+ }
+ });
+ }
+ });
+ if (c.advanced) {
+ cc.optional = (cc.optional || []).concat(c.advanced);
+ }
+ return cc;
+ };
+
+ var shimConstraints_ = function(constraints, func) {
+ constraints = JSON.parse(JSON.stringify(constraints));
+ if (constraints && constraints.audio) {
+ constraints.audio = constraintsToChrome_(constraints.audio);
+ }
+ if (constraints && typeof constraints.video === 'object') {
+ // Shim facingMode for mobile, where it defaults to "user".
+ var face = constraints.video.facingMode;
+ face = face && ((typeof face === 'object') ? face : {ideal: face});
+ var getSupportedFacingModeLies = browserDetails.version < 59;
+
+ if ((face && (face.exact === 'user' || face.exact === 'environment' ||
+ face.ideal === 'user' || face.ideal === 'environment')) &&
+ !(navigator.mediaDevices.getSupportedConstraints &&
+ navigator.mediaDevices.getSupportedConstraints().facingMode &&
+ !getSupportedFacingModeLies)) {
+ delete constraints.video.facingMode;
+ if (face.exact === 'environment' || face.ideal === 'environment') {
+ // Look for "back" in label, or use last cam (typically back cam).
+ return navigator.mediaDevices.enumerateDevices()
+ .then(function(devices) {
+ devices = devices.filter(function(d) {
+ return d.kind === 'videoinput';
+ });
+ var back = devices.find(function(d) {
+ return d.label.toLowerCase().indexOf('back') !== -1;
+ }) || (devices.length && devices[devices.length - 1]);
+ if (back) {
+ constraints.video.deviceId = face.exact ? {exact: back.deviceId} :
+ {ideal: back.deviceId};
+ }
+ constraints.video = constraintsToChrome_(constraints.video);
+ logging('chrome: ' + JSON.stringify(constraints));
+ return func(constraints);
+ });
+ }
+ }
+ constraints.video = constraintsToChrome_(constraints.video);
+ }
+ logging('chrome: ' + JSON.stringify(constraints));
+ return func(constraints);
+ };
+
+ var shimError_ = function(e) {
+ return {
+ name: {
+ PermissionDeniedError: 'NotAllowedError',
+ ConstraintNotSatisfiedError: 'OverconstrainedError'
+ }[e.name] || e.name,
+ message: e.message,
+ constraint: e.constraintName,
+ toString: function() {
+ return this.name + (this.message && ': ') + this.message;
+ }
+ };
+ };
+
+ var getUserMedia_ = function(constraints, onSuccess, onError) {
+ shimConstraints_(constraints, function(c) {
+ navigator.webkitGetUserMedia(c, onSuccess, function(e) {
+ onError(shimError_(e));
+ });
+ });
+ };
+
+ navigator.getUserMedia = getUserMedia_;
+
+ // Returns the result of getUserMedia as a Promise.
+ var getUserMediaPromise_ = function(constraints) {
+ return new Promise(function(resolve, reject) {
+ navigator.getUserMedia(constraints, resolve, reject);
+ });
+ };
+
+ if (!navigator.mediaDevices) {
+ navigator.mediaDevices = {
+ getUserMedia: getUserMediaPromise_,
+ enumerateDevices: function() {
+ return new Promise(function(resolve) {
+ var kinds = {audio: 'audioinput', video: 'videoinput'};
+ return MediaStreamTrack.getSources(function(devices) {
+ resolve(devices.map(function(device) {
+ return {label: device.label,
+ kind: kinds[device.kind],
+ deviceId: device.id,
+ groupId: ''};
+ }));
+ });
+ });
+ },
+ getSupportedConstraints: function() {
+ return {
+ deviceId: true, echoCancellation: true, facingMode: true,
+ frameRate: true, height: true, width: true
+ };
+ }
+ };
+ }
+
+ // A shim for getUserMedia method on the mediaDevices object.
+ // TODO(KaptenJansson) remove once implemented in Chrome stable.
+ if (!navigator.mediaDevices.getUserMedia) {
+ navigator.mediaDevices.getUserMedia = function(constraints) {
+ return getUserMediaPromise_(constraints);
+ };
+ } else {
+ // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
+ // function which returns a Promise, it does not accept spec-style
+ // constraints.
+ var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+ bind(navigator.mediaDevices);
+ navigator.mediaDevices.getUserMedia = function(cs) {
+ return shimConstraints_(cs, function(c) {
+ return origGetUserMedia(c).then(function(stream) {
+ if (c.audio && !stream.getAudioTracks().length ||
+ c.video && !stream.getVideoTracks().length) {
+ stream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ throw new DOMException('', 'NotFoundError');
+ }
+ return stream;
+ }, function(e) {
+ return Promise.reject(shimError_(e));
+ });
+ });
+ };
+ }
+
+ // Dummy devicechange event methods.
+ // TODO(KaptenJansson) remove once implemented in Chrome stable.
+ if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
+ navigator.mediaDevices.addEventListener = function() {
+ logging('Dummy mediaDevices.addEventListener called.');
+ };
+ }
+ if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
+ navigator.mediaDevices.removeEventListener = function() {
+ logging('Dummy mediaDevices.removeEventListener called.');
+ };
+ }
+};
+
+},{"../utils.js":10}],5:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var SDPUtils = require('sdp');
+var browserDetails = require('../utils').browserDetails;
+
+// sort tracks such that they follow an a-v-a-v...
+// pattern.
+function sortTracks(tracks) {
+ var audioTracks = tracks.filter(function(track) {
+ return track.kind === 'audio';
+ });
+ var videoTracks = tracks.filter(function(track) {
+ return track.kind === 'video';
+ });
+ tracks = [];
+ while (audioTracks.length || videoTracks.length) {
+ if (audioTracks.length) {
+ tracks.push(audioTracks.shift());
+ }
+ if (videoTracks.length) {
+ tracks.push(videoTracks.shift());
+ }
+ }
+ return tracks;
+}
+
+// Edge does not like
+// 1) stun:
+// 2) turn: that does not have all of turn:host:port?transport=udp
+// 3) turn: with ipv6 addresses
+// 4) turn: occurring multiple times
+function filterIceServers(iceServers) {
+ var hasTurn = false;
+ iceServers = JSON.parse(JSON.stringify(iceServers));
+ return iceServers.filter(function(server) {
+ if (server && (server.urls || server.url)) {
+ var urls = server.urls || server.url;
+ var isString = typeof urls === 'string';
+ if (isString) {
+ urls = [urls];
+ }
+ urls = urls.filter(function(url) {
+ var validTurn = url.indexOf('turn:') === 0 &&
+ url.indexOf('transport=udp') !== -1 &&
+ url.indexOf('turn:[') === -1 &&
+ !hasTurn;
+
+ if (validTurn) {
+ hasTurn = true;
+ return true;
+ }
+ return url.indexOf('stun:') === 0 &&
+ browserDetails.version >= 14393;
+ });
+
+ delete server.url;
+ server.urls = isString ? urls[0] : urls;
+ return !!urls.length;
+ }
+ return false;
+ });
+}
+
+var edgeShim = {
+ shimPeerConnection: function() {
+ if (window.RTCIceGatherer) {
+ // ORTC defines an RTCIceCandidate object but no constructor.
+ // Not implemented in Edge.
+ if (!window.RTCIceCandidate) {
+ window.RTCIceCandidate = function(args) {
+ return args;
+ };
+ }
+ // ORTC does not have a session description object but
+ // other browsers (i.e. Chrome) that will support both PC and ORTC
+ // in the future might have this defined already.
+ if (!window.RTCSessionDescription) {
+ window.RTCSessionDescription = function(args) {
+ return args;
+ };
+ }
+      // This adds an additional event listener to MediaStreamTrack that
+      // signals when a track's enabled property is changed. Workaround for a
+      // bug in addStream; see below. No longer required in 15025+.
+ if (browserDetails.version < 15025) {
+ var origMSTEnabled = Object.getOwnPropertyDescriptor(
+ MediaStreamTrack.prototype, 'enabled');
+ Object.defineProperty(MediaStreamTrack.prototype, 'enabled', {
+ set: function(value) {
+ origMSTEnabled.set.call(this, value);
+ var ev = new Event('enabled');
+ ev.enabled = value;
+ this.dispatchEvent(ev);
+ }
+ });
+ }
+ }
+
+ window.RTCPeerConnection = function(config) {
+ var self = this;
+
+ var _eventTarget = document.createDocumentFragment();
+ ['addEventListener', 'removeEventListener', 'dispatchEvent']
+ .forEach(function(method) {
+ self[method] = _eventTarget[method].bind(_eventTarget);
+ });
+
+ this.onicecandidate = null;
+ this.onaddstream = null;
+ this.ontrack = null;
+ this.onremovestream = null;
+ this.onsignalingstatechange = null;
+ this.oniceconnectionstatechange = null;
+ this.onicegatheringstatechange = null;
+ this.onnegotiationneeded = null;
+ this.ondatachannel = null;
+
+ this.localStreams = [];
+ this.remoteStreams = [];
+ this.getLocalStreams = function() {
+ return self.localStreams;
+ };
+ this.getRemoteStreams = function() {
+ return self.remoteStreams;
+ };
+
+ this.localDescription = new RTCSessionDescription({
+ type: '',
+ sdp: ''
+ });
+ this.remoteDescription = new RTCSessionDescription({
+ type: '',
+ sdp: ''
+ });
+ this.signalingState = 'stable';
+ this.iceConnectionState = 'new';
+ this.iceGatheringState = 'new';
+
+ this.iceOptions = {
+ gatherPolicy: 'all',
+ iceServers: []
+ };
+ if (config && config.iceTransportPolicy) {
+ switch (config.iceTransportPolicy) {
+ case 'all':
+ case 'relay':
+ this.iceOptions.gatherPolicy = config.iceTransportPolicy;
+ break;
+ default:
+ // don't set iceTransportPolicy.
+ break;
+ }
+ }
+ this.usingBundle = config && config.bundlePolicy === 'max-bundle';
+
+ if (config && config.iceServers) {
+ this.iceOptions.iceServers = filterIceServers(config.iceServers);
+ }
+ this._config = config;
+
+      // per-track iceGatherers, iceTransports, dtlsTransports, rtpSenders,
+      // ... everything that is needed to describe an SDP m-line.
+ this.transceivers = [];
+
+      // Since the iceGatherer is currently created in createOffer but we
+      // must not emit candidates until after setLocalDescription, we buffer
+      // them in this array.
+ this._localIceCandidatesBuffer = [];
+ };
+
+ window.RTCPeerConnection.prototype._emitGatheringStateChange = function() {
+ var event = new Event('icegatheringstatechange');
+ this.dispatchEvent(event);
+ if (this.onicegatheringstatechange !== null) {
+ this.onicegatheringstatechange(event);
+ }
+ };
+
+ window.RTCPeerConnection.prototype._emitBufferedCandidates = function() {
+ var self = this;
+ var sections = SDPUtils.splitSections(self.localDescription.sdp);
+ // FIXME: need to apply ice candidates in a way which is async but
+ // in-order
+ this._localIceCandidatesBuffer.forEach(function(event) {
+ var end = !event.candidate || Object.keys(event.candidate).length === 0;
+ if (end) {
+ for (var j = 1; j < sections.length; j++) {
+ if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
+ sections[j] += 'a=end-of-candidates\r\n';
+ }
+ }
+ } else {
+ sections[event.candidate.sdpMLineIndex + 1] +=
+ 'a=' + event.candidate.candidate + '\r\n';
+ }
+ self.localDescription.sdp = sections.join('');
+ self.dispatchEvent(event);
+ if (self.onicecandidate !== null) {
+ self.onicecandidate(event);
+ }
+ if (!event.candidate && self.iceGatheringState !== 'complete') {
+ var complete = self.transceivers.every(function(transceiver) {
+ return transceiver.iceGatherer &&
+ transceiver.iceGatherer.state === 'completed';
+ });
+          if (complete && self.iceGatheringState !== 'complete') {
+ self.iceGatheringState = 'complete';
+ self._emitGatheringStateChange();
+ }
+ }
+ });
+ this._localIceCandidatesBuffer = [];
+ };
+
+ window.RTCPeerConnection.prototype.getConfiguration = function() {
+ return this._config;
+ };
+
+ window.RTCPeerConnection.prototype.addStream = function(stream) {
+ if (browserDetails.version >= 15025) {
+ this.localStreams.push(stream);
+ } else {
+        // Cloning is necessary mostly for local demos; attaching the same
+        // track directly to two different senders does not work
+        // (build 10547). Fixed in 15025 (or earlier).
+ var clonedStream = stream.clone();
+ stream.getTracks().forEach(function(track, idx) {
+ var clonedTrack = clonedStream.getTracks()[idx];
+ track.addEventListener('enabled', function(event) {
+ clonedTrack.enabled = event.enabled;
+ });
+ });
+ this.localStreams.push(clonedStream);
+ }
+ this._maybeFireNegotiationNeeded();
+ };
+
+ window.RTCPeerConnection.prototype.removeStream = function(stream) {
+ var idx = this.localStreams.indexOf(stream);
+ if (idx > -1) {
+ this.localStreams.splice(idx, 1);
+ this._maybeFireNegotiationNeeded();
+ }
+ };
+
+ window.RTCPeerConnection.prototype.getSenders = function() {
+ return this.transceivers.filter(function(transceiver) {
+ return !!transceiver.rtpSender;
+ })
+ .map(function(transceiver) {
+ return transceiver.rtpSender;
+ });
+ };
+
+ window.RTCPeerConnection.prototype.getReceivers = function() {
+ return this.transceivers.filter(function(transceiver) {
+ return !!transceiver.rtpReceiver;
+ })
+ .map(function(transceiver) {
+ return transceiver.rtpReceiver;
+ });
+ };
+
+ // Determines the intersection of local and remote capabilities.
+ window.RTCPeerConnection.prototype._getCommonCapabilities =
+ function(localCapabilities, remoteCapabilities) {
+ var commonCapabilities = {
+ codecs: [],
+ headerExtensions: [],
+ fecMechanisms: []
+ };
+
+ var findCodecByPayloadType = function(pt, codecs) {
+ pt = parseInt(pt, 10);
+ for (var i = 0; i < codecs.length; i++) {
+ if (codecs[i].payloadType === pt ||
+ codecs[i].preferredPayloadType === pt) {
+ return codecs[i];
+ }
+ }
+ };
+
+ var rtxCapabilityMatches = function(lRtx, rRtx, lCodecs, rCodecs) {
+ var lCodec = findCodecByPayloadType(lRtx.parameters.apt, lCodecs);
+ var rCodec = findCodecByPayloadType(rRtx.parameters.apt, rCodecs);
+ return lCodec && rCodec &&
+ lCodec.name.toLowerCase() === rCodec.name.toLowerCase();
+ };
+
+ localCapabilities.codecs.forEach(function(lCodec) {
+ for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
+ var rCodec = remoteCapabilities.codecs[i];
+ if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
+ lCodec.clockRate === rCodec.clockRate) {
+ if (lCodec.name.toLowerCase() === 'rtx' &&
+ lCodec.parameters && rCodec.parameters.apt) {
+              // For RTX we need to find the local rtx that has an apt
+              // which points to the same local codec as the remote one.
+ if (!rtxCapabilityMatches(lCodec, rCodec,
+ localCapabilities.codecs, remoteCapabilities.codecs)) {
+ continue;
+ }
+ }
+ rCodec = JSON.parse(JSON.stringify(rCodec)); // deepcopy
+            // The number of channels is the highest number of channels both
+            // sides support, i.e. the minimum of the two.
+ rCodec.numChannels = Math.min(lCodec.numChannels,
+ rCodec.numChannels);
+ // push rCodec so we reply with offerer payload type
+ commonCapabilities.codecs.push(rCodec);
+
+ // determine common feedback mechanisms
+ rCodec.rtcpFeedback = rCodec.rtcpFeedback.filter(function(fb) {
+ for (var j = 0; j < lCodec.rtcpFeedback.length; j++) {
+ if (lCodec.rtcpFeedback[j].type === fb.type &&
+ lCodec.rtcpFeedback[j].parameter === fb.parameter) {
+ return true;
+ }
+ }
+ return false;
+ });
+ // FIXME: also need to determine .parameters
+ // see https://github.com/openpeer/ortc/issues/569
+ break;
+ }
+ }
+ });
+
+ localCapabilities.headerExtensions
+ .forEach(function(lHeaderExtension) {
+ for (var i = 0; i < remoteCapabilities.headerExtensions.length;
+ i++) {
+ var rHeaderExtension = remoteCapabilities.headerExtensions[i];
+ if (lHeaderExtension.uri === rHeaderExtension.uri) {
+ commonCapabilities.headerExtensions.push(rHeaderExtension);
+ break;
+ }
+ }
+ });
+
+ // FIXME: fecMechanisms
+ return commonCapabilities;
+ };
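+    // Illustrative example (not part of the upstream source): if the remote
+    // offer lists opus/48000/2 as payload type 109 while the local receiver
+    // capabilities use 111, the intersection keeps the remote entry, so the
+    // answer echoes the offerer's payload type 109.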
+
+ // Create ICE gatherer, ICE transport and DTLS transport.
+ window.RTCPeerConnection.prototype._createIceAndDtlsTransports =
+ function(mid, sdpMLineIndex) {
+ var self = this;
+ var iceGatherer = new RTCIceGatherer(self.iceOptions);
+ var iceTransport = new RTCIceTransport(iceGatherer);
+ iceGatherer.onlocalcandidate = function(evt) {
+ var event = new Event('icecandidate');
+ event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
+
+ var cand = evt.candidate;
+ var end = !cand || Object.keys(cand).length === 0;
+        // Edge emits an empty object for RTCIceCandidateComplete.
+ if (end) {
+ // polyfill since RTCIceGatherer.state is not implemented in
+ // Edge 10547 yet.
+ if (iceGatherer.state === undefined) {
+ iceGatherer.state = 'completed';
+ }
+ } else {
+ // RTCIceCandidate doesn't have a component, needs to be added
+ cand.component = iceTransport.component === 'RTCP' ? 2 : 1;
+ event.candidate.candidate = SDPUtils.writeCandidate(cand);
+ }
+
+ // update local description.
+ var sections = SDPUtils.splitSections(self.localDescription.sdp);
+ if (!end) {
+ sections[event.candidate.sdpMLineIndex + 1] +=
+ 'a=' + event.candidate.candidate + '\r\n';
+ } else {
+ sections[event.candidate.sdpMLineIndex + 1] +=
+ 'a=end-of-candidates\r\n';
+ }
+ self.localDescription.sdp = sections.join('');
+ var transceivers = self._pendingOffer ? self._pendingOffer :
+ self.transceivers;
+ var complete = transceivers.every(function(transceiver) {
+ return transceiver.iceGatherer &&
+ transceiver.iceGatherer.state === 'completed';
+ });
+
+ // Emit candidate if localDescription is set.
+ // Also emits null candidate when all gatherers are complete.
+ switch (self.iceGatheringState) {
+ case 'new':
+ if (!end) {
+ self._localIceCandidatesBuffer.push(event);
+ }
+ if (end && complete) {
+ self._localIceCandidatesBuffer.push(
+ new Event('icecandidate'));
+ }
+ break;
+ case 'gathering':
+ self._emitBufferedCandidates();
+ if (!end) {
+ self.dispatchEvent(event);
+ if (self.onicecandidate !== null) {
+ self.onicecandidate(event);
+ }
+ }
+ if (complete) {
+ self.dispatchEvent(new Event('icecandidate'));
+ if (self.onicecandidate !== null) {
+ self.onicecandidate(new Event('icecandidate'));
+ }
+ self.iceGatheringState = 'complete';
+ self._emitGatheringStateChange();
+ }
+ break;
+ case 'complete':
+ // should not happen... currently!
+ break;
+ default: // no-op.
+ break;
+ }
+ };
+ iceTransport.onicestatechange = function() {
+ self._updateConnectionState();
+ };
+
+ var dtlsTransport = new RTCDtlsTransport(iceTransport);
+ dtlsTransport.ondtlsstatechange = function() {
+ self._updateConnectionState();
+ };
+ dtlsTransport.onerror = function() {
+ // onerror does not set state to failed by itself.
+ dtlsTransport.state = 'failed';
+ self._updateConnectionState();
+ };
+
+ return {
+ iceGatherer: iceGatherer,
+ iceTransport: iceTransport,
+ dtlsTransport: dtlsTransport
+ };
+ };
+
+ // Start the RTP Sender and Receiver for a transceiver.
+ window.RTCPeerConnection.prototype._transceive = function(transceiver,
+ send, recv) {
+ var params = this._getCommonCapabilities(transceiver.localCapabilities,
+ transceiver.remoteCapabilities);
+ if (send && transceiver.rtpSender) {
+ params.encodings = transceiver.sendEncodingParameters;
+ params.rtcp = {
+ cname: SDPUtils.localCName
+ };
+ if (transceiver.recvEncodingParameters.length) {
+ params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
+ }
+ transceiver.rtpSender.send(params);
+ }
+ if (recv && transceiver.rtpReceiver) {
+ // remove RTX field in Edge 14942
+ if (transceiver.kind === 'video'
+ && transceiver.recvEncodingParameters
+ && browserDetails.version < 15019) {
+ transceiver.recvEncodingParameters.forEach(function(p) {
+ delete p.rtx;
+ });
+ }
+ params.encodings = transceiver.recvEncodingParameters;
+ params.rtcp = {
+ cname: transceiver.cname
+ };
+ if (transceiver.sendEncodingParameters.length) {
+ params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
+ }
+ transceiver.rtpReceiver.receive(params);
+ }
+ };
+
+ window.RTCPeerConnection.prototype.setLocalDescription =
+ function(description) {
+ var self = this;
+ var sections;
+ var sessionpart;
+ if (description.type === 'offer') {
+ // FIXME: What was the purpose of this empty if statement?
+ // if (!this._pendingOffer) {
+ // } else {
+ if (this._pendingOffer) {
+ // VERY limited support for SDP munging. Limited to:
+ // * changing the order of codecs
+ sections = SDPUtils.splitSections(description.sdp);
+ sessionpart = sections.shift();
+ sections.forEach(function(mediaSection, sdpMLineIndex) {
+ var caps = SDPUtils.parseRtpParameters(mediaSection);
+ self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
+ });
+ this.transceivers = this._pendingOffer;
+ delete this._pendingOffer;
+ }
+ } else if (description.type === 'answer') {
+ sections = SDPUtils.splitSections(self.remoteDescription.sdp);
+ sessionpart = sections.shift();
+ var isIceLite = SDPUtils.matchPrefix(sessionpart,
+ 'a=ice-lite').length > 0;
+ sections.forEach(function(mediaSection, sdpMLineIndex) {
+ var transceiver = self.transceivers[sdpMLineIndex];
+ var iceGatherer = transceiver.iceGatherer;
+ var iceTransport = transceiver.iceTransport;
+ var dtlsTransport = transceiver.dtlsTransport;
+ var localCapabilities = transceiver.localCapabilities;
+ var remoteCapabilities = transceiver.remoteCapabilities;
+
+ var rejected = mediaSection.split('\n', 1)[0]
+ .split(' ', 2)[1] === '0';
+
+ if (!rejected && !transceiver.isDatachannel) {
+ var remoteIceParameters = SDPUtils.getIceParameters(
+ mediaSection, sessionpart);
+ var remoteDtlsParameters = SDPUtils.getDtlsParameters(
+ mediaSection, sessionpart);
+ if (isIceLite) {
+ remoteDtlsParameters.role = 'server';
+ }
+
+ if (!self.usingBundle || sdpMLineIndex === 0) {
+ iceTransport.start(iceGatherer, remoteIceParameters,
+ isIceLite ? 'controlling' : 'controlled');
+ dtlsTransport.start(remoteDtlsParameters);
+ }
+
+ // Calculate intersection of capabilities.
+ var params = self._getCommonCapabilities(localCapabilities,
+ remoteCapabilities);
+
+ // Start the RTCRtpSender. The RTCRtpReceiver for this
+ // transceiver has already been started in setRemoteDescription.
+ self._transceive(transceiver,
+ params.codecs.length > 0,
+ false);
+ }
+ });
+ }
+
+ this.localDescription = {
+ type: description.type,
+ sdp: description.sdp
+ };
+ switch (description.type) {
+ case 'offer':
+ this._updateSignalingState('have-local-offer');
+ break;
+ case 'answer':
+ this._updateSignalingState('stable');
+ break;
+ default:
+ throw new TypeError('unsupported type "' + description.type +
+ '"');
+ }
+
+      // If a success callback was provided, emit ICE candidates after it
+      // has been executed. Otherwise, emit them after the returned promise
+      // resolves.
+ var hasCallback = arguments.length > 1 &&
+ typeof arguments[1] === 'function';
+ if (hasCallback) {
+ var cb = arguments[1];
+ window.setTimeout(function() {
+ cb();
+ if (self.iceGatheringState === 'new') {
+ self.iceGatheringState = 'gathering';
+ self._emitGatheringStateChange();
+ }
+ self._emitBufferedCandidates();
+ }, 0);
+ }
+ var p = Promise.resolve();
+ p.then(function() {
+ if (!hasCallback) {
+ if (self.iceGatheringState === 'new') {
+ self.iceGatheringState = 'gathering';
+ self._emitGatheringStateChange();
+ }
+ // Usually candidates will be emitted earlier.
+ window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
+ }
+ });
+ return p;
+ };
+
+ window.RTCPeerConnection.prototype.setRemoteDescription =
+ function(description) {
+ var self = this;
+ var stream = new MediaStream();
+ var receiverList = [];
+ var sections = SDPUtils.splitSections(description.sdp);
+ var sessionpart = sections.shift();
+ var isIceLite = SDPUtils.matchPrefix(sessionpart,
+ 'a=ice-lite').length > 0;
+ this.usingBundle = SDPUtils.matchPrefix(sessionpart,
+ 'a=group:BUNDLE ').length > 0;
+ sections.forEach(function(mediaSection, sdpMLineIndex) {
+ var lines = SDPUtils.splitLines(mediaSection);
+ var mline = lines[0].substr(2).split(' ');
+ var kind = mline[0];
+ var rejected = mline[1] === '0';
+ var direction = SDPUtils.getDirection(mediaSection, sessionpart);
+
+ var mid = SDPUtils.matchPrefix(mediaSection, 'a=mid:');
+ if (mid.length) {
+ mid = mid[0].substr(6);
+ } else {
+ mid = SDPUtils.generateIdentifier();
+ }
+
+ // Reject datachannels which are not implemented yet.
+ if (kind === 'application' && mline[2] === 'DTLS/SCTP') {
+ self.transceivers[sdpMLineIndex] = {
+ mid: mid,
+ isDatachannel: true
+ };
+ return;
+ }
+
+ var transceiver;
+ var iceGatherer;
+ var iceTransport;
+ var dtlsTransport;
+ var rtpSender;
+ var rtpReceiver;
+ var sendEncodingParameters;
+ var recvEncodingParameters;
+ var localCapabilities;
+
+ var track;
+ // FIXME: ensure the mediaSection has rtcp-mux set.
+ var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
+ var remoteIceParameters;
+ var remoteDtlsParameters;
+ if (!rejected) {
+ remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
+ sessionpart);
+ remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
+ sessionpart);
+ remoteDtlsParameters.role = 'client';
+ }
+ recvEncodingParameters =
+ SDPUtils.parseRtpEncodingParameters(mediaSection);
+
+ var cname;
+ // Gets the first SSRC. Note that with RTX there might be multiple
+ // SSRCs.
+ var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
+ .map(function(line) {
+ return SDPUtils.parseSsrcMedia(line);
+ })
+ .filter(function(obj) {
+ return obj.attribute === 'cname';
+ })[0];
+ if (remoteSsrc) {
+ cname = remoteSsrc.value;
+ }
+
+ var isComplete = SDPUtils.matchPrefix(mediaSection,
+ 'a=end-of-candidates', sessionpart).length > 0;
+ var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
+ .map(function(cand) {
+ return SDPUtils.parseCandidate(cand);
+ })
+ .filter(function(cand) {
+ return cand.component === '1';
+ });
+ if (description.type === 'offer' && !rejected) {
+ var transports = self.usingBundle && sdpMLineIndex > 0 ? {
+ iceGatherer: self.transceivers[0].iceGatherer,
+ iceTransport: self.transceivers[0].iceTransport,
+ dtlsTransport: self.transceivers[0].dtlsTransport
+ } : self._createIceAndDtlsTransports(mid, sdpMLineIndex);
+
+ if (isComplete && (!self.usingBundle || sdpMLineIndex === 0)) {
+ transports.iceTransport.setRemoteCandidates(cands);
+ }
+
+ localCapabilities = RTCRtpReceiver.getCapabilities(kind);
+
+ // filter RTX until additional stuff needed for RTX is implemented
+ // in adapter.js
+ if (browserDetails.version < 15019) {
+ localCapabilities.codecs = localCapabilities.codecs.filter(
+ function(codec) {
+ return codec.name !== 'rtx';
+ });
+ }
+
+ sendEncodingParameters = [{
+ ssrc: (2 * sdpMLineIndex + 2) * 1001
+ }];
+
+ if (direction === 'sendrecv' || direction === 'sendonly') {
+ rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport,
+ kind);
+
+ track = rtpReceiver.track;
+ receiverList.push([track, rtpReceiver]);
+ // FIXME: not correct when there are multiple streams but that
+ // is not currently supported in this shim.
+ stream.addTrack(track);
+ }
+
+ // FIXME: look at direction.
+ if (self.localStreams.length > 0 &&
+ self.localStreams[0].getTracks().length >= sdpMLineIndex) {
+ var localTrack;
+ if (kind === 'audio') {
+ localTrack = self.localStreams[0].getAudioTracks()[0];
+ } else if (kind === 'video') {
+ localTrack = self.localStreams[0].getVideoTracks()[0];
+ }
+ if (localTrack) {
+ // add RTX
+ if (browserDetails.version >= 15019 && kind === 'video') {
+ sendEncodingParameters[0].rtx = {
+ ssrc: (2 * sdpMLineIndex + 2) * 1001 + 1
+ };
+ }
+ rtpSender = new RTCRtpSender(localTrack,
+ transports.dtlsTransport);
+ }
+ }
+
+ self.transceivers[sdpMLineIndex] = {
+ iceGatherer: transports.iceGatherer,
+ iceTransport: transports.iceTransport,
+ dtlsTransport: transports.dtlsTransport,
+ localCapabilities: localCapabilities,
+ remoteCapabilities: remoteCapabilities,
+ rtpSender: rtpSender,
+ rtpReceiver: rtpReceiver,
+ kind: kind,
+ mid: mid,
+ cname: cname,
+ sendEncodingParameters: sendEncodingParameters,
+ recvEncodingParameters: recvEncodingParameters
+ };
+ // Start the RTCRtpReceiver now. The RTPSender is started in
+ // setLocalDescription.
+ self._transceive(self.transceivers[sdpMLineIndex],
+ false,
+ direction === 'sendrecv' || direction === 'sendonly');
+ } else if (description.type === 'answer' && !rejected) {
+ transceiver = self.transceivers[sdpMLineIndex];
+ iceGatherer = transceiver.iceGatherer;
+ iceTransport = transceiver.iceTransport;
+ dtlsTransport = transceiver.dtlsTransport;
+ rtpSender = transceiver.rtpSender;
+ rtpReceiver = transceiver.rtpReceiver;
+ sendEncodingParameters = transceiver.sendEncodingParameters;
+ localCapabilities = transceiver.localCapabilities;
+
+ self.transceivers[sdpMLineIndex].recvEncodingParameters =
+ recvEncodingParameters;
+ self.transceivers[sdpMLineIndex].remoteCapabilities =
+ remoteCapabilities;
+ self.transceivers[sdpMLineIndex].cname = cname;
+
+ if ((isIceLite || isComplete) && cands.length) {
+ iceTransport.setRemoteCandidates(cands);
+ }
+ if (!self.usingBundle || sdpMLineIndex === 0) {
+ iceTransport.start(iceGatherer, remoteIceParameters,
+ 'controlling');
+ dtlsTransport.start(remoteDtlsParameters);
+ }
+
+ self._transceive(transceiver,
+ direction === 'sendrecv' || direction === 'recvonly',
+ direction === 'sendrecv' || direction === 'sendonly');
+
+ if (rtpReceiver &&
+ (direction === 'sendrecv' || direction === 'sendonly')) {
+ track = rtpReceiver.track;
+ receiverList.push([track, rtpReceiver]);
+ stream.addTrack(track);
+ } else {
+ // FIXME: actually the receiver should be created later.
+ delete transceiver.rtpReceiver;
+ }
+ }
+ });
+
+ this.remoteDescription = {
+ type: description.type,
+ sdp: description.sdp
+ };
+ switch (description.type) {
+ case 'offer':
+ this._updateSignalingState('have-remote-offer');
+ break;
+ case 'answer':
+ this._updateSignalingState('stable');
+ break;
+ default:
+ throw new TypeError('unsupported type "' + description.type +
+ '"');
+ }
+ if (stream.getTracks().length) {
+ self.remoteStreams.push(stream);
+ window.setTimeout(function() {
+ var event = new Event('addstream');
+ event.stream = stream;
+ self.dispatchEvent(event);
+ if (self.onaddstream !== null) {
+ window.setTimeout(function() {
+ self.onaddstream(event);
+ }, 0);
+ }
+
+ receiverList.forEach(function(item) {
+ var track = item[0];
+ var receiver = item[1];
+ var trackEvent = new Event('track');
+ trackEvent.track = track;
+ trackEvent.receiver = receiver;
+ trackEvent.streams = [stream];
+ self.dispatchEvent(trackEvent);
+ if (self.ontrack !== null) {
+ window.setTimeout(function() {
+ self.ontrack(trackEvent);
+ }, 0);
+ }
+ });
+ }, 0);
+ }
+ if (arguments.length > 1 && typeof arguments[1] === 'function') {
+ window.setTimeout(arguments[1], 0);
+ }
+ return Promise.resolve();
+ };
+
+ window.RTCPeerConnection.prototype.close = function() {
+ this.transceivers.forEach(function(transceiver) {
+ /* not yet
+ if (transceiver.iceGatherer) {
+ transceiver.iceGatherer.close();
+ }
+ */
+ if (transceiver.iceTransport) {
+ transceiver.iceTransport.stop();
+ }
+ if (transceiver.dtlsTransport) {
+ transceiver.dtlsTransport.stop();
+ }
+ if (transceiver.rtpSender) {
+ transceiver.rtpSender.stop();
+ }
+ if (transceiver.rtpReceiver) {
+ transceiver.rtpReceiver.stop();
+ }
+ });
+ // FIXME: clean up tracks, local streams, remote streams, etc
+ this._updateSignalingState('closed');
+ };
+
+ // Update the signaling state.
+ window.RTCPeerConnection.prototype._updateSignalingState =
+ function(newState) {
+ this.signalingState = newState;
+ var event = new Event('signalingstatechange');
+ this.dispatchEvent(event);
+ if (this.onsignalingstatechange !== null) {
+ this.onsignalingstatechange(event);
+ }
+ };
+
+ // Determine whether to fire the negotiationneeded event.
+ window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded =
+ function() {
+ // Fire away (for now).
+ var event = new Event('negotiationneeded');
+ this.dispatchEvent(event);
+ if (this.onnegotiationneeded !== null) {
+ this.onnegotiationneeded(event);
+ }
+ };
+
+ // Update the connection state.
+ window.RTCPeerConnection.prototype._updateConnectionState = function() {
+ var self = this;
+ var newState;
+      var states = {
+        'new': 0,
+        closed: 0,
+        connecting: 0,
+        checking: 0,
+        connected: 0,
+        completed: 0,
+        disconnected: 0,
+        failed: 0
+      };
+ this.transceivers.forEach(function(transceiver) {
+ states[transceiver.iceTransport.state]++;
+ states[transceiver.dtlsTransport.state]++;
+ });
+ // ICETransport.completed and connected are the same for this purpose.
+ states.connected += states.completed;
+
+ newState = 'new';
+ if (states.failed > 0) {
+ newState = 'failed';
+ } else if (states.connecting > 0 || states.checking > 0) {
+ newState = 'connecting';
+ } else if (states.disconnected > 0) {
+ newState = 'disconnected';
+ } else if (states.new > 0) {
+ newState = 'new';
+ } else if (states.connected > 0 || states.completed > 0) {
+ newState = 'connected';
+ }
+
+ if (newState !== self.iceConnectionState) {
+ self.iceConnectionState = newState;
+ var event = new Event('iceconnectionstatechange');
+ this.dispatchEvent(event);
+ if (this.oniceconnectionstatechange !== null) {
+ this.oniceconnectionstatechange(event);
+ }
+ }
+ };
+
+ window.RTCPeerConnection.prototype.createOffer = function() {
+ var self = this;
+ if (this._pendingOffer) {
+ throw new Error('createOffer called while there is a pending offer.');
+ }
+ var offerOptions;
+ if (arguments.length === 1 && typeof arguments[0] !== 'function') {
+ offerOptions = arguments[0];
+ } else if (arguments.length === 3) {
+ offerOptions = arguments[2];
+ }
+
+ var tracks = [];
+ var numAudioTracks = 0;
+ var numVideoTracks = 0;
+ // Default to sendrecv.
+ if (this.localStreams.length) {
+ numAudioTracks = this.localStreams[0].getAudioTracks().length;
+ numVideoTracks = this.localStreams[0].getVideoTracks().length;
+ }
+ // Determine number of audio and video tracks we need to send/recv.
+ if (offerOptions) {
+ // Reject Chrome legacy constraints.
+ if (offerOptions.mandatory || offerOptions.optional) {
+ throw new TypeError(
+ 'Legacy mandatory/optional constraints not supported.');
+ }
+ if (offerOptions.offerToReceiveAudio !== undefined) {
+ numAudioTracks = offerOptions.offerToReceiveAudio;
+ }
+ if (offerOptions.offerToReceiveVideo !== undefined) {
+ numVideoTracks = offerOptions.offerToReceiveVideo;
+ }
+ }
+ if (this.localStreams.length) {
+ // Push local streams.
+ this.localStreams[0].getTracks().forEach(function(track) {
+ tracks.push({
+ kind: track.kind,
+ track: track,
+ wantReceive: track.kind === 'audio' ?
+ numAudioTracks > 0 : numVideoTracks > 0
+ });
+ if (track.kind === 'audio') {
+ numAudioTracks--;
+ } else if (track.kind === 'video') {
+ numVideoTracks--;
+ }
+ });
+ }
+ // Create M-lines for recvonly streams.
+ while (numAudioTracks > 0 || numVideoTracks > 0) {
+ if (numAudioTracks > 0) {
+ tracks.push({
+ kind: 'audio',
+ wantReceive: true
+ });
+ numAudioTracks--;
+ }
+ if (numVideoTracks > 0) {
+ tracks.push({
+ kind: 'video',
+ wantReceive: true
+ });
+ numVideoTracks--;
+ }
+ }
+ // reorder tracks
+ tracks = sortTracks(tracks);
+
+ var sdp = SDPUtils.writeSessionBoilerplate();
+ var transceivers = [];
+ tracks.forEach(function(mline, sdpMLineIndex) {
+ // For each track, create an ice gatherer, ice transport,
+ // dtls transport, potentially rtpsender and rtpreceiver.
+ var track = mline.track;
+ var kind = mline.kind;
+ var mid = SDPUtils.generateIdentifier();
+
+ var transports = self.usingBundle && sdpMLineIndex > 0 ? {
+ iceGatherer: transceivers[0].iceGatherer,
+ iceTransport: transceivers[0].iceTransport,
+ dtlsTransport: transceivers[0].dtlsTransport
+ } : self._createIceAndDtlsTransports(mid, sdpMLineIndex);
+
+ var localCapabilities = RTCRtpSender.getCapabilities(kind);
+ // filter RTX until additional stuff needed for RTX is implemented
+ // in adapter.js
+ if (browserDetails.version < 15019) {
+ localCapabilities.codecs = localCapabilities.codecs.filter(
+ function(codec) {
+ return codec.name !== 'rtx';
+ });
+ }
+ localCapabilities.codecs.forEach(function(codec) {
+ // work around https://bugs.chromium.org/p/webrtc/issues/detail?id=6552
+ // by adding level-asymmetry-allowed=1
+ if (codec.name === 'H264' &&
+ codec.parameters['level-asymmetry-allowed'] === undefined) {
+ codec.parameters['level-asymmetry-allowed'] = '1';
+ }
+ });
+
+ var rtpSender;
+ var rtpReceiver;
+
+ // generate an ssrc now, to be used later in rtpSender.send
+ var sendEncodingParameters = [{
+ ssrc: (2 * sdpMLineIndex + 1) * 1001
+ }];
+ if (track) {
+ // add RTX
+ if (browserDetails.version >= 15019 && kind === 'video') {
+ sendEncodingParameters[0].rtx = {
+ ssrc: (2 * sdpMLineIndex + 1) * 1001 + 1
+ };
+ }
+ rtpSender = new RTCRtpSender(track, transports.dtlsTransport);
+ }
+
+ if (mline.wantReceive) {
+ rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
+ }
+
+ transceivers[sdpMLineIndex] = {
+ iceGatherer: transports.iceGatherer,
+ iceTransport: transports.iceTransport,
+ dtlsTransport: transports.dtlsTransport,
+ localCapabilities: localCapabilities,
+ remoteCapabilities: null,
+ rtpSender: rtpSender,
+ rtpReceiver: rtpReceiver,
+ kind: kind,
+ mid: mid,
+ sendEncodingParameters: sendEncodingParameters,
+ recvEncodingParameters: null
+ };
+ });
+ if (this.usingBundle) {
+ sdp += 'a=group:BUNDLE ' + transceivers.map(function(t) {
+ return t.mid;
+ }).join(' ') + '\r\n';
+ }
+ tracks.forEach(function(mline, sdpMLineIndex) {
+ var transceiver = transceivers[sdpMLineIndex];
+ sdp += SDPUtils.writeMediaSection(transceiver,
+ transceiver.localCapabilities, 'offer', self.localStreams[0]);
+ });
+
+ this._pendingOffer = transceivers;
+ var desc = new RTCSessionDescription({
+ type: 'offer',
+ sdp: sdp
+ });
+ if (arguments.length && typeof arguments[0] === 'function') {
+ window.setTimeout(arguments[0], 0, desc);
+ }
+ return Promise.resolve(desc);
+ };
+
+ window.RTCPeerConnection.prototype.createAnswer = function() {
+ var self = this;
+
+ var sdp = SDPUtils.writeSessionBoilerplate();
+ if (this.usingBundle) {
+ sdp += 'a=group:BUNDLE ' + this.transceivers.map(function(t) {
+ return t.mid;
+ }).join(' ') + '\r\n';
+ }
+ this.transceivers.forEach(function(transceiver) {
+ if (transceiver.isDatachannel) {
+ sdp += 'm=application 0 DTLS/SCTP 5000\r\n' +
+ 'c=IN IP4 0.0.0.0\r\n' +
+ 'a=mid:' + transceiver.mid + '\r\n';
+ return;
+ }
+ // Calculate intersection of capabilities.
+ var commonCapabilities = self._getCommonCapabilities(
+ transceiver.localCapabilities,
+ transceiver.remoteCapabilities);
+
+ sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
+ 'answer', self.localStreams[0]);
+ });
+
+ var desc = new RTCSessionDescription({
+ type: 'answer',
+ sdp: sdp
+ });
+ if (arguments.length && typeof arguments[0] === 'function') {
+ window.setTimeout(arguments[0], 0, desc);
+ }
+ return Promise.resolve(desc);
+ };
+
+ window.RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
+ if (!candidate) {
+ for (var j = 0; j < this.transceivers.length; j++) {
+ this.transceivers[j].iceTransport.addRemoteCandidate({});
+ if (this.usingBundle) {
+ return Promise.resolve();
+ }
+ }
+ } else {
+ var mLineIndex = candidate.sdpMLineIndex;
+ if (candidate.sdpMid) {
+ for (var i = 0; i < this.transceivers.length; i++) {
+ if (this.transceivers[i].mid === candidate.sdpMid) {
+ mLineIndex = i;
+ break;
+ }
+ }
+ }
+ var transceiver = this.transceivers[mLineIndex];
+ if (transceiver) {
+ var cand = Object.keys(candidate.candidate).length > 0 ?
+ SDPUtils.parseCandidate(candidate.candidate) : {};
+ // Ignore Chrome's invalid candidates since Edge does not like them.
+ if (cand.protocol === 'tcp' && (cand.port === 0 || cand.port === 9)) {
+ return Promise.resolve();
+ }
+          // Ignore RTCP candidates; we assume RTCP-MUX.
+ if (cand.component !== '1') {
+ return Promise.resolve();
+ }
+ transceiver.iceTransport.addRemoteCandidate(cand);
+
+ // update the remoteDescription.
+ var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
+ sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
+ : 'a=end-of-candidates') + '\r\n';
+ this.remoteDescription.sdp = sections.join('');
+ }
+ }
+ if (arguments.length > 1 && typeof arguments[1] === 'function') {
+ window.setTimeout(arguments[1], 0);
+ }
+ return Promise.resolve();
+ };
+
+ window.RTCPeerConnection.prototype.getStats = function() {
+ var promises = [];
+ this.transceivers.forEach(function(transceiver) {
+ ['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
+ 'dtlsTransport'].forEach(function(method) {
+ if (transceiver[method]) {
+ promises.push(transceiver[method].getStats());
+ }
+ });
+ });
+ var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
+ arguments[1];
+ var fixStatsType = function(stat) {
+ return {
+ inboundrtp: 'inbound-rtp',
+ outboundrtp: 'outbound-rtp',
+ candidatepair: 'candidate-pair',
+ localcandidate: 'local-candidate',
+ remotecandidate: 'remote-candidate'
+ }[stat.type] || stat.type;
+ };
+ return new Promise(function(resolve) {
+ // shim getStats with maplike support
+ var results = new Map();
+ Promise.all(promises).then(function(res) {
+ res.forEach(function(result) {
+ Object.keys(result).forEach(function(id) {
+ result[id].type = fixStatsType(result[id]);
+ results.set(id, result[id]);
+ });
+ });
+ if (cb) {
+ window.setTimeout(cb, 0, results);
+ }
+ resolve(results);
+ });
+ });
+ };
+ }
+};
+
+// Expose public methods.
+module.exports = {
+ shimPeerConnection: edgeShim.shimPeerConnection,
+ shimGetUserMedia: require('./getusermedia')
+};
+
+},{"../utils":10,"./getusermedia":6,"sdp":1}],6:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+// Expose public methods.
+module.exports = function() {
+ var shimError_ = function(e) {
+ return {
+ name: {PermissionDeniedError: 'NotAllowedError'}[e.name] || e.name,
+ message: e.message,
+ constraint: e.constraint,
+ toString: function() {
+ return this.name;
+ }
+ };
+ };
+
+ // getUserMedia error shim.
+ var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+ bind(navigator.mediaDevices);
+ navigator.mediaDevices.getUserMedia = function(c) {
+ return origGetUserMedia(c).catch(function(e) {
+ return Promise.reject(shimError_(e));
+ });
+ };
+};
+
+},{}],7:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var browserDetails = require('../utils').browserDetails;
+
+var firefoxShim = {
+ shimOnTrack: function() {
+ if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
+ window.RTCPeerConnection.prototype)) {
+ Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
+ get: function() {
+ return this._ontrack;
+ },
+ set: function(f) {
+ if (this._ontrack) {
+ this.removeEventListener('track', this._ontrack);
+ this.removeEventListener('addstream', this._ontrackpoly);
+ }
+ this.addEventListener('track', this._ontrack = f);
+ this.addEventListener('addstream', this._ontrackpoly = function(e) {
+ e.stream.getTracks().forEach(function(track) {
+ var event = new Event('track');
+ event.track = track;
+ event.receiver = {track: track};
+ event.streams = [e.stream];
+ this.dispatchEvent(event);
+ }.bind(this));
+ }.bind(this));
+ }
+ });
+ }
+ },
+
+ shimSourceObject: function() {
+ // Firefox has supported mozSrcObject since FF22, unprefixed in 42.
+ if (typeof window === 'object') {
+ if (window.HTMLMediaElement &&
+ !('srcObject' in window.HTMLMediaElement.prototype)) {
+ // Shim the srcObject property, once, when HTMLMediaElement is found.
+ Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
+ get: function() {
+ return this.mozSrcObject;
+ },
+ set: function(stream) {
+ this.mozSrcObject = stream;
+ }
+ });
+ }
+ }
+ },
+
+ shimPeerConnection: function() {
+ if (typeof window !== 'object' || !(window.RTCPeerConnection ||
+ window.mozRTCPeerConnection)) {
+ return; // probably media.peerconnection.enabled=false in about:config
+ }
+ // The RTCPeerConnection object.
+ if (!window.RTCPeerConnection) {
+ window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+ if (browserDetails.version < 38) {
+ // .urls is not supported in FF < 38.
+ // create RTCIceServers with a single url.
+ if (pcConfig && pcConfig.iceServers) {
+ var newIceServers = [];
+ for (var i = 0; i < pcConfig.iceServers.length; i++) {
+ var server = pcConfig.iceServers[i];
+ if (server.hasOwnProperty('urls')) {
+ for (var j = 0; j < server.urls.length; j++) {
+ var newServer = {
+ url: server.urls[j]
+ };
+ if (server.urls[j].indexOf('turn') === 0) {
+ newServer.username = server.username;
+ newServer.credential = server.credential;
+ }
+ newIceServers.push(newServer);
+ }
+ } else {
+ newIceServers.push(pcConfig.iceServers[i]);
+ }
+ }
+ pcConfig.iceServers = newIceServers;
+ }
+ }
+ return new mozRTCPeerConnection(pcConfig, pcConstraints);
+ };
+ window.RTCPeerConnection.prototype = mozRTCPeerConnection.prototype;
+
+ // wrap static methods. Currently just generateCertificate.
+ if (mozRTCPeerConnection.generateCertificate) {
+ Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+ get: function() {
+ return mozRTCPeerConnection.generateCertificate;
+ }
+ });
+ }
+
+ window.RTCSessionDescription = mozRTCSessionDescription;
+ window.RTCIceCandidate = mozRTCIceCandidate;
+ }
+
+ // shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
+ ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+ .forEach(function(method) {
+ var nativeMethod = RTCPeerConnection.prototype[method];
+ RTCPeerConnection.prototype[method] = function() {
+ arguments[0] = new ((method === 'addIceCandidate') ?
+ RTCIceCandidate : RTCSessionDescription)(arguments[0]);
+ return nativeMethod.apply(this, arguments);
+ };
+ });
+
+ // support for addIceCandidate(null or undefined)
+ var nativeAddIceCandidate =
+ RTCPeerConnection.prototype.addIceCandidate;
+ RTCPeerConnection.prototype.addIceCandidate = function() {
+ if (!arguments[0]) {
+ if (arguments[1]) {
+ arguments[1].apply(null);
+ }
+ return Promise.resolve();
+ }
+ return nativeAddIceCandidate.apply(this, arguments);
+ };
+
+ // shim getStats with maplike support
+ var makeMapStats = function(stats) {
+ var map = new Map();
+ Object.keys(stats).forEach(function(key) {
+ map.set(key, stats[key]);
+ map[key] = stats[key];
+ });
+ return map;
+ };
+
+ var modernStatsTypes = {
+ inboundrtp: 'inbound-rtp',
+ outboundrtp: 'outbound-rtp',
+ candidatepair: 'candidate-pair',
+ localcandidate: 'local-candidate',
+ remotecandidate: 'remote-candidate'
+ };
+
+ var nativeGetStats = RTCPeerConnection.prototype.getStats;
+ RTCPeerConnection.prototype.getStats = function(selector, onSucc, onErr) {
+ return nativeGetStats.apply(this, [selector || null])
+ .then(function(stats) {
+ if (browserDetails.version < 48) {
+ stats = makeMapStats(stats);
+ }
+ if (browserDetails.version < 53 && !onSucc) {
+            // Shim only the promise-based getStats with spec-hyphenated type
+            // names. Leave the callback version alone (misc old uses of
+            // forEach before Map).
+ try {
+ stats.forEach(function(stat) {
+ stat.type = modernStatsTypes[stat.type] || stat.type;
+ });
+ } catch (e) {
+ if (e.name !== 'TypeError') {
+ throw e;
+ }
+ // Avoid TypeError: "type" is read-only, in old versions. 34-43ish
+ stats.forEach(function(stat, i) {
+ stats.set(i, Object.assign({}, stat, {
+ type: modernStatsTypes[stat.type] || stat.type
+ }));
+ });
+ }
+ }
+ return stats;
+ })
+ .then(onSucc, onErr);
+ };
+ }
+};
+
+// Expose public methods.
+module.exports = {
+ shimOnTrack: firefoxShim.shimOnTrack,
+ shimSourceObject: firefoxShim.shimSourceObject,
+ shimPeerConnection: firefoxShim.shimPeerConnection,
+ shimGetUserMedia: require('./getusermedia')
+};
+
+},{"../utils":10,"./getusermedia":8}],8:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var logging = require('../utils').log;
+var browserDetails = require('../utils').browserDetails;
+
+// Expose public methods.
+module.exports = function() {
+ var shimError_ = function(e) {
+ return {
+ name: {
+ SecurityError: 'NotAllowedError',
+ PermissionDeniedError: 'NotAllowedError'
+ }[e.name] || e.name,
+ message: {
+ 'The operation is insecure.': 'The request is not allowed by the ' +
+ 'user agent or the platform in the current context.'
+ }[e.message] || e.message,
+ constraint: e.constraint,
+ toString: function() {
+ return this.name + (this.message && ': ') + this.message;
+ }
+ };
+ };
+
+ // getUserMedia constraints shim.
+ var getUserMedia_ = function(constraints, onSuccess, onError) {
+ var constraintsToFF37_ = function(c) {
+ if (typeof c !== 'object' || c.require) {
+ return c;
+ }
+ var require = [];
+ Object.keys(c).forEach(function(key) {
+ if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
+ return;
+ }
+ var r = c[key] = (typeof c[key] === 'object') ?
+ c[key] : {ideal: c[key]};
+ if (r.min !== undefined ||
+ r.max !== undefined || r.exact !== undefined) {
+ require.push(key);
+ }
+ if (r.exact !== undefined) {
+ if (typeof r.exact === 'number') {
+            r.min = r.max = r.exact;
+ } else {
+ c[key] = r.exact;
+ }
+ delete r.exact;
+ }
+ if (r.ideal !== undefined) {
+ c.advanced = c.advanced || [];
+ var oc = {};
+ if (typeof r.ideal === 'number') {
+ oc[key] = {min: r.ideal, max: r.ideal};
+ } else {
+ oc[key] = r.ideal;
+ }
+ c.advanced.push(oc);
+ delete r.ideal;
+ if (!Object.keys(r).length) {
+ delete c[key];
+ }
+ }
+ });
+ if (require.length) {
+ c.require = require;
+ }
+ return c;
+ };
+ constraints = JSON.parse(JSON.stringify(constraints));
+ if (browserDetails.version < 38) {
+ logging('spec: ' + JSON.stringify(constraints));
+ if (constraints.audio) {
+ constraints.audio = constraintsToFF37_(constraints.audio);
+ }
+ if (constraints.video) {
+ constraints.video = constraintsToFF37_(constraints.video);
+ }
+ logging('ff37: ' + JSON.stringify(constraints));
+ }
+ return navigator.mozGetUserMedia(constraints, onSuccess, function(e) {
+ onError(shimError_(e));
+ });
+ };
+
+ // Returns the result of getUserMedia as a Promise.
+ var getUserMediaPromise_ = function(constraints) {
+ return new Promise(function(resolve, reject) {
+ getUserMedia_(constraints, resolve, reject);
+ });
+ };
+
+ // Shim for mediaDevices on older versions.
+ if (!navigator.mediaDevices) {
+ navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
+ addEventListener: function() { },
+ removeEventListener: function() { }
+ };
+ }
+ navigator.mediaDevices.enumerateDevices =
+ navigator.mediaDevices.enumerateDevices || function() {
+ return new Promise(function(resolve) {
+ var infos = [
+ {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
+ {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
+ ];
+ resolve(infos);
+ });
+ };
+
+ if (browserDetails.version < 41) {
+ // Work around http://bugzil.la/1169665
+ var orgEnumerateDevices =
+ navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
+ navigator.mediaDevices.enumerateDevices = function() {
+ return orgEnumerateDevices().then(undefined, function(e) {
+ if (e.name === 'NotFoundError') {
+ return [];
+ }
+ throw e;
+ });
+ };
+ }
+ if (browserDetails.version < 49) {
+ var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+ bind(navigator.mediaDevices);
+ navigator.mediaDevices.getUserMedia = function(c) {
+ return origGetUserMedia(c).then(function(stream) {
+ // Work around https://bugzil.la/802326
+ if (c.audio && !stream.getAudioTracks().length ||
+ c.video && !stream.getVideoTracks().length) {
+ stream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ throw new DOMException('The object can not be found here.',
+ 'NotFoundError');
+ }
+ return stream;
+ }, function(e) {
+ return Promise.reject(shimError_(e));
+ });
+ };
+ }
+ navigator.getUserMedia = function(constraints, onSuccess, onError) {
+ if (browserDetails.version < 44) {
+ return getUserMedia_(constraints, onSuccess, onError);
+ }
+ // Replace Firefox 44+'s deprecation warning with unprefixed version.
+ console.warn('navigator.getUserMedia has been replaced by ' +
+ 'navigator.mediaDevices.getUserMedia');
+ navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
+ };
+};
+
+},{"../utils":10}],9:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+'use strict';
+var safariShim = {
+ // TODO: DrAlex, should be here, double check against LayoutTests
+ // shimOnTrack: function() { },
+
+ // TODO: once the back-end for the mac port is done, add.
+ // TODO: check for webkitGTK+
+ // shimPeerConnection: function() { },
+
+ shimGetUserMedia: function() {
+ if (!navigator.getUserMedia) {
+ if (navigator.webkitGetUserMedia) {
+ navigator.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
+ } else if (navigator.mediaDevices &&
+ navigator.mediaDevices.getUserMedia) {
+ navigator.getUserMedia = function(constraints, cb, errcb) {
+ navigator.mediaDevices.getUserMedia(constraints)
+ .then(cb, errcb);
+ }.bind(navigator);
+ }
+ }
+ }
+};
+
+// Expose public methods.
+module.exports = {
+ shimGetUserMedia: safariShim.shimGetUserMedia
+ // TODO
+ // shimOnTrack: safariShim.shimOnTrack,
+ // shimPeerConnection: safariShim.shimPeerConnection
+};
+
+},{}],10:[function(require,module,exports){
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var logDisabled_ = true;
+
+// Utility methods.
+var utils = {
+ disableLog: function(bool) {
+ if (typeof bool !== 'boolean') {
+ return new Error('Argument type: ' + typeof bool +
+ '. Please use a boolean.');
+ }
+ logDisabled_ = bool;
+ return (bool) ? 'adapter.js logging disabled' :
+ 'adapter.js logging enabled';
+ },
+
+ log: function() {
+ if (typeof window === 'object') {
+ if (logDisabled_) {
+ return;
+ }
+ if (typeof console !== 'undefined' && typeof console.log === 'function') {
+ console.log.apply(console, arguments);
+ }
+ }
+ },
+
+ /**
+ * Extract browser version out of the provided user agent string.
+ *
+ * @param {!string} uastring userAgent string.
+ * @param {!string} expr Regular expression used as match criteria.
+ * @param {!number} pos position in the version string to be returned.
+ * @return {!number} browser version.
+ */
+ extractVersion: function(uastring, expr, pos) {
+ var match = uastring.match(expr);
+ return match && match.length >= pos && parseInt(match[pos], 10);
+ },
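+  // Illustrative example (not part of the upstream source):
+  // extractVersion('Mozilla/5.0 ... Firefox/52.0', /Firefox\/(\d+)\./, 1)
+  // returns 52.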
+
+ /**
+ * Browser detector.
+ *
+ * @return {object} result containing browser and version
+ * properties.
+ */
+ detectBrowser: function() {
+ // Returned result object.
+ var result = {};
+ result.browser = null;
+ result.version = null;
+
+ // Fail early if it's not a browser
+ if (typeof window === 'undefined' || !window.navigator) {
+ result.browser = 'Not a browser.';
+ return result;
+ }
+
+ // Firefox.
+ if (navigator.mozGetUserMedia) {
+ result.browser = 'firefox';
+ result.version = this.extractVersion(navigator.userAgent,
+ /Firefox\/(\d+)\./, 1);
+ } else if (navigator.webkitGetUserMedia) {
+ // Chrome, Chromium, Webview, Opera, all use the chrome shim for now
+ if (window.webkitRTCPeerConnection) {
+ result.browser = 'chrome';
+ result.version = this.extractVersion(navigator.userAgent,
+ /Chrom(e|ium)\/(\d+)\./, 2);
+ } else { // Safari (in an unpublished version) or unknown webkit-based.
+ if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
+ result.browser = 'safari';
+ result.version = this.extractVersion(navigator.userAgent,
+ /AppleWebKit\/(\d+)\./, 1);
+ } else { // unknown webkit-based browser.
+ result.browser = 'Unsupported webkit-based browser ' +
+ 'with GUM support but no WebRTC support.';
+ return result;
+ }
+ }
+ } else if (navigator.mediaDevices &&
+ navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) { // Edge.
+ result.browser = 'edge';
+ result.version = this.extractVersion(navigator.userAgent,
+ /Edge\/(\d+).(\d+)$/, 2);
+ } else if (navigator.mediaDevices &&
+ navigator.userAgent.match(/AppleWebKit\/(\d+)\./)) {
+ // Safari, with webkitGetUserMedia removed.
+ result.browser = 'safari';
+ result.version = this.extractVersion(navigator.userAgent,
+ /AppleWebKit\/(\d+)\./, 1);
+ } else { // Default fallthrough: not supported.
+ result.browser = 'Not a supported browser.';
+ return result;
+ }
+
+ return result;
+ },
+
+ // shimCreateObjectURL must be called before shimSourceObject to avoid loop.
+
+ shimCreateObjectURL: function() {
+ if (!(typeof window === 'object' && window.HTMLMediaElement &&
+ 'srcObject' in window.HTMLMediaElement.prototype)) {
+ // Only shim CreateObjectURL using srcObject if srcObject exists.
+ return undefined;
+ }
+
+ var nativeCreateObjectURL = URL.createObjectURL.bind(URL);
+ var nativeRevokeObjectURL = URL.revokeObjectURL.bind(URL);
+ var streams = new Map(), newId = 0;
+
+ URL.createObjectURL = function(stream) {
+ if ('getTracks' in stream) {
+ var url = 'polyblob:' + (++newId);
+ streams.set(url, stream);
+ console.log('URL.createObjectURL(stream) is deprecated! ' +
+ 'Use elem.srcObject = stream instead!');
+ return url;
+ }
+ return nativeCreateObjectURL(stream);
+ };
+ URL.revokeObjectURL = function(url) {
+ nativeRevokeObjectURL(url);
+ streams.delete(url);
+ };
+
+ var dsc = Object.getOwnPropertyDescriptor(window.HTMLMediaElement.prototype,
+ 'src');
+ Object.defineProperty(window.HTMLMediaElement.prototype, 'src', {
+ get: function() {
+ return dsc.get.apply(this);
+ },
+ set: function(url) {
+ this.srcObject = streams.get(url) || null;
+ return dsc.set.apply(this, [url]);
+ }
+ });
+
+ var nativeSetAttribute = HTMLMediaElement.prototype.setAttribute;
+ HTMLMediaElement.prototype.setAttribute = function() {
+ if (arguments.length === 2 &&
+ ('' + arguments[0]).toLowerCase() === 'src') {
+ this.srcObject = streams.get(arguments[1]) || null;
+ }
+ return nativeSetAttribute.apply(this, arguments);
+ };
+ }
+};
+
+// Export.
+module.exports = {
+ log: utils.log,
+ disableLog: utils.disableLog,
+ browserDetails: utils.detectBrowser(),
+ extractVersion: utils.extractVersion,
+ shimCreateObjectURL: utils.shimCreateObjectURL,
+ detectBrowser: utils.detectBrowser.bind(utils)
+};
+
+},{}]},{},[2])(2)
+});
\ No newline at end of file
diff --git a/tools/perf/page_sets/webrtc_cases/audio.html b/tools/perf/page_sets/webrtc_cases/audio.html
new file mode 100644
index 0000000..035888d
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/audio.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html>
+<head>
+
+
+ <base target="_blank">
+
+ <title>Peer connection: audio only</title>
+
+
+</head>
+
+<body>
+
+ <div id="container">
+
+ <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a> <span>Peer connection: audio only</span></h1>
+
+ <div id="audio">
+ <div>
+ <div class="label">Local audio:</div><audio id="audio1" autoplay controls muted></audio>
+ </div>
+ <div>
+ <div class="label">Remote audio:</div><audio id="audio2" autoplay controls></audio>
+ </div>
+ </div>
+
+ <div id="buttons">
+ <select id="codec">
+ <!-- Codec values are matched with how they appear in the SDP.
+ For instance, opus matches opus/48000/2 in Chrome, and ISAC/16000
+ matches 16K iSAC (but not 32K iSAC). -->
+ <option value="opus">Opus</option>
+ <option value="ISAC">iSAC 16K</option>
+ <option value="G722">G722</option>
+ <option value="PCMU">PCMU</option>
+ </select>
+ <button id="callButton">Call</button>
+ <button id="hangupButton">Hang Up</button>
+ </div>
+ <div class="graph-container" id="bitrateGraph">
+ <div>Bitrate</div>
+ <canvas id="bitrateCanvas"></canvas>
+ </div>
+ <div class="graph-container" id="packetGraph">
+ <div>Packets sent per second</div>
+ <canvas id="packetCanvas"></canvas>
+ </div>
+
+ <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/peerconnection/audio" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+ </div>
+
+
+<script src="audio.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/audio.js b/tools/perf/page_sets/webrtc_cases/audio.js
new file mode 100644
index 0000000..2485837
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/audio.js
@@ -0,0 +1,302 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+'use strict';
+
+var audio2 = document.querySelector('audio#audio2');
+var callButton = document.querySelector('button#callButton');
+var hangupButton = document.querySelector('button#hangupButton');
+var codecSelector = document.querySelector('select#codec');
+hangupButton.disabled = true;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var pc1;
+var pc2;
+var localStream;
+
+var bitrateGraph;
+var bitrateSeries;
+
+var packetGraph;
+var packetSeries;
+
+var lastResult;
+
+var offerOptions = {
+ offerToReceiveAudio: 1,
+ offerToReceiveVideo: 0,
+ voiceActivityDetection: false
+};
+
+function gotStream(stream) {
+ hangupButton.disabled = false;
+ trace('Received local stream');
+ localStream = stream;
+ var audioTracks = localStream.getAudioTracks();
+ if (audioTracks.length > 0) {
+ trace('Using Audio device: ' + audioTracks[0].label);
+ }
+ pc1.addStream(localStream);
+ trace('Adding Local Stream to peer connection');
+
+ pc1.createOffer(
+ offerOptions
+ ).then(
+ gotDescription1,
+ onCreateSessionDescriptionError
+ );
+
+ bitrateSeries = new TimelineDataSeries();
+ bitrateGraph = new TimelineGraphView('bitrateGraph', 'bitrateCanvas');
+ bitrateGraph.updateEndDate();
+
+ packetSeries = new TimelineDataSeries();
+ packetGraph = new TimelineGraphView('packetGraph', 'packetCanvas');
+ packetGraph.updateEndDate();
+}
+
+function onCreateSessionDescriptionError(error) {
+ trace('Failed to create session description: ' + error.toString());
+}
+
+function call() {
+ callButton.disabled = true;
+ codecSelector.disabled = true;
+ trace('Starting call');
+ var servers = null;
+ var pcConstraints = {
+ 'optional': []
+ };
+ pc1 = new RTCPeerConnection(servers, pcConstraints);
+ trace('Created local peer connection object pc1');
+ pc1.onicecandidate = function(e) {
+ onIceCandidate(pc1, e);
+ };
+ pc2 = new RTCPeerConnection(servers, pcConstraints);
+ trace('Created remote peer connection object pc2');
+ pc2.onicecandidate = function(e) {
+ onIceCandidate(pc2, e);
+ };
+ pc2.onaddstream = gotRemoteStream;
+ trace('Requesting local stream');
+ navigator.mediaDevices.getUserMedia({
+ audio: true,
+ video: false
+ })
+ .then(gotStream)
+ .catch(function(e) {
+ alert('getUserMedia() error: ' + e.name);
+ });
+}
+
+function gotDescription1(desc) {
+ trace('Offer from pc1 \n' + desc.sdp);
+ pc1.setLocalDescription(desc).then(
+ function() {
+ desc.sdp = forceChosenAudioCodec(desc.sdp);
+ pc2.setRemoteDescription(desc).then(
+ function() {
+ pc2.createAnswer().then(
+ gotDescription2,
+ onCreateSessionDescriptionError
+ );
+ },
+ onSetSessionDescriptionError
+ );
+ },
+ onSetSessionDescriptionError
+ );
+}
+
+function gotDescription2(desc) {
+ trace('Answer from pc2 \n' + desc.sdp);
+ pc2.setLocalDescription(desc).then(
+ function() {
+ desc.sdp = forceChosenAudioCodec(desc.sdp);
+ pc1.setRemoteDescription(desc).then(
+ function() {
+ },
+ onSetSessionDescriptionError
+ );
+ },
+ onSetSessionDescriptionError
+ );
+}
+
+function hangup() {
+ trace('Ending call');
+ localStream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ pc1.close();
+ pc2.close();
+ pc1 = null;
+ pc2 = null;
+ hangupButton.disabled = true;
+ callButton.disabled = false;
+ codecSelector.disabled = false;
+}
+
+function gotRemoteStream(e) {
+ audio2.srcObject = e.stream;
+ trace('Received remote stream');
+}
+
+function getOtherPc(pc) {
+ return (pc === pc1) ? pc2 : pc1;
+}
+
+function getName(pc) {
+ return (pc === pc1) ? 'pc1' : 'pc2';
+}
+
+function onIceCandidate(pc, event) {
+ getOtherPc(pc).addIceCandidate(event.candidate)
+ .then(
+ function() {
+ onAddIceCandidateSuccess(pc);
+ },
+ function(err) {
+ onAddIceCandidateError(pc, err);
+ }
+ );
+ trace(getName(pc) + ' ICE candidate: \n' + (event.candidate ?
+ event.candidate.candidate : '(null)'));
+}
+
+function onAddIceCandidateSuccess() {
+ trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+ trace('Failed to add ICE Candidate: ' + error.toString());
+}
+
+function onSetSessionDescriptionError(error) {
+ trace('Failed to set session description: ' + error.toString());
+}
+
+function forceChosenAudioCodec(sdp) {
+ return maybePreferCodec(sdp, 'audio', 'send', codecSelector.value);
+}
+
+// Copied from AppRTC's sdputils.js:
+
+// Sets |codec| as the default |type| codec if it's present.
+// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
+function maybePreferCodec(sdp, type, dir, codec) {
+ var str = type + ' ' + dir + ' codec';
+ if (codec === '') {
+ trace('No preference on ' + str + '.');
+ return sdp;
+ }
+
+ trace('Prefer ' + str + ': ' + codec);
+
+ var sdpLines = sdp.split('\r\n');
+
+ // Search for m line.
+ var mLineIndex = findLine(sdpLines, 'm=', type);
+ if (mLineIndex === null) {
+ return sdp;
+ }
+
+ // If the codec is available, set it as the default in m line.
+ var codecIndex = findLine(sdpLines, 'a=rtpmap', codec);
+ console.log('codecIndex', codecIndex);
+  if (codecIndex !== null) {
+ var payload = getCodecPayloadType(sdpLines[codecIndex]);
+ if (payload) {
+ sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload);
+ }
+ }
+
+ sdp = sdpLines.join('\r\n');
+ return sdp;
+}
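+
+// Illustrative example (payload types are common defaults, not from a real
+// session): given an offer containing
+//   m=audio 54321 UDP/TLS/RTP/SAVPF 111 103 0 8 9
+//   a=rtpmap:9 G722/8000
+// maybePreferCodec(sdp, 'audio', 'send', 'G722/8000') rewrites the m= line to
+//   m=audio 54321 UDP/TLS/RTP/SAVPF 9 111 103 0 8
+// so G722 (payload 9) becomes the preferred send codec.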
+
+// Find the line in sdpLines that starts with |prefix|, and, if specified,
+// contains |substr| (case-insensitive search).
+function findLine(sdpLines, prefix, substr) {
+ return findLineInRange(sdpLines, 0, -1, prefix, substr);
+}
+
+// Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
+// and, if specified, contains |substr| (case-insensitive search).
+function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
+ var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
+ for (var i = startLine; i < realEndLine; ++i) {
+ if (sdpLines[i].indexOf(prefix) === 0) {
+ if (!substr ||
+ sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
+ return i;
+ }
+ }
+ }
+ return null;
+}
+
+// Gets the codec payload type from an a=rtpmap:X line.
+function getCodecPayloadType(sdpLine) {
+ var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
+ var result = sdpLine.match(pattern);
+ return (result && result.length === 2) ? result[1] : null;
+}
+
+// Returns a new m= line with the specified codec as the first one.
+function setDefaultCodec(mLine, payload) {
+ var elements = mLine.split(' ');
+
+ // Just copy the first three parameters; codec order starts on fourth.
+ var newLine = elements.slice(0, 3);
+
+ // Put target payload first and copy in the rest.
+ newLine.push(payload);
+ for (var i = 3; i < elements.length; i++) {
+ if (elements[i] !== payload) {
+ newLine.push(elements[i]);
+ }
+ }
+ return newLine.join(' ');
+}
+
+// Query getStats every second.
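+// The send bitrate is derived from the change in bytesSent between two
+// consecutive reports; stats timestamps are in milliseconds, so
+// 8 * deltaBytes / deltaMs comes out directly in kbit/s.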
+window.setInterval(function() {
+ if (!window.pc1) {
+ return;
+ }
+ window.pc1.getStats(null).then(function(res) {
+ res.forEach(function(report) {
+ var bytes;
+ var packets;
+ var now = report.timestamp;
+ if ((report.type === 'outboundrtp') ||
+ (report.type === 'outbound-rtp') ||
+ (report.type === 'ssrc' && report.bytesSent)) {
+ bytes = report.bytesSent;
+ packets = report.packetsSent;
+ if (lastResult && lastResult.get(report.id)) {
+ // calculate bitrate
+ var bitrate = 8 * (bytes - lastResult.get(report.id).bytesSent) /
+ (now - lastResult.get(report.id).timestamp);
+
+ // append to chart
+ bitrateSeries.addPoint(now, bitrate);
+ bitrateGraph.setDataSeries([bitrateSeries]);
+ bitrateGraph.updateEndDate();
+
+ // calculate number of packets and append to chart
+ packetSeries.addPoint(now, packets -
+ lastResult.get(report.id).packetsSent);
+ packetGraph.setDataSeries([packetSeries]);
+ packetGraph.updateEndDate();
+ }
+ }
+ });
+ lastResult = res;
+ });
+}, 1000);
diff --git a/tools/perf/page_sets/webrtc_cases/canvas-capture.html b/tools/perf/page_sets/webrtc_cases/canvas-capture.html
new file mode 100644
index 0000000..340c6e16
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/canvas-capture.html
@@ -0,0 +1,26 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html><head>
+ <title>Canvas capture stream to peerConnection</title>
+</head>
+<body>
+ <div id="container">
+ <h1>Canvas capture stream to peerConnection</h1>
+
+ <canvas id="canvas" width=32 height=24></canvas>
+ <video id="remoteVideo" width=32 height=24 autoplay=""></video>
+
+ <div>
+ <button id="startButton" class="green">Start test</button>
+ </div>
+ </div>
+
+
+<script src="canvas-capture.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/canvas-capture.js b/tools/perf/page_sets/webrtc_cases/canvas-capture.js
new file mode 100644
index 0000000..ce7e11d3
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/canvas-capture.js
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+'use strict';
+
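+// Frame rate requested from canvas.captureStream(); the captured video track
+// picks up canvas changes at most this many times per second.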
+const DEFAULT_FRAME_RATE = 30;
+
+var canvas = document.getElementById('canvas');
+var context = canvas.getContext('2d');
+
+var remoteVideo = document.getElementById('remoteVideo');
+var startButton = document.getElementById('startButton');
+startButton.onclick = start;
+
+var pc1;
+var pc2;
+var stream;
+
+function logError(err) {
+ console.error(err);
+}
+
+// Draws a rectangle covering the whole canvas on every animation frame and
+// fills it with a randomly chosen red, green, or blue, so the captured stream
+// always has fresh content to encode.
+function draw() {
+ window.requestAnimationFrame(draw);
+ context.rect(0, 0, canvas.clientWidth, canvas.clientHeight);
+ var randomNumber = Math.random();
+ var hue;
+ if (randomNumber < 0.33)
+ hue = 'red';
+ else if (randomNumber < 0.66)
+ hue = 'green';
+ else
+ hue = 'blue';
+ context.fillStyle = hue;
+ context.fill();
+}
+
+function start() {
+ startButton.onclick = hangup;
+ startButton.className = 'red';
+ startButton.innerHTML = 'Stop test';
+ draw();
+ stream = canvas.captureStream(DEFAULT_FRAME_RATE);
+ call();
+}
+
+function call() {
+ var servers = null;
+ pc1 = new RTCPeerConnection(servers);
+ pc1.onicecandidate = (event) => {
+ if (event.candidate) {
+ pc2.addIceCandidate(event.candidate);
+ }
+ };
+
+ pc2 = new RTCPeerConnection(servers);
+ pc2.onicecandidate = (event) => {
+ if (event.candidate) {
+ pc1.addIceCandidate(event.candidate);
+ }
+ };
+ pc2.onaddstream = (event) => {
+ remoteVideo.srcObject = event.stream;
+ };
+
+ pc1.addStream(stream);
+ pc1.createOffer({
+ offerToReceiveAudio: 1,
+ offerToReceiveVideo: 1
+ }).then(onCreateOfferSuccess, logError);
+}
+
+function onCreateOfferSuccess(desc) {
+ pc1.setLocalDescription(desc);
+ pc2.setRemoteDescription(desc);
+ pc2.createAnswer().then(onCreateAnswerSuccess, logError);
+}
+
+function onCreateAnswerSuccess(desc) {
+ pc2.setLocalDescription(desc);
+ pc1.setRemoteDescription(desc);
+}
+
+function hangup() {
+ pc1.close();
+ pc2.close();
+ pc1 = null;
+ pc2 = null;
+ startButton.onclick = start;
+ startButton.className = 'green';
+ startButton.innerHTML = 'Start test';
+}
diff --git a/tools/perf/page_sets/webrtc_cases/common.js b/tools/perf/page_sets/webrtc_cases/common.js
new file mode 100644
index 0000000..047a000
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/common.js
@@ -0,0 +1,12 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+ /* exported trace */
+
+// Logging utility function.
+function trace(arg) {
+ var now = (window.performance.now() / 1000).toFixed(3);
+ console.log(now + ': ', arg);
+}
diff --git a/tools/perf/page_sets/webrtc_cases/constraints.html b/tools/perf/page_sets/webrtc_cases/constraints.html
new file mode 100644
index 0000000..e094ae6da
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/constraints.html
@@ -0,0 +1,99 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html>
+<head>
+
+
+ <base target="_blank">
+
+ <title>Constraints and statistics</title>
+
+
+</head>
+
+<body>
+
+ <div id="container">
+
+ <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a> <span>Constraints & statistics</span></h1>
+
+ <section id="blurb">
+ <p>This demo shows ways to use constraints and statistics in WebRTC applications.</p>
+    <p>Set camera constraints, and click <strong>Get media</strong> to (re)open the camera with those constraints applied. Click <strong>Connect</strong> to create a (local) peer connection. The RTCPeerConnection objects <code>localPeerConnection</code> and <code>remotePeerConnection</code> can be inspected from the console.</p>
+    <p>Setting a value to zero removes that constraint.</p>
+    <p>The left-hand video shows the output of <code>getUserMedia()</code>; on the right is the video after being passed through the peer connection. The transmission bitrate is displayed below the right-hand video.</p>
+ </section>
+
+ <div>
+ <button id="getMedia">Get media</button>
+ <button id="connect" disabled>Connect</button>
+ <button id="hangup" disabled>Hang Up</button>
+ </div>
+
+
+ <section id="constraints">
+ <div id="getUserMedia">
+ <div class="input">
+ <h2>Camera constraints</h2>
+ <div id="minWidth">
+ <label>Min width <span>300</span>px:</label>
+ <input type="range" min="0" max="1920" value="300">
+ </div>
+ <div id="maxWidth">
+ <label>Max width <span>640</span>px:</label>
+ <input type="range" min="0" max="1920" value="640">
+ </div>
+ <div id="minHeight">
+ <label>Min height <span>200</span>px:</label>
+ <input type="range" min="0" max="1080" value="200">
+ </div>
+ <div id="maxHeight">
+ <label>Max height <span>480</span>px:</label>
+ <input type="range" min="0" max="1080" value="480">
+ </div>
+ <div id="minFramerate">
+ <label>Min frameRate <span>0</span>fps:</label>
+ <input type="range" min="0" max="60" value="0">
+ </div>
+ <div id="maxFramerate">
+ <label>Max frameRate <span>0</span>fps:</label>
+ <input type="range" min="0" max="60" value="0">
+ </div>
+ </div>
+ <div id="getUserMediaConstraints" class="output"></div>
+ </div>
+
+ </section>
+
+ <section id="video">
+ <div id="localVideo">
+ <video autoplay muted></video>
+ <div></div>
+ </div>
+ <div id="remoteVideo">
+ <video autoplay muted></video>
+ <div></div>
+ <div id="bitrate"></div>
+ <div id="peer"></div>
+ </div>
+ </section>
+
+ <section id="statistics">
+ <div id="senderStats"></div>
+ <div id="receiverStats"></div>
+ </section>
+
+ <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/peerconnection/constraints" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+
+ </div>
+
+
+
+<script src="constraints.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/constraints.js b/tools/perf/page_sets/webrtc_cases/constraints.js
new file mode 100644
index 0000000..16cf173
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/constraints.js
@@ -0,0 +1,307 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+'use strict';
+
+var getMediaButton = document.querySelector('button#getMedia');
+var connectButton = document.querySelector('button#connect');
+var hangupButton = document.querySelector('button#hangup');
+
+getMediaButton.onclick = getMedia;
+connectButton.onclick = createPeerConnection;
+hangupButton.onclick = hangup;
+
+var minWidthInput = document.querySelector('div#minWidth input');
+var maxWidthInput = document.querySelector('div#maxWidth input');
+var minHeightInput = document.querySelector('div#minHeight input');
+var maxHeightInput = document.querySelector('div#maxHeight input');
+var minFramerateInput = document.querySelector('div#minFramerate input');
+var maxFramerateInput = document.querySelector('div#maxFramerate input');
+
+minWidthInput.onchange = maxWidthInput.onchange =
+ minHeightInput.onchange = maxHeightInput.onchange =
+ minFramerateInput.onchange = maxFramerateInput.onchange = displayRangeValue;
+
+var getUserMediaConstraintsDiv =
+ document.querySelector('div#getUserMediaConstraints');
+var bitrateDiv = document.querySelector('div#bitrate');
+var peerDiv = document.querySelector('div#peer');
+var senderStatsDiv = document.querySelector('div#senderStats');
+var receiverStatsDiv = document.querySelector('div#receiverStats');
+
+var localVideo = document.querySelector('div#localVideo video');
+var remoteVideo = document.querySelector('div#remoteVideo video');
+var localVideoStatsDiv = document.querySelector('div#localVideo div');
+var remoteVideoStatsDiv = document.querySelector('div#remoteVideo div');
+
+var localPeerConnection;
+var remotePeerConnection;
+var localStream;
+var bytesPrev;
+var timestampPrev;
+
+main();
+
+function main() {
+ displayGetUserMediaConstraints();
+}
+
+function hangup() {
+ trace('Ending call');
+ localPeerConnection.close();
+ remotePeerConnection.close();
+ localPeerConnection = null;
+ remotePeerConnection = null;
+
+ localStream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ localStream = null;
+
+ hangupButton.disabled = true;
+ getMediaButton.disabled = false;
+}
+
+function getMedia() {
+ getMediaButton.disabled = true;
+ if (localStream) {
+ localStream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ var videoTracks = localStream.getVideoTracks();
+ for (var i = 0; i !== videoTracks.length; ++i) {
+ videoTracks[i].stop();
+ }
+ }
+ navigator.mediaDevices.getUserMedia(getUserMediaConstraints())
+ .then(gotStream)
+ .catch(function(e) {
+ var message = 'getUserMedia error: ' + e.name + '\n' +
+ 'PermissionDeniedError may mean invalid constraints.';
+ alert(message);
+ console.log(message);
+ getMediaButton.disabled = false;
+ });
+}
+
+function gotStream(stream) {
+ connectButton.disabled = false;
+ console.log('GetUserMedia succeeded');
+ localStream = stream;
+ localVideo.srcObject = stream;
+}
+
+function getUserMediaConstraints() {
+ var constraints = {};
+ constraints.audio = true;
+ constraints.video = {};
+ if (minWidthInput.value !== '0') {
+ constraints.video.width = {};
+ constraints.video.width.min = minWidthInput.value;
+ }
+ if (maxWidthInput.value !== '0') {
+ constraints.video.width = constraints.video.width || {};
+ constraints.video.width.max = maxWidthInput.value;
+ }
+ if (minHeightInput.value !== '0') {
+ constraints.video.height = {};
+ constraints.video.height.min = minHeightInput.value;
+ }
+ if (maxHeightInput.value !== '0') {
+ constraints.video.height = constraints.video.height || {};
+ constraints.video.height.max = maxHeightInput.value;
+ }
+ if (minFramerateInput.value !== '0') {
+ constraints.video.frameRate = {};
+ constraints.video.frameRate.min = minFramerateInput.value;
+ }
+ if (maxFramerateInput.value !== '0') {
+ constraints.video.frameRate = constraints.video.frameRate || {};
+ constraints.video.frameRate.max = maxFramerateInput.value;
+ }
+
+ return constraints;
+}
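+
+// getUserMediaConstraints(): with the default slider values this returns,
+// roughly:
+//   {audio: true,
+//    video: {width: {min: '300', max: '640'},
+//            height: {min: '200', max: '480'}}}
+// (frameRate is omitted because both frame-rate sliders default to 0). The
+// values are the raw strings from the range inputs; they are coerced to
+// numbers when passed to getUserMedia().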
+
+function displayGetUserMediaConstraints() {
+ var constraints = getUserMediaConstraints();
+ console.log('getUserMedia constraints', constraints);
+ getUserMediaConstraintsDiv.textContent =
+ JSON.stringify(constraints, null, ' ');
+}
+
+function createPeerConnection() {
+ connectButton.disabled = true;
+ hangupButton.disabled = false;
+
+ bytesPrev = 0;
+ timestampPrev = 0;
+ localPeerConnection = new RTCPeerConnection(null);
+ remotePeerConnection = new RTCPeerConnection(null);
+ localPeerConnection.addStream(localStream);
+ console.log('localPeerConnection creating offer');
+  localPeerConnection.onnegotiationneeded = function() {
+ console.log('Negotiation needed - localPeerConnection');
+ };
+  remotePeerConnection.onnegotiationneeded = function() {
+ console.log('Negotiation needed - remotePeerConnection');
+ };
+ localPeerConnection.onicecandidate = function(e) {
+ console.log('Candidate localPeerConnection');
+ if (e.candidate) {
+ remotePeerConnection.addIceCandidate(e.candidate)
+ .then(
+ onAddIceCandidateSuccess,
+ onAddIceCandidateError
+ );
+ }
+ };
+ remotePeerConnection.onicecandidate = function(e) {
+ console.log('Candidate remotePeerConnection');
+ if (e.candidate) {
+ localPeerConnection.addIceCandidate(e.candidate)
+ .then(
+ onAddIceCandidateSuccess,
+ onAddIceCandidateError
+ );
+ }
+ };
+ remotePeerConnection.onaddstream = function(e) {
+ console.log('remotePeerConnection got stream');
+ remoteVideo.srcObject = e.stream;
+ };
+ localPeerConnection.createOffer().then(
+ function(desc) {
+ console.log('localPeerConnection offering');
+ localPeerConnection.setLocalDescription(desc);
+ remotePeerConnection.setRemoteDescription(desc);
+ remotePeerConnection.createAnswer().then(
+ function(desc2) {
+ console.log('remotePeerConnection answering');
+ remotePeerConnection.setLocalDescription(desc2);
+ localPeerConnection.setRemoteDescription(desc2);
+ },
+ function(err) {
+ console.log(err);
+ }
+ );
+ },
+ function(err) {
+ console.log(err);
+ }
+ );
+}
+
+function onAddIceCandidateSuccess() {
+ trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+ trace('Failed to add Ice Candidate: ' + error.toString());
+}
+
+// Display statistics
+setInterval(function() {
+ if (remotePeerConnection && remotePeerConnection.getRemoteStreams()[0]) {
+ remotePeerConnection.getStats(null)
+ .then(function(results) {
+ var statsString = dumpStats(results);
+ receiverStatsDiv.innerHTML = '<h2>Receiver stats</h2>' + statsString;
+ // calculate video bitrate
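+        // (Stats timestamps are in milliseconds, so 8 * bytes / ms is already
+        // kbit/s, which matches the 'kbits/sec' label appended below.)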
+ results.forEach(function(report) {
+ var now = report.timestamp;
+
+ var bitrate;
+ if (report.type === 'inboundrtp' && report.mediaType === 'video') {
+ // firefox calculates the bitrate for us
+ // https://bugzilla.mozilla.org/show_bug.cgi?id=951496
+ bitrate = Math.floor(report.bitrateMean / 1024);
+ } else if (report.type === 'ssrc' && report.bytesReceived &&
+ report.googFrameHeightReceived) {
+ // chrome does not so we need to do it ourselves
+ var bytes = report.bytesReceived;
+ if (timestampPrev) {
+ bitrate = 8 * (bytes - bytesPrev) / (now - timestampPrev);
+ bitrate = Math.floor(bitrate);
+ }
+ bytesPrev = bytes;
+ timestampPrev = now;
+ }
+ if (bitrate) {
+ bitrate += ' kbits/sec';
+ bitrateDiv.innerHTML = '<strong>Bitrate:</strong> ' + bitrate;
+ }
+ });
+
+ // figure out the peer's ip
+ var activeCandidatePair = null;
+ var remoteCandidate = null;
+
+ // search for the candidate pair
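+      // (covers both the older spec-style 'candidatepair' reports with a
+      // 'selected' flag and Chrome's legacy 'googCandidatePair' reports)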
+ results.forEach(function(report) {
+ if (report.type === 'candidatepair' && report.selected ||
+ report.type === 'googCandidatePair' &&
+ report.googActiveConnection === 'true') {
+ activeCandidatePair = report;
+ }
+ });
+ if (activeCandidatePair && activeCandidatePair.remoteCandidateId) {
+          remoteCandidate = results.get(activeCandidatePair.remoteCandidateId);
+ }
+ if (remoteCandidate && remoteCandidate.ipAddress &&
+ remoteCandidate.portNumber) {
+ peerDiv.innerHTML = '<strong>Connected to:</strong> ' +
+ remoteCandidate.ipAddress +
+ ':' + remoteCandidate.portNumber;
+ }
+ }, function(err) {
+ console.log(err);
+ });
+ localPeerConnection.getStats(null)
+ .then(function(results) {
+ var statsString = dumpStats(results);
+ senderStatsDiv.innerHTML = '<h2>Sender stats</h2>' + statsString;
+ }, function(err) {
+ console.log(err);
+ });
+ } else {
+ console.log('Not connected yet');
+ }
+ // Collect some stats from the video tags.
+ if (localVideo.videoWidth) {
+ localVideoStatsDiv.innerHTML = '<strong>Video dimensions:</strong> ' +
+ localVideo.videoWidth + 'x' + localVideo.videoHeight + 'px';
+ }
+ if (remoteVideo.videoWidth) {
+ remoteVideoStatsDiv.innerHTML = '<strong>Video dimensions:</strong> ' +
+ remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight + 'px';
+ }
+}, 1000);
+
+// Dumps an RTCStatsReport as an HTML string, one block per report.
+function dumpStats(results) {
+ var statsString = '';
+ results.forEach(function(res) {
+ statsString += '<h3>Report type=';
+ statsString += res.type;
+ statsString += '</h3>\n';
+ statsString += 'id ' + res.id + '<br>\n';
+ statsString += 'time ' + res.timestamp + '<br>\n';
+ Object.keys(res).forEach(function(k) {
+ if (k !== 'timestamp' && k !== 'type' && k !== 'id') {
+ statsString += k + ': ' + res[k] + '<br>\n';
+ }
+ });
+ });
+ return statsString;
+}
+
+// Utility to show the value of a range in a sibling span element
+function displayRangeValue(e) {
+ var span = e.target.parentElement.querySelector('span');
+ span.textContent = e.target.value;
+ displayGetUserMediaConstraints();
+}
diff --git a/tools/perf/page_sets/webrtc_cases/datatransfer.html b/tools/perf/page_sets/webrtc_cases/datatransfer.html
new file mode 100644
index 0000000..2c6315e
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/datatransfer.html
@@ -0,0 +1,73 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html>
+<head>
+
+
+ <base target="_blank">
+
+ <title>Generate and transfer data</title>
+
+
+</head>
+
+<body>
+
+ <div id="container">
+
+ <h1><a href="https://webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a> <span>Generate and transfer data</span></h1>
+ <section>
+
+    <p>This page generates and sends the specified amount of data via WebRTC data channels.</p>
+
+    <p>To accomplish this in an interoperable way, the data is split into chunks that are then transferred via the data channel. The data channel is reliable and ordered by default, which is well suited to file transfers.</p>
+
+ <p>Send and receive progress is monitored using HTML5 <i>progress</i> elements.</p>
+
+ </section>
+
+ <section>
+ <div id="button">
+ <button id="sendTheData" type="button">Generate and send data</button>
+ </div>
+ <div class="input">
+ <input type="number" id="megsToSend" min="1" name="megs" value="128"/>
+ <label for="megsToSend">MB</label>
+ <div id="errorMsg"></div>
+ </div>
+ <div class="input">
+ <input type="checkbox" id="ordered" checked>
+ <label for="ordered">Ordered mode</label>
+ </div>
+ <div class="progress">
+ <div class="label">Send progress: </div>
+ <progress id="sendProgress" max="0" value="0"></progress>
+ </div>
+
+ <div class="progress">
+ <div class="label">Receive progress: </div>
+ <progress id="receiveProgress" max="0" value="0"></progress>
+ </div>
+ </section>
+
+ <section>
+ <p>View the console to see logging.</p>
+
+ <p>The <code>RTCPeerConnection</code> objects <code>localConnection</code> and <code>remoteConnection</code> are in global scope, so you can inspect them in the console as well.</p>
+
+ <p>For more information about RTCDataChannel, see <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/#toc-rtcdatachannel" title="RTCDataChannel section of HTML5 Rocks article about WebRTC">Getting Started With WebRTC</a>.</p>
+ </section>
+
+ <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/datachannel/datatransfer" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+ </div>
+
+
+
+<script src="datatransfer.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/datatransfer.js b/tools/perf/page_sets/webrtc_cases/datatransfer.js
new file mode 100644
index 0000000..7c50507
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/datatransfer.js
@@ -0,0 +1,229 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+'use strict';
+
+var localConnection;
+var remoteConnection;
+var sendChannel;
+var receiveChannel;
+var pcConstraint;
+var megsToSend = document.querySelector('input#megsToSend');
+var sendButton = document.querySelector('button#sendTheData');
+var orderedCheckbox = document.querySelector('input#ordered');
+var sendProgress = document.querySelector('progress#sendProgress');
+var receiveProgress = document.querySelector('progress#receiveProgress');
+var errorMessage = document.querySelector('div#errorMsg');
+
+var receivedSize = 0;
+var bytesToSend = 0;
+
+sendButton.onclick = createConnection;
+
+// Prevent the amount of data to send from being set to zero or less.
+megsToSend.addEventListener('change', function(e) {
+ if (this.value <= 0) {
+ sendButton.disabled = true;
+ errorMessage.innerHTML = '<p>Please enter a number greater than zero.</p>';
+ } else {
+ errorMessage.innerHTML = '';
+ sendButton.disabled = false;
+ }
+});
+
+function createConnection() {
+ sendButton.disabled = true;
+ megsToSend.disabled = true;
+ var servers = null;
+ pcConstraint = null;
+
+ bytesToSend = Math.round(megsToSend.value) * 1024 * 1024;
+
+ // Add localConnection to global scope to make it visible
+ // from the browser console.
+ window.localConnection = localConnection = new RTCPeerConnection(servers,
+ pcConstraint);
+ trace('Created local peer connection object localConnection');
+
+ var dataChannelParams = {ordered: false};
+ if (orderedCheckbox.checked) {
+ dataChannelParams.ordered = true;
+ }
+
+ sendChannel = localConnection.createDataChannel(
+ 'sendDataChannel', dataChannelParams);
+ sendChannel.binaryType = 'arraybuffer';
+ trace('Created send data channel');
+
+ sendChannel.onopen = onSendChannelStateChange;
+ sendChannel.onclose = onSendChannelStateChange;
+ localConnection.onicecandidate = function(e) {
+ onIceCandidate(localConnection, e);
+ };
+
+ localConnection.createOffer().then(
+ gotDescription1,
+ onCreateSessionDescriptionError
+ );
+
+ // Add remoteConnection to global scope to make it visible
+ // from the browser console.
+ window.remoteConnection = remoteConnection = new RTCPeerConnection(servers,
+ pcConstraint);
+ trace('Created remote peer connection object remoteConnection');
+
+ remoteConnection.onicecandidate = function(e) {
+ onIceCandidate(remoteConnection, e);
+ };
+ remoteConnection.ondatachannel = receiveChannelCallback;
+}
+
+function onCreateSessionDescriptionError(error) {
+ trace('Failed to create session description: ' + error.toString());
+}
+
+function randomAsciiString(length) {
+ var result = '';
+ for (var i = 0; i < length; i++) {
+ // Visible ASCII chars are between 33 and 126.
+ result += String.fromCharCode(33 + Math.random() * 93);
+ }
+ return result;
+}
+
+function sendGeneratedData() {
+ sendProgress.max = bytesToSend;
+ receiveProgress.max = sendProgress.max;
+ sendProgress.value = 0;
+ receiveProgress.value = 0;
+
+ var chunkSize = 16384;
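+  // 16 KiB chunks: this is commonly treated as the largest message size that
+  // can safely be sent over a data channel across browsers.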
+ var stringToSendRepeatedly = randomAsciiString(chunkSize);
+ var bufferFullThreshold = 5 * chunkSize;
+ var usePolling = true;
+ if (typeof sendChannel.bufferedAmountLowThreshold === 'number') {
+ trace('Using the bufferedamountlow event for flow control');
+ usePolling = false;
+
+ // Reduce the buffer fullness threshold, since we now have more efficient
+ // buffer management.
+ bufferFullThreshold = chunkSize / 2;
+
+ // This is "overcontrol": our high and low thresholds are the same.
+ sendChannel.bufferedAmountLowThreshold = bufferFullThreshold;
+ }
+ // Listen for one bufferedamountlow event.
+ var listener = function() {
+ sendChannel.removeEventListener('bufferedamountlow', listener);
+ sendAllData();
+ };
+ var sendAllData = function() {
+ // Try to queue up a bunch of data and back off when the channel starts to
+ // fill up. We don't setTimeout after each send since this lowers our
+ // throughput quite a bit (setTimeout(fn, 0) can take hundreds of milli-
+ // seconds to execute).
+ while (sendProgress.value < sendProgress.max) {
+ if (sendChannel.bufferedAmount > bufferFullThreshold) {
+ if (usePolling) {
+ setTimeout(sendAllData, 250);
+ } else {
+ sendChannel.addEventListener('bufferedamountlow', listener);
+ }
+ return;
+ }
+ sendProgress.value += chunkSize;
+ sendChannel.send(stringToSendRepeatedly);
+ }
+ };
+ setTimeout(sendAllData, 0);
+}
+
+function closeDataChannels() {
+ trace('Closing data channels');
+ sendChannel.close();
+ trace('Closed data channel with label: ' + sendChannel.label);
+ receiveChannel.close();
+ trace('Closed data channel with label: ' + receiveChannel.label);
+ localConnection.close();
+ remoteConnection.close();
+ localConnection = null;
+ remoteConnection = null;
+ trace('Closed peer connections');
+}
+
+function gotDescription1(desc) {
+ localConnection.setLocalDescription(desc);
+ trace('Offer from localConnection \n' + desc.sdp);
+ remoteConnection.setRemoteDescription(desc);
+ remoteConnection.createAnswer().then(
+ gotDescription2,
+ onCreateSessionDescriptionError
+ );
+}
+
+function gotDescription2(desc) {
+ remoteConnection.setLocalDescription(desc);
+ trace('Answer from remoteConnection \n' + desc.sdp);
+ localConnection.setRemoteDescription(desc);
+}
+
+function getOtherPc(pc) {
+ return (pc === localConnection) ? remoteConnection : localConnection;
+}
+
+function getName(pc) {
+ return (pc === localConnection) ? 'localPeerConnection' :
+ 'remotePeerConnection';
+}
+
+function onIceCandidate(pc, event) {
+ getOtherPc(pc).addIceCandidate(event.candidate)
+ .then(
+ function() {
+ onAddIceCandidateSuccess(pc);
+ },
+ function(err) {
+ onAddIceCandidateError(pc, err);
+ }
+ );
+ trace(getName(pc) + ' ICE candidate: \n' + (event.candidate ?
+ event.candidate.candidate : '(null)'));
+}
+
+function onAddIceCandidateSuccess() {
+ trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+ trace('Failed to add Ice Candidate: ' + error.toString());
+}
+
+function receiveChannelCallback(event) {
+ trace('Receive Channel Callback');
+ receiveChannel = event.channel;
+ receiveChannel.binaryType = 'arraybuffer';
+ receiveChannel.onmessage = onReceiveMessageCallback;
+
+ receivedSize = 0;
+}
+
+function onReceiveMessageCallback(event) {
+ receivedSize += event.data.length;
+ receiveProgress.value = receivedSize;
+
+ if (receivedSize === bytesToSend) {
+ closeDataChannels();
+ sendButton.disabled = false;
+ megsToSend.disabled = false;
+ }
+}
+
+function onSendChannelStateChange() {
+ var readyState = sendChannel.readyState;
+ trace('Send channel state is: ' + readyState);
+ if (readyState === 'open') {
+ sendGeneratedData();
+ }
+}
diff --git a/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.html b/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.html
new file mode 100644
index 0000000..dff5a91
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.html
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html>
+<head>
+
+ <title>Multiple peerconnections</title>
+
+
+</head>
+
+<body>
+
+<div id="wrapper">
+ <div id="container">
+ <div class="bottom-border">
+ <h2>PeerConnection</h2>
+ <label>
+ Number of peer connections
+ <input id="num-peerconnections" value="10">
+ </label><br>
+ <label>
+ Enable googCpuOveruseDetection
+ <input type="checkbox" id="cpuoveruse-detection" checked>
+ </label><br>
+ <button class="green" id="start-test">
+ Start Test
+ </button><br>
+ </div>
+ <div class="video-area">
+ <br>
+ <h2>Remote Streams</h2>
+ <table border="0" id="test-table"></table>
+ </div>
+ </div>
+</div>
+
+
+<script src="multiple-peerconnections.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.js b/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.js
new file mode 100644
index 0000000..4401d10
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/multiple-peerconnections.js
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+/*jshint esversion: 6 */
+
+'use strict';
+
+var $ = document.getElementById.bind(document);
+
+var testTable = $('test-table');
+var nPeerConnectionsInput = $('num-peerconnections');
+var startTestButton = $('start-test');
+var cpuOveruseDetectionCheckbox = $('cpuoveruse-detection');
+
+startTestButton.onclick = startTest;
+
+function logError(err) {
+  console.error(err);
+}
+
+function addNewVideoElement() {
+ var newRow = testTable.insertRow(-1);
+ var newCell = newRow.insertCell(-1);
+ var video = document.createElement('video');
+ video.autoplay = true;
+ newCell.appendChild(video);
+ return video;
+}
+
+function PeerConnection(id, cpuOveruseDetection) {
+ this.id = id;
+ this.cpuOveruseDetection = cpuOveruseDetection;
+
+ this.localConnection = null;
+ this.remoteConnection = null;
+
+ this.remoteView = addNewVideoElement();
+
+ this.start = function() {
+ var onGetUserMediaSuccess = this.onGetUserMediaSuccess.bind(this);
+ navigator.mediaDevices.getUserMedia({
+ audio: true,
+ video: true
+ })
+ .then(onGetUserMediaSuccess)
+ .catch(logError);
+ };
+
+ this.onGetUserMediaSuccess = function(stream) {
+ // Create local peer connection.
+ this.localConnection = new RTCPeerConnection(null, {
+ 'optional': [{
+ 'googCpuOveruseDetection': this.cpuOveruseDetection
+ }]
+ });
+ this.localConnection.onicecandidate = (event) => {
+ this.onIceCandidate(this.remoteConnection, event);
+ };
+ this.localConnection.addStream(stream);
+
+ // Create remote peer connection.
+ this.remoteConnection = new RTCPeerConnection(null, {
+ 'optional': [{
+ 'googCpuOveruseDetection': this.cpuOveruseDetection
+ }]
+ });
+ this.remoteConnection.onicecandidate = (event) => {
+ this.onIceCandidate(this.localConnection, event);
+ };
+ this.remoteConnection.onaddstream = (e) => {
+ this.remoteView.srcObject = e.stream;
+ };
+
+ // Initiate call.
+ var onCreateOfferSuccess = this.onCreateOfferSuccess.bind(this);
+ this.localConnection.createOffer({
+ offerToReceiveAudio: 1,
+ offerToReceiveVideo: 1
+ })
+ .then(onCreateOfferSuccess, logError);
+ };
+
+ this.onCreateOfferSuccess = function(desc) {
+ this.localConnection.setLocalDescription(desc);
+ this.remoteConnection.setRemoteDescription(desc);
+
+ var onCreateAnswerSuccess = this.onCreateAnswerSuccess.bind(this);
+ this.remoteConnection.createAnswer()
+ .then(onCreateAnswerSuccess, logError);
+ };
+
+ this.onCreateAnswerSuccess = function(desc) {
+ this.remoteConnection.setLocalDescription(desc);
+ this.localConnection.setRemoteDescription(desc);
+ };
+
+ this.onIceCandidate = function(connection, event) {
+ if (event.candidate) {
+ connection.addIceCandidate(new RTCIceCandidate(event.candidate));
+ }
+ };
+}
+
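+// Starts the requested number of independent loopback calls; each
+// PeerConnection object opens its own getUserMedia stream and its own
+// local/remote RTCPeerConnection pair.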
+function startTest() {
+ var cpuOveruseDetection = cpuOveruseDetectionCheckbox.checked;
+ var nPeerConnections = nPeerConnectionsInput.value;
+ for (var i = 0; i < nPeerConnections; ++i) {
+ new PeerConnection(i, cpuOveruseDetection).start();
+ }
+}
diff --git a/tools/perf/page_sets/webrtc_cases/resolution.html b/tools/perf/page_sets/webrtc_cases/resolution.html
new file mode 100644
index 0000000..e92e0799
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/resolution.html
@@ -0,0 +1,80 @@
+<!DOCTYPE html>
+<!--
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+-->
+<html>
+<head>
+
+
+ <base target="_blank">
+
+ <title>getUserMedia: select resolution</title>
+
+
+ <style>
+ body, html {
+ height: 100%;
+ }
+
+ button {
+ margin: 0 10px 20px 0;
+ width: 90px;
+ }
+
+ div#buttons {
+ margin: 0 0 1em 0;
+ }
+
+ div#container {
+ max-width: 100%;
+ }
+
+ p#dimensions {
+ height: 1em;
+ margin: 0 0 1.5em 0;
+ }
+
+ video {
+ background: none;
+ height: auto;
+ width: auto;
+ }
+ </style>
+
+</head>
+
+<body>
+
+ <div id="container">
+
+ <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a> <span>getUserMedia: select resolution</span></h1>
+ <p></p>
+
+ <p>This example uses <a href="https://w3c.github.io/mediacapture-main/getusermedia.html#media-track-constraints" title="W3C getusermedia specification - constraints section">constraints</a>.</p>
+
+    <p>Click a button to call <code>getUserMedia()</code> with the corresponding resolution constraints.</p>
+
+ <div id="buttons">
+ <button id="qvga">QVGA</button>
+ <button id="vga">VGA</button>
+ <button id="hd">HD</button>
+ <button id="full-hd">Full HD</button>
+ </div>
+
+ <p id="dimensions"></p>
+
+ <video id="gum-res-local" autoplay></video>
+
+ <p>For more information, see <a href="http://www.html5rocks.com/en/tutorials/getusermedia/intro/" title="Media capture article by Eric Bidelman on HTML5 Rocks">Capturing Audio & Video in HTML5</a> on HTML5 Rocks.</p>
+
+ <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/getusermedia/resolution" title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+ </div>
+
+
+
+<script src="resolution.js"></script>
+<script src="adapter.js"></script>
+<script src="common.js"></script>
+</body></html>
diff --git a/tools/perf/page_sets/webrtc_cases/resolution.js b/tools/perf/page_sets/webrtc_cases/resolution.js
new file mode 100644
index 0000000..4b807b0
--- /dev/null
+++ b/tools/perf/page_sets/webrtc_cases/resolution.js
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2017 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+'use strict';
+
+var dimensions = document.querySelector('#dimensions');
+var video = document.querySelector('video');
+var stream;
+
+var vgaButton = document.querySelector('#vga');
+var qvgaButton = document.querySelector('#qvga');
+var hdButton = document.querySelector('#hd');
+var fullHdButton = document.querySelector('#full-hd');
+
+vgaButton.onclick = function() {
+ getMedia(vgaConstraints);
+};
+
+qvgaButton.onclick = function() {
+ getMedia(qvgaConstraints);
+};
+
+hdButton.onclick = function() {
+ getMedia(hdConstraints);
+};
+
+fullHdButton.onclick = function() {
+ getMedia(fullHdConstraints);
+};
+
+var qvgaConstraints = {
+ video: {width: {exact: 320}, height: {exact: 240}}
+};
+
+var vgaConstraints = {
+ video: {width: {exact: 640}, height: {exact: 480}}
+};
+
+var hdConstraints = {
+ video: {width: {exact: 1280}, height: {exact: 720}}
+};
+
+var fullHdConstraints = {
+ video: {width: {exact: 1920}, height: {exact: 1080}}
+};
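+
+// The constraint sets above use 'exact' values, so getUserMedia() rejects
+// (typically with OverconstrainedError) when the camera cannot deliver the
+// requested resolution, instead of falling back to a lower one.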
+
+function gotStream(mediaStream) {
+ window.stream = mediaStream; // stream available to console
+ video.srcObject = mediaStream;
+}
+
+function displayVideoDimensions() {
+  if (!video.videoWidth) {
+    // Dimensions are not available yet; retry shortly instead of showing 0x0.
+    setTimeout(displayVideoDimensions, 500);
+    return;
+  }
+ dimensions.innerHTML = 'Actual video dimensions: ' + video.videoWidth +
+ 'x' + video.videoHeight + 'px.';
+}
+
+video.onloadedmetadata = displayVideoDimensions;
+
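+// (Re)opens the camera with the given constraints. Tracks from a previous call
+// are stopped first, since the device may not be able to deliver a second
+// stream at a different resolution while the old one is still live.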
+function getMedia(constraints) {
+ if (stream) {
+ stream.getTracks().forEach(function(track) {
+ track.stop();
+ });
+ }
+
+ navigator.mediaDevices.getUserMedia(constraints)
+ .then(gotStream)
+ .catch(function(e) {
+ var message = 'getUserMedia error: ' + e.name;
+ alert(message);
+ console.log(message);
+ });
+}