diff --git a/samples/js/OWNERS b/samples/js/OWNERS
new file mode 100644
index 0000000000..79b28349ac
--- /dev/null
+++ b/samples/js/OWNERS
@@ -0,0 +1,9 @@
+braveyao@webrtc.org
+dutton@google.com
+henrika@webrtc.org
+hta@webrtc.org
+juberti@webrtc.org
+kjellander@webrtc.org
+phoglund@webrtc.org
+vikasmarwaha@webrtc.org
+wu@webrtc.org
diff --git a/samples/js/apprtc/app.yaml b/samples/js/apprtc/app.yaml
new file mode 100644
index 0000000000..6ef5e75052
--- /dev/null
+++ b/samples/js/apprtc/app.yaml
@@ -0,0 +1,29 @@
+application: apprtc
+version: 6
+runtime: python27
+threadsafe: true
+api_version: 1
+
+handlers:
+- url: /html
+ static_dir: html
+
+- url: /images
+ static_dir: images
+
+- url: /js
+ static_dir: js
+
+- url: /css
+ static_dir: css
+
+- url: /.*
+ script: apprtc.app
+ secure: always
+
+inbound_services:
+- channel_presence
+
+libraries:
+- name: jinja2
+ version: latest
diff --git a/samples/js/apprtc/apprtc.py b/samples/js/apprtc/apprtc.py
new file mode 100644
index 0000000000..3652c8efd3
--- /dev/null
+++ b/samples/js/apprtc/apprtc.py
@@ -0,0 +1,482 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""WebRTC Demo
+
+This module demonstrates the WebRTC API by implementing a simple video chat app.
+"""
+
+import cgi
+import logging
+import os
+import random
+import re
+import json
+import jinja2
+import webapp2
+import threading
+from google.appengine.api import channel
+from google.appengine.ext import db
+
# Jinja environment rooted at this file's directory so templates
# (index.html, full.html, test pages) resolve relative to the app.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))

# Lock for syncing DB operation in concurrent requests handling.
# TODO(brave): keeping working on improving performance with thread syncing.
# One possible method for near future is to reduce the message caching.
LOCK = threading.RLock()
+
def generate_random(length):
  """Return a string of `length` random decimal digits."""
  digits = [random.choice('0123456789') for _ in range(length)]
  return ''.join(digits)
+
def sanitize(key):
  """Replace every character outside [a-zA-Z0-9-] with '-'.

  Used to turn user-supplied room names into safe datastore key names.
  """
  # Raw string: the original non-raw '\-' is an invalid string escape
  # (DeprecationWarning, SyntaxWarning in newer Pythons). The pattern
  # itself is unchanged.
  return re.sub(r'[^a-zA-Z0-9\-]', '-', key)
+
def make_client_id(room, user):
  """Build the Channel API client id as '<room key>/<user>'."""
  return '{}/{}'.format(room.key().id_or_name(), user)
+
def get_default_stun_server(user_agent):
  """Pick a default STUN server; Firefox gets Mozilla's server."""
  if 'Firefox' in user_agent:
    return 'stun.services.mozilla.com'
  return 'stun.l.google.com:19302'
+
def get_preferred_audio_receive_codec():
  """Preferred receive codec: Opus at 48 kHz."""
  return 'opus/48000'
+
def get_preferred_audio_send_codec(user_agent):
  """Return the preferred send codec for this user agent.

  'ISAC/16000' for Chrome on Android; otherwise '' (no preference).
  """
  is_chrome_on_android = 'Android' in user_agent and 'Chrome' in user_agent
  return 'ISAC/16000' if is_chrome_on_android else ''
+
def make_pc_config(stun_server, turn_server, ts_pwd):
  """Build the RTCPeerConnection config dict (TURN entry first, then STUN)."""
  ice_servers = []
  if turn_server:
    ice_servers.append({'urls': 'turn:{}'.format(turn_server),
                        'credential': ts_pwd})
  if stun_server:
    ice_servers.append({'urls': 'stun:{}'.format(stun_server)})
  return {'iceServers': ice_servers}
+
def create_channel(room, user, duration_minutes):
  """Create a GAE Channel for this room/user pair and return its token.

  `duration_minutes` bounds the token lifetime (the 'tt' parameter in
  MainPage).
  """
  client_id = make_client_id(room, user)
  return channel.create_channel(client_id, duration_minutes)
+
def make_loopback_answer(message):
  """Turn an offer message into an answer for the loopback test.

  Rewrites the JSON "type" value from offer to answer and strips the
  (JSON-escaped) google-ice option line from the embedded SDP.
  """
  answer = message.replace("\"offer\"", "\"answer\"")
  return answer.replace("a=ice-options:google-ice\\r\\n", "")
+
def handle_message(room, user, message):
  """Route a signaling message from `user` to the other room occupant.

  'bye' removes the sender from the room before forwarding; 'offer' is
  rewritten into an answer when sender and receiver are the same user
  (loopback test). Delivery/caching is done by on_message().
  """
  message_obj = json.loads(message)
  other_user = room.get_other_user(user)
  room_key = room.key().id_or_name()
  if message_obj['type'] == 'bye':
    # This would remove the other_user in loopback test too.
    # So check its availability before forwarding Bye message.
    room.remove_user(user)
    logging.info('User ' + user + ' quit from room ' + room_key)
    logging.info('Room ' + room_key + ' has state ' + str(room))
  if other_user and room.has_user(other_user):
    if message_obj['type'] == 'offer':
      # Special case the loopback scenario.
      if other_user == user:
        message = make_loopback_answer(message)
    on_message(room, other_user, message)
  else:
    # For unittest
    on_message(room, user, message)
+
def get_saved_messages(client_id):
  """Return the datastore query of Messages cached for `client_id`."""
  return Message.gql("WHERE client_id = :id", id=client_id)
+
def delete_saved_messages(client_id):
  """Delete every cached Message for `client_id` (user left the room)."""
  messages = get_saved_messages(client_id)
  for message in messages:
    message.delete()
    logging.info('Deleted the saved message for ' + client_id)
+
def send_saved_messages(client_id):
  """Deliver, then delete, every Message cached while the client connected."""
  messages = get_saved_messages(client_id)
  for message in messages:
    channel.send_message(client_id, message.msg)
    logging.info('Delivered saved message to ' + client_id)
    message.delete()
+
def on_message(room, user, message):
  """Deliver `message` to `user` now, or cache it until they connect.

  Messages for users whose channel is not yet connected are stored as
  Message entities and flushed later by send_saved_messages().
  """
  client_id = make_client_id(room, user)
  if room.is_connected(user):
    channel.send_message(client_id, message)
    logging.info('Delivered message to user ' + user)
  else:
    new_message = Message(client_id = client_id, msg = message)
    new_message.put()
    logging.info('Saved message for user ' + user)
+
def make_media_track_constraints(constraints_string):
  """Parse a track-constraint query parameter into gUM constraints.

  Empty/'true' -> True, 'false' -> False (case-insensitive); otherwise a
  comma-separated list of key=value pairs, where keys starting with
  'goog' become optional constraints and all others mandatory ones.
  Malformed pairs are logged and skipped.
  """
  lowered = (constraints_string or '').lower()
  if lowered in ('', 'true'):
    return True
  if lowered == 'false':
    return False

  track_constraints = {'mandatory': {}, 'optional': []}
  for pair in constraints_string.split(','):
    parts = pair.split('=')
    if len(parts) != 2:
      logging.error('Ignoring malformed constraint: ' + pair)
      continue
    key, value = parts
    if key.startswith('goog'):
      track_constraints['optional'].append({key: value})
    else:
      track_constraints['mandatory'][key] = value
  return track_constraints
+
def make_media_stream_constraints(audio, video):
  """Build the getUserMedia constraints dict from the audio/video params."""
  stream_constraints = {}
  for kind, param in (('audio', audio), ('video', video)):
    stream_constraints[kind] = make_media_track_constraints(param)
  logging.info('Applying media constraints: ' + str(stream_constraints))
  return stream_constraints
+
def maybe_add_constraint(constraints, param, constraint):
  """Append {constraint: bool} to constraints['optional'] when `param`
  is 'true' or 'false' (any case); other values are ignored.

  Mutates and returns `constraints`.
  """
  lowered = param.lower()
  if lowered == 'true':
    constraints['optional'].append({constraint: True})
  elif lowered == 'false':
    constraints['optional'].append({constraint: False})
  return constraints
+
def make_pc_constraints(dtls, dscp, ipv6):
  """Build peer-connection constraints from the dtls/dscp/ipv6 params."""
  constraints = {'optional': []}
  for param, name in ((dtls, 'DtlsSrtpKeyAgreement'),
                      (dscp, 'googDscp'),
                      (ipv6, 'googIPv6')):
    maybe_add_constraint(constraints, param, name)
  return constraints
+
def make_offer_constraints():
  """Extra constraints passed to createOffer (currently none)."""
  return {'mandatory': {}, 'optional': []}
+
def append_url_arguments(request, link):
  """Re-append every query argument except 'r' to `link`, HTML-escaped.

  Used when redirecting to a generated room so caller-supplied options
  survive the redirect. NOTE(review): cgi.escape was removed in modern
  Python 3; fine under the python27 GAE runtime this app declares.
  """
  for argument in request.arguments():
    if argument != 'r':
      link += ('&' + cgi.escape(argument, True) + '=' +
               cgi.escape(request.get(argument), True))
  return link
+
+# This database is to store the messages from the sender client when the
+# receiver client is not ready to receive the messages.
+# Use TextProperty instead of StringProperty for msg because
+# the session description can be more than 500 characters.
class Message(db.Model):
  """A signaling message saved for delivery to a not-yet-connected client."""
  client_id = db.StringProperty()  # '<room key>/<user>', see make_client_id()
  msg = db.TextProperty()  # TextProperty: session descriptions exceed 500 chars
+
class Room(db.Model):
  """All the data we store for a room"""
  user1 = db.StringProperty()
  user2 = db.StringProperty()
  user1_connected = db.BooleanProperty(default=False)
  user2_connected = db.BooleanProperty(default=False)

  def __str__(self):
    """Render occupancy/connection state, e.g. '[alice-True, bob-False]'."""
    result = '['
    if self.user1:
      result += "%s-%r" % (self.user1, self.user1_connected)
    if self.user2:
      result += ", %s-%r" % (self.user2, self.user2_connected)
    result += ']'
    return result

  def get_occupancy(self):
    """Return how many of the two user slots are filled (0-2)."""
    occupancy = 0
    if self.user1:
      occupancy += 1
    if self.user2:
      occupancy += 1
    return occupancy

  def get_other_user(self, user):
    """Return the other occupant, or None if `user` is not in the room."""
    if user == self.user1:
      return self.user2
    elif user == self.user2:
      return self.user1
    else:
      return None

  def has_user(self, user):
    """True when `user` occupies either slot (falsy user is never a member)."""
    return (user and (user == self.user1 or user == self.user2))

  def add_user(self, user):
    """Place `user` in the first free slot and persist; raise when full."""
    if not self.user1:
      self.user1 = user
    elif not self.user2:
      self.user2 = user
    else:
      raise RuntimeError('room is full')
    self.put()

  def remove_user(self, user):
    """Drop `user`, promoting user2 into slot 1 if needed.

    Also discards messages still cached for the departing user. The room
    entity is persisted while occupied and deleted once empty.
    """
    delete_saved_messages(make_client_id(self, user))
    if user == self.user2:
      self.user2 = None
      self.user2_connected = False
    if user == self.user1:
      if self.user2:
        self.user1 = self.user2
        self.user1_connected = self.user2_connected
        self.user2 = None
        self.user2_connected = False
      else:
        self.user1 = None
        self.user1_connected = False
    if self.get_occupancy() > 0:
      self.put()
    else:
      self.delete()

  def set_connected(self, user):
    """Mark `user`'s channel as connected and persist."""
    if user == self.user1:
      self.user1_connected = True
    if user == self.user2:
      self.user2_connected = True
    self.put()

  def is_connected(self, user):
    """Return the user's connected flag; None when `user` is not in the room."""
    if user == self.user1:
      return self.user1_connected
    if user == self.user2:
      return self.user2_connected
+
@db.transactional
def connect_user_to_room(room_key, user):
  """Mark `user` connected in `room_key` inside a datastore transaction.

  Returns the Room (possibly None). The has_user() guard tolerates a
  disconnect message arriving before the connect message.
  """
  room = Room.get_by_key_name(room_key)
  # Check if room has user in case that disconnect message comes before
  # connect message with unknown reason, observed with local AppEngine SDK.
  if room and room.has_user(user):
    room.set_connected(user)
    logging.info('User ' + user + ' connected to room ' + room_key)
    logging.info('Room ' + room_key + ' has state ' + str(room))
  else:
    logging.warning('Unexpected Connect Message to room ' + room_key)
  return room
+
class ConnectPage(webapp2.RequestHandler):
  """GAE Channel presence callback: a client's channel connected."""
  def post(self):
    # 'from' carries the client id: '<room key>/<user>'.
    key = self.request.get('from')
    room_key, user = key.split('/')
    with LOCK:
      room = connect_user_to_room(room_key, user)
      if room and room.has_user(user):
        # Flush any messages cached while the client was connecting.
        send_saved_messages(make_client_id(room, user))
+
class DisconnectPage(webapp2.RequestHandler):
  """GAE Channel presence callback: a client's channel disconnected."""
  def post(self):
    key = self.request.get('from')
    room_key, user = key.split('/')
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if room and room.has_user(user):
        other_user = room.get_other_user(user)
        room.remove_user(user)
        logging.info('User ' + user + ' removed from room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
        if other_user and other_user != user:
          # Tell the remaining occupant the call is over.
          channel.send_message(make_client_id(room, other_user),
                               '{"type":"bye"}')
          logging.info('Sent BYE to ' + other_user)
    logging.warning('User ' + user + ' disconnected from room ' + room_key)
+
+
class MessagePage(webapp2.RequestHandler):
  """POST endpoint (/message) for client-to-client signaling messages."""
  def post(self):
    # Body is the raw JSON signaling message; room/user come from the query.
    message = self.request.body
    room_key = self.request.get('r')
    user = self.request.get('u')
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if room:
        handle_message(room, user, message)
      else:
        logging.warning('Unknown room ' + room_key)
+
class MainPage(webapp2.RequestHandler):
  """The main UI page, renders the 'index.html' template."""
  def get(self):
    """Renders the main page. When this page is shown, we create a new
    channel to push asynchronous updates to the client."""

    # Append strings to this list to have them thrown up in message boxes. This
    # will also cause the app to fail.
    error_messages = []
    # Get the base url without arguments.
    base_url = self.request.path_url
    user_agent = self.request.headers['User-Agent']
    # Query parameters: r=room, ss=STUN server, ts=TURN server, tp=TURN pwd.
    room_key = sanitize(self.request.get('r'))
    stun_server = self.request.get('ss')
    if not stun_server:
      stun_server = get_default_stun_server(user_agent)
    turn_server = self.request.get('ts')
    ts_pwd = self.request.get('tp')

    # Use "audio" and "video" to set the media stream constraints. Defined here:
    # http://goo.gl/V7cZg
    #
    # "true" and "false" are recognized and interpreted as bools, for example:
    #   "?audio=true&video=false" (Start an audio-only call.)
    #   "?audio=false" (Start a video-only call.)
    # If unspecified, the stream constraint defaults to True.
    #
    # To specify media track constraints, pass in a comma-separated list of
    # key/value pairs, separated by a "=". Examples:
    #   "?audio=googEchoCancellation=false,googAutoGainControl=true"
    #   (Disable echo cancellation and enable gain control.)
    #
    #   "?video=minWidth=1280,minHeight=720,googNoiseReduction=true"
    #   (Set the minimum resolution to 1280x720 and enable noise reduction.)
    #
    # Keys starting with "goog" will be added to the "optional" key; all others
    # will be added to the "mandatory" key.
    #
    # The audio keys are defined here: talk/app/webrtc/localaudiosource.cc
    # The video keys are defined here: talk/app/webrtc/videosource.cc
    audio = self.request.get('audio')
    video = self.request.get('video')

    # 'hd=true' forces a 720p-minimum video constraint, overriding 'video'.
    if self.request.get('hd').lower() == 'true':
      if video:
        message = 'The "hd" parameter has overridden video=' + str(video)
        logging.error(message)
        error_messages.append(message)
      video = 'minWidth=1280,minHeight=720'

    if self.request.get('minre') or self.request.get('maxre'):
      message = ('The "minre" and "maxre" parameters are no longer supported. '
                 'Use "video" instead.')
      logging.error(message)
      error_messages.append(message)

    # asc/arc override the preferred audio send/receive codecs.
    audio_send_codec = self.request.get('asc')
    if not audio_send_codec:
      audio_send_codec = get_preferred_audio_send_codec(user_agent)

    audio_receive_codec = self.request.get('arc')
    if not audio_receive_codec:
      audio_receive_codec = get_preferred_audio_receive_codec()

    # Set stereo to false by default.
    stereo = 'false'
    if self.request.get('stereo'):
      stereo = self.request.get('stereo')

    # Options for making pcConstraints
    dtls = self.request.get('dtls')
    dscp = self.request.get('dscp')
    ipv6 = self.request.get('ipv6')

    # debug=loopback calls yourself; debug=full simulates a full room.
    debug = self.request.get('debug')
    if debug == 'loopback':
      # Set dtls to false as DTLS does not work for loopback.
      dtls = 'false'

    # token_timeout for channel creation, default 30min, max 1 days, min 3min.
    token_timeout = self.request.get_range('tt',
                                           min_value = 3,
                                           max_value = 1440,
                                           default = 30)

    unittest = self.request.get('unittest')
    if unittest:
      # Always create a new room for the unit tests.
      room_key = generate_random(8)

    # No room requested: generate one and redirect, preserving other args.
    if not room_key:
      room_key = generate_random(8)
      redirect = '/?r=' + room_key
      redirect = append_url_arguments(self.request, redirect)
      self.redirect(redirect)
      logging.info('Redirecting visitor to base URL to ' + redirect)
      return

    user = None
    initiator = 0
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if not room and debug != "full":
        # New room.
        user = generate_random(8)
        room = Room(key_name = room_key)
        room.add_user(user)
        if debug != 'loopback':
          initiator = 0
        else:
          # Loopback: the same user occupies both slots and initiates.
          room.add_user(user)
          initiator = 1
      elif room and room.get_occupancy() == 1 and debug != 'full':
        # 1 occupant.
        user = generate_random(8)
        room.add_user(user)
        initiator = 1
      else:
        # 2 occupants (full).
        template = jinja_environment.get_template('full.html')
        self.response.out.write(template.render({ 'room_key': room_key }))
        logging.info('Room ' + room_key + ' is full')
        return

    # ts=false disables TURN entirely; otherwise fetch credentials from
    # the computeengineondemand service.
    if turn_server == 'false':
      turn_server = None
      turn_url = ''
    else:
      turn_url = 'https://computeengineondemand.appspot.com/'
      turn_url = turn_url + 'turn?' + 'username=' + user + '&key=4080218913'

    room_link = base_url + '?r=' + room_key
    room_link = append_url_arguments(self.request, room_link)
    token = create_channel(room, user, token_timeout)
    pc_config = make_pc_config(stun_server, turn_server, ts_pwd)
    pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
    offer_constraints = make_offer_constraints()
    media_constraints = make_media_stream_constraints(audio, video)
    # These values are interpolated into index.html as JS globals.
    template_values = {'error_messages': error_messages,
                       'token': token,
                       'me': user,
                       'room_key': room_key,
                       'room_link': room_link,
                       'initiator': initiator,
                       'pc_config': json.dumps(pc_config),
                       'pc_constraints': json.dumps(pc_constraints),
                       'offer_constraints': json.dumps(offer_constraints),
                       'media_constraints': json.dumps(media_constraints),
                       'turn_url': turn_url,
                       'stereo': stereo,
                       'audio_send_codec': audio_send_codec,
                       'audio_receive_codec': audio_receive_codec
                       }
    if unittest:
      target_page = 'test/test_' + unittest + '.html'
    else:
      target_page = 'index.html'

    template = jinja_environment.get_template(target_page)
    self.response.out.write(template.render(template_values))
    logging.info('User ' + user + ' added to room ' + room_key)
    logging.info('Room ' + room_key + ' has state ' + str(room))
+
+
# URL routes. The /_ah/channel/* handlers are the GAE Channel presence
# callbacks enabled by 'inbound_services: channel_presence' in app.yaml.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/message', MessagePage),
    ('/_ah/channel/connected/', ConnectPage),
    ('/_ah/channel/disconnected/', DisconnectPage)
  ], debug=True)
diff --git a/samples/js/apprtc/css/main.css b/samples/js/apprtc/css/main.css
new file mode 100644
index 0000000000..15d9eee2ba
--- /dev/null
+++ b/samples/js/apprtc/css/main.css
@@ -0,0 +1,95 @@
+a:link { color: #FFFFFF; }
+a:visited {color: #FFFFFF; }
+html, body {
+ background-color: #000000;
+ height: 100%;
+ font-family: Verdana, Arial, Helvetica, sans-serif;
+}
+body {
+ margin: 0;
+ padding: 0;
+}
+footer {
+ position: absolute;
+ bottom: 0;
+ width: 100%;
+ height: 28px;
+ background-color: #3F3F3F;
+ color: #FFFFFF;
+ font-size: 13px; font-weight: bold;
+ line-height: 28px;
+ text-align: center;
+}
+#container {
+ background-color: #000000;
+ position: absolute;
+ height: 100%;
+ width: 100%;
+ margin: 0px auto;
+ -webkit-perspective: 1000;
+}
+#card {
+ -webkit-transition-duration: 2s;
+ -webkit-transform-style: preserve-3d;
+}
+#local {
+ position: absolute;
+ width: 100%;
+ transform: scale(-1, 1);
+ -webkit-transform: scale(-1, 1);
+ -webkit-backface-visibility: hidden;
+}
+#remote {
+ position: absolute;
+ width: 100%;
+ -webkit-transform: rotateY(180deg);
+ -webkit-backface-visibility: hidden;
+}
+#mini {
+ position: absolute;
+ height: 30%;
+ width: 30%;
+ bottom: 32px;
+ right: 4px;
+ opacity: 1.0;
+ transform: scale(-1, 1);
+ -webkit-transform: scale(-1, 1);
+}
+#localVideo {
+ width: 100%;
+ height: 100%;
+ opacity: 0;
+ -webkit-transition-property: opacity;
+ -webkit-transition-duration: 2s;
+}
+#remoteVideo {
+ width: 100%;
+ height: 100%;
+ opacity: 0;
+ -webkit-transition-property: opacity;
+ -webkit-transition-duration: 2s;
+}
+#miniVideo {
+ width: 100%;
+ height: 100%;
+ opacity: 0;
+ -webkit-transition-property: opacity;
+ -webkit-transition-duration: 2s;
+}
+#hangup {
+ font-size: 13px; font-weight: bold;
+ color: #FFFFFF;
+ width: 128px;
+ height: 24px;
+ background-color: #808080;
+ border-style: solid;
+ border-color: #FFFFFF;
+ margin: 2px;
+}
+#infoDiv {
+ position: absolute;
+ float: right;
+ background-color: grey;
+ margin: 2px;
+ display: none;
+}
diff --git a/samples/js/apprtc/full.html b/samples/js/apprtc/full.html
new file mode 100644
index 0000000000..b14ac6009e
--- /dev/null
+++ b/samples/js/apprtc/full.html
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/js/apprtc/html/help.html b/samples/js/apprtc/html/help.html
new file mode 100644
index 0000000000..7fd2bf6245
--- /dev/null
+++ b/samples/js/apprtc/html/help.html
@@ -0,0 +1,11 @@
+
+
+
+
+ WebRtc Demo App Help
+
+
+TODO
+
+
diff --git a/samples/js/apprtc/images/webrtc_black_20p.png b/samples/js/apprtc/images/webrtc_black_20p.png
new file mode 100644
index 0000000000..a35c1dff2e
Binary files /dev/null and b/samples/js/apprtc/images/webrtc_black_20p.png differ
diff --git a/samples/js/apprtc/index.html b/samples/js/apprtc/index.html
new file mode 100644
index 0000000000..a240f29809
--- /dev/null
+++ b/samples/js/apprtc/index.html
@@ -0,0 +1,53 @@
+
+
+
+WebRTC Reference App
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/js/apprtc/js/adapter.js b/samples/js/apprtc/js/adapter.js
new file mode 120000
index 0000000000..c19e2ce990
--- /dev/null
+++ b/samples/js/apprtc/js/adapter.js
@@ -0,0 +1 @@
+../../base/adapter.js
\ No newline at end of file
diff --git a/samples/js/apprtc/js/main.js b/samples/js/apprtc/js/main.js
new file mode 100644
index 0000000000..79313dcb9b
--- /dev/null
+++ b/samples/js/apprtc/js/main.js
@@ -0,0 +1,763 @@
// --- Shared application state for the signaling/media code below. Several
// globals (roomKey, initiator, mediaConstraints, channelToken, ...) are
// injected by the server-rendered template, not declared here.
var localVideo;
var miniVideo;
var remoteVideo;
var hasLocalStream;  // whether getUserMedia was/will be attempted
var localStream;
var remoteStream;
var channel;  // NOTE(review): never assigned; openChannel() uses a local var
var pc;  // the RTCPeerConnection; null when not in a call
var socket;  // goog.appengine.Channel socket
var xmlhttp;  // XHR used for the TURN credential request
var started = false;  // true once the PeerConnection exists
var turnDone = false;  // TURN lookup finished (or skipped)
var channelReady = false;  // GAE channel is open
var signalingReady = false;  // OK to create the PeerConnection
var msgQueue = [];  // messages cached until the callee is ready
// Set up audio and video regardless of what devices are present.
var sdpConstraints = {'mandatory': {
  'OfferToReceiveAudio': true,
  'OfferToReceiveVideo': true }};
var isVideoMuted = false;
var isAudioMuted = false;
// Types of gathered ICE Candidates.
var gatheredIceCandidateTypes = { Local: {}, Remote: {} };
var infoDivErrors = [];
+
// Entry point (page load): surfaces server-side errors, wires up the video
// elements, opens the GAE channel, requests TURN, and starts local capture
// unless both audio and video are disabled.
function initialize() {
  if (errorMessages.length > 0) {
    for (i = 0; i < errorMessages.length; ++i) {
      window.alert(errorMessages[i]);
    }
    return;
  }

  console.log('Initializing; room=' + roomKey + '.');
  card = document.getElementById('card');
  localVideo = document.getElementById('localVideo');
  // Reset localVideo display to center.
  localVideo.addEventListener('loadedmetadata', function(){
    window.onresize();});
  miniVideo = document.getElementById('miniVideo');
  remoteVideo = document.getElementById('remoteVideo');
  resetStatus();
  // NOTE: AppRTCClient.java searches & parses this line; update there when
  // changing here.
  openChannel();
  maybeRequestTurn();

  // Caller is always ready to create peerConnection.
  signalingReady = initiator;

  if (mediaConstraints.audio === false &&
      mediaConstraints.video === false) {
    hasLocalStream = false;
    maybeStart();
  } else {
    hasLocalStream = true;
    doGetUserMedia();
  }
}
+
// Open the GAE Channel used for server->client signaling.
// NOTE(review): the local 'var channel' shadows the global of the same
// name, which therefore stays undefined — confirm nothing relies on it.
function openChannel() {
  console.log('Opening channel.');
  var channel = new goog.appengine.Channel(channelToken);
  var handler = {
    'onopen': onChannelOpened,
    'onmessage': onChannelMessage,
    'onerror': onChannelError,
    'onclose': onChannelClosed
  };
  socket = channel.open(handler);
}
+
// Request TURN credentials asynchronously unless TURN is disabled, already
// configured, or we are on an unauthorized domain. Sets turnDone when no
// request is needed; otherwise onTurnResult() completes the handshake.
function maybeRequestTurn() {
  // Allow to skip turn by passing ts=false to apprtc.
  if (turnUrl == '') {
    turnDone = true;
    return;
  }

  for (var i = 0, len = pcConfig.iceServers.length; i < len; i++) {
    if (pcConfig.iceServers[i].urls.substr(0, 5) === 'turn:') {
      turnDone = true;
      return;
    }
  }

  var currentDomain = document.domain;
  if (currentDomain.search('localhost') === -1 &&
      currentDomain.search('apprtc') === -1) {
    // Not authorized domain. Try with default STUN instead.
    turnDone = true;
    return;
  }

  // No TURN server. Get one from computeengineondemand.appspot.com.
  xmlhttp = new XMLHttpRequest();
  xmlhttp.onreadystatechange = onTurnResult;
  xmlhttp.open('GET', turnUrl, true);
  xmlhttp.send();
}
+
// XHR callback for the TURN request: merge returned TURN servers into
// pcConfig on success, warn on failure, then proceed either way.
function onTurnResult() {
  if (xmlhttp.readyState !== 4)
    return;

  if (xmlhttp.status === 200) {
    var turnServer = JSON.parse(xmlhttp.responseText);
    // Create turnUris using the polyfill (adapter.js).
    var iceServers = createIceServers(turnServer.uris,
                                      turnServer.username,
                                      turnServer.password);
    if (iceServers !== null) {
      pcConfig.iceServers = pcConfig.iceServers.concat(iceServers);
    }
  } else {
    messageError('No TURN server; unlikely that media will traverse networks. '
        + 'If this persists please report it to '
        + 'discuss-webrtc@googlegroups.com.');
  }
  // If TURN request failed, continue the call with default STUN.
  turnDone = true;
  maybeStart();
}
+
// Show the initial status line for caller vs. callee.
// NOTE(review): the waiting message appears to have lost HTML markup (an
// anchor around roomLink) in this copy — compare with upstream before
// editing the string.
function resetStatus() {
  if (!initiator) {
    setStatus('Waiting for someone to join: \
' + roomLink + '');
  } else {
    setStatus('Initializing...');
  }
}
+
// Ask for camera/mic access; success and failure both funnel into
// maybeStart() via their callbacks.
function doGetUserMedia() {
  // Call into getUserMedia via the polyfill (adapter.js).
  try {
    getUserMedia(mediaConstraints, onUserMediaSuccess,
                 onUserMediaError);
    console.log('Requested access to local media with mediaConstraints:\n' +
                '  \'' + JSON.stringify(mediaConstraints) + '\'');
  } catch (e) {
    alert('getUserMedia() failed. Is this a WebRTC capable browser?');
    messageError('getUserMedia failed with exception: ' + e.message);
  }
}
+
// Create the RTCPeerConnection and attach all event handlers.
// NOTE(review): a string in the alert below looks markup-stripped in this
// copy; verify against upstream before editing.
function createPeerConnection() {
  try {
    // Create an RTCPeerConnection via the polyfill (adapter.js).
    pc = new RTCPeerConnection(pcConfig, pcConstraints);
    pc.onicecandidate = onIceCandidate;
    console.log('Created RTCPeerConnnection with:\n' +
                '  config: \'' + JSON.stringify(pcConfig) + '\';\n' +
                '  constraints: \'' + JSON.stringify(pcConstraints) + '\'.');
  } catch (e) {
    messageError('Failed to create PeerConnection, exception: ' + e.message);
    alert('Cannot create RTCPeerConnection object; \
WebRTC is not supported by this browser.');
    return;
  }
  pc.onaddstream = onRemoteStreamAdded;
  pc.onremovestream = onRemoteStreamRemoved;
  pc.onsignalingstatechange = onSignalingStateChanged;
  pc.oniceconnectionstatechange = onIceConnectionStateChanged;
}
+
// Create the PeerConnection once every prerequisite is met: channel open,
// TURN resolved, signaling allowed, and local media ready (or skipped).
// Called from each async completion path; only the last one proceeds.
function maybeStart() {
  if (!started && signalingReady && channelReady && turnDone &&
      (localStream || !hasLocalStream)) {
    setStatus('Connecting...');
    console.log('Creating PeerConnection.');
    createPeerConnection();

    if (hasLocalStream) {
      console.log('Adding local stream.');
      pc.addStream(localStream);
    } else {
      console.log('Not sending any stream.');
    }
    started = true;

    if (initiator)
      doCall();
    else
      calleeStart();
  }
}
+
// Replace the contents of the status bar element.
function setStatus(state) {
  document.getElementById('status').innerHTML = state;
}
+
// Caller side: create and send the SDP offer.
function doCall() {
  var constraints = mergeConstraints(offerConstraints, sdpConstraints);
  console.log('Sending offer to peer, with constraints: \n' +
              '  \'' + JSON.stringify(constraints) + '\'.')
  pc.createOffer(setLocalAndSendMessage,
                 onCreateSessionDescriptionError, constraints);
}
+
// Callee side: drain messages queued while waiting for the PeerConnection.
function calleeStart() {
  // Callee starts to process cached offer and other messages.
  while (msgQueue.length > 0) {
    processSignalingMessage(msgQueue.shift());
  }
}
+
// Callee side: create and send the SDP answer to a received offer.
function doAnswer() {
  console.log('Sending answer to peer.');
  pc.createAnswer(setLocalAndSendMessage,
                  onCreateSessionDescriptionError, sdpConstraints);
}
+
// Merge cons2 into cons1 and return the result (cons1 is mutated).
// Mandatory keys from cons2 overwrite cons1's; optional entries from
// cons2 are appended.
function mergeConstraints(cons1, cons2) {
  var merged = cons1;
  for (var name in cons2.mandatory) {
    merged.mandatory[name] = cons2.mandatory[name];
  }
  // Bug fix: Array.prototype.concat returns a NEW array. The original
  // discarded the return value, silently dropping every optional
  // constraint coming from cons2.
  merged.optional = merged.optional.concat(cons2.optional);
  return merged;
}
+
// createOffer/createAnswer success callback: tweak the SDP codec
// preference, set it locally, and relay it to the peer.
function setLocalAndSendMessage(sessionDescription) {
  sessionDescription.sdp = maybePreferAudioReceiveCodec(sessionDescription.sdp);
  pc.setLocalDescription(sessionDescription,
       onSetSessionDescriptionSuccess, onSetSessionDescriptionError);
  sendMessage(sessionDescription);
}
+
// Apply a remote offer/answer: munge the SDP (stereo flag, send-codec
// preference — helpers presumably defined elsewhere in this file) and set
// it as the remote description.
function setRemote(message) {
  // Set Opus in Stereo, if stereo enabled.
  if (stereo)
    message.sdp = addStereo(message.sdp);
  message.sdp = maybePreferAudioSendCodec(message.sdp);
  pc.setRemoteDescription(new RTCSessionDescription(message),
       onSetRemoteDescriptionSuccess, onSetSessionDescriptionError);

  function onSetRemoteDescriptionSuccess() {
    console.log("Set remote session description success.");
    // By now all addstream events for the setRemoteDescription have fired.
    // So we can know if the peer is sending any stream or is only receiving.
    if (remoteStream) {
      waitForRemoteVideo();
    } else {
      console.log("Not receiving any stream.");
      transitionToActive();
    }
  }
}
+
// POST a signaling message to the server for relay to the other client.
// NOTE(review): 'path' is assigned without 'var' (implicit global).
function sendMessage(message) {
  var msgString = JSON.stringify(message);
  console.log('C->S: ' + msgString);
  // NOTE: AppRTCClient.java searches & parses this line; update there when
  // changing here.
  path = '/message?r=' + roomKey + '&u=' + me;
  var xhr = new XMLHttpRequest();
  xhr.open('POST', path, true);
  xhr.send(msgString);
}
+
// Dispatch a decoded signaling message: offer -> answer, answer -> apply,
// candidate -> addIceCandidate, bye -> tear down. Requires the
// PeerConnection to exist (callee queues messages until then).
function processSignalingMessage(message) {
  if (!started) {
    messageError('peerConnection has not been created yet!');
    return;
  }

  if (message.type === 'offer') {
    setRemote(message);
    doAnswer();
  } else if (message.type === 'answer') {
    setRemote(message);
  } else if (message.type === 'candidate') {
    var candidate = new RTCIceCandidate({sdpMLineIndex: message.label,
                                         candidate: message.candidate});
    noteIceCandidate("Remote", iceCandidateType(message.candidate));
    pc.addIceCandidate(candidate,
        onAddIceCandidateSuccess, onAddIceCandidateError);
  } else if (message.type === 'bye') {
    onRemoteHangup();
  }
}
+
// addIceCandidate success callback (log only).
function onAddIceCandidateSuccess() {
  console.log('AddIceCandidate success.');
}
+
// addIceCandidate failure callback: surface the error in the info div.
function onAddIceCandidateError(error) {
  messageError('Failed to add Ice Candidate: ' + error.toString());
}
+
// GAE channel opened: one more prerequisite for maybeStart().
function onChannelOpened() {
  console.log('Channel opened.');
  channelReady = true;
  maybeStart();
}
+
// GAE channel message: queue for the callee until the PeerConnection
// exists (offer jumps the queue), otherwise dispatch immediately.
function onChannelMessage(message) {
  console.log('S->C: ' + message.data);
  var msg = JSON.parse(message.data);
  // Since the turn response is async and also GAE might disorder the
  // Message delivery due to possible datastore query at server side,
  // So callee needs to cache messages before peerConnection is created.
  if (!initiator && !started) {
    if (msg.type === 'offer') {
      // Add offer to the beginning of msgQueue, since we can't handle
      // Early candidates before offer at present.
      msgQueue.unshift(msg);
      // Callee creates PeerConnection
      signalingReady = true;
      maybeStart();
    } else {
      msgQueue.push(msg);
    }
  } else {
    processSignalingMessage(msg);
  }
}
+
// GAE channel error callback: surface in the info div.
function onChannelError() {
  messageError('Channel error.');
}
+
// GAE channel closed callback (log only).
function onChannelClosed() {
  console.log('Channel closed.');
}
+
// Log an error and queue it for display in the info div.
function messageError(msg) {
  console.log(msg);
  infoDivErrors.push(msg);
  updateInfoDiv();
}
+
// getUserMedia success: show the local preview and try to start the call.
function onUserMediaSuccess(stream) {
  console.log('User has granted access to local media.');
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(localVideo, stream);
  localVideo.style.opacity = 1;
  localStream = stream;
  // Caller creates PeerConnection.
  maybeStart();
}
+
// getUserMedia failure: warn the user but continue receive-only.
function onUserMediaError(error) {
  messageError('Failed to get access to local media. Error code was ' +
      error.code + '. Continuing without sending a stream.');
  alert('Failed to get access to local media. Error code was ' +
      error.code + '. Continuing without sending a stream.');

  hasLocalStream = false;
  maybeStart();
}
+
// createOffer/createAnswer failure callback.
function onCreateSessionDescriptionError(error) {
  messageError('Failed to create session description: ' + error.toString());
}
+
// setLocalDescription success callback (log only).
function onSetSessionDescriptionSuccess() {
  console.log('Set session description success.');
}
+
// setLocalDescription/setRemoteDescription failure callback.
function onSetSessionDescriptionError(error) {
  messageError('Failed to set session description: ' + error.toString());
}
+
// Classify an ICE candidate SDP line by its "typ" field.
function iceCandidateType(candidateSDP) {
  var typeByMarker = [['typ relay ', 'TURN'],
                      ['typ srflx ', 'STUN'],
                      ['typ host ', 'HOST']];
  for (var i = 0; i < typeByMarker.length; ++i) {
    if (candidateSDP.indexOf(typeByMarker[i][0]) >= 0)
      return typeByMarker[i][1];
  }
  return 'UNKNOWN';
}
+
// Local ICE candidate gathered: relay it to the peer and record its type.
function onIceCandidate(event) {
  if (event.candidate) {
    sendMessage({type: 'candidate',
                 label: event.candidate.sdpMLineIndex,
                 id: event.candidate.sdpMid,
                 candidate: event.candidate.candidate});
    noteIceCandidate("Local", iceCandidateType(event.candidate.candidate));
  } else {
    console.log('End of candidates.');
  }
}
+
// Remote stream arrived: attach it to the remote video element.
function onRemoteStreamAdded(event) {
  console.log('Remote stream added.');
  attachMediaStream(remoteVideo, event.stream);
  remoteStream = event.stream;
}
+
// Remote stream removed (log only).
function onRemoteStreamRemoved(event) {
  console.log('Remote stream removed.');
}
+
// Refresh the info div when the signaling state changes.
function onSignalingStateChanged(event) {
  updateInfoDiv();
}
+
// Refresh the info div when the ICE connection state changes.
function onIceConnectionStateChanged(event) {
  updateInfoDiv();
}
+
// Local hangup: stop media, tear down the call, and close the channel
// (the server turns the disconnect into a BYE for the other side).
// NOTE(review): MediaStream.stop() is the legacy API — confirm adapter.js
// still provides it.
function onHangup() {
  console.log('Hanging up.');
  transitionToDone();
  localStream.stop();
  stop();
  // will trigger BYE from server
  socket.close();
}
+
// Remote hangup (BYE received): become the non-initiator and wait for a
// new peer to join.
function onRemoteHangup() {
  console.log('Session terminated.');
  initiator = 0;
  transitionToWaiting();
  stop();
}
+
// Tear down call state (PeerConnection, flags, queued messages).
// The local stream is intentionally kept so the user can rejoin.
function stop() {
  started = false;
  signalingReady = false;
  isAudioMuted = false;
  isVideoMuted = false;
  pc.close();
  pc = null;
  remoteStream = null;
  msgQueue.length = 0;
}
+
// Poll every 100ms until the remote video starts playing (or the stream
// has no video tracks), then flip to the active layout.
// NOTE(review): 'videoTracks' is assigned without 'var' (implicit global).
function waitForRemoteVideo() {
  // Call the getVideoTracks method via adapter.js.
  videoTracks = remoteStream.getVideoTracks();
  if (videoTracks.length === 0 || remoteVideo.currentTime > 0) {
    transitionToActive();
  } else {
    setTimeout(waitForRemoteVideo, 100);
  }
}
+
// Flips the UI into the in-call state: local video moves to the mini
// view, remote video fades in, and the card 3D-flips. The timeouts
// presumably stage the source/opacity changes until after the flip
// animation — confirm against the page's CSS transition durations.
function transitionToActive() {
  reattachMediaStream(miniVideo, localVideo);
  remoteVideo.style.opacity = 1;
  card.style.webkitTransform = 'rotateY(180deg)';
  setTimeout(function() { localVideo.src = ''; }, 500);
  setTimeout(function() { miniVideo.style.opacity = 1; }, 1000);
  // Reset window display according to the aspect ratio of remote video.
  window.onresize();
  setStatus('');
}
+
// Flips the UI back to the single-user waiting state: un-rotates the
// card, moves the mini view's stream back to the main local video
// element (after the flip delay), and fades out mini/remote views.
function transitionToWaiting() {
  card.style.webkitTransform = 'rotateY(0deg)';
  setTimeout(function() {
    localVideo.src = miniVideo.src;
    miniVideo.src = '';
    remoteVideo.src = '' }, 500);
  miniVideo.style.opacity = 0;
  remoteVideo.style.opacity = 0;
  resetStatus();
}
+
// Fades out all video elements and shows the post-call status message.
// NOTE(review): the status text likely contained an <a> "rejoin" link
// whose markup appears stripped in this copy — confirm against upstream.
function transitionToDone() {
  localVideo.style.opacity = 0;
  remoteVideo.style.opacity = 0;
  miniVideo.style.opacity = 0;
  setStatus('You have left the call. \
  Click here to rejoin.');
}
+
// Requests WebKit fullscreen for the top-level container element.
function enterFullScreen() {
  container.webkitRequestFullScreen();
}
+
// Records that a candidate of |type| was seen at |location| and
// refreshes the info box — but only on the first sighting of that
// location/type pair, so repeated candidates don't redraw the UI.
function noteIceCandidate(location, type) {
  var seenAtLocation = gatheredIceCandidateTypes[location];
  if (seenAtLocation[type]) {
    return;
  }
  seenAtLocation[type] = 1;
  updateInfoDiv();
}
+
// Returns the debug info box element.
function getInfoDiv() {
  return document.getElementById("infoDiv");
}
+
// Rebuilds the contents of the debug info box: the set of gathered ICE
// candidate types per endpoint, the current PeerConnection state, and
// any accumulated errors. Shows the box whenever errors exist.
// FIX(review): the string literals in this function had their HTML
// markup stripped in this copy, leaving syntactically broken JS;
// reconstructed per the upstream apprtc source ("<pre>"-wrapped
// contents, errors rendered as red <p> elements).
function updateInfoDiv() {
  var contents = "<pre>Gathered ICE Candidates\n";
  for (var endpoint in gatheredIceCandidateTypes) {
    contents += endpoint + ":\n";
    for (var type in gatheredIceCandidateTypes[endpoint])
      contents += "  " + type + "\n";
  }
  if (pc != null) {
    contents += "Gathering: " + pc.iceGatheringState + "\n";
    contents += "\n";
    contents += "PC State:\n";
    contents += "Signaling: " + pc.signalingState + "\n";
    contents += "ICE: " + pc.iceConnectionState + "\n";
  }
  var div = getInfoDiv();
  div.innerHTML = contents + "</pre>";

  for (var msg in infoDivErrors) {
    div.innerHTML += '<p style="background-color: red; color: yellow;">' +
        infoDivErrors[msg] + '</p>';
  }
  if (infoDivErrors.length)
    showInfoDiv();
}
+
// Hides the info box if it is currently shown; otherwise shows it.
function toggleInfoDiv() {
  var div = getInfoDiv();
  if (div.style.display != "block") {
    showInfoDiv();
  } else {
    div.style.display = "none";
  }
}
+
// Makes the debug info box visible.
function showInfoDiv() {
  getInfoDiv().style.display = "block";
}
+
// Toggles the enabled flag on every local video track; no-ops (with a
// log message) when the local stream carries no video.
function toggleVideoMute() {
  // Call the getVideoTracks method via adapter.js.
  // FIX: 'var' added — videoTracks previously leaked as a global.
  var videoTracks = localStream.getVideoTracks();

  if (videoTracks.length === 0) {
    console.log('No local video available.');
    return;
  }

  // If currently muted, re-enable the tracks; otherwise disable them.
  // (FIX: loop index now declared with 'var'; duplicated loops folded.)
  var enable = isVideoMuted;
  for (var i = 0; i < videoTracks.length; i++) {
    videoTracks[i].enabled = enable;
  }
  console.log(enable ? 'Video unmuted.' : 'Video muted.');

  isVideoMuted = !isVideoMuted;
}
+
// Toggles the enabled flag on every local audio track; no-ops (with a
// log message) when the local stream carries no audio.
function toggleAudioMute() {
  // Call the getAudioTracks method via adapter.js.
  // FIX: 'var' added — audioTracks previously leaked as a global.
  var audioTracks = localStream.getAudioTracks();

  if (audioTracks.length === 0) {
    console.log('No local audio available.');
    return;
  }

  // If currently muted, re-enable the tracks; otherwise disable them.
  // (FIX: loop index now declared with 'var'; duplicated loops folded.)
  var enable = isAudioMuted;
  for (var i = 0; i < audioTracks.length; i++) {
    audioTracks[i].enabled = enable;
  }
  console.log(enable ? 'Audio unmuted.' : 'Audio muted.');

  isAudioMuted = !isAudioMuted;
}
+
// Keyboard shortcuts (Command on Mac, Control elsewhere):
//   <hotkey>-D: toggle audio mute.
//   <hotkey>-E: toggle video mute.
//   <hotkey>-I: toggle Info box.
// Returning false suppresses the corresponding built-in Chrome shortcut;
// all other keys fall through (undefined return) to the default handling.
document.onkeydown = function(event) {
  var isMac = navigator.appVersion.indexOf('Mac') != -1;
  var hotkey = isMac ? event.metaKey : event.ctrlKey;
  if (!hotkey)
    return;
  if (event.keyCode == 68) {       // 'D'
    toggleAudioMute();
    return false;
  }
  if (event.keyCode == 69) {       // 'E'
    toggleVideoMute();
    return false;
  }
  if (event.keyCode == 73) {       // 'I'
    toggleInfoDiv();
    return false;
  }
}
+
// Applies the configured audio *send* codec preference to |sdp|, if any;
// returns the SDP unchanged when no preference is configured.
function maybePreferAudioSendCodec(sdp) {
  if (audio_send_codec != '') {
    console.log('Prefer audio send codec: ' + audio_send_codec);
    return preferAudioCodec(sdp, audio_send_codec);
  }
  console.log('No preference on audio send codec.');
  return sdp;
}
+
// Applies the configured audio *receive* codec preference to |sdp|, if
// any; returns the SDP unchanged when no preference is configured.
function maybePreferAudioReceiveCodec(sdp) {
  if (audio_receive_codec != '') {
    console.log('Prefer audio receive codec: ' + audio_receive_codec);
    return preferAudioCodec(sdp, audio_receive_codec);
  }
  console.log('No preference on audio receive codec.');
  return sdp;
}
+
// Set |codec| as the default audio codec if it's present.
// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
// Returns the (possibly rewritten) SDP string; on malformed |codec| or
// SDP without an m=audio line the input is returned unchanged.
function preferAudioCodec(sdp, codec) {
  var fields = codec.split('/');
  if (fields.length != 2) {
    console.log('Invalid codec setting: ' + codec);
    return sdp;
  }
  var name = fields[0];
  var rate = fields[1];
  var sdpLines = sdp.split('\r\n');

  // Search for m line.
  // FIX: mLineIndex was previously only assigned inside the loop, so with
  // no m=audio line it stayed undefined and the '=== null' guard below
  // never fired, causing sdpLines[undefined] accesses further down.
  var mLineIndex = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('m=audio') !== -1) {
      mLineIndex = i;
      break;
    }
  }
  if (mLineIndex === null)
    return sdp;

  // If the codec is available, set it as the default in m line.
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search(name + '/' + rate) !== -1) {
      var regexp = new RegExp(':(\\d+) ' + name + '\\/' + rate, 'i');
      var payload = extractSdp(sdpLines[i], regexp);
      if (payload)
        sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex],
                                               payload);
      break;
    }
  }

  // Remove CN in m line and sdp.
  sdpLines = removeCN(sdpLines, mLineIndex);

  sdp = sdpLines.join('\r\n');
  return sdp;
}
+
// Set Opus in stereo if stereo is enabled: appends ' stereo=1' to the
// a=fmtp line that matches the Opus payload. Returns the SDP unchanged
// when Opus or its fmtp line is absent.
function addStereo(sdp) {
  var sdpLines = sdp.split('\r\n');

  // Find opus payload.
  // FIX: opusPayload and fmtpLineIndex were only assigned inside their
  // loops; when absent they were undefined, so the '=== null' guard never
  // matched and sdpLines[undefined].concat(...) threw. Initialize both to
  // null so the early return actually works.
  var opusPayload = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('opus/48000') !== -1) {
      opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
      break;
    }
  }

  // Find the payload in fmtp line.
  var fmtpLineIndex = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('a=fmtp') !== -1) {
      var payload = extractSdp(sdpLines[i], /a=fmtp:(\d+)/ );
      if (payload === opusPayload) {
        fmtpLineIndex = i;
        break;
      }
    }
  }
  // No fmtp line found.
  if (fmtpLineIndex === null)
    return sdp;

  // Append stereo=1 to fmtp line.
  sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat(' stereo=1');

  sdp = sdpLines.join('\r\n');
  return sdp;
}
+
// Returns the single capture group of |pattern| matched against
// |sdpLine|, or null when the pattern does not match exactly one group.
function extractSdp(sdpLine, pattern) {
  var match = sdpLine.match(pattern);
  if (match && match.length == 2) {
    return match[1];
  }
  return null;
}
+
// Set the selected codec to the first in m line.
// Rebuilds the m line so |payload| appears first among the payload ids
// (which start at the fourth space-separated field) without duplication.
function setDefaultCodec(mLine, payload) {
  var elements = mLine.split(' ');
  var rebuilt = [];
  var pos = 0;
  for (var i = 0; i < elements.length; i++) {
    if (pos === 3) { // Format of media starts from the fourth.
      rebuilt[pos] = payload; // Put target payload to the first.
      pos++;
    }
    if (elements[i] !== payload) {
      rebuilt[pos] = elements[i];
      pos++;
    }
  }
  return rebuilt.join(' ');
}
+
// Strip CN from sdp before CN constraints is ready: drops every
// a=rtpmap CN line from |sdpLines| and removes the corresponding payload
// ids from the m line at |mLineIndex|. Mutates and returns |sdpLines|.
function removeCN(sdpLines, mLineIndex) {
  var mLineElements = sdpLines[mLineIndex].split(' ');
  // Scan from the end so splicing never shifts indices not yet visited.
  for (var i = sdpLines.length - 1; i >= 0; i--) {
    var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
    if (!payload) {
      continue;
    }
    var cnPos = mLineElements.indexOf(payload);
    if (cnPos !== -1) {
      mLineElements.splice(cnPos, 1);  // Remove CN payload from m line.
    }
    sdpLines.splice(i, 1);             // Remove CN line in sdp.
  }

  sdpLines[mLineIndex] = mLineElements.join(' ');
  return sdpLines;
}
+
// Send BYE on refreshing(or leaving) a demo page
// to ensure the room is cleaned for next session.
window.onbeforeunload = function() {
  sendMessage({type: 'bye'});
}
+
// Centers and letterboxes the video container within the window,
// preserving the aspect ratio of whichever video is visible (remote
// preferred over local); does nothing when neither video is showing.
window.onresize = function() {
  var aspectRatio;
  if (remoteVideo.style.opacity === '1') {
    aspectRatio = remoteVideo.videoWidth / remoteVideo.videoHeight;
  } else if (localVideo.style.opacity === '1') {
    aspectRatio = localVideo.videoWidth / localVideo.videoHeight;
  } else {
    return;
  }

  var innerHeight = this.innerHeight;
  var innerWidth = this.innerWidth;
  var videoWidth = innerWidth < aspectRatio * window.innerHeight ?
                       innerWidth : aspectRatio * window.innerHeight;
  var videoHeight = innerHeight < window.innerWidth / aspectRatio ?
                        innerHeight : window.innerWidth / aspectRatio;
  // FIX: 'var' added — containerDiv previously leaked as a global.
  var containerDiv = document.getElementById('container');
  containerDiv.style.width = videoWidth + 'px';
  containerDiv.style.height = videoHeight + 'px';
  containerDiv.style.left = (innerWidth - videoWidth) / 2 + 'px';
  containerDiv.style.top = (innerHeight - videoHeight) / 2 + 'px';
};
diff --git a/samples/js/apprtc/test/test_channel.html b/samples/js/apprtc/test/test_channel.html
new file mode 100644
index 0000000000..1668ce0dff
--- /dev/null
+++ b/samples/js/apprtc/test/test_channel.html
@@ -0,0 +1,93 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/js/apprtc/turn-prober/README b/samples/js/apprtc/turn-prober/README
new file mode 100644
index 0000000000..58ba3398ab
--- /dev/null
+++ b/samples/js/apprtc/turn-prober/README
@@ -0,0 +1,9 @@
+This script contains a simple prober that verifies that:
+- CEOD vends TURN server URIs with credentials on demand (mimicking apprtc)
+- rfc5766-turn-server vends TURN candidates from the servers vended by CEOD.
+
To use, simply run ./turn-prober.sh
If it prints "PASS" (and exits 0) then all is well.
If it prints a mess of logs (and exits non-zero) then something has gone
sideways and apprtc.appspot.com is probably not working well (because of
missing TURN functionality).
diff --git a/samples/js/apprtc/turn-prober/turn-prober.html b/samples/js/apprtc/turn-prober/turn-prober.html
new file mode 100644
index 0000000000..94cf68ecfc
--- /dev/null
+++ b/samples/js/apprtc/turn-prober/turn-prober.html
@@ -0,0 +1,132 @@
+
+
+
+
+
+
+
diff --git a/samples/js/apprtc/turn-prober/turn-prober.sh b/samples/js/apprtc/turn-prober/turn-prober.sh
new file mode 100755
index 0000000000..2a063c58ec
--- /dev/null
+++ b/samples/js/apprtc/turn-prober/turn-prober.sh
@@ -0,0 +1,49 @@
#!/bin/bash -e

# Print the PIDs of chrome processes launched with our temp profile dir
# ($D). The "c[h]rome" bracket trick keeps grep from matching itself.
# NOTE(review): $D is unquoted in the pipeline — fine for mktemp -d
# output, but would break on paths containing spaces.
function chrome_pids() {
  ps axuwww|grep $D|grep c[h]rome|awk '{print $2}'
}
+
# Work from the script's directory with a throwaway chrome profile dir.
cd $(dirname $0)
export D=$(mktemp -d)

CHROME_LOG_FILE="${D}/chrome_debug.log"
touch $CHROME_LOG_FILE

# Run under a virtual X server unless a real display is available.
XVFB="xvfb-run -a -e $CHROME_LOG_FILE -s '-screen 0 1024x768x24'"
if [ -n "$DISPLAY" ]; then
  XVFB=""
fi

# "eval" below is required by $XVFB containing a quoted argument.
# Point chrome at the local prober page; all output goes to the log file.
# NOTE(review): CHROME_PID is captured but never used (the kill loop
# below uses chrome_pids instead).
eval $XVFB chrome \
  --enable-logging=stderr \
  --no-first-run \
  --disable-web-security \
  --user-data-dir=$D \
  --vmodule="*media/*=3,*turn*=3" \
  "file://${PWD}/turn-prober.html" > $CHROME_LOG_FILE 2>&1 &
CHROME_PID=$!

# Poll until the page logs DONE or all chrome processes have exited.
while ! grep -q DONE $CHROME_LOG_FILE && chrome_pids|grep -q .; do
  sleep 0.1
done

# Suppress bash's Killed message for the chrome above.
exec 3>&2
exec 2>/dev/null
while [ ! -z "$(chrome_pids)" ]; do
  kill -9 $(chrome_pids)
done
exec 2>&3
exec 3>&-

# NOTE(review): $DONE is assigned but never read; the PASS check below
# re-greps the log file directly.
DONE=$(grep DONE $CHROME_LOG_FILE)
EXIT_CODE=0
if ! grep -q "DONE: PASS" $CHROME_LOG_FILE; then
  cat $CHROME_LOG_FILE
  EXIT_CODE=1
fi

rm -rf $D
exit $EXIT_CODE
diff --git a/samples/js/base/adapter.js b/samples/js/base/adapter.js
new file mode 100644
index 0000000000..3dd894784f
--- /dev/null
+++ b/samples/js/base/adapter.js
@@ -0,0 +1,198 @@
// Cross-browser WebRTC shims, filled in below based on the detected
// browser (Firefox vs. Chrome). All start as null and stay null when the
// browser is not WebRTC-capable.
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
var webrtcDetectedVersion = null;
+
// This function is used for logging: prints |text| (minus any trailing
// newline) prefixed with a seconds-resolution performance.now() stamp.
function trace(text) {
  if (text.charAt(text.length - 1) == '\n') {
    text = text.substring(0, text.length - 1);
  }
  console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
// Back-compat shim: older browsers only understand the legacy 'url'
// field on RTCIceServer entries, while newer configs use 'urls'.
// Rewrites each entry of pcConfig.iceServers in place, renaming
// 'urls' -> 'url'. Safe no-op on null config.
function maybeFixConfiguration(pcConfig) {
  // FIX: also guard a missing iceServers array — previously a non-null
  // config without it crashed on .length below.
  if (pcConfig == null || pcConfig.iceServers == null) {
    return;
  }
  for (var i = 0; i < pcConfig.iceServers.length; i++) {
    if (pcConfig.iceServers[i].hasOwnProperty('urls')){
      pcConfig.iceServers[i]['url'] = pcConfig.iceServers[i]['urls'];
      delete pcConfig.iceServers[i]['urls'];
    }
  }
}
+
// Browser detection and shim installation. Exactly one branch runs at
// load time, filling in the cross-browser wrappers declared above.
if (navigator.mozGetUserMedia) {
  console.log("This appears to be Firefox");

  webrtcDetectedBrowser = "firefox";

  webrtcDetectedVersion =
      parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);

  // The RTCPeerConnection object.
  var RTCPeerConnection = function(pcConfig, pcConstraints) {
    // .urls is not supported in FF yet.
    maybeFixConfiguration(pcConfig);
    return new mozRTCPeerConnection(pcConfig, pcConstraints);
  }

  // The RTCSessionDescription object.
  RTCSessionDescription = mozRTCSessionDescription;

  // The RTCIceCandidate object.
  RTCIceCandidate = mozRTCIceCandidate;

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.mozGetUserMedia.bind(navigator);
  navigator.getUserMedia = getUserMedia;

  // Creates iceServer from the url for FF.
  createIceServer = function(url, username, password) {
    var iceServer = null;
    var url_parts = url.split(':');
    if (url_parts[0].indexOf('stun') === 0) {
      // Create iceServer with stun url.
      iceServer = { 'url': url };
    } else if (url_parts[0].indexOf('turn') === 0) {
      // NOTE(review): the comment below says "<=27" but the check is
      // "< 27" — confirm which boundary is intended.
      if (webrtcDetectedVersion < 27) {
        // Create iceServer with turn url.
        // Ignore the transport parameter from TURN url for FF version <=27.
        var turn_url_parts = url.split("?");
        // Return null for createIceServer if transport=tcp.
        if (turn_url_parts.length === 1 ||
            turn_url_parts[1].indexOf('transport=udp') === 0) {
          iceServer = {'url': turn_url_parts[0],
                       'credential': password,
                       'username': username};
        }
      } else {
        // FF 27 and above supports transport parameters in TURN url,
        // So passing in the full url to create iceServer.
        iceServer = {'url': url,
                     'credential': password,
                     'username': username};
      }
    }
    return iceServer;
  };

  createIceServers = function(urls, username, password) {
    var iceServers = [];
    // Use .url for FireFox.
    // NOTE(review): loop index 'i' is an implicit global (no 'var').
    for (i = 0; i < urls.length; i++) {
      var iceServer = createIceServer(urls[i],
                                      username,
                                      password);
      if (iceServer !== null) {
        iceServers.push(iceServer);
      }
    }
    return iceServers;
  }

  // Attach a media stream to an element.
  attachMediaStream = function(element, stream) {
    console.log("Attaching media stream");
    element.mozSrcObject = stream;
    element.play();
  };

  reattachMediaStream = function(to, from) {
    console.log("Reattaching media stream");
    to.mozSrcObject = from.mozSrcObject;
    to.play();
  };

  // Fake get{Video,Audio}Tracks
  if (!MediaStream.prototype.getVideoTracks) {
    MediaStream.prototype.getVideoTracks = function() {
      return [];
    };
  }

  if (!MediaStream.prototype.getAudioTracks) {
    MediaStream.prototype.getAudioTracks = function() {
      return [];
    };
  }
} else if (navigator.webkitGetUserMedia) {
  console.log("This appears to be Chrome");

  webrtcDetectedBrowser = "chrome";
  webrtcDetectedVersion =
      parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);

  // Creates iceServer from the url for Chrome M33 and earlier.
  createIceServer = function(url, username, password) {
    var iceServer = null;
    var url_parts = url.split(':');
    if (url_parts[0].indexOf('stun') === 0) {
      // Create iceServer with stun url.
      iceServer = { 'url': url };
    } else if (url_parts[0].indexOf('turn') === 0) {
      // Chrome M28 & above uses below TURN format.
      iceServer = {'url': url,
                   'credential': password,
                   'username': username};
    }
    return iceServer;
  };

  // Creates iceServers from the urls for Chrome M34 and above.
  createIceServers = function(urls, username, password) {
    var iceServers = [];
    if (webrtcDetectedVersion >= 34) {
      // .urls is supported since Chrome M34.
      // NOTE(review): this branch returns a single object rather than an
      // array — presumably RTCPeerConnection accepts either; confirm.
      iceServers = {'urls': urls,
                    'credential': password,
                    'username': username };
    } else {
      // NOTE(review): loop index 'i' is an implicit global (no 'var').
      for (i = 0; i < urls.length; i++) {
        var iceServer = createIceServer(urls[i],
                                        username,
                                        password);
        if (iceServer !== null) {
          iceServers.push(iceServer);
        }
      }
    }
    return iceServers;
  };

  // The RTCPeerConnection object.
  var RTCPeerConnection = function(pcConfig, pcConstraints) {
    // .urls is supported since Chrome M34.
    if (webrtcDetectedVersion < 34) {
      maybeFixConfiguration(pcConfig);
    }
    return new webkitRTCPeerConnection(pcConfig, pcConstraints);
  }

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
  navigator.getUserMedia = getUserMedia;

  // Attach a media stream to an element.
  attachMediaStream = function(element, stream) {
    if (typeof element.srcObject !== 'undefined') {
      element.srcObject = stream;
    } else if (typeof element.mozSrcObject !== 'undefined') {
      element.mozSrcObject = stream;
    } else if (typeof element.src !== 'undefined') {
      element.src = URL.createObjectURL(stream);
    } else {
      console.log('Error attaching stream to element.');
    }
  };

  reattachMediaStream = function(to, from) {
    to.src = from.src;
  };
} else {
  console.log("Browser does not appear to be WebRTC-capable");
}