Adding webrtc-sample demos under trunk/samples.
Review URL: https://webrtc-codereview.appspot.com/1126005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3578 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/samples/js/demos/html/constraints-and-stats.html b/samples/js/demos/html/constraints-and-stats.html
new file mode 100644
index 0000000..d59a2f0
--- /dev/null
+++ b/samples/js/demos/html/constraints-and-stats.html
@@ -0,0 +1,262 @@
+<html>
+<head>
+<title>Constraints and Statistics</title>
+<script>
+var mystream;
+var pc1;
+var pc2;
+
+$ = function(id) {
+  return document.getElementById(id);
+}
+
+function log(txt) {
+  console.log(txt);
+}
+
+function openCamera() {
+  if (mystream) {
+    mystream.stop();
+  }
+  navigator.webkitGetUserMedia(cameraConstraints(), gotStream, function() {
+     log("GetUserMedia failed");
+    });
+} 
+
+function gotStream(stream) {
+  log("GetUserMedia succeeded");
+  mystream = stream;
+  $("local-video").src = webkitURL.createObjectURL(stream);
+}
+
+function cameraConstraints() {
+  var constraints = {};
+  constraints.audio = true;
+  constraints.video = { mandatory: {}, optional: [] };
+  if ($("minwidth").value != "0") {
+    constraints.video.mandatory.minWidth = $("minwidth").value;
+  }
+  if ($("maxwidth").value != "0") {
+    constraints.video.mandatory.maxWidth = $("maxwidth").value;
+  }
+  if ($("minheight").value != "0") {
+    constraints.video.mandatory.minHeight = $("minheight").value;
+  }
+  if ($("maxheight").value != "0") {
+    constraints.video.mandatory.maxHeight = $("maxheight").value;
+  }
+  if ($("frameRate").value != "0") {
+    constraints.video.mandatory.minFrameRate = $("frameRate").value;
+  }
+  log('Camera constraints are ' + JSON.stringify(constraints));
+  $("cameraConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
+  return constraints;
+}
+
+function streamConstraints() {
+  var constraints = { mandatory: {}, optional: [] };
+  if ($("bandwidth").value != "0") {
+    constraints.optional[0] = { 'bandwidth' : $('bandwidth').value };
+  }
+  log('Constraints are ' + JSON.stringify(constraints));
+  $("addStreamConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
+  return constraints;
+}
+
+
+
+function connect() {
+  pc1 = new webkitRTCPeerConnection(null);
+  pc2 = new webkitRTCPeerConnection(null);
+  pc1.addStream(mystream, streamConstraints());
+  log('PC1 creating offer');
+  pc1.onnegotiationneeded = function() {
+    log('Negotiation needed - PC1');
+  }
+  pc2.onnegotiationneeded = function() {
+    log('Negotiation needed - PC2');
+  }
+  pc1.onicecandidate = function(e) {
+    log('Candidate PC1');
+    if (e.candidate) {
+      pc2.addIceCandidate(new RTCIceCandidate(e.candidate));
+    }
+  }
+  pc2.onicecandidate = function(e) {
+    log('Candidate PC2');
+    if (e.candidate) {
+      pc1.addIceCandidate(new RTCIceCandidate(e.candidate));
+    }
+  }
+  pc2.onaddstream = function(e) {
+    log('PC2 got stream');
+    $('remote-video').src = webkitURL.createObjectURL(e.stream);
+    log('Remote video is ' + $('remote-video').src);
+  }
+  pc1.createOffer(function(desc) {
+    log('PC1 offering');
+    pc1.setLocalDescription(desc);
+    pc2.setRemoteDescription(desc);
+    pc2.createAnswer(function(desc2) {
+      log('PC2 answering');
+      pc2.setLocalDescription(desc2);
+      pc1.setRemoteDescription(desc2);
+    });
+  });
+}
+
+// Display statistics
+var statCollector = setInterval(function() {
+  var display = function(str) {
+    $('bitrate').innerHTML = str;
+  }
+
+  display("No stream");
+  if (pc2 && pc2.remoteStreams[0]) {
+    if (pc2.getStats) {
+      display('No stats callback');
+      pc2.getStats(function(stats) {
+        log('Raw stats ' + stats);
+        var statsString = '';
+        var results = stats.result();
+        log('Raw results ' + results);
+        for (var i = 0; i < results.length; ++i) {
+          var res = results[i];
+          log(i + ': ' + JSON.stringify(res));
+          statsString += '<h3>Report ';
+          statsString += i;
+          statsString += '</h3>';
+          if (res.local) {
+            statsString += "<p>Local ";
+            statsString += dumpStats(res.local);
+          }
+          if (res.remote) {
+            statsString += "<p>Remote ";
+            statsString += dumpStats(res.remote);
+          }
+        }
+        $('stats').innerHTML = statsString;
+        display('No bitrate stats');
+      });
+    } else {
+      display('No stats function. Use at least Chrome 24.0.1285');
+    }
+  } else {
+    log('Not connected yet');
+  }
+  // Collect some stats from the video tags.
+  local_video = $('local-video');
+  if (local_video) {
+     $('local-video-stats').innerHTML = local_video.videoWidth +
+         'x' + local_video.videoHeight;
+  }
+  remote_video = $('remote-video');
+  if (remote_video) {
+     $('remote-video-stats').innerHTML = remote_video.videoWidth +
+         'x' + remote_video.videoHeight;
+  }
+}, 1000);
+
+// Dumping a stats variable as a string.
+// might be named toString?
+function dumpStats(obj) {
+  var statsString = 'Timestamp:';
+  statsString += obj.timestamp;
+  if (obj.names) {
+    log('Have names function');
+    names = obj.names();
+    for (var i = 0; i < names.length; ++i) {
+       statsString += '<br>';
+       statsString += names[i];
+       statsString += ':';
+       statsString += obj.stat(names[i]);
+    }
+  } else {
+    log('No names function');
+    if (obj.stat('audioOutputLevel')) {
+      statsString += "audioOutputLevel: ";
+      statsString += obj.stat('audioOutputLevel');
+      statsString += "<br>";
+    }
+  }
+  return statsString;
+}
+  
+
+// Utility to show the value of a field in a span called name+Display
+function showValue(name, value) {
+  $(name + 'Display').innerHTML = value;
+}
+</script>
+</head>
+<body>
+<h1>Constraints and Statistics</h1>
+This page is meant to give some hints on how one can use constraints and statistics in WebRTC applications.
+<p>
+The form to the left gives constraints you can set on the getUserMedia call.
+When you hit "open", it will (re)open the camera with these constraints.
+<p>
+The left picture is the local preview. The right picture is the picture
+after being passed through the PeerConnection (locally).
+<p>
+Underneath the picture you will see a running display of how many Kbits/sec
+the video feed uses for transmission.
+<hr>
+<table>
+<tr>
+<td align="top">
+<h2>getUserMedia constraints</h2>
+<table>
+<tr><td><td>Min<td>Max
+<tr><td>Horizontal
+<td><input type="range" id="minwidth" min="0" max="1280" value="300"
+  onchange="showValue(this.id, this.value)">
+<td><input type="range" id="maxwidth" min="0" max="1280" value="640"
+  onchange="showValue(this.id, this.value)">
+<td><span id="minwidthDisplay">300</span>-<span id="maxwidthDisplay">640</span>
+<tr><td>Vertical
+<td><input type="range" id="minheight" min="0" max="1280" value="200"
+  onchange="showValue(this.id, this.value)">
+<td><input type="range" id="maxheight" min="0" max="1280" value="480"
+  onchange="showValue(this.id, this.value)">
+<td><span id="minheightDisplay">200</span>-<span id="maxheightDisplay">480</span>
+<tr><td>
+FrameRate
+<td colspan=2><input type="range" id="frameRate" min="0" max="60" value="30"
+  onchange="showValue(this.id, this.value)">
+<td><span id="frameRateDisplay">30</span>
+</table>
+<input type="submit" name="capture" value="Capture!" onclick="openCamera()">
+</td>
+<td align="top">
+<h2>addStream constraints</h2>
+Maximum bitrate
+<input type="range" id="bandwidth" min="0" max="2000" value="1000"
+  onchange="showValue(this.id, this.value)">
+<span id="bandwidthDisplay">1000</span>
+<br>
+<input type="submit" name="connect" value="Connect!" onclick="connect()">
+</td>
+</tr>
+<tr>
+<td>
+<video id="local-video" autoplay width=400></video>
+</td>
+<td>
+<video id="remote-video" autoplay width=400></video>
+</td>
+<tr>
+<td><span id="local-video-stats"></span>
+<td><span id="remote-video-stats"></span>
+<br>
+<span id="bitrate">Bitrate unknown</span>
+</td>
+</tr>
+<tr>
+<td><pre><span id="cameraConstraints"></span></pre>
+<td><pre><span id="addStreamConstraints"></span></pre>
+</table>
+<h2>Statistics report display</h2>
+<div id="stats">Stats will appear here.</div>
+</body>
+</html>
diff --git a/samples/js/demos/html/dc1.html b/samples/js/demos/html/dc1.html
new file mode 100755
index 0000000..f327b79
--- /dev/null
+++ b/samples/js/demos/html/dc1.html
@@ -0,0 +1,176 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>Data Channel Demo 1</title>
+<style>
+button {
+  font: 18px sans-serif;
+  padding: 8px;
+}
+textarea {
+  font-family: monospace;
+  margin: 2px;
+  width: 480px;
+  height: 640px;
+}
+#left { position: absolute; left: 0; top: 0; width: 50%; }
+#right { position: absolute; right: 0; top: 0; width: 50%; }
+ 
+</style>
+</head>
+<body>
+<div id="left">
+<br>
+<h2>Send data</h2>
+<textarea id="dataChannelSend" rows="5" cols="15" disabled="true">
+</textarea><br>
+<button id="startButton" onclick="createConnection()">Start</button>
+<button id="sendButton" onclick="sendData()">Send Data</button>
+<button id="closeButton" onclick="closeDataChannels()">Stop Send Data
+</button>
+<br>
+</div>
+<div id="right">
+<br>
+<h2>Received Data</h2>
+<textarea id="dataChannelReceive" rows="5" cols="15" disabled="true">
+</textarea><br>
+</div>
+<script>
+var pc1, pc2, sendChannel, receiveChannel;
+startButton.disabled = false;
+sendButton.disabled = true;
+closeButton.disabled = true;
+
+function trace(text) {
+  // This function is used for logging.
+  if (text[text.length - 1] == '\n') {
+    text = text.substring(0, text.length - 1);
+  }
+  console.log((performance.now() / 1000).toFixed(3) + ": " + text);
+}
+ 
+function createConnection() {
+  var servers = null;
+  pc1 = new webkitRTCPeerConnection(servers,
+                                    {optional: [{RtpDataChannels: true}]});
+  trace('Created local peer connection object pc1');
+  
+  try {
+    // Reliable Data Channels not yet supported in Chrome
+    // Data Channel api supported from Chrome M25. 
+    // You need to start chrome with  --enable-data-channels flag.
+    sendChannel = pc1.createDataChannel("sendDataChannel", 
+                                         {reliable: false});
+    trace('Created send data channel');
+  } catch (e) {
+    alert('Failed to create data channel. ' +
+          'You need Chrome M25 or later with --enable-data-channels flag');
+    trace('Create Data channel failed with exception: ' + e.message);  
+  }
+  pc1.onicecandidate = iceCallback1; 
+  sendChannel.onopen = onSendChannelStateChange;
+  sendChannel.onclose = onSendChannelStateChange;
+  
+  pc2 = new webkitRTCPeerConnection(servers, 
+                                    {optional: [{RtpDataChannels: true}]});
+  trace('Created remote peer connection object pc2');
+  
+  pc2.onicecandidate = iceCallback2;
+  pc2.ondatachannel = receiveChannelCallback;
+  
+  pc1.createOffer(gotDescription1);
+  startButton.disabled = true;
+  closeButton.disabled = false;  
+}
+ 
+function sendData() {
+  var data = document.getElementById("dataChannelSend").value;
+  sendChannel.send(data);
+  trace('Sent Data: ' + data);
+}
+
+function closeDataChannels() {
+  trace('Closing data Channels');
+  sendChannel.close();
+  trace('Closed data channel with label: ' + sendChannel.label);
+  receiveChannel.close();
+  trace('Closed data channel with label: ' + receiveChannel.label);
+  pc1.close(); 
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  trace('Closed peer connections');
+  startButton.disabled = false;
+  sendButton.disabled = true;
+  closeButton.disabled = true;
+  document.getElementById("dataChannelSend").value = "";
+  document.getElementById("dataChannelReceive").value = "";
+  document.getElementById("dataChannelSend").disabled = true;
+}
+
+function gotDescription1(desc) {
+  pc1.setLocalDescription(desc);
+  trace('Offer from pc1 \n' + desc.sdp);
+  pc2.setRemoteDescription(desc);
+  pc2.createAnswer(gotDescription2);
+}
+
+function gotDescription2(desc) {
+  pc2.setLocalDescription(desc);
+  trace('Answer from pc2 \n' + desc.sdp);
+  pc1.setRemoteDescription(desc);
+}
+
+function iceCallback1(event) { 
+  trace('local ice callback');
+  if (event.candidate) {
+    pc2.addIceCandidate(event.candidate);
+    trace('Local ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+      
+function iceCallback2(event) {
+  trace('remote ice callback');
+  if (event.candidate) {
+    pc1.addIceCandidate(event.candidate);
+    trace('Remote ICE candidate: \n ' + event.candidate.candidate);
+  }
+}
+
+function receiveChannelCallback(event) {
+  trace('Receive Channel Callback');
+  receiveChannel = event.channel;
+  receiveChannel.onmessage = onReceiveMessageCallback;
+  receiveChannel.onopen = onReceiveChannelStateChange;
+  receiveChannel.onclose = onReceiveChannelStateChange;  
+}
+
+function onReceiveMessageCallback(event) {
+  trace('Received Message');
+  document.getElementById("dataChannelReceive").value = event.data;
+}
+
+function onSendChannelStateChange() {
+  var readyState = sendChannel.readyState;
+  trace('Send channel state is: ' + readyState);
+  if (readyState == "open") {
+    document.getElementById("dataChannelSend").disabled = false;
+    sendButton.disabled = false;
+    closeButton.disabled = false;
+  } else {
+    document.getElementById("dataChannelSend").disabled = true;
+    sendButton.disabled = true;
+    closeButton.disabled = true;
+  }
+}
+
+function onReceiveChannelStateChange() {
+  var readyState = receiveChannel.readyState;
+  trace('Receive channel state is: ' + readyState);
+}
+
+</script>
+</body>
+</html>
+
diff --git a/samples/js/demos/html/face.html b/samples/js/demos/html/face.html
new file mode 100644
index 0000000..7750e8c
--- /dev/null
+++ b/samples/js/demos/html/face.html
@@ -0,0 +1,151 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

+<html>

+<head>

+<script type="text/javascript" src="../js/ccv.js"></script>

+<script type="text/javascript" src="../js/face.js"></script>

+  <script src="/_ah/channel/jsapi"></script>

+  <style type="text/css">

+* { margin:0; padding:0; } /* to remove the top and left whitespace */

+html, body { width:100%; height:100%; } /* just to be sure these are full screen*/

+body {font-family: 'Helvetica';background-color: #000000; }

+a:link { color: #ffffff; } a:visited {color: #ffffff; }

+

+#localCanvas {

+    display: block;

+    position: absolute;  

+    width: 100%;

+    height: 100%;

+}

+

+#localVideo {

+    display: block;

+    position: absolute;

+    top: 0;

+    bottom: 0;

+    left: 0;

+    right: 0;

+    width: 100%;

+    height: 100%;

+    -webkit-transition-property: opacity;

+		-webkit-transition-duration: 2s;

+    opacity: 0;

+}

+#logo {

+    display: block;

+    top:4;

+    right:4;

+    position:absolute;

+    float:right;    

+    #opacity: 0.8;    

+}

+

+#credit {

+    display: block;

+    top:28;

+    right:4;

+    position:absolute;

+    float:right; 

+    font-size:10px;    

+}

+

+  </style>

+  <title>WebRTC Face Reco Demo Application</title>

+</head>

+<body>

+<script type="text/javascript">

+var localVideo;

+var localCanvas;

+//var worker = new Worker('ccv.js');

+

+initialize = function() {

+localVideo = document.getElementById("localVideo");

+localCanvas = document.getElementById("localCanvas");

+getUserMedia();

+}

+

+getUserMedia = function() {

+try { navigator.webkitGetUserMedia({video:true,audio:true}, onGotStream, onFailedStream);

+//trace("Requested access to local media");

+} catch (e) {

+alert("getUserMedia error " + e);

+//trace_e(e, "getUserMedia error");

+}

+}

+

+poll = function() {  

+  var w = localVideo.videoWidth;

+	var h = localVideo.videoHeight;

+	var canvas = document.createElement('canvas');

+	canvas.width  = w;

+	canvas.height = h;

+	var ctx = canvas.getContext('2d');

+	ctx.drawImage(localVideo, 0, 0, w, h);  

+  var comp = ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),

+										"cascade" : cascade,

+										"interval" : 5,

+										"min_neighbors" : 1 });    

+		/* draw detected area */

+    

+    

+    //localCanvas.left = 400;

+//localCanvas.top = localVideo.top;

+/*localCanvas.right = localVideo.right;

+localCanvas.bottom = localVideo.bottom;*/

+localCanvas.width = localVideo.clientWidth;

+localCanvas.height = localVideo.clientHeight;

+    var ctx2 = localCanvas.getContext('2d');

+    ctx2.lineWidth = 2;

+    ctx2.lineJoin = "round";	  

+    ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);  

+    var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;

+    if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {

+      x_offset = (localVideo.clientWidth - localVideo.clientHeight * localVideo.videoWidth / localVideo.videoHeight) / 2;

+    } else {

+      y_offset = (localVideo.clientHeight - localVideo.clientWidth * localVideo.videoHeight / localVideo.videoWidth) / 2;

+    }

+    x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;

+    y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;

+		for (var i = 0; i < comp.length; i++) {

+      comp[i].x = comp[i].x * x_scale + x_offset;

+      comp[i].y = comp[i].y * y_scale + y_offset;   

+      comp[i].width = comp[i].width * x_scale;

+      comp[i].height = comp[i].height * y_scale;

+      var opacity = 0.1;

+      if (comp[i].confidence > 0) {

+        opacity += comp[i].confidence / 10;

+        if (opacity > 1.0) opacity = 1.0;

+      }

+      //ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";       

+      ctx2.lineWidth = opacity * 10;

+      ctx2.strokeStyle = "rgb(255,0,0)";

+			ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);                    

+      }

+   setTimeout(poll, 1000);

+   

+}

+

+

+onGotStream = function(stream) {

+var url = webkitURL.createObjectURL(stream);

+localVideo.style.opacity = 1; localVideo.src = url;

+localStream = stream;

+

+//trace("User has granted access to local media. url = " + url);

+setTimeout(poll, 2000);

+}

+

+onFailedStream = function(error) {

+alert("Failed to get access to local media. Error code was " + error.code + ".");

+//trace_warning("Failed to get access to local media. Error code was " + error.code);

+} 

+

+

+setTimeout(initialize, 1);

+</script>

+                      

+  <video id="localVideo" autoplay="autoplay"></video>

+  <canvas width="1000" height="1000" id="localCanvas"></canvas>      

+  <a href="http://www.webrtc.org"><img id="logo" alt="WebRTC" src="../images/webrtc_black_20p.png"></a>

+  <a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>

+</body>

+</html>

diff --git a/samples/js/demos/html/gum1.html b/samples/js/demos/html/gum1.html
new file mode 100644
index 0000000..d5e8020
--- /dev/null
+++ b/samples/js/demos/html/gum1.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>getUserMedia Demo 1</title>
+<style>
+video {
+    border:5px solid black;
+    width:480px;
+    height:360px;
+}
+button {
+    font: 18px sans-serif;
+    padding: 8px;
+}
+</style>
+</head>
+<body>
+<video id="vid" autoplay="true"></video>
+<br>
+<button id="btn" onclick="start()">Start</button>
+<script>
+video = document.getElementById("vid");
+function start() {
+  navigator.webkitGetUserMedia({video:true}, gotStream, function() {}); 
+  btn.disabled = true;
+}
+function gotStream(stream) {
+  video.src = webkitURL.createObjectURL(stream);
+}
+</script>
+</body>
+</html>
+
diff --git a/samples/js/demos/html/gum2.html b/samples/js/demos/html/gum2.html
new file mode 100644
index 0000000..02a40c9
--- /dev/null
+++ b/samples/js/demos/html/gum2.html
@@ -0,0 +1,48 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>getUserMedia Demo 2</title>
+<style>
+video {
+    border:5px solid black;
+    width:480px;
+    height:360px;
+}
+canvas {
+    border:5px solid black;
+    width:480px;
+    height:360px;     
+}
+button {
+  font: 18px sans-serif;
+  padding: 8px;
+}
+</style>
+</head>
+<body>
+<video id="vid" autoplay="true"></video>
+<canvas id="cvs"></canvas>
+<br>
+<button id="btn1" onclick="start()">Start</button>
+<button id="btn2" onclick="snap()">Snapshot</button>
+<script>
+video = document.getElementById("vid");
+canvas = document.getElementById("cvs");
+canvas.width = 480;
+canvas.height = 360;  
+btn2.disabled = true;
+function start() {
+  navigator.webkitGetUserMedia({video:true}, gotStream, function() {}); 
+  btn1.disabled = true;
+}
+function gotStream(stream) {
+  video.src = webkitURL.createObjectURL(stream);
+  btn2.disabled = false;
+}
+function snap() {
+  canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
+}
+</script>
+</body>
+</html>
+
diff --git a/samples/js/demos/html/gum3.html b/samples/js/demos/html/gum3.html
new file mode 100644
index 0000000..5ab4f8b
--- /dev/null
+++ b/samples/js/demos/html/gum3.html
@@ -0,0 +1,74 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>getUserMedia Demo 3</title>
+<style>
+video {
+    border:5px solid black;
+    width:480px;
+    height:360px;
+}
+canvas {
+    border:5px solid black;
+    width:480px;
+    height:360px;
+}
+button {
+    font: 18px sans-serif;
+    padding: 8px;
+}  
+.grayscale {
+  -webkit-filter: grayscale(1);
+}
+.sepia {
+  -webkit-filter: sepia(1);
+}
+.invert {
+  -webkit-filter: invert(1);
+}
+.blur {
+  -webkit-filter: blur(3px);
+}
+</style>
+</head>
+<body>
+<video id="vid" autoplay="true"></video>
+<canvas id="cvs"></canvas>
+<br>
+<button id="btn1" onclick="start()">Start</button>
+<button id="btn2" onclick="change()">Change Filter</button>
+<button id="btn3" onclick="snap()">Snapshot</button>
+<script>
+filters = ["", "sepia", "invert", "blur", "grayscale"];
+findex = 0;
+video = document.getElementById("vid");
+canvas = document.getElementById("cvs");
+canvas.width = 480;
+canvas.height = 360;  
+btn2.disabled = true;
+btn3.disabled = true;
+function start() {
+  navigator.webkitGetUserMedia({video:true}, gotStream, function() {}); 
+  btn1.disabled = true;
+}
+function gotStream(stream) {
+  video.src = webkitURL.createObjectURL(stream);
+  btn2.disabled = false;
+  btn3.disabled = false;
+}
+function change() {
+  video.className = '';
+  findex = (findex + 1) % filters.length;
+  if (findex != 0)
+    video.classList.add(filters[findex]);
+}
+function snap() {
+  canvas.className = '';
+  if (findex != 0)
+    canvas.classList.add(filters[findex]);
+  canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
+}
+</script>
+</body>
+</html>
+
diff --git a/samples/js/demos/html/local-audio-rendering.html b/samples/js/demos/html/local-audio-rendering.html
new file mode 100644
index 0000000..6ffaa97
--- /dev/null
+++ b/samples/js/demos/html/local-audio-rendering.html
@@ -0,0 +1,91 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<title>Local Audio Rendering Demo</title>
+<script type="text/javascript" src="../../base/adapter.js"></script>
+<script>
+  var audioElement;
+  var buttonStart;
+  var buttonStop;
+  var localStream;
+  
+  $ = function(id) {
+    return document.getElementById(id);
+  };
+
+  function start() {
+    var constraints = {audio:true, video:false};
+    getUserMedia(constraints, gotStream, gotStreamFailed);
+    buttonStart.disabled = true;
+    buttonStop.disabled = false;
+  }
+  
+  function stop() {
+    buttonStart.enabled = true;
+    buttonStop.enabled = false;
+    localStream.stop();
+  }
+  
+  function gotStream(stream) {
+    videoTracks = stream.getVideoTracks();
+    audioTracks = stream.getAudioTracks();
+    if (audioTracks.length == 1 && videoTracks.length == 0) {
+      console.log('gotStream({audio:true, video:false})');
+      console.log('Using audio device: ' + audioTracks[0].label);
+      attachMediaStream(audioElement, stream);
+      
+      // The audio will be muted by default from start.
+      // Unmute and set volume to max level so we can listen to audio in
+      // loopback. We restore the volume in a 'play' event to ensure that
+      // loading has been done (auto-mute is performed during load).
+      audioElement.addEventListener('play', function() {
+        audioElement.muted = false;
+        audioElement.volume = 1;
+        console.log('Unmuting and setting volume to max level');
+      }, false);
+      
+      stream.onended = function() {
+        console.log('stream.onended');
+        buttonStart.disabled = false;
+        buttonStop.disabled = true;
+      };
+      
+      localStream = stream;
+    } else {
+      alert('The media stream contains an invalid amount of audio tracks.');
+      stream.stop();
+    }
+  }
+  
+  function gotStreamFailed(error) {
+    buttonStart.disabled = false;
+    buttonStop.disabled = true;
+    alert('Failed to get access to local media. Error code: ' + error.code);
+  }
+  
+  function onload() {
+    audioElement = $('audio');
+    buttonStart = $('start');
+    buttonStop = $('stop');
+    buttonStart.enabled = true;
+    buttonStop.disabled = true;
+  } 
+</script>
+</head>
+
+<body onload="onload()">
+  <h2>Rendering of a local media stream using &lt;audio&gt;</h2>
+  <p>Demonstrates usage of a local media stream connected to an HTML5 audio tag.<br>
+     Press Start, select a microphone and listen to your own voice in loopback.</p>
+  <style>
+    button {
+      font: 14px sans-serif;
+      padding: 8px;
+    }
+  </style>
+  <audio id="audio" autoplay="autoplay" controls="controls"></audio><br><br>
+  <button id="start" onclick="start()">Start</button>
+  <button id="stop" onclick="stop()">Stop</button>
+</body>
+</html>
diff --git a/samples/js/demos/html/multiple.html b/samples/js/demos/html/multiple.html
new file mode 100644
index 0000000..81d768e
--- /dev/null
+++ b/samples/js/demos/html/multiple.html
@@ -0,0 +1,187 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>PeerConnection Demo 1</title>
+<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
+<script src="../../base/adapter.js"></script>
+<style>
+video {
+  border:5px solid black;
+  width:480px;
+  height:360px;
+}
+button {
+  font: 18px sans-serif;
+  padding: 8px;
+}
+textarea {
+  font-family: monospace;
+  margin: 2px;
+  width:480px;
+  height:640px;
+}
+</style>
+</head>
+<body>
+<video id="vid1" autoplay></video>
+<video id="vid2" autoplay></video>
+<video id="vid3" autoplay></video>
+<br>
+<button id="btn1" onclick="start()">Start</button>
+<button id="btn2" onclick="call()">Call</button>
+<button id="btn3" onclick="hangup()">Hang Up</button>
+<br>
+<script>
+//var vid1 = document.getElementById("vid1");
+//var vid2 = document.getElementById("vid2");
+btn1.disabled = false;
+btn2.disabled = true;
+btn3.disabled = true;
+var pc1_local, pc1_remote;
+var pc2_local, pc2_remote;
+var localstream;
+var sdpConstraints = {'mandatory': {
+                        'OfferToReceiveAudio':true, 
+                        'OfferToReceiveVideo':true }};
+
+function trace(text) {
+  // This function is used for logging.
+  if (text[text.length - 1] == '\n') {
+    text = text.substring(0, text.length - 1);
+  }
+  console.log((performance.now() / 1000).toFixed(3) + ": " + text);
+}
+
+function gotStream(stream){
+  trace("Received local stream");
+  // Call the polyfill wrapper to attach the media stream to this element.
+  attachMediaStream(vid1, stream);
+  localstream = stream;
+  btn2.disabled = false;
+}
+
+function start() {
+  trace("Requesting local stream");
+  btn1.disabled = true;
+  // Call into getUserMedia via the polyfill (adapter.js).
+  getUserMedia({audio:true, video:true},
+                gotStream, function() {});
+}  
+  
+function call() {
+  btn2.disabled = true;
+  btn3.disabled = false;
+  trace("Starting calls");
+  videoTracks = localstream.getVideoTracks();
+  audioTracks = localstream.getAudioTracks();
+  if (videoTracks.length > 0)
+    trace("Using Video device: " + videoTracks[0].label);  
+  if (audioTracks.length > 0)
+    trace("Using Audio device: " + audioTracks[0].label);
+
+  // Create an RTCPeerConnection via the polyfill (adapter.js).
+  var servers = null;
+  pc1_local = new RTCPeerConnection(servers);
+  pc1_remote = new RTCPeerConnection(servers);
+  pc1_remote.onaddstream = gotRemoteStream1;
+  pc1_local.onicecandidate = iceCallback1Local;
+  pc1_remote.onicecandidate = iceCallback1Remote;
+  trace("PC1: created local and remote peer connection objects"); 
+  
+  pc2_local = new RTCPeerConnection(servers);
+  pc2_remote = new RTCPeerConnection(servers);
+  pc2_remote.onaddstream = gotRemoteStream2;
+  pc2_local.onicecandidate = iceCallback2Local;
+  pc2_remote.onicecandidate = iceCallback2Remote;
+  trace("PC2: created local and remote peer connection objects");
+  
+  pc1_local.addStream(localstream);
+  trace("Adding local stream to pc1_local");
+  pc1_local.createOffer(gotDescription1Local);
+
+  pc2_local.addStream(localstream);
+  trace("Adding local stream to pc2_local");
+  pc2_local.createOffer(gotDescription2Local);
+}
+
+function gotDescription1Local(desc) {
+  pc1_local.setLocalDescription(desc);
+  trace("Offer from pc1_local \n" + desc.sdp);
+  pc1_remote.setRemoteDescription(desc);
+  // Since the "remote" side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc1_remote.createAnswer(gotDescription1Remote, null, sdpConstraints);
+}
+
+function gotDescription1Remote(desc) {
+  pc1_remote.setLocalDescription(desc);
+  trace("Answer from pc1_remote \n" + desc.sdp);
+  pc1_local.setRemoteDescription(desc);
+}
+
+function gotDescription2Local(desc) {
+  pc2_local.setLocalDescription(desc);
+  trace("Offer from pc2_local \n" + desc.sdp);
+  pc2_remote.setRemoteDescription(desc);
+  // Since the "remote" side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc2_remote.createAnswer(gotDescription2Remote, null, sdpConstraints);
+}
+
+function gotDescription2Remote(desc) {
+  pc2_remote.setLocalDescription(desc);
+  trace("Answer from pc2_remote \n" + desc.sdp);
+  pc2_local.setRemoteDescription(desc);
+}
+
+function hangup() {
+  trace("Ending calls");
+  pc1_local.close(); 
+  pc1_remote.close();
+  pc2_local.close();
+  pc2_remote.close();
+  pc1_local = pc1_remote = null;
+  pc2_local = pc2_remote = null;
+  btn3.disabled = true;
+  btn2.disabled = false;
+}
+
+function gotRemoteStream1(e) {
+  vid2.src = webkitURL.createObjectURL(e.stream);
+  trace("PC1: Received remote stream");
+}
+
+function gotRemoteStream2(e) {
+  vid3.src = webkitURL.createObjectURL(e.stream);
+  trace("PC2: Received remote stream");
+}
+
+function iceCallback1Local(event) {
+  handleCandidate(event.candidate, pc1_remote, "PC1: ", "local");
+}
+      
+function iceCallback1Remote(event) {
+  handleCandidate(event.candidate, pc1_local, "PC1: ", "remote");
+}
+
+function iceCallback2Local(event) {
+  handleCandidate(event.candidate, pc2_remote, "PC2: ", "local");
+}
+      
+function iceCallback2Remote(event) {
+  handleCandidate(event.candidate, pc2_local, "PC2: ", "remote");
+}
+
+function handleCandidate(candidate, dest, prefix, type) {
+  if (candidate) {
+    dest.addIceCandidate(new RTCIceCandidate(candidate));
+    trace(prefix + "New " + type + " ICE candidate: " + candidate.candidate);
+  }
+}
+</script>
+</body>
+</html>
+
+
diff --git a/samples/js/demos/html/pc1-deprecated.html b/samples/js/demos/html/pc1-deprecated.html
new file mode 100644
index 0000000..f12987d
--- /dev/null
+++ b/samples/js/demos/html/pc1-deprecated.html
@@ -0,0 +1,133 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>PeerConnection Demo 1</title>
+<style>
+video {
+  border:5px solid black;
+  width:480px;
+  height:360px;
+}
+button {
+  font: 18px sans-serif;
+  padding: 8px;
+}
+textarea {
+  font-family: monospace;
+  margin: 2px;
+  width:480px;
+  height:640px;
+}
+</style>
+</head>
+<body>
+<video id="vid1" autoplay></video>
+<video id="vid2" autoplay></video>
+<br>
+<button id="btn1" onclick="start()">Start</button>
+<button id="btn2" onclick="call()">Call</button>
+<button id="btn3" onclick="hangup()">Hang Up</button>
+<br>
+<xtextarea id="ta1"></textarea>
+<xtextarea id="ta2"></textarea>
+<script>
// Initial UI state: only "Start" is enabled until media and a call exist.
// (vid1/vid2/btn1..btn3 resolve to the DOM elements with those ids.)
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;

var pc1;
var pc2;
var localstream;
+
// Logs |text| to the console prefixed with a high-resolution timestamp in
// seconds. A single trailing newline is stripped so entries stay one line.
function trace(text) {
  var line = text;
  // Use strict equality (the original used '==') and avoid mutating the
  // parameter directly.
  if (line.length > 0 && line[line.length - 1] === '\n') {
    line = line.substring(0, line.length - 1);
  }
  console.log((performance.now() / 1000).toFixed(3) + ": " + line);
}
+
// getUserMedia success callback: show the local preview and allow calling.
function gotStream(mediaStream) {
  trace("Received local stream");
  vid1.src = webkitURL.createObjectURL(mediaStream);
  localstream = mediaStream;
  btn2.disabled = false;
}
+
// Requests mic + camera and disables "Start" so the request cannot be
// issued twice while pending.
function start() {
  trace("Requesting local stream");
  btn1.disabled = true;
  navigator.webkitGetUserMedia({audio:true, video:true},
                               gotStream,
                               function() {
                                 // Surface the failure instead of silently
                                 // swallowing it, and allow a retry.
                                 trace("webkitGetUserMedia() failed");
                                 btn1.disabled = false;
                               });
}
+  
// Wires two deprecated PeerConnection00 objects back-to-back and runs the
// full offer/answer exchange plus ICE for a loopback call.
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  trace("Starting call");
  if (localstream.videoTracks.length > 0) {
    trace('Using Video device: ' + localstream.videoTracks[0].label);
  }
  if (localstream.audioTracks.length > 0) {
    trace('Using Audio device: ' + localstream.audioTracks[0].label);
  }

  pc1 = new webkitPeerConnection00(null, iceCallback1);
  trace("Created local peer connection object pc1");
  pc2 = new webkitPeerConnection00(null, iceCallback2);
  trace("Created remote peer connection object pc2");
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");

  var localOffer = pc1.createOffer(null);
  trace("Created offer:\n" + localOffer.toSdp());
  pc1.setLocalDescription(pc1.SDP_OFFER, localOffer);
  trace("SetLocalDesc1");
  pc2.setRemoteDescription(pc2.SDP_OFFER, localOffer);
  trace("SetRemoteDesc2");

  var remoteAnswer = pc2.createAnswer(localOffer.toSdp(),
                                      {has_audio:true, has_video:true});
  trace("Created answer:\n" + remoteAnswer.toSdp());
  pc2.setLocalDescription(pc2.SDP_ANSWER, remoteAnswer);
  trace("SetLocalDesc2");
  pc1.setRemoteDescription(pc1.SDP_ANSWER, remoteAnswer);
  trace("SetRemoteDesc1");

  // Begin candidate gathering; candidates arrive via the ICE callbacks.
  pc1.startIce();
  pc2.startIce();
  trace("Started ICE for both local & remote");
}
+
// Closes both peer connections and restores the "ready to call" UI state.
function hangup() {
  trace("Ending call");
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  btn3.disabled = true;
  btn2.disabled = false;
}
+
// Displays the stream the "remote" connection received from pc1.
function gotRemoteStream(event) {
  vid2.src = webkitURL.createObjectURL(event.stream);
  trace("Received remote stream");
}
+
// ICE callbacks: hand each gathered candidate directly to the opposite
// peer — this demo has no real signaling channel.
function iceCallback1(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc2.processIceMessage(candidate);
  trace("Local ICE candidate: " + candidate.toSdp());
}

function iceCallback2(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc1.processIceMessage(candidate);
  trace("Remote ICE candidate: " + candidate.toSdp());
}
+</script>
+</body>
+</html>
+
+
diff --git a/samples/js/demos/html/pc1.html b/samples/js/demos/html/pc1.html
new file mode 100644
index 0000000..0a3e81e
--- /dev/null
+++ b/samples/js/demos/html/pc1.html
@@ -0,0 +1,143 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>PeerConnection Demo 1</title>
+<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
+<script src="../../base/adapter.js"></script>
+<style>
+video {
+  border:5px solid black;
+  width:480px;
+  height:360px;
+}
+button {
+  font: 18px sans-serif;
+  padding: 8px;
+}
+textarea {
+  font-family: monospace;
+  margin: 2px;
+  width:480px;
+  height:640px;
+}
+</style>
+</head>
+<body>
+<video id="vid1" autoplay></video>
+<video id="vid2" autoplay></video>
+<br>
+<button id="btn1" onclick="start()">Start</button>
+<button id="btn2" onclick="call()">Call</button>
+<button id="btn3" onclick="hangup()">Hang Up</button>
+<br>
+<xtextarea id="ta1"></textarea>
+<xtextarea id="ta2"></textarea>
+<script>
// Initial UI state: only "Start" is enabled.
// (vid1/vid2/btn1..btn3 resolve to the DOM elements with those ids.)
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;

var pc1;
var pc2;
var localstream;
// The "remote" end adds no media of its own, so its answer must offer to
// receive both audio and video explicitly.
var sdpConstraints = {
  'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': true
  }
};
// Logs |text| to the console prefixed with a high-resolution timestamp in
// seconds. A single trailing newline is stripped so entries stay one line.
function trace(text) {
  var line = text;
  // Use strict equality (the original used '==') and avoid mutating the
  // parameter directly.
  if (line.length > 0 && line[line.length - 1] === '\n') {
    line = line.substring(0, line.length - 1);
  }
  console.log((performance.now() / 1000).toFixed(3) + ": " + line);
}
+
// getUserMedia success callback: render the stream through the adapter.js
// helper (hides Chrome/Firefox differences) and enable "Call".
function gotStream(mediaStream) {
  trace("Received local stream");
  attachMediaStream(vid1, mediaStream);
  localstream = mediaStream;
  btn2.disabled = false;
}
+
// Requests mic + camera via the adapter.js polyfill and disables "Start"
// while the request is pending.
function start() {
  trace("Requesting local stream");
  btn1.disabled = true;
  getUserMedia({audio:true, video:true},
               gotStream,
               function() {
                 // Report the failure instead of silently swallowing it,
                 // and allow a retry.
                 trace("getUserMedia() failed");
                 btn1.disabled = false;
               });
}
+  
// Creates both RTCPeerConnections, hooks up ICE and stream callbacks,
// adds the local stream to pc1 and kicks off the offer/answer exchange.
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  trace("Starting call");
  // Declared with |var|: the originals leaked as implicit globals.
  var videoTracks = localstream.getVideoTracks();
  var audioTracks = localstream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using Video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using Audio device: ' + audioTracks[0].label);
  }
  var servers = null;
  pc1 = new RTCPeerConnection(servers);
  trace("Created local peer connection object pc1");
  pc1.onicecandidate = iceCallback1;
  pc2 = new RTCPeerConnection(servers);
  trace("Created remote peer connection object pc2");
  pc2.onicecandidate = iceCallback2;
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");

  pc1.createOffer(gotDescription1);
}
+
// pc1's offer is ready: install it on both ends and ask pc2 to answer.
function gotDescription1(description) {
  pc1.setLocalDescription(description);
  trace("Offer from pc1 \n" + description.sdp);
  pc2.setRemoteDescription(description);
  // The "remote" side has no media stream, so explicit constraints are
  // required for it to accept the incoming offer of audio and video.
  pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
+
// pc2's answer is ready: apply it locally and hand it back to pc1.
function gotDescription2(description) {
  pc2.setLocalDescription(description);
  trace("Answer from pc2 \n" + description.sdp);
  pc1.setRemoteDescription(description);
}
+
// Closes both peer connections and restores the "ready to call" UI state.
function hangup() {
  trace("Ending call");
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  btn3.disabled = true;
  btn2.disabled = false;
}
+
// Remote-stream callback: render pc2's incoming stream. Uses the same
// adapter.js helper as gotStream instead of the WebKit-only
// webkitURL.createObjectURL path, so the Firefox path this file already
// supports (via adapter.js) also works here.
function gotRemoteStream(e){
  attachMediaStream(vid2, e.stream);
  trace("Received remote stream");
}
+
// ICE callbacks: forward each gathered candidate straight to the other
// peer connection — this loopback demo has no signaling channel.
function iceCallback1(event) {
  if (!event.candidate) {
    return;
  }
  pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Local ICE candidate: \n" + event.candidate.candidate);
}

function iceCallback2(event) {
  if (!event.candidate) {
    return;
  }
  pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
+</script>
+</body>
+</html>
+
+
diff --git a/samples/js/demos/html/pranswer.html b/samples/js/demos/html/pranswer.html
new file mode 100644
index 0000000..eb807a2
--- /dev/null
+++ b/samples/js/demos/html/pranswer.html
@@ -0,0 +1,125 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <title>PeerConnection PRANSWER Demo</title>
+  <style>
+    video {
+      border:5px solid black;
+      width:320px;
+      height:240px;
+    }
+  </style>
+  </head>
+  <body>
+    <video id="vid1" autoplay></video>
+    <video id="vid2" autoplay></video>
+    <br>
+    <button id="btn1" onclick="start()">Call</button>
+    <button id="btn15" onclick="accept()">Accept</button>
+    <button id="btn2" onclick="stop()">Hang Up</button>
+    <script>
// Initial UI state: nothing enabled until getUserMedia succeeds.
// (vid1/vid2/btn1/btn15/btn2 resolve to the DOM elements with those ids.)
btn1.disabled = true;
btn2.disabled = true;

var pc1;
var pc2;
var localstream;
+
// Thin logging wrapper; kept as a function so the log sink is one place.
function trace(txt) {
  console.log(txt);
}
+      
// Logs one ICE candidate, collapsing the first newline of its SDP so the
// entry stays on a single console line.
function traceCandidate(kind, cand) {
  var sdpOneLine = cand.toSdp().replace("\n", "");
  trace("Candidate(" + kind + "): " + cand.label + ": " + sdpOneLine);
}
+
// getUserMedia success callback: show the local preview and enable "Call".
function gotStream(mediaStream) {
  trace("Received local stream");
  vid1.src = webkitURL.createObjectURL(mediaStream);
  localstream = mediaStream;
  btn1.disabled = false;
}
+
+navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {});
+
// Starts the call, but answers with a PRANSWER whose media lines are
// rewritten to a=inactive, so media stays on hold until accept() runs.
function start() {
  btn1.disabled = true;
  btn2.disabled = false;
  trace("Starting Call");
  if (localstream.videoTracks.length > 0) {
    trace('Using Video device: ' + localstream.videoTracks[0].label);
  }
  if (localstream.audioTracks.length > 0) {
    trace('Using Audio device: ' + localstream.audioTracks[0].label);
  }

  pc1 = new webkitPeerConnection00(null, iceCallback1);
  trace("Created local peer connection object pc1");
  pc2 = new webkitPeerConnection00(null, iceCallback2);
  trace("Created remote peer connection object pc2");
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");
  var offer = pc1.createOffer(null);
  trace("Created offer:\n" + offer.toSdp());
  pc1.setLocalDescription(pc1.SDP_OFFER, offer);
  trace("SetLocalDesc1");
  pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
  trace("SetRemoteDesc2");

  var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
  // Put the provisional answer on hold by marking every m-line inactive.
  var heldSdp = answer.toSdp().replace(/a=sendrecv/g, "a=inactive");
  answer = new SessionDescription(heldSdp);
  trace("Created answer:\n" + answer.toSdp());
  pc2.setLocalDescription(pc2.SDP_PRANSWER, answer);
  trace("SetLocalDesc2");
  pc1.setRemoteDescription(pc1.SDP_PRANSWER, answer);
  trace("SetRemoteDesc1");

  // Begin candidate gathering; candidates arrive via the ICE callbacks.
  pc1.startIce();
  pc2.startIce();
  trace("Start ICE for both local & remote");
}
+
// Promotes the provisional answer to a final one: flips the held media
// lines back to sendrecv and applies the result as a real ANSWER on both
// sides.
function accept() {
  var sdp = pc1.remoteDescription.toSdp();
  sdp = sdp.replace(/a=inactive/g, "a=sendrecv");
  var answer = new SessionDescription(sdp);
  // Fix: each connection uses its OWN SDP_ANSWER constant — the original
  // read pc1's constant for pc2 and pc2's constant for pc1.
  pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
  pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
  trace("Set final answer:" + sdp);
}
+
// Ends the call, drops both connections, and re-enables "Call".
function stop() {
  trace("Ending Call" + "\n\n");
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  btn2.disabled = true;
  btn1.disabled = false;
}
+
// Displays the stream the remote connection received.
function gotRemoteStream(event) {
  vid2.src = webkitURL.createObjectURL(event.stream);
  trace("Received Remote Stream");
}
+
// ICE callbacks: hand each gathered candidate directly to the opposite
// peer connection and log it.
function iceCallback1(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc2.processIceMessage(candidate);
  traceCandidate("local", candidate);
}

function iceCallback2(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc1.processIceMessage(candidate);
  traceCandidate("remote", candidate);
}
+    </script>
+  </body>
+</html>
+
+
diff --git a/samples/js/demos/html/rehydrate.html b/samples/js/demos/html/rehydrate.html
new file mode 100644
index 0000000..61206f9
--- /dev/null
+++ b/samples/js/demos/html/rehydrate.html
@@ -0,0 +1,142 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>PeerConnection Rehydration Demo</title>
+<style>
+video {
+    border:5px solid black;
+    width:320px;
+    height:240px;
+}
+</style>
+</head>
+<body>
+<video id="vid1" autoplay></video>
+<video id="vid2" autoplay></video>
+<br>
+<button id="btn1" onclick="start()">Start</button>  
+<button id="btn2" onclick="call()">Call</button>
+<button id="btn3" onclick="rehydrate()">Rehydrate</button>
+<button id="btn4" onclick="stop()">Hang Up</button>
+<script>
// Initial UI state: only "Start" is enabled.
// (vid1/vid2/btn1..btn4 resolve to the DOM elements with those ids.)
btn2.disabled = true;
btn3.disabled = true;
btn4.disabled = true;

var pc1;
var pc2;
var localstream;
+
// Thin logging wrapper; kept as a function so the log sink is one place.
function trace(txt) {
  console.log(txt);
}
+
// Requests mic + camera; logs a failure instead of silently ignoring it
// (the original passed an empty error callback) and allows a retry.
function start() {
  btn1.disabled = true;
  navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {
    trace("webkitGetUserMedia() failed");
    btn1.disabled = false;
  });
}
+  
// getUserMedia success callback: show the local preview and enable "Call".
function gotStream(mediaStream) {
  trace("Received local stream");
  vid1.src = webkitURL.createObjectURL(mediaStream);
  localstream = mediaStream;
  btn2.disabled = false;
}
+
// Builds the loopback call on the deprecated PeerConnection00 API and
// runs the full offer/answer handshake plus ICE for both ends.
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  btn4.disabled = false;
  trace("Starting Call");
  if (localstream.videoTracks.length > 0) {
    trace('Using Video device: ' + localstream.videoTracks[0].label);
  }
  if (localstream.audioTracks.length > 0) {
    trace('Using Audio device: ' + localstream.audioTracks[0].label);
  }

  pc1 = new webkitPeerConnection00(null, iceCallback1);
  trace("Created local peer connection object pc1");
  pc2 = new webkitPeerConnection00(null, iceCallback2);
  trace("Created remote peer connection object pc2");
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");
  var offer = pc1.createOffer(null);
  trace("Created offer");
  pc1.setLocalDescription(pc1.SDP_OFFER, offer);
  trace("SetLocalDesc1");
  pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
  trace("SetRemoteDesc2");
  // Stray ";;" removed from the original createAnswer line.
  var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
  trace("CreatedAnswer");
  pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
  trace("SetLocalDesc2");
  pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
  trace("SetRemoteDesc1");
  // Begin candidate gathering; candidates arrive via the ICE callbacks.
  pc1.startIce();
  pc2.startIce();
  trace("Start ICE for both local & remote");
}
+
// Simulates rehydration: tears down pc2, rebuilds it from its last local
// description, and re-runs offer/answer with pc1 answering this time.
function rehydrate() {
  var savedOffer = pc2.localDescription;
  // need to munge a=crypto
  pc2 = null;
  trace("Destroyed remote peer connection object pc2");
  pc2 = new webkitPeerConnection00(null, iceCallback3);
  trace("Created new remote peer connection object pc2");
  pc2.onaddstream = gotRemoteStream;
  pc2.setLocalDescription(pc2.SDP_OFFER, savedOffer);
  pc1.setRemoteDescription(pc1.SDP_OFFER, savedOffer);
  var freshAnswer = pc1.createAnswer(savedOffer.toSdp(), {has_audio:true, has_video:true});
  pc1.setLocalDescription(pc1.SDP_ANSWER, freshAnswer);
  pc2.setRemoteDescription(pc2.SDP_ANSWER, freshAnswer);
  pc2.startIce();
  trace("Inited new remote peer connection object pc2");
}
+
// Ends the call, drops both connections, and re-enables "Call".
function stop() {
  trace("Ending Call" + "\n\n");
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  btn2.disabled = false;
  btn3.disabled = true;
  btn4.disabled = true;
}
+
// Displays the stream the remote connection received.
function gotRemoteStream(event) {
  vid2.src = webkitURL.createObjectURL(event.stream);
  trace("Received Remote Stream");
}
+
// ICE callbacks: hand each gathered candidate directly to the opposite
// peer connection. iceCallback3 serves the rebuilt pc2 after rehydrate().
function iceCallback1(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc2.processIceMessage(candidate);
  trace("Local ice candidate: " + candidate.toSdp());
}

function iceCallback2(candidate, bMore) {
  if (!candidate) {
    return;
  }
  pc1.processIceMessage(candidate);
  trace("Remote ice candidate: " + candidate.toSdp());
}

function iceCallback3(candidate, bMore) {
  if (!candidate) {
    return;
  }
  // Rewrites "generation 0" -> "generation 1" in the candidate SDP —
  // presumably so pc1 treats it as a post-restart candidate; TODO confirm.
  var mungedSdp = candidate.toSdp().replace("generation 0", "generation 1");
  var mungedCandidate = new IceCandidate(candidate.label, mungedSdp);
  trace("Remote ice candidate: " + mungedCandidate.toSdp());
  pc1.processIceMessage(mungedCandidate);
}
+    </script>
+  </body>
+</html>
+
+