<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Audio effects with WebAudio in WebRTC</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
var pc1, pc2;
var display;

var webAudio;

// WebAudio helper class which takes care of the WebAudio related parts.

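// Audio is routed as: microphone (MediaStreamAudioSourceNode) ->
// BiquadFilterNode (highpass, 1500 Hz) -> MediaStreamAudioDestinationNode,
// whose output stream is what gets sent over the PeerConnection. A key press
// mixes a preloaded sound effect into the same destination node.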
function WebAudio() {
  this.context = new webkitAudioContext();
  this.soundBuffer = null;
}

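// Creates the biquad highpass filter with a 1500 Hz cutoff; the legacy
// numeric filter-type constant (HIGHPASS) is used here.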
WebAudio.prototype.start = function() {
  this.filter = this.context.createBiquadFilter();
  this.filter.type = this.filter.HIGHPASS;
  this.filter.frequency.value = 1500;
};

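// Routes the microphone stream through the highpass filter and returns the
// filtered MediaStream taken from a MediaStreamAudioDestinationNode.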
WebAudio.prototype.applyFilter = function(stream) {
  this.mic = this.context.createMediaStreamSource(stream);
  this.mic.connect(this.filter);
  this.peer = this.context.createMediaStreamDestination();
  this.filter.connect(this.peer);
  return this.peer.stream;
};

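// Enables or disables local playout of the unfiltered microphone signal by
// connecting the source node directly to the default audio destination.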
WebAudio.prototype.renderLocally = function(enabled) {
  if (enabled) {
    this.mic.connect(this.context.destination);
  } else {
    this.mic.disconnect(0);
    this.mic.connect(this.filter);
  }
};

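// Disconnects the WebAudio graph and drops the node references.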
WebAudio.prototype.stop = function() {
  this.mic.disconnect(0);
  this.filter.disconnect(0);
  this.mic = null;
  this.peer = null;
};

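// Plays the preloaded sound effect into the outgoing (peer) stream using a
// one-shot AudioBufferSourceNode.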
WebAudio.prototype.addEffect = function() {
  var effect = this.context.createBufferSource();
  effect.buffer = this.soundBuffer;
  if (this.peer) {
    effect.connect(this.peer);
    effect.start(0);
  }
};

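// Decodes the downloaded sample using the legacy synchronous
// createBuffer(ArrayBuffer, mixToMono) overload; newer code would typically
// use context.decodeAudioData() instead.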
WebAudio.prototype.loadCompleted = function() {
  this.soundBuffer = this.context.createBuffer(this.request.response, true);
};

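// Downloads the sound effect as an ArrayBuffer via XHR; decoding happens in
// loadCompleted() when the request finishes.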
WebAudio.prototype.loadSound = function(url) {
  this.request = new XMLHttpRequest();
  this.request.open('GET', url, true);
  this.request.responseType = 'arraybuffer';
  this.request.onload = this.loadCompleted.bind(this);
  this.request.send();
};

// Global methods.

function trace(txt) {
  display.innerHTML += txt + "<br>";
}

function logEvent(e) {
  console.log(e.type + ':' + e.target + ':' + e.target.id + ':muted=' +
              e.target.muted);
}

$ = function(id) {
  return document.getElementById(id);
};

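// Starts the demo: sets up the filter, requests microphone access and
// toggles the Start/Stop buttons.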
function start() {
  webAudio.start();
  var constraints = {audio: true, video: false};
  getUserMedia(constraints, gotStream, gotStreamFailed);
  buttonStart.disabled = true;
  buttonStop.disabled = false;
}

function stop() {
  webAudio.stop();
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  buttonStart.disabled = false;
  buttonStop.disabled = true;
  localStream.stop();
}

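// getUserMedia success callback: runs the captured stream through the
// WebAudio filter and sends the filtered stream over a loopback
// PeerConnection pair (pc1 -> pc2) on the same page.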
function gotStream(stream) {
  var audioTracks = stream.getAudioTracks();
  if (audioTracks.length == 1) {
    console.log('gotStream({audio:true, video:false})');

    var filteredStream = webAudio.applyFilter(stream);

    var servers = null;
    pc1 = new webkitRTCPeerConnection(servers);
    console.log('Created local peer connection object pc1');
    pc1.onicecandidate = iceCallback1;
    pc2 = new webkitRTCPeerConnection(servers);
    console.log('Created remote peer connection object pc2');
    pc2.onicecandidate = iceCallback2;
    pc2.onaddstream = gotRemoteStream;

    pc1.addStream(filteredStream);
    pc1.createOffer(gotDescription1);

    stream.onended = function() {
      console.log('stream.onended');
      buttonStart.disabled = false;
      buttonStop.disabled = true;
    };

    localStream = stream;
  } else {
    alert('The media stream contains an invalid number of audio tracks.');
    stream.stop();
  }
}

function gotStreamFailed(error) {
  buttonStart.disabled = false;
  buttonStop.disabled = true;
  alert('Failed to get access to local media. Error code: ' + error.code);
}

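// Rewrites the SDP so that the audio m-line only offers payload type 111
// (Opus); the rtpmap filter leaves the VP8, red and ulpfec entries alone.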
function forceOpus(sdp) {
  // Remove all other codecs (not the video codecs though).
  sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                    'm=audio $1 RTP/SAVPF 111\r\n');
  sdp = sdp.replace(/a=rtpmap:(?!111)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
  return sdp;
}

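// Loopback signaling: the offer from pc1 is munged to force Opus and then
// applied as the local description of pc1 and the remote description of pc2.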
function gotDescription1(desc) {
  console.log('Offer from pc1 \n' + desc.sdp);
  var modifiedOffer = new RTCSessionDescription({type: 'offer',
                                                 sdp: forceOpus(desc.sdp)});
  pc1.setLocalDescription(modifiedOffer);
  console.log('Modified offer from pc1 \n' + modifiedOffer.sdp);
  pc2.setRemoteDescription(modifiedOffer);
  pc2.createAnswer(gotDescription2);
}

function gotDescription2(desc) {
  pc2.setLocalDescription(desc);
  console.log('Answer from pc2 \n' + desc.sdp);
  pc1.setRemoteDescription(desc);
}

function gotRemoteStream(e) {
  attachMediaStream(audioElement, e.stream);
}

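// ICE candidates are handed straight to the other peer connection since both
// endpoints live in the same page.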
function iceCallback1(event) {
  if (event.candidate) {
    pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
    console.log('Local ICE candidate: \n' + event.candidate.candidate);
  }
}

function iceCallback2(event) {
  if (event.candidate) {
    pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
    console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
  }
}

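// Pressing any key while the session is active injects the loaded sound
// effect into the transmitted stream.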
function handleKeyDown(event) {
  webAudio.addEffect();
}

function doMix(checkbox) {
  webAudio.renderLocally(checkbox.checked);
}

function onload() {
  webAudio = new WebAudio();
  webAudio.loadSound('../sounds/Shamisen-C4.wav');

  audioElement = $('audio');
  buttonStart = $('start');
  buttonStop = $('stop');
  display = $('display');

  document.addEventListener('keydown', handleKeyDown, false);

  buttonStart.disabled = false;
  buttonStop.disabled = true;
}
</script>
</head>

<body onload='onload()'>
  <h2>Capture microphone input and stream it out to a peer with a processing
  effect applied to the audio.</h2>
  <p>The audio stream is: <br><br>
  o Recorded using <a href="http://www.html5audio.org/2012/09/live-audio-input-comes-to-googles-chrome-canary.html"
     title="Live audio input comes to Google's Chrome Canary">live-audio
     input.</a><br>
  o Filtered using a highpass filter with fc = 1500 Hz.<br>
  o Encoded using <a href="http://www.opus-codec.org/" title="Opus Codec">
     Opus.</a><br>
  o Transmitted (in loopback) to a remote peer using
     <a href="http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcpeerconnection-interface"
     title="RTCPeerConnection Interface">RTCPeerConnection</a> where it is decoded.<br>
  o Finally, the received remote stream is used as the source of an &lt;audio&gt;
     tag and played out locally.<br>
  <br>Press any key to add an effect to the transmitted audio while talking.
  </p>
  <p>Please note that: <br><br>
  o Linux is currently not supported.<br>
  o Sample rate and channel configuration must be the same for the input and
     output sides on Windows.<br>
  o Only the Default microphone device can be used for capturing.
  </p>
  <p>For more information, see <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/webrtc-integration.html"
     title="Example 3: Capture microphone input and stream it out to a peer with a processing effect applied to the audio">
     WebRTC integration with the Web Audio API.</a>
  </p>
  <style>
    button {
      font: 14px sans-serif;
      padding: 8px;
    }
  </style>
  <audio id="audio" autoplay controls></audio><br><br>
  <button id="start" onclick="start()">Start</button>
  <button id="stop" onclick="stop()">Stop</button><br><br>
  Add local audio to output:<input id="mix" type="checkbox" onclick="doMix(this);"><br><br>
  <pre id="display"></pre>
</body>
</html>