webRTCTests/demos/html/webaudio-and-webrtc.html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Audio effects with WebAudio in WebRTC</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
var pc1, pc2;
var display;
var webAudio;
// WebAudio helper class which takes care of the WebAudio related parts.
function WebAudio() {
  // Use the standard AudioContext, falling back to the prefixed version.
  this.context = new (window.AudioContext || window.webkitAudioContext)();
  this.soundBuffer = null;
}
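// Creates the highpass filter that the microphone signal is routed through.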
WebAudio.prototype.start = function() {
  this.filter = this.context.createBiquadFilter();
  // The string form replaces the deprecated this.filter.HIGHPASS constant.
  this.filter.type = 'highpass';
  this.filter.frequency.value = 1500;
}
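// Builds the graph mic -> filter -> MediaStreamAudioDestinationNode and
// returns the destination's stream, which is what gets sent to the peer.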
WebAudio.prototype.applyFilter = function(stream) {
  this.mic = this.context.createMediaStreamSource(stream);
  this.mic.connect(this.filter);
  this.peer = this.context.createMediaStreamDestination();
  this.filter.connect(this.peer);
  return this.peer.stream;
}
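// When enabled, also renders the unfiltered microphone signal locally;
// when disabled, restores the mic -> filter connection only.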
WebAudio.prototype.renderLocally = function(enabled) {
  if (enabled) {
    this.mic.connect(this.context.destination);
  } else {
    this.mic.disconnect(0);
    this.mic.connect(this.filter);
  }
}
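// Disconnects the graph and drops the node references.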
WebAudio.prototype.stop = function() {
  this.mic.disconnect(0);
  this.filter.disconnect(0);
  this.mic = null;
  this.peer = null;
}
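// Mixes a one-shot sound effect into the outgoing peer stream.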
WebAudio.prototype.addEffect = function() {
  var effect = this.context.createBufferSource();
  effect.buffer = this.soundBuffer;
  if (this.peer) {
    effect.connect(this.peer);
    effect.start(0);
  }
}
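// Decodes the downloaded sound file into an AudioBuffer once the XHR completes.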
WebAudio.prototype.loadCompleted = function() {
  // decodeAudioData replaces the removed createBuffer(ArrayBuffer, bool) overload.
  this.context.decodeAudioData(this.request.response, function(buffer) {
    this.soundBuffer = buffer;
  }.bind(this));
}
WebAudio.prototype.loadSound = function(url) {
  this.request = new XMLHttpRequest();
  this.request.open('GET', url, true);
  this.request.responseType = 'arraybuffer';
  this.request.onload = this.loadCompleted.bind(this);
  this.request.send();
}
// Global methods.
function trace(txt) {
  display.innerHTML += txt + '<br>';
}
function logEvent(e) {
  console.log(e.type + ':' + e.target + ':' + e.target.id + ':muted=' +
              e.target.muted);
}
$ = function(id) {
  return document.getElementById(id);
};
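// Sets up the filter and requests microphone access; gotStream continues the
// call setup once the stream is available.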
function start() {
  webAudio.start();
  var constraints = {audio: true, video: false};
  getUserMedia(constraints, gotStream, gotStreamFailed);
  buttonStart.disabled = true;
  buttonStop.disabled = false;
}
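// Tears down the WebAudio graph, both peer connections and the local stream.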
function stop() {
  webAudio.stop();
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  buttonStart.disabled = false;
  buttonStop.disabled = true;
  // Stop the individual tracks; MediaStream.stop() has been removed.
  localStream.getTracks().forEach(function(track) {
    track.stop();
  });
}
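// Called with the microphone stream: applies the WebAudio filter and sets up
// a loopback call between two peer connections in the same page.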
function gotStream(stream) {
  var audioTracks = stream.getAudioTracks();
  if (audioTracks.length == 1) {
    console.log('gotStream({audio:true, video:false})');
    var filteredStream = webAudio.applyFilter(stream);
    var servers = null;
    pc1 = new webkitRTCPeerConnection(servers);
    console.log('Created local peer connection object pc1');
    pc1.onicecandidate = iceCallback1;
    pc2 = new webkitRTCPeerConnection(servers);
    console.log('Created remote peer connection object pc2');
    pc2.onicecandidate = iceCallback2;
    pc2.onaddstream = gotRemoteStream;
    pc1.addStream(filteredStream);
    pc1.createOffer(gotDescription1);
    stream.onended = function() {
      console.log('stream.onended');
      buttonStart.disabled = false;
      buttonStop.disabled = true;
    };
    localStream = stream;
  } else {
    alert('The media stream contains an unexpected number of audio tracks.');
    stream.getTracks().forEach(function(track) {
      track.stop();
    });
  }
}
function gotStreamFailed(error) {
  buttonStart.disabled = false;
  buttonStop.disabled = true;
  alert('Failed to get access to local media. Error code: ' + error.code);
}
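// Munges the SDP so that Opus (payload type 111) is the only audio codec in
// the m=audio line; video codecs (VP8) and the red/ulpfec resilience payloads
// are left untouched.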
function forceOpus(sdp) {
  // Remove all other codecs (not the video codecs though).
  sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                    'm=audio $1 RTP/SAVPF 111\r\n');
  sdp = sdp.replace(/a=rtpmap:(?!111)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
  return sdp;
}
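// Offer/answer handshake, done directly in JavaScript since both peer
// connections live in the same page (no signaling channel is needed).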
function gotDescription1(desc) {
  console.log('Offer from pc1 \n' + desc.sdp);
  var modifiedOffer = new RTCSessionDescription({type: 'offer',
                                                 sdp: forceOpus(desc.sdp)});
  pc1.setLocalDescription(modifiedOffer);
  console.log('Modified offer from pc1 \n' + modifiedOffer.sdp);
  pc2.setRemoteDescription(modifiedOffer);
  pc2.createAnswer(gotDescription2);
}
function gotDescription2(desc) {
  pc2.setLocalDescription(desc);
  console.log('Answer from pc2 \n' + desc.sdp);
  pc1.setRemoteDescription(desc);
}
function gotRemoteStream(e) {
  attachMediaStream(audioElement, e.stream);
}
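// ICE candidates are handed straight to the other peer connection; in a real
// application they would travel over the signaling channel instead.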
function iceCallback1(event) {
  if (event.candidate) {
    pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
                        onAddIceCandidateSuccess, onAddIceCandidateError);
    console.log('Local ICE candidate: \n' + event.candidate.candidate);
  }
}
function iceCallback2(event) {
  if (event.candidate) {
    pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
                        onAddIceCandidateSuccess, onAddIceCandidateError);
    console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
  }
}
function onAddIceCandidateSuccess() {
  trace('AddIceCandidate success.');
}
function onAddIceCandidateError(error) {
  trace('Failed to add Ice Candidate: ' + error.toString());
}
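// Any keypress mixes the loaded sound effect into the transmitted audio.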
function handleKeyDown(event) {
  webAudio.addEffect();
}
function doMix(checkbox) {
  webAudio.renderLocally(checkbox.checked);
}
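// Initializes the demo: creates the WebAudio helper, preloads the sound
// effect and wires up the UI elements.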
function onload() {
  webAudio = new WebAudio();
  webAudio.loadSound('../sounds/Shamisen-C4.wav');
  audioElement = $('audio');
  buttonStart = $('start');
  buttonStop = $('stop');
  display = $('display');
  document.addEventListener('keydown', handleKeyDown, false);
  buttonStart.disabled = false;
  buttonStop.disabled = true;
}
</script>
</head>
<body onload='onload()'>
<h2>Capture microphone input and stream it out to a peer with a processing
effect applied to the audio.</h2>
<p>The audio stream is: <br><br>
o Recorded using <a href="http://www.html5audio.org/2012/09/live-audio-input-comes-to-googles-chrome-canary.html"
title="Live audio input comes to Google's Chrome Canary">live audio
input.</a><br>
o Filtered using a highpass filter with fc=1500 Hz.<br>
o Encoded using <a href="http://www.opus-codec.org/" title="Opus Codec">
Opus.</a><br>
o Transmitted (in loopback) to a remote peer using
<a href="http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcpeerconnection-interface"
title="RTCPeerConnection Interface">RTCPeerConnection</a>, where it is decoded.<br>
o Finally, the received remote stream is used as the source for an &lt;audio&gt;
tag and played out locally.<br>
<br>Press any key to add a sound effect to the transmitted audio while talking.
</p>
<p>Please note that: <br><br>
o Linux is currently not supported.<br>
o Sample rate and channel configuration must be the same for the input and
output sides on Windows.<br>
o Only the default microphone device can be used for capturing.
</p>
<p>For more information, see <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/webrtc-integration.html"
title="Example 3: Capture microphone input and stream it out to a peer with a processing effect applied to the audio">
WebRTC integration with the Web Audio API.</a>
</p>
<style>
button {
  font: 14px sans-serif;
  padding: 8px;
}
</style>
<audio id="audio" autoplay controls></audio><br><br>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button><br><br>
Add local audio to output: <input id="mix" type="checkbox" onclick="doMix(this);"><br><br>
<pre id="display"></pre>
</body>
</html>