Skip to content
Permalink
Newer
Older
100644 289 lines (211 sloc) 8.18 KB
// Splash state: prompt the user to click. Browsers require a user gesture
// before audio may start, so the app proper only begins on the first click.
let heading = document.querySelector('h1');
heading.textContent = 'CLICK ANYWHERE TO START';

document.body.addEventListener('click', init);

// Runs once on the first click: swap in the app title and remove the
// one-shot listener so later clicks do nothing.
// NOTE(review): the `function init() {` header was lost in this copy of the
// file (the listener above references `init`); reconstructed here — confirm
// against the upstream source whether init wraps further setup.
function init() {
  heading.textContent = 'Voice-change-O-matic';
  document.body.removeEventListener('click', init);
}
// Polyfill: guarantee navigator.mediaDevices.getUserMedia exists as a
// Promise-returning function, bridging to the vendor-prefixed callback-style
// implementations found in older browsers.

// Older browsers might not implement mediaDevices at all, so start with an
// empty object rather than clobbering a partial implementation below.
if (navigator.mediaDevices === undefined) {
  navigator.mediaDevices = {};
}

// Only add getUserMedia when it is genuinely missing; assigning a whole new
// object would overwrite any properties the browser does provide.
if (navigator.mediaDevices.getUserMedia === undefined) {
  navigator.mediaDevices.getUserMedia = function (constraints) {
    // Pick whichever legacy, prefixed implementation is available.
    var legacyGetUserMedia =
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;

    // No implementation at all — still return a Promise (rejected) so
    // callers see a consistent interface.
    if (!legacyGetUserMedia) {
      return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
    }

    // Adapt the old success/error callback API to a Promise.
    return new Promise(function (resolve, reject) {
      legacyGetUserMedia.call(navigator, constraints, resolve, reject);
    });
  };
}
// Forked web audio context, for multiple browsers.
// The window. prefix is needed, otherwise Safari explodes.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();

// <select> controlling which voice effect is applied.
var voiceSelect = document.getElementById("voice");

// Assigned later, once the microphone stream is obtained.
var source;
var stream;

// Mute button, toggled by voiceMute() below.
var mute = document.querySelector('.mute');
// Set up the different audio nodes we will use for the app.
// The analyser drives the visualizer; the rest form the effect chain.
var analyser = audioCtx.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85;

var distortion = audioCtx.createWaveShaper();
var gainNode = audioCtx.createGain();
var biquadFilter = audioCtx.createBiquadFilter();
var convolver = audioCtx.createConvolver();
// Distortion curve for the waveshaper, thanks to Kevin Ennis
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
//
// amount   - drive amount; any non-numeric value falls back to 50.
// nSamples - optional curve resolution (generalized from the previously
//            hard-coded 44100; defaults to 44100 so existing callers are
//            unaffected).
// Returns a Float32Array mapping each input sample x in [-1, 1) to its
// shaped output value.
function makeDistortionCurve(amount, nSamples) {
  var k = typeof amount === 'number' ? amount : 50;
  var n_samples = typeof nSamples === 'number' ? nSamples : 44100;
  var curve = new Float32Array(n_samples);
  var deg = Math.PI / 180;
  for (var i = 0; i < n_samples; ++i) {
    // Map index i to x in [-1, 1), then apply the shaping formula.
    var x = i * 2 / n_samples - 1;
    curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x));
  }
  return curve;
}
// Grab the reverb impulse-response audio track via XHR for the convolver node.
// Fixes two pasted-in defects: ajaxRequest and soundSource were implicit
// globals, and the request was opened but never sent.
var ajaxRequest = new XMLHttpRequest();
ajaxRequest.open('GET', 'https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg', true);
ajaxRequest.responseType = 'arraybuffer';

ajaxRequest.onload = function() {
  var audioData = ajaxRequest.response;
  var soundSource; // declared so the commented-out lines below work if re-enabled

  // Decode the downloaded buffer and hand it to the convolver; the second
  // callback only logs, so a decode failure is best-effort by design.
  audioCtx.decodeAudioData(audioData, function(buffer) {
    soundSource = audioCtx.createBufferSource();
    convolver.buffer = buffer;
  }, function(e){ console.log("Error with decoding audio data" + e.err);});

  //soundSource.connect(audioCtx.destination);
  //soundSource.loop = true;
  //soundSource.start();
};

// Without this the onload handler never fires and the convolver buffer is
// never populated.
ajaxRequest.send();
// Set up the canvas context for the visualizer, sizing the canvas to match
// its wrapper element.
var canvas = document.querySelector('.visualizer');
var canvasCtx = canvas.getContext("2d");

var intendedWidth = document.querySelector('.wrapper').clientWidth;
canvas.setAttribute('width', intendedWidth);

var visualSelect = document.getElementById("visual");
// Main block for doing the audio recording: request the microphone, then
// wire the stream through the effect chain and start the UI loops.
if (navigator.mediaDevices.getUserMedia) {
  console.log('getUserMedia supported.');
  var constraints = { audio: true };
  navigator.mediaDevices.getUserMedia(constraints)
    .then(function (stream) {
      // mic -> distortion -> biquad -> gain -> analyser -> speakers,
      // with the convolver also feeding the gain node when selected.
      source = audioCtx.createMediaStreamSource(stream);
      source.connect(distortion);
      distortion.connect(biquadFilter);
      biquadFilter.connect(gainNode);
      convolver.connect(gainNode);
      gainNode.connect(analyser);
      analyser.connect(audioCtx.destination);

      visualize();
      voiceChange();
    })
    .catch(function (err) {
      console.log('The following gUM error occured: ' + err);
    });
} else {
  console.log('getUserMedia not supported on your browser!');
}
// Draw the selected visualization ("sinewave", "frequencybars", or "off")
// from the analyser's data, rescheduling itself via requestAnimationFrame.
// The frame id is kept in the file-level `drawVisual` so the onchange
// handler can cancel the running loop before switching modes.
//
// NOTE(review): several lines of this function were dropped in this copy
// (the `draw`/`drawAlt` wrappers, beginPath, the x increments, the kickoff
// calls, and closing braces); reconstructed here — verify against upstream.
function visualize() {
  WIDTH = canvas.width;
  HEIGHT = canvas.height;

  var visualSetting = visualSelect.value;
  console.log(visualSetting);

  if (visualSetting === "sinewave") {
    analyser.fftSize = 2048;
    var bufferLength = analyser.fftSize;
    console.log(bufferLength);
    var dataArray = new Uint8Array(bufferLength);

    canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

    var draw = function() {
      drawVisual = requestAnimationFrame(draw);

      analyser.getByteTimeDomainData(dataArray);

      canvasCtx.fillStyle = 'rgb(200, 200, 200)';
      canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

      canvasCtx.lineWidth = 2;
      canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
      canvasCtx.beginPath();

      var sliceWidth = WIDTH * 1.0 / bufferLength;
      var x = 0;

      for (var i = 0; i < bufferLength; i++) {
        // Byte samples center on 128; v is the normalized amplitude.
        var v = dataArray[i] / 128.0;
        var y = v * HEIGHT / 2;

        if (i === 0) {
          canvasCtx.moveTo(x, y);
        } else {
          canvasCtx.lineTo(x, y);
        }

        x += sliceWidth;
      }

      canvasCtx.lineTo(canvas.width, canvas.height / 2);
      canvasCtx.stroke();
    };

    draw();
  } else if (visualSetting == "frequencybars") {
    analyser.fftSize = 256;
    var bufferLengthAlt = analyser.frequencyBinCount;
    console.log(bufferLengthAlt);
    var dataArrayAlt = new Uint8Array(bufferLengthAlt);

    canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

    var drawAlt = function() {
      drawVisual = requestAnimationFrame(drawAlt);

      analyser.getByteFrequencyData(dataArrayAlt);

      canvasCtx.fillStyle = 'rgb(0, 0, 0)';
      canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

      var barWidth = (WIDTH / bufferLengthAlt) * 2.5;
      var barHeight;
      var x = 0;

      for (var i = 0; i < bufferLengthAlt; i++) {
        barHeight = dataArrayAlt[i];

        // Louder bins are drawn redder and taller.
        canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
        canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);

        x += barWidth + 1;
      }
    };

    drawAlt();
  } else if (visualSetting == "off") {
    canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
    canvasCtx.fillStyle = "red";
    canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
  }
}
// Apply the currently selected voice effect by reshaping the audio graph.
// Reads voiceSelect.value; the convolver is only connected into the path
// when chosen, otherwise the biquad feeds the gain node directly.
// NOTE(review): the function's closing brace was dropped in this copy;
// restored here.
function voiceChange() {
  distortion.oversample = '4x';
  // Reset the biquad gain so effects don't stack across changes.
  biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0);

  var voiceSetting = voiceSelect.value;
  console.log(voiceSetting);

  // When convolver is selected it is connected back into the audio path.
  if (voiceSetting == "convolver") {
    biquadFilter.disconnect(0);
    biquadFilter.connect(convolver);
  } else {
    biquadFilter.disconnect(0);
    biquadFilter.connect(gainNode);

    if (voiceSetting == "distortion") {
      distortion.curve = makeDistortionCurve(400);
    } else if (voiceSetting == "biquad") {
      biquadFilter.type = "lowshelf";
      biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0);
      biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0);
    } else if (voiceSetting == "off") {
      console.log("Voice settings turned off");
    }
  }
}
// Event listeners to change visualize and voice settings.
// Cancel the running animation frame before restarting the visualizer so
// two draw loops never run at once.
visualSelect.onchange = function () {
  window.cancelAnimationFrame(drawVisual);
  visualize();
};

voiceSelect.onchange = function () {
  voiceChange();
};
275
277
278
function voiceMute() {
279
if(mute.id === "") {
281
mute.id = "activated";
282
mute.innerHTML = "Unmute";
283
} else {
285
mute.id = "";
286
mute.innerHTML = "Mute";
287
}