Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Web Audio] Added Audio API tests: MediaElementAudioSource interface #396

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
24 changes: 24 additions & 0 deletions webaudio/js/helpers.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,27 @@ function assert_array_approx_equals(actual, expected, epsilon, description)
assert_approx_equals(actual[i], expected[i], epsilon, (description + ": element " + i))
}
}

/*
Returns a subarray (a view, so the input must be a typed array) of the passed
array with leading and trailing zero-valued elements removed
*/
function trimEmptyElements(array) {
var start = 0;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think I see why this function is required - it's because calling play on an audio element doesn't happen at a predictable time?

In the case of these tests I think it's required, but we should be careful not to use this function as a crutch.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Correct, couldn't get enough precision (per sample) with the media events such as play. Would be happy to get rid of this function, but couldn't find a better way to compare the actual play/stop of the media.

var end = array.length;

while (start < array.length) {
if (array[start] !== 0) {
break;
}
start++;
}

while (end > 0) {
end--;
if (array[end] !== 0) {
break;
}
}
return array.subarray(start, end);
}
Binary file added webaudio/resources/noise_1sec_1channels.wav
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
<!doctype html>

<!--
Tests a MediaElementSource node by creating it and passing it through an OfflineAudioContext.
The rendered results are compared with an expected loaded buffer, based on the same file.


NOTE: No known implementation
-->

<html class="a">
<head>
<title>MediaElementAudioSource interface test (to OfflineContext)</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/vendor-prefixes.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
var elementSourceTest = async_test("Element Source tests completed");

var src = '/webaudio/resources/noise_1sec_1channels.wav';
var sampleRate = 44100.0;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How was this file generated? In this test we need to test the file playback, so I understand why we need the binary file. It would be good if we could document alongside the file (preferably as something executable) how the file was generated. Any ideas?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Generated using http://www.wavtones.com/functiongenerator.php .

I agree it would be preferable to have something to generate the file as part of the platform tests, I'll see if I can make it. If not, is this an acceptable source (annotating the code with the url)?

var lengthInSeconds = 3;

var context = null, audio = null, source = null;
var actualBuffer = null;


function loadExpectedBuffer(event) {
actualBuffer = event.renderedBuffer;
bufferLoader = new BufferLoader(
context,
[src],
bufferLoadCompleted
);
bufferLoader.load();
};

function bufferLoadCompleted(buffer) {
runTests(buffer);
};

context = new OfflineAudioContext(1, sampleRate * lengthInSeconds, sampleRate);
var audio = document.createElement('audio');
audio.src = src;
var source = context.createMediaElementSource(audio);
source.connect(context.destination);

audio.addEventListener("playing", function(e) {
console.log("play", e);
context.startRendering();
});

context.oncomplete = loadExpectedBuffer;

audio.play();


function runTests(expected) {
expectedBuffer = expected[0];

test(function() {
var actualTrimmed = trimEmptyElements(actualBuffer.getChannelData(0));
assert_greater_than(actualTrimmed.length, 0,
"processed data array (Channel 0) length greater than 0");
}, "The actual buffer contains some data");


test(function() {
var length = expectedBuffer.getChannelData(0).length;
assert_array_approx_equals(
actualBuffer.getChannelData(0).subarray(0, length),
expectedBuffer.getChannelData(0),
1e-4,
"comparing expected and (the start of) rendered buffers (channel 0)");
}, "start of data is correct");

elementSourceTest.done();
};
</script>
</body>
</html>
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
<!doctype html>

<!--
Tests that a create MediaElementSourceNode that is passed through
a script processor passes the stream data.
The script processor saves the input buffers it gets to a temporary
array, and after the playback has stopped, the contents are compared
to those of a loaded AudioBuffer with the same source.

Somewhat similar to a test from Mozilla:
(http://mxr.mozilla.org/mozilla-central/source/content/media/webaudio/test/test_mediaElementAudioSourceNode.html?force=1)
-->

<html class="a">
<head>
<title>MediaElementAudioSource interface test (to scriptProcessor)</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/vendor-prefixes.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
var elementSourceTest = async_test("Element Source tests completed");

// File generated using /webaudio/utilities/wave-file-generator.html
var src = '/webaudio/resources/noise_1sec_1channels.wav';
var BUFFER_SIZE = 2048;
var context = null;
var actualBufferArrayC0 = new Float32Array(0);
var actualBufferArrayC1 = new Float32Array(0);
var audio = null, source = null, processor = null



function loadExpectedBuffer(event) {
bufferLoader = new BufferLoader(
context,
[src],
bufferLoadCompleted
);
bufferLoader.load();
};

function bufferLoadCompleted(buffer) {
runTests(buffer);
};

function concatTypedArray(arr1, arr2) {
var result = new Float32Array(arr1.length + arr2.length);
result.set(arr1);
result.set(arr2, arr1.length);
return result;
}

function processAudio(e) {
var inputBufferData = e.inputBuffer.getChannelData(0);
actualBufferArrayC0 = concatTypedArray(actualBufferArrayC0, e.inputBuffer.getChannelData(0));
}

// Create Audio context
context = new AudioContext();

// Create an audio element, and a media element source
audio = document.createElement('audio');
audio.src = src;
source = context.createMediaElementSource(audio);

// Create a processor node, and set its audio process
// event to save the data
processor = context.createScriptProcessor(BUFFER_SIZE);
source.connect(processor);
processor.connect(context.destination);
processor.addEventListener('audioprocess', processAudio);


// Nothing to do with this event at the moment to control precisely
// Saving the input
audio.addEventListener("canplaythrough", function() {
//console.log("oncanplaythorough");
});

// When media playback ended, save the begin to compare with expected buffer
audio.addEventListener("ended", function(e) {
// Setting a timeout since we need audioProcess event to run for all samples
window.setTimeout(loadExpectedBuffer, 50);
});

audio.play();


function runTests(expected) {

source.disconnect();
processor.disconnect();
// firefox seems to process events after disconnect
processor.removeEventListener('audioprocess', processAudio)

var expectedBuffer = expected[0];
window.expectedBuffer = expectedBuffer;
// Trim the actual elements because we don't have a fine-grained
// control over the start and end time of recording the data.
var actualTrimmedC0 = trimEmptyElements(actualBufferArrayC0);
var expectedLength = trimEmptyElements(expectedBuffer.getChannelData(0)).length;


// Test that there is some data.
test(function() {
assert_greater_than(actualTrimmedC0.length, 0,
"processed data array (C0) length greater than 0");
}, "Channel 0 processed some data");

// Test the actual contents of the 1st and second channel.
test(function() {
assert_array_approx_equals(
actualTrimmedC0,
trimEmptyElements(expectedBuffer.getChannelData(0)),
1e-4,
"comparing expected and rendered buffers (channel 0)");
}, "All data processed correctly");

// Test the actual contents of the 1st and second channel.
test(function() {
assert_array_approx_equals(
actualTrimmedC0.subarray(0, expectedLength),
trimEmptyElements(expectedBuffer.getChannelData(0)),
1e-4,
"comparing expected and rendered buffers (channel 0)");
}, "Start of data processed correctly");

elementSourceTest.done();
};
</script>
</body>
</html>
27 changes: 27 additions & 0 deletions webaudio/utilities/wave-file-generator.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Wave file generator</title>
<script src="wave-file-generator.js"></script>
</head>
<body>

<form id="generateForm">
<label for="seconds">Length in seconds</label>
<input type="number" id="seconds" min="0" value="1">
<label for="channels">channels</label>
<input type="number" id="channels" min="1" max="2" step="1" value="1">

<input type='button' id="generate" value="Generate" />
</form>
<div>
<audio controls="true" id="generated"></audio>
<a id='download'>Download</a>
</div>

<script>
document.getElementById('generate').addEventListener('click', generateWave);
</script>
</body>
</html>