blob: 69427b495b073513f82d11b3f7fc8a39f664656e [file] [log] [blame]
<!doctype html>
<!--
Tests that a MediaElementAudioSourceNode routed through
a script processor passes the stream data.
The script processor saves the input buffers it gets to a temporary
array, and after the playback has stopped, the contents are compared
to those of a loaded AudioBuffer with the same source.
Somewhat similar to a test from Mozilla:
(http://mxr.mozilla.org/mozilla-central/source/content/media/webaudio/test/test_mediaElementAudioSourceNode.html?force=1)
-->
<html class="a">
<head>
<title>MediaElementAudioSource interface test (to scriptProcessor)</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/vendor-prefixes.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
var elementSourceTest = async_test("Element Source tests completed");
// Source wav: 1 second of a 440 Hz sine at -6 dBFS.
var src = '/webaudio/resources/sin_440Hz_-6dBFS_1s.wav';
var BUFFER_SIZE = 2048;
var context = null;
// Captured samples per channel, grown block-by-block in processListener.
var actualBufferArrayC0 = new Float32Array(0);
var actualBufferArrayC1 = new Float32Array(0);
var audio = null, source = null, processor = null;
// Load and decode the same wav file used by the audio element, so the
// captured samples can be compared against a known-good AudioBuffer.
// 'event' is unused (the setTimeout callback receives no argument).
function loadExpectedBuffer(event) {
// Declare with var: the original leaked 'bufferLoader' as an implicit
// global, which throws under strict mode.
var bufferLoader = new BufferLoader(
context,
[src],
bufferLoadCompleted
);
bufferLoader.load();
}
// BufferLoader completion callback: hand the decoded buffer list to the
// comparison routine.
function bufferLoadCompleted(bufferList) {
runTests(bufferList);
}
// Return a new Float32Array containing the elements of 'first' followed by
// the elements of 'second'. Neither input array is modified.
function concatTypedArray(first, second) {
var merged = new Float32Array(first.length + second.length);
merged.set(first, 0);
merged.set(second, first.length);
return merged;
}
// Create Audio context
context = new AudioContext();
// Create an audio element, and a media element source
audio = document.createElement('audio');
// Same wav file that loadExpectedBuffer later decodes for comparison.
audio.src = src;
// Route the element's audio output into the Web Audio graph.
source = context.createMediaElementSource(audio);
// audioprocess handler: append each incoming block of samples for both
// channels onto the capture arrays.
function processListener(event) {
var input = event.inputBuffer;
actualBufferArrayC0 = concatTypedArray(actualBufferArrayC0, input.getChannelData(0));
actualBufferArrayC1 = concatTypedArray(actualBufferArrayC1, input.getChannelData(1));
}
// Create a processor node to copy the input to the actual buffer
processor = context.createScriptProcessor(BUFFER_SIZE);
source.connect(processor);
// ScriptProcessorNode only fires audioprocess while connected to a
// destination, so wire it through to the context output.
processor.connect(context.destination);
processor.addEventListener('audioprocess', processListener);
// When media playback has ended, load the expected buffer and compare it
// with the captured data.
audio.addEventListener("ended", function(e) {
// Setting a timeout since we need audioProcess event to run for all samples
window.setTimeout(loadExpectedBuffer, 50);
});
audio.play();
// Compare the captured channel data against the decoded expected buffer
// and report the results through testharness.js.
// 'expected' is the array of AudioBuffers produced by BufferLoader; only
// one source was requested, so expected[0] is the reference buffer.
function runTests(expected) {
// Stop capturing before inspecting the data.
source.disconnect();
processor.disconnect();
// firefox seems to process events after disconnect
processor.removeEventListener('audioprocess', processListener);
var expectedBuffer = expected[0];
// Trim the actual elements because we don't have a fine-grained
// control over the start and end time of recording the data.
// (The original also computed a trimmed expected length here, but the
// value was never used, so it has been removed.)
var actualTrimmedC0 = trimEmptyElements(actualBufferArrayC0);
var actualTrimmedC1 = trimEmptyElements(actualBufferArrayC1);
// Test that there is some data.
test(function() {
assert_greater_than(actualTrimmedC0.length, 0,
"processed data array (C0) length greater than 0");
assert_greater_than(actualTrimmedC1.length, 0,
"processed data array (C1) length greater than 0");
}, "Both channels processed some data");
// Test the actual contents of the 1st and second channel.
test(function() {
assert_array_approx_equals(
actualTrimmedC0,
trimEmptyElements(expectedBuffer.getChannelData(0)),
1e-4,
"comparing expected and rendered buffers (channel 0)");
assert_array_approx_equals(
actualTrimmedC1,
trimEmptyElements(expectedBuffer.getChannelData(1)),
1e-4,
"comparing expected and rendered buffers (channel 1)");
}, "All data processed correctly");
elementSourceTest.done();
}
</script>
</body>
</html>