Diffstat (limited to 'testing/web-platform/tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/audiobuffersource-multi-channels.html')
 -rw-r--r--  testing/web-platform/tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/audiobuffersource-multi-channels.html  78
 1 file changed, 78 insertions, 0 deletions
diff --git a/testing/web-platform/tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/audiobuffersource-multi-channels.html b/testing/web-platform/tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/audiobuffersource-multi-channels.html
new file mode 100644
index 0000000000..4e0de21e96
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audiobuffersourcenode-interface/audiobuffersource-multi-channels.html
@@ -0,0 +1,78 @@
+<!DOCTYPE html>
+<!--
+Test that AudioBufferSourceNode supports 5.1-channel buffers.
+-->
+<html>
+ <head>
+ <title>
+ audiobuffersource-multi-channels.html
+ </title>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/webaudio/resources/audit-util.js"></script>
+ <script src="/webaudio/resources/audit.js"></script>
+ <script src="/webaudio/resources/mix-testing.js"></script>
+ </head>
+ <body>
+ <script id="layout-test-code">
+ let audit = Audit.createTaskRunner();
+ let context;
+ let expectedAudio;
+
+ audit.define('initialize', (task, should) => {
+ // Create offline audio context
+ let sampleRate = 44100.0;
+ should(() => {
+ context = new OfflineAudioContext(
+ 6, sampleRate * toneLengthSeconds, sampleRate);
+ }, 'Creating context for testing').notThrow();
+ should(
+ Audit
+ .loadFileFromUrl('resources/audiobuffersource-multi-channels-expected.wav')
+ .then(arrayBuffer => {
+ context.decodeAudioData(arrayBuffer).then(audioBuffer => {
+ expectedAudio = audioBuffer;
+ task.done();
+ }).catch(error => {
+ assert_unreached("Could not decode audio data due to " + error.message);
+ })
+ })
+ , 'Fetching expected audio').beResolved();
+ });
+
+ audit.define(
+ {label: 'test', description: 'AudioBufferSource with 5.1 buffer'},
+ (task, should) => {
+ let toneBuffer =
+ createToneBuffer(context, 440, toneLengthSeconds, 6);
+
+ let source = context.createBufferSource();
+ source.buffer = toneBuffer;
+
+ source.connect(context.destination);
+ source.start(0);
+
+ context.startRendering()
+ .then(renderedAudio => {
+ // Compute a comparison threshold from the maximum allowed error,
+ // |maxUlp|, in ULPs.  This value is experimentally determined.  The
+ // reference is a 16-bit wav file, so full scale is +/- 32768 and one
+ // ULP corresponds to 1/32768 of full scale.
+ let maxUlp = 1;
+ let threshold = maxUlp / 32768;
+ for (let k = 0; k < renderedAudio.numberOfChannels; ++k) {
+ should(
+ renderedAudio.getChannelData(k),
+ 'Rendered audio for channel ' + k)
+ .beCloseToArray(
+ expectedAudio.getChannelData(k),
+ {absoluteThreshold: threshold});
+ }
+ })
+ .then(() => task.done());
+ });
+
+ audit.run();
+ </script>
+ </body>
+</html>
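
Note: the helpers used above (createToneBuffer from mix-testing.js, Audit.loadFileFromUrl and beCloseToArray from audit.js) are not part of this diff. As a rough illustration of what the test exercises, here is a minimal standalone sketch using only standard Web Audio APIs, with a hypothetical makeToneBuffer helper standing in for mix-testing.js; it renders a 6-channel (5.1) tone through an AudioBufferSourceNode in an OfflineAudioContext:

  // Standalone sketch; assumes no audit.js / mix-testing.js helpers are available.
  const sampleRate = 44100;
  const durationSeconds = 1;   // stands in for toneLengthSeconds
  const channels = 6;          // 5.1 layout

  // Hypothetical replacement for createToneBuffer(): fill every channel
  // of a 6-channel buffer with a 440 Hz sine tone.
  function makeToneBuffer(context, frequency, seconds, numberOfChannels) {
    const length = Math.floor(seconds * context.sampleRate);
    const buffer = context.createBuffer(numberOfChannels, length, context.sampleRate);
    for (let c = 0; c < numberOfChannels; ++c) {
      const data = buffer.getChannelData(c);
      for (let i = 0; i < length; ++i) {
        data[i] = Math.sin(2 * Math.PI * frequency * i / context.sampleRate);
      }
    }
    return buffer;
  }

  const context = new OfflineAudioContext(channels, sampleRate * durationSeconds, sampleRate);
  const source = context.createBufferSource();
  source.buffer = makeToneBuffer(context, 440, durationSeconds, channels);
  source.connect(context.destination);
  source.start(0);

  context.startRendering().then(rendered => {
    // Each of the 6 channels should pass through unchanged; the test above
    // compares them against a prerecorded reference within 1/32768.
    console.log(rendered.numberOfChannels, rendered.length);
  });

Rendering into a 6-channel OfflineAudioContext lets the test capture all 5.1 channels deterministically and compare them sample-by-sample against the prerecorded reference, which is what the beCloseToArray check in the diff does within a 1-ULP (1/32768) threshold.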