path: root/testing/web-platform/tests/webaudio/idlharness.https.window.js
blob: e941a75c26752c6359ccef31b0635759cd784c0c
// META: script=/resources/WebIDLParser.js
// META: script=/resources/idlharness.js
// META: timeout=long
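// Note: as a .window.js test, this file is served wrapped in a generated HTML
// harness page; the META lines above tell the wrapper to include the WebIDL
// parser and the idlharness library and to allow a long timeout.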

// https://webaudio.github.io/web-audio-api/
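// idl_test() is provided by idlharness.js: it loads the Web Audio API IDL plus
// the IDL of the dependency specs listed below, then checks each interface
// against the concrete objects registered in the setup callback.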

'use strict';

idl_test(
  ['webaudio'],
  ['cssom', 'uievents', 'mediacapture-streams', 'html', 'dom'],
  async idl_array => {
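    // SVGElement is referenced by the dependency IDL but is not itself under
    // test here, so register it as an untested stub to satisfy the reference.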
    idl_array.add_untested_idls('interface SVGElement {};');

    idl_array.add_objects({
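      // Entries mapped to an empty array have no concrete instance registered;
      // the interface itself is still checked.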
      BaseAudioContext: [],
      AudioContext: ['context'],
      OfflineAudioContext: ['new OfflineAudioContext(1, 1, sample_rate)'],
      OfflineAudioCompletionEvent: [
        'new OfflineAudioCompletionEvent("", {renderedBuffer: buffer})'
      ],
      AudioBuffer: ['buffer'],
      AudioNode: [],
      AudioParam: ['new AudioBufferSourceNode(context).playbackRate'],
      AudioScheduledSourceNode: [],
      AnalyserNode: ['new AnalyserNode(context)'],
      AudioBufferSourceNode: ['new AudioBufferSourceNode(context)'],
      AudioDestinationNode: ['context.destination'],
      AudioListener: ['context.listener'],
      AudioProcessingEvent: [`new AudioProcessingEvent('', {
        playbackTime: 0, inputBuffer: buffer, outputBuffer: buffer
      })`],
      BiquadFilterNode: ['new BiquadFilterNode(context)'],
      ChannelMergerNode: ['new ChannelMergerNode(context)'],
      ChannelSplitterNode: ['new ChannelSplitterNode(context)'],
      ConstantSourceNode: ['new ConstantSourceNode(context)'],
      ConvolverNode: ['new ConvolverNode(context)'],
      DelayNode: ['new DelayNode(context)'],
      DynamicsCompressorNode: ['new DynamicsCompressorNode(context)'],
      GainNode: ['new GainNode(context)'],
      IIRFilterNode: [
        'new IIRFilterNode(context, {feedforward: [1], feedback: [1]})'
      ],
      MediaElementAudioSourceNode: [
        'new MediaElementAudioSourceNode(context, {mediaElement: new Audio})'
      ],
      MediaStreamAudioDestinationNode: [
        'new MediaStreamAudioDestinationNode(context)'
      ],
      MediaStreamAudioSourceNode: [],
      MediaStreamTrackAudioSourceNode: [],
      OscillatorNode: ['new OscillatorNode(context)'],
      PannerNode: ['new PannerNode(context)'],
      PeriodicWave: ['new PeriodicWave(context)'],
      ScriptProcessorNode: ['context.createScriptProcessor()'],
      StereoPannerNode: ['new StereoPannerNode(context)'],
      WaveShaperNode: ['new WaveShaperNode(context)'],
      AudioWorklet: ['context.audioWorklet'],
      AudioWorkletGlobalScope: [],
      AudioParamMap: ['worklet_node.parameters'],
      AudioWorkletNode: ['worklet_node'],
      AudioWorkletProcessor: [],
    });

    // add_objects() only evaluates the strings registered above once the tests
    // run, after this setup callback resolves, so the globals those strings
    // reference are created here.
    self.sample_rate = 44100;
    self.context = new AudioContext;
    self.buffer = new AudioBuffer({length: 1, sampleRate: sample_rate});
    // dummy-processor.js registers the 'dummy' AudioWorkletProcessor needed to
    // construct the AudioWorkletNode test object.
    await context.audioWorklet.addModule(
      'the-audio-api/the-audioworklet-interface/processors/dummy-processor.js');
    self.worklet_node = new AudioWorkletNode(context, 'dummy');
  }
);