<!doctype html>
<html>
  <head>
    <title>Panner Node Automation</title>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="../../resources/audit-util.js"></script>
    <script src="../../resources/audit.js"></script>
  </head>

  <body>
    <script>
      // Use a power of two to eliminate some round-off; otherwise the exact
      // value isn't really important.
      const sampleRate = 16384;

      // Render enough for the test; we don't need a lot.
      const renderFrames = 2048;

      // Initial panner positionX and final positionX for listener.
      const positionX = 2000;
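
      // Geometry of the test: the listener starts at the origin and the
      // panner sits at (positionX, 0, 0), i.e. directly to the listener's
      // right (with the default listener orientation, +X is the right-hand
      // direction).  Initially all of the source's energy should therefore
      // end up in the right channel.  Once the listener has moved to
      // |positionX|, source and listener coincide and both channels should
      // carry the same nonzero signal; verifyOutput() below checks this.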

      const audit = Audit.createTaskRunner();

      // Test that listener.positionX.value setter does the right thing.
      audit.define('Set Listener.positionX.value', (task, should) => {
        const context = new OfflineAudioContext(2, renderFrames, sampleRate);

        createGraph(context);

        // Frame at which the listener instantaneously moves to a new location.
        const moveFrame = 512;

        context.suspend(moveFrame / context.sampleRate)
            .then(() => {
              context.listener.positionX.value = positionX;
            })
            .then(() => context.resume());
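        // suspend() takes effect on a render quantum boundary; |moveFrame|
        // (512) is a multiple of the 128-frame render quantum, so the
        // position change should land exactly at frame |moveFrame|.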

        verifyOutput(context, moveFrame, should, 'listener.positionX.value')
            .then(() => task.done());
      });

      // Test that listener.positionX.setValueAtTime() does the right thing.
      audit.define('Listener.positionX.setValueAtTime', (task, should) => {
        const context = new OfflineAudioContext(2, renderFrames, sampleRate);

        createGraph(context);

        // Frame at which the listener instantaneously moves to a new location.
        const moveFrame = 512;

        context.listener.positionX.setValueAtTime(
            positionX, moveFrame / context.sampleRate);
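        // Unlike the .value test above, setValueAtTime() schedules the
        // position change as an AudioParam automation event, so no
        // suspend()/resume() is needed to line it up with |moveFrame|.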

        verifyOutput(
            context, moveFrame, should, 'listener.positionX.setValueAtTime')
            .then(() => task.done());
      });

      // Test that listener.setPosition() does the right thing.
      audit.define('Listener.setPosition', (task, should) => {
        const context = new OfflineAudioContext(2, renderFrames, sampleRate);

        createGraph(context);

        // Frame at which the listener instantaneously moves to a new location.
        const moveFrame = 512;

        context.suspend(moveFrame / context.sampleRate)
            .then(() => {
              context.listener.setPosition(positionX, 0, 0);
            })
            .then(() => context.resume());
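        // setPosition(x, y, z) is the legacy interface; per the spec it is
        // equivalent to setting the positionX/Y/Z .value attributes directly,
        // so the expected output matches the .value test above.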

        verifyOutput(context, moveFrame, should, 'listener.setPosition')
            .then(() => task.done());
      });

      audit.run();


      // Create the basic graph for testing, which consists of an oscillator
      // node connected to a panner node.
      function createGraph(context) {
        const listener = context.listener;

        listener.positionX.value = 0;
        listener.positionY.value = 0;
        listener.positionZ.value = 0;

        const src = new OscillatorNode(context);

        const panner = new PannerNode(context, {
          distanceModel: 'linear',
          refDistance: 1,
          maxDistance: 3000,
          positionX: positionX,
          positionY: 0,
          positionZ: 0
        });
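        // With the linear distance model and the default rolloffFactor of 1,
        // the distance gain at |positionX| = 2000 works out to roughly
        // 1 - (2000 - 1) / (3000 - 1), i.e. about 1/3, so the panned signal
        // is attenuated but stays well above zero.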
        src.connect(panner).connect(context.destination);

        src.start();
      }


      // Verify the output from the panner is correct.
      function verifyOutput(context, moveFrame, should, prefix) {
        return context.startRendering().then(resultBuffer => {
          // Get the outputs (left and right)
          const c0 = resultBuffer.getChannelData(0);
          const c1 = resultBuffer.getChannelData(1);

          // The src/listener setup is such that audio should only come
          // from the right until |moveFrame|.  Hence the left channel
          // should be 0 (or very nearly 0).
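          // (With the source directly on the listener's +X axis the azimuth
          // is +90 degrees, which under the default equal-power panning
          // model gives a left gain of cos(pi/2) = 0 and a right gain of
          // sin(pi/2) = 1.)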
          const zero = new Float32Array(moveFrame);

          should(
              c0.slice(0, moveFrame), `${prefix}: output0[0:${moveFrame - 1}]`)
              .beCloseToArray(zero, {absoluteThreshold: 1e-16});
          should(
              c1.slice(0, moveFrame), `${prefix}: output1[0:${moveFrame - 1}]`)
              .notBeConstantValueOf(0);

          // At |moveFrame| and beyond, the listener and source are at the
          // same position, so the outputs from the left and right should be
          // identical, and the left channel should not be 0 anymore.
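          // (Once the positions coincide the azimuth computation degenerates
          // and is taken to be 0, giving equal left/right gains; the distance
          // is clamped up to refDistance, so the distance gain is 1.)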

          should(c0.slice(moveFrame), `${prefix}: output0[${moveFrame}:]`)
              .notBeConstantValueOf(0);
          should(c1.slice(moveFrame), `${prefix}: output1[${moveFrame}:]`)
              .beCloseToArray(c0.slice(moveFrame));
        });
      }
    </script>
  </body>
</html>