summaryrefslogtreecommitdiffstats
path: root/dom/vr/test/reftest/change_size.html
blob: 87d59f6a3c42b824fbfa0ba20b0ed1a0154efbfd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
<!DOCTYPE html>
<meta charset='UTF-8'>
<!-- Change the WebGL viewport size and submit the frame to the VR device as a base64 image.
     If this fails, something is seriously wrong. -->
<html class="reftest-wait">
<head>
  <script type='text/javascript' src='webgl-util.js'></script>
  <script type='text/javascript' src="VRSimulationDriver.js"></script>
  <script id="vs" type="x-shader/x-vertex">
    attribute vec2 aVertCoord;

    void main(void) {
      gl_Position = vec4(aVertCoord, 0.0, 1.0);
    }
  </script>
  <script id="fs" type="x-shader/x-fragment">
    precision mediump float;

    void main(void) {
      gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
    }
  </script>
  <script type='text/javascript'>
    'use strict';

    // Mutable state shared by the handlers below.
    var submitResult = null; // VRSubmitFrameResult filled in by getSubmitFrameResult().
    var vrDisplay = null;    // The (mock) VRDisplay we present to.
    var webglCanvas = null;  // Canvas whose rendered content is submitted to the display.
    var gl = null;           // WebGL context of webglCanvas.
    var prog = null;         // Linked shader program that draws a solid red quad.
    var img = null;          // <img> used to decode the base64 frame returned by the display.
    // Per-eye resolution is 270 x 300, i.e. 540 : 300 for the stereo frame —
    // the aspect ratio of 2160 * 1200 panels such as Vive and Oculus.
    const eyeWidth = 270;
    const eyeHeight = 300;

    // Display a status/error message in the page so a failing reftest is
    // diagnosable from the rendered output.
    function setStatus(text) {
      var elem = document.getElementById('status');
      // The messages are plain text, so assign textContent rather than
      // innerHTML — same visible result, no accidental markup parsing.
      elem.textContent = text;
    }

    // Install the mock VR service, attach a simulated display with the test's
    // eye resolution, then start presenting the canvas to it.
    function initVRMock() {
      // NOTE(review): this script is in strict mode, so this undeclared
      // assignment only works if VRServiceTest is a global declared by
      // VRSimulationDriver.js (which also reads it) — confirm there before
      // converting to a local.
      VRServiceTest = navigator.requestVRServiceTest();
      if (!VRServiceTest) {
        setStatus('VRServiceTest get failed.');
        return;
      }

      // Attach the simulated display, then push the desired per-eye
      // resolution to it before looking it up through the WebVR API.
      VRSimulationDriver.AttachWebVRDisplay().then(() => {
        VRSimulationDriver.SetEyeResolution(eyeWidth, eyeHeight);
        VRSimulationDriver.UpdateVRDisplay();
      }).then(() => {
        // Looking for VR displays
        if (navigator.getVRDisplays) {
          submitResult = new VRSubmitFrameResult();
          navigator.getVRDisplays().then(function (displays) {
            if (displays.length > 0) {
              window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);

              // Present the WebGL canvas and kick off the render loop.
              vrDisplay = displays[0];
              vrDisplay.requestPresent([{ source: webglCanvas }]);
              vrDisplay.requestAnimationFrame(onAnimationFrame);
            }
          });
        }
      });
    }

    // When presentation starts, check that the display reports the eye
    // resolution we configured, and size the canvas for side-by-side stereo.
    function onVRPresentChange() {
      // Ignore the event fired when presentation ends.
      if (!vrDisplay || !vrDisplay.isPresenting) {
        return;
      }

      const left = vrDisplay.getEyeParameters("left");
      const right = vrDisplay.getEyeParameters("right");

      // Both eyes must agree with the mock resolution set in initVRMock().
      if (left.renderWidth != right.renderWidth ||
          left.renderWidth != eyeWidth) {
        setStatus('renderWidth is not equal to eyeWidth.');
      }
      if (left.renderHeight != right.renderHeight ||
          left.renderHeight != eyeHeight) {
        setStatus('renderHeight is not equal to eyeHeight.');
      }

      // One eye per half of the canvas.
      webglCanvas.width = left.renderWidth * 2;
      webglCanvas.height = left.renderHeight;
    }

    // Per-frame callback: clear to green, draw a red quad into each eye's
    // half of the canvas, submit the frame, and capture the submitted frame
    // as a base64 image to decide when the test is done.
    function onAnimationFrame() {
      // Stop the loop once presentation has ended (after exitPresent()).
      if (!vrDisplay.isPresenting) {
        return;
      }

      gl.clearColor(0.0, 1.0, 0.0, 1.0);
      gl.clear(gl.COLOR_BUFFER_BIT);

      // While presenting, render a stereo view: left half of the canvas for
      // the left eye, right half for the right eye.
      gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

      gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

      // Tell the VRDisplay we're done rendering this frame.
      vrDisplay.submitFrame();
      if (vrDisplay.getSubmitFrameResult(submitResult)) {
        if (!img) {
          img = document.createElement("img");
          img.onload = function() {
            // The decoded width will not be exactly eyeWidth * 2 (540); it
            // can come back as e.g. 544 because D3D11 CopyResource changes
            // the destination image size — so only the height is checked.
            if (img.height === eyeHeight) {
              webglCanvas.style.display = 'none';
              vrDisplay.exitPresent();
              setTimeout(testComplete, 0);
            }
          };
          img.src = submitResult.base64Image;
          document.body.appendChild(img);
        } else {
          // Image element already exists; just decode the latest frame.
          img.src = submitResult.base64Image;
        }
      }
      vrDisplay.requestAnimationFrame(onAnimationFrame);
    }

    // Entry point (body onload): set up the WebGL context, the red-quad
    // shader program and its vertex buffer, then start the VR mock.
    function runTest() {
      webglCanvas = document.getElementById('canvas');
      gl = webglCanvas.getContext('webgl');
      if (!gl) {
        setStatus('WebGL context creation failed.');
        return;
      }
      gl.disable(gl.DEPTH_TEST);

      // Build the program from the inline <script> shader sources.
      prog = WebGLUtil.createProgramByIds(gl, 'vs', 'fs');
      if (!prog) {
        setStatus('Program linking failed.');
        return;
      }
      prog.aVertCoord = gl.getAttribLocation(prog, "aVertCoord");

      // A centered quad in clip space, drawn as a TRIANGLE_STRIP.
      const quadVertices = new Float32Array([
        -0.5, -0.5,
        0.5, -0.5,
        -0.5, 0.5,
        0.5, 0.5,
      ]);
      const quadBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, quadVertices, gl.STATIC_DRAW);
      gl.useProgram(prog);
      gl.enableVertexAttribArray(prog.aVertCoord);
      gl.vertexAttribPointer(prog.aVertCoord, 2, gl.FLOAT, false, 0, 0);

      initVRMock();
    }

    // Removing the "reftest-wait" class from <html> signals the reftest
    // harness that the page is finished and the snapshot can be taken.
    function testComplete() {
      document.documentElement.removeAttribute("class");
    }
  </script>
</head>

<body onload='runTest();'>
  <canvas id='canvas' width='128' height='128'></canvas>
  <div id='status'></div>
</body>

</html>