| field | value |
|---|---|
| author | Daniel Baumann <daniel.baumann@progress-linux.org> (2024-04-07 19:33:14 +0000) |
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> (2024-04-07 19:33:14 +0000) |
| commit | 36d22d82aa202bb199967e9512281e9a53db42c9 |
| tree | 105e8c98ddea1c1e4784a60a5a6410fa416be2de /dom/vr/test/reftest |
| parent | Initial commit. |
Adding upstream version 115.7.0esr.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'dom/vr/test/reftest')
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | dom/vr/test/reftest/VRSimulationDriver.js | 60 |
| -rw-r--r-- | dom/vr/test/reftest/change_size.html | 168 |
| -rw-r--r-- | dom/vr/test/reftest/change_size.png | bin (0 -> 1439 bytes) |
| -rw-r--r-- | dom/vr/test/reftest/draw_rect.html | 136 |
| -rw-r--r-- | dom/vr/test/reftest/draw_rect.png | bin (0 -> 1747 bytes) |
| -rw-r--r-- | dom/vr/test/reftest/reftest.list | 10 |
| -rw-r--r-- | dom/vr/test/reftest/webgl-util.js | 61 |
| -rw-r--r-- | dom/vr/test/reftest/wrapper.html | 26 |
8 files changed, 461 insertions, 0 deletions
diff --git a/dom/vr/test/reftest/VRSimulationDriver.js b/dom/vr/test/reftest/VRSimulationDriver.js
new file mode 100644
index 0000000000..971cdb8626
--- /dev/null
+++ b/dom/vr/test/reftest/VRSimulationDriver.js
@@ -0,0 +1,60 @@
+
+var VRServiceTest;
+var vrMockDisplay;
+
+var VRSimulationDriver = (function() {
+"use strict";
+
+var AttachWebVRDisplay = function() {
+  if (vrMockDisplay) {
+    // Avoid creating multiple displays
+    return Promise.resolve(vrMockDisplay);
+  }
+  var promise = VRServiceTest.attachVRDisplay("VRDisplayTest");
+  promise.then(function (display) {
+    vrMockDisplay = display;
+  });
+
+  return promise;
+};
+
+var SetVRDisplayPose = function(position,
+                                linearVelocity, linearAcceleration,
+                                orientation, angularVelocity,
+                                angularAcceleration) {
+  vrMockDisplay.setPose(position, linearVelocity, linearAcceleration,
+                        orientation, angularVelocity, angularAcceleration);
+};
+
+var SetEyeResolution = function(width, height) {
+  vrMockDisplay.setEyeResolution(width, height);
+};
+
+var SetEyeParameter = function(eye, offsetX, offsetY, offsetZ,
+                               upDegree, rightDegree, downDegree, leftDegree) {
+  vrMockDisplay.setEyeParameter(eye, offsetX, offsetY, offsetZ, upDegree, rightDegree,
+                                downDegree, leftDegree);
+};
+
+var SetMountState = function(isMounted) {
+  vrMockDisplay.setMountState(isMounted);
+};
+
+var UpdateVRDisplay = function() {
+  vrMockDisplay.update();
+};
+
+var API = {
+  AttachWebVRDisplay: AttachWebVRDisplay,
+  SetVRDisplayPose: SetVRDisplayPose,
+  SetEyeResolution: SetEyeResolution,
+  SetEyeParameter: SetEyeParameter,
+  SetMountState: SetMountState,
+  UpdateVRDisplay: UpdateVRDisplay,
+
+  none: false
+};
+
+return API;
+
+}());
\ No newline at end of file
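VRSimulationDriver only wraps the puppet display exposed by `VRServiceTest`; the test pages below are responsible for obtaining that service and sequencing the calls themselves. As a minimal sketch of the intended call order (mirroring how change_size.html drives it, and assuming the `dom.vr.test.enabled` and `dom.vr.puppet.enabled` prefs from reftest.list are set so that `navigator.requestVRServiceTest()` is available):

```js
// Minimal usage sketch, based on change_size.html below.
// Assumes the WebVR test/puppet prefs are enabled by the reftest harness.
VRServiceTest = navigator.requestVRServiceTest();

VRSimulationDriver.AttachWebVRDisplay()
  .then(function() {
    // Configure the puppet display before a page starts presenting to it.
    VRSimulationDriver.SetEyeResolution(270, 300);
    VRSimulationDriver.UpdateVRDisplay();
  })
  .then(function() {
    // The attached puppet then shows up as an ordinary WebVR display.
    return navigator.getVRDisplays();
  })
  .then(function(displays) {
    // displays[0] can now be used with requestPresent(), as in the tests below.
  });
```

change_size.html additionally checks the resulting renderWidth/renderHeight of each eye against the values it configured once presentation starts.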
diff --git a/dom/vr/test/reftest/change_size.html b/dom/vr/test/reftest/change_size.html
new file mode 100644
index 0000000000..87d59f6a3c
--- /dev/null
+++ b/dom/vr/test/reftest/change_size.html
@@ -0,0 +1,168 @@
+<!DOCTYPE html>
+<meta charset='UTF-8'>
+<!-- Change the WebGL viewport size and submit the frame to the VR device as a base64 image.
+If this fails, something is seriously wrong. -->
+<html class="reftest-wait">
+<head>
+  <script type='text/javascript' src='webgl-util.js'></script>
+  <script type='text/javascript' src="VRSimulationDriver.js"></script>
+  <script id="vs" type="x-shader/x-vertex">
+    attribute vec2 aVertCoord;
+
+    void main(void) {
+      gl_Position = vec4(aVertCoord, 0.0, 1.0);
+    }
+  </script>
+  <script id="fs" type="x-shader/x-fragment">
+    precision mediump float;
+
+    void main(void) {
+      gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
+    }
+  </script>
+  <script type='text/javascript'>
+    'use strict';
+
+    var submitResult = null;
+    var vrDisplay = null;
+    var webglCanvas = null;
+    var gl = null;
+    var prog = null;
+    var img = null;
+    // Each eye is 270 x 300, so the full frame is 540 x 300 (the same aspect ratio as 2160 x 1200, like Vive and Oculus).
+    const eyeWidth = 270;
+    const eyeHeight = 300;
+
+    function setStatus(text) {
+      var elem = document.getElementById('status');
+      elem.innerHTML = text;
+    }
+
+    function initVRMock() {
+      VRServiceTest = navigator.requestVRServiceTest();
+      if (!VRServiceTest) {
+        setStatus('Failed to get VRServiceTest.');
+        return;
+      }
+
+      VRSimulationDriver.AttachWebVRDisplay().then(() => {
+        VRSimulationDriver.SetEyeResolution(eyeWidth, eyeHeight);
+        VRSimulationDriver.UpdateVRDisplay();
+      }).then(() => {
+        // Look for VR displays.
+        if (navigator.getVRDisplays) {
+          submitResult = new VRSubmitFrameResult();
+          navigator.getVRDisplays().then(function (displays) {
+            if (displays.length > 0) {
+              window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
+
+              vrDisplay = displays[0];
+              vrDisplay.requestPresent([{ source: webglCanvas }]);
+              vrDisplay.requestAnimationFrame(onAnimationFrame);
+            }
+          });
+        }
+      });
+    }
+
+    function onVRPresentChange() {
+      if (vrDisplay && vrDisplay.isPresenting) {
+        const leftEye = vrDisplay.getEyeParameters("left");
+        const rightEye = vrDisplay.getEyeParameters("right");
+
+        if (leftEye.renderWidth != rightEye.renderWidth ||
+            leftEye.renderWidth != eyeWidth) {
+          setStatus('renderWidth is not equal to eyeWidth.');
+        }
+
+        if (leftEye.renderHeight != rightEye.renderHeight ||
+            leftEye.renderHeight != eyeHeight) {
+          setStatus('renderHeight is not equal to eyeHeight.');
+        }
+        webglCanvas.width = leftEye.renderWidth * 2;
+        webglCanvas.height = leftEye.renderHeight;
+      }
+    }
+
+    function onAnimationFrame() {
+      if (!vrDisplay.isPresenting) {
+        return;
+      }
+
+      gl.clearColor(0.0, 1.0, 0.0, 1.0);
+      gl.clear(gl.COLOR_BUFFER_BIT);
+
+      // While presenting, render a stereo view.
+      gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      // Indicate to the VRDisplay that we're done rendering.
+      vrDisplay.submitFrame();
+      if (vrDisplay.getSubmitFrameResult(submitResult)) {
+        if (!img) {
+          img = document.createElement("img");
+          img.onload = function() {
+            // The img width will not be eyeWidth * 2 (540); it will
+            // be 544, because D3D11 CopyResource changes
+            // the destination image size.
+            if (img.height == eyeHeight) {
+              webglCanvas.style.display = 'none';
+              vrDisplay.exitPresent();
+              setTimeout(testComplete, 0);
+            }
+          };
+          img.src = submitResult.base64Image;
+          document.body.appendChild(img);
+        } else {
+          img.src = submitResult.base64Image;
+        }
+      }
+      vrDisplay.requestAnimationFrame(onAnimationFrame);
+    }
+
+    function runTest() {
+      webglCanvas = document.getElementById('canvas');
+      gl = webglCanvas.getContext('webgl');
+      if (!gl) {
+        setStatus('WebGL context creation failed.');
+        return;
+      }
+      gl.disable(gl.DEPTH_TEST);
+      prog = WebGLUtil.createProgramByIds(gl, 'vs', 'fs');
+      if (!prog) {
+        setStatus('Program linking failed.');
+        return;
+      }
+      prog.aVertCoord = gl.getAttribLocation(prog, "aVertCoord");
+
+      var vertCoordArr = new Float32Array([
+        -0.5, -0.5,
+         0.5, -0.5,
+        -0.5,  0.5,
+         0.5,  0.5,
+      ]);
+      var vertCoordBuff = gl.createBuffer();
+      gl.bindBuffer(gl.ARRAY_BUFFER, vertCoordBuff);
+      gl.bufferData(gl.ARRAY_BUFFER, vertCoordArr, gl.STATIC_DRAW);
+      gl.useProgram(prog);
+      gl.enableVertexAttribArray(prog.aVertCoord);
+      gl.vertexAttribPointer(prog.aVertCoord, 2, gl.FLOAT, false, 0, 0);
+
+      initVRMock();
+    }
+
+    function testComplete() {
+      document.documentElement.removeAttribute("class");
+    }
+  </script>
+</head>
+
+<body onload='runTest();'>
+  <canvas id='canvas' width='128' height='128'></canvas>
+  <div id='status'></div>
+</body>
+
+</html>
diff --git a/dom/vr/test/reftest/change_size.png b/dom/vr/test/reftest/change_size.png
new file mode 100644
index 0000000000..fe03114b20
--- /dev/null
+++ b/dom/vr/test/reftest/change_size.png
Binary files differ
diff --git a/dom/vr/test/reftest/draw_rect.html b/dom/vr/test/reftest/draw_rect.html
new file mode 100644
index 0000000000..acb8580c1b
--- /dev/null
+++ b/dom/vr/test/reftest/draw_rect.html
@@ -0,0 +1,136 @@
+<!DOCTYPE html>
+<meta charset='UTF-8'>
+<!-- Draw a rect in WebGL and submit it to the VR device as a base64 image.
+If this fails, something is seriously wrong. -->
+<html class="reftest-wait">
+<head>
+  <script type='text/javascript' src='webgl-util.js'></script>
+  <script type='text/javascript' src="VRSimulationDriver.js"></script>
+  <script id="vs" type="x-shader/x-vertex">
+    attribute vec2 aVertCoord;
+
+    void main(void) {
+      gl_Position = vec4(aVertCoord, 0.0, 1.0);
+    }
+  </script>
+  <script id="fs" type="x-shader/x-fragment">
+    precision mediump float;
+
+    void main(void) {
+      gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
+    }
+  </script>
+  <script type='text/javascript'>
+    'use strict';
+
+    var submitResult = null;
+    var vrDisplay = null;
+    var webglCanvas = null;
+    var gl = null;
+    var prog = null;
+    var img = null;
+
+    function setStatus(text) {
+      var elem = document.getElementById('status');
+      elem.innerHTML = text;
+    }
+
+    function initVRMock() {
+      VRServiceTest = navigator.requestVRServiceTest();
+      if (!VRServiceTest) {
+        setStatus('Failed to get VRServiceTest.');
+        return;
+      }
+
+      VRSimulationDriver.AttachWebVRDisplay().then(() => {
+        // Look for VR displays.
+        if (navigator.getVRDisplays) {
+          submitResult = new VRSubmitFrameResult();
+          navigator.getVRDisplays().then(function (displays) {
+            if (displays.length > 0) {
+              vrDisplay = displays[0];
+              vrDisplay.requestPresent([{ source: webglCanvas }]);
+              vrDisplay.requestAnimationFrame(onAnimationFrame);
+            }
+          });
+        }
+      });
+    }
+
+    function onAnimationFrame() {
+      if (!vrDisplay.isPresenting) {
+        return;
+      }
+
+      gl.clearColor(0.0, 1.0, 0.0, 1.0);
+      gl.clear(gl.COLOR_BUFFER_BIT);
+
+      // While presenting, render a stereo view.
+      gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      // Indicate to the VRDisplay that we're done rendering.
+      vrDisplay.submitFrame();
+      if (vrDisplay.getSubmitFrameResult(submitResult)) {
+        if (!img) {
+          img = document.createElement("img");
+          img.onload = function() {
+            webglCanvas.style.display = 'none';
+            vrDisplay.exitPresent();
+            setTimeout(testComplete, 0);
+          };
+          img.src = submitResult.base64Image;
+          document.body.appendChild(img);
+        } else {
+          img.src = submitResult.base64Image;
+        }
+      }
+      vrDisplay.requestAnimationFrame(onAnimationFrame);
+    }
+
+    function runTest() {
+      webglCanvas = document.getElementById('canvas');
+      gl = webglCanvas.getContext('webgl');
+      if (!gl) {
+        setStatus('WebGL context creation failed.');
+        return;
+      }
+      gl.disable(gl.DEPTH_TEST);
+      prog = WebGLUtil.createProgramByIds(gl, 'vs', 'fs');
+      if (!prog) {
+        setStatus('Program linking failed.');
+        return;
+      }
+      prog.aVertCoord = gl.getAttribLocation(prog, "aVertCoord");
+
+      var vertCoordArr = new Float32Array([
+        -0.5, -0.5,
+         0.5, -0.5,
+        -0.5,  0.5,
+         0.5,  0.5,
+      ]);
+      var vertCoordBuff = gl.createBuffer();
+      gl.bindBuffer(gl.ARRAY_BUFFER, vertCoordBuff);
+      gl.bufferData(gl.ARRAY_BUFFER, vertCoordArr, gl.STATIC_DRAW);
+      gl.useProgram(prog);
+      gl.enableVertexAttribArray(prog.aVertCoord);
+      gl.vertexAttribPointer(prog.aVertCoord, 2, gl.FLOAT, false, 0, 0);
+
+      initVRMock();
+    }
+
+    function testComplete() {
+      document.documentElement.removeAttribute("class");
+    }
+  </script>
+</head>
+
+<body onload='runTest();'>
+  <canvas id='canvas' width='256' height='256'></canvas>
+  <div id='status'></div>
+</body>
+
+</html>
diff --git a/dom/vr/test/reftest/draw_rect.png b/dom/vr/test/reftest/draw_rect.png
new file mode 100644
index 0000000000..0f4d24a0d1
--- /dev/null
+++ b/dom/vr/test/reftest/draw_rect.png
Binary files differ
diff --git a/dom/vr/test/reftest/reftest.list b/dom/vr/test/reftest/reftest.list
new file mode 100644
index 0000000000..c1ca292c71
--- /dev/null
+++ b/dom/vr/test/reftest/reftest.list
@@ -0,0 +1,10 @@
+# WebVR reftests
+# Please confirm that no other VR display is connected; otherwise the VRPuppetDisplay can't be attached.
+defaults pref(dom.vr.enabled,true) pref(dom.vr.puppet.enabled,true) pref(dom.vr.test.enabled,true) pref(dom.vr.require-gesture,false) pref(dom.vr.puppet.submitframe,1) pref(dom.vr.display.rafMaxDuration,200) pref(dom.vr.display.enumerate.interval,0) pref(dom.vr.controller.enumerate.interval,0)
+# These WebVR tests have been disabled while the gfxVRPuppet refactoring lands; re-enabling them is tracked by meta bug 1555185.
+# VR SubmitFrame is currently only implemented for D3D11.1 and macOS.
+# Our Windows 7 test machines don't support D3D11.1, so we run these tests on Windows 8+ only.
+# skip-if((!winWidget&&release_or_beta)||Android||gtkWidget||!layersGPUAccelerated) == draw_rect.html wrapper.html?draw_rect.png
+# On macOS we get a different color interpolation result.
+# For lower-resolution Mac hardware, this needs to be relaxed to fuzzy-if(cocoaWidget,0-1,0-1200).
+# fuzzy-if(/^Windows\x20NT\x2010\.0/.test(http.oscpu)||cocoaWidget,0-1,0-600) skip-if((!winWidget&&release_or_beta)||Android||gtkWidget||!layersGPUAccelerated) == change_size.html wrapper.html?change_size.png
diff --git a/dom/vr/test/reftest/webgl-util.js b/dom/vr/test/reftest/webgl-util.js
new file mode 100644
index 0000000000..42f1c5ccd5
--- /dev/null
+++ b/dom/vr/test/reftest/webgl-util.js
@@ -0,0 +1,61 @@
+WebGLUtil = (function() {
+  // ---------------------------------------------------------------------------
+  // WebGL helpers
+
+  // Returns a valid shader, or null on errors.
+  function createShaderById(gl, id) {
+    var elem = document.getElementById(id);
+    if (!elem) {
+      throw new Error(
+        "Failed to create shader from non-existent id '" + id + "'."
+      );
+    }
+
+    var src = elem.innerHTML.trim();
+
+    var shader;
+    if (elem.type == "x-shader/x-fragment") {
+      shader = gl.createShader(gl.FRAGMENT_SHADER);
+    } else if (elem.type == "x-shader/x-vertex") {
+      shader = gl.createShader(gl.VERTEX_SHADER);
+    } else {
+      throw new Error(
+        "Bad MIME type for shader '" + id + "': " + elem.type + "."
+      );
+    }
+
+    gl.shaderSource(shader, src);
+    gl.compileShader(shader);
+
+    return shader;
+  }
+
+  function createProgramByIds(gl, vsId, fsId) {
+    var vs = createShaderById(gl, vsId);
+    var fs = createShaderById(gl, fsId);
+    if (!vs || !fs) {
+      return null;
+    }
+
+    var prog = gl.createProgram();
+    gl.attachShader(prog, vs);
+    gl.attachShader(prog, fs);
+    gl.linkProgram(prog);
+
+    if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) {
+      var str = "Shader program linking failed:";
+      str += "\nShader program info log:\n" + gl.getProgramInfoLog(prog);
+      str += "\n\nVert shader log:\n" + gl.getShaderInfoLog(vs);
+      str += "\n\nFrag shader log:\n" + gl.getShaderInfoLog(fs);
+      console.error(str);
+      return null;
+    }
+
+    return prog;
+  }
+
+  return {
+    createShaderById,
+    createProgramByIds,
+  };
+})();
diff --git a/dom/vr/test/reftest/wrapper.html b/dom/vr/test/reftest/wrapper.html
new file mode 100644
index 0000000000..40d0de6e42
--- /dev/null
+++ b/dom/vr/test/reftest/wrapper.html
@@ -0,0 +1,26 @@
+<!DOCTYPE HTML>
+<html class="reftest-wait">
+<head>
+<title>Image reftest wrapper</title>
+<style type="text/css">
+  #image1 { background-color: rgb(10, 100, 250); }
+</style>
+<script>
+  // The image is loaded asynchronously after the page loads;
+  // wait for it to finish loading.
+  function onImageLoad() {
+    document.documentElement.removeAttribute("class");
+  }
+</script>
+</head>
+<body>
+<img id="image1">
+<script>
+  // Use as "wrapper.html?image.png"
+  var imgURL = document.location.search.substr(1);
+  document.images[0].onload = onImageLoad;
+  document.images[0].onerror = onImageLoad;
+  document.images[0].src = imgURL;
+</script>
+</body>
+</html>
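The manifest pairings in reftest.list above are currently commented out pending the gfxVRPuppet work tracked in bug 1555185. Stripped of their platform annotations, the two entries reduce to the usual reftest form, matching each test page against wrapper.html with the reference image passed as the query string:

```
== draw_rect.html wrapper.html?draw_rect.png
== change_size.html wrapper.html?change_size.png
```

The real entries keep the skip-if()/fuzzy-if() conditions quoted above, restricting the tests to platforms with VR SubmitFrame support and tolerating small color-interpolation differences.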