Diffstat (limited to 'dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts')
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts | 349
1 file changed, 237 insertions, 112 deletions
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts
index baa2a985d2..8e812ccd2a 100644
--- a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/external_texture/video.spec.ts
@@ -1,20 +1,25 @@
export const description = `
Tests for external textures from HTMLVideoElement (and other video-type sources?).
-- videos with various encodings/formats (webm vp8, webm vp9, ogg theora, mp4), color spaces
- (bt.601, bt.709, bt.2020)
+- videos with various encodings/formats (webm vp8, webm vp9, ogg theora, mp4), video color spaces
+ (bt.601, bt.709, bt.2020) and dst color spaces (display-p3, srgb)
TODO: consider whether external_texture and copyToTexture video tests should be in the same file
+TODO(#3193): Test video in BT.2020 color space
`;
import { makeTestGroup } from '../../../common/framework/test_group.js';
import { GPUTest, TextureTestMixin } from '../../gpu_test.js';
+import { createCanvas } from '../../util/create_elements.js';
import {
startPlayingAndWaitForVideo,
getVideoFrameFromVideoElement,
getVideoElement,
- kVideoExpectations,
- kVideoRotationExpectations,
+ convertToUnorm8,
+ kPredefinedColorSpace,
+ kVideoNames,
+ kVideoInfo,
+ kVideoExpectedColors,
} from '../../web_platform/util.js';
const kHeight = 16;
@@ -23,7 +28,10 @@ const kFormat = 'rgba8unorm';
export const g = makeTestGroup(TextureTestMixin(GPUTest));
-function createExternalTextureSamplingTestPipeline(t: GPUTest): GPURenderPipeline {
+function createExternalTextureSamplingTestPipeline(
+ t: GPUTest,
+ colorAttachmentFormat: GPUTextureFormat = kFormat
+): GPURenderPipeline {
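+ // The color attachment format defaults to kFormat (rgba8unorm) but may be
+ // overridden, e.g. with bgra8unorm for BGRA-ordered video frames.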
const pipeline = t.device.createRenderPipeline({
layout: 'auto',
vertex: {
@@ -59,7 +67,7 @@ function createExternalTextureSamplingTestPipeline(t: GPUTest): GPURenderPipelin
entryPoint: 'main',
targets: [
{
- format: kFormat,
+ format: colorAttachmentFormat,
},
],
},
@@ -73,13 +81,14 @@ function createExternalTextureSamplingTestBindGroup(
t: GPUTest,
checkNonStandardIsZeroCopy: true | undefined,
source: HTMLVideoElement | VideoFrame,
- pipeline: GPURenderPipeline
+ pipeline: GPURenderPipeline,
+ dstColorSpace: PredefinedColorSpace
): GPUBindGroup {
const linearSampler = t.device.createSampler();
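+ // importExternalTexture performs any needed YUV-to-RGB decoding; its colorSpace
+ // member names the color space the sampled values are converted into.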
const externalTexture = t.device.importExternalTexture({
- /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
- source: source as any,
+ source,
+ colorSpace: dstColorSpace,
});
if (checkNonStandardIsZeroCopy) {
@@ -133,22 +142,24 @@ g.test('importExternalTexture,sample')
.desc(
`
Tests that we can import an HTMLVideoElement/VideoFrame into a GPUExternalTexture, sample from it
-for several combinations of video format and color space.
+for several combinations of video format, video color space, and dst color space.
`
)
.params(u =>
u //
.combineWithParams(checkNonStandardIsZeroCopyIfAvailable())
+ .combine('videoName', kVideoNames)
.combine('sourceType', ['VideoElement', 'VideoFrame'] as const)
- .combineWithParams(kVideoExpectations)
+ .combine('dstColorSpace', kPredefinedColorSpace)
)
.fn(async t => {
- const sourceType = t.params.sourceType;
+ const { videoName, sourceType, dstColorSpace } = t.params;
+
if (sourceType === 'VideoFrame' && typeof VideoFrame === 'undefined') {
t.skip('WebCodec is not supported');
}
- const videoElement = getVideoElement(t, t.params.videoName);
+ const videoElement = getVideoElement(t, videoName);
await startPlayingAndWaitForVideo(videoElement, async () => {
const source =
@@ -167,7 +178,8 @@ for several combinations of video format and color space.
t,
t.params.checkNonStandardIsZeroCopy,
source,
- pipeline
+ pipeline,
+ dstColorSpace
);
const commandEncoder = t.device.createCommandEncoder();
@@ -187,88 +199,162 @@ for several combinations of video format and color space.
passEncoder.end();
t.device.queue.submit([commandEncoder.finish()]);
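+ // Expected colors are keyed by the (source, destination) color space pair and
+ // converted to 8-bit unorm for texel comparison.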
+ const srcColorSpace = kVideoInfo[videoName].colorSpace;
+ const presentColors = kVideoExpectedColors[srcColorSpace][dstColorSpace];
+
+ // The visible rect is the whole frame; no clipping occurs.
+ const expect = kVideoInfo[videoName].display;
+
// For validation, we sample a few pixels away from the edges to avoid compression
// artifacts.
t.expectSinglePixelComparisonsAreOkInTexture({ texture: colorAttachment }, [
- // Top-left should be yellow.
- { coord: { x: kWidth * 0.25, y: kHeight * 0.25 }, exp: t.params._yellowExpectation },
- // Top-right should be red.
- { coord: { x: kWidth * 0.75, y: kHeight * 0.25 }, exp: t.params._redExpectation },
- // Bottom-left should be blue.
- { coord: { x: kWidth * 0.25, y: kHeight * 0.75 }, exp: t.params._blueExpectation },
- // Bottom-right should be green.
- { coord: { x: kWidth * 0.75, y: kHeight * 0.75 }, exp: t.params._greenExpectation },
+ // Top-left.
+ {
+ coord: { x: kWidth * 0.25, y: kHeight * 0.25 },
+ exp: convertToUnorm8(presentColors[expect.topLeftColor]),
+ },
+ // Top-right.
+ {
+ coord: { x: kWidth * 0.75, y: kHeight * 0.25 },
+ exp: convertToUnorm8(presentColors[expect.topRightColor]),
+ },
+ // Bottom-left.
+ {
+ coord: { x: kWidth * 0.25, y: kHeight * 0.75 },
+ exp: convertToUnorm8(presentColors[expect.bottomLeftColor]),
+ },
+ // Bottom-right.
+ {
+ coord: { x: kWidth * 0.75, y: kHeight * 0.75 },
+ exp: convertToUnorm8(presentColors[expect.bottomRightColor]),
+ },
]);
-
- if (sourceType === 'VideoFrame') (source as VideoFrame).close();
});
});
-g.test('importExternalTexture,sampleWithRotationMetadata')
+g.test('importExternalTexture,sample_non_YUV_video_frame')
.desc(
`
-Tests that when importing an HTMLVideoElement/VideoFrame into a GPUExternalTexture, sampling from
-it will honor rotation metadata.
+Tests that we can import a VideoFrame with a non-YUV pixel format into a GPUExternalTexture and sample it.
`
)
.params(u =>
u //
- .combineWithParams(checkNonStandardIsZeroCopyIfAvailable())
- .combine('sourceType', ['VideoElement', 'VideoFrame'] as const)
- .combineWithParams(kVideoRotationExpectations)
+ .combine('videoFrameFormat', ['RGBA', 'RGBX', 'BGRA', 'BGRX'] as const)
)
- .fn(async t => {
- const sourceType = t.params.sourceType;
- const videoElement = getVideoElement(t, t.params.videoName);
+ .fn(t => {
+ const { videoFrameFormat } = t.params;
- await startPlayingAndWaitForVideo(videoElement, async () => {
- const source =
- sourceType === 'VideoFrame'
- ? await getVideoFrameFromVideoElement(t, videoElement)
- : videoElement;
+ if (typeof VideoFrame === 'undefined') {
+ t.skip('WebCodec is not supported');
+ }
- const colorAttachment = t.device.createTexture({
- format: kFormat,
- size: { width: kWidth, height: kHeight, depthOrArrayLayers: 1 },
- usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.RENDER_ATTACHMENT,
- });
+ const canvas = createCanvas(t, 'onscreen', kWidth, kHeight);
- const pipeline = createExternalTextureSamplingTestPipeline(t);
- const bindGroup = createExternalTextureSamplingTestBindGroup(
- t,
- t.params.checkNonStandardIsZeroCopy,
- source,
- pipeline
- );
+ const canvasContext = canvas.getContext('2d');
- const commandEncoder = t.device.createCommandEncoder();
- const passEncoder = commandEncoder.beginRenderPass({
- colorAttachments: [
- {
- view: colorAttachment.createView(),
- clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
- loadOp: 'clear',
- storeOp: 'store',
- },
- ],
- });
- passEncoder.setPipeline(pipeline);
- passEncoder.setBindGroup(0, bindGroup);
- passEncoder.draw(6);
- passEncoder.end();
- t.device.queue.submit([commandEncoder.finish()]);
+ if (canvasContext === null) {
+ t.skip('onscreen canvas 2d context not available');
+ }
- // For validation, we sample a few pixels away from the edges to avoid compression
- // artifacts.
- t.expectSinglePixelComparisonsAreOkInTexture({ texture: colorAttachment }, [
- { coord: { x: kWidth * 0.25, y: kHeight * 0.25 }, exp: t.params._topLeftExpectation },
- { coord: { x: kWidth * 0.75, y: kHeight * 0.25 }, exp: t.params._topRightExpectation },
- { coord: { x: kWidth * 0.25, y: kHeight * 0.75 }, exp: t.params._bottomLeftExpectation },
- { coord: { x: kWidth * 0.75, y: kHeight * 0.75 }, exp: t.params._bottomRightExpectation },
- ]);
+ const ctx = canvasContext as CanvasRenderingContext2D;
+
+ const rectWidth = Math.floor(kWidth / 2);
+ const rectHeight = Math.floor(kHeight / 2);
+
+ // Red
+ ctx.fillStyle = `rgba(255, 0, 0, 1.0)`;
+ ctx.fillRect(0, 0, rectWidth, rectHeight);
+ // Lime
+ ctx.fillStyle = `rgba(0, 255, 0, 1.0)`;
+ ctx.fillRect(rectWidth, 0, kWidth - rectWidth, rectHeight);
+ // Blue
+ ctx.fillStyle = `rgba(0, 0, 255, 1.0)`;
+ ctx.fillRect(0, rectHeight, rectWidth, kHeight - rectHeight);
+ // Fuchsia
+ ctx.fillStyle = `rgba(255, 0, 255, 1.0)`;
+ ctx.fillRect(rectWidth, rectHeight, kWidth - rectWidth, kHeight - rectHeight);
+
+ const imageData = ctx.getImageData(0, 0, kWidth, kHeight);
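+ // getImageData returns tightly packed, non-premultiplied RGBA bytes; this
+ // buffer backs the VideoFrame constructed below.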
+
+ // Create video frame with default color space 'srgb'
+ const frameInit: VideoFrameBufferInit = {
+ format: videoFrameFormat,
+ codedWidth: kWidth,
+ codedHeight: kHeight,
+ timestamp: 0,
+ };
+
+ const frame = new VideoFrame(imageData.data.buffer, frameInit);
+ let textureFormat: GPUTextureFormat = 'rgba8unorm';
+
+ if (videoFrameFormat === 'BGRA' || videoFrameFormat === 'BGRX') {
+ textureFormat = 'bgra8unorm';
+ }
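+ // The attachment format mirrors the frame's channel order, presumably so the
+ // same expected byte patterns hold for both RGBA- and BGRA-ordered formats.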
- if (sourceType === 'VideoFrame') (source as VideoFrame).close();
+ const colorAttachment = t.device.createTexture({
+ format: textureFormat,
+ size: { width: kWidth, height: kHeight, depthOrArrayLayers: 1 },
+ usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+
+ const pipeline = createExternalTextureSamplingTestPipeline(t, textureFormat);
+ const bindGroup = createExternalTextureSamplingTestBindGroup(
+ t,
+ undefined /* checkNonStandardIsZeroCopy */,
+ frame,
+ pipeline,
+ 'srgb'
+ );
+
+ const commandEncoder = t.device.createCommandEncoder();
+ const passEncoder = commandEncoder.beginRenderPass({
+ colorAttachments: [
+ {
+ view: colorAttachment.createView(),
+ clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
});
+ passEncoder.setPipeline(pipeline);
+ passEncoder.setBindGroup(0, bindGroup);
+ passEncoder.draw(6);
+ passEncoder.end();
+ t.device.queue.submit([commandEncoder.finish()]);
+
+ const expected = {
+ topLeft: new Uint8Array([255, 0, 0, 255]),
+ topRight: new Uint8Array([0, 255, 0, 255]),
+ bottomLeft: new Uint8Array([0, 0, 255, 255]),
+ bottomRight: new Uint8Array([255, 0, 255, 255]),
+ };
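+ // Frame and import both use 'srgb', so the canvas fill colors should pass
+ // through without color space conversion.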
+
+ // For validation, we sample a few pixels away from the quadrant edges to avoid
+ // filtering artifacts at the color boundaries.
+ t.expectSinglePixelComparisonsAreOkInTexture({ texture: colorAttachment }, [
+ // Top-left.
+ {
+ coord: { x: kWidth * 0.25, y: kHeight * 0.25 },
+ exp: expected.topLeft,
+ },
+ // Top-right.
+ {
+ coord: { x: kWidth * 0.75, y: kHeight * 0.25 },
+ exp: expected.topRight,
+ },
+ // Bottom-left.
+ {
+ coord: { x: kWidth * 0.25, y: kHeight * 0.75 },
+ exp: expected.bottomLeft,
+ },
+ // Bottom-right.
+ {
+ coord: { x: kWidth * 0.75, y: kHeight * 0.75 },
+ exp: expected.bottomRight,
+ },
+ ]);
});
g.test('importExternalTexture,sampleWithVideoFrameWithVisibleRectParam')
@@ -281,10 +367,13 @@ parameters are present.
.params(u =>
u //
.combineWithParams(checkNonStandardIsZeroCopyIfAvailable())
- .combineWithParams(kVideoExpectations)
+ .combine('videoName', kVideoNames)
+ .combine('dstColorSpace', kPredefinedColorSpace)
)
.fn(async t => {
- const videoElement = getVideoElement(t, t.params.videoName);
+ const { videoName, dstColorSpace } = t.params;
+
+ const videoElement = getVideoElement(t, videoName);
await startPlayingAndWaitForVideo(videoElement, async () => {
const source = await getVideoFrameFromVideoElement(t, videoElement);
@@ -292,15 +381,24 @@ parameters are present.
// All tested videos are derived from an image showing yellow, red, blue or green in each
// quadrant. In this test we crop the video to each quadrant and check that the desired color
// is sampled from each corner of the cropped image.
- const srcVideoHeight = 240;
- const srcVideoWidth = 320;
+ // The visible rect clip applies to the raw decoded frame, which is defined in
+ // terms of the video frame's coded size.
+ const srcVideoHeight = source.codedHeight;
+ const srcVideoWidth = source.codedWidth;
+
+ const srcColorSpace = kVideoInfo[videoName].colorSpace;
+ const presentColors = kVideoExpectedColors[srcColorSpace][dstColorSpace];
+
+ // The test crops the raw decoded video first and then applies any transform, so
+ // the expectation should use coded colors as reference.
+ const expect = kVideoInfo[videoName].coded;
+
const cropParams = [
- // Top left (yellow)
+ // Top left
{
subRect: { x: 0, y: 0, width: srcVideoWidth / 2, height: srcVideoHeight / 2 },
- color: t.params._yellowExpectation,
+ color: convertToUnorm8(presentColors[expect.topLeftColor]),
},
- // Top right (red)
+ // Top right
{
subRect: {
x: srcVideoWidth / 2,
@@ -308,9 +406,9 @@ parameters are present.
width: srcVideoWidth / 2,
height: srcVideoHeight / 2,
},
- color: t.params._redExpectation,
+ color: convertToUnorm8(presentColors[expect.topRightColor]),
},
- // Bottom left (blue)
+ // Bottom left
{
subRect: {
x: 0,
@@ -318,9 +416,9 @@ parameters are present.
width: srcVideoWidth / 2,
height: srcVideoHeight / 2,
},
- color: t.params._blueExpectation,
+ color: convertToUnorm8(presentColors[expect.bottomLeftColor]),
},
- // Bottom right (green)
+ // Bottom right
{
subRect: {
x: srcVideoWidth / 2,
@@ -328,14 +426,12 @@ parameters are present.
width: srcVideoWidth / 2,
height: srcVideoHeight / 2,
},
- color: t.params._greenExpectation,
+ color: convertToUnorm8(presentColors[expect.bottomRightColor]),
},
];
for (const cropParam of cropParams) {
- // MAINTENANCE_TODO: remove cast with TypeScript 4.9.6+.
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const subRect = new VideoFrame(source as any, { visibleRect: cropParam.subRect });
+ const subRect = new VideoFrame(source, { visibleRect: cropParam.subRect });
const colorAttachment = t.device.createTexture({
format: kFormat,
@@ -348,7 +444,8 @@ parameters are present.
t,
t.params.checkNonStandardIsZeroCopy,
subRect,
- pipeline
+ pipeline,
+ dstColorSpace
);
const commandEncoder = t.device.createCommandEncoder();
@@ -387,22 +484,24 @@ g.test('importExternalTexture,compute')
.desc(
`
Tests that we can import an HTMLVideoElement/VideoFrame into a GPUExternalTexture and use it in a
-compute shader, for several combinations of video format and color space.
+compute shader, for several combinations of video format, video color space, and dst color space.
`
)
.params(u =>
u //
.combineWithParams(checkNonStandardIsZeroCopyIfAvailable())
+ .combine('videoName', kVideoNames)
.combine('sourceType', ['VideoElement', 'VideoFrame'] as const)
- .combineWithParams(kVideoExpectations)
+ .combine('dstColorSpace', kPredefinedColorSpace)
)
.fn(async t => {
- const sourceType = t.params.sourceType;
+ const { videoName, sourceType, dstColorSpace } = t.params;
+
if (sourceType === 'VideoFrame' && typeof VideoFrame === 'undefined') {
t.skip('WebCodec is not supported');
}
- const videoElement = getVideoElement(t, t.params.videoName);
+ const videoElement = getVideoElement(t, videoName);
await startPlayingAndWaitForVideo(videoElement, async () => {
const source =
@@ -410,8 +509,8 @@ compute shader, for several combinations of video format and color space.
? await getVideoFrameFromVideoElement(t, videoElement)
: videoElement;
const externalTexture = t.device.importExternalTexture({
- /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
- source: source as any,
+ source,
+ colorSpace: dstColorSpace,
});
if (t.params.checkNonStandardIsZeroCopy) {
expectZeroCopyNonStandard(t, externalTexture);
@@ -422,29 +521,51 @@ compute shader, for several combinations of video format and color space.
usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.STORAGE_BINDING,
});
+
const pipeline = t.device.createComputePipeline({
layout: 'auto',
compute: {
- // Shader loads 4 pixels near each corner, and then store them in a storage texture.
+ // Shader loads one pixel from each quadrant and stores them in a storage texture.
module: t.device.createShaderModule({
code: `
+ override frameWidth : i32 = 0;
+ override frameHeight : i32 = 0;
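+ // Pipeline-overridable constants supply the frame size at pipeline creation,
+ // so one shader handles any video dimensions.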
@group(0) @binding(0) var t : texture_external;
@group(0) @binding(1) var outImage : texture_storage_2d<rgba8unorm, write>;
@compute @workgroup_size(1) fn main() {
- var yellow : vec4<f32> = textureLoad(t, vec2<i32>(80, 60));
+ let coordTopLeft = vec2<i32>(frameWidth / 4, frameHeight / 4);
+ let coordTopRight = vec2<i32>(frameWidth / 4 * 3, frameHeight / 4);
+ let coordBottomLeft = vec2<i32>(frameWidth / 4, frameHeight / 4 * 3);
+ let coordBottomRight = vec2<i32>(frameWidth / 4 * 3, frameHeight / 4 * 3);
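+ // Each coordinate is the center of a quadrant, away from edges where
+ // compression artifacts are likely.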
+ var yellow : vec4<f32> = textureLoad(t, coordTopLeft);
textureStore(outImage, vec2<i32>(0, 0), yellow);
- var red : vec4<f32> = textureLoad(t, vec2<i32>(240, 60));
+ var red : vec4<f32> = textureLoad(t, coordTopRight);
textureStore(outImage, vec2<i32>(0, 1), red);
- var blue : vec4<f32> = textureLoad(t, vec2<i32>(80, 180));
+ var blue : vec4<f32> = textureLoad(t, coordBottomLeft);
textureStore(outImage, vec2<i32>(1, 0), blue);
- var green : vec4<f32> = textureLoad(t, vec2<i32>(240, 180));
+ var green : vec4<f32> = textureLoad(t, coordBottomRight);
textureStore(outImage, vec2<i32>(1, 1), green);
return;
}
`,
}),
entryPoint: 'main',
+
+ // Use the display size of a VideoFrame and the video size of an HTMLVideoElement
+ // as the frame size. These are the presentation sizes, which apply any
+ // transformation in the video metadata.
+ constants: {
+ frameWidth:
+ sourceType === 'VideoFrame'
+ ? (source as VideoFrame).displayWidth
+ : (source as HTMLVideoElement).videoWidth,
+ frameHeight:
+ sourceType === 'VideoFrame'
+ ? (source as VideoFrame).displayHeight
+ : (source as HTMLVideoElement).videoHeight,
+ },
},
});
@@ -464,17 +585,21 @@ compute shader, for several combinations of video format and color space.
pass.end();
t.device.queue.submit([encoder.finish()]);
+ const srcColorSpace = kVideoInfo[videoName].colorSpace;
+ const presentColors = kVideoExpectedColors[srcColorSpace][dstColorSpace];
+
+ // The visible rect is the whole frame; no clipping occurs.
+ const expect = kVideoInfo[videoName].display;
+
t.expectSinglePixelComparisonsAreOkInTexture({ texture: outputTexture }, [
- // Top-left should be yellow.
- { coord: { x: 0, y: 0 }, exp: t.params._yellowExpectation },
- // Top-right should be red.
- { coord: { x: 0, y: 1 }, exp: t.params._redExpectation },
- // Bottom-left should be blue.
- { coord: { x: 1, y: 0 }, exp: t.params._blueExpectation },
- // Bottom-right should be green.
- { coord: { x: 1, y: 1 }, exp: t.params._greenExpectation },
+ // Top-left.
+ { coord: { x: 0, y: 0 }, exp: convertToUnorm8(presentColors[expect.topLeftColor]) },
+ // Top-right.
+ { coord: { x: 0, y: 1 }, exp: convertToUnorm8(presentColors[expect.topRightColor]) },
+ // Bottom-left.
+ { coord: { x: 1, y: 0 }, exp: convertToUnorm8(presentColors[expect.bottomLeftColor]) },
+ // Bottom-right.
+ { coord: { x: 1, y: 1 }, exp: convertToUnorm8(presentColors[expect.bottomRightColor]) },
]);
-
- if (sourceType === 'VideoFrame') (source as VideoFrame).close();
});
});