summaryrefslogtreecommitdiffstats
path: root/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas
diff options
context:
space:
mode:
Diffstat (limited to 'dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas')
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/README.txt1
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/configure.spec.ts426
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/context_creation.spec.ts47
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getCurrentTexture.spec.ts383
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getPreferredCanvasFormat.spec.ts19
-rw-r--r--dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/readbackFromWebGPUCanvas.spec.ts481
6 files changed, 1357 insertions, 0 deletions
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/README.txt b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/README.txt
new file mode 100644
index 0000000000..83194d5b11
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/README.txt
@@ -0,0 +1 @@
+Tests for WebGPU <canvas> and OffscreenCanvas presentation.
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/configure.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/configure.spec.ts
new file mode 100644
index 0000000000..163930e20e
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/configure.spec.ts
@@ -0,0 +1,426 @@
+export const description = `
+Tests for GPUCanvasContext.configure.
+
+TODO:
+- Test colorSpace
+- Test viewFormats
+`;
+
+import { makeTestGroup } from '../../../common/framework/test_group.js';
+import { assert } from '../../../common/util/util.js';
+import { kCanvasTextureFormats, kTextureUsages } from '../../capability_info.js';
+import { GPUConst } from '../../constants.js';
+import {
+ kAllTextureFormats,
+ kFeaturesForFormats,
+ kTextureFormats,
+ filterFormatsByFeature,
+ viewCompatible,
+} from '../../format_info.js';
+import { GPUTest } from '../../gpu_test.js';
+import { kAllCanvasTypes, createCanvas } from '../../util/create_elements.js';
+
+export const g = makeTestGroup(GPUTest);
+
+g.test('defaults')
+ .desc(
+ `
+ Ensure that the defaults for GPUCanvasConfiguration are correct.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ )
+ .fn(t => {
+ const { canvasType } = t.params;
+ const canvas = createCanvas(t, canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ ctx.configure({
+ device: t.device,
+ format: 'rgba8unorm',
+ });
+
+ const currentTexture = ctx.getCurrentTexture();
+ t.expect(currentTexture.format === 'rgba8unorm');
+ t.expect(currentTexture.usage === GPUTextureUsage.RENDER_ATTACHMENT);
+ t.expect(currentTexture.dimension === '2d');
+ t.expect(currentTexture.width === canvas.width);
+ t.expect(currentTexture.height === canvas.height);
+ t.expect(currentTexture.depthOrArrayLayers === 1);
+ t.expect(currentTexture.mipLevelCount === 1);
+ t.expect(currentTexture.sampleCount === 1);
+ });
+
+g.test('device')
+ .desc(
+ `
+ Ensure that configure reacts appropriately to various device states.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ )
+ .fn(t => {
+ const { canvasType } = t.params;
+ const canvas = createCanvas(t, canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ // Calling configure without a device should throw a TypeError.
+ t.shouldThrow('TypeError', () => {
+ ctx.configure({
+ format: 'rgba8unorm',
+ } as GPUCanvasConfiguration);
+ });
+
+ // Device is not configured, so getCurrentTexture will throw an InvalidStateError.
+ t.shouldThrow('InvalidStateError', () => {
+ ctx.getCurrentTexture();
+ });
+
+ // Calling configure with a device should succeed.
+ ctx.configure({
+ device: t.device,
+ format: 'rgba8unorm',
+ });
+
+ // getCurrentTexture will succeed with a valid device.
+ ctx.getCurrentTexture();
+
+ // Unconfiguring should cause the device to be cleared.
+ ctx.unconfigure();
+ t.shouldThrow('InvalidStateError', () => {
+ ctx.getCurrentTexture();
+ });
+
+ // Should be able to successfully configure again after unconfiguring.
+ ctx.configure({
+ device: t.device,
+ format: 'rgba8unorm',
+ });
+ ctx.getCurrentTexture();
+ });
+
+g.test('format')
+ .desc(
+ `
+ Ensure that only valid texture formats are allowed when calling configure.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .combine('format', kAllTextureFormats)
+ )
+ .beforeAllSubcases(t => {
+ t.selectDeviceForTextureFormatOrSkipTestCase(t.params.format);
+ })
+ .fn(t => {
+ const { canvasType, format } = t.params;
+ const canvas = createCanvas(t, canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ // Would prefer to use kCanvasTextureFormats.includes(format), but that's giving TS errors.
+ let validFormat = false;
+ for (const canvasFormat of kCanvasTextureFormats) {
+ if (format === canvasFormat) {
+ validFormat = true;
+ break;
+ }
+ }
+
+ t.expectValidationError(() => {
+ ctx.configure({
+ device: t.device,
+ format,
+ });
+ }, !validFormat);
+
+ t.expectValidationError(() => {
+ // Should always return a texture, whether the configured format was valid or not.
+ const currentTexture = ctx.getCurrentTexture();
+ t.expect(currentTexture instanceof GPUTexture);
+ }, !validFormat);
+ });
+
+g.test('usage')
+ .desc(
+ `
+ Ensure that getCurrentTexture returns a texture with the configured usages.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .beginSubcases()
+ .expand('usage', () => {
+ const usageSet = new Set<number>();
+ for (const usage0 of kTextureUsages) {
+ for (const usage1 of kTextureUsages) {
+ usageSet.add(usage0 | usage1);
+ }
+ }
+ return usageSet;
+ })
+ )
+ .fn(t => {
+ const { canvasType, usage } = t.params;
+ const canvas = createCanvas(t, canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ ctx.configure({
+ device: t.device,
+ format: 'rgba8unorm',
+ usage,
+ });
+
+ const currentTexture = ctx.getCurrentTexture();
+ t.expect(currentTexture instanceof GPUTexture);
+ t.expect(currentTexture.usage === usage);
+
+ // Try to use the texture with the given usage
+
+ if (usage & GPUConst.TextureUsage.RENDER_ATTACHMENT) {
+ const encoder = t.device.createCommandEncoder();
+ const pass = encoder.beginRenderPass({
+ colorAttachments: [
+ {
+ view: currentTexture.createView(),
+ clearValue: [1.0, 0.0, 0.0, 1.0],
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
+ });
+ pass.end();
+ t.device.queue.submit([encoder.finish()]);
+ }
+
+ if (usage & GPUConst.TextureUsage.TEXTURE_BINDING) {
+ const bgl = t.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.FRAGMENT,
+ texture: {},
+ },
+ ],
+ });
+
+ t.device.createBindGroup({
+ layout: bgl,
+ entries: [
+ {
+ binding: 0,
+ resource: currentTexture.createView(),
+ },
+ ],
+ });
+ }
+
+ if (usage & GPUConst.TextureUsage.STORAGE_BINDING) {
+ const bgl = t.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.FRAGMENT,
+ storageTexture: { access: 'write-only', format: currentTexture.format },
+ },
+ ],
+ });
+
+ t.device.createBindGroup({
+ layout: bgl,
+ entries: [
+ {
+ binding: 0,
+ resource: currentTexture.createView(),
+ },
+ ],
+ });
+ }
+
+ if (usage & GPUConst.TextureUsage.COPY_DST) {
+ const rgbaData = new Uint8Array([255, 0, 0, 255]);
+
+ t.device.queue.writeTexture({ texture: currentTexture }, rgbaData, {}, [1, 1, 1]);
+ }
+
+ if (usage & GPUConst.TextureUsage.COPY_SRC) {
+ const size = [currentTexture.width, currentTexture.height, 1];
+ const dstTexture = t.device.createTexture({
+ format: currentTexture.format,
+ usage: GPUTextureUsage.COPY_DST,
+ size,
+ });
+
+ const encoder = t.device.createCommandEncoder();
+ encoder.copyTextureToTexture({ texture: currentTexture }, { texture: dstTexture }, size);
+ t.device.queue.submit([encoder.finish()]);
+ }
+ });
+
+g.test('alpha_mode')
+ .desc(
+ `
+ Ensure that all valid alphaMode values are allowed when calling configure.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .beginSubcases()
+ .combine('alphaMode', ['opaque', 'premultiplied'] as const)
+ )
+ .fn(t => {
+ const { canvasType, alphaMode } = t.params;
+ const canvas = createCanvas(t, canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ ctx.configure({
+ device: t.device,
+ format: 'rgba8unorm',
+ alphaMode,
+ });
+
+ const currentTexture = ctx.getCurrentTexture();
+ t.expect(currentTexture instanceof GPUTexture);
+ });
+
+g.test('size_zero_before_configure')
+ .desc(`Ensure a validation error is raised in configure() if the size of the canvas is zero.`)
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .combine('zeroDimension', ['width', 'height'] as const)
+ )
+ .fn(t => {
+ const { canvasType, zeroDimension } = t.params;
+ const canvas = createCanvas(t, canvasType, 1, 1);
+ canvas[zeroDimension] = 0;
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ // Validation error, the canvas size is 0 which doesn't make a valid GPUTextureDescriptor.
+ t.expectValidationError(() => {
+ ctx.configure({
+ device: t.device,
+ format: 'bgra8unorm',
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+ });
+
+ canvas[zeroDimension] = 1;
+
+ // The size being incorrect doesn't make for an invalid configuration. Now that it is fixed
+ // getting textures from the canvas should work.
+ const currentTexture = ctx.getCurrentTexture();
+
+ // Try rendering to it even!
+ const encoder = t.device.createCommandEncoder();
+ const pass = encoder.beginRenderPass({
+ colorAttachments: [
+ {
+ view: currentTexture.createView(),
+ clearValue: [1.0, 0.0, 0.0, 1.0],
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
+ });
+ pass.end();
+ t.device.queue.submit([encoder.finish()]);
+ });
+
+g.test('size_zero_after_configure')
+ .desc(
+ `Ensure a validation error is raised after configure() if the size of the canvas becomes zero.`
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .combine('zeroDimension', ['width', 'height'] as const)
+ )
+ .fn(t => {
+ const { canvasType, zeroDimension } = t.params;
+ const canvas = createCanvas(t, canvasType, 1, 1);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ ctx.configure({
+ device: t.device,
+ format: 'bgra8unorm',
+ usage: GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+
+ canvas[zeroDimension] = 0;
+
+ // The size is incorrect, we should be getting an error texture and a validation error.
+ let currentTexture: GPUTexture;
+ t.expectValidationError(() => {
+ currentTexture = ctx.getCurrentTexture();
+ });
+
+ t.expect(currentTexture![zeroDimension] === 0);
+
+ // Using the texture should produce a validation error.
+ t.expectValidationError(() => {
+ currentTexture.createView();
+ });
+ });
+
+g.test('viewFormats')
+ .desc(
+ `Test the validation that viewFormats are compatible with the format (for all canvas format / view formats)`
+ )
+ .params(u =>
+ u
+ .combine('canvasType', kAllCanvasTypes)
+ .combine('format', kCanvasTextureFormats)
+ .combine('viewFormatFeature', kFeaturesForFormats)
+ .beginSubcases()
+ .expand('viewFormat', ({ viewFormatFeature }) =>
+ filterFormatsByFeature(viewFormatFeature, kTextureFormats)
+ )
+ )
+ .beforeAllSubcases(t => {
+ t.selectDeviceOrSkipTestCase([t.params.viewFormatFeature]);
+ })
+ .fn(t => {
+ const { canvasType, format, viewFormat } = t.params;
+
+ t.skipIfTextureFormatNotSupported(viewFormat);
+
+ const canvas = createCanvas(t, canvasType, 1, 1);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ const compatible = viewCompatible(format, viewFormat);
+
+ // Test configure() produces an error if the formats aren't compatible.
+ t.expectValidationError(() => {
+ ctx.configure({
+ device: t.device,
+ format,
+ viewFormats: [viewFormat],
+ });
+ }, !compatible);
+
+ // Likewise for getCurrentTexture().
+ let currentTexture: GPUTexture;
+ t.expectValidationError(() => {
+ currentTexture = ctx.getCurrentTexture();
+ }, !compatible);
+
+ // The returned texture is an error texture.
+ t.expectValidationError(() => {
+ currentTexture.createView();
+ }, !compatible);
+ });
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/context_creation.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/context_creation.spec.ts
new file mode 100644
index 0000000000..3f016cffcd
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/context_creation.spec.ts
@@ -0,0 +1,47 @@
+export const description = `
+Tests for canvas context creation.
+
+Note there are no context creation attributes for WebGPU (as of this writing).
+Options are configured in configure() instead.
+`;
+
+import { Fixture } from '../../../common/framework/fixture.js';
+import { makeTestGroup } from '../../../common/framework/test_group.js';
+
+export const g = makeTestGroup(Fixture);
+
+g.test('return_type')
+ .desc(
+ `Test the return type of getContext for WebGPU.
+
+ TODO: Test OffscreenCanvas made from transferControlToOffscreen.`
+ )
+ .params(u =>
+ u //
+ .combine('offscreen', [false, true])
+ .beginSubcases()
+ .combine('attributes', [undefined, {}])
+ )
+ .fn(t => {
+ let canvas: HTMLCanvasElement | OffscreenCanvas;
+ if (t.params.offscreen) {
+ if (typeof OffscreenCanvas === 'undefined') {
+ // Skip if the current context doesn't have OffscreenCanvas (e.g. Node).
+ t.skip('OffscreenCanvas is not available in this context');
+ }
+
+ canvas = new OffscreenCanvas(10, 10);
+ } else {
+ if (typeof document === 'undefined') {
+ // Skip if there is no document (Workers, Node)
+ t.skip('DOM is not available to create canvas element');
+ }
+
+ canvas = document.createElement('canvas', t.params.attributes);
+ canvas.width = 10;
+ canvas.height = 10;
+ }
+
+ const ctx = canvas.getContext('webgpu');
+ t.expect(ctx instanceof GPUCanvasContext);
+ });
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getCurrentTexture.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getCurrentTexture.spec.ts
new file mode 100644
index 0000000000..609dacb907
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getCurrentTexture.spec.ts
@@ -0,0 +1,383 @@
+export const description = `
+Tests for GPUCanvasContext.getCurrentTexture.
+`;
+
+import { SkipTestCase } from '../../../common/framework/fixture.js';
+import { makeTestGroup } from '../../../common/framework/test_group.js';
+import { timeout } from '../../../common/util/timeout.js';
+import { assert, unreachable } from '../../../common/util/util.js';
+import { GPUTest } from '../../gpu_test.js';
+import { kAllCanvasTypes, createCanvas, CanvasType } from '../../util/create_elements.js';
+
+const kFormat = 'bgra8unorm';
+
+class GPUContextTest extends GPUTest {
+ initCanvasContext(canvasType: CanvasType = 'onscreen'): GPUCanvasContext {
+ const canvas = createCanvas(this, canvasType, 2, 2);
+ if (canvasType === 'onscreen') {
+ // To make sure onscreen canvas are visible
+ const onscreencanvas = canvas as HTMLCanvasElement;
+ onscreencanvas.style.position = 'fixed';
+ onscreencanvas.style.top = '0';
+ onscreencanvas.style.left = '0';
+    // Set it to transparent so that if multiple canvases are created, they are all still visible.
+ onscreencanvas.style.opacity = '50%';
+ document.body.appendChild(onscreencanvas);
+ this.trackForCleanup({
+ close() {
+ document.body.removeChild(onscreencanvas);
+ },
+ });
+ }
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ ctx.configure({
+ device: this.device,
+ format: kFormat,
+ usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
+ });
+
+ return ctx;
+ }
+}
+
+export const g = makeTestGroup(GPUContextTest);
+
+g.test('configured')
+ .desc(
+ `Checks that calling getCurrentTexture requires the context to be configured first, and
+ that each call to configure causes getCurrentTexture to return a new texture.`
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ )
+ .fn(t => {
+ const canvas = createCanvas(t, t.params.canvasType, 2, 2);
+ const ctx = canvas.getContext('webgpu');
+ assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');
+
+ // Calling getCurrentTexture prior to configuration should throw an InvalidStateError exception.
+ t.shouldThrow('InvalidStateError', () => {
+ ctx.getCurrentTexture();
+ });
+
+ // Once the context has been configured getCurrentTexture can be called.
+ ctx.configure({
+ device: t.device,
+ format: kFormat,
+ });
+
+ let prevTexture = ctx.getCurrentTexture();
+
+ // Calling configure again with different values will change the texture returned.
+ ctx.configure({
+ device: t.device,
+ format: 'bgra8unorm',
+ });
+
+ let currentTexture = ctx.getCurrentTexture();
+ t.expect(prevTexture !== currentTexture);
+ prevTexture = currentTexture;
+
+ // Calling configure again with the same values will still change the texture returned.
+ ctx.configure({
+ device: t.device,
+ format: 'bgra8unorm',
+ });
+
+ currentTexture = ctx.getCurrentTexture();
+ t.expect(prevTexture !== currentTexture);
+ prevTexture = currentTexture;
+
+ // Calling getCurrentTexture after calling unconfigure should throw an InvalidStateError exception.
+ ctx.unconfigure();
+
+ t.shouldThrow('InvalidStateError', () => {
+ ctx.getCurrentTexture();
+ });
+ });
+
+g.test('single_frames')
+ .desc(`Checks that the value of getCurrentTexture is consistent within a single frame.`)
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ )
+ .fn(t => {
+ const ctx = t.initCanvasContext(t.params.canvasType);
+ const frameTexture = ctx.getCurrentTexture();
+
+ // Calling getCurrentTexture a second time returns the same texture.
+ t.expect(frameTexture === ctx.getCurrentTexture());
+
+ const encoder = t.device.createCommandEncoder();
+ const pass = encoder.beginRenderPass({
+ colorAttachments: [
+ {
+ view: frameTexture.createView(),
+ clearValue: [1.0, 0.0, 0.0, 1.0],
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
+ });
+ pass.end();
+ t.device.queue.submit([encoder.finish()]);
+
+ // Calling getCurrentTexture after performing some work on the texture returns the same texture.
+ t.expect(frameTexture === ctx.getCurrentTexture());
+
+ // Ensure that getCurrentTexture does not clear the texture.
+ t.expectSingleColor(frameTexture, frameTexture.format, {
+ size: [frameTexture.width, frameTexture.height, 1],
+ exp: { R: 1, G: 0, B: 0, A: 1 },
+ });
+
+ frameTexture.destroy();
+
+ // Calling getCurrentTexture after destroying the texture still returns the same texture.
+ t.expect(frameTexture === ctx.getCurrentTexture());
+ });
+
+g.test('multiple_frames')
+ .desc(`Checks that the value of getCurrentTexture differs across multiple frames.`)
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .beginSubcases()
+ .combine('clearTexture', [true, false])
+ )
+ .beforeAllSubcases(t => {
+ const { canvasType } = t.params;
+ if (canvasType === 'offscreen' && !('transferToImageBitmap' in OffscreenCanvas.prototype)) {
+ throw new SkipTestCase('transferToImageBitmap not supported');
+ }
+ })
+ .fn(t => {
+ const { canvasType, clearTexture } = t.params;
+
+ return new Promise(resolve => {
+ const ctx = t.initCanvasContext(canvasType);
+ let prevTexture: GPUTexture | undefined;
+ let frameCount = 0;
+
+ function frameCheck() {
+ const currentTexture = ctx.getCurrentTexture();
+
+ if (prevTexture) {
+ // Ensure that each frame a new texture object is returned.
+ t.expect(currentTexture !== prevTexture);
+
+ // Ensure that texture contents are transparent black.
+ t.expectSingleColor(currentTexture, currentTexture.format, {
+ size: [currentTexture.width, currentTexture.height, 1],
+ exp: { R: 0, G: 0, B: 0, A: 0 },
+ });
+ }
+
+ if (clearTexture) {
+ // Clear the texture to test that texture contents don't carry over from frame to frame.
+ const encoder = t.device.createCommandEncoder();
+ const pass = encoder.beginRenderPass({
+ colorAttachments: [
+ {
+ view: currentTexture.createView(),
+ clearValue: [1.0, 0.0, 0.0, 1.0],
+ loadOp: 'clear',
+ storeOp: 'store',
+ },
+ ],
+ });
+ pass.end();
+ t.device.queue.submit([encoder.finish()]);
+ }
+
+ prevTexture = currentTexture;
+
+ if (frameCount++ < 5) {
+ // Which method will be used to begin a new "frame"?
+ switch (canvasType) {
+ case 'onscreen':
+ requestAnimationFrame(frameCheck);
+ break;
+ case 'offscreen': {
+ (ctx.canvas as OffscreenCanvas).transferToImageBitmap();
+ frameCheck();
+ break;
+ }
+ default:
+ unreachable();
+ }
+ } else {
+ resolve();
+ }
+ }
+
+ // Call frameCheck for the first time from requestAnimationFrame
+ // To make sure two frameChecks are run in different frames for onscreen canvas.
+ // offscreen canvas doesn't care.
+ requestAnimationFrame(frameCheck);
+ });
+ });
+
+g.test('resize')
+ .desc(`Checks the value of getCurrentTexture differs when the canvas is resized.`)
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ )
+ .fn(t => {
+ const ctx = t.initCanvasContext(t.params.canvasType);
+ let prevTexture = ctx.getCurrentTexture();
+
+ // Trigger a resize by changing the width.
+ ctx.canvas.width = 4;
+
+ // When the canvas resizes the texture returned by getCurrentTexture should immediately begin
+ // returning a new texture matching the update dimensions.
+ let currentTexture = ctx.getCurrentTexture();
+ t.expect(prevTexture !== currentTexture);
+ t.expect(currentTexture.width === ctx.canvas.width);
+ t.expect(currentTexture.height === ctx.canvas.height);
+
+ // The width and height of the previous texture should remain unchanged.
+ t.expect(prevTexture.width === 2);
+ t.expect(prevTexture.height === 2);
+ prevTexture = currentTexture;
+
+ // Ensure that texture contents are transparent black.
+ t.expectSingleColor(currentTexture, currentTexture.format, {
+ size: [currentTexture.width, currentTexture.height, 1],
+ exp: { R: 0, G: 0, B: 0, A: 0 },
+ });
+
+ // Trigger a resize by changing the height.
+ ctx.canvas.height = 4;
+
+ // Check to ensure the texture is resized again.
+ currentTexture = ctx.getCurrentTexture();
+ t.expect(prevTexture !== currentTexture);
+ t.expect(currentTexture.width === ctx.canvas.width);
+ t.expect(currentTexture.height === ctx.canvas.height);
+ t.expect(prevTexture.width === 4);
+ t.expect(prevTexture.height === 2);
+ prevTexture = currentTexture;
+
+ // Ensure that texture contents are transparent black.
+ t.expectSingleColor(currentTexture, currentTexture.format, {
+ size: [currentTexture.width, currentTexture.height, 1],
+ exp: { R: 0, G: 0, B: 0, A: 0 },
+ });
+
+ // Simply setting the canvas width and height values to their current values should not trigger
+ // a change in the texture.
+ ctx.canvas.width = 4;
+ ctx.canvas.height = 4;
+
+ currentTexture = ctx.getCurrentTexture();
+ t.expect(prevTexture === currentTexture);
+ });
+
+g.test('expiry')
+ .desc(
+ `
+Test automatic WebGPU canvas texture expiry on all canvas types with the following requirements:
+- getCurrentTexture returns the same texture object until the next task:
+ - after previous frame update the rendering
+ - before current frame update the rendering
+ - in a microtask off the current frame task
+- getCurrentTexture returns a new texture object and the old texture object becomes invalid
+ as soon as possible after HTML update the rendering.
+
+TODO: test more canvas types, and ways to update the rendering
+- if on a different thread, expiry happens when the worker updates its rendering (worker "rPAF") OR transferToImageBitmap is called
+- [draw, transferControlToOffscreen, then canvas is displayed] on either {main thread, or transferred to worker}
+- [draw, canvas is displayed, then transferControlToOffscreen] on either {main thread, or transferred to worker}
+- reftests for the above 2 (what gets displayed when the canvas is displayed)
+- with canvas element added to DOM or not (applies to other canvas tests as well)
+ - canvas is added to DOM after being rendered
+ - canvas is already in DOM but becomes visible after being rendered
+ `
+ )
+ .params(u =>
+ u //
+ .combine('canvasType', kAllCanvasTypes)
+ .combine('prevFrameCallsite', ['runInNewCanvasFrame', 'requestAnimationFrame'] as const)
+ .combine('getCurrentTextureAgain', [true, false] as const)
+ )
+ .fn(t => {
+ const { canvasType, prevFrameCallsite, getCurrentTextureAgain } = t.params;
+ const ctx = t.initCanvasContext(t.params.canvasType);
+ // Create a bindGroupLayout to test invalid texture view usage later.
+ const bgl = t.device.createBindGroupLayout({
+ entries: [
+ {
+ binding: 0,
+ visibility: GPUShaderStage.COMPUTE,
+ texture: {},
+ },
+ ],
+ });
+
+ // The fn is called immediately after previous frame updating the rendering.
+ // Polyfill by calling the callback by setTimeout, in the requestAnimationFrame callback (for onscreen canvas)
+ // or after transferToImageBitmap (for offscreen canvas).
+ function runInNewCanvasFrame(fn: () => void) {
+ switch (canvasType) {
+ case 'onscreen':
+ requestAnimationFrame(() => timeout(fn));
+ break;
+ case 'offscreen':
+ // for offscreen canvas, after calling transferToImageBitmap, we are in a new frame immediately
+ (ctx.canvas as OffscreenCanvas).transferToImageBitmap();
+ fn();
+ break;
+ default:
+ unreachable();
+ }
+ }
+
+ function checkGetCurrentTexture() {
+ // Call getCurrentTexture on previous frame.
+ const prevTexture = ctx.getCurrentTexture();
+
+ // Call getCurrentTexture immediately after the frame, the texture object should stay the same.
+ queueMicrotask(() => {
+ if (getCurrentTextureAgain) {
+ t.expect(prevTexture === ctx.getCurrentTexture());
+ }
+
+ // Call getCurrentTexture in a new frame.
+        // It should expire the previous texture object and return a new texture object by then.
+        // Call runInNewCanvasFrame in the microtask to make sure the new frame runs after the getCurrentTexture in the microtask for offscreen canvas.
+ runInNewCanvasFrame(() => {
+ if (getCurrentTextureAgain) {
+ t.expect(prevTexture !== ctx.getCurrentTexture());
+ }
+
+          // Even when prevTexture has expired, createView should still succeed anyway.
+ const prevTextureView = prevTexture.createView();
+ // Using the invalid view should fail if it expires.
+ t.expectValidationError(() => {
+ t.device.createBindGroup({
+ layout: bgl,
+ entries: [{ binding: 0, resource: prevTextureView }],
+ });
+ });
+ });
+ });
+ }
+
+ switch (prevFrameCallsite) {
+ case 'runInNewCanvasFrame':
+ runInNewCanvasFrame(checkGetCurrentTexture);
+ break;
+ case 'requestAnimationFrame':
+ requestAnimationFrame(checkGetCurrentTexture);
+ break;
+ default:
+ break;
+ }
+ });
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getPreferredCanvasFormat.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getPreferredCanvasFormat.spec.ts
new file mode 100644
index 0000000000..cd582b4f3a
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/getPreferredCanvasFormat.spec.ts
@@ -0,0 +1,19 @@
+export const description = `
+Tests for navigator.gpu.getPreferredCanvasFormat.
+`;
+
+import { Fixture } from '../../../common/framework/fixture.js';
+import { makeTestGroup } from '../../../common/framework/test_group.js';
+
+export const g = makeTestGroup(Fixture);
+
+g.test('value')
+ .desc(
+ `
+ Ensure getPreferredCanvasFormat returns one of the valid values.
+ `
+ )
+ .fn(t => {
+ const preferredFormat = navigator.gpu.getPreferredCanvasFormat();
+ t.expect(preferredFormat === 'bgra8unorm' || preferredFormat === 'rgba8unorm');
+ });
diff --git a/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/readbackFromWebGPUCanvas.spec.ts b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/readbackFromWebGPUCanvas.spec.ts
new file mode 100644
index 0000000000..7fd7142f00
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/webgpu/web_platform/canvas/readbackFromWebGPUCanvas.spec.ts
@@ -0,0 +1,481 @@
+export const description = `
+Tests for readback from WebGPU Canvas.
+
+This includes testing that colorSpace makes it through from the WebGPU canvas
+to the form of copy (toDataURL, toBlob, ImageBitmap, drawImage)
+
+The color space support is tested by drawing the readback form of the WebGPU
+canvas into a 2D canvas of a different color space via drawImage (A). Another
+2D canvas is created with the same source data and color space as the WebGPU
+canvas and also drawn into another 2D canvas of a different color space (B).
+The contents of A and B should match.
+
+TODO: implement all canvas types, see TODO on kCanvasTypes.
+`;
+
+import { makeTestGroup } from '../../../common/framework/test_group.js';
+import { assert, raceWithRejectOnTimeout, unreachable } from '../../../common/util/util.js';
+import {
+ kCanvasAlphaModes,
+ kCanvasColorSpaces,
+ kCanvasTextureFormats,
+} from '../../capability_info.js';
+import { GPUTest } from '../../gpu_test.js';
+import { checkElementsEqual } from '../../util/check_contents.js';
+import {
+ kAllCanvasTypes,
+ CanvasType,
+ createCanvas,
+ createOnscreenCanvas,
+} from '../../util/create_elements.js';
+
export const g = makeTestGroup(GPUTest);

// We choose 0x66 as the value for each color and alpha channel:
// 0x66 / 0xff = 0.4
// Given a pixel value of RGBA = (0x66, 0, 0, 0x66) in the source WebGPU canvas,
// - for alphaMode = opaque, the copy output should be RGBA = (0x66, 0, 0, 0xff)
// - for alphaMode = premultiplied, the copy output should be RGBA = (0xff, 0, 0, 0x66)
const kPixelValue = 0x66;
const kPixelValueFloat = 0x66 / 0xff; // 0.4

// The source is a 2x2 rectangle of four distinctly-colored pixels:
// blue: top-left;
// green: top-right;
// red: bottom-left;
// yellow: bottom-right;
// Keyed by the WebGPU canvas alphaMode; each entry is the expected RGBA
// readback, row-major starting at the top-left pixel.
const expect = {
  /* prettier-ignore */
  'opaque': new Uint8ClampedArray([
    0x00, 0x00, kPixelValue, 0xff, // blue
    0x00, kPixelValue, 0x00, 0xff, // green
    kPixelValue, 0x00, 0x00, 0xff, // red
    kPixelValue, kPixelValue, 0x00, 0xff, // yellow
  ]),
  /* prettier-ignore */
  'premultiplied': new Uint8ClampedArray([
    0x00, 0x00, 0xff, kPixelValue, // blue
    0x00, 0xff, 0x00, kPixelValue, // green
    0xff, 0x00, 0x00, kPixelValue, // red
    0xff, 0xff, 0x00, kPixelValue, // yellow
  ]),
};
+
/**
 * Creates a 2x2 canvas of the given `canvasType`, configures its 'webgpu'
 * context with `format`/`alphaMode`/`colorSpace`, and writes four distinct
 * pixels into the current texture:
 *   (0,0) blue, (1,0) green, (0,1) red, (1,1) yellow,
 * each using kPixelValueFloat for its set color channels and for alpha.
 * Returns the canvas.
 */
function initWebGPUCanvasContent<T extends CanvasType>(
  t: GPUTest,
  format: GPUTextureFormat,
  alphaMode: GPUCanvasAlphaMode,
  colorSpace: PredefinedColorSpace,
  canvasType: T
) {
  const canvas = createCanvas(t, canvasType, 2, 2);
  const ctx = canvas.getContext('webgpu');
  assert(ctx instanceof GPUCanvasContext, 'Failed to get WebGPU context from canvas');

  ctx.configure({
    device: t.device,
    format,
    usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.COPY_DST,
    alphaMode,
    colorSpace,
  });

  const canvasTexture = ctx.getCurrentTexture();
  // 1x1 scratch texture: each destination pixel is produced by clearing this
  // texture to the desired color and copying it into the canvas texture.
  const tempTexture = t.device.createTexture({
    size: { width: 1, height: 1, depthOrArrayLayers: 1 },
    format,
    usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.RENDER_ATTACHMENT,
  });
  const tempTextureView = tempTexture.createView();
  const encoder = t.device.createCommandEncoder();

  const clearOnePixel = (origin: GPUOrigin3D, color: GPUColor) => {
    // An empty render pass with loadOp 'clear' fills tempTexture with `color`.
    const pass = encoder.beginRenderPass({
      colorAttachments: [
        { view: tempTextureView, clearValue: color, loadOp: 'clear', storeOp: 'store' },
      ],
    });
    pass.end();
    // Copy the single cleared texel to `origin` in the canvas texture.
    encoder.copyTextureToTexture(
      { texture: tempTexture },
      { texture: canvasTexture, origin },
      { width: 1, height: 1 }
    );
  };

  clearOnePixel([0, 0], [0, 0, kPixelValueFloat, kPixelValueFloat]);
  clearOnePixel([1, 0], [0, kPixelValueFloat, 0, kPixelValueFloat]);
  clearOnePixel([0, 1], [kPixelValueFloat, 0, 0, kPixelValueFloat]);
  clearOnePixel([1, 1], [kPixelValueFloat, kPixelValueFloat, 0, kPixelValueFloat]);

  t.device.queue.submit([encoder.finish()]);
  tempTexture.destroy();

  return canvas;
}
+
+function drawImageSourceIntoCanvas(
+ t: GPUTest,
+ image: CanvasImageSource,
+ colorSpace: PredefinedColorSpace
+) {
+ const canvas: HTMLCanvasElement = createOnscreenCanvas(t, 2, 2);
+ const ctx = canvas.getContext('2d', { colorSpace });
+ assert(ctx !== null);
+ ctx.drawImage(image, 0, 0);
+ return ctx;
+}
+
+function checkImageResultWithSameColorSpaceCanvas(
+ t: GPUTest,
+ image: CanvasImageSource,
+ sourceColorSpace: PredefinedColorSpace,
+ expect: Uint8ClampedArray
+) {
+ const ctx = drawImageSourceIntoCanvas(t, image, sourceColorSpace);
+ readPixelsFrom2DCanvasAndCompare(t, ctx, expect);
+}
+
/**
 * Verifies `image` through a color space conversion: draws it into a 2D canvas
 * of a *different* color space, then builds the expected result by pushing the
 * same `sourceData` through a pure-2D-canvas path with the same conversion,
 * and compares the two readbacks.
 */
function checkImageResultWithDifferentColorSpaceCanvas(
  t: GPUTest,
  image: CanvasImageSource,
  sourceColorSpace: PredefinedColorSpace,
  sourceData: Uint8ClampedArray
) {
  // Pick the opposite of the two supported color spaces.
  const destinationColorSpace = sourceColorSpace === 'srgb' ? 'display-p3' : 'srgb';

  // draw the WebGPU derived data into a canvas
  const fromWebGPUCtx = drawImageSourceIntoCanvas(t, image, destinationColorSpace);

  // create a 2D canvas with the same source data in the same color space as the WebGPU
  // canvas
  const source2DCanvas: HTMLCanvasElement = createOnscreenCanvas(t, 2, 2);
  const source2DCtx = source2DCanvas.getContext('2d', { colorSpace: sourceColorSpace });
  assert(source2DCtx !== null);
  const imgData = source2DCtx.getImageData(0, 0, 2, 2);
  imgData.data.set(sourceData);
  source2DCtx.putImageData(imgData, 0, 0);

  // draw the source 2D canvas into another 2D canvas with the destination color space and
  // then pull out the data. This result should be the same as the WebGPU derived data
  // written to a 2D canvas of the same destination color space.
  const from2DCtx = drawImageSourceIntoCanvas(t, source2DCanvas, destinationColorSpace);
  const expect = from2DCtx.getImageData(0, 0, 2, 2).data;

  readPixelsFrom2DCanvasAndCompare(t, fromWebGPUCtx, expect);
}
+
/**
 * Full readback check for a WebGPU-derived image: verifies it both without a
 * color space conversion (same-space destination canvas) and with one
 * (different-space destination canvas).
 */
function checkImageResult(
  t: GPUTest,
  image: CanvasImageSource,
  sourceColorSpace: PredefinedColorSpace,
  expect: Uint8ClampedArray
) {
  checkImageResultWithSameColorSpaceCanvas(t, image, sourceColorSpace, expect);
  // `expect` doubles as the source data for the reference 2D-canvas path.
  checkImageResultWithDifferentColorSpaceCanvas(t, image, sourceColorSpace, expect);
}
+
/**
 * Reads back every pixel of `ctx`'s canvas via getImageData and expects the
 * RGBA bytes to equal `expect` exactly.
 */
function readPixelsFrom2DCanvasAndCompare(
  t: GPUTest,
  ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D,
  expect: Uint8ClampedArray
) {
  const actual = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height).data;

  t.expectOK(checkElementsEqual(actual, expect));
}
+
+g.test('onscreenCanvas,snapshot')
+ .desc(
+ `
+ Ensure snapshot of canvas with WebGPU context is correct with
+ - various WebGPU canvas texture formats
+ - WebGPU canvas alpha mode = {"opaque", "premultiplied"}
+ - colorSpace = {"srgb", "display-p3"}
+ - snapshot methods = {convertToBlob, transferToImageBitmap, createImageBitmap}
+
+ TODO: Snapshot canvas to jpeg, webp and other mime type and
+ different quality. Maybe we should test them in reftest.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('format', kCanvasTextureFormats)
+ .combine('alphaMode', kCanvasAlphaModes)
+ .combine('colorSpace', kCanvasColorSpaces)
+ .combine('snapshotType', ['toDataURL', 'toBlob', 'imageBitmap'])
+ )
+ .fn(async t => {
+ const canvas = initWebGPUCanvasContent(
+ t,
+ t.params.format,
+ t.params.alphaMode,
+ t.params.colorSpace,
+ 'onscreen'
+ );
+
+ let snapshot: HTMLImageElement | ImageBitmap;
+ switch (t.params.snapshotType) {
+ case 'toDataURL': {
+ const url = canvas.toDataURL();
+ const img = new Image(canvas.width, canvas.height);
+ img.src = url;
+ await raceWithRejectOnTimeout(img.decode(), 5000, 'load image timeout');
+ snapshot = img;
+ break;
+ }
+ case 'toBlob': {
+ const blobFromCanvas = new Promise(resolve => {
+ canvas.toBlob(blob => resolve(blob));
+ });
+ const blob = (await blobFromCanvas) as Blob;
+ const url = URL.createObjectURL(blob);
+ const img = new Image(canvas.width, canvas.height);
+ img.src = url;
+ await raceWithRejectOnTimeout(img.decode(), 5000, 'load image timeout');
+ snapshot = img;
+ break;
+ }
+ case 'imageBitmap': {
+ snapshot = await createImageBitmap(canvas);
+ break;
+ }
+ default:
+ unreachable();
+ }
+
+ checkImageResult(t, snapshot, t.params.colorSpace, expect[t.params.alphaMode]);
+ });
+
+g.test('offscreenCanvas,snapshot')
+ .desc(
+ `
+ Ensure snapshot of offscreenCanvas with WebGPU context is correct with
+ - various WebGPU canvas texture formats
+ - WebGPU canvas alpha mode = {"opaque", "premultiplied"}
+ - colorSpace = {"srgb", "display-p3"}
+ - snapshot methods = {convertToBlob, transferToImageBitmap, createImageBitmap}
+
+ TODO: Snapshot offscreenCanvas to jpeg, webp and other mime type and
+ different quality. Maybe we should test them in reftest.
+ `
+ )
+ .params(u =>
+ u //
+ .combine('format', kCanvasTextureFormats)
+ .combine('alphaMode', kCanvasAlphaModes)
+ .combine('colorSpace', kCanvasColorSpaces)
+ .combine('snapshotType', ['convertToBlob', 'transferToImageBitmap', 'imageBitmap'])
+ )
+ .fn(async t => {
+ const offscreenCanvas = initWebGPUCanvasContent(
+ t,
+ t.params.format,
+ t.params.alphaMode,
+ t.params.colorSpace,
+ 'offscreen'
+ );
+
+ let snapshot: HTMLImageElement | ImageBitmap;
+ switch (t.params.snapshotType) {
+ case 'convertToBlob': {
+ if (typeof offscreenCanvas.convertToBlob === 'undefined') {
+ t.skip("Browser doesn't support OffscreenCanvas.convertToBlob");
+ return;
+ }
+ const blob = await offscreenCanvas.convertToBlob();
+ const url = URL.createObjectURL(blob);
+ const img = new Image(offscreenCanvas.width, offscreenCanvas.height);
+ img.src = url;
+ await raceWithRejectOnTimeout(img.decode(), 5000, 'load image timeout');
+ snapshot = img;
+ break;
+ }
+ case 'transferToImageBitmap': {
+ if (typeof offscreenCanvas.transferToImageBitmap === 'undefined') {
+ t.skip("Browser doesn't support OffscreenCanvas.transferToImageBitmap");
+ return;
+ }
+ snapshot = offscreenCanvas.transferToImageBitmap();
+ break;
+ }
+ case 'imageBitmap': {
+ snapshot = await createImageBitmap(offscreenCanvas);
+ break;
+ }
+ default:
+ unreachable();
+ }
+
+ checkImageResult(t, snapshot, t.params.colorSpace, expect[t.params.alphaMode]);
+ });
+
+g.test('onscreenCanvas,uploadToWebGL')
+ .desc(
+ `
+ Ensure upload WebGPU context canvas to webgl texture is correct with
+ - various WebGPU canvas texture formats
+ - WebGPU canvas alpha mode = {"opaque", "premultiplied"}
+ - upload methods = {texImage2D, texSubImage2D}
+ `
+ )
+ .params(u =>
+ u //
+ .combine('format', kCanvasTextureFormats)
+ .combine('alphaMode', kCanvasAlphaModes)
+ .combine('webgl', ['webgl', 'webgl2'])
+ .combine('upload', ['texImage2D', 'texSubImage2D'])
+ )
+ .fn(t => {
+ const { format, webgl, upload } = t.params;
+ const canvas = initWebGPUCanvasContent(t, format, t.params.alphaMode, 'srgb', 'onscreen');
+
+ const expectCanvas: HTMLCanvasElement = createOnscreenCanvas(t, canvas.width, canvas.height);
+ const gl = expectCanvas.getContext(webgl) as WebGLRenderingContext | WebGL2RenderingContext;
+ if (gl === null) {
+ return;
+ }
+
+ const texture = gl.createTexture();
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ switch (upload) {
+ case 'texImage2D': {
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
+ break;
+ }
+ case 'texSubImage2D': {
+ gl.texImage2D(
+ gl.TEXTURE_2D,
+ 0,
+ gl.RGBA,
+ canvas.width,
+ canvas.height,
+ 0,
+ gl.RGBA,
+ gl.UNSIGNED_BYTE,
+ null
+ );
+ gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
+ break;
+ }
+ default:
+ unreachable();
+ }
+
+ const fb = gl.createFramebuffer();
+
+ gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
+ gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
+
+ const pixels = new Uint8Array(canvas.width * canvas.height * 4);
+ gl.readPixels(0, 0, 2, 2, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
+ const actual = new Uint8ClampedArray(pixels);
+
+ t.expectOK(checkElementsEqual(actual, expect[t.params.alphaMode]));
+ });
+
g.test('drawTo2DCanvas')
  .desc(
    `
    Ensure draw WebGPU context canvas to 2d context canvas/offscreenCanvas is correct with
    - various WebGPU canvas texture formats
    - WebGPU canvas alpha mode = {"opaque", "premultiplied"}
    - colorSpace = {"srgb", "display-p3"}
    - WebGPU canvas type = {"onscreen", "offscreen"}
    - 2d canvas type = {"onscreen", "offscreen"}
    `
  )
  .params(u =>
    u //
      .combine('format', kCanvasTextureFormats)
      .combine('alphaMode', kCanvasAlphaModes)
      .combine('colorSpace', kCanvasColorSpaces)
      .combine('webgpuCanvasType', kAllCanvasTypes)
      .combine('canvas2DType', kAllCanvasTypes)
  )
  .fn(t => {
    const { format, webgpuCanvasType, alphaMode, colorSpace, canvas2DType } = t.params;

    // Source: a WebGPU canvas holding the four-pixel test pattern.
    const canvas = initWebGPUCanvasContent(t, format, alphaMode, colorSpace, webgpuCanvasType);

    // Destination: a 2D canvas of the requested type (default color space).
    const expectCanvas = createCanvas(t, canvas2DType, canvas.width, canvas.height);
    const ctx = expectCanvas.getContext('2d') as CanvasRenderingContext2D;
    if (ctx === null) {
      t.skip(canvas2DType + ' canvas cannot get 2d context');
      return;
    }

    // Draw the WebGPU canvas into the 2D canvas, read pixels back, and compare.
    ctx.drawImage(canvas, 0, 0);
    readPixelsFrom2DCanvasAndCompare(t, ctx, expect[t.params.alphaMode]);
  });
+
+g.test('transferToImageBitmap_unconfigured_nonzero_size')
+ .desc(
+ `Regression test for a crash when calling transferImageBitmap on an unconfigured. Case where the canvas is not empty`
+ )
+ .fn(t => {
+ const canvas = createCanvas(t, 'offscreen', 2, 3);
+ canvas.getContext('webgpu');
+
+ // Transferring gives an ImageBitmap of the correct size filled with transparent black.
+ const ib = canvas.transferToImageBitmap();
+ t.expect(ib.width === canvas.width);
+ t.expect(ib.height === canvas.height);
+
+ const readbackCanvas = document.createElement('canvas');
+ readbackCanvas.width = canvas.width;
+ readbackCanvas.height = canvas.height;
+ const readbackContext = readbackCanvas.getContext('2d', {
+ alpha: true,
+ });
+ if (readbackContext === null) {
+ t.skip('Cannot get a 2D canvas context');
+ return;
+ }
+
+ // Since there isn't a configuration we expect the ImageBitmap to have the default alphaMode of "opaque".
+ const expected = new Uint8ClampedArray(canvas.width * canvas.height * 4);
+ for (let i = 0; i < expected.byteLength; i += 4) {
+ expected[i + 0] = 0;
+ expected[i + 1] = 0;
+ expected[i + 2] = 0;
+ expected[i + 3] = 255;
+ }
+
+ readbackContext.drawImage(ib, 0, 0);
+ readPixelsFrom2DCanvasAndCompare(t, readbackContext, expected);
+ });
+
+g.test('transferToImageBitmap_zero_size')
+ .desc(
+ `Regression test for a crash when calling transferImageBitmap on an unconfigured. Case where the canvas is empty.
+
+ TODO: Spec and expect a particular Exception type here.`
+ )
+ .params(u => u.combine('configure', [true, false]))
+ .fn(t => {
+ const { configure } = t.params;
+ const canvas = createCanvas(t, 'offscreen', 0, 1);
+ const ctx = canvas.getContext('webgpu')!;
+
+ if (configure) {
+ t.expectValidationError(() => ctx.configure({ device: t.device, format: 'bgra8unorm' }));
+ }
+
+ // Transferring would give an empty ImageBitmap which is not possible, so an Exception is thrown.
+ t.shouldThrow(true, () => {
+ canvas.transferToImageBitmap();
+ });
+ });
+
+g.test('transferToImageBitmap_huge_size')
+ .desc(`Regression test for a crash when calling transferImageBitmap on a HUGE canvas.`)
+ .fn(t => {
+ const canvas = createCanvas(t, 'offscreen', 1000000, 1000000);
+ canvas.getContext('webgpu')!;
+
+ // Transferring to such a HUGE image bitmap would not be possible, so an Exception is thrown.
+ t.shouldThrow(true, () => {
+ canvas.transferToImageBitmap();
+ });
+ });