Diffstat (limited to 'dom/webgpu/tests/cts/checkout/src/common/tools')
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/crawl.ts                        |  56
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/dev_server.ts                   |  13
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts                    | 278
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings.ts                 |  11
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings_and_webworkers.ts  |  89
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/gen_wpt_cts_html.ts             | 135
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts          |  12
-rw-r--r--  dom/webgpu/tests/cts/checkout/src/common/tools/validate.ts                     |  29
8 files changed, 445 insertions(+), 178 deletions(-)
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/crawl.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/crawl.ts
index 50340dd68b..21a335b11c 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/crawl.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/crawl.ts
@@ -45,13 +45,16 @@ async function crawlFilesRecursively(dir: string): Promise<string[]> {
);
}
-export async function crawl(suiteDir: string, validate: boolean): Promise<TestSuiteListingEntry[]> {
+export async function crawl(
+ suiteDir: string,
+ opts: { validate: boolean; printMetadataWarnings: boolean } | null = null
+): Promise<TestSuiteListingEntry[]> {
if (!fs.existsSync(suiteDir)) {
throw new Error(`Could not find suite: ${suiteDir}`);
}
let validateTimingsEntries;
- if (validate) {
+ if (opts?.validate) {
const metadata = loadMetadataForSuite(suiteDir);
if (metadata) {
validateTimingsEntries = {
@@ -75,7 +78,7 @@ export async function crawl(suiteDir: string, validate: boolean): Promise<TestSu
const suite = path.basename(suiteDir);
- if (validate) {
+ if (opts?.validate) {
const filename = `../../${suite}/${filepathWithoutExtension}.spec.js`;
assert(!process.env.STANDALONE_DEV_SERVER);
@@ -109,8 +112,6 @@ export async function crawl(suiteDir: string, validate: boolean): Promise<TestSu
}
if (validateTimingsEntries) {
- let failed = false;
-
const zeroEntries = [];
const staleEntries = [];
for (const [metadataKey, metadataValue] of Object.entries(validateTimingsEntries.metadata)) {
@@ -125,36 +126,39 @@ export async function crawl(suiteDir: string, validate: boolean): Promise<TestSu
staleEntries.push(metadataKey);
}
}
- if (zeroEntries.length) {
- console.warn('WARNING: subcaseMS≤0 found in listing_meta.json (allowed, but try to avoid):');
+ if (zeroEntries.length && opts?.printMetadataWarnings) {
+ console.warn(
+ 'WARNING: subcaseMS ≤ 0 found in listing_meta.json (see docs/adding_timing_metadata.md):'
+ );
for (const metadataKey of zeroEntries) {
console.warn(` ${metadataKey}`);
}
}
- if (staleEntries.length) {
- console.error('ERROR: Non-existent tests found in listing_meta.json:');
- for (const metadataKey of staleEntries) {
- console.error(` ${metadataKey}`);
- }
- failed = true;
- }
- const missingEntries = [];
- for (const metadataKey of validateTimingsEntries.testsFoundInFiles) {
- if (!(metadataKey in validateTimingsEntries.metadata)) {
- missingEntries.push(metadataKey);
+ if (opts?.printMetadataWarnings) {
+ const missingEntries = [];
+ for (const metadataKey of validateTimingsEntries.testsFoundInFiles) {
+ if (!(metadataKey in validateTimingsEntries.metadata)) {
+ missingEntries.push(metadataKey);
+ }
+ }
+ if (missingEntries.length) {
+ console.error(
+ 'WARNING: Tests missing from listing_meta.json (see docs/adding_timing_metadata.md):'
+ );
+ for (const metadataKey of missingEntries) {
+ console.error(` ${metadataKey}`);
+ }
}
}
- if (missingEntries.length) {
- console.error(
- 'ERROR: Tests missing from listing_meta.json. Please add the new tests (See docs/adding_timing_metadata.md):'
- );
- for (const metadataKey of missingEntries) {
+
+ if (staleEntries.length) {
+ console.error('ERROR: Non-existent tests found in listing_meta.json. Please update:');
+ for (const metadataKey of staleEntries) {
console.error(` ${metadataKey}`);
- failed = true;
}
+ unreachable();
}
- assert(!failed);
}
return entries;
@@ -163,5 +167,5 @@ export async function crawl(suiteDir: string, validate: boolean): Promise<TestSu
export function makeListing(filename: string): Promise<TestSuiteListing> {
// Don't validate. This path is only used for the dev server and running tests with Node.
// Validation is done for listing generation and presubmit.
- return crawl(path.dirname(filename), false);
+ return crawl(path.dirname(filename));
}
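
For orientation, a minimal sketch of calling the updated crawl() API (suite path illustrative): the dev server and Node runner omit the options argument entirely, while listing generation and presubmit pass a validate flag, optionally with metadata warnings enabled.

// Sketch only, inside an async context; the suite path is illustrative.
import { crawl } from './crawl.js';

// Dev server / Node runner: no validation (options default to null).
const listing = await crawl('src/webgpu');

// Presubmit: validate the suite and print non-fatal listing_meta.json warnings.
await crawl('src/webgpu', { validate: true, printMetadataWarnings: true });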
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/dev_server.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/dev_server.ts
index 57cb6a7ea4..8e0e3bdbe6 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/dev_server.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/dev_server.ts
@@ -144,6 +144,19 @@ app.get('/out/:suite([a-zA-Z0-9_-]+)/listing.js', async (req, res, next) => {
}
});
+// Serve .worker.js files by generating the necessary wrapper.
+app.get('/out/:suite([a-zA-Z0-9_-]+)/webworker/:filepath(*).worker.js', (req, res, next) => {
+ const { suite, filepath } = req.params;
+ const result = `\
+import { g } from '/out/${suite}/${filepath}.spec.js';
+import { wrapTestGroupForWorker } from '/out/common/runtime/helper/wrap_for_worker.js';
+
+wrapTestGroupForWorker(g);
+`;
+ res.setHeader('Content-Type', 'application/javascript');
+ res.send(result);
+});
+
// Serve all other .js files by fetching the source .ts file and compiling it.
app.get('/out/**/*.js', async (req, res, next) => {
const jsUrl = path.relative('/out', req.url);
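
Since the generated wrapper is an ES module (it uses import), a page would spawn it as a module worker. A usage sketch against the dev server, with an illustrative test path:

// Illustrative path; any {suite}/webworker/**/*.worker.js URL is generated on demand.
const worker = new Worker('/out/webgpu/webworker/api/operation/buffers/map.worker.js', {
  type: 'module',
});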
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts
index ce0854aa20..d8309ebcb1 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts
@@ -3,32 +3,41 @@ import * as path from 'path';
import * as process from 'process';
import { Cacheable, dataCache, setIsBuildingDataCache } from '../framework/data_cache.js';
+import { crc32, toHexString } from '../util/crc32.js';
+import { parseImports } from '../util/parse_imports.js';
function usage(rc: number): void {
- console.error(`Usage: tools/gen_cache [options] [OUT_DIR] [SUITE_DIRS...]
+ console.error(`Usage: tools/gen_cache [options] [SUITE_DIRS...]
For each suite in SUITE_DIRS, pre-compute data that is expensive to generate
-at runtime and store it under OUT_DIR. If the data file is found then the
-DataCache will load this instead of building the expensive data at CTS runtime.
+at runtime and store it under 'src/resources/cache'. If the data file is found
+then the DataCache will load this instead of building the expensive data at CTS
+runtime.
+Note: Due to differences in gzip compression, different versions of node can
+produce radically different binary cache files. gen_cache uses the hashes of the
+source files to determine whether a cache file is 'up to date'. This is faster
+and does not depend on the compressed output.
Options:
--help Print this message and exit.
--list Print the list of output files without writing them.
- --nth i/n Only process every file where (file_index % n == i)
- --validate Check that cache should build (Tests for collisions).
+ --force Rebuild the cache even if it is up to date
+ --validate Check that the cache is up to date
--verbose Print each action taken.
`);
process.exit(rc);
}
+// Where the cache is generated
+const outDir = 'src/resources/cache';
+
+let forceRebuild = false;
let mode: 'emit' | 'list' | 'validate' = 'emit';
-let nth = { i: 0, n: 1 };
let verbose = false;
const nonFlagsArgs: string[] = [];
-for (let i = 0; i < process.argv.length; i++) {
- const arg = process.argv[i];
+for (const arg of process.argv) {
if (arg.startsWith('-')) {
switch (arg) {
case '--list': {
@@ -39,6 +48,10 @@ for (let i = 0; i < process.argv.length; i++) {
usage(0);
break;
}
+ case '--force': {
+ forceRebuild = true;
+ break;
+ }
case '--verbose': {
verbose = true;
break;
@@ -47,28 +60,6 @@ for (let i = 0; i < process.argv.length; i++) {
mode = 'validate';
break;
}
- case '--nth': {
- const err = () => {
- console.error(
- `--nth requires a value of the form 'i/n', where i and n are positive integers and i < n`
- );
- process.exit(1);
- };
- i++;
- if (i >= process.argv.length) {
- err();
- }
- const value = process.argv[i];
- const parts = value.split('/');
- if (parts.length !== 2) {
- err();
- }
- nth = { i: parseInt(parts[0]), n: parseInt(parts[1]) };
- if (nth.i < 0 || nth.n < 1 || nth.i > nth.n) {
- err();
- }
- break;
- }
default: {
console.log('unrecognized flag: ', arg);
usage(1);
@@ -79,12 +70,10 @@ for (let i = 0; i < process.argv.length; i++) {
}
}
-if (nonFlagsArgs.length < 4) {
+if (nonFlagsArgs.length < 3) {
usage(0);
}
-const outRootDir = nonFlagsArgs[2];
-
dataCache.setStore({
load: (path: string) => {
return new Promise<Uint8Array>((resolve, reject) => {
@@ -100,57 +89,133 @@ dataCache.setStore({
});
setIsBuildingDataCache();
+const cacheFileSuffix = __filename.endsWith('.ts') ? '.cache.ts' : '.cache.js';
+
+/**
+ * @returns a list of all the files under 'dir' that have the given extension
+ * @param dir the directory to search
+ * @param ext the extension of the files to find
+ */
+function glob(dir: string, ext: string) {
+ const files: string[] = [];
+ for (const file of fs.readdirSync(dir)) {
+ const path = `${dir}/${file}`;
+ if (fs.statSync(path).isDirectory()) {
+ for (const child of glob(path, ext)) {
+ files.push(`${file}/${child}`);
+ }
+ }
+
+ if (path.endsWith(ext) && fs.statSync(path).isFile()) {
+ files.push(file);
+ }
+ }
+ return files;
+}
+
+/**
+ * Exception type thrown by SourceHasher.hashFile() when a file annotated with
+ * MUST_NOT_BE_IMPORTED_BY_DATA_CACHE is transitively imported by a .cache.ts file.
+ */
+class InvalidImportException {
+ constructor(path: string) {
+ this.stack = [path];
+ }
+ toString(): string {
+ return `invalid transitive import for cache:\n ${this.stack.join('\n ')}`;
+ }
+ readonly stack: string[];
+}
+/**
+ * SourceHasher is a utility for producing a hash of a source .ts file and its imported source files.
+ */
+class SourceHasher {
+ /**
+ * @param path the source file path
+ * @returns a hash of the source file and all of its imported dependencies.
+ */
+ public hashOf(path: string) {
+ this.u32Array[0] = this.hashFile(path);
+ return this.u32Array[0].toString(16);
+ }
+
+ hashFile(path: string): number {
+ if (!fs.existsSync(path) && path.endsWith('.js')) {
+ path = path.substring(0, path.length - 2) + 'ts';
+ }
+
+ const cached = this.hashes.get(path);
+ if (cached !== undefined) {
+ return cached;
+ }
+
+ this.hashes.set(path, 0); // Store a zero hash to handle cyclic imports
+
+ const content = fs.readFileSync(path, { encoding: 'utf-8' });
+ const normalized = content.replace('\r\n', '\n');
+ let hash = crc32(normalized);
+ for (const importPath of parseImports(path, normalized)) {
+ try {
+ const importHash = this.hashFile(importPath);
+ hash = this.hashCombine(hash, importHash);
+ } catch (ex) {
+ if (ex instanceof InvalidImportException) {
+ ex.stack.push(path);
+ throw ex;
+ }
+ }
+ }
+
+ if (content.includes('MUST_NOT_BE_IMPORTED_BY_DATA_CACHE')) {
+ throw new InvalidImportException(path);
+ }
+
+ this.hashes.set(path, hash);
+ return hash;
+ }
+
+ /** Simple non-cryptographic hash combiner */
+ hashCombine(a: number, b: number): number {
+ return crc32(`${toHexString(a)} ${toHexString(b)}`);
+ }
+
+ private hashes = new Map<string, number>();
+ private u32Array = new Uint32Array(1);
+}
+
void (async () => {
- for (const suiteDir of nonFlagsArgs.slice(3)) {
+ const suiteDirs = nonFlagsArgs.slice(2); // skip <exe> <js>
+ for (const suiteDir of suiteDirs) {
await build(suiteDir);
}
})();
-const specFileSuffix = __filename.endsWith('.ts') ? '.spec.ts' : '.spec.js';
-
-async function crawlFilesRecursively(dir: string): Promise<string[]> {
- const subpathInfo = await Promise.all(
- (await fs.promises.readdir(dir)).map(async d => {
- const p = path.join(dir, d);
- const stats = await fs.promises.stat(p);
- return {
- path: p,
- isDirectory: stats.isDirectory(),
- isFile: stats.isFile(),
- };
- })
- );
-
- const files = subpathInfo
- .filter(i => i.isFile && i.path.endsWith(specFileSuffix))
- .map(i => i.path);
-
- return files.concat(
- await subpathInfo
- .filter(i => i.isDirectory)
- .map(i => crawlFilesRecursively(i.path))
- .reduce(async (a, b) => (await a).concat(await b), Promise.resolve([]))
- );
-}
-
async function build(suiteDir: string) {
if (!fs.existsSync(suiteDir)) {
console.error(`Could not find ${suiteDir}`);
process.exit(1);
}
- // Crawl files and convert paths to be POSIX-style, relative to suiteDir.
- let filesToEnumerate = (await crawlFilesRecursively(suiteDir)).sort();
+ // Load hashes.json
+ const fileHashJsonPath = `${outDir}/hashes.json`;
+ let fileHashes: Record<string, string> = {};
+ if (fs.existsSync(fileHashJsonPath)) {
+ const json = fs.readFileSync(fileHashJsonPath, { encoding: 'utf8' });
+ fileHashes = JSON.parse(json);
+ }
- // Filter out non-spec files
- filesToEnumerate = filesToEnumerate.filter(f => f.endsWith(specFileSuffix));
+ // Crawl files and convert paths to be POSIX-style, relative to suiteDir.
+ const filesToEnumerate = glob(suiteDir, cacheFileSuffix)
+ .map(p => `${suiteDir}/${p}`)
+ .sort();
+ const fileHasher = new SourceHasher();
const cacheablePathToTS = new Map<string, string>();
+ const errors: Array<string> = [];
- let fileIndex = 0;
for (const file of filesToEnumerate) {
- const pathWithoutExtension = file.substring(0, file.length - specFileSuffix.length);
- const mod = await import(`../../../${pathWithoutExtension}.spec.js`);
+ const pathWithoutExtension = file.substring(0, file.length - 3);
+ const mod = await import(`../../../${pathWithoutExtension}.js`);
if (mod.d?.serialize !== undefined) {
const cacheable = mod.d as Cacheable<unknown>;
@@ -158,41 +223,78 @@ async function build(suiteDir: string) {
// Check for collisions
const existing = cacheablePathToTS.get(cacheable.path);
if (existing !== undefined) {
- console.error(
- `error: Cacheable '${cacheable.path}' is emitted by both:
+ errors.push(
+ `'${cacheable.path}' is emitted by both:
'${existing}'
and
'${file}'`
);
- process.exit(1);
}
cacheablePathToTS.set(cacheable.path, file);
}
- const outPath = `${outRootDir}/data/${cacheable.path}`;
+ const outPath = `${outDir}/${cacheable.path}`;
+ const fileHash = fileHasher.hashOf(file);
- if (fileIndex++ % nth.n === nth.i) {
- switch (mode) {
- case 'emit': {
+ switch (mode) {
+ case 'emit': {
+ if (!forceRebuild && fileHashes[cacheable.path] === fileHash) {
if (verbose) {
- console.log(`building '${outPath}'`);
+ console.log(`'${outPath}' is up to date`);
}
- const data = await cacheable.build();
- const serialized = cacheable.serialize(data);
- fs.mkdirSync(path.dirname(outPath), { recursive: true });
- fs.writeFileSync(outPath, serialized, 'binary');
- break;
+ continue;
}
- case 'list': {
- console.log(outPath);
- break;
- }
- case 'validate': {
- // Only check currently performed is the collision detection above
- break;
+ console.log(`building '${outPath}'`);
+ const data = await cacheable.build();
+ const serialized = cacheable.serialize(data);
+ fs.mkdirSync(path.dirname(outPath), { recursive: true });
+ fs.writeFileSync(outPath, serialized, 'binary');
+ fileHashes[cacheable.path] = fileHash;
+ break;
+ }
+ case 'list': {
+ console.log(outPath);
+ break;
+ }
+ case 'validate': {
+ if (fileHashes[cacheable.path] !== fileHash) {
+ errors.push(
+ `'${outPath}' needs rebuilding. Generate with 'npx grunt run:generate-cache'`
+ );
+ } else if (verbose) {
+ console.log(`'${outPath}' is up to date`);
}
}
}
}
}
+
+ // Check that there aren't stale files in the cache directory
+ for (const file of glob(outDir, '.bin')) {
+ if (cacheablePathToTS.get(file) === undefined) {
+ switch (mode) {
+ case 'emit':
+ fs.rmSync(file);
+ break;
+ case 'validate':
+ errors.push(
+ `cache file '${outDir}/${file}' is no longer generated. Remove with 'npx grunt run:generate-cache'`
+ );
+ break;
+ }
+ }
+ }
+
+ // Update hashes.json
+ if (mode === 'emit') {
+ const json = JSON.stringify(fileHashes, undefined, ' ');
+ fs.writeFileSync(fileHashJsonPath, json, { encoding: 'utf8' });
+ }
+
+ if (errors.length > 0) {
+ for (const error of errors) {
+ console.error(error);
+ }
+ process.exit(1);
+ }
}
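
For reference, the hashes.json written above (with JSON.stringify's one-space indent) is a flat map from Cacheable path to its hex source hash. A sketch with illustrative entries:

{
 "webgpu/shader/execution/binary/f32_addition.bin": "7f3a9c21",
 "webgpu/shader/execution/binary/f32_division.bin": "0b54e1d9"
}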
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings.ts
index fc5e1f3cde..7cc8cb78f3 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings.ts
@@ -9,7 +9,7 @@ function usage(rc: number): void {
For each suite in SUITE_DIRS, generate listings and write each listing.js
into OUT_DIR/{suite}/listing.js. Example:
- tools/gen_listings out/ src/unittests/ src/webgpu/
+ tools/gen_listings gen/ src/unittests/ src/webgpu/
Options:
--help Print this message and exit.
@@ -40,7 +40,7 @@ const outDir = argv[2];
for (const suiteDir of argv.slice(3)) {
// Run concurrently for each suite (might be a tiny bit more efficient)
- void crawl(suiteDir, false).then(listing => {
+ void crawl(suiteDir).then(listing => {
const suite = path.basename(suiteDir);
const outFile = path.normalize(path.join(outDir, `${suite}/listing.js`));
fs.mkdirSync(path.join(outDir, suite), { recursive: true });
@@ -52,12 +52,5 @@ for (const suiteDir of argv.slice(3)) {
export const listing = ${JSON.stringify(listing, undefined, 2)};
`
);
-
- // If there was a sourcemap for the file we just replaced, delete it.
- try {
- fs.unlinkSync(outFile + '.map');
- } catch (ex) {
- // ignore if file didn't exist
- }
});
}
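
The generated listing.js has this rough shape (a sketch; the entries and README text are illustrative, following the TestSuiteListingEntry union of README entries and test-file entries):

// AUTO-GENERATED - DO NOT EDIT. See src/common/tools/gen_listings.ts.

export const listing = [
  { "file": [], "readme": "WebGPU conformance test suite." },
  { "file": ["api", "operation", "buffers", "map"] }
];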
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings_and_webworkers.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings_and_webworkers.ts
new file mode 100644
index 0000000000..04ce669de3
--- /dev/null
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_listings_and_webworkers.ts
@@ -0,0 +1,89 @@
+import * as fs from 'fs';
+import * as path from 'path';
+import * as process from 'process';
+
+import { crawl } from './crawl.js';
+
+function usage(rc: number): void {
+ console.error(`Usage: tools/gen_listings_and_webworkers [options] [OUT_DIR] [SUITE_DIRS...]
+
+For each suite in SUITE_DIRS, generate listings into OUT_DIR/{suite}/listing.js,
+and generate Web Worker proxies in OUT_DIR/{suite}/webworker/**/*.worker.js for
+every .spec.js file. (Note {suite}/webworker/ is reserved for this purpose.)
+
+Example:
+ tools/gen_listings_and_webworkers gen/ src/unittests/ src/webgpu/
+
+Options:
+ --help Print this message and exit.
+`);
+ process.exit(rc);
+}
+
+const argv = process.argv;
+if (argv.indexOf('--help') !== -1) {
+ usage(0);
+}
+
+{
+ // Ignore old argument that is now the default
+ const i = argv.indexOf('--no-validate');
+ if (i !== -1) {
+ argv.splice(i, 1);
+ }
+}
+
+if (argv.length < 4) {
+ usage(0);
+}
+
+const myself = 'src/common/tools/gen_listings_and_webworkers.ts';
+
+const outDir = argv[2];
+
+for (const suiteDir of argv.slice(3)) {
+ // Run concurrently for each suite (might be a tiny bit more efficient)
+ void crawl(suiteDir).then(listing => {
+ const suite = path.basename(suiteDir);
+
+ // Write listing.js
+ const outFile = path.normalize(path.join(outDir, `${suite}/listing.js`));
+ fs.mkdirSync(path.join(outDir, suite), { recursive: true });
+ fs.writeFileSync(
+ outFile,
+ `\
+// AUTO-GENERATED - DO NOT EDIT. See ${myself}.
+
+export const listing = ${JSON.stringify(listing, undefined, 2)};
+`
+ );
+
+ // Write suite/webworker/**/*.worker.js
+ for (const entry of listing) {
+ if ('readme' in entry) continue;
+
+ const outFileDir = path.join(
+ outDir,
+ suite,
+ 'webworker',
+ ...entry.file.slice(0, entry.file.length - 1)
+ );
+ const outFile = path.join(outDir, suite, 'webworker', ...entry.file) + '.worker.js';
+
+ const relPathToSuiteRoot = Array<string>(entry.file.length).fill('..').join('/');
+
+ fs.mkdirSync(outFileDir, { recursive: true });
+ fs.writeFileSync(
+ outFile,
+ `\
+// AUTO-GENERATED - DO NOT EDIT. See ${myself}.
+
+import { g } from '${relPathToSuiteRoot}/${entry.file.join('/')}.spec.js';
+import { wrapTestGroupForWorker } from '${relPathToSuiteRoot}/../common/runtime/helper/wrap_for_worker.js';
+
+wrapTestGroupForWorker(g);
+`
+ );
+ }
+ });
+}
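
Tracing the template through a concrete (illustrative) entry with file = ['api', 'operation', 'buffers', 'map']: relPathToSuiteRoot is '../../../..', so gen/webgpu/webworker/api/operation/buffers/map.worker.js comes out as:

// AUTO-GENERATED - DO NOT EDIT. See src/common/tools/gen_listings_and_webworkers.ts.

import { g } from '../../../../api/operation/buffers/map.spec.js';
import { wrapTestGroupForWorker } from '../../../../../common/runtime/helper/wrap_for_worker.js';

wrapTestGroupForWorker(g);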
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_wpt_cts_html.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_wpt_cts_html.ts
index e8161304e9..46c2ae4354 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_wpt_cts_html.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_wpt_cts_html.ts
@@ -23,6 +23,7 @@ gen_wpt_cts_html.ts. Example:
{
"suite": "webgpu",
"out": "path/to/output/cts.https.html",
+ "outJSON": "path/to/output/webgpu_variant_list.json",
"template": "path/to/template/cts.https.html",
"maxChunkTimeMS": 2000
}
@@ -35,15 +36,15 @@ where arguments.txt is a file containing a list of arguments prefixes to both ge
in the expectations. The entire variant list generation runs *once per prefix*, so this
multiplies the size of the variant list.
- ?worker=0&q=
- ?worker=1&q=
+ ?debug=0&q=
+ ?debug=1&q=
and myexpectations.txt is a file containing a list of WPT paths to suppress, e.g.:
- path/to/cts.https.html?worker=0&q=webgpu:a/foo:bar={"x":1}
- path/to/cts.https.html?worker=1&q=webgpu:a/foo:bar={"x":1}
+ path/to/cts.https.html?debug=0&q=webgpu:a/foo:bar={"x":1}
+ path/to/cts.https.html?debug=1&q=webgpu:a/foo:bar={"x":1}
- path/to/cts.https.html?worker=1&q=webgpu:a/foo:bar={"x":3}
+ path/to/cts.https.html?debug=1&q=webgpu:a/foo:bar={"x":3}
`);
process.exit(rc);
}
@@ -51,9 +52,11 @@ and myexpectations.txt is a file containing a list of WPT paths to suppress, e.g
interface ConfigJSON {
/** Test suite to generate from. */
suite: string;
- /** Output filename, relative to JSON file. */
+ /** Output path for HTML file, relative to config file. */
out: string;
- /** Input template filename, relative to JSON file. */
+ /** Output path for JSON file containing the "variant" list, relative to config file. */
+ outVariantList?: string;
+ /** Input template filename, relative to config file. */
template: string;
/**
* Maximum time for a single WPT "variant" chunk, in milliseconds. Defaults to infinity.
@@ -71,18 +74,31 @@ interface ConfigJSON {
/** The prefix to trim from every line of the expectations_file. */
prefix: string;
};
+ /** Expand all subtrees for the provided queries */
+ fullyExpandSubtrees?: {
+ file: string;
+ prefix: string;
+ };
+ /** Disable the long-path assert on generated variant query strings */
+ noLongPathAssert?: boolean;
}
interface Config {
suite: string;
out: string;
+ outVariantList?: string;
template: string;
maxChunkTimeMS: number;
argumentsPrefixes: string[];
+ noLongPathAssert: boolean;
expectations?: {
file: string;
prefix: string;
};
+ fullyExpandSubtrees?: {
+ file: string;
+ prefix: string;
+ };
}
let config: Config;
@@ -101,13 +117,23 @@ let config: Config;
template: path.resolve(jsonFileDir, configJSON.template),
maxChunkTimeMS: configJSON.maxChunkTimeMS ?? Infinity,
argumentsPrefixes: configJSON.argumentsPrefixes ?? ['?q='],
+ noLongPathAssert: configJSON.noLongPathAssert ?? false,
};
+ if (configJSON.outVariantList) {
+ config.outVariantList = path.resolve(jsonFileDir, configJSON.outVariantList);
+ }
if (configJSON.expectations) {
config.expectations = {
file: path.resolve(jsonFileDir, configJSON.expectations.file),
prefix: configJSON.expectations.prefix,
};
}
+ if (configJSON.fullyExpandSubtrees) {
+ config.fullyExpandSubtrees = {
+ file: path.resolve(jsonFileDir, configJSON.fullyExpandSubtrees.file),
+ prefix: configJSON.fullyExpandSubtrees.prefix,
+ };
+ }
break;
}
case 4:
@@ -130,6 +156,7 @@ let config: Config;
suite,
maxChunkTimeMS: Infinity,
argumentsPrefixes: ['?q='],
+ noLongPathAssert: false,
};
if (process.argv.length >= 7) {
config.argumentsPrefixes = (await fs.readFile(argsPrefixesFile, 'utf8'))
@@ -153,29 +180,16 @@ let config: Config;
config.argumentsPrefixes.sort((a, b) => b.length - a.length);
// Load expectations (if any)
- let expectationLines = new Set<string>();
- if (config.expectations) {
- expectationLines = new Set(
- (await fs.readFile(config.expectations.file, 'utf8')).split(/\r?\n/).filter(l => l.length)
- );
- }
+ const expectations: Map<string, string[]> = await loadQueryFile(
+ config.argumentsPrefixes,
+ config.expectations
+ );
- const expectations: Map<string, string[]> = new Map();
- for (const prefix of config.argumentsPrefixes) {
- expectations.set(prefix, []);
- }
-
- expLoop: for (const exp of expectationLines) {
- // Take each expectation for the longest prefix it matches.
- for (const argsPrefix of config.argumentsPrefixes) {
- const prefix = config.expectations!.prefix + argsPrefix;
- if (exp.startsWith(prefix)) {
- expectations.get(argsPrefix)!.push(exp.substring(prefix.length));
- continue expLoop;
- }
- }
- console.log('note: ignored expectation: ' + exp);
- }
+ // Load fullyExpandSubtrees queries (if any)
+ const fullyExpand: Map<string, string[]> = await loadQueryFile(
+ config.argumentsPrefixes,
+ config.fullyExpandSubtrees
+ );
const loader = new DefaultTestFileLoader();
const lines = [];
@@ -183,6 +197,7 @@ let config: Config;
const rootQuery = new TestQueryMultiFile(config.suite, []);
const tree = await loader.loadTree(rootQuery, {
subqueriesToExpand: expectations.get(prefix),
+ fullyExpandSubtrees: fullyExpand.get(prefix),
maxChunkTime: config.maxChunkTimeMS,
});
@@ -199,22 +214,24 @@ let config: Config;
alwaysExpandThroughLevel,
})) {
assert(query instanceof TestQueryMultiCase);
- const queryString = query.toString();
- // Check for a safe-ish path length limit. Filename must be <= 255, and on Windows the whole
- // path must be <= 259. Leave room for e.g.:
- // 'c:\b\s\w\xxxxxxxx\layout-test-results\external\wpt\webgpu\cts_worker=0_q=...-actual.txt'
- assert(
- queryString.length < 185,
- `Generated test variant would produce too-long -actual.txt filename. Possible solutions:
+ if (!config.noLongPathAssert) {
+ const queryString = query.toString();
+ // Check for a safe-ish path length limit. Filename must be <= 255, and on Windows the whole
+ // path must be <= 259. Leave room for e.g.:
+ // 'c:\b\s\w\xxxxxxxx\layout-test-results\external\wpt\webgpu\cts_worker=0_q=...-actual.txt'
+ assert(
+ queryString.length < 185,
+ `Generated test variant would produce too-long -actual.txt filename. Possible solutions:
- Reduce the length of the parts of the test query
- Reduce the parameterization of the test
- Make the test function faster and regenerate the listing_meta entry
- Reduce the specificity of test expectations (if you're using them)
${queryString}`
- );
+ );
+ }
lines.push({
- urlQueryString: prefix + query.toString(), // "?worker=0&q=..."
+ urlQueryString: prefix + query.toString(), // "?debug=0&q=..."
comment: useChunking ? `estimated: ${subtreeCounts?.totalTimeMS.toFixed(3)} ms` : undefined,
});
@@ -232,6 +249,39 @@ ${queryString}`
process.exit(1);
});
+async function loadQueryFile(
+ argumentsPrefixes: string[],
+ queryFile?: {
+ file: string;
+ prefix: string;
+ }
+): Promise<Map<string, string[]>> {
+ let lines = new Set<string>();
+ if (queryFile) {
+ lines = new Set(
+ (await fs.readFile(queryFile.file, 'utf8')).split(/\r?\n/).filter(l => l.length)
+ );
+ }
+
+ const result: Map<string, string[]> = new Map();
+ for (const prefix of argumentsPrefixes) {
+ result.set(prefix, []);
+ }
+
+ expLoop: for (const exp of lines) {
+ // Take each expectation for the longest prefix it matches.
+ for (const argsPrefix of argumentsPrefixes) {
+ const prefix = queryFile!.prefix + argsPrefix;
+ if (exp.startsWith(prefix)) {
+ result.get(argsPrefix)!.push(exp.substring(prefix.length));
+ continue expLoop;
+ }
+ }
+ console.log('note: ignored expectation: ' + exp);
+ }
+ return result;
+}
+
async function generateFile(
lines: Array<{ urlQueryString?: string; comment?: string } | undefined>
): Promise<void> {
@@ -240,13 +290,20 @@ async function generateFile(
result += await fs.readFile(config.template, 'utf8');
+ const variantList = [];
for (const line of lines) {
if (line !== undefined) {
- if (line.urlQueryString) result += `<meta name=variant content='${line.urlQueryString}'>`;
+ if (line.urlQueryString) {
+ result += `<meta name=variant content='${line.urlQueryString}'>`;
+ variantList.push(line.urlQueryString);
+ }
if (line.comment) result += `<!-- ${line.comment} -->`;
}
result += '\n';
}
await fs.writeFile(config.out, result);
+ if (config.outVariantList) {
+ await fs.writeFile(config.outVariantList, JSON.stringify(variantList, undefined, 2));
+ }
}
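
Pulling the new ConfigJSON fields together, a config file exercising all of them might look like this (a sketch; every path and prefix value is illustrative):

{
  "suite": "webgpu",
  "out": "../cts.https.html",
  "outVariantList": "../webgpu_variant_list.json",
  "template": "../cts.https.html.template",
  "maxChunkTimeMS": 2000,
  "argumentsPrefixes": ["?debug=0&q=", "?debug=1&q="],
  "noLongPathAssert": false,
  "expectations": { "file": "../expectations.txt", "prefix": "path/to/cts.https.html" },
  "fullyExpandSubtrees": { "file": "../fullyexpand.txt", "prefix": "path/to/cts.https.html" }
}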
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts
index fb33ae20fb..a8bef354cc 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts
@@ -36,21 +36,13 @@ In more detail:
- For each suite seen, loads its listing_meta.json, takes the max of the old and
new data, and writes it back out.
-How to generate TIMING_LOG_FILES files:
-
-- Launch the 'websocket-logger' tool (see its README.md), which listens for
- log messages on localhost:59497.
-- Run the tests you want to capture data for, on the same system. Since
- logging is done through the websocket side-channel, you can run the tests
- under any runtime (standalone, WPT, etc.) as long as WebSocket support is
- available (always true in browsers).
-- Run \`tools/merge_listing_times webgpu -- tools/websocket-logger/wslog-*.txt\`
+See 'docs/adding_timing_metadata.md' for how to generate TIMING_LOG_FILES files.
`);
process.exit(rc);
}
const kHeader = `{
- "_comment": "SEMI AUTO-GENERATED: Please read docs/adding_timing_metadata.md.",
+ "_comment": "SEMI AUTO-GENERATED. This list is NOT exhaustive. Please read docs/adding_timing_metadata.md.",
`;
const kFooter = `\
"_end": ""
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/validate.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/validate.ts
index 164ee3259a..47aa9782a8 100644
--- a/dom/webgpu/tests/cts/checkout/src/common/tools/validate.ts
+++ b/dom/webgpu/tests/cts/checkout/src/common/tools/validate.ts
@@ -2,7 +2,7 @@ import * as process from 'process';
import { crawl } from './crawl.js';
-function usage(rc: number): void {
+function usage(rc: number): never {
console.error(`Usage: tools/validate [options] [SUITE_DIRS...]
For each suite in SUITE_DIRS, validate some properties about the file:
@@ -14,23 +14,40 @@ For each suite in SUITE_DIRS, validate some properties about the file:
- That each case query is not too long
Example:
- tools/validate src/unittests/ src/webgpu/
+ tools/validate src/unittests src/webgpu
Options:
- --help Print this message and exit.
+ --help Print this message and exit.
+ --print-metadata-warnings Print non-fatal warnings about listing_meta.json files.
`);
process.exit(rc);
}
const args = process.argv.slice(2);
+if (args.length < 1) {
+ usage(0);
+}
if (args.indexOf('--help') !== -1) {
usage(0);
}
-if (args.length < 1) {
+let printMetadataWarnings = false;
+const suiteDirs = [];
+for (const arg of args) {
+ if (arg === '--print-metadata-warnings') {
+ printMetadataWarnings = true;
+ } else {
+ suiteDirs.push(arg);
+ }
+}
+
+if (suiteDirs.length === 0) {
usage(0);
}
-for (const suiteDir of args) {
- void crawl(suiteDir, true);
+for (const suiteDir of suiteDirs) {
+ void crawl(suiteDir, {
+ validate: true,
+ printMetadataWarnings,
+ });
}
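
A typical presubmit invocation with the new flag, taken directly from the usage text above:

tools/validate --print-metadata-warnings src/unittests src/webgpu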