diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000 |
commit | 59203c63bb777a3bacec32fb8830fba33540e809 (patch) | |
tree | 58298e711c0ff0575818c30485b44a2f21bf28a0 /dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts | |
parent | Adding upstream version 126.0.1. (diff) | |
download | firefox-59203c63bb777a3bacec32fb8830fba33540e809.tar.xz firefox-59203c63bb777a3bacec32fb8830fba33540e809.zip |
Adding upstream version 127.0 (upstream/127.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts')
-rw-r--r-- | dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts | 278 |
1 file changed, 190 insertions, 88 deletions
diff --git a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts index ce0854aa20..d8309ebcb1 100644 --- a/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts +++ b/dom/webgpu/tests/cts/checkout/src/common/tools/gen_cache.ts @@ -3,32 +3,41 @@ import * as path from 'path'; import * as process from 'process'; import { Cacheable, dataCache, setIsBuildingDataCache } from '../framework/data_cache.js'; +import { crc32, toHexString } from '../util/crc32.js'; +import { parseImports } from '../util/parse_imports.js'; function usage(rc: number): void { - console.error(`Usage: tools/gen_cache [options] [OUT_DIR] [SUITE_DIRS...] + console.error(`Usage: tools/gen_cache [options] [SUITE_DIRS...] For each suite in SUITE_DIRS, pre-compute data that is expensive to generate -at runtime and store it under OUT_DIR. If the data file is found then the -DataCache will load this instead of building the expensive data at CTS runtime. +at runtime and store it under 'src/resources/cache'. If the data file is found +then the DataCache will load this instead of building the expensive data at CTS +runtime. +Note: Due to differences in gzip compression, different versions of node can +produce radically different binary cache files. gen_cache uses the hashes of the +source files to determine whether a cache file is 'up to date'. This is faster +and does not depend on the compressed output. Options: --help Print this message and exit. --list Print the list of output files without writing them. - --nth i/n Only process every file where (file_index % n == i) - --validate Check that cache should build (Tests for collisions). + --force Rebuild cache even if they're up to date + --validate Check the cache is up to date --verbose Print each action taken. 
`); process.exit(rc); } +// Where the cache is generated +const outDir = 'src/resources/cache'; + +let forceRebuild = false; let mode: 'emit' | 'list' | 'validate' = 'emit'; -let nth = { i: 0, n: 1 }; let verbose = false; const nonFlagsArgs: string[] = []; -for (let i = 0; i < process.argv.length; i++) { - const arg = process.argv[i]; +for (const arg of process.argv) { if (arg.startsWith('-')) { switch (arg) { case '--list': { @@ -39,6 +48,10 @@ for (let i = 0; i < process.argv.length; i++) { usage(0); break; } + case '--force': { + forceRebuild = true; + break; + } case '--verbose': { verbose = true; break; @@ -47,28 +60,6 @@ for (let i = 0; i < process.argv.length; i++) { mode = 'validate'; break; } - case '--nth': { - const err = () => { - console.error( - `--nth requires a value of the form 'i/n', where i and n are positive integers and i < n` - ); - process.exit(1); - }; - i++; - if (i >= process.argv.length) { - err(); - } - const value = process.argv[i]; - const parts = value.split('/'); - if (parts.length !== 2) { - err(); - } - nth = { i: parseInt(parts[0]), n: parseInt(parts[1]) }; - if (nth.i < 0 || nth.n < 1 || nth.i > nth.n) { - err(); - } - break; - } default: { console.log('unrecognized flag: ', arg); usage(1); @@ -79,12 +70,10 @@ for (let i = 0; i < process.argv.length; i++) { } } -if (nonFlagsArgs.length < 4) { +if (nonFlagsArgs.length < 3) { usage(0); } -const outRootDir = nonFlagsArgs[2]; - dataCache.setStore({ load: (path: string) => { return new Promise<Uint8Array>((resolve, reject) => { @@ -100,57 +89,133 @@ dataCache.setStore({ }); setIsBuildingDataCache(); +const cacheFileSuffix = __filename.endsWith('.ts') ? 
'.cache.ts' : '.cache.js'; + +/** + * @returns a list of all the files under 'dir' that has the given extension + * @param dir the directory to search + * @param ext the extension of the files to find + */ +function glob(dir: string, ext: string) { + const files: string[] = []; + for (const file of fs.readdirSync(dir)) { + const path = `${dir}/${file}`; + if (fs.statSync(path).isDirectory()) { + for (const child of glob(path, ext)) { + files.push(`${file}/${child}`); + } + } + + if (path.endsWith(ext) && fs.statSync(path).isFile()) { + files.push(file); + } + } + return files; +} + +/** + * Exception type thrown by SourceHasher.hashFile() when a file annotated with + * MUST_NOT_BE_IMPORTED_BY_DATA_CACHE is transitively imported by a .cache.ts file. + */ +class InvalidImportException { + constructor(path: string) { + this.stack = [path]; + } + toString(): string { + return `invalid transitive import for cache:\n ${this.stack.join('\n ')}`; + } + readonly stack: string[]; +} +/** + * SourceHasher is a utility for producing a hash of a source .ts file and its imported source files. + */ +class SourceHasher { + /** + * @param path the source file path + * @returns a hash of the source file and all of its imported dependencies. 
+ */ + public hashOf(path: string) { + this.u32Array[0] = this.hashFile(path); + return this.u32Array[0].toString(16); + } + + hashFile(path: string): number { + if (!fs.existsSync(path) && path.endsWith('.js')) { + path = path.substring(0, path.length - 2) + 'ts'; + } + + const cached = this.hashes.get(path); + if (cached !== undefined) { + return cached; + } + + this.hashes.set(path, 0); // Store a zero hash to handle cyclic imports + + const content = fs.readFileSync(path, { encoding: 'utf-8' }); + const normalized = content.replace('\r\n', '\n'); + let hash = crc32(normalized); + for (const importPath of parseImports(path, normalized)) { + try { + const importHash = this.hashFile(importPath); + hash = this.hashCombine(hash, importHash); + } catch (ex) { + if (ex instanceof InvalidImportException) { + ex.stack.push(path); + throw ex; + } + } + } + + if (content.includes('MUST_NOT_BE_IMPORTED_BY_DATA_CACHE')) { + throw new InvalidImportException(path); + } + + this.hashes.set(path, hash); + return hash; + } + + /** Simple non-cryptographic hash combiner */ + hashCombine(a: number, b: number): number { + return crc32(`${toHexString(a)} ${toHexString(b)}`); + } + + private hashes = new Map<string, number>(); + private u32Array = new Uint32Array(1); +} + void (async () => { - for (const suiteDir of nonFlagsArgs.slice(3)) { + const suiteDirs = nonFlagsArgs.slice(2); // skip <exe> <js> + for (const suiteDir of suiteDirs) { await build(suiteDir); } })(); -const specFileSuffix = __filename.endsWith('.ts') ? 
'.spec.ts' : '.spec.js'; - -async function crawlFilesRecursively(dir: string): Promise<string[]> { - const subpathInfo = await Promise.all( - (await fs.promises.readdir(dir)).map(async d => { - const p = path.join(dir, d); - const stats = await fs.promises.stat(p); - return { - path: p, - isDirectory: stats.isDirectory(), - isFile: stats.isFile(), - }; - }) - ); - - const files = subpathInfo - .filter(i => i.isFile && i.path.endsWith(specFileSuffix)) - .map(i => i.path); - - return files.concat( - await subpathInfo - .filter(i => i.isDirectory) - .map(i => crawlFilesRecursively(i.path)) - .reduce(async (a, b) => (await a).concat(await b), Promise.resolve([])) - ); -} - async function build(suiteDir: string) { if (!fs.existsSync(suiteDir)) { console.error(`Could not find ${suiteDir}`); process.exit(1); } - // Crawl files and convert paths to be POSIX-style, relative to suiteDir. - let filesToEnumerate = (await crawlFilesRecursively(suiteDir)).sort(); + // Load hashes.json + const fileHashJsonPath = `${outDir}/hashes.json`; + let fileHashes: Record<string, string> = {}; + if (fs.existsSync(fileHashJsonPath)) { + const json = fs.readFileSync(fileHashJsonPath, { encoding: 'utf8' }); + fileHashes = JSON.parse(json); + } - // Filter out non-spec files - filesToEnumerate = filesToEnumerate.filter(f => f.endsWith(specFileSuffix)); + // Crawl files and convert paths to be POSIX-style, relative to suiteDir. 
+ const filesToEnumerate = glob(suiteDir, cacheFileSuffix) + .map(p => `${suiteDir}/${p}`) + .sort(); + const fileHasher = new SourceHasher(); const cacheablePathToTS = new Map<string, string>(); + const errors: Array<string> = []; - let fileIndex = 0; for (const file of filesToEnumerate) { - const pathWithoutExtension = file.substring(0, file.length - specFileSuffix.length); - const mod = await import(`../../../${pathWithoutExtension}.spec.js`); + const pathWithoutExtension = file.substring(0, file.length - 3); + const mod = await import(`../../../${pathWithoutExtension}.js`); if (mod.d?.serialize !== undefined) { const cacheable = mod.d as Cacheable<unknown>; @@ -158,41 +223,78 @@ async function build(suiteDir: string) { // Check for collisions const existing = cacheablePathToTS.get(cacheable.path); if (existing !== undefined) { - console.error( - `error: Cacheable '${cacheable.path}' is emitted by both: + errors.push( + `'${cacheable.path}' is emitted by both: '${existing}' and '${file}'` ); - process.exit(1); } cacheablePathToTS.set(cacheable.path, file); } - const outPath = `${outRootDir}/data/${cacheable.path}`; + const outPath = `${outDir}/${cacheable.path}`; + const fileHash = fileHasher.hashOf(file); - if (fileIndex++ % nth.n === nth.i) { - switch (mode) { - case 'emit': { + switch (mode) { + case 'emit': { + if (!forceRebuild && fileHashes[cacheable.path] === fileHash) { if (verbose) { - console.log(`building '${outPath}'`); + console.log(`'${outPath}' is up to date`); } - const data = await cacheable.build(); - const serialized = cacheable.serialize(data); - fs.mkdirSync(path.dirname(outPath), { recursive: true }); - fs.writeFileSync(outPath, serialized, 'binary'); - break; + continue; } - case 'list': { - console.log(outPath); - break; - } - case 'validate': { - // Only check currently performed is the collision detection above - break; + console.log(`building '${outPath}'`); + const data = await cacheable.build(); + const serialized = 
cacheable.serialize(data); + fs.mkdirSync(path.dirname(outPath), { recursive: true }); + fs.writeFileSync(outPath, serialized, 'binary'); + fileHashes[cacheable.path] = fileHash; + break; + } + case 'list': { + console.log(outPath); + break; + } + case 'validate': { + if (fileHashes[cacheable.path] !== fileHash) { + errors.push( + `'${outPath}' needs rebuilding. Generate with 'npx grunt run:generate-cache'` + ); + } else if (verbose) { + console.log(`'${outPath}' is up to date`); } } } } } + + // Check that there aren't stale files in the cache directory + for (const file of glob(outDir, '.bin')) { + if (cacheablePathToTS.get(file) === undefined) { + switch (mode) { + case 'emit': + fs.rmSync(file); + break; + case 'validate': + errors.push( + `cache file '${outDir}/${file}' is no longer generated. Remove with 'npx grunt run:generate-cache'` + ); + break; + } + } + } + + // Update hashes.json + if (mode === 'emit') { + const json = JSON.stringify(fileHashes, undefined, ' '); + fs.writeFileSync(fileHashJsonPath, json, { encoding: 'utf8' }); + } + + if (errors.length > 0) { + for (const error of errors) { + console.error(error); + } + process.exit(1); + } } |