summaryrefslogtreecommitdiffstats
path: root/dom/webgpu/tests/cts/vendor/src
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /dom/webgpu/tests/cts/vendor/src
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'dom/webgpu/tests/cts/vendor/src')
-rw-r--r--dom/webgpu/tests/cts/vendor/src/fs.rs331
-rw-r--r--dom/webgpu/tests/cts/vendor/src/main.rs565
-rw-r--r--dom/webgpu/tests/cts/vendor/src/path.rs23
-rw-r--r--dom/webgpu/tests/cts/vendor/src/process.rs85
4 files changed, 1004 insertions, 0 deletions
diff --git a/dom/webgpu/tests/cts/vendor/src/fs.rs b/dom/webgpu/tests/cts/vendor/src/fs.rs
new file mode 100644
index 0000000000..31697f9758
--- /dev/null
+++ b/dom/webgpu/tests/cts/vendor/src/fs.rs
@@ -0,0 +1,331 @@
+use std::{
+ ffi::OsStr,
+ fmt::{self, Display},
+ fs,
+ ops::Deref,
+ path::{Path, PathBuf, StripPrefixError},
+};
+
+use miette::{ensure, Context, IntoDiagnostic};
+
/// An absolute, canonicalized directory used as the base for [`Child`] paths.
///
/// The `nickname` is a short, human-readable label (e.g. `gecko`, `cts`) that
/// is substituted for the full path in log and error messages.
#[derive(Debug)]
pub(crate) struct FileRoot {
    nickname: &'static str,
    path: PathBuf,
}
+
+impl Eq for FileRoot {}
+
+impl PartialEq for FileRoot {
+ fn eq(&self, other: &Self) -> bool {
+ self.path == other.path
+ }
+}
+
+impl Ord for FileRoot {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.path.cmp(&other.path)
+ }
+}
+
+impl PartialOrd for FileRoot {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl FileRoot {
+ pub(crate) fn new<P>(nickname: &'static str, path: P) -> miette::Result<Self>
+ where
+ P: AsRef<Path>,
+ {
+ let path = path.as_ref();
+ Ok(Self {
+ nickname,
+ path: dunce::canonicalize(path)
+ .map_err(miette::Report::msg)
+ .wrap_err_with(|| format!("failed to canonicalize {path:?}"))?,
+ })
+ }
+
+ pub(crate) fn nickname(&self) -> &str {
+ self.nickname
+ }
+
+ pub(crate) fn try_child<P>(&self, path: P) -> Result<Child<'_>, StripPrefixError>
+ where
+ P: AsRef<Path>,
+ {
+ let path = path.as_ref();
+ if path.is_absolute() {
+ path.strip_prefix(&self.path)?;
+ }
+ Ok(Child {
+ root: self,
+ path: self.path.join(path),
+ })
+ }
+
+ #[track_caller]
+ pub(crate) fn child<P>(&self, path: P) -> Child<'_>
+ where
+ P: AsRef<Path>,
+ {
+ self.try_child(path)
+ .into_diagnostic()
+ .wrap_err("invariant violation: `path` is absolute and not a child of this file root")
+ .unwrap()
+ }
+
+ fn removed_dir<P>(&self, path: P) -> miette::Result<Child<'_>>
+ where
+ P: AsRef<Path>,
+ {
+ let path = path.as_ref();
+ let child = self.child(path);
+ if child.exists() {
+ log::info!("removing old contents of {child}…",);
+ log::trace!("removing directory {:?}", &*child);
+ fs::remove_dir_all(&*child)
+ .map_err(miette::Report::msg)
+ .wrap_err_with(|| format!("failed to remove old contents of {child}"))?;
+ }
+ Ok(child)
+ }
+
+ fn removed_file<P>(&self, path: P) -> miette::Result<Child<'_>>
+ where
+ P: AsRef<Path>,
+ {
+ let path = path.as_ref();
+ let child = self.child(path);
+ if child.exists() {
+ log::info!("removing old copy of {child}…",);
+ fs::remove_file(&*child)
+ .map_err(miette::Report::msg)
+ .wrap_err_with(|| format!("failed to remove old copy of {child}"))?;
+ }
+ Ok(child)
+ }
+
+ pub(crate) fn regen_dir<P>(
+ &self,
+ path: P,
+ gen: impl FnOnce(&Child<'_>) -> miette::Result<()>,
+ ) -> miette::Result<Child<'_>>
+ where
+ P: AsRef<Path>,
+ {
+ let child = self.removed_dir(path)?;
+ gen(&child)?;
+ ensure!(
+ child.is_dir(),
+ "{} was not regenerated for an unknown reason",
+ child,
+ );
+ Ok(child)
+ }
+
+ pub(crate) fn regen_file<P>(
+ &self,
+ path: P,
+ gen: impl FnOnce(&Child<'_>) -> miette::Result<()>,
+ ) -> miette::Result<Child<'_>>
+ where
+ P: AsRef<Path>,
+ {
+ let child = self.removed_file(path)?;
+ gen(&child)?;
+ ensure!(
+ child.is_file(),
+ "{} was not regenerated for an unknown reason",
+ child,
+ );
+ Ok(child)
+ }
+}
+
+impl Deref for FileRoot {
+ type Target = Path;
+
+ fn deref(&self) -> &Self::Target {
+ &self.path
+ }
+}
+
+impl AsRef<Path> for FileRoot {
+ fn as_ref(&self) -> &Path {
+ &self.path
+ }
+}
+
+impl AsRef<OsStr> for FileRoot {
+ fn as_ref(&self) -> &OsStr {
+ self.path.as_os_str()
+ }
+}
+
+impl Display for FileRoot {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self { nickname, path } = self;
+ write!(f, "`{}` (AKA `<{nickname}>`)", path.display())
+ }
+}
+
+#[derive(Debug, Eq, Ord, PartialEq, PartialOrd)]
+pub(crate) struct Child<'a> {
+ root: &'a FileRoot,
+ /// NOTE: This is always an absolute path that is a child of the `root`.
+ path: PathBuf,
+}
+
+impl Child<'_> {
+ pub(crate) fn relative_path(&self) -> &Path {
+ let Self { root, path } = self;
+ path.strip_prefix(root).unwrap()
+ }
+
+ pub(crate) fn try_child<P>(&self, path: P) -> Result<Self, StripPrefixError>
+ where
+ P: AsRef<Path>,
+ {
+ let child_path = path.as_ref();
+ let Self { root, path } = self;
+
+ if child_path.is_absolute() {
+ child_path.strip_prefix(path)?;
+ }
+ Ok(Child {
+ root,
+ path: path.join(child_path),
+ })
+ }
+
+ #[track_caller]
+ pub(crate) fn child<P>(&self, path: P) -> Self
+ where
+ P: AsRef<Path>,
+ {
+ self.try_child(path)
+ .into_diagnostic()
+ .wrap_err("invariant violation: `path` is absolute and not a child of this child")
+ .unwrap()
+ }
+}
+
+impl Deref for Child<'_> {
+ type Target = Path;
+
+ fn deref(&self) -> &Self::Target {
+ &self.path
+ }
+}
+
+impl AsRef<Path> for Child<'_> {
+ fn as_ref(&self) -> &Path {
+ &self.path
+ }
+}
+
+impl AsRef<OsStr> for Child<'_> {
+ fn as_ref(&self) -> &OsStr {
+ self.path.as_os_str()
+ }
+}
+
+impl Display for Child<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "`<{}>{}{}`",
+ self.root.nickname(),
+ std::path::MAIN_SEPARATOR,
+ self.relative_path().display()
+ )
+ }
+}
+
/// Asserts that `path` refers to an existing file, then passes it through
/// unchanged — handy for inline validation of paths handed to external
/// commands.
///
/// # Panics
///
/// Panics if `path` does not exist as a file.
pub(crate) fn existing_file<P>(path: P) -> P
where
    P: AsRef<Path>,
{
    {
        let p = path.as_ref();
        assert!(p.is_file(), "{p:?} does not exist as a file");
    }
    path
}
+
+pub(crate) fn copy_dir<P, Q>(source: P, dest: Q) -> miette::Result<()>
+where
+ P: Display + AsRef<Path>,
+ Q: Display + AsRef<Path>,
+{
+ log::debug!(
+ "copy-merging directories from {} into {}",
+ source.as_ref().display(),
+ dest.as_ref().display(),
+ );
+ ::dircpy::copy_dir(&source, &dest)
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to copy files from {source} to {dest}"))
+}
+
+pub(crate) fn read_to_string<P>(path: P) -> miette::Result<String>
+where
+ P: AsRef<Path>,
+{
+ fs::read_to_string(&path)
+ .into_diagnostic()
+ .wrap_err_with(|| {
+ format!(
+ "failed to read UTF-8 string from path {}",
+ path.as_ref().display()
+ )
+ })
+}
+
+pub(crate) fn copy<P1, P2>(from: P1, to: P2) -> miette::Result<u64>
+where
+ P1: AsRef<Path>,
+ P2: AsRef<Path>,
+{
+ fs::copy(&from, &to).into_diagnostic().wrap_err_with(|| {
+ format!(
+ "failed to copy {} to {}",
+ from.as_ref().display(),
+ to.as_ref().display()
+ )
+ })
+}
+
+pub(crate) fn create_dir_all<P>(path: P) -> miette::Result<()>
+where
+ P: AsRef<Path>,
+{
+ fs::create_dir_all(&path)
+ .into_diagnostic()
+ .wrap_err_with(|| {
+ format!(
+ "failed to create directories leading up to {}",
+ path.as_ref().display()
+ )
+ })
+}
+
+pub(crate) fn remove_file<P>(path: P) -> miette::Result<()>
+where
+ P: AsRef<Path>,
+{
+ fs::remove_file(&path)
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to remove file at path {}", path.as_ref().display()))
+}
+
+pub(crate) fn write<P, C>(path: P, contents: C) -> miette::Result<()>
+where
+ P: AsRef<Path>,
+ C: AsRef<[u8]>,
+{
+ fs::write(&path, &contents)
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to write to path {}", path.as_ref().display()))
+}
diff --git a/dom/webgpu/tests/cts/vendor/src/main.rs b/dom/webgpu/tests/cts/vendor/src/main.rs
new file mode 100644
index 0000000000..750b65c62e
--- /dev/null
+++ b/dom/webgpu/tests/cts/vendor/src/main.rs
@@ -0,0 +1,565 @@
+use std::{
+ collections::{BTreeMap, BTreeSet},
+ env::{current_dir, set_current_dir},
+ path::{Path, PathBuf},
+ process::ExitCode,
+};
+
+use clap::Parser;
+use lets_find_up::{find_up_with, FindUpKind, FindUpOptions};
+use miette::{bail, ensure, miette, Context, Diagnostic, IntoDiagnostic, Report, SourceSpan};
+use regex::Regex;
+
+use crate::{
+ fs::{copy_dir, create_dir_all, existing_file, remove_file, FileRoot},
+ path::join_path,
+ process::{which, EasyCommand},
+};
+
+mod fs;
+mod path;
+mod process;
+
/// Vendor WebGPU CTS tests from a local Git checkout of [our `gpuweb/cts` fork].
///
/// WPT tests are generated into `testing/web-platform/mozilla/tests/webgpu/`. If the set of tests
/// changes upstream, make sure that the generated output still matches up with test expectation
/// metadata in `testing/web-platform/mozilla/meta/webgpu/`.
///
/// [our `gpuweb/cts` fork]: https://github.com/mozilla/gpuweb-cts
// NOTE: the doc comments on this struct and its fields double as the
// command's `--help` text via `clap`'s derive macro — do not edit them
// without checking the rendered help output.
#[derive(Debug, Parser)]
struct CliArgs {
    /// A path to the top-level directory of your WebGPU CTS checkout.
    cts_checkout_path: PathBuf,
}
+
+fn main() -> ExitCode {
+ env_logger::builder()
+ .filter_level(log::LevelFilter::Info)
+ .parse_default_env()
+ .init();
+
+ let args = CliArgs::parse();
+
+ match run(args) {
+ Ok(()) => ExitCode::SUCCESS,
+ Err(e) => {
+ log::error!("{e:?}");
+ ExitCode::FAILURE
+ }
+ }
+}
+
/// The whole vendoring pipeline:
///
/// 1. Locate the Gecko checkout (Mercurial preferred, Git fallback) and the
///    CTS checkout (by its `.git` directory), and sanity-check that this tool
///    is being run from its own Cargo project directory.
/// 2. Copy the Git-tracked CTS files into `dom/webgpu/tests/cts/checkout/`
///    and record the `HEAD` commit in `checkout_commit.txt`.
/// 3. Run `npm ci` / `npm run wpt` / `npm run gen_wpt_cts_html` inside the
///    CTS checkout to produce `out-wpt/cts.https.html`.
/// 4. Split the generated variant list into one WPT test file per
///    `*.spec.ts` path, then copy the result into
///    `testing/web-platform/mozilla/tests/webgpu/`.
fn run(args: CliArgs) -> miette::Result<()> {
    let CliArgs { cts_checkout_path } = args;

    let orig_working_dir = current_dir().unwrap();

    let cts_dir = join_path(["dom", "webgpu", "tests", "cts"]);
    let cts_vendor_dir = join_path([&*cts_dir, "vendor".as_ref()]);
    let gecko_ckt = {
        let find_up_opts = || FindUpOptions {
            cwd: Path::new("."),
            kind: FindUpKind::Dir,
        };
        let find_up = |root_dir_name| {
            let err = || {
                miette!(
                    concat!(
                        "failed to find a Mercurial repository ({:?}) in any of current ",
                        "working directory and its parent directories",
                    ),
                    root_dir_name
                )
            };
            // `find_up_with` returns the matched entry itself; pop to get the
            // repository root directory that contains it.
            find_up_with(root_dir_name, find_up_opts())
                .map_err(Report::msg)
                .wrap_err_with(err)
                .and_then(|loc_opt| loc_opt.ok_or_else(err))
                .map(|mut dir| {
                    dir.pop();
                    dir
                })
        };
        // Prefer an `.hg` root; fall back to `.git`, surfacing both errors if
        // neither is found.
        let gecko_source_root = find_up(".hg").or_else(|e| match find_up(".git") {
            Ok(path) => {
                log::debug!("{e:?}");
                Ok(path)
            }
            Err(e2) => {
                log::warn!("{e:?}");
                log::warn!("{e2:?}");
                bail!("failed to find a Gecko repository root")
            }
        })?;

        let root = FileRoot::new("gecko", &gecko_source_root)?;
        log::info!("detected Gecko repository root at {root}");

        // The CWD must be exactly `dom/webgpu/tests/cts/vendor` inside the
        // detected Gecko root — i.e., this tool's own Cargo project.
        ensure!(
            root.try_child(&orig_working_dir)
                .map_or(false, |c| c.relative_path() == cts_vendor_dir),
            concat!(
                "It is expected to run this tool from the root of its Cargo project, ",
                "but this does not appear to have been done. Bailing."
            )
        );

        root
    };

    // NOTE(review): despite the name, this rebinding points at the *parent*
    // of the vendor dir (i.e., `dom/webgpu/tests/cts`), since the CWD was
    // just verified to be the vendor dir; `…join("checkout")` below therefore
    // lands at `dom/webgpu/tests/cts/checkout`.
    let cts_vendor_dir = gecko_ckt.child(orig_working_dir.parent().unwrap());

    let wpt_tests_dir = {
        let child = gecko_ckt.child(join_path(["testing", "web-platform", "mozilla", "tests"]));
        ensure!(
            child.is_dir(),
            "WPT tests dir ({child}) does not appear to exist"
        );
        child
    };

    let (cts_ckt_git_dir, cts_ckt) = {
        let failed_find_git_err = || {
            miette!(concat!(
                "failed to find a Git repository (`.git` directory) in the provided path ",
                "and all of its parent directories"
            ))
        };
        let git_dir = find_up_with(
            ".git",
            FindUpOptions {
                cwd: &cts_checkout_path,
                kind: FindUpKind::Dir,
            },
        )
        .map_err(Report::msg)
        .wrap_err_with(failed_find_git_err)?
        .ok_or_else(failed_find_git_err)?;

        let ckt = FileRoot::new("cts", git_dir.parent().unwrap())?;
        log::debug!("detected CTS checkout root at {ckt}");
        (git_dir, ckt)
    };

    let git_bin = which("git", "Git binary")?;
    let npm_bin = which("npm", "NPM binary")?;

    // XXX: It'd be nice to expose separate operations for copying in source and generating WPT
    // cases from the vendored copy. Checks like these really only matter when updating source.
    let ensure_no_child = |p1: &FileRoot, p2| {
        ensure!(
            p1.try_child(p2).is_err(),
            "{p1} is a child path of {p2}, which is not supported"
        );
        Ok(())
    };
    ensure_no_child(&cts_ckt, &gecko_ckt)?;
    ensure_no_child(&gecko_ckt, &cts_ckt)?;

    log::info!("making a vendored copy of checked-in files from {cts_ckt}…",);
    // Regenerate `checkout_commit.txt` last inside this closure so it only
    // exists when the vendored copy itself succeeded.
    gecko_ckt.regen_file(
        join_path([&*cts_dir, "checkout_commit.txt".as_ref()]),
        |checkout_commit_file| {
            // Refuse to vendor from a dirty working tree, so that the
            // recorded commit actually describes the copied files.
            let mut git_status_porcelain_cmd = EasyCommand::new(&git_bin, |cmd| {
                cmd.args(["status", "--porcelain"])
                    .envs([("GIT_DIR", &*cts_ckt_git_dir), ("GIT_WORK_TREE", &*cts_ckt)])
            });
            log::info!(
                " …ensuring the working tree and index are clean with {}…",
                git_status_porcelain_cmd
            );
            let git_status_porcelain_output = git_status_porcelain_cmd.just_stdout_utf8()?;
            ensure!(
                git_status_porcelain_output.is_empty(),
                concat!(
                    "expected a clean CTS working tree and index, ",
                    "but {}'s output was not empty; ",
                    "for reference, it was:\n\n{}",
                ),
                git_status_porcelain_cmd,
                git_status_porcelain_output,
            );

            gecko_ckt.regen_dir(&cts_vendor_dir.join("checkout"), |vendored_ckt_dir| {
                log::info!(" …copying files tracked by Git to {vendored_ckt_dir}…");
                let files_to_vendor = {
                    let mut git_ls_files_cmd = EasyCommand::new(&git_bin, |cmd| {
                        cmd.arg("ls-files").env("GIT_DIR", &cts_ckt_git_dir)
                    });
                    log::debug!(" …getting files to vendor from {git_ls_files_cmd}…");
                    let output = git_ls_files_cmd.just_stdout_utf8()?;
                    let mut files = output
                        .split_terminator('\n')
                        .map(PathBuf::from)
                        .collect::<BTreeSet<_>>();
                    log::trace!(" …files from {git_ls_files_cmd}: {files:#?}");

                    log::trace!(" …validating that files from Git repo still exist…");
                    let files_not_found = files
                        .iter()
                        .filter(|p| !cts_ckt.child(p).exists())
                        .collect::<Vec<_>>();
                    ensure!(
                        files_not_found.is_empty(),
                        concat!(
                            "the following files were returned by `git ls-files`, ",
                            "but do not exist on disk: {:#?}",
                        ),
                        files_not_found,
                    );

                    log::trace!(" …stripping files we actually don't want to vendor…");
                    let files_to_actually_not_vendor = [
                        // There's no reason to bring this over, and lots of reasons to not bring in
                        // security-sensitive content unless we have to.
                        "deploy_key.enc",
                    ]
                    .map(Path::new);
                    log::trace!(" …files we don't want: {files_to_actually_not_vendor:?}");
                    for path in files_to_actually_not_vendor {
                        // `remove` returning `false` means the exclusion list
                        // is stale relative to upstream — fail loudly.
                        ensure!(
                            files.remove(path),
                            concat!(
                                "failed to remove {} from list of files to vendor; ",
                                "does it still exist?"
                            ),
                            cts_ckt.child(path)
                        );
                    }
                    files
                };

                log::debug!(" …now doing the copying…");
                for path in files_to_vendor {
                    let vendor_from_path = cts_ckt.child(&path);
                    let vendor_to_path = vendored_ckt_dir.child(&path);
                    if let Some(parent) = vendor_to_path.parent() {
                        create_dir_all(vendored_ckt_dir.child(parent))?;
                    }
                    log::trace!(" …copying {vendor_from_path} to {vendor_to_path}…");
                    // `fs` here is this crate's `fs` module, not `std::fs`.
                    fs::copy(&vendor_from_path, &vendor_to_path)?;
                }

                Ok(())
            })?;

            log::info!(" …writing commit ref pointed to by `HEAD` to {checkout_commit_file}…");
            let mut git_rev_parse_head_cmd = EasyCommand::new(&git_bin, |cmd| {
                cmd.args(["rev-parse", "HEAD"])
                    .env("GIT_DIR", &cts_ckt_git_dir)
            });
            log::trace!(" …getting output of {git_rev_parse_head_cmd}…");
            fs::write(
                checkout_commit_file,
                git_rev_parse_head_cmd.just_stdout_utf8()?,
            )
            .wrap_err_with(|| format!("failed to write HEAD ref to {checkout_commit_file}"))
        },
    )?;

    // The `npm` steps below must run from inside the CTS checkout.
    set_current_dir(&*cts_ckt)
        .into_diagnostic()
        .wrap_err("failed to change working directory to CTS checkout")?;
    log::debug!("changed CWD to {cts_ckt}");

    let mut npm_ci_cmd = EasyCommand::new(&npm_bin, |cmd| cmd.arg("ci"));
    log::info!(
        "ensuring a clean {} directory with {npm_ci_cmd}…",
        cts_ckt.child("node_modules"),
    );
    npm_ci_cmd.spawn()?;

    let out_wpt_dir = cts_ckt.regen_dir("out-wpt", |out_wpt_dir| {
        let mut npm_run_wpt_cmd = EasyCommand::new(&npm_bin, |cmd| cmd.args(["run", "wpt"]));
        log::info!("generating WPT test cases into {out_wpt_dir} with {npm_run_wpt_cmd}…");
        npm_run_wpt_cmd.spawn()
    })?;

    let cts_https_html_path = out_wpt_dir.child("cts.https.html");
    log::info!("refining the output of {cts_https_html_path} with `npm run gen_wpt_cts_html …`…");
    EasyCommand::new(&npm_bin, |cmd| {
        cmd.args(["run", "gen_wpt_cts_html"]).arg(existing_file(
            &cts_ckt.child("tools/gen_wpt_cfg_unchunked.json"),
        ))
    })
    .spawn()?;

    {
        // The chunked variant is also emitted upstream, but unused here.
        let extra_cts_https_html_path = out_wpt_dir.child("cts-chunked2sec.https.html");
        log::info!("removing extraneous {extra_cts_https_html_path}…");
        remove_file(&*extra_cts_https_html_path)?;
    }

    log::info!("analyzing {cts_https_html_path}…");
    let cts_https_html_content = fs::read_to_string(&*cts_https_html_path)?;
    // Populated inside the block below:
    // - two copies of the HTML boilerplate (with/without a long-timeout
    //   `<meta>` tag), and
    // - the `(test path, variant line)` pairs parsed from the file.
    let cts_boilerplate_short_timeout;
    let cts_boilerplate_long_timeout;
    let cts_cases;
    {
        {
            // Everything before the first `<meta name=variant …>` line is
            // boilerplate shared by every generated test file.
            let (boilerplate, cases_start) = {
                let cases_start_idx = cts_https_html_content
                    .find("<meta name=variant")
                    .ok_or_else(|| miette!("no test cases found; this is unexpected!"))?;
                cts_https_html_content.split_at(cases_start_idx)
            };

            {
                if !boilerplate.is_empty() {
                    // A rich `miette` diagnostic that points at the offending
                    // character if the boilerplate doesn't end with a newline.
                    #[derive(Debug, Diagnostic, thiserror::Error)]
                    #[error("last character before test cases was not a newline; bug, or weird?")]
                    #[diagnostic(severity("warning"))]
                    struct Oops {
                        #[label(
                            "this character ({:?}) was expected to be a newline, so that {}",
                            source_code.chars().last().unwrap(),
                            "the test spec. following it is on its own line"
                        )]
                        span: SourceSpan,
                        #[source_code]
                        source_code: String,
                    }
                    ensure!(
                        boilerplate.ends_with('\n'),
                        Oops {
                            span: SourceSpan::from(0..boilerplate.len()),
                            source_code: cts_https_html_content,
                        }
                    );
                }

                // NOTE: Adding `_mozilla` is necessary because [that's how it's mounted][source].
                //
                // [source]: https://searchfox.org/mozilla-central/rev/cd2121e7d83af1b421c95e8c923db70e692dab5f/testing/web-platform/mozilla/README#1-4]
                log::info!(concat!(
                    " …fixing `script` paths in WPT boilerplate ",
                    "so they work as Mozilla-private WPT tests…"
                ));
                let expected_wpt_script_tag =
                    "<script type=module src=/webgpu/common/runtime/wpt.js></script>";
                ensure!(
                    boilerplate.contains(expected_wpt_script_tag),
                    concat!(
                        "failed to find expected `script` tag for `wpt.js` ",
                        "({:?}); did something change upstream?",
                    ),
                    expected_wpt_script_tag
                );
                let mut boilerplate = boilerplate.replacen(
                    expected_wpt_script_tag,
                    "<script type=module src=/_mozilla/webgpu/common/runtime/wpt.js></script>",
                    1,
                );

                // Snapshot the short-timeout variant before inserting the
                // long-timeout `<meta>` tag below.
                cts_boilerplate_short_timeout = boilerplate.clone();

                let timeout_insert_idx = {
                    let meta_charset_utf8 = "\n<meta charset=utf-8>\n";
                    let meta_charset_utf8_idx =
                        boilerplate.find(meta_charset_utf8).ok_or_else(|| {
                            miette!(
                                "could not find {:?} in document; did something change upstream?",
                                meta_charset_utf8
                            )
                        })?;
                    meta_charset_utf8_idx + meta_charset_utf8.len()
                };
                boilerplate.insert_str(
                    timeout_insert_idx,
                    concat!(
                        r#"<meta name="timeout" content="long">"#,
                        " <!-- TODO: narrow to only where it's needed, see ",
                        "https://bugzilla.mozilla.org/show_bug.cgi?id=1850537",
                        " -->\n"
                    ),
                );
                cts_boilerplate_long_timeout = boilerplate
            };

            log::info!(" …parsing test variants in {cts_https_html_path}…");
            // Collect *all* parse failures before bailing, so the log shows
            // every offending line at once.
            let mut parsing_failed = false;
            let meta_variant_regex = Regex::new(concat!(
                "^",
                "<meta name=variant content='\\?q=([^']*?):\\*'>",
                "$"
            ))
            .unwrap();
            cts_cases = cases_start
                .split_terminator('\n')
                .filter_map(|line| {
                    let path_and_meta = meta_variant_regex
                        .captures(line)
                        .map(|caps| (caps[1].to_owned(), line));
                    if path_and_meta.is_none() {
                        parsing_failed = true;
                        log::error!("line is not a test case: {line:?}");
                    }
                    path_and_meta
                })
                .collect::<Vec<_>>();
            ensure!(
                !parsing_failed,
                "one or more test case lines failed to parse, fix it and try again"
            );
        };
        log::trace!("\"original\" HTML boilerplate:\n\n{cts_boilerplate_short_timeout}");

        ensure!(
            !cts_cases.is_empty(),
            "no test cases found; this is unexpected!"
        );
        log::info!(" …found {} test cases", cts_cases.len());
    }

    cts_ckt.regen_dir(out_wpt_dir.join("cts"), |cts_tests_dir| {
        log::info!("re-distributing tests into single file per test path…");
        let mut failed_writing = false;
        let mut cts_cases_by_spec_file_dir = BTreeMap::<_, BTreeSet<_>>::new();
        for (path, meta) in cts_cases {
            let case_dir = {
                // Context: We want to mirror CTS upstream's `src/webgpu/**/*.spec.ts` paths as
                // entire WPT tests, with each subtest being a WPT variant. Here's a diagram of
                // a CTS path to explain why the logic below is correct:
                //
                // ```sh
                // webgpu:this,is,the,spec.ts,file,path:subtest_in_file:…
                // \____/ \___________________________/^\_____________/
                // test `*.spec.ts` file path | |
                // \__________________________________/| |
                // | | |
                // We want this… | …but not this. CTS upstream generates
                // | this too, but we don't want to divide
                // second ':' character here---/ here (yet).
                // ```
                let subtest_and_later_start_idx =
                    match path.match_indices(':').nth(1).map(|(idx, _s)| idx) {
                        Some(some) => some,
                        None => {
                            failed_writing = true;
                            log::error!(
                                concat!(
                                    "failed to split suite and test path segments ",
                                    "from CTS path `{}`"
                                ),
                                path
                            );
                            continue;
                        }
                    };
                let slashed =
                    path[..subtest_and_later_start_idx].replace(|c| matches!(c, ':' | ','), "/");
                cts_tests_dir.child(slashed)
            };
            if !cts_cases_by_spec_file_dir
                .entry(case_dir)
                .or_default()
                .insert(meta)
            {
                log::warn!("duplicate entry {meta:?} detected")
            }
        }

        struct WptEntry<'a> {
            cases: BTreeSet<&'a str>,
            timeout_length: TimeoutLength,
        }
        enum TimeoutLength {
            Short,
            Long,
        }
        let split_cases = {
            let mut split_cases = BTreeMap::new();
            // Each spec-file directory gets a single `cts.https.html` test
            // file; the assert guards against accidental double insertion.
            fn insert_with_default_name<'a>(
                split_cases: &mut BTreeMap<fs::Child<'a>, WptEntry<'a>>,
                spec_file_dir: fs::Child<'a>,
                cases: WptEntry<'a>,
            ) {
                let path = spec_file_dir.child("cts.https.html");
                assert!(split_cases.insert(path, cases).is_none());
            }
            {
                // `device_lost/destroy` is singled out for a short timeout;
                // every other test path gets the long-timeout boilerplate.
                let dld_path =
                    &cts_tests_dir.child("webgpu/api/validation/state/device_lost/destroy");
                let (spec_file_dir, cases) = cts_cases_by_spec_file_dir
                    .remove_entry(dld_path)
                    .expect("no `device_lost/destroy` tests found; did they move?");
                insert_with_default_name(
                    &mut split_cases,
                    spec_file_dir,
                    WptEntry {
                        cases,
                        timeout_length: TimeoutLength::Short,
                    },
                );
            }
            for (spec_file_dir, cases) in cts_cases_by_spec_file_dir {
                insert_with_default_name(
                    &mut split_cases,
                    spec_file_dir,
                    WptEntry {
                        cases,
                        timeout_length: TimeoutLength::Long,
                    },
                );
            }
            split_cases
        };

        for (path, entry) in split_cases {
            // NOTE(review): this `expect` message looks truncated ("…for ");
            // consider including the path in it.
            let dir = path.parent().expect("no parent found for ");
            match create_dir_all(&dir) {
                Ok(()) => log::trace!("made directory {}", dir.display()),
                Err(e) => {
                    failed_writing = true;
                    log::error!("{e:#}");
                    continue;
                }
            }
            let file_contents = {
                let WptEntry {
                    cases,
                    timeout_length,
                } = entry;
                let content = match timeout_length {
                    TimeoutLength::Short => &cts_boilerplate_short_timeout,
                    TimeoutLength::Long => &cts_boilerplate_long_timeout,
                };
                let mut content = content.as_bytes().to_vec();
                for meta in cases {
                    content.extend(meta.as_bytes());
                    content.extend(b"\n");
                }
                content
            };
            match fs::write(&path, &file_contents)
                .wrap_err_with(|| miette!("failed to write output to path {path:?}"))
            {
                Ok(()) => log::debug!(" …wrote {path}"),
                Err(e) => {
                    failed_writing = true;
                    log::error!("{e:#}");
                }
            }
        }
        ensure!(
            !failed_writing,
            "failed to write one or more WPT test files; see above output for more details"
        );
        log::debug!(" …finished writing new WPT test files!");

        log::info!(" …removing {cts_https_html_path}, now that it's been divided up…");
        remove_file(&cts_https_html_path)?;

        Ok(())
    })?;

    gecko_ckt.regen_dir(wpt_tests_dir.join("webgpu"), |wpt_webgpu_tests_dir| {
        log::info!("copying contents of {out_wpt_dir} to {wpt_webgpu_tests_dir}…");
        copy_dir(&out_wpt_dir, wpt_webgpu_tests_dir)
    })?;

    log::info!("All done! Now get your CTS _ON_! :)");

    Ok(())
}
diff --git a/dom/webgpu/tests/cts/vendor/src/path.rs b/dom/webgpu/tests/cts/vendor/src/path.rs
new file mode 100644
index 0000000000..aa5bae2e6d
--- /dev/null
+++ b/dom/webgpu/tests/cts/vendor/src/path.rs
@@ -0,0 +1,23 @@
+use std::path::{Path, PathBuf};
+
/// Construct a [`PathBuf`] from individual [`Path`] components.
///
/// This is a simple and legible way to construct `PathBuf`s that use the system's native path
/// separator character. (It's ugly to see paths mixing `\` and `/`.)
///
/// # Examples
///
/// ```rust
/// # use std::path::Path;
/// # use vendor_webgpu_cts::path::join_path;
/// assert_eq!(&*join_path(["foo", "bar", "baz"]), Path::new("foo/bar/baz"));
/// ```
pub(crate) fn join_path<I, P>(iter: I) -> PathBuf
where
    I: IntoIterator<Item = P>,
    P: AsRef<Path>,
{
    // `PathBuf: FromIterator<P: AsRef<Path>>` pushes each component in turn,
    // exactly like the `PathBuf::new` + `extend` it replaces.
    iter.into_iter().collect()
}
diff --git a/dom/webgpu/tests/cts/vendor/src/process.rs b/dom/webgpu/tests/cts/vendor/src/process.rs
new file mode 100644
index 0000000000..b36c3b953d
--- /dev/null
+++ b/dom/webgpu/tests/cts/vendor/src/process.rs
@@ -0,0 +1,85 @@
+use std::{
+ ffi::{OsStr, OsString},
+ fmt::{self, Display},
+ iter::once,
+ process::{Command, Output},
+};
+
+use format::lazy_format;
+use miette::{ensure, Context, IntoDiagnostic};
+
+pub(crate) fn which(name: &'static str, desc: &str) -> miette::Result<OsString> {
+ let found = ::which::which(name)
+ .into_diagnostic()
+ .wrap_err(lazy_format!("failed to find `{name}` executable"))?;
+ log::debug!("using {desc} from {}", found.display());
+ Ok(found.file_name().unwrap().to_owned())
+}
+
/// Thin wrapper around [`std::process::Command`] that adds logging, error
/// context on spawn/capture failures, and a shell-style `Display`
/// implementation for diagnostics.
pub(crate) struct EasyCommand {
    inner: Command,
}
+
+impl EasyCommand {
+ pub(crate) fn new<C>(cmd: C, f: impl FnOnce(&mut Command) -> &mut Command) -> Self
+ where
+ C: AsRef<OsStr>,
+ {
+ let mut cmd = Command::new(cmd);
+ f(&mut cmd);
+ Self { inner: cmd }
+ }
+
+ pub(crate) fn spawn(&mut self) -> miette::Result<()> {
+ log::debug!("spawning {self}…");
+ let status = self
+ .inner
+ .spawn()
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to spawn {self}"))?
+ .wait()
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to wait for exit code from {self}"))?;
+ log::debug!("{self} returned {:?}", status.code());
+ ensure!(status.success(), "{self} returned {:?}", status.code());
+ Ok(())
+ }
+
+ fn just_stdout(&mut self) -> miette::Result<Vec<u8>> {
+ log::debug!("getting `stdout` output of {self}");
+ let output = self
+ .inner
+ .output()
+ .into_diagnostic()
+ .wrap_err_with(|| format!("failed to execute `{self}`"))?;
+ let Output {
+ status,
+ stdout: _,
+ stderr,
+ } = &output;
+ log::debug!("{self} returned {:?}", status.code());
+ ensure!(
+ status.success(),
+ "{self} returned {:?}; full output: {output:#?}",
+ status.code(),
+ );
+ assert!(stderr.is_empty());
+ Ok(output.stdout)
+ }
+
+ pub(crate) fn just_stdout_utf8(&mut self) -> miette::Result<String> {
+ String::from_utf8(self.just_stdout()?)
+ .into_diagnostic()
+ .wrap_err_with(|| format!("output of {self} was not UTF-8 (!?)"))
+ }
+}
+
+impl Display for EasyCommand {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self { inner } = self;
+ let prog = inner.get_program().to_string_lossy();
+ let args = inner.get_args().map(|a| a.to_string_lossy());
+ let shell_words = ::shell_words::join(once(prog).chain(args));
+ write!(f, "`{shell_words}`")
+ }
+}