summaryrefslogtreecommitdiffstats
path: root/src/tools/tidy
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:19:03 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:19:03 +0000
commit64d98f8ee037282c35007b64c2649055c56af1db (patch)
tree5492bcf97fce41ee1c0b1cc2add283f3e66cdab0 /src/tools/tidy
parentAdding debian version 1.67.1+dfsg1-1. (diff)
downloadrustc-64d98f8ee037282c35007b64c2649055c56af1db.tar.xz
rustc-64d98f8ee037282c35007b64c2649055c56af1db.zip
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/tidy')
-rw-r--r--src/tools/tidy/Cargo.toml3
-rw-r--r--src/tools/tidy/src/debug_artifacts.rs8
-rw-r--r--src/tools/tidy/src/deps.rs175
-rw-r--r--src/tools/tidy/src/edition.rs5
-rw-r--r--src/tools/tidy/src/error_codes.rs383
-rw-r--r--src/tools/tidy/src/error_codes_check.rs320
-rw-r--r--src/tools/tidy/src/errors.rs77
-rw-r--r--src/tools/tidy/src/features.rs11
-rw-r--r--src/tools/tidy/src/features/version/tests.rs4
-rw-r--r--src/tools/tidy/src/lib.rs37
-rw-r--r--src/tools/tidy/src/main.rs54
-rw-r--r--src/tools/tidy/src/mir_opt_tests.rs4
-rw-r--r--src/tools/tidy/src/rustdoc_gui_tests.rs33
-rw-r--r--src/tools/tidy/src/style.rs112
-rw-r--r--src/tools/tidy/src/target_specific_tests.rs3
-rw-r--r--src/tools/tidy/src/tests_placement.rs15
-rw-r--r--src/tools/tidy/src/ui_tests.rs10
-rw-r--r--src/tools/tidy/src/unit_tests.rs2
-rw-r--r--src/tools/tidy/src/x_version.rs68
19 files changed, 756 insertions, 568 deletions
diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml
index 97d038da7..cdf1dd366 100644
--- a/src/tools/tidy/Cargo.toml
+++ b/src/tools/tidy/Cargo.toml
@@ -6,11 +6,14 @@ autobins = false
[dependencies]
cargo_metadata = "0.14"
+cargo-platform = "0.1.2"
regex = "1"
miropt-test-tools = { path = "../miropt-test-tools" }
lazy_static = "1"
walkdir = "2"
ignore = "0.4.18"
+semver = "1.0"
+termcolor = "1.1.3"
[[bin]]
name = "rust-tidy"
diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs
index 9880a32ad..0dd9c1e16 100644
--- a/src/tools/tidy/src/debug_artifacts.rs
+++ b/src/tools/tidy/src/debug_artifacts.rs
@@ -1,14 +1,12 @@
//! Tidy check to prevent creation of unnecessary debug artifacts while running tests.
use crate::walk::{filter_dirs, walk};
-use std::path::{Path, PathBuf};
+use std::path::Path;
const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in test";
-pub fn check(path: &Path, bad: &mut bool) {
- let test_dir: PathBuf = path.join("test");
-
- walk(&test_dir, &mut filter_dirs, &mut |entry, contents| {
+pub fn check(test_dir: &Path, bad: &mut bool) {
+ walk(test_dir, &mut filter_dirs, &mut |entry, contents| {
let filename = entry.path();
let is_rust = filename.extension().map_or(false, |ext| ext == "rs");
if !is_rust {
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index 296db9dfb..bc2edf634 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -1,7 +1,7 @@
//! Checks the licenses of third-party dependencies.
-use cargo_metadata::{Metadata, Package, PackageId, Resolve};
-use std::collections::{BTreeSet, HashSet};
+use cargo_metadata::{DepKindInfo, Metadata, Package, PackageId};
+use std::collections::HashSet;
use std::path::Path;
/// These are licenses that are allowed for all crates, including the runtime,
@@ -40,6 +40,8 @@ const EXCEPTIONS: &[(&str, &str)] = &[
("im-rc", "MPL-2.0+"), // cargo
("sized-chunks", "MPL-2.0+"), // cargo via im-rc
("bitmaps", "MPL-2.0+"), // cargo via im-rc
+ ("fiat-crypto", "MIT OR Apache-2.0 OR BSD-1-Clause"), // cargo via pasetors
+ ("subtle", "BSD-3-Clause"), // cargo via pasetors
("instant", "BSD-3-Clause"), // rustc_driver/tracing-subscriber/parking_lot
("snap", "BSD-3-Clause"), // rustc
("fluent-langneg", "Apache-2.0"), // rustc (fluent translations)
@@ -50,7 +52,6 @@ const EXCEPTIONS: &[(&str, &str)] = &[
("similar", "Apache-2.0"), // cargo (dev dependency)
("normalize-line-endings", "Apache-2.0"), // cargo (dev dependency)
("dissimilar", "Apache-2.0"), // rustdoc, rustc_lexer (few tests) via expect-test, (dev deps)
- ("subtle", "BSD-3-Clause"), // cargo
];
const EXCEPTIONS_CRANELIFT: &[(&str, &str)] = &[
@@ -58,6 +59,7 @@ const EXCEPTIONS_CRANELIFT: &[(&str, &str)] = &[
("cranelift-codegen", "Apache-2.0 WITH LLVM-exception"),
("cranelift-codegen-meta", "Apache-2.0 WITH LLVM-exception"),
("cranelift-codegen-shared", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-egraph", "Apache-2.0 WITH LLVM-exception"),
("cranelift-entity", "Apache-2.0 WITH LLVM-exception"),
("cranelift-frontend", "Apache-2.0 WITH LLVM-exception"),
("cranelift-isle", "Apache-2.0 WITH LLVM-exception"),
@@ -68,6 +70,7 @@ const EXCEPTIONS_CRANELIFT: &[(&str, &str)] = &[
("mach", "BSD-2-Clause"),
("regalloc2", "Apache-2.0 WITH LLVM-exception"),
("target-lexicon", "Apache-2.0 WITH LLVM-exception"),
+ ("wasmtime-jit-icache-coherence", "Apache-2.0 WITH LLVM-exception"),
];
const EXCEPTIONS_BOOTSTRAP: &[(&str, &str)] = &[
@@ -101,7 +104,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"chalk-engine",
"chalk-ir",
"chalk-solve",
- "chrono",
"convert_case", // dependency of derive_more
"compiler_builtins",
"cpufeatures",
@@ -120,11 +122,9 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"dlmalloc",
"either",
"ena",
- "env_logger",
"expect-test",
"fallible-iterator", // dependency of `thorin`
"fastrand",
- "filetime",
"fixedbitset",
"flate2",
"fluent-bundle",
@@ -138,13 +138,11 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"gsgdt",
"hashbrown",
"hermit-abi",
- "humantime",
"icu_list",
"icu_locid",
"icu_provider",
"icu_provider_adapters",
"icu_provider_macros",
- "if_chain",
"indexmap",
"instant",
"intl-memoizer",
@@ -166,8 +164,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"memmap2",
"memoffset",
"miniz_oxide",
- "num-integer",
- "num-traits",
"num_cpus",
"object",
"odht",
@@ -185,12 +181,10 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"proc-macro2",
"psm",
"punycode",
- "quick-error",
"quote",
"rand",
"rand_chacha",
"rand_core",
- "rand_hc",
"rand_xorshift",
"rand_xoshiro",
"redox_syscall",
@@ -213,14 +207,15 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"serde",
"serde_derive",
"serde_json",
- "sha-1",
+ "sha1",
"sha2",
"sharded-slab",
"smallvec",
"snap",
"stable_deref_trait",
"stacker",
- "subtle",
+ "static_assertions",
+ "subtle", // dependency of cargo (via pasetors)
"syn",
"synstructure",
"tempfile",
@@ -230,7 +225,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"thiserror-impl",
"thorin-dwp",
"thread_local",
- "time",
"tinystr",
"tinyvec",
"tinyvec_macros",
@@ -241,6 +235,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"tracing-log",
"tracing-subscriber",
"tracing-tree",
+ "twox-hash",
"type-map",
"typenum",
"unic-char-property",
@@ -291,6 +286,7 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
"cranelift-codegen",
"cranelift-codegen-meta",
"cranelift-codegen-shared",
+ "cranelift-egraph",
"cranelift-entity",
"cranelift-frontend",
"cranelift-isle",
@@ -299,6 +295,7 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
"cranelift-native",
"cranelift-object",
"crc32fast",
+ "fallible-iterator",
"fxhash",
"getrandom",
"gimli",
@@ -315,9 +312,11 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
"region",
"slice-group-by",
"smallvec",
+ "stable_deref_trait",
"target-lexicon",
"version_check",
"wasi",
+ "wasmtime-jit-icache-coherence",
"winapi",
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
@@ -468,75 +467,57 @@ fn check_permitted_dependencies(
restricted_dependency_crates: &[&'static str],
bad: &mut bool,
) {
+ let mut deps = HashSet::new();
+ for to_check in restricted_dependency_crates {
+ let to_check = pkg_from_name(metadata, to_check);
+ use cargo_platform::Cfg;
+ use std::str::FromStr;
+ // We don't expect the compiler to ever run on wasm32, so strip
+ // out those dependencies to avoid polluting the permitted list.
+ deps_of_filtered(metadata, &to_check.id, &mut deps, &|dep_kinds| {
+ dep_kinds.iter().any(|dep_kind| {
+ dep_kind
+ .target
+ .as_ref()
+ .map(|target| {
+ !target.matches(
+ "wasm32-unknown-unknown",
+ &[
+ Cfg::from_str("target_arch=\"wasm32\"").unwrap(),
+ Cfg::from_str("target_os=\"unknown\"").unwrap(),
+ ],
+ )
+ })
+ .unwrap_or(true)
+ })
+ });
+ }
+
// Check that the PERMITTED_DEPENDENCIES does not have unused entries.
- for name in permitted_dependencies {
- if !metadata.packages.iter().any(|p| p.name == *name) {
+ for permitted in permitted_dependencies {
+ if !deps.iter().any(|dep_id| &pkg_from_id(metadata, dep_id).name == permitted) {
tidy_error!(
bad,
- "could not find allowed package `{}`\n\
+ "could not find allowed package `{permitted}`\n\
Remove from PERMITTED_DEPENDENCIES list if it is no longer used.",
- name
);
}
}
- // Get the list in a convenient form.
- let permitted_dependencies: HashSet<_> = permitted_dependencies.iter().cloned().collect();
- // Check dependencies.
- let mut visited = BTreeSet::new();
- let mut unapproved = BTreeSet::new();
- for &krate in restricted_dependency_crates.iter() {
- let pkg = pkg_from_name(metadata, krate);
- let mut bad =
- check_crate_dependencies(&permitted_dependencies, metadata, &mut visited, pkg);
- unapproved.append(&mut bad);
- }
+ // Get in a convenient form.
+ let permitted_dependencies: HashSet<_> = permitted_dependencies.iter().cloned().collect();
- if !unapproved.is_empty() {
- tidy_error!(bad, "Dependencies for {} not explicitly permitted:", descr);
- for dep in unapproved {
- println!("* {dep}");
+ for dep in deps {
+ let dep = pkg_from_id(metadata, dep);
+ // If this path is in-tree, we don't require it to be explicitly permitted.
+ if dep.source.is_some() {
+ if !permitted_dependencies.contains(dep.name.as_str()) {
+ tidy_error!(bad, "Dependency for {descr} not explicitly permitted: {}", dep.id);
+ }
}
}
}
-/// Checks the dependencies of the given crate from the given cargo metadata to see if they are on
-/// the list of permitted dependencies. Returns a list of disallowed dependencies.
-fn check_crate_dependencies<'a>(
- permitted_dependencies: &'a HashSet<&'static str>,
- metadata: &'a Metadata,
- visited: &mut BTreeSet<&'a PackageId>,
- krate: &'a Package,
-) -> BTreeSet<&'a PackageId> {
- // This will contain bad deps.
- let mut unapproved = BTreeSet::new();
-
- // Check if we have already visited this crate.
- if visited.contains(&krate.id) {
- return unapproved;
- }
-
- visited.insert(&krate.id);
-
- // If this path is in-tree, we don't require it to be explicitly permitted.
- if krate.source.is_some() {
- // If this dependency is not on `PERMITTED_DEPENDENCIES`, add to bad set.
- if !permitted_dependencies.contains(krate.name.as_str()) {
- unapproved.insert(&krate.id);
- }
- }
-
- // Do a DFS in the crate graph.
- let to_check = deps_of(metadata, &krate.id);
-
- for dep in to_check {
- let mut bad = check_crate_dependencies(permitted_dependencies, metadata, visited, dep);
- unapproved.append(&mut bad);
- }
-
- unapproved
-}
-
/// Prevents multiple versions of some expensive crates.
fn check_crate_duplicate(
metadata: &Metadata,
@@ -571,24 +552,6 @@ fn check_crate_duplicate(
}
}
-/// Returns a list of dependencies for the given package.
-fn deps_of<'a>(metadata: &'a Metadata, pkg_id: &'a PackageId) -> Vec<&'a Package> {
- let resolve = metadata.resolve.as_ref().unwrap();
- let node = resolve
- .nodes
- .iter()
- .find(|n| &n.id == pkg_id)
- .unwrap_or_else(|| panic!("could not find `{pkg_id}` in resolve"));
- node.deps
- .iter()
- .map(|dep| {
- metadata.packages.iter().find(|pkg| pkg.id == dep.pkg).unwrap_or_else(|| {
- panic!("could not find dep `{}` for pkg `{}` in resolve", dep.pkg, pkg_id)
- })
- })
- .collect()
-}
-
/// Finds a package with the given name.
fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package {
let mut i = metadata.packages.iter().filter(|p| p.name == name);
@@ -598,41 +561,57 @@ fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package
result
}
+fn pkg_from_id<'a>(metadata: &'a Metadata, id: &PackageId) -> &'a Package {
+ metadata.packages.iter().find(|p| &p.id == id).unwrap()
+}
+
/// Finds all the packages that are in the rust runtime.
fn compute_runtime_crates<'a>(metadata: &'a Metadata) -> HashSet<&'a PackageId> {
- let resolve = metadata.resolve.as_ref().unwrap();
let mut result = HashSet::new();
for name in RUNTIME_CRATES {
let id = &pkg_from_name(metadata, name).id;
- normal_deps_of_r(resolve, id, &mut result);
+ deps_of_filtered(metadata, id, &mut result, &|_| true);
}
result
}
-/// Recursively find all normal dependencies.
-fn normal_deps_of_r<'a>(
- resolve: &'a Resolve,
+/// Recursively find all dependencies.
+fn deps_of_filtered<'a>(
+ metadata: &'a Metadata,
pkg_id: &'a PackageId,
result: &mut HashSet<&'a PackageId>,
+ filter: &dyn Fn(&[DepKindInfo]) -> bool,
) {
if !result.insert(pkg_id) {
return;
}
- let node = resolve
+ let node = metadata
+ .resolve
+ .as_ref()
+ .unwrap()
.nodes
.iter()
.find(|n| &n.id == pkg_id)
.unwrap_or_else(|| panic!("could not find `{pkg_id}` in resolve"));
for dep in &node.deps {
- normal_deps_of_r(resolve, &dep.pkg, result);
+ if !filter(&dep.dep_kinds) {
+ continue;
+ }
+ deps_of_filtered(metadata, &dep.pkg, result, filter);
}
}
+fn direct_deps_of<'a>(metadata: &'a Metadata, pkg_id: &'a PackageId) -> Vec<&'a Package> {
+ let resolve = metadata.resolve.as_ref().unwrap();
+ let node = resolve.nodes.iter().find(|n| &n.id == pkg_id).unwrap();
+ node.deps.iter().map(|dep| pkg_from_id(metadata, &dep.pkg)).collect()
+}
+
fn check_rustfix(metadata: &Metadata, bad: &mut bool) {
let cargo = pkg_from_name(metadata, "cargo");
let compiletest = pkg_from_name(metadata, "compiletest");
- let cargo_deps = deps_of(metadata, &cargo.id);
- let compiletest_deps = deps_of(metadata, &compiletest.id);
+ let cargo_deps = direct_deps_of(metadata, &cargo.id);
+ let compiletest_deps = direct_deps_of(metadata, &compiletest.id);
let cargo_rustfix = cargo_deps.iter().find(|p| p.name == "rustfix").unwrap();
let compiletest_rustfix = compiletest_deps.iter().find(|p| p.name == "rustfix").unwrap();
if cargo_rustfix.version != compiletest_rustfix.version {
diff --git a/src/tools/tidy/src/edition.rs b/src/tools/tidy/src/edition.rs
index 8a7c4460d..8172e3d29 100644
--- a/src/tools/tidy/src/edition.rs
+++ b/src/tools/tidy/src/edition.rs
@@ -11,7 +11,10 @@ fn is_edition_2021(mut line: &str) -> bool {
pub fn check(path: &Path, bad: &mut bool) {
walk(
path,
- &mut |path| filter_dirs(path) || path.ends_with("src/test"),
+ &mut |path| {
+ filter_dirs(path)
+ || (path.ends_with("tests") && path.join("COMPILER_TESTS.md").exists())
+ },
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap();
diff --git a/src/tools/tidy/src/error_codes.rs b/src/tools/tidy/src/error_codes.rs
new file mode 100644
index 000000000..5b84b51a0
--- /dev/null
+++ b/src/tools/tidy/src/error_codes.rs
@@ -0,0 +1,383 @@
+//! Tidy check to ensure error codes are properly documented and tested.
+//!
+//! Overview of check:
+//!
+//! 1. We create a list of error codes used by the compiler. Error codes are extracted from `compiler/rustc_error_codes/src/error_codes.rs`.
+//!
+//! 2. We check that the error code has a long-form explanation in `compiler/rustc_error_codes/src/error_codes/`.
+//! - The explanation is expected to contain a `doctest` that fails with the correct error code. (`EXEMPT_FROM_DOCTEST` *currently* bypasses this check)
+//! - Note that other stylistic conventions for markdown files are checked in the `style.rs` tidy check.
+//!
+//! 3. We check that the error code has a UI test in `tests/ui/error-codes/`.
+//! - We ensure that there is both a `Exxxx.rs` file and a corresponding `Exxxx.stderr` file.
+//! - We also ensure that the error code is used in the tests.
+//! - *Currently*, it is possible to opt-out of this check with the `EXEMPTED_FROM_TEST` constant.
+//!
+//! 4. We check that the error code is actually emitted by the compiler.
+//! - This is done by searching `compiler/` with a regex.
+
+use std::{ffi::OsStr, fs, path::Path};
+
+use regex::Regex;
+
+use crate::walk::{filter_dirs, walk, walk_many};
+
+const ERROR_CODES_PATH: &str = "compiler/rustc_error_codes/src/error_codes.rs";
+const ERROR_DOCS_PATH: &str = "compiler/rustc_error_codes/src/error_codes/";
+const ERROR_TESTS_PATH: &str = "tests/ui/error-codes/";
+
+// Error codes that (for some reason) can't have a doctest in their explanation. Error codes are still expected to provide a code example, even if untested.
+const IGNORE_DOCTEST_CHECK: &[&str] = &["E0464", "E0570", "E0601", "E0602", "E0640", "E0717"];
+
+// Error codes that don't yet have a UI test. This list will eventually be removed.
+const IGNORE_UI_TEST_CHECK: &[&str] =
+ &["E0461", "E0465", "E0476", "E0514", "E0523", "E0554", "E0640", "E0717", "E0729", "E0789"];
+
+macro_rules! verbose_print {
+ ($verbose:expr, $($fmt:tt)*) => {
+ if $verbose {
+ println!("{}", format_args!($($fmt)*));
+ }
+ };
+}
+
+pub fn check(root_path: &Path, search_paths: &[&Path], verbose: bool, bad: &mut bool) {
+ let mut errors = Vec::new();
+
+ // Stage 1: create list
+ let error_codes = extract_error_codes(root_path, &mut errors, verbose);
+ println!("Found {} error codes", error_codes.len());
+ println!("Highest error code: `{}`", error_codes.iter().max().unwrap());
+
+ // Stage 2: check list has docs
+ let no_longer_emitted = check_error_codes_docs(root_path, &error_codes, &mut errors, verbose);
+
+ // Stage 3: check list has UI tests
+ check_error_codes_tests(root_path, &error_codes, &mut errors, verbose, &no_longer_emitted);
+
+ // Stage 4: check list is emitted by compiler
+ check_error_codes_used(search_paths, &error_codes, &mut errors, &no_longer_emitted, verbose);
+
+ // Print any errors.
+ for error in errors {
+ tidy_error!(bad, "{}", error);
+ }
+}
+
+/// Stage 1: Parses a list of error codes from `error_codes.rs`.
+fn extract_error_codes(root_path: &Path, errors: &mut Vec<String>, verbose: bool) -> Vec<String> {
+ let path = root_path.join(Path::new(ERROR_CODES_PATH));
+ let file =
+ fs::read_to_string(&path).unwrap_or_else(|e| panic!("failed to read `{path:?}`: {e}"));
+
+ let mut error_codes = Vec::new();
+ let mut reached_undocumented_codes = false;
+
+ for line in file.lines() {
+ let line = line.trim();
+
+ if !reached_undocumented_codes && line.starts_with('E') {
+ let split_line = line.split_once(':');
+
+ // Extract the error code from the line, emitting a fatal error if it is not in a correct format.
+ let err_code = if let Some(err_code) = split_line {
+ err_code.0.to_owned()
+ } else {
+ errors.push(format!(
+ "Expected a line with the format `Exxxx: include_str!(\"..\")`, but got \"{}\" \
+ without a `:` delimiter",
+ line,
+ ));
+ continue;
+ };
+
+ // If this is a duplicate of another error code, emit a fatal error.
+ if error_codes.contains(&err_code) {
+ errors.push(format!("Found duplicate error code: `{}`", err_code));
+ continue;
+ }
+
+ // Ensure that the line references the correct markdown file.
+ let expected_filename = format!(" include_str!(\"./error_codes/{}.md\"),", err_code);
+ if expected_filename != split_line.unwrap().1 {
+ errors.push(format!(
+ "Error code `{}` expected to reference docs with `{}` but instead found `{}` in \
+ `compiler/rustc_error_codes/src/error_codes.rs`",
+ err_code,
+ expected_filename,
+ split_line.unwrap().1,
+ ));
+ continue;
+ }
+
+ error_codes.push(err_code);
+ } else if reached_undocumented_codes && line.starts_with('E') {
+ let err_code = match line.split_once(',') {
+ None => line,
+ Some((err_code, _)) => err_code,
+ }
+ .to_string();
+
+ verbose_print!(verbose, "warning: Error code `{}` is undocumented.", err_code);
+
+ if error_codes.contains(&err_code) {
+ errors.push(format!("Found duplicate error code: `{}`", err_code));
+ }
+
+ error_codes.push(err_code);
+ } else if line == ";" {
+ // Once we reach the undocumented error codes, adapt to different syntax.
+ reached_undocumented_codes = true;
+ }
+ }
+
+ error_codes
+}
+
+/// Stage 2: Checks that long-form error code explanations exist and have doctests.
+fn check_error_codes_docs(
+ root_path: &Path,
+ error_codes: &[String],
+ errors: &mut Vec<String>,
+ verbose: bool,
+) -> Vec<String> {
+ let docs_path = root_path.join(Path::new(ERROR_DOCS_PATH));
+
+ let mut no_longer_emitted_codes = Vec::new();
+
+ walk(&docs_path, &mut |_| false, &mut |entry, contents| {
+ let path = entry.path();
+
+ // Error if the file isn't markdown.
+ if path.extension() != Some(OsStr::new("md")) {
+ errors.push(format!(
+ "Found unexpected non-markdown file in error code docs directory: {}",
+ path.display()
+ ));
+ return;
+ }
+
+ // Make sure that the file is referenced in `error_codes.rs`
+ let filename = path.file_name().unwrap().to_str().unwrap().split_once('.');
+ let err_code = filename.unwrap().0; // `unwrap` is ok because we know the filename is in the correct format.
+
+ if error_codes.iter().all(|e| e != err_code) {
+ errors.push(format!(
+ "Found valid file `{}` in error code docs directory without corresponding \
+ entry in `error_code.rs`",
+ path.display()
+ ));
+ return;
+ }
+
+ let (found_code_example, found_proper_doctest, emit_ignore_warning, no_longer_emitted) =
+ check_explanation_has_doctest(&contents, &err_code);
+
+ if emit_ignore_warning {
+ verbose_print!(
+ verbose,
+ "warning: Error code `{err_code}` uses the ignore header. This should not be used, add the error code to the \
+ `IGNORE_DOCTEST_CHECK` constant instead."
+ );
+ }
+
+ if no_longer_emitted {
+ no_longer_emitted_codes.push(err_code.to_owned());
+ }
+
+ if !found_code_example {
+ verbose_print!(
+ verbose,
+ "warning: Error code `{err_code}` doesn't have a code example, all error codes are expected to have one \
+ (even if untested)."
+ );
+ return;
+ }
+
+ let test_ignored = IGNORE_DOCTEST_CHECK.contains(&&err_code);
+
+ // Check that the explanation has a doctest, and if it shouldn't, that it doesn't
+ if !found_proper_doctest && !test_ignored {
+ errors.push(format!(
+ "`{}` doesn't use its own error code in compile_fail example",
+ path.display(),
+ ));
+ } else if found_proper_doctest && test_ignored {
+ errors.push(format!(
+ "`{}` has a compile_fail doctest with its own error code, it shouldn't \
+ be listed in `IGNORE_DOCTEST_CHECK`",
+ path.display(),
+ ));
+ }
+ });
+
+ no_longer_emitted_codes
+}
+
+/// This function returns a tuple indicating whether the provided explanation:
+/// a) has a code example, tested or not.
+/// b) has a valid doctest
+fn check_explanation_has_doctest(explanation: &str, err_code: &str) -> (bool, bool, bool, bool) {
+ let mut found_code_example = false;
+ let mut found_proper_doctest = false;
+
+ let mut emit_ignore_warning = false;
+ let mut no_longer_emitted = false;
+
+ for line in explanation.lines() {
+ let line = line.trim();
+
+ if line.starts_with("```") {
+ found_code_example = true;
+
+ // Check for the `rustdoc` doctest headers.
+ if line.contains("compile_fail") && line.contains(err_code) {
+ found_proper_doctest = true;
+ }
+
+ if line.contains("ignore") {
+ emit_ignore_warning = true;
+ found_proper_doctest = true;
+ }
+ } else if line
+ .starts_with("#### Note: this error code is no longer emitted by the compiler")
+ {
+ no_longer_emitted = true;
+ found_code_example = true;
+ found_proper_doctest = true;
+ }
+ }
+
+ (found_code_example, found_proper_doctest, emit_ignore_warning, no_longer_emitted)
+}
+
+// Stage 3: Checks that each error code has a UI test in the correct directory
+fn check_error_codes_tests(
+ root_path: &Path,
+ error_codes: &[String],
+ errors: &mut Vec<String>,
+ verbose: bool,
+ no_longer_emitted: &[String],
+) {
+ let tests_path = root_path.join(Path::new(ERROR_TESTS_PATH));
+
+ for code in error_codes {
+ let test_path = tests_path.join(format!("{}.stderr", code));
+
+ if !test_path.exists() && !IGNORE_UI_TEST_CHECK.contains(&code.as_str()) {
+ verbose_print!(
+ verbose,
+ "warning: Error code `{code}` needs to have at least one UI test in the `tests/error-codes/` directory`!"
+ );
+ continue;
+ }
+ if IGNORE_UI_TEST_CHECK.contains(&code.as_str()) {
+ if test_path.exists() {
+ errors.push(format!(
+ "Error code `{code}` has a UI test in `tests/ui/error-codes/{code}.rs`, it shouldn't be listed in `EXEMPTED_FROM_TEST`!"
+ ));
+ }
+ continue;
+ }
+
+ let file = match fs::read_to_string(&test_path) {
+ Ok(file) => file,
+ Err(err) => {
+ verbose_print!(
+ verbose,
+ "warning: Failed to read UI test file (`{}`) for `{code}` but the file exists. The test is assumed to work:\n{err}",
+ test_path.display()
+ );
+ continue;
+ }
+ };
+
+ if no_longer_emitted.contains(code) {
+ // UI tests *can't* contain error codes that are no longer emitted.
+ continue;
+ }
+
+ let mut found_code = false;
+
+ for line in file.lines() {
+ let s = line.trim();
+ // Assuming the line starts with `error[E`, we can substring the error code out.
+ if s.starts_with("error[E") {
+ if &s[6..11] == code {
+ found_code = true;
+ break;
+ }
+ };
+ }
+
+ if !found_code {
+ verbose_print!(
+ verbose,
+ "warning: Error code {code}`` has a UI test file, but doesn't contain its own error code!"
+ );
+ }
+ }
+}
+
+/// Stage 4: Search `compiler/` and ensure that every error code is actually used by the compiler and that no undocumented error codes exist.
+fn check_error_codes_used(
+ search_paths: &[&Path],
+ error_codes: &[String],
+ errors: &mut Vec<String>,
+ no_longer_emitted: &[String],
+ verbose: bool,
+) {
+ // We want error codes which match the following cases:
+ //
+ // * foo(a, E0111, a)
+ // * foo(a, E0111)
+ // * foo(E0111, a)
+ // * #[error = "E0111"]
+ let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
+
+ let mut found_codes = Vec::new();
+
+ walk_many(search_paths, &mut filter_dirs, &mut |entry, contents| {
+ let path = entry.path();
+
+ // Return early if we aren't looking at a source file.
+ if path.extension() != Some(OsStr::new("rs")) {
+ return;
+ }
+
+ for line in contents.lines() {
+ // We want to avoid parsing error codes in comments.
+ if line.trim_start().starts_with("//") {
+ continue;
+ }
+
+ for cap in regex.captures_iter(line) {
+ if let Some(error_code) = cap.get(1) {
+ let error_code = error_code.as_str().to_owned();
+
+ if !error_codes.contains(&error_code) {
+ // This error code isn't properly defined, we must error.
+ errors.push(format!("Error code `{}` is used in the compiler but not defined and documented in `compiler/rustc_error_codes/src/error_codes.rs`.", error_code));
+ continue;
+ }
+
+ // This error code can now be marked as used.
+ found_codes.push(error_code);
+ }
+ }
+ }
+ });
+
+ for code in error_codes {
+ if !found_codes.contains(code) && !no_longer_emitted.contains(code) {
+ errors.push(format!("Error code `{code}` exists, but is not emitted by the compiler!"))
+ }
+
+ if found_codes.contains(code) && no_longer_emitted.contains(code) {
+ verbose_print!(
+ verbose,
+ "warning: Error code `{code}` is used when it's marked as \"no longer emitted\""
+ );
+ }
+ }
+}
diff --git a/src/tools/tidy/src/error_codes_check.rs b/src/tools/tidy/src/error_codes_check.rs
deleted file mode 100644
index 610e322e1..000000000
--- a/src/tools/tidy/src/error_codes_check.rs
+++ /dev/null
@@ -1,320 +0,0 @@
-//! Checks that all error codes have at least one test to prevent having error
-//! codes that are silently not thrown by the compiler anymore.
-
-use crate::walk::{filter_dirs, walk};
-use std::collections::{HashMap, HashSet};
-use std::ffi::OsStr;
-use std::fs::read_to_string;
-use std::path::Path;
-
-use regex::Regex;
-
-// A few of those error codes can't be tested but all the others can and *should* be tested!
-const EXEMPTED_FROM_TEST: &[&str] = &[
- "E0313", "E0377", "E0461", "E0462", "E0465", "E0476", "E0490", "E0514", "E0519", "E0523",
- "E0554", "E0640", "E0717", "E0729", "E0789",
-];
-
-// Some error codes don't have any tests apparently...
-const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0464", "E0570", "E0601", "E0602", "E0729"];
-
-// If the file path contains any of these, we don't want to try to extract error codes from it.
-//
-// We need to declare each path in the windows version (with backslash).
-const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
- &["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
-
-#[derive(Default, Debug)]
-struct ErrorCodeStatus {
- has_test: bool,
- has_explanation: bool,
- is_used: bool,
-}
-
-fn check_error_code_explanation(
- f: &str,
- error_codes: &mut HashMap<String, ErrorCodeStatus>,
- err_code: String,
-) -> bool {
- let mut invalid_compile_fail_format = false;
- let mut found_error_code = false;
-
- for line in f.lines() {
- let s = line.trim();
- if s.starts_with("```") {
- if s.contains("compile_fail") && s.contains('E') {
- if !found_error_code {
- error_codes.get_mut(&err_code).map(|x| x.has_test = true);
- found_error_code = true;
- }
- } else if s.contains("compile-fail") {
- invalid_compile_fail_format = true;
- }
- } else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
- if !found_error_code {
- error_codes.get_mut(&err_code).map(|x| x.has_test = true);
- found_error_code = true;
- }
- }
- }
- invalid_compile_fail_format
-}
-
-fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
- let mut ignore_found = false;
-
- for line in f.lines() {
- let s = line.trim();
- if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
- return true;
- }
- if s.starts_with("```") {
- if s.contains("compile_fail") && s.contains(err_code) {
- return true;
- } else if s.contains("ignore") {
- // It's very likely that we can't actually make it fail compilation...
- ignore_found = true;
- }
- }
- }
- ignore_found
-}
-
-macro_rules! some_or_continue {
- ($e:expr) => {
- match $e {
- Some(e) => e,
- None => continue,
- }
- };
-}
-
-fn extract_error_codes(
- f: &str,
- error_codes: &mut HashMap<String, ErrorCodeStatus>,
- path: &Path,
- errors: &mut Vec<String>,
-) {
- let mut reached_no_explanation = false;
-
- for line in f.lines() {
- let s = line.trim();
- if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
- let err_code = s
- .split_once(':')
- .expect(
- format!(
- "Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
- without a `:` delimiter",
- s,
- )
- .as_str(),
- )
- .0
- .to_owned();
- error_codes.entry(err_code.clone()).or_default().has_explanation = true;
-
- // Now we extract the tests from the markdown file!
- let md_file_name = match s.split_once("include_str!(\"") {
- None => continue,
- Some((_, md)) => match md.split_once("\")") {
- None => continue,
- Some((file_name, _)) => file_name,
- },
- };
- let path = some_or_continue!(path.parent())
- .join(md_file_name)
- .canonicalize()
- .expect("failed to canonicalize error explanation file path");
- match read_to_string(&path) {
- Ok(content) => {
- let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
- if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
- errors.push(format!(
- "`{}` doesn't use its own error code in compile_fail example",
- path.display(),
- ));
- } else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
- errors.push(format!(
- "`{}` has a compile_fail example with its own error code, it shouldn't \
- be listed in IGNORE_EXPLANATION_CHECK!",
- path.display(),
- ));
- }
- if check_error_code_explanation(&content, error_codes, err_code) {
- errors.push(format!(
- "`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
- path.display(),
- ));
- }
- }
- Err(e) => {
- eprintln!("Couldn't read `{}`: {}", path.display(), e);
- }
- }
- } else if reached_no_explanation && s.starts_with('E') {
- let err_code = match s.split_once(',') {
- None => s,
- Some((err_code, _)) => err_code,
- }
- .to_string();
- if !error_codes.contains_key(&err_code) {
- // this check should *never* fail!
- error_codes.insert(err_code, ErrorCodeStatus::default());
- }
- } else if s == ";" {
- reached_no_explanation = true;
- }
- }
-}
-
-fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
- for line in f.lines() {
- let s = line.trim();
- if s.starts_with("error[E") || s.starts_with("warning[E") {
- let err_code = match s.split_once(']') {
- None => continue,
- Some((err_code, _)) => match err_code.split_once('[') {
- None => continue,
- Some((_, err_code)) => err_code,
- },
- };
- error_codes.entry(err_code.to_owned()).or_default().has_test = true;
- }
- }
-}
-
-fn extract_error_codes_from_source(
- f: &str,
- error_codes: &mut HashMap<String, ErrorCodeStatus>,
- regex: &Regex,
-) {
- for line in f.lines() {
- if line.trim_start().starts_with("//") {
- continue;
- }
- for cap in regex.captures_iter(line) {
- if let Some(error_code) = cap.get(1) {
- error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
- }
- }
- }
-}
-
-pub fn check(paths: &[&Path], bad: &mut bool) {
- let mut errors = Vec::new();
- let mut found_explanations = 0;
- let mut found_tests = 0;
- let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
- let mut explanations: HashSet<String> = HashSet::new();
- // We want error codes which match the following cases:
- //
- // * foo(a, E0111, a)
- // * foo(a, E0111)
- // * foo(E0111, a)
- // * #[error = "E0111"]
- let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
-
- println!("Checking which error codes lack tests...");
-
- for path in paths {
- walk(path, &mut filter_dirs, &mut |entry, contents| {
- let file_name = entry.file_name();
- let entry_path = entry.path();
-
- if file_name == "error_codes.rs" {
- extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
- found_explanations += 1;
- } else if entry_path.extension() == Some(OsStr::new("stderr")) {
- extract_error_codes_from_tests(contents, &mut error_codes);
- found_tests += 1;
- } else if entry_path.extension() == Some(OsStr::new("rs")) {
- let path = entry.path().to_string_lossy();
- if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
- extract_error_codes_from_source(contents, &mut error_codes, &regex);
- }
- } else if entry_path
- .parent()
- .and_then(|p| p.file_name())
- .map(|p| p == "error_codes")
- .unwrap_or(false)
- && entry_path.extension() == Some(OsStr::new("md"))
- {
- explanations.insert(file_name.to_str().unwrap().replace(".md", ""));
- }
- });
- }
- if found_explanations == 0 {
- eprintln!("No error code explanation was tested!");
- *bad = true;
- }
- if found_tests == 0 {
- eprintln!("No error code was found in compilation errors!");
- *bad = true;
- }
- if explanations.is_empty() {
- eprintln!("No error code explanation was found!");
- *bad = true;
- }
- if errors.is_empty() {
- println!("Found {} error codes", error_codes.len());
-
- for (err_code, error_status) in &error_codes {
- if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
- errors.push(format!("Error code {err_code} needs to have at least one UI test!"));
- } else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
- errors.push(format!(
- "Error code {} has a UI test, it shouldn't be listed into EXEMPTED_FROM_TEST!",
- err_code
- ));
- }
- if !error_status.is_used && !error_status.has_explanation {
- errors.push(format!(
- "Error code {} isn't used and doesn't have an error explanation, it should be \
- commented in error_codes.rs file",
- err_code
- ));
- }
- }
- }
- if errors.is_empty() {
- // Checking if local constants need to be cleaned.
- for err_code in EXEMPTED_FROM_TEST {
- match error_codes.get(err_code.to_owned()) {
- Some(status) => {
- if status.has_test {
- errors.push(format!(
- "{} error code has a test and therefore should be \
- removed from the `EXEMPTED_FROM_TEST` constant",
- err_code
- ));
- }
- }
- None => errors.push(format!(
- "{} error code isn't used anymore and therefore should be removed \
- from `EXEMPTED_FROM_TEST` constant",
- err_code
- )),
- }
- }
- }
- if errors.is_empty() {
- for explanation in explanations {
- if !error_codes.contains_key(&explanation) {
- errors.push(format!(
- "{} error code explanation should be listed in `error_codes.rs`",
- explanation
- ));
- }
- }
- }
- errors.sort();
- for err in &errors {
- eprintln!("{err}");
- }
- println!("Found {} error(s) in error codes", errors.len());
- if !errors.is_empty() {
- *bad = true;
- }
- println!("Done!");
-}
diff --git a/src/tools/tidy/src/errors.rs b/src/tools/tidy/src/errors.rs
deleted file mode 100644
index fe5fd72b9..000000000
--- a/src/tools/tidy/src/errors.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-//! Tidy check to verify the validity of long error diagnostic codes.
-//!
-//! This ensures that error codes are used at most once and also prints out some
-//! statistics about the error codes.
-
-use crate::walk::{filter_dirs, walk};
-use std::collections::HashMap;
-use std::path::Path;
-
-pub fn check(path: &Path, bad: &mut bool) {
- let mut map: HashMap<_, Vec<_>> = HashMap::new();
- walk(
- path,
- &mut |path| filter_dirs(path) || path.ends_with("src/test"),
- &mut |entry, contents| {
- let file = entry.path();
- let filename = file.file_name().unwrap().to_string_lossy();
- if filename != "error_codes.rs" {
- return;
- }
-
- // In the `register_long_diagnostics!` macro, entries look like this:
- //
- // ```
- // EXXXX: r##"
- // <Long diagnostic message>
- // "##,
- // ```
- //
- // and these long messages often have error codes themselves inside
- // them, but we don't want to report duplicates in these cases. This
- // variable keeps track of whether we're currently inside one of these
- // long diagnostic messages.
- let mut inside_long_diag = false;
- for (num, line) in contents.lines().enumerate() {
- if inside_long_diag {
- inside_long_diag = !line.contains("\"##");
- continue;
- }
-
- let mut search = line;
- while let Some(i) = search.find('E') {
- search = &search[i + 1..];
- let code = if search.len() > 4 { search[..4].parse::<u32>() } else { continue };
- let code = match code {
- Ok(n) => n,
- Err(..) => continue,
- };
- map.entry(code).or_default().push((file.to_owned(), num + 1, line.to_owned()));
- break;
- }
-
- inside_long_diag = line.contains("r##\"");
- }
- },
- );
-
- let mut max = 0;
- for (&code, entries) in map.iter() {
- if code > max {
- max = code;
- }
- if entries.len() == 1 {
- continue;
- }
-
- tidy_error!(bad, "duplicate error code: {}", code);
- for &(ref file, line_num, ref line) in entries.iter() {
- tidy_error!(bad, "{}:{}: {}", file.display(), line_num, line);
- }
- }
-
- if !*bad {
- println!("* {} error codes", map.len());
- println!("* highest error code: E{:04}", max);
- }
-}
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index f10ecf5f2..af92e6eb8 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -82,6 +82,7 @@ pub fn collect_lib_features(base_src_path: &Path) -> Features {
pub fn check(
src_path: &Path,
+ tests_path: &Path,
compiler_path: &Path,
lib_path: &Path,
bad: &mut bool,
@@ -95,10 +96,10 @@ pub fn check(
walk_many(
&[
- &src_path.join("test/ui"),
- &src_path.join("test/ui-fulldeps"),
- &src_path.join("test/rustdoc-ui"),
- &src_path.join("test/rustdoc"),
+ &tests_path.join("ui"),
+ &tests_path.join("ui-fulldeps"),
+ &tests_path.join("rustdoc-ui"),
+ &tests_path.join("rustdoc"),
],
&mut filter_dirs,
&mut |entry, contents| {
@@ -480,7 +481,7 @@ fn map_lib_features(
) {
walk(
base_src_path,
- &mut |path| filter_dirs(path) || path.ends_with("src/test"),
+ &mut |path| filter_dirs(path) || path.ends_with("tests"),
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
diff --git a/src/tools/tidy/src/features/version/tests.rs b/src/tools/tidy/src/features/version/tests.rs
index 31224fdf1..7701dce2d 100644
--- a/src/tools/tidy/src/features/version/tests.rs
+++ b/src/tools/tidy/src/features/version/tests.rs
@@ -12,8 +12,8 @@ fn test_try_from_invalid_version() {
#[test]
fn test_try_from_single() {
- assert_eq!("1.32.0".parse(), Ok(Version { parts: [1, 32, 0] }));
- assert_eq!("1.0.0".parse(), Ok(Version { parts: [1, 0, 0] }));
+ assert_eq!("1.32.0".parse(), Ok(Version::Explicit { parts: [1, 32, 0] }));
+ assert_eq!("1.0.0".parse(), Ok(Version::Explicit { parts: [1, 0, 0] }));
}
#[test]
diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs
index 698e4850b..35000320d 100644
--- a/src/tools/tidy/src/lib.rs
+++ b/src/tools/tidy/src/lib.rs
@@ -3,6 +3,10 @@
//! This library contains the tidy lints and exposes it
//! to be used by tools.
+use std::fmt::Display;
+
+use termcolor::WriteColor;
+
/// A helper macro to `unwrap` a result except also print out details like:
///
/// * The expression that failed
@@ -26,33 +30,44 @@ macro_rules! t {
}
macro_rules! tidy_error {
- ($bad:expr, $fmt:expr) => ({
- *$bad = true;
- eprint!("tidy error: ");
- eprintln!($fmt);
- });
- ($bad:expr, $fmt:expr, $($arg:tt)*) => ({
- *$bad = true;
- eprint!("tidy error: ");
- eprintln!($fmt, $($arg)*);
+ ($bad:expr, $($fmt:tt)*) => ({
+ $crate::tidy_error($bad, format_args!($($fmt)*)).expect("failed to output error");
});
}
+fn tidy_error(bad: &mut bool, args: impl Display) -> std::io::Result<()> {
+ use std::io::Write;
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream};
+
+ *bad = true;
+
+ let mut stderr = StandardStream::stdout(ColorChoice::Auto);
+ stderr.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+
+ write!(&mut stderr, "tidy error")?;
+ stderr.set_color(&ColorSpec::new())?;
+
+ writeln!(&mut stderr, ": {args}")?;
+ Ok(())
+}
+
pub mod alphabetical;
pub mod bins;
pub mod debug_artifacts;
pub mod deps;
pub mod edition;
-pub mod error_codes_check;
-pub mod errors;
+pub mod error_codes;
pub mod extdeps;
pub mod features;
pub mod mir_opt_tests;
pub mod pal;
pub mod primitive_docs;
+pub mod rustdoc_gui_tests;
pub mod style;
pub mod target_specific_tests;
+pub mod tests_placement;
pub mod ui_tests;
pub mod unit_tests;
pub mod unstable_book;
pub mod walk;
+pub mod x_version;
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index b0b11cafc..505f9d724 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -25,8 +25,10 @@ fn main() {
.expect("concurrency must be a number");
let src_path = root_path.join("src");
+ let tests_path = root_path.join("tests");
let library_path = root_path.join("library");
let compiler_path = root_path.join("compiler");
+ let librustdoc_path = src_path.join("librustdoc");
let args: Vec<String> = env::args().skip(1).collect();
@@ -35,19 +37,30 @@ fn main() {
let bad = std::sync::Arc::new(AtomicBool::new(false));
+ let drain_handles = |handles: &mut VecDeque<ScopedJoinHandle<'_, ()>>| {
+ // poll all threads for completion before awaiting the oldest one
+ for i in (0..handles.len()).rev() {
+ if handles[i].is_finished() {
+ handles.swap_remove_back(i).unwrap().join().unwrap();
+ }
+ }
+
+ while handles.len() >= concurrency.get() {
+ handles.pop_front().unwrap().join().unwrap();
+ }
+ };
+
scope(|s| {
let mut handles: VecDeque<ScopedJoinHandle<'_, ()>> =
VecDeque::with_capacity(concurrency.get());
macro_rules! check {
($p:ident $(, $args:expr)* ) => {
- while handles.len() >= concurrency.get() {
- handles.pop_front().unwrap().join().unwrap();
- }
+ drain_handles(&mut handles);
let handle = s.spawn(|| {
let mut flag = false;
- $p::check($($args),* , &mut flag);
+ $p::check($($args, )* &mut flag);
if (flag) {
bad.store(true, Ordering::Relaxed);
}
@@ -56,20 +69,21 @@ fn main() {
}
}
- check!(target_specific_tests, &src_path);
+ check!(target_specific_tests, &tests_path);
// Checks that are done on the cargo workspace.
check!(deps, &root_path, &cargo);
check!(extdeps, &root_path);
// Checks over tests.
- check!(debug_artifacts, &src_path);
- check!(ui_tests, &src_path);
- check!(mir_opt_tests, &src_path, bless);
+ check!(tests_placement, &root_path);
+ check!(debug_artifacts, &tests_path);
+ check!(ui_tests, &tests_path);
+ check!(mir_opt_tests, &tests_path, bless);
+ check!(rustdoc_gui_tests, &tests_path);
// Checks that only make sense for the compiler.
- check!(errors, &compiler_path);
- check!(error_codes_check, &[&src_path, &compiler_path]);
+ check!(error_codes, &root_path, &[&compiler_path, &librustdoc_path], verbose);
// Checks that only make sense for the std libs.
check!(pal, &library_path);
@@ -77,6 +91,7 @@ fn main() {
// Checks that need to be done for both the compiler and std libraries.
check!(unit_tests, &src_path);
+ check!(unit_tests, &tests_path);
check!(unit_tests, &compiler_path);
check!(unit_tests, &library_path);
@@ -85,23 +100,34 @@ fn main() {
}
check!(style, &src_path);
+ check!(style, &tests_path);
check!(style, &compiler_path);
check!(style, &library_path);
check!(edition, &src_path);
check!(edition, &compiler_path);
check!(edition, &library_path);
+ check!(edition, &tests_path);
check!(alphabetical, &src_path);
+ check!(alphabetical, &tests_path);
check!(alphabetical, &compiler_path);
check!(alphabetical, &library_path);
+ check!(x_version, &root_path, &cargo);
+
let collected = {
- while handles.len() >= concurrency.get() {
- handles.pop_front().unwrap().join().unwrap();
- }
+ drain_handles(&mut handles);
+
let mut flag = false;
- let r = features::check(&src_path, &compiler_path, &library_path, &mut flag, verbose);
+ let r = features::check(
+ &src_path,
+ &tests_path,
+ &compiler_path,
+ &library_path,
+ &mut flag,
+ verbose,
+ );
if flag {
bad.store(true, Ordering::Relaxed);
}
diff --git a/src/tools/tidy/src/mir_opt_tests.rs b/src/tools/tidy/src/mir_opt_tests.rs
index 018573284..2a9dcac2e 100644
--- a/src/tools/tidy/src/mir_opt_tests.rs
+++ b/src/tools/tidy/src/mir_opt_tests.rs
@@ -6,7 +6,7 @@ use std::path::{Path, PathBuf};
fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
let mut rs_files = Vec::<PathBuf>::new();
let mut output_files = HashSet::<PathBuf>::new();
- let files = walkdir::WalkDir::new(&path.join("test/mir-opt")).into_iter();
+ let files = walkdir::WalkDir::new(&path.join("mir-opt")).into_iter();
for file in files.filter_map(Result::ok).filter(|e| e.file_type().is_file()) {
let filepath = file.path();
@@ -41,7 +41,7 @@ fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
}
fn check_dash_files(path: &Path, bless: bool, bad: &mut bool) {
- for file in walkdir::WalkDir::new(&path.join("test/mir-opt"))
+ for file in walkdir::WalkDir::new(&path.join("mir-opt"))
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
diff --git a/src/tools/tidy/src/rustdoc_gui_tests.rs b/src/tools/tidy/src/rustdoc_gui_tests.rs
new file mode 100644
index 000000000..feb513df3
--- /dev/null
+++ b/src/tools/tidy/src/rustdoc_gui_tests.rs
@@ -0,0 +1,33 @@
+//! Tidy check to ensure that rustdoc GUI tests start with a small description.
+
+use std::path::Path;
+
+pub fn check(path: &Path, bad: &mut bool) {
+ crate::walk::walk(
+ &path.join("rustdoc-gui"),
+ &mut |p| {
+ // If there is no extension, it's very likely a folder and we want to go into it.
+ p.extension().map(|e| e != "goml").unwrap_or(false)
+ },
+ &mut |entry, content| {
+ for line in content.lines() {
+ if !line.starts_with("// ") {
+ tidy_error!(
+ bad,
+ "{}: rustdoc-gui tests must start with a small description",
+ entry.path().display(),
+ );
+ return;
+ } else if line.starts_with("// ") {
+ let parts = line[2..].trim();
+ // We ignore tidy comments.
+ if parts.starts_with("// tidy-") {
+ continue;
+ }
+ // All good!
+ return;
+ }
+ }
+ },
+ );
+}
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index e3a094caf..6a0855405 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -15,15 +15,17 @@
//!
//! A number of these checks can be opted-out of with various directives of the form:
//! `// ignore-tidy-CHECK-NAME`.
+// ignore-tidy-dbg
use crate::walk::{filter_dirs, walk};
-use regex::Regex;
+use regex::{Regex, RegexSet};
use std::path::Path;
/// Error code markdown is restricted to 80 columns because they can be
/// displayed on the console with --example.
const ERROR_CODE_COLS: usize = 80;
const COLS: usize = 100;
+const GOML_COLS: usize = 120;
const LINES: usize = 3000;
@@ -43,6 +45,9 @@ C++ code used llvm_unreachable, which triggers undefined behavior
when executed when assertions are disabled.
Use llvm::report_fatal_error for increased robustness.";
+const DOUBLE_SPACE_AFTER_DOT: &str = r"\
+Use a single space after dots in comments.";
+
const ANNOTATIONS_TO_IGNORE: &[&str] = &[
"// @!has",
"// @has",
@@ -63,6 +68,8 @@ const PROBLEMATIC_CONSTS: &[u32] = &[
3735927486, 3735932941, 4027431614, 4276992702,
];
+const INTERNAL_COMPILER_DOCS_LINE: &str = "#### This error code is internal to the compiler and will not be emitted with normal Rust code.";
+
/// Parser states for `line_is_url`.
#[derive(Clone, Copy, PartialEq)]
#[allow(non_camel_case_types)]
@@ -131,6 +138,8 @@ fn long_line_is_ok(extension: &str, is_error_code: bool, max_columns: usize, lin
"ftl" => true,
// non-error code markdown is allowed to be any length
"md" if !is_error_code => true,
+ // HACK(Ezrashaw): there is no way to split a markdown header over multiple lines
+ "md" if line == INTERNAL_COMPILER_DOCS_LINE => true,
_ => line_is_url(is_error_code, max_columns, line) || should_ignore(line),
}
}
@@ -225,10 +234,12 @@ pub fn check(path: &Path, bad: &mut bool) {
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:x}", v)))
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:X}", v)))
.collect();
+ let problematic_regex = RegexSet::new(problematic_consts_strings.as_slice()).unwrap();
walk(path, &mut skip, &mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
- let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css", ".ftl"];
+ let extensions =
+ [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css", ".ftl", ".goml"];
if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") {
return;
}
@@ -238,7 +249,7 @@ pub fn check(path: &Path, bad: &mut bool) {
// This list should ideally be sourced from rustfmt.toml but we don't want to add a toml
// parser to tidy.
!file.ancestors().any(|a| {
- a.ends_with("src/test") ||
+ (a.ends_with("tests") && a.join("COMPILER_TESTS.md").exists()) ||
a.ends_with("src/doc/book")
});
@@ -253,8 +264,15 @@ pub fn check(path: &Path, bad: &mut bool) {
let extension = file.extension().unwrap().to_string_lossy();
let is_error_code = extension == "md" && is_in(file, "src", "error_codes");
+ let is_goml_code = extension == "goml";
- let max_columns = if is_error_code { ERROR_CODE_COLS } else { COLS };
+ let max_columns = if is_error_code {
+ ERROR_CODE_COLS
+ } else if is_goml_code {
+ GOML_COLS
+ } else {
+ COLS
+ };
let can_contain = contents.contains("// ignore-tidy-")
|| contents.contains("# ignore-tidy-")
@@ -264,6 +282,10 @@ pub fn check(path: &Path, bad: &mut bool) {
if filename.contains("ignore-tidy") {
return;
}
+ // apfloat shouldn't be changed because of license problems
+ if is_in(file, "compiler", "rustc_apfloat") {
+ return;
+ }
let mut skip_cr = contains_ignore_directive(can_contain, &contents, "cr");
let mut skip_undocumented_unsafe =
contains_ignore_directive(can_contain, &contents, "undocumented-unsafe");
@@ -277,14 +299,51 @@ pub fn check(path: &Path, bad: &mut bool) {
let mut skip_leading_newlines =
contains_ignore_directive(can_contain, &contents, "leading-newlines");
let mut skip_copyright = contains_ignore_directive(can_contain, &contents, "copyright");
+ let mut skip_dbg = contains_ignore_directive(can_contain, &contents, "dbg");
let mut leading_new_lines = false;
let mut trailing_new_lines = 0;
let mut lines = 0;
let mut last_safety_comment = false;
+ let is_test = file.components().any(|c| c.as_os_str() == "tests");
+ // scanning the whole file for multiple needles at once is more efficient than
+ // executing lines times needles separate searches.
+ let any_problematic_line = problematic_regex.is_match(contents);
for (i, line) in contents.split('\n').enumerate() {
+ if line.is_empty() {
+ if i == 0 {
+ leading_new_lines = true;
+ }
+ trailing_new_lines += 1;
+ continue;
+ } else {
+ trailing_new_lines = 0;
+ }
+
+ let trimmed = line.trim();
+
+ if !trimmed.starts_with("//") {
+ lines += 1;
+ }
+
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg);
};
+
+ if trimmed.contains("dbg!")
+ && !trimmed.starts_with("//")
+ && !file.ancestors().any(|a| {
+ (a.ends_with("tests") && a.join("COMPILER_TESTS.md").exists())
+ || a.ends_with("library/alloc/tests")
+ })
+ && filename != "tests.rs"
+ {
+ suppressible_tidy_err!(
+ err,
+ skip_dbg,
+ "`dbg!` macro is intended as a debugging tool. It should not be in version control."
+ )
+ }
+
if !under_rustfmt
&& line.chars().count() > max_columns
&& !long_line_is_ok(&extension, is_error_code, max_columns, line)
@@ -308,28 +367,29 @@ pub fn check(path: &Path, bad: &mut bool) {
suppressible_tidy_err!(err, skip_cr, "CR character");
}
if filename != "style.rs" {
- if line.contains("TODO") {
+ if trimmed.contains("TODO") {
err("TODO is deprecated; use FIXME")
}
- if line.contains("//") && line.contains(" XXX") {
+ if trimmed.contains("//") && trimmed.contains(" XXX") {
err("XXX is deprecated; use FIXME")
}
- for s in problematic_consts_strings.iter() {
- if line.contains(s) {
- err("Don't use magic numbers that spell things (consider 0x12345678)");
+ if any_problematic_line {
+ for s in problematic_consts_strings.iter() {
+ if trimmed.contains(s) {
+ err("Don't use magic numbers that spell things (consider 0x12345678)");
+ }
}
}
}
- let is_test = || file.components().any(|c| c.as_os_str() == "tests");
// for now we just check libcore
- if line.contains("unsafe {") && !line.trim().starts_with("//") && !last_safety_comment {
- if file.components().any(|c| c.as_os_str() == "core") && !is_test() {
+ if trimmed.contains("unsafe {") && !trimmed.starts_with("//") && !last_safety_comment {
+ if file.components().any(|c| c.as_os_str() == "core") && !is_test {
suppressible_tidy_err!(err, skip_undocumented_unsafe, "undocumented unsafe");
}
}
- if line.contains("// SAFETY:") {
+ if trimmed.contains("// SAFETY:") {
last_safety_comment = true;
- } else if line.trim().starts_with("//") || line.trim().is_empty() {
+ } else if trimmed.starts_with("//") || trimmed.is_empty() {
// keep previous value
} else {
last_safety_comment = false;
@@ -337,7 +397,8 @@ pub fn check(path: &Path, bad: &mut bool) {
if (line.starts_with("// Copyright")
|| line.starts_with("# Copyright")
|| line.starts_with("Copyright"))
- && (line.contains("Rust Developers") || line.contains("Rust Project Developers"))
+ && (trimmed.contains("Rust Developers")
+ || trimmed.contains("Rust Project Developers"))
{
suppressible_tidy_err!(
err,
@@ -351,17 +412,18 @@ pub fn check(path: &Path, bad: &mut bool) {
if filename.ends_with(".cpp") && line.contains("llvm_unreachable") {
err(LLVM_UNREACHABLE_INFO);
}
- if line.is_empty() {
- if i == 0 {
- leading_new_lines = true;
- }
- trailing_new_lines += 1;
- } else {
- trailing_new_lines = 0;
- }
- if !line.trim().starts_with("//") {
- lines += 1;
+ // For now only enforce in compiler
+ let is_compiler = || file.components().any(|c| c.as_os_str() == "compiler");
+ if is_compiler()
+ && line.contains("//")
+ && line
+ .chars()
+ .collect::<Vec<_>>()
+ .windows(4)
+ .any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
+ {
+ err(DOUBLE_SPACE_AFTER_DOT)
}
}
if leading_new_lines {
diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs
index 8ba257056..d7a157672 100644
--- a/src/tools/tidy/src/target_specific_tests.rs
+++ b/src/tools/tidy/src/target_specific_tests.rs
@@ -35,9 +35,8 @@ struct RevisionInfo<'a> {
}
pub fn check(path: &Path, bad: &mut bool) {
- let tests = path.join("test");
crate::walk::walk(
- &tests,
+ path,
&mut |path| path.extension().map(|p| p == "rs") == Some(false),
&mut |entry, content| {
let file = entry.path().display();
diff --git a/src/tools/tidy/src/tests_placement.rs b/src/tools/tidy/src/tests_placement.rs
new file mode 100644
index 000000000..9d0057df8
--- /dev/null
+++ b/src/tools/tidy/src/tests_placement.rs
@@ -0,0 +1,15 @@
+use std::path::Path;
+
+const FORBIDDEN_PATH: &str = "src/test";
+const ALLOWED_PATH: &str = "tests";
+
+pub fn check(root_path: impl AsRef<Path>, bad: &mut bool) {
+ if root_path.as_ref().join(FORBIDDEN_PATH).exists() {
+ tidy_error!(
+ bad,
+ "Tests have been moved, please move them from {} to {}",
+ root_path.as_ref().join(FORBIDDEN_PATH).display(),
+ root_path.as_ref().join(ALLOWED_PATH).display()
+ )
+ }
+}
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index ee326e190..806e84025 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -10,16 +10,16 @@ use std::path::Path;
const ENTRY_LIMIT: usize = 1000;
// FIXME: The following limits should be reduced eventually.
const ROOT_ENTRY_LIMIT: usize = 939;
-const ISSUES_ENTRY_LIMIT: usize = 2070;
+const ISSUES_ENTRY_LIMIT: usize = 1998;
fn check_entries(path: &Path, bad: &mut bool) {
- for dir in Walk::new(&path.join("test/ui")) {
+ for dir in Walk::new(&path.join("ui")) {
if let Ok(entry) = dir {
if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
let dir_path = entry.path();
// Use special values for these dirs.
- let is_root = path.join("test/ui") == dir_path;
- let is_issues_dir = path.join("test/ui/issues") == dir_path;
+ let is_root = path.join("ui") == dir_path;
+ let is_issues_dir = path.join("ui/issues") == dir_path;
let limit = if is_root {
ROOT_ENTRY_LIMIT
} else if is_issues_dir {
@@ -53,7 +53,7 @@ fn check_entries(path: &Path, bad: &mut bool) {
pub fn check(path: &Path, bad: &mut bool) {
check_entries(&path, bad);
- for path in &[&path.join("test/ui"), &path.join("test/ui-fulldeps")] {
+ for path in &[&path.join("ui"), &path.join("ui-fulldeps")] {
crate::walk::walk_no_read(path, &mut |_| false, &mut |entry| {
let file_path = entry.path();
if let Some(ext) = file_path.extension() {
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index 2c23b6ebc..27f36c855 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -22,7 +22,7 @@ pub fn check(root_path: &Path, bad: &mut bool) {
let file_name = path.file_name().unwrap_or_default();
if path.is_dir() {
filter_dirs(path)
- || path.ends_with("src/test")
+ || path.ends_with("tests")
|| path.ends_with("src/doc")
|| (file_name == "tests" || file_name == "benches") && !is_core(path)
} else {
diff --git a/src/tools/tidy/src/x_version.rs b/src/tools/tidy/src/x_version.rs
new file mode 100644
index 000000000..c470d502a
--- /dev/null
+++ b/src/tools/tidy/src/x_version.rs
@@ -0,0 +1,73 @@
+use semver::Version;
+use std::path::Path;
+use std::process::{Command, Stdio};
+
+/// Checks that the locally installed `x` wrapper (if any) is not older than the
+/// version declared in `src/tools/x/Cargo.toml`, nudging the user to update.
+pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
+    let cargo_list = Command::new(cargo).args(["install", "--list"]).stdout(Stdio::piped()).spawn();
+
+    let child = match cargo_list {
+        Ok(child) => child,
+        Err(e) => return tidy_error!(bad, "failed to run `cargo`: {}", e),
+    };
+
+    // Don't panic on a failed wait: report it as a tidy error instead.
+    let cargo_list = match child.wait_with_output() {
+        Ok(output) => output,
+        Err(e) => return tidy_error!(bad, "failed to run `cargo`: {}", e),
+    };
+
+    if !cargo_list.status.success() {
+        return tidy_error!(bad, "failed to check version of `x`: {}", cargo_list.status);
+    }
+
+    let exe_list = String::from_utf8_lossy(&cargo_list.stdout);
+
+    let mut installed: Option<Version> = None;
+
+    for line in exe_list.lines() {
+        let mut iter = line.split_whitespace();
+        if iter.next() != Some("x") {
+            continue;
+        }
+        let (Some(version), Some(path)) = (iter.next(), iter.next()) else { continue };
+        // Check this is the rust-lang/rust x tool installation since it should be
+        // installed at a path containing `src/tools/x`.
+        if !path.contains("src/tools/x") {
+            continue;
+        }
+        // `cargo install --list` prints versions as e.g. `v0.1.0`; strip the
+        // leading `v` defensively so a format change cannot panic tidy.
+        let version = version.strip_prefix('v').unwrap_or(version);
+        match Version::parse(version) {
+            Ok(v) => installed = Some(v),
+            Err(e) => return tidy_error!(bad, "failed to parse version of `x`: {}", e),
+        }
+        break;
+    }
+
+    // It's fine if `x` simply isn't installed; there is nothing to compare.
+    let Some(installed) = installed else { return };
+
+    match get_x_wrapper_version(root, cargo) {
+        Some(expected) if installed < expected => println!(
+            "Current version of x is {installed}, but the latest version is {expected}\nConsider updating to the newer version of x by running `cargo install --path src/tools/x`"
+        ),
+        Some(_) => {}
+        None => tidy_error!(
+            bad,
+            "Unable to parse the latest version of `x` at `src/tools/x/Cargo.toml`"
+        ),
+    }
+}
+
+// Parse the latest version out of the `x` wrapper's Cargo.toml.
+fn get_x_wrapper_version(root: &Path, cargo: &Path) -> Option<Version> {
+    let mut cmd = cargo_metadata::MetadataCommand::new();
+    cmd.cargo_path(cargo)
+        .manifest_path(root.join("src/tools/x/Cargo.toml"))
+        .no_deps()
+        .features(cargo_metadata::CargoOpt::AllFeatures);
+    let mut metadata = t!(cmd.exec());
+    metadata.packages.pop().map(|x| x.version)
+}