author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:11:38 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:12:43 +0000
commit     cf94bdc0742c13e2a0cac864c478b8626b266e1b (patch)
tree       044670aa50cc5e2b4229aa0b6b3df6676730c0a6  /src/tools/tidy/src
parent     Adding debian version 1.65.0+dfsg1-2. (diff)
Merging upstream version 1.66.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/tidy/src')
-rw-r--r--  src/tools/tidy/src/alphabetical.rs           | 111
-rw-r--r--  src/tools/tidy/src/bins.rs                   |   5
-rw-r--r--  src/tools/tidy/src/debug_artifacts.rs        |   3
-rw-r--r--  src/tools/tidy/src/deps.rs                   |  45
-rw-r--r--  src/tools/tidy/src/edition.rs                |   5
-rw-r--r--  src/tools/tidy/src/error_codes_check.rs      |   3
-rw-r--r--  src/tools/tidy/src/errors.rs                 |   5
-rw-r--r--  src/tools/tidy/src/features.rs               | 276
-rw-r--r--  src/tools/tidy/src/lib.rs                    |   3
-rw-r--r--  src/tools/tidy/src/main.rs                   |   4
-rw-r--r--  src/tools/tidy/src/pal.rs                    |   3
-rw-r--r--  src/tools/tidy/src/style.rs                  |   5
-rw-r--r--  src/tools/tidy/src/target_specific_tests.rs  |   2
-rw-r--r--  src/tools/tidy/src/unit_tests.rs             |   5
-rw-r--r--  src/tools/tidy/src/ui_tests.rs               |   6
-rw-r--r--  src/tools/tidy/src/walk.rs                   |   6
16 files changed, 316 insertions, 171 deletions
diff --git a/src/tools/tidy/src/alphabetical.rs b/src/tools/tidy/src/alphabetical.rs
new file mode 100644
index 000000000..f913f6cde
--- /dev/null
+++ b/src/tools/tidy/src/alphabetical.rs
@@ -0,0 +1,111 @@
+//! Checks that a list of items is in alphabetical order
+//!
+//! To use, use the following annotation in the code:
+//! ```rust
+//! // tidy-alphabetical-start
+//! fn aaa() {}
+//! fn eee() {}
+//! fn z() {}
+//! // tidy-alphabetical-end
+//! ```
+//!
+//! The following lines are ignored:
+//! - Lines that are indented with more or less spaces than the first line
+//! - Lines starting with `//`, `#[`, `)`, `]`, `}` if the comment has the same indentation as
+//! the first line
+//!
+//! If a line ends with an opening bracket, the line is ignored and the next line will have
+//! its extra indentation ignored.
+
+use std::{fmt::Display, path::Path};
+
+use crate::walk::{filter_dirs, walk};
+
+fn indentation(line: &str) -> usize {
+ line.find(|c| c != ' ').unwrap_or(0)
+}
+
+fn is_close_bracket(c: char) -> bool {
+ matches!(c, ')' | ']' | '}')
+}
+
+// Don't let tidy check this here :D
+const START_COMMENT: &str = concat!("// tidy-alphabetical", "-start");
+const END_COMMENT: &str = "// tidy-alphabetical-end";
+
+fn check_section<'a>(
+ file: impl Display,
+ lines: impl Iterator<Item = (usize, &'a str)>,
+ bad: &mut bool,
+) {
+ let content_lines = lines.take_while(|(_, line)| !line.contains(END_COMMENT));
+
+ let mut prev_line = String::new();
+ let mut first_indent = None;
+ let mut in_split_line = None;
+
+ for (line_idx, line) in content_lines {
+ if line.contains(START_COMMENT) {
+ tidy_error!(
+ bad,
+ "{file}:{} found `{START_COMMENT}` expecting `{END_COMMENT}`",
+ line_idx
+ )
+ }
+
+ let indent = first_indent.unwrap_or_else(|| {
+ let indent = indentation(line);
+ first_indent = Some(indent);
+ indent
+ });
+
+ let line = if let Some(prev_split_line) = in_split_line {
+ in_split_line = None;
+ format!("{prev_split_line}{}", line.trim_start())
+ } else {
+ line.to_string()
+ };
+
+ if indentation(&line) != indent {
+ continue;
+ }
+
+ let trimmed_line = line.trim_start_matches(' ');
+
+ if trimmed_line.starts_with("//")
+ || trimmed_line.starts_with("#[")
+ || trimmed_line.starts_with(is_close_bracket)
+ {
+ continue;
+ }
+
+ if line.trim_end().ends_with('(') {
+ in_split_line = Some(line);
+ continue;
+ }
+
+ let prev_line_trimmed_lowercase = prev_line.trim_start_matches(' ').to_lowercase();
+
+ if trimmed_line.to_lowercase() < prev_line_trimmed_lowercase {
+ tidy_error!(bad, "{file}:{}: line not in alphabetical order", line_idx + 1,);
+ }
+
+ prev_line = line;
+ }
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+ walk(path, &mut filter_dirs, &mut |entry, contents| {
+ let file = &entry.path().display();
+
+ let mut lines = contents.lines().enumerate().peekable();
+ while let Some((_, line)) = lines.next() {
+ if line.contains(START_COMMENT) {
+ check_section(file, &mut lines, bad);
+ if lines.peek().is_none() {
+ tidy_error!(bad, "{file}: reached end of file expecting `{END_COMMENT}`")
+ }
+ }
+ }
+ });
+}
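
The rules documented at the top of the new alphabetical.rs can be exercised with a small illustrative block (the identifiers below are made up, not from the tree): comment lines and closing-bracket lines at the anchor indentation are skipped, lines at a different indentation are skipped, and a line ending in `(` is joined with its continuation before the comparison.

```rust
fn registered_checks() -> Vec<&'static str> {
    let mut checks = Vec::new();
    // tidy-alphabetical-start
    checks.push(
        "alphabetical", // joined with the `checks.push(` line above before comparing
    );
    checks.push("bins");
    // a comment at this indentation is ignored by the ordering check
    checks.push("deps");
    // tidy-alphabetical-end
    checks
}
```
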
diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs
index 30903f56d..b898f20a5 100644
--- a/src/tools/tidy/src/bins.rs
+++ b/src/tools/tidy/src/bins.rs
@@ -21,6 +21,7 @@ mod os_impl {
#[cfg(unix)]
mod os_impl {
+ use crate::walk::{filter_dirs, walk_no_read};
use std::fs;
use std::os::unix::prelude::*;
use std::path::Path;
@@ -100,10 +101,10 @@ mod os_impl {
const ALLOWED: &[&str] = &["configure", "x"];
- crate::walk_no_read(
+ walk_no_read(
path,
&mut |path| {
- crate::filter_dirs(path)
+ filter_dirs(path)
|| path.ends_with("src/etc")
// This is a list of directories that we almost certainly
// don't need to walk. A future PR will likely want to
diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs
index ab87230f8..9880a32ad 100644
--- a/src/tools/tidy/src/debug_artifacts.rs
+++ b/src/tools/tidy/src/debug_artifacts.rs
@@ -1,5 +1,6 @@
//! Tidy check to prevent creation of unnecessary debug artifacts while running tests.
+use crate::walk::{filter_dirs, walk};
use std::path::{Path, PathBuf};
const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in test";
@@ -7,7 +8,7 @@ const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in t
pub fn check(path: &Path, bad: &mut bool) {
let test_dir: PathBuf = path.join("test");
- super::walk(&test_dir, &mut super::filter_dirs, &mut |entry, contents| {
+ walk(&test_dir, &mut filter_dirs, &mut |entry, contents| {
let filename = entry.path();
let is_rust = filename.extension().map_or(false, |ext| ext == "rs");
if !is_rust {
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index cbd8cfa01..8a0239ece 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -18,9 +18,11 @@ const LICENSES: &[&str] = &[
"ISC",
"Unlicense/MIT",
"Unlicense OR MIT",
- "0BSD OR MIT OR Apache-2.0", // adler license
- "Zlib OR Apache-2.0 OR MIT", // tinyvec
- "MIT OR Zlib OR Apache-2.0", // miniz_oxide
+ "0BSD OR MIT OR Apache-2.0", // adler license
+ "Zlib OR Apache-2.0 OR MIT", // tinyvec
+ "MIT OR Apache-2.0 OR Zlib", // tinyvec_macros
+ "MIT OR Zlib OR Apache-2.0", // miniz_oxide
+ "(MIT OR Apache-2.0) AND Unicode-DFS-2016", // unicode_ident
];
/// These are exceptions to Rust's permissive licensing policy, and
@@ -72,14 +74,11 @@ const EXCEPTIONS_BOOTSTRAP: &[(&str, &str)] = &[
/// these and all their dependencies *must not* be in the exception list.
const RUNTIME_CRATES: &[&str] = &["std", "core", "alloc", "test", "panic_abort", "panic_unwind"];
-/// Crates whose dependencies must be explicitly permitted.
-const RESTRICTED_DEPENDENCY_CRATES: &[&str] = &["rustc_driver", "rustc_codegen_llvm"];
-
/// Crates rustc is allowed to depend on. Avoid adding to the list if possible.
///
/// This list is here to provide a speed-bump to adding a new dependency to
/// rustc. Please check with the compiler team before adding an entry.
-const PERMITTED_DEPENDENCIES: &[&str] = &[
+const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"addr2line",
"adler",
"ahash",
@@ -218,6 +217,7 @@ const PERMITTED_DEPENDENCIES: &[&str] = &[
"time",
"tinystr",
"tinyvec",
+ "tinyvec_macros",
"thin-vec",
"tracing",
"tracing-attributes",
@@ -236,6 +236,7 @@ const PERMITTED_DEPENDENCIES: &[&str] = &[
"unic-langid-macros",
"unic-langid-macros-impl",
"unic-ucd-version",
+ "unicode-ident",
"unicode-normalization",
"unicode-script",
"unicode-security",
@@ -258,7 +259,9 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
"ahash",
"anyhow",
"ar",
+ "arrayvec",
"autocfg",
+ "bumpalo",
"bitflags",
"byteorder",
"cfg-if",
@@ -305,7 +308,7 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
];
const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
- // These two crates take quite a long time to build, so don't allow two versions of them
+ // This crate takes quite a long time to build, so don't allow two versions of them
// to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
// under control.
"cargo",
@@ -322,12 +325,12 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
.features(cargo_metadata::CargoOpt::AllFeatures);
let metadata = t!(cmd.exec());
let runtime_ids = compute_runtime_crates(&metadata);
- check_exceptions(&metadata, EXCEPTIONS, runtime_ids, bad);
- check_dependencies(
+ check_license_exceptions(&metadata, EXCEPTIONS, runtime_ids, bad);
+ check_permitted_dependencies(
&metadata,
- "main workspace",
- PERMITTED_DEPENDENCIES,
- RESTRICTED_DEPENDENCY_CRATES,
+ "rustc",
+ PERMITTED_RUSTC_DEPENDENCIES,
+ &["rustc_driver", "rustc_codegen_llvm"],
bad,
);
check_crate_duplicate(&metadata, FORBIDDEN_TO_HAVE_DUPLICATES, bad);
@@ -340,8 +343,8 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
.features(cargo_metadata::CargoOpt::AllFeatures);
let metadata = t!(cmd.exec());
let runtime_ids = HashSet::new();
- check_exceptions(&metadata, EXCEPTIONS_CRANELIFT, runtime_ids, bad);
- check_dependencies(
+ check_license_exceptions(&metadata, EXCEPTIONS_CRANELIFT, runtime_ids, bad);
+ check_permitted_dependencies(
&metadata,
"cranelift",
PERMITTED_CRANELIFT_DEPENDENCIES,
@@ -356,13 +359,13 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
.features(cargo_metadata::CargoOpt::AllFeatures);
let metadata = t!(cmd.exec());
let runtime_ids = HashSet::new();
- check_exceptions(&metadata, EXCEPTIONS_BOOTSTRAP, runtime_ids, bad);
+ check_license_exceptions(&metadata, EXCEPTIONS_BOOTSTRAP, runtime_ids, bad);
}
/// Check that all licenses are in the valid list in `LICENSES`.
///
-/// Packages listed in `EXCEPTIONS` are allowed for tools.
-fn check_exceptions(
+/// Packages listed in `exceptions` are allowed for tools.
+fn check_license_exceptions(
metadata: &Metadata,
exceptions: &[(&str, &str)],
runtime_ids: HashSet<&PackageId>,
@@ -432,11 +435,11 @@ fn check_exceptions(
}
}
-/// Checks the dependency of `RESTRICTED_DEPENDENCY_CRATES` at the given path. Changes `bad` to
+/// Checks the dependency of `restricted_dependency_crates` at the given path. Changes `bad` to
/// `true` if a check failed.
///
-/// Specifically, this checks that the dependencies are on the `PERMITTED_DEPENDENCIES`.
-fn check_dependencies(
+/// Specifically, this checks that the dependencies are on the `permitted_dependencies`.
+fn check_permitted_dependencies(
metadata: &Metadata,
descr: &str,
permitted_dependencies: &[&'static str],
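
The renamed check_permitted_dependencies keeps the same idea as before: every dependency reachable from the restricted crates (now passed inline as `&["rustc_driver", "rustc_codegen_llvm"]`) must appear in the permitted list. A reduced, self-contained sketch of that containment test, independent of cargo_metadata and not the actual tidy implementation:

```rust
use std::collections::HashSet;

/// Illustrative only: report crate names that are not on the permitted list.
fn unpermitted<'a>(dependencies: &[&'a str], permitted: &[&str]) -> Vec<&'a str> {
    let permitted: HashSet<&str> = permitted.iter().copied().collect();
    dependencies.iter().copied().filter(|dep| !permitted.contains(dep)).collect()
}

fn main() {
    let permitted = ["adler", "tinyvec", "tinyvec_macros", "unicode-ident"];
    let found = ["adler", "tinyvec", "left-pad"];
    assert_eq!(unpermitted(&found, &permitted), ["left-pad"]);
}
```
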
diff --git a/src/tools/tidy/src/edition.rs b/src/tools/tidy/src/edition.rs
index b0abee459..8a7c4460d 100644
--- a/src/tools/tidy/src/edition.rs
+++ b/src/tools/tidy/src/edition.rs
@@ -1,5 +1,6 @@
//! Tidy check to ensure that crate `edition` is '2018' or '2021'.
+use crate::walk::{filter_dirs, walk};
use std::path::Path;
fn is_edition_2021(mut line: &str) -> bool {
@@ -8,9 +9,9 @@ fn is_edition_2021(mut line: &str) -> bool {
}
pub fn check(path: &Path, bad: &mut bool) {
- super::walk(
+ walk(
path,
- &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |path| filter_dirs(path) || path.ends_with("src/test"),
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap();
diff --git a/src/tools/tidy/src/error_codes_check.rs b/src/tools/tidy/src/error_codes_check.rs
index 0a226443e..610e322e1 100644
--- a/src/tools/tidy/src/error_codes_check.rs
+++ b/src/tools/tidy/src/error_codes_check.rs
@@ -1,6 +1,7 @@
//! Checks that all error codes have at least one test to prevent having error
//! codes that are silently not thrown by the compiler anymore.
+use crate::walk::{filter_dirs, walk};
use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::fs::read_to_string;
@@ -217,7 +218,7 @@ pub fn check(paths: &[&Path], bad: &mut bool) {
println!("Checking which error codes lack tests...");
for path in paths {
- super::walk(path, &mut super::filter_dirs, &mut |entry, contents| {
+ walk(path, &mut filter_dirs, &mut |entry, contents| {
let file_name = entry.file_name();
let entry_path = entry.path();
diff --git a/src/tools/tidy/src/errors.rs b/src/tools/tidy/src/errors.rs
index dbcc9341a..fe5fd72b9 100644
--- a/src/tools/tidy/src/errors.rs
+++ b/src/tools/tidy/src/errors.rs
@@ -3,14 +3,15 @@
//! This ensures that error codes are used at most once and also prints out some
//! statistics about the error codes.
+use crate::walk::{filter_dirs, walk};
use std::collections::HashMap;
use std::path::Path;
pub fn check(path: &Path, bad: &mut bool) {
let mut map: HashMap<_, Vec<_>> = HashMap::new();
- super::walk(
+ walk(
path,
- &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |path| filter_dirs(path) || path.ends_with("src/test"),
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index b306a527a..f10ecf5f2 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -9,7 +9,8 @@
//! * All unstable lang features have tests to ensure they are actually unstable.
//! * Language features in a group are sorted by feature name.
-use std::collections::HashMap;
+use crate::walk::{filter_dirs, walk, walk_many};
+use std::collections::hash_map::{Entry, HashMap};
use std::fmt;
use std::fs;
use std::num::NonZeroU32;
@@ -92,14 +93,14 @@ pub fn check(
let lib_features = get_and_check_lib_features(lib_path, bad, &features);
assert!(!lib_features.is_empty());
- super::walk_many(
+ walk_many(
&[
&src_path.join("test/ui"),
&src_path.join("test/ui-fulldeps"),
&src_path.join("test/rustdoc-ui"),
&src_path.join("test/rustdoc"),
],
- &mut |path| super::filter_dirs(path),
+ &mut filter_dirs,
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
@@ -279,13 +280,14 @@ fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {
}
pub fn collect_lang_features(base_compiler_path: &Path, bad: &mut bool) -> Features {
- let mut all = collect_lang_features_in(base_compiler_path, "active.rs", bad);
- all.extend(collect_lang_features_in(base_compiler_path, "accepted.rs", bad));
- all.extend(collect_lang_features_in(base_compiler_path, "removed.rs", bad));
- all
+ let mut features = Features::new();
+ collect_lang_features_in(&mut features, base_compiler_path, "active.rs", bad);
+ collect_lang_features_in(&mut features, base_compiler_path, "accepted.rs", bad);
+ collect_lang_features_in(&mut features, base_compiler_path, "removed.rs", bad);
+ features
}
-fn collect_lang_features_in(base: &Path, file: &str, bad: &mut bool) -> Features {
+fn collect_lang_features_in(features: &mut Features, base: &Path, file: &str, bad: &mut bool) {
let path = base.join("rustc_feature").join("src").join(file);
let contents = t!(fs::read_to_string(&path));
@@ -297,135 +299,145 @@ fn collect_lang_features_in(base: &Path, file: &str, bad: &mut bool) -> Features
let mut in_feature_group = false;
let mut prev_names = vec![];
- contents
- .lines()
- .zip(1..)
- .filter_map(|(line, line_number)| {
- let line = line.trim();
-
- // Within -start and -end, the tracking issue can be omitted.
- match line {
- "// no-tracking-issue-start" => {
- next_feature_omits_tracking_issue = true;
- return None;
- }
- "// no-tracking-issue-end" => {
- next_feature_omits_tracking_issue = false;
- return None;
- }
- _ => {}
+ let lines = contents.lines().zip(1..);
+ for (line, line_number) in lines {
+ let line = line.trim();
+
+ // Within -start and -end, the tracking issue can be omitted.
+ match line {
+ "// no-tracking-issue-start" => {
+ next_feature_omits_tracking_issue = true;
+ continue;
}
+ "// no-tracking-issue-end" => {
+ next_feature_omits_tracking_issue = false;
+ continue;
+ }
+ _ => {}
+ }
- if line.starts_with(FEATURE_GROUP_START_PREFIX) {
- if in_feature_group {
- tidy_error!(
- bad,
- "{}:{}: \
+ if line.starts_with(FEATURE_GROUP_START_PREFIX) {
+ if in_feature_group {
+ tidy_error!(
+ bad,
+ "{}:{}: \
new feature group is started without ending the previous one",
- path.display(),
- line_number,
- );
- }
-
- in_feature_group = true;
- prev_names = vec![];
- return None;
- } else if line.starts_with(FEATURE_GROUP_END_PREFIX) {
- in_feature_group = false;
- prev_names = vec![];
- return None;
+ path.display(),
+ line_number,
+ );
}
- let mut parts = line.split(',');
- let level = match parts.next().map(|l| l.trim().trim_start_matches('(')) {
- Some("active") => Status::Unstable,
- Some("incomplete") => Status::Unstable,
- Some("removed") => Status::Removed,
- Some("accepted") => Status::Stable,
- _ => return None,
- };
- let name = parts.next().unwrap().trim();
-
- let since_str = parts.next().unwrap().trim().trim_matches('"');
- let since = match since_str.parse() {
- Ok(since) => Some(since),
- Err(err) => {
- tidy_error!(
- bad,
- "{}:{}: failed to parse since: {} ({:?})",
- path.display(),
- line_number,
- since_str,
- err,
- );
- None
- }
- };
- if in_feature_group {
- if prev_names.last() > Some(&name) {
- // This assumes the user adds the feature name at the end of the list, as we're
- // not looking ahead.
- let correct_index = match prev_names.binary_search(&name) {
- Ok(_) => {
- // This only occurs when the feature name has already been declared.
- tidy_error!(
- bad,
- "{}:{}: duplicate feature {}",
- path.display(),
- line_number,
- name,
- );
- // skip any additional checks for this line
- return None;
- }
- Err(index) => index,
- };
+ in_feature_group = true;
+ prev_names = vec![];
+ continue;
+ } else if line.starts_with(FEATURE_GROUP_END_PREFIX) {
+ in_feature_group = false;
+ prev_names = vec![];
+ continue;
+ }
- let correct_placement = if correct_index == 0 {
- "at the beginning of the feature group".to_owned()
- } else if correct_index == prev_names.len() {
- // I don't believe this is reachable given the above assumption, but it
- // doesn't hurt to be safe.
- "at the end of the feature group".to_owned()
- } else {
- format!(
- "between {} and {}",
- prev_names[correct_index - 1],
- prev_names[correct_index],
- )
- };
+ let mut parts = line.split(',');
+ let level = match parts.next().map(|l| l.trim().trim_start_matches('(')) {
+ Some("active") => Status::Unstable,
+ Some("incomplete") => Status::Unstable,
+ Some("removed") => Status::Removed,
+ Some("accepted") => Status::Stable,
+ _ => continue,
+ };
+ let name = parts.next().unwrap().trim();
+
+ let since_str = parts.next().unwrap().trim().trim_matches('"');
+ let since = match since_str.parse() {
+ Ok(since) => Some(since),
+ Err(err) => {
+ tidy_error!(
+ bad,
+ "{}:{}: failed to parse since: {} ({:?})",
+ path.display(),
+ line_number,
+ since_str,
+ err,
+ );
+ None
+ }
+ };
+ if in_feature_group {
+ if prev_names.last() > Some(&name) {
+ // This assumes the user adds the feature name at the end of the list, as we're
+ // not looking ahead.
+ let correct_index = match prev_names.binary_search(&name) {
+ Ok(_) => {
+ // This only occurs when the feature name has already been declared.
+ tidy_error!(
+ bad,
+ "{}:{}: duplicate feature {}",
+ path.display(),
+ line_number,
+ name,
+ );
+ // skip any additional checks for this line
+ continue;
+ }
+ Err(index) => index,
+ };
- tidy_error!(
- bad,
- "{}:{}: feature {} is not sorted by feature name (should be {})",
- path.display(),
- line_number,
- name,
- correct_placement,
- );
- }
- prev_names.push(name);
+ let correct_placement = if correct_index == 0 {
+ "at the beginning of the feature group".to_owned()
+ } else if correct_index == prev_names.len() {
+ // I don't believe this is reachable given the above assumption, but it
+ // doesn't hurt to be safe.
+ "at the end of the feature group".to_owned()
+ } else {
+ format!(
+ "between {} and {}",
+ prev_names[correct_index - 1],
+ prev_names[correct_index],
+ )
+ };
+
+ tidy_error!(
+ bad,
+ "{}:{}: feature {} is not sorted by feature name (should be {})",
+ path.display(),
+ line_number,
+ name,
+ correct_placement,
+ );
}
+ prev_names.push(name);
+ }
- let issue_str = parts.next().unwrap().trim();
- let tracking_issue = if issue_str.starts_with("None") {
- if level == Status::Unstable && !next_feature_omits_tracking_issue {
- tidy_error!(
- bad,
- "{}:{}: no tracking issue for feature {}",
- path.display(),
- line_number,
- name,
- );
- }
- None
- } else {
- let s = issue_str.split('(').nth(1).unwrap().split(')').next().unwrap();
- Some(s.parse().unwrap())
- };
- Some((name.to_owned(), Feature { level, since, has_gate_test: false, tracking_issue }))
- })
- .collect()
+ let issue_str = parts.next().unwrap().trim();
+ let tracking_issue = if issue_str.starts_with("None") {
+ if level == Status::Unstable && !next_feature_omits_tracking_issue {
+ tidy_error!(
+ bad,
+ "{}:{}: no tracking issue for feature {}",
+ path.display(),
+ line_number,
+ name,
+ );
+ }
+ None
+ } else {
+ let s = issue_str.split('(').nth(1).unwrap().split(')').next().unwrap();
+ Some(s.parse().unwrap())
+ };
+ match features.entry(name.to_owned()) {
+ Entry::Occupied(e) => {
+ tidy_error!(
+ bad,
+ "{}:{} feature {name} already specified with status '{}'",
+ path.display(),
+ line_number,
+ e.get().level,
+ );
+ }
+ Entry::Vacant(e) => {
+ e.insert(Feature { level, since, has_gate_test: false, tracking_issue });
+ }
+ }
+ }
}
fn get_and_check_lib_features(
@@ -466,9 +478,9 @@ fn map_lib_features(
base_src_path: &Path,
mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize),
) {
- super::walk(
+ walk(
base_src_path,
- &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |path| filter_dirs(path) || path.ends_with("src/test"),
&mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
@@ -537,7 +549,9 @@ fn map_lib_features(
becoming_feature = None;
if line.contains("rustc_const_unstable(") {
// `const fn` features are handled specially.
- let feature_name = match find_attr_val(line, "feature") {
+ let feature_name = match find_attr_val(line, "feature").or_else(|| {
+ iter_lines.peek().and_then(|next| find_attr_val(next.1, "feature"))
+ }) {
Some(name) => name,
None => err!("malformed stability attribute: missing `feature` key"),
};
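
The main behavioural change in this rewrite is that collect_lang_features_in now inserts into a shared map through the HashMap Entry API, so a feature declared twice across active.rs, accepted.rs, and removed.rs is reported instead of silently overwriting the earlier entry. A reduced sketch of that pattern (names and statuses are illustrative, not the tidy types):

```rust
use std::collections::hash_map::{Entry, HashMap};

fn record_feature(features: &mut HashMap<String, &'static str>, name: &str, status: &'static str) {
    match features.entry(name.to_owned()) {
        // Already present: report instead of overwriting the earlier status.
        Entry::Occupied(e) => {
            eprintln!("feature {name} already specified with status '{}'", e.get())
        }
        // First sighting: store the status.
        Entry::Vacant(e) => {
            e.insert(status);
        }
    }
}

fn main() {
    let mut features = HashMap::new();
    record_feature(&mut features, "generic_arg_infer", "active");
    record_feature(&mut features, "generic_arg_infer", "removed"); // reported, not overwritten
    assert_eq!(features["generic_arg_infer"], "active");
}
```
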
diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs
index 12d3bdcd7..fc0bce585 100644
--- a/src/tools/tidy/src/lib.rs
+++ b/src/tools/tidy/src/lib.rs
@@ -3,8 +3,6 @@
//! This library contains the tidy lints and exposes it
//! to be used by tools.
-use walk::{filter_dirs, walk, walk_many, walk_no_read};
-
/// A helper macro to `unwrap` a result except also print out details like:
///
/// * The expression that failed
@@ -40,6 +38,7 @@ macro_rules! tidy_error {
});
}
+pub mod alphabetical;
pub mod bins;
pub mod debug_artifacts;
pub mod deps;
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index c1ce94f47..ca785042a 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -90,6 +90,10 @@ fn main() {
check!(edition, &compiler_path);
check!(edition, &library_path);
+ check!(alphabetical, &src_path);
+ check!(alphabetical, &compiler_path);
+ check!(alphabetical, &library_path);
+
let collected = {
while handles.len() >= concurrency.get() {
handles.pop_front().unwrap().join().unwrap();
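
The three new check! invocations run the alphabetical check over the source, compiler, and library trees. Assuming the macro simply forwards each path to the check's check(path, &mut bad) entry point (the signature added in alphabetical.rs above), an equivalent direct call would look roughly like this; the helper name is made up:

```rust
use std::path::Path;

// Hypothetical helper: what the three new `check!(alphabetical, ...)` lines
// amount to, if the macro just forwards each path plus a shared `bad` flag.
fn run_alphabetical_checks(src_path: &Path, compiler_path: &Path, library_path: &Path) -> bool {
    let mut bad = false;
    tidy::alphabetical::check(src_path, &mut bad);
    tidy::alphabetical::check(compiler_path, &mut bad);
    tidy::alphabetical::check(library_path, &mut bad);
    bad
}
```
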
diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs
index 4d86fe8be..f4592fdcf 100644
--- a/src/tools/tidy/src/pal.rs
+++ b/src/tools/tidy/src/pal.rs
@@ -30,6 +30,7 @@
//! platform-specific cfgs are allowed. Not sure yet how to deal with
//! this in the long term.
+use crate::walk::{filter_dirs, walk};
use std::iter::Iterator;
use std::path::Path;
@@ -67,7 +68,7 @@ pub fn check(path: &Path, bad: &mut bool) {
// Sanity check that the complex parsing here works.
let mut saw_target_arch = false;
let mut saw_cfg_bang = false;
- super::walk(path, &mut super::filter_dirs, &mut |entry, contents| {
+ walk(path, &mut filter_dirs, &mut |entry, contents| {
let file = entry.path();
let filestr = file.to_string_lossy().replace("\\", "/");
if !filestr.ends_with(".rs") {
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index dee58ff2f..541380ceb 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -16,6 +16,7 @@
//! A number of these checks can be opted-out of with various directives of the form:
//! `// ignore-tidy-CHECK-NAME`.
+use crate::walk::{filter_dirs, walk};
use regex::Regex;
use std::path::Path;
@@ -218,13 +219,13 @@ fn is_unexplained_ignore(extension: &str, line: &str) -> bool {
pub fn check(path: &Path, bad: &mut bool) {
fn skip(path: &Path) -> bool {
- super::filter_dirs(path) || skip_markdown_path(path)
+ filter_dirs(path) || skip_markdown_path(path)
}
let problematic_consts_strings: Vec<String> = (PROBLEMATIC_CONSTS.iter().map(u32::to_string))
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:x}", v)))
.chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:X}", v)))
.collect();
- super::walk(path, &mut skip, &mut |entry, contents| {
+ walk(path, &mut skip, &mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css", ".ftl"];
diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs
index 723684bfa..8ba257056 100644
--- a/src/tools/tidy/src/target_specific_tests.rs
+++ b/src/tools/tidy/src/target_specific_tests.rs
@@ -36,7 +36,7 @@ struct RevisionInfo<'a> {
pub fn check(path: &Path, bad: &mut bool) {
let tests = path.join("test");
- super::walk(
+ crate::walk::walk(
&tests,
&mut |path| path.extension().map(|p| p == "rs") == Some(false),
&mut |entry, content| {
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index 8ec5c3324..c600f99c2 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -7,8 +7,8 @@ use std::path::Path;
const ENTRY_LIMIT: usize = 1000;
// FIXME: The following limits should be reduced eventually.
-const ROOT_ENTRY_LIMIT: usize = 968;
-const ISSUES_ENTRY_LIMIT: usize = 2147;
+const ROOT_ENTRY_LIMIT: usize = 948;
+const ISSUES_ENTRY_LIMIT: usize = 2117;
fn check_entries(path: &Path, bad: &mut bool) {
let dirs = walkdir::WalkDir::new(&path.join("test/ui"))
@@ -47,7 +47,7 @@ fn check_entries(path: &Path, bad: &mut bool) {
pub fn check(path: &Path, bad: &mut bool) {
check_entries(&path, bad);
for path in &[&path.join("test/ui"), &path.join("test/ui-fulldeps")] {
- super::walk_no_read(path, &mut |_| false, &mut |entry| {
+ crate::walk::walk_no_read(path, &mut |_| false, &mut |entry| {
let file_path = entry.path();
if let Some(ext) = file_path.extension() {
if ext == "stderr" || ext == "stdout" {
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index f675b7865..2c23b6ebc 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -7,6 +7,7 @@
//! named `tests.rs` or `benches.rs`, or directories named `tests` or `benches` unconfigured
//! during normal build.
+use crate::walk::{filter_dirs, walk};
use std::path::Path;
pub fn check(root_path: &Path, bad: &mut bool) {
@@ -20,7 +21,7 @@ pub fn check(root_path: &Path, bad: &mut bool) {
let mut skip = |path: &Path| {
let file_name = path.file_name().unwrap_or_default();
if path.is_dir() {
- super::filter_dirs(path)
+ filter_dirs(path)
|| path.ends_with("src/test")
|| path.ends_with("src/doc")
|| (file_name == "tests" || file_name == "benches") && !is_core(path)
@@ -34,7 +35,7 @@ pub fn check(root_path: &Path, bad: &mut bool) {
}
};
- super::walk(root_path, &mut skip, &mut |entry, contents| {
+ walk(root_path, &mut skip, &mut |entry, contents| {
let path = entry.path();
let is_core = path.starts_with(core);
for (i, line) in contents.lines().enumerate() {
diff --git a/src/tools/tidy/src/walk.rs b/src/tools/tidy/src/walk.rs
index b07e80767..4cfb70fa3 100644
--- a/src/tools/tidy/src/walk.rs
+++ b/src/tools/tidy/src/walk.rs
@@ -21,6 +21,12 @@ pub fn filter_dirs(path: &Path) -> bool {
"src/tools/rust-installer",
"src/tools/rustfmt",
"src/doc/book",
+ "src/doc/edition-guide",
+ "src/doc/embedded-book",
+ "src/doc/nomicon",
+ "src/doc/rust-by-example",
+ "src/doc/rustc-dev-guide",
+ "src/doc/reference",
// Filter RLS output directories
"target/rls",
"src/bootstrap/target",