Diffstat (limited to 'src/bootstrap')
-rw-r--r--  src/bootstrap/CHANGELOG.md            |    1
-rw-r--r--  src/bootstrap/Cargo.lock              |  125
-rw-r--r--  src/bootstrap/Cargo.toml              |   11
-rw-r--r--  src/bootstrap/bin/main.rs             |    6
-rw-r--r--  src/bootstrap/bin/rustc.rs            |   11
-rw-r--r--  src/bootstrap/bin/rustdoc.rs          |    9
-rw-r--r--  src/bootstrap/bolt.rs                 |    2
-rw-r--r--  src/bootstrap/bootstrap.py            |  350
-rw-r--r--  src/bootstrap/bootstrap_test.py       |   16
-rw-r--r--  src/bootstrap/builder.rs              |   88
-rw-r--r--  src/bootstrap/builder/tests.rs        |   36
-rw-r--r--  src/bootstrap/channel.rs              |    2
-rw-r--r--  src/bootstrap/check.rs                |  100
-rw-r--r--  src/bootstrap/clean.rs                |    2
-rw-r--r--  src/bootstrap/compile.rs              |  121
-rw-r--r--  src/bootstrap/config.rs               |  262
-rw-r--r--  src/bootstrap/config/tests.rs         |   83
-rwxr-xr-x  src/bootstrap/configure.py            |   28
-rw-r--r--  src/bootstrap/dist.rs                 |   19
-rw-r--r--  src/bootstrap/doc.rs                  |    7
-rw-r--r--  src/bootstrap/download-ci-llvm-stamp  |    2
-rw-r--r--  src/bootstrap/download.rs             |   55
-rw-r--r--  src/bootstrap/flags.rs                | 1033
-rw-r--r--  src/bootstrap/format.rs               |    6
-rw-r--r--  src/bootstrap/install.rs              |    4
-rw-r--r--  src/bootstrap/lib.rs                  |  133
-rw-r--r--  src/bootstrap/llvm.rs                 |   15
-rw-r--r--  src/bootstrap/metadata.rs             |   75
-rw-r--r--  src/bootstrap/metrics.rs              |  142
-rw-r--r--  src/bootstrap/render_tests.rs         |   12
-rw-r--r--  src/bootstrap/run.rs                  |   61
-rw-r--r--  src/bootstrap/sanity.rs               |    2
-rw-r--r--  src/bootstrap/test.rs                 |  787
-rw-r--r--  src/bootstrap/tool.rs                 |  245
-rw-r--r--  src/bootstrap/util.rs                 |    2
35 files changed, 2062 insertions, 1791 deletions
diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md
index 74dd22df9..d6924cf2c 100644
--- a/src/bootstrap/CHANGELOG.md
+++ b/src/bootstrap/CHANGELOG.md
@@ -27,6 +27,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
from the default rust toolchain. [#78513](https://github.com/rust-lang/rust/pull/78513)
- Add options for enabling overflow checks, one for std (`overflow-checks-std`) and one for everything else (`overflow-checks`). Both default to false.
- Add llvm option `enable-warnings` to have control on llvm compilation warnings. Default to false.
+- Add `rpath` option in `target` section to support setting the rpath option for each target independently. [#111242](https://github.com/rust-lang/rust/pull/111242)
## [Version 2] - 2020-09-25
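The per-target `rpath` knob added in this changelog entry surfaces later in the diff as `builder.config.rpath_enabled(target)` (see the builder.rs hunks below). A minimal sketch of how such an override can fall back to the global `rust.rpath` setting; the field and type names here are illustrative, since the real lookup lives in config.rs, which is not shown in this excerpt:

```rust
use std::collections::HashMap;

struct TargetConfig {
    rpath: Option<bool>, // `target.<triple>.rpath`, if present in config.toml
}

struct Config {
    rust_rpath: bool, // global `rust.rpath`
    target_config: HashMap<String, TargetConfig>,
}

impl Config {
    fn rpath_enabled(&self, target: &str) -> bool {
        self.target_config
            .get(target)
            .and_then(|t| t.rpath)      // a per-target value wins when set...
            .unwrap_or(self.rust_rpath) // ...otherwise fall back to the global flag
    }
}

fn main() {
    let mut targets = HashMap::new();
    targets.insert("x86_64-unknown-linux-musl".to_string(), TargetConfig { rpath: Some(false) });
    let config = Config { rust_rpath: true, target_config: targets };
    assert!(!config.rpath_enabled("x86_64-unknown-linux-musl"));
    assert!(config.rpath_enabled("aarch64-unknown-linux-gnu"));
}
```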
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index a158d1f71..8f8778efe 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -12,6 +12,12 @@ dependencies = [
]
[[package]]
+name = "anstyle"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d"
+
+[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -38,10 +44,11 @@ version = "0.0.0"
dependencies = [
"build_helper",
"cc",
+ "clap",
+ "clap_complete",
"cmake",
"fd-lock",
"filetime",
- "getopts",
"hex",
"ignore",
"is-terminal",
@@ -51,6 +58,7 @@ dependencies = [
"once_cell",
"opener",
"pretty_assertions",
+ "semver",
"serde",
"serde_derive",
"serde_json",
@@ -92,6 +100,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
+name = "clap"
+version = "4.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "956ac1f6381d8d82ab4684768f89c0ea3afe66925ceadb4eeb3fc452ffc55d62"
+dependencies = [
+ "clap_builder",
+ "clap_derive",
+ "once_cell",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84080e799e54cff944f4b4a4b0e71630b0e0443b25b985175c7dddc1a859b749"
+dependencies = [
+ "anstyle",
+ "bitflags",
+ "clap_lex",
+]
+
+[[package]]
+name = "clap_complete"
+version = "4.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36774babb166352bb4f7b9cb16f781ffa3439d2a8f12cd31bea85a38c888fea3"
+dependencies = [
+ "clap",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.8",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1"
+
+[[package]]
name = "cmake"
version = "0.1.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -175,7 +232,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c"
dependencies = [
"quote",
- "syn",
+ "syn 1.0.102",
]
[[package]]
@@ -261,15 +318,6 @@ dependencies = [
]
[[package]]
-name = "getopts"
-version = "0.2.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
-dependencies = [
- "unicode-width",
-]
-
-[[package]]
name = "globset"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -283,6 +331,12 @@ dependencies = [
]
[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+
+[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -374,9 +428,9 @@ checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c"
[[package]]
name = "linux-raw-sys"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f"
+checksum = "3f508063cc7bb32987c71511216bd5a32be15bccb6a80b52df8b9d7f01fc3aa2"
[[package]]
name = "log"
@@ -434,9 +488,9 @@ dependencies = [
[[package]]
name = "object"
-version = "0.29.0"
+version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
dependencies = [
"memchr",
]
@@ -486,18 +540,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.46"
+version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
+checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.18"
+version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
dependencies = [
"proc-macro2",
]
@@ -593,20 +647,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
+name = "semver"
+version = "1.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
+
+[[package]]
name = "serde"
-version = "1.0.137"
+version = "1.0.160"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
+checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c"
[[package]]
name = "serde_derive"
-version = "1.0.137"
+version = "1.0.160"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
+checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.8",
]
[[package]]
@@ -643,6 +703,17 @@ dependencies = [
]
[[package]]
+name = "syn"
+version = "2.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcc02725fd69ab9f26eab07fad303e2497fad6fb9eba4f96c4d1687bdf704ad9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
name = "sysinfo"
version = "0.26.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -708,12 +779,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee"
[[package]]
-name = "unicode-width"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
-
-[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index eeda6d7c1..367c61909 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -34,11 +34,10 @@ is-terminal = "0.4"
build_helper = { path = "../tools/build_helper" }
cmake = "0.1.38"
filetime = "0.2"
-getopts = "0.2.19"
cc = "1.0.69"
libc = "0.2"
hex = "0.4"
-object = { version = "0.29.0", default-features = false, features = ["archive", "coff", "read_core", "unaligned"] }
+object = { version = "0.31.1", default-features = false, features = ["archive", "coff", "read_core", "unaligned"] }
serde = "1.0.137"
# Directly use serde_derive rather than through the derive feature of serde to allow building both
# in parallel and to allow serde_json and toml to start building as soon as serde has been built.
@@ -56,6 +55,9 @@ walkdir = "2"
# Dependencies needed by the build-metrics feature
sysinfo = { version = "0.26.0", optional = true }
+clap = { version = "4.2.4", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] }
+clap_complete = "4.2.2"
+semver = "1.0.17"
# Solaris doesn't support flock() and thus fd-lock is not option now
[target.'cfg(not(target_os = "solaris"))'.dependencies]
@@ -69,13 +71,9 @@ version = "0.46.0"
features = [
"Win32_Foundation",
"Win32_Security",
- "Win32_Storage_FileSystem",
"Win32_System_Diagnostics_Debug",
- "Win32_System_IO",
- "Win32_System_Ioctl",
"Win32_System_JobObjects",
"Win32_System_ProcessStatus",
- "Win32_System_SystemServices",
"Win32_System_Threading",
"Win32_System_Time",
]
@@ -90,6 +88,7 @@ build-metrics = ["sysinfo"]
# dependencies, only bootstrap itself.
[profile.dev]
debug = 0
+
[profile.dev.package]
# Only use debuginfo=1 to further reduce compile times.
bootstrap.debug = 1
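The dependency changes above are the core of this diff: `getopts` is dropped in favour of `clap` 4 with the `derive`, `usage`, `help` and `error-context` features, plus `clap_complete` for shell completions and `semver` for version comparisons. The rewritten flags.rs (1033 lines changed per the diffstat) is not shown in this excerpt; the following is only an illustrative sketch of the derive-based CLI style it moves to, with made-up option names:

```rust
use clap::{Parser, Subcommand};

/// Illustrative subset of a derive-based CLI in the style bootstrap adopts.
#[derive(Parser)]
#[command(name = "x.py")]
struct Flags {
    /// Repeatable verbosity flag (`-v`, `-vv`, ...)
    #[arg(short, long, action = clap::ArgAction::Count)]
    verbose: u8,
    #[command(subcommand)]
    cmd: Cmd,
}

#[derive(Subcommand)]
enum Cmd {
    /// Compile the compiler and standard library
    Build,
    /// Run the test suites
    Test {
        /// Keep going after the first test failure
        #[arg(long)]
        no_fail_fast: bool,
    },
}

fn main() {
    let flags = Flags::parse();
    match flags.cmd {
        Cmd::Build => println!("build, verbosity {}", flags.verbose),
        Cmd::Test { no_fail_fast } => println!("test, no_fail_fast = {no_fail_fast}"),
    }
}
```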
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
index 912d875e4..a80379e85 100644
--- a/src/bootstrap/bin/main.rs
+++ b/src/bootstrap/bin/main.rs
@@ -16,9 +16,11 @@ fn main() {
let config = Config::parse(&args);
#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+ let mut build_lock;
+ #[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+ let _build_lock_guard;
+ #[cfg(all(any(unix, windows), not(target_os = "solaris")))]
{
- let mut build_lock;
- let _build_lock_guard;
let path = config.out.join("lock");
build_lock = fd_lock::RwLock::new(t!(std::fs::File::create(&path)));
_build_lock_guard = match build_lock.try_write() {
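The main.rs hunk hoists `build_lock` and `_build_lock_guard` out of the inner block, so the file-lock guard now lives until the end of `main` instead of being dropped when that block ends. A compressed sketch of the fd-lock pattern involved, with illustrative paths and messages:

```rust
use std::fs::File;

fn main() -> std::io::Result<()> {
    // The lock object must outlive the guard, and the guard must outlive the
    // build, which is why both now live at `main`'s top-level scope.
    let mut build_lock = fd_lock::RwLock::new(File::create("build/lock")?);
    let _build_lock_guard = match build_lock.try_write() {
        Ok(lock) => lock,
        err => {
            drop(err);
            eprintln!("warning: build directory locked, waiting for lock");
            build_lock.write()?
        }
    };
    // ... the rest of the build runs while the lock is held ...
    Ok(())
}
```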
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index 040fec361..e87125a49 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -19,7 +19,7 @@ include!("../dylib_util.rs");
use std::env;
use std::path::PathBuf;
-use std::process::{Child, Command};
+use std::process::{exit, Child, Command};
use std::str::FromStr;
use std::time::Instant;
@@ -47,7 +47,12 @@ fn main() {
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
- let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
+ let stage = env::var("RUSTC_STAGE").unwrap_or_else(|_| {
+ // Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead.
+ eprintln!("rustc shim: fatal: RUSTC_STAGE was not set");
+ eprintln!("rustc shim: note: use `x.py build -vvv` to see all environment variables set by bootstrap");
+ exit(101);
+ });
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
@@ -150,7 +155,7 @@ fn main() {
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those
// may be proc macros, in which case we might ship them.
- if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() && (stage != "0" || target.is_some()) {
+ if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
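For context on why a missing `RUSTC_STAGE` is now a friendly error rather than a panic: the shim is only meaningful with the environment bootstrap sets up for it, roughly as in this sketch (the binary path and sysroot layout below are assumptions, only the variable names come from the code above):

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Bootstrap invokes the shim with the variables it needs; running the
    // binary by hand without them now prints the hint shown in the diff.
    let status = Command::new("build/bootstrap/debug/rustc")
        .env("RUSTC_STAGE", "1")
        .env("RUSTC_SYSROOT", "build/host/stage1")
        .env("RUSTC_REAL", "build/host/stage1/bin/rustc")
        .arg("--version")
        .status()?;
    assert!(status.success());
    Ok(())
}
```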
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
index 23828f475..d2b85f7a6 100644
--- a/src/bootstrap/bin/rustdoc.rs
+++ b/src/bootstrap/bin/rustdoc.rs
@@ -5,13 +5,18 @@
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
-use std::process::Command;
+use std::process::{exit, Command};
include!("../dylib_util.rs");
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
- let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
+ let stage = env::var("RUSTC_STAGE").unwrap_or_else(|_| {
+ // Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead.
+ eprintln!("rustc shim: fatal: RUSTC_STAGE was not set");
+ eprintln!("rustc shim: note: use `x.py build -vvv` to see all environment variables set by bootstrap");
+ exit(101);
+ });
let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
diff --git a/src/bootstrap/bolt.rs b/src/bootstrap/bolt.rs
index 10e6d2e7d..5384181ea 100644
--- a/src/bootstrap/bolt.rs
+++ b/src/bootstrap/bolt.rs
@@ -40,7 +40,7 @@ pub fn optimize_with_bolt(path: &Path, profile_path: &Path, output_path: &Path)
// Reorder functions within the binary
.arg("-reorder-functions=hfsort+")
// Split function code into hot and code regions
- .arg("-split-functions=2")
+ .arg("-split-functions")
// Split as many basic blocks as possible
.arg("-split-all-cold")
// Move jump tables to a separate section
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 025145244..58d1926ad 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -13,17 +13,35 @@ import tarfile
import tempfile
from time import time
+from multiprocessing import Pool, cpu_count
try:
import lzma
except ImportError:
lzma = None
-if sys.platform == 'win32':
+def platform_is_win32():
+ return sys.platform == 'win32'
+
+if platform_is_win32():
EXE_SUFFIX = ".exe"
else:
EXE_SUFFIX = ""
+def get_cpus():
+ if hasattr(os, "sched_getaffinity"):
+ return len(os.sched_getaffinity(0))
+ if hasattr(os, "cpu_count"):
+ cpus = os.cpu_count()
+ if cpus is not None:
+ return cpus
+ try:
+ return cpu_count()
+ except NotImplementedError:
+ return 1
+
+
+
def get(base, url, path, checksums, verbose=False):
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_path = temp_file.name
@@ -39,23 +57,23 @@ def get(base, url, path, checksums, verbose=False):
if os.path.exists(path):
if verify(path, sha256, False):
if verbose:
- print("using already-download file", path)
+ print("using already-download file", path, file=sys.stderr)
return
else:
if verbose:
print("ignoring already-download file",
- path, "due to failed verification")
+ path, "due to failed verification", file=sys.stderr)
os.unlink(path)
download(temp_path, "{}/{}".format(base, url), True, verbose)
if not verify(temp_path, sha256, verbose):
raise RuntimeError("failed verification")
if verbose:
- print("moving {} to {}".format(temp_path, path))
+ print("moving {} to {}".format(temp_path, path), file=sys.stderr)
shutil.move(temp_path, path)
finally:
if os.path.isfile(temp_path):
if verbose:
- print("removing", temp_path)
+ print("removing", temp_path, file=sys.stderr)
os.unlink(temp_path)
@@ -65,7 +83,7 @@ def download(path, url, probably_big, verbose):
_download(path, url, probably_big, verbose, True)
return
except RuntimeError:
- print("\nspurious failure, trying again")
+ print("\nspurious failure, trying again", file=sys.stderr)
_download(path, url, probably_big, verbose, False)
@@ -76,9 +94,8 @@ def _download(path, url, probably_big, verbose, exception):
# - If we are on win32 fallback to powershell
# - Otherwise raise the error if appropriate
if probably_big or verbose:
- print("downloading {}".format(url))
+ print("downloading {}".format(url), file=sys.stderr)
- platform_is_win32 = sys.platform == 'win32'
try:
if probably_big or verbose:
option = "-#"
@@ -86,21 +103,21 @@ def _download(path, url, probably_big, verbose, exception):
option = "-s"
# If curl is not present on Win32, we should not sys.exit
# but raise `CalledProcessError` or `OSError` instead
- require(["curl", "--version"], exception=platform_is_win32)
+ require(["curl", "--version"], exception=platform_is_win32())
with open(path, "wb") as outfile:
run(["curl", option,
"-L", # Follow redirect.
"-y", "30", "-Y", "10", # timeout if speed is < 10 bytes/sec for > 30 seconds
"--connect-timeout", "30", # timeout if cannot connect within 30 seconds
- "--retry", "3", "-Sf", url],
+ "--retry", "3", "-SRf", url],
stdout=outfile, #Implements cli redirect operator '>'
verbose=verbose,
exception=True, # Will raise RuntimeError on failure
)
except (subprocess.CalledProcessError, OSError, RuntimeError):
# see http://serverfault.com/questions/301128/how-to-download
- if platform_is_win32:
- run(["PowerShell.exe", "/nologo", "-Command",
+ if platform_is_win32():
+ run_powershell([
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
"(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
verbose=verbose,
@@ -113,20 +130,20 @@ def _download(path, url, probably_big, verbose, exception):
def verify(path, expected, verbose):
"""Check if the sha256 sum of the given path is valid"""
if verbose:
- print("verifying", path)
+ print("verifying", path, file=sys.stderr)
with open(path, "rb") as source:
found = hashlib.sha256(source.read()).hexdigest()
verified = found == expected
if not verified:
print("invalid checksum:\n"
" found: {}\n"
- " expected: {}".format(found, expected))
+ " expected: {}".format(found, expected), file=sys.stderr)
return verified
def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
"""Unpack the given tarball file"""
- print("extracting", tarball)
+ print("extracting", tarball, file=sys.stderr)
fname = os.path.basename(tarball).replace(tarball_suffix, "")
with contextlib.closing(tarfile.open(tarball)) as tar:
for member in tar.getnames():
@@ -139,7 +156,7 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
dst_path = os.path.join(dst, name)
if verbose:
- print(" extracting", member)
+ print(" extracting", member, file=sys.stderr)
tar.extract(member, dst)
src_path = os.path.join(dst, member)
if os.path.isdir(src_path) and os.path.exists(dst_path):
@@ -151,7 +168,7 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
"""Run a child program in a new process"""
if verbose:
- print("running: " + ' '.join(args))
+ print("running: " + ' '.join(args), file=sys.stderr)
sys.stdout.flush()
# Ensure that the .exe is used on Windows just in case a Linux ELF has been
# compiled in the same directory.
@@ -174,6 +191,10 @@ def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
else:
sys.exit(err)
+def run_powershell(script, *args, **kwargs):
+ """Run a powershell script"""
+ run(["PowerShell.exe", "/nologo", "-Command"] + script, *args, **kwargs)
+
def require(cmd, exit=True, exception=False):
'''Run a command, returning its output.
@@ -187,8 +208,8 @@ def require(cmd, exit=True, exception=False):
if exception:
raise
elif exit:
- print("error: unable to run `{}`: {}".format(' '.join(cmd), exc))
- print("Please make sure it's installed and in the path.")
+ print("error: unable to run `{}`: {}".format(' '.join(cmd), exc), file=sys.stderr)
+ print("Please make sure it's installed and in the path.", file=sys.stderr)
sys.exit(1)
return None
@@ -205,38 +226,41 @@ def format_build_time(duration):
def default_build_triple(verbose):
"""Build triple as in LLVM"""
- # If the user already has a host build triple with an existing `rustc`
- # install, use their preference. This fixes most issues with Windows builds
- # being detected as GNU instead of MSVC.
+ # If we're on Windows and have an existing `rustc` toolchain, use `rustc --version --verbose`
+ # to find our host target triple. This fixes an issue with Windows builds being detected
+ # as GNU instead of MSVC.
+ # Otherwise, detect it via `uname`
default_encoding = sys.getdefaultencoding()
- try:
- version = subprocess.check_output(["rustc", "--version", "--verbose"],
- stderr=subprocess.DEVNULL)
- version = version.decode(default_encoding)
- host = next(x for x in version.split('\n') if x.startswith("host: "))
- triple = host.split("host: ")[1]
- if verbose:
- print("detected default triple {} from pre-installed rustc".format(triple))
- return triple
- except Exception as e:
- if verbose:
- print("pre-installed rustc not detected: {}".format(e))
- print("falling back to auto-detect")
- required = sys.platform != 'win32'
- ostype = require(["uname", "-s"], exit=required)
- cputype = require(['uname', '-m'], exit=required)
+ if platform_is_win32():
+ try:
+ version = subprocess.check_output(["rustc", "--version", "--verbose"],
+ stderr=subprocess.DEVNULL)
+ version = version.decode(default_encoding)
+ host = next(x for x in version.split('\n') if x.startswith("host: "))
+ triple = host.split("host: ")[1]
+ if verbose:
+ print("detected default triple {} from pre-installed rustc".format(triple),
+ file=sys.stderr)
+ return triple
+ except Exception as e:
+ if verbose:
+ print("pre-installed rustc not detected: {}".format(e),
+ file=sys.stderr)
+ print("falling back to auto-detect", file=sys.stderr)
+
+ required = not platform_is_win32()
+ uname = require(["uname", "-smp"], exit=required)
# If we do not have `uname`, assume Windows.
- if ostype is None or cputype is None:
+ if uname is None:
return 'x86_64-pc-windows-msvc'
- ostype = ostype.decode(default_encoding)
- cputype = cputype.decode(default_encoding)
+ kernel, cputype, processor = uname.decode(default_encoding).split()
# The goal here is to come up with the same triple as LLVM would,
# at least for the subset of platforms we're willing to target.
- ostype_mapper = {
+ kerneltype_mapper = {
'Darwin': 'apple-darwin',
'DragonFly': 'unknown-dragonfly',
'FreeBSD': 'unknown-freebsd',
@@ -246,17 +270,18 @@ def default_build_triple(verbose):
}
# Consider the direct transformation first and then the special cases
- if ostype in ostype_mapper:
- ostype = ostype_mapper[ostype]
- elif ostype == 'Linux':
- os_from_sp = subprocess.check_output(
- ['uname', '-o']).strip().decode(default_encoding)
- if os_from_sp == 'Android':
- ostype = 'linux-android'
+ if kernel in kerneltype_mapper:
+ kernel = kerneltype_mapper[kernel]
+ elif kernel == 'Linux':
+ # Apple doesn't support `-o` so this can't be used in the combined
+ # uname invocation above
+ ostype = require(["uname", "-o"], exit=required).decode(default_encoding)
+ if ostype == 'Android':
+ kernel = 'linux-android'
else:
- ostype = 'unknown-linux-gnu'
- elif ostype == 'SunOS':
- ostype = 'pc-solaris'
+ kernel = 'unknown-linux-gnu'
+ elif kernel == 'SunOS':
+ kernel = 'pc-solaris'
# On Solaris, uname -m will return a machine classification instead
# of a cpu type, so uname -p is recommended instead. However, the
# output from that option is too generic for our purposes (it will
@@ -265,34 +290,34 @@ def default_build_triple(verbose):
cputype = require(['isainfo', '-k']).decode(default_encoding)
# sparc cpus have sun as a target vendor
if 'sparc' in cputype:
- ostype = 'sun-solaris'
- elif ostype.startswith('MINGW'):
+ kernel = 'sun-solaris'
+ elif kernel.startswith('MINGW'):
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
# msys1 is always i686 and msys2 is always x86_64.
# instead, msys defines $MSYSTEM which is MINGW32 on i686 and
# MINGW64 on x86_64.
- ostype = 'pc-windows-gnu'
+ kernel = 'pc-windows-gnu'
cputype = 'i686'
if os.environ.get('MSYSTEM') == 'MINGW64':
cputype = 'x86_64'
- elif ostype.startswith('MSYS'):
- ostype = 'pc-windows-gnu'
- elif ostype.startswith('CYGWIN_NT'):
+ elif kernel.startswith('MSYS'):
+ kernel = 'pc-windows-gnu'
+ elif kernel.startswith('CYGWIN_NT'):
cputype = 'i686'
- if ostype.endswith('WOW64'):
+ if kernel.endswith('WOW64'):
cputype = 'x86_64'
- ostype = 'pc-windows-gnu'
- elif sys.platform == 'win32':
+ kernel = 'pc-windows-gnu'
+ elif platform_is_win32():
# Some Windows platforms might have a `uname` command that returns a
# non-standard string (e.g. gnuwin32 tools returns `windows32`). In
# these cases, fall back to using sys.platform.
return 'x86_64-pc-windows-msvc'
else:
- err = "unknown OS type: {}".format(ostype)
+ err = "unknown OS type: {}".format(kernel)
sys.exit(err)
- if cputype in ['powerpc', 'riscv'] and ostype == 'unknown-freebsd':
+ if cputype in ['powerpc', 'riscv'] and kernel == 'unknown-freebsd':
cputype = subprocess.check_output(
['uname', '-p']).strip().decode(default_encoding)
cputype_mapper = {
@@ -325,24 +350,23 @@ def default_build_triple(verbose):
cputype = cputype_mapper[cputype]
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
- if ostype == 'linux-android':
- ostype = 'linux-androideabi'
- elif ostype == 'unknown-freebsd':
- cputype = subprocess.check_output(
- ['uname', '-p']).strip().decode(default_encoding)
- ostype = 'unknown-freebsd'
+ if kernel == 'linux-android':
+ kernel = 'linux-androideabi'
+ elif kernel == 'unknown-freebsd':
+ cputype = processor
+ kernel = 'unknown-freebsd'
elif cputype == 'armv6l':
cputype = 'arm'
- if ostype == 'linux-android':
- ostype = 'linux-androideabi'
+ if kernel == 'linux-android':
+ kernel = 'linux-androideabi'
else:
- ostype += 'eabihf'
+ kernel += 'eabihf'
elif cputype in {'armv7l', 'armv8l'}:
cputype = 'armv7'
- if ostype == 'linux-android':
- ostype = 'linux-androideabi'
+ if kernel == 'linux-android':
+ kernel = 'linux-androideabi'
else:
- ostype += 'eabihf'
+ kernel += 'eabihf'
elif cputype == 'mips':
if sys.byteorder == 'big':
cputype = 'mips'
@@ -358,14 +382,14 @@ def default_build_triple(verbose):
else:
raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
# only the n64 ABI is supported, indicate it
- ostype += 'abi64'
+ kernel += 'abi64'
elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
pass
else:
err = "unknown cpu type: {}".format(cputype)
sys.exit(err)
- return "{}-{}".format(cputype, ostype)
+ return "{}-{}".format(cputype, kernel)
@contextlib.contextmanager
@@ -392,6 +416,48 @@ class Stage0Toolchain:
return self.version + "-" + self.date
+class DownloadInfo:
+ """A helper class that can be pickled into a parallel subprocess"""
+
+ def __init__(
+ self,
+ base_download_url,
+ download_path,
+ bin_root,
+ tarball_path,
+ tarball_suffix,
+ checksums_sha256,
+ pattern,
+ verbose,
+ ):
+ self.base_download_url = base_download_url
+ self.download_path = download_path
+ self.bin_root = bin_root
+ self.tarball_path = tarball_path
+ self.tarball_suffix = tarball_suffix
+ self.checksums_sha256 = checksums_sha256
+ self.pattern = pattern
+ self.verbose = verbose
+
+def download_component(download_info):
+ if not os.path.exists(download_info.tarball_path):
+ get(
+ download_info.base_download_url,
+ download_info.download_path,
+ download_info.tarball_path,
+ download_info.checksums_sha256,
+ verbose=download_info.verbose,
+ )
+
+def unpack_component(download_info):
+ unpack(
+ download_info.tarball_path,
+ download_info.tarball_suffix,
+ download_info.bin_root,
+ match=download_info.pattern,
+ verbose=download_info.verbose,
+ )
+
class RustBuild(object):
"""Provide all the methods required to build Rust"""
def __init__(self):
@@ -428,18 +494,71 @@ class RustBuild(object):
(not os.path.exists(self.rustc()) or
self.program_out_of_date(self.rustc_stamp(), key)):
if os.path.exists(bin_root):
+ # HACK: On Windows, we can't delete rust-analyzer-proc-macro-server while it's
+ # running. Kill it.
+ if platform_is_win32():
+ print("Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain")
+ regex = '{}\\\\(host|{})\\\\stage0\\\\libexec'.format(
+ os.path.basename(self.build_dir),
+ self.build
+ )
+ script = (
+ # NOTE: can't use `taskkill` or `Get-Process -Name` because they error if
+ # the server isn't running.
+ 'Get-Process | ' +
+ 'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |' +
+ 'Where-Object {{$_.Path -match "{}"}} |'.format(regex) +
+ 'Stop-Process'
+ )
+ run_powershell([script])
shutil.rmtree(bin_root)
+
+ key = self.stage0_compiler.date
+ cache_dst = os.path.join(self.build_dir, "cache")
+ rustc_cache = os.path.join(cache_dst, key)
+ if not os.path.exists(rustc_cache):
+ os.makedirs(rustc_cache)
+
tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz'
- filename = "rust-std-{}-{}{}".format(
- rustc_channel, self.build, tarball_suffix)
- pattern = "rust-std-{}".format(self.build)
- self._download_component_helper(filename, pattern, tarball_suffix)
- filename = "rustc-{}-{}{}".format(rustc_channel, self.build,
- tarball_suffix)
- self._download_component_helper(filename, "rustc", tarball_suffix)
- filename = "cargo-{}-{}{}".format(rustc_channel, self.build,
- tarball_suffix)
- self._download_component_helper(filename, "cargo", tarball_suffix)
+
+ toolchain_suffix = "{}-{}{}".format(rustc_channel, self.build, tarball_suffix)
+
+ tarballs_to_download = [
+ ("rust-std-{}".format(toolchain_suffix), "rust-std-{}".format(self.build)),
+ ("rustc-{}".format(toolchain_suffix), "rustc"),
+ ("cargo-{}".format(toolchain_suffix), "cargo"),
+ ]
+
+ tarballs_download_info = [
+ DownloadInfo(
+ base_download_url=self.download_url,
+ download_path="dist/{}/{}".format(self.stage0_compiler.date, filename),
+ bin_root=self.bin_root(),
+ tarball_path=os.path.join(rustc_cache, filename),
+ tarball_suffix=tarball_suffix,
+ checksums_sha256=self.checksums_sha256,
+ pattern=pattern,
+ verbose=self.verbose,
+ )
+ for filename, pattern in tarballs_to_download
+ ]
+
+ # Download the components serially to show the progress bars properly.
+ for download_info in tarballs_download_info:
+ download_component(download_info)
+
+            # Unpack the tarballs in parallel.
+ # In Python 2.7, Pool cannot be used as a context manager.
+ pool_size = min(len(tarballs_download_info), get_cpus())
+ if self.verbose:
+ print('Choosing a pool size of', pool_size, 'for the unpacking of the tarballs')
+ p = Pool(pool_size)
+ try:
+ p.map(unpack_component, tarballs_download_info)
+ finally:
+ p.close()
+ p.join()
+
if self.should_fix_bins_and_dylibs():
self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root))
@@ -455,13 +574,9 @@ class RustBuild(object):
rust_stamp.write(key)
def _download_component_helper(
- self, filename, pattern, tarball_suffix,
+ self, filename, pattern, tarball_suffix, rustc_cache,
):
key = self.stage0_compiler.date
- cache_dst = os.path.join(self.build_dir, "cache")
- rustc_cache = os.path.join(cache_dst, key)
- if not os.path.exists(rustc_cache):
- os.makedirs(rustc_cache)
tarball = os.path.join(rustc_cache, filename)
if not os.path.exists(tarball):
@@ -516,7 +631,7 @@ class RustBuild(object):
answer = self._should_fix_bins_and_dylibs = get_answer()
if answer:
- print("info: You seem to be using Nix.")
+ print("info: You seem to be using Nix.", file=sys.stderr)
return answer
def fix_bin_or_dylib(self, fname):
@@ -529,7 +644,7 @@ class RustBuild(object):
Please see https://nixos.org/patchelf.html for more information
"""
assert self._should_fix_bins_and_dylibs is True
- print("attempting to patch", fname)
+ print("attempting to patch", fname, file=sys.stderr)
# Only build `.nix-deps` once.
nix_deps_dir = self.nix_deps_dir
@@ -562,7 +677,7 @@ class RustBuild(object):
"nix-build", "-E", nix_expr, "-o", nix_deps_dir,
])
except subprocess.CalledProcessError as reason:
- print("warning: failed to call nix-build:", reason)
+ print("warning: failed to call nix-build:", reason, file=sys.stderr)
return
self.nix_deps_dir = nix_deps_dir
@@ -575,14 +690,14 @@ class RustBuild(object):
]
patchelf_args = ["--set-rpath", ":".join(rpath_entries)]
if not fname.endswith(".so"):
- # Finally, set the corret .interp for binaries
+ # Finally, set the correct .interp for binaries
with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker:
patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()]
try:
subprocess.check_output([patchelf] + patchelf_args + [fname])
except subprocess.CalledProcessError as reason:
- print("warning: failed to call patchelf:", reason)
+ print("warning: failed to call patchelf:", reason, file=sys.stderr)
return
def rustc_stamp(self):
@@ -722,11 +837,14 @@ class RustBuild(object):
def build_bootstrap(self, color, verbose_count):
"""Build bootstrap"""
- print("Building bootstrap")
+ env = os.environ.copy()
+ if "GITHUB_ACTIONS" in env:
+ print("::group::Building bootstrap")
+ else:
+ print("Building bootstrap", file=sys.stderr)
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)
- env = os.environ.copy()
# `CARGO_BUILD_TARGET` breaks bootstrap build.
# See also: <https://github.com/rust-lang/rust/issues/70208>.
if "CARGO_BUILD_TARGET" in env:
@@ -798,6 +916,9 @@ class RustBuild(object):
# Run this from the source directory so cargo finds .cargo/config
run(args, env=env, verbose=self.verbose, cwd=self.rust_root)
+ if "GITHUB_ACTIONS" in env:
+ print("::endgroup::")
+
def build_triple(self):
"""Build triple as in LLVM
@@ -814,25 +935,33 @@ class RustBuild(object):
if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
if os.getuid() == 0:
self.use_vendored_sources = True
- print('info: looks like you\'re trying to run this command as root')
- print(' and so in order to preserve your $HOME this will now')
- print(' use vendored sources by default.')
+ print('info: looks like you\'re trying to run this command as root',
+ file=sys.stderr)
+ print(' and so in order to preserve your $HOME this will now',
+ file=sys.stderr)
+ print(' use vendored sources by default.',
+ file=sys.stderr)
cargo_dir = os.path.join(self.rust_root, '.cargo')
if self.use_vendored_sources:
vendor_dir = os.path.join(self.rust_root, 'vendor')
if not os.path.exists(vendor_dir):
- sync_dirs = "--sync ./src/tools/rust-analyzer/Cargo.toml " \
+ sync_dirs = "--sync ./src/tools/cargo/Cargo.toml " \
+ "--sync ./src/tools/rust-analyzer/Cargo.toml " \
"--sync ./compiler/rustc_codegen_cranelift/Cargo.toml " \
"--sync ./src/bootstrap/Cargo.toml "
- print('error: vendoring required, but vendor directory does not exist.')
+ print('error: vendoring required, but vendor directory does not exist.',
+ file=sys.stderr)
print(' Run `cargo vendor {}` to initialize the '
- 'vendor directory.'.format(sync_dirs))
- print('Alternatively, use the pre-vendored `rustc-src` dist component.')
+ 'vendor directory.'.format(sync_dirs),
+ file=sys.stderr)
+ print('Alternatively, use the pre-vendored `rustc-src` dist component.',
+ file=sys.stderr)
raise Exception("{} not found".format(vendor_dir))
if not os.path.exists(cargo_dir):
- print('error: vendoring required, but .cargo/config does not exist.')
+ print('error: vendoring required, but .cargo/config does not exist.',
+ file=sys.stderr)
raise Exception("{} not found".format(cargo_dir))
else:
if os.path.exists(cargo_dir):
@@ -942,7 +1071,7 @@ def main():
print(
"info: Downloading and building bootstrap before processing --help command.\n"
" See src/bootstrap/README.md for help with common commands."
- )
+ , file=sys.stderr)
exit_code = 0
success_word = "successfully"
@@ -953,11 +1082,12 @@ def main():
exit_code = error.code
else:
exit_code = 1
- print(error)
+ print(error, file=sys.stderr)
success_word = "unsuccessfully"
if not help_triggered:
- print("Build completed", success_word, "in", format_build_time(time() - start_time))
+ print("Build completed", success_word, "in", format_build_time(time() - start_time),
+ file=sys.stderr)
sys.exit(exit_code)
diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py
index 20bd71f06..5ecda83ee 100644
--- a/src/bootstrap/bootstrap_test.py
+++ b/src/bootstrap/bootstrap_test.py
@@ -97,6 +97,7 @@ class GenerateAndParseConfig(unittest.TestCase):
def test_no_args(self):
build = self.serialize_and_parse([])
self.assertEqual(build.get_toml("changelog-seen"), '2')
+ self.assertEqual(build.get_toml("profile"), 'user')
self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))
def test_set_section(self):
@@ -107,10 +108,17 @@ class GenerateAndParseConfig(unittest.TestCase):
build = self.serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
- # Uncomment when #108928 is fixed.
- # def test_set_top_level(self):
- # build = self.serialize_and_parse(["--set", "profile=compiler"])
- # self.assertEqual(build.get_toml("profile"), 'compiler')
+ def test_set_top_level(self):
+ build = self.serialize_and_parse(["--set", "profile=compiler"])
+ self.assertEqual(build.get_toml("profile"), 'compiler')
+
+ def test_set_codegen_backends(self):
+ build = self.serialize_and_parse(["--set", "rust.codegen-backends=cranelift"])
+ self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift']"), -1)
+ build = self.serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"])
+ self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1)
+ build = self.serialize_and_parse(["--enable-full-tools"])
+ self.assertNotEqual(build.config_toml.find("codegen-backends = ['llvm']"), -1)
if __name__ == '__main__':
SUITE = unittest.TestSuite()
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index e959ea06f..2fa445506 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -33,6 +33,7 @@ pub use crate::Compiler;
// - use std::lazy for `Lazy`
// - use std::cell for `OnceCell`
// Once they get stabilized and reach beta.
+use clap::ValueEnum;
use once_cell::sync::{Lazy, OnceCell};
pub struct Builder<'a> {
@@ -263,7 +264,7 @@ impl PathSet {
/// A convenience wrapper for Steps which know they have no aliases and all their sets contain only a single path.
///
- /// This can be used with [`ShouldRun::krate`], [`ShouldRun::path`], or [`ShouldRun::alias`].
+ /// This can be used with [`ShouldRun::crate_or_deps`], [`ShouldRun::path`], or [`ShouldRun::alias`].
#[track_caller]
pub fn assert_single_path(&self) -> &TaskPath {
match self {
@@ -576,19 +577,24 @@ impl<'a> ShouldRun<'a> {
}
}
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, ValueEnum)]
pub enum Kind {
+ #[clap(alias = "b")]
Build,
+ #[clap(alias = "c")]
Check,
Clippy,
Fix,
Format,
+ #[clap(alias = "t")]
Test,
Bench,
+ #[clap(alias = "d")]
Doc,
Clean,
Dist,
Install,
+ #[clap(alias = "r")]
Run,
Setup,
Suggest,
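Deriving `ValueEnum` lets `Kind` be parsed directly by clap, and the `#[clap(alias = ...)]` attributes presumably give the single-letter shorthands (`x.py b`, `x.py t`, and so on). A standalone sketch of the mechanism, reduced to two variants:

```rust
use clap::{Parser, ValueEnum};

#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
enum Kind {
    #[clap(alias = "b")]
    Build,
    #[clap(alias = "t")]
    Test,
}

#[derive(Parser)]
struct Cli {
    #[arg(value_enum)]
    kind: Kind,
}

fn main() {
    // The alias and the full name parse to the same variant.
    assert_eq!(Cli::parse_from(["x", "b"]).kind, Kind::Build);
    assert_eq!(Cli::parse_from(["x", "test"]).kind, Kind::Test);
}
```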
@@ -634,6 +640,14 @@ impl Kind {
Kind::Suggest => "suggest",
}
}
+
+ pub fn test_description(&self) -> &'static str {
+ match self {
+ Kind::Test => "Testing",
+ Kind::Bench => "Benchmarking",
+ _ => panic!("not a test command: {}!", self.as_str()),
+ }
+ }
}
impl<'a> Builder<'a> {
@@ -675,7 +689,8 @@ impl<'a> Builder<'a> {
tool::Miri,
tool::CargoMiri,
llvm::Lld,
- llvm::CrtBeginEnd
+ llvm::CrtBeginEnd,
+ tool::RustdocGUITest,
),
Kind::Check | Kind::Clippy | Kind::Fix => describe!(
check::Std,
@@ -695,7 +710,6 @@ impl<'a> Builder<'a> {
crate::toolstate::ToolStateCheck,
test::ExpandYamlAnchors,
test::Tidy,
- test::TidySelfTest,
test::Ui,
test::RunPassValgrind,
test::MirOpt,
@@ -711,11 +725,9 @@ impl<'a> Builder<'a> {
test::CrateLibrustc,
test::CrateRustdoc,
test::CrateRustdocJsonTypes,
- test::CrateJsonDocLint,
- test::SuggestTestsCrate,
+ test::CrateBootstrap,
test::Linkcheck,
test::TierCheck,
- test::ReplacePlaceholderTest,
test::Cargotest,
test::Cargo,
test::RustAnalyzer,
@@ -776,6 +788,7 @@ impl<'a> Builder<'a> {
doc::EditionGuide,
doc::StyleGuide,
doc::Tidy,
+ doc::Bootstrap,
),
Kind::Dist => describe!(
dist::Docs,
@@ -827,6 +840,8 @@ impl<'a> Builder<'a> {
run::Miri,
run::CollectLicenseMetadata,
run::GenerateCopyright,
+ run::GenerateWindowsSys,
+ run::GenerateCompletions,
),
Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode),
Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std),
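`run::GenerateCompletions` is registered here as a new run step; together with the `clap_complete` dependency it can emit shell completion scripts for the new CLI. The real step lives in run.rs (changed in this diff but not shown in this excerpt); a minimal sketch of what such a step does:

```rust
use clap::CommandFactory;
use clap_complete::{generate, shells};

#[derive(clap::Parser)]
struct Flags {
    #[arg(long)]
    stage: Option<u32>,
}

fn main() {
    // Build the clap command description from the derive above and render a
    // bash completion script for it to stdout.
    let mut cmd = Flags::command();
    generate(shells::Bash, &mut cmd, "x.py", &mut std::io::stdout());
}
```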
@@ -882,18 +897,19 @@ impl<'a> Builder<'a> {
}
pub fn new(build: &Build) -> Builder<'_> {
+ let paths = &build.config.paths;
let (kind, paths) = match build.config.cmd {
- Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
- Subcommand::Check { ref paths } => (Kind::Check, &paths[..]),
- Subcommand::Clippy { ref paths, .. } => (Kind::Clippy, &paths[..]),
- Subcommand::Fix { ref paths } => (Kind::Fix, &paths[..]),
- Subcommand::Doc { ref paths, .. } => (Kind::Doc, &paths[..]),
- Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
- Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
- Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
- Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
- Subcommand::Run { ref paths, .. } => (Kind::Run, &paths[..]),
- Subcommand::Clean { ref paths, .. } => (Kind::Clean, &paths[..]),
+ Subcommand::Build => (Kind::Build, &paths[..]),
+ Subcommand::Check { .. } => (Kind::Check, &paths[..]),
+ Subcommand::Clippy { .. } => (Kind::Clippy, &paths[..]),
+ Subcommand::Fix => (Kind::Fix, &paths[..]),
+ Subcommand::Doc { .. } => (Kind::Doc, &paths[..]),
+ Subcommand::Test { .. } => (Kind::Test, &paths[..]),
+ Subcommand::Bench { .. } => (Kind::Bench, &paths[..]),
+ Subcommand::Dist => (Kind::Dist, &paths[..]),
+ Subcommand::Install => (Kind::Install, &paths[..]),
+ Subcommand::Run { .. } => (Kind::Run, &paths[..]),
+ Subcommand::Clean { .. } => (Kind::Clean, &paths[..]),
Subcommand::Format { .. } => (Kind::Format, &[][..]),
Subcommand::Suggest { .. } => (Kind::Suggest, &[][..]),
Subcommand::Setup { profile: ref path } => (
@@ -928,7 +944,6 @@ impl<'a> Builder<'a> {
self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths);
}
- /// NOTE: keep this in sync with `rustdoc::clean::utils::doc_rust_lang_org_channel`, or tests will fail on beta/stable.
pub fn doc_rust_lang_org_channel(&self) -> String {
let channel = match &*self.config.channel {
"stable" => &self.version,
@@ -1008,9 +1023,24 @@ impl<'a> Builder<'a> {
// Avoid deleting the rustlib/ directory we just copied
// (in `impl Step for Sysroot`).
if !builder.download_rustc() {
+ builder.verbose(&format!(
+ "Removing sysroot {} to avoid caching bugs",
+ sysroot.display()
+ ));
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
}
+
+ if self.compiler.stage == 0 {
+ // The stage 0 compiler for the build triple is always pre-built.
+ // Ensure that `libLLVM.so` ends up in the target libdir, so that ui-fulldeps tests can use it when run.
+ dist::maybe_install_llvm_target(
+ builder,
+ self.compiler.host,
+ &builder.sysroot(self.compiler),
+ );
+ }
+
INTERNER.intern_path(sysroot)
}
}
@@ -1384,7 +1414,7 @@ impl<'a> Builder<'a> {
// Add extra cfg not defined in/by rustc
//
- // Note: Altrough it would seems that "-Zunstable-options" to `rustflags` is useless as
+ // Note: Although it would seems that "-Zunstable-options" to `rustflags` is useless as
// cargo would implicitly add it, it was discover that sometimes bootstrap only use
// `rustflags` without `cargo` making it required.
rustflags.arg("-Zunstable-options");
@@ -1574,8 +1604,8 @@ impl<'a> Builder<'a> {
// which adds to the runtime dynamic loader path when looking for
// dynamic libraries. We use this by default on Unix platforms to ensure
// that our nightlies behave the same on Windows, that is they work out
- // of the box. This can be disabled, of course, but basically that's why
- // we're gated on RUSTC_RPATH here.
+ // of the box. This can be disabled by setting `rpath = false` in `[rust]`
+ // table of `config.toml`
//
// Ok, so the astute might be wondering "why isn't `-C rpath` used
// here?" and that is indeed a good question to ask. This codegen
@@ -1595,7 +1625,7 @@ impl<'a> Builder<'a> {
// argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
// fun to pass a flag to a tool to pass a flag to pass a flag to a tool
// to change a flag in a binary?
- if self.config.rust_rpath && util::use_host_linker(target) {
+ if self.config.rpath_enabled(target) && util::use_host_linker(target) {
let rpath = if target.contains("apple") {
// Note that we need to take one extra step on macOS to also pass
// `-Wl,-instal_name,@rpath/...` to get things to work right. To
@@ -1887,10 +1917,10 @@ impl<'a> Builder<'a> {
}
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
- // This replaces spaces with newlines because RUSTDOCFLAGS does not
+ // This replaces spaces with tabs because RUSTDOCFLAGS does not
// support arguments with regular spaces. Hopefully someday Cargo will
// have space support.
- let rust_version = self.rust_version().replace(' ', "\n");
+ let rust_version = self.rust_version().replace(' ', "\t");
rustdocflags.arg("--crate-version").arg(&rust_version);
// Environment variables *required* throughout the build
@@ -2030,7 +2060,7 @@ impl<'a> Builder<'a> {
}
#[cfg(feature = "build-metrics")]
- self.metrics.enter_step(&step);
+ self.metrics.enter_step(&step, self);
let (out, dur) = {
let start = Instant::now();
@@ -2056,7 +2086,7 @@ impl<'a> Builder<'a> {
}
#[cfg(feature = "build-metrics")]
- self.metrics.exit_step();
+ self.metrics.exit_step(self);
{
let mut stack = self.stack.borrow_mut();
@@ -2131,6 +2161,10 @@ impl<'a> Builder<'a> {
#[cfg(test)]
mod tests;
+/// Represents flag values in `String` form with whitespace delimiter to pass it to the compiler later.
+///
+/// `-Z crate-attr` flags will be applied recursively on the target code using the `rustc_parse::parser::Parser`.
+/// See `rustc_builtin_macros::cmdline_attrs::inject` for more information.
#[derive(Debug, Clone)]
struct Rustflags(String, TargetSelection);
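The new doc comment explains that `Rustflags` is a single whitespace-delimited string handed to the compiler later. A simplified sketch of that invariant (the real type also carries a `TargetSelection`, and its exact methods are not shown here):

```rust
#[derive(Debug, Clone, Default)]
struct Rustflags(String);

impl Rustflags {
    fn arg(&mut self, arg: &str) -> &mut Self {
        // Whitespace is the delimiter, so an individual flag must not contain spaces.
        assert!(!arg.contains(' '), "flag would be split when the string is re-parsed");
        if !self.0.is_empty() {
            self.0.push(' ');
        }
        self.0.push_str(arg);
        self
    }
}

fn main() {
    let mut flags = Rustflags::default();
    flags.arg("-Zunstable-options").arg("-Zcrate-attr=warn(rust_2018_idioms)");
    assert_eq!(flags.0, "-Zunstable-options -Zcrate-attr=warn(rust_2018_idioms)");
}
```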
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index 3574f1118..edca8fe9b 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -146,6 +146,22 @@ fn alias_and_path_for_library() {
);
}
+#[test]
+fn test_beta_rev_parsing() {
+ use crate::extract_beta_rev;
+
+ // single digit revision
+ assert_eq!(extract_beta_rev("1.99.9-beta.7 (xxxxxx)"), Some("7".to_string()));
+ // multiple digits
+ assert_eq!(extract_beta_rev("1.99.9-beta.777 (xxxxxx)"), Some("777".to_string()));
+ // nightly channel (no beta revision)
+ assert_eq!(extract_beta_rev("1.99.9-nightly (xxxxxx)"), None);
+ // stable channel (no beta revision)
+ assert_eq!(extract_beta_rev("1.99.9 (xxxxxxx)"), None);
+ // invalid string
+ assert_eq!(extract_beta_rev("invalid"), None);
+}
+
mod defaults {
use super::{configure, first, run_build};
use crate::builder::*;
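The new test above exercises `extract_beta_rev`, which is added at the crate root (lib.rs) in this PR; its hunks are not shown in this excerpt. A rough sketch of a parser consistent with these test cases:

```rust
// Sketch only: take whatever follows "-beta." up to the first space.
fn extract_beta_rev(version: &str) -> Option<String> {
    let rest = version.splitn(2, "-beta.").nth(1)?;
    rest.split(' ').next().map(|rev| rev.to_string())
}

fn main() {
    assert_eq!(extract_beta_rev("1.99.9-beta.777 (xxxxxx)"), Some("777".to_string()));
    assert_eq!(extract_beta_rev("1.99.9-nightly (xxxxxx)"), None);
    assert_eq!(extract_beta_rev("invalid"), None);
}
```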
@@ -236,7 +252,7 @@ mod defaults {
fn doc_default() {
let mut config = configure("doc", &["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
+ config.cmd = Subcommand::Doc { open: false, json: false };
let mut cache = run_build(&[], config);
let a = TargetSelection::from_user("A");
@@ -545,12 +561,13 @@ mod dist {
fn test_with_no_doc_stage0() {
let mut config = configure(&["A"], &["A"]);
config.stage = 0;
+ config.paths = vec!["library/std".into()];
config.cmd = Subcommand::Test {
- paths: vec!["library/std".into()],
test_args: vec![],
rustc_args: vec![],
- fail_fast: true,
- doc_tests: DocTests::No,
+ no_fail_fast: false,
+ no_doc: true,
+ doc: false,
bless: false,
force_rerun: false,
compare_mode: None,
@@ -558,6 +575,7 @@ mod dist {
pass: None,
run: None,
only_modified: false,
+ skip: vec![],
};
let build = Build::new(config);
@@ -578,7 +596,6 @@ mod dist {
compiler: Compiler { host, stage: 0 },
target: host,
mode: Mode::Std,
- test_kind: test::TestKind::Test,
crates: vec![INTERNER.intern_str("std")],
},]
);
@@ -588,7 +605,7 @@ mod dist {
fn doc_ci() {
let mut config = configure(&["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
+ config.cmd = Subcommand::Doc { open: false, json: false };
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]);
@@ -617,11 +634,12 @@ mod dist {
// Behavior of `x.py test` doing various documentation tests.
let mut config = configure(&["A"], &["A"]);
config.cmd = Subcommand::Test {
- paths: vec![],
test_args: vec![],
rustc_args: vec![],
- fail_fast: true,
- doc_tests: DocTests::Yes,
+ no_fail_fast: false,
+ doc: true,
+ no_doc: false,
+ skip: vec![],
bless: false,
force_rerun: false,
compare_mode: None,
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index c3e3fa009..870185740 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -22,7 +22,7 @@ pub enum GitInfo {
/// If the info should be used (`omit_git_hash` is false), this will be
/// `Some`, otherwise it will be `None`.
Present(Option<Info>),
- /// This is not a git repostory, but the info can be fetched from the
+ /// This is not a git repository, but the info can be fetched from the
/// `git-commit-info` file.
RecordedForTarball(Info),
}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index fcaa69831..b11be96ce 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -20,15 +20,7 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
arr.iter().copied().map(String::from)
}
- if let Subcommand::Clippy {
- fix,
- clippy_lint_allow,
- clippy_lint_deny,
- clippy_lint_warn,
- clippy_lint_forbid,
- ..
- } = &builder.config.cmd
- {
+ if let Subcommand::Clippy { fix, allow, deny, warn, forbid, .. } = &builder.config.cmd {
// disable the most spammy clippy lints
let ignored_lints = vec![
"many_single_char_names", // there are a lot in stdarch
@@ -53,10 +45,10 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
args.extend(strings(&["--", "--cap-lints", "warn"]));
args.extend(ignored_lints.iter().map(|lint| format!("-Aclippy::{}", lint)));
let mut clippy_lint_levels: Vec<String> = Vec::new();
- clippy_lint_allow.iter().for_each(|v| clippy_lint_levels.push(format!("-A{}", v)));
- clippy_lint_deny.iter().for_each(|v| clippy_lint_levels.push(format!("-D{}", v)));
- clippy_lint_warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v)));
- clippy_lint_forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v)));
+ allow.iter().for_each(|v| clippy_lint_levels.push(format!("-A{}", v)));
+ deny.iter().for_each(|v| clippy_lint_levels.push(format!("-D{}", v)));
+ warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v)));
+ forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v)));
args.extend(clippy_lint_levels);
args.extend(builder.config.free_args.clone());
args
@@ -79,7 +71,7 @@ impl Step for Std {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.all_krates("test").path("library")
+ run.all_krates("sysroot").path("library")
}
fn make_run(run: RunConfig<'_>) {
@@ -105,15 +97,7 @@ impl Step for Std {
cargo.arg("--lib");
}
- let msg = if compiler.host == target {
- format!("Checking stage{} library artifacts ({target})", builder.top_stage)
- } else {
- format!(
- "Checking stage{} library artifacts ({} -> {})",
- builder.top_stage, &compiler.host, target
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check("library artifacts", target);
run_cargo(
builder,
cargo,
@@ -167,18 +151,7 @@ impl Step for Std {
cargo.arg("-p").arg(krate.name);
}
- let msg = if compiler.host == target {
- format!(
- "Checking stage{} library test/bench/example targets ({target})",
- builder.top_stage
- )
- } else {
- format!(
- "Checking stage{} library test/bench/example targets ({} -> {})",
- builder.top_stage, &compiler.host, target
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check("library test/bench/example targets", target);
run_cargo(
builder,
cargo,
@@ -252,15 +225,7 @@ impl Step for Rustc {
cargo.arg("-p").arg(krate.name);
}
- let msg = if compiler.host == target {
- format!("Checking stage{} compiler artifacts ({target})", builder.top_stage)
- } else {
- format!(
- "Checking stage{} compiler artifacts ({} -> {})",
- builder.top_stage, &compiler.host, target
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check("compiler artifacts", target);
run_cargo(
builder,
cargo,
@@ -271,17 +236,9 @@ impl Step for Rustc {
false,
);
- // HACK: This avoids putting the newly built artifacts in the sysroot if we're using
- // `download-rustc`, to avoid "multiple candidates for `rmeta`" errors. Technically, that's
- // not quite right: people can set `download-rustc = true` to download even if there are
- // changes to the compiler, and in that case ideally we would put the *new* artifacts in the
- // sysroot, in case there are API changes that should be used by tools. In practice,
- // though, that should be very uncommon, and people can still disable download-rustc.
- if !builder.download_rustc() {
- let libdir = builder.sysroot_libdir(compiler, target);
- let hostdir = builder.sysroot_libdir(compiler, compiler.host);
- add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
- }
+ let libdir = builder.sysroot_libdir(compiler, target);
+ let hostdir = builder.sysroot_libdir(compiler, compiler.host);
+ add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
}
}
@@ -325,15 +282,7 @@ impl Step for CodegenBackend {
.arg(builder.src.join(format!("compiler/rustc_codegen_{}/Cargo.toml", backend)));
rustc_cargo_env(builder, &mut cargo, target, compiler.stage);
- let msg = if compiler.host == target {
- format!("Checking stage{} {} artifacts ({target})", builder.top_stage, backend)
- } else {
- format!(
- "Checking stage{} {} library ({} -> {})",
- builder.top_stage, backend, &compiler.host.triple, target.triple
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check(&backend, target);
run_cargo(
builder,
@@ -393,15 +342,7 @@ impl Step for RustAnalyzer {
cargo.arg("--benches");
}
- let msg = if compiler.host == target {
- format!("Checking stage{} {} artifacts ({target})", compiler.stage, "rust-analyzer")
- } else {
- format!(
- "Checking stage{} {} artifacts ({} -> {})",
- compiler.stage, "rust-analyzer", &compiler.host.triple, target.triple
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check("rust-analyzer artifacts", target);
run_cargo(
builder,
cargo,
@@ -468,18 +409,7 @@ macro_rules! tool_check_step {
// NOTE: this doesn't enable lints for any other tools unless they explicitly add `#![warn(rustc::internal)]`
// See https://github.com/rust-lang/rust/pull/80573#issuecomment-754010776
cargo.rustflag("-Zunstable-options");
- let msg = if compiler.host == target {
- format!("Checking stage{} {} artifacts ({target})", builder.top_stage, stringify!($name).to_lowercase())
- } else {
- format!(
- "Checking stage{} {} artifacts ({} -> {})",
- builder.top_stage,
- stringify!($name).to_lowercase(),
- &compiler.host.triple,
- target.triple
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_check(&concat!(stringify!($name), " artifacts").to_lowercase(), target);
run_cargo(
builder,
cargo,
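Throughout check.rs the hand-rolled `builder.info(&msg)` status lines are replaced by `builder.msg_check(..)`, which returns a guard held for the duration of the step. The helper itself is presumably defined alongside the other new message helpers in lib.rs (changed in this diff but not shown here); the sketch below shows the general drop-guard pattern, with the CI log-group behaviour as an assumption:

```rust
struct MsgGuard {
    ci_group: bool,
}

impl Drop for MsgGuard {
    fn drop(&mut self) {
        // Close the log-folding group (if any) when the step finishes.
        if self.ci_group {
            println!("::endgroup::");
        }
    }
}

fn msg_check(what: &str, target: &str) -> MsgGuard {
    let ci_group = std::env::var_os("GITHUB_ACTIONS").is_some();
    if ci_group {
        println!("::group::Checking {what} ({target})");
    } else {
        println!("Checking {what} ({target})");
    }
    MsgGuard { ci_group }
}

fn main() {
    let _guard = msg_check("library artifacts", "x86_64-unknown-linux-gnu");
    // ... run `cargo check` here; the group closes when `_guard` drops ...
}
```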
diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs
index 7ebd0a8f2..0d9fd56b0 100644
--- a/src/bootstrap/clean.rs
+++ b/src/bootstrap/clean.rs
@@ -81,7 +81,7 @@ macro_rules! clean_crate_tree {
clean_crate_tree! {
Rustc, Mode::Rustc, "rustc-main";
- Std, Mode::Std, "test";
+ Std, Mode::Std, "sysroot";
}
fn clean_default(build: &Build, all: bool) {
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index 4a4e7adcb..33addb90d 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -9,6 +9,7 @@
use std::borrow::Cow;
use std::collections::HashSet;
use std::env;
+use std::ffi::OsStr;
use std::fs;
use std::io::prelude::*;
use std::io::BufReader;
@@ -55,7 +56,7 @@ impl Step for Std {
// When downloading stage1, the standard library has already been copied to the sysroot, so
// there's no need to rebuild it.
let builder = run.builder;
- run.crate_or_deps("test")
+ run.crate_or_deps("sysroot")
.path("library")
.lazy_default_condition(Box::new(|| !builder.download_rustc()))
}
@@ -142,23 +143,13 @@ impl Step for Std {
cargo.arg("-p").arg(krate);
}
- let msg = if compiler.host == target {
- format!(
- "Building{} stage{} library artifacts ({}) ",
- crate_description(&self.crates),
- compiler.stage,
- compiler.host
- )
- } else {
- format!(
- "Building{} stage{} library artifacts ({} -> {})",
- crate_description(&self.crates),
- compiler.stage,
- compiler.host,
- target,
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg(
+ Kind::Build,
+ compiler.stage,
+ format_args!("library artifacts{}", crate_description(&self.crates)),
+ compiler.host,
+ target,
+ );
run_cargo(
builder,
cargo,
@@ -373,7 +364,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
.arg("--features")
.arg(features)
.arg("--manifest-path")
- .arg(builder.src.join("library/test/Cargo.toml"));
+ .arg(builder.src.join("library/sysroot/Cargo.toml"));
// Help the libc crate compile by assisting it in finding various
// sysroot native libraries.
@@ -421,6 +412,8 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
format!("-Zcrate-attr=doc(html_root_url=\"{}/\")", builder.doc_rust_lang_org_channel(),);
cargo.rustflag(&html_root);
cargo.rustdocflag(&html_root);
+
+ cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)");
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -652,8 +645,19 @@ impl Step for Rustc {
// so its artifacts can't be reused.
if builder.download_rustc() && compiler.stage != 0 {
// Copy the existing artifacts instead of rebuilding them.
- // NOTE: this path is only taken for tools linking to rustc-dev.
- builder.ensure(Sysroot { compiler });
+ // NOTE: this path is only taken for tools linking to rustc-dev (including ui-fulldeps tests).
+ let sysroot = builder.ensure(Sysroot { compiler });
+
+ let ci_rustc_dir = builder.out.join(&*builder.build.build.triple).join("ci-rustc");
+ for file in builder.config.rustc_dev_contents() {
+ let src = ci_rustc_dir.join(&file);
+ let dst = sysroot.join(file);
+ if src.is_dir() {
+ t!(fs::create_dir_all(dst));
+ } else {
+ builder.copy(&src, &dst);
+ }
+ }
return;
}
@@ -778,24 +782,13 @@ impl Step for Rustc {
cargo.arg("-p").arg(krate);
}
- let msg = if compiler.host == target {
- format!(
- "Building{} compiler artifacts (stage{} -> stage{})",
- crate_description(&self.crates),
- compiler.stage,
- compiler.stage + 1
- )
- } else {
- format!(
- "Building{} compiler artifacts (stage{}:{} -> stage{}:{})",
- crate_description(&self.crates),
- compiler.stage,
- compiler.host,
- compiler.stage + 1,
- target,
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_sysroot_tool(
+ Kind::Build,
+ compiler.stage,
+ format_args!("compiler artifacts{}", crate_description(&self.crates)),
+ compiler.host,
+ target,
+ );
run_cargo(
builder,
cargo,
@@ -819,6 +812,9 @@ pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelec
.arg(builder.rustc_features(builder.kind))
.arg("--manifest-path")
.arg(builder.src.join("compiler/rustc/Cargo.toml"));
+
+ cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)");
+
rustc_cargo_env(builder, cargo, target, stage);
}
@@ -1102,15 +1098,7 @@ impl Step for CodegenBackend {
let tmp_stamp = out_dir.join(".tmp.stamp");
- let msg = if compiler.host == target {
- format!("Building stage{} codegen backend {}", compiler.stage, backend)
- } else {
- format!(
- "Building stage{} codegen backend {} ({} -> {})",
- compiler.stage, backend, compiler.host, target
- )
- };
- builder.info(&msg);
+ let _guard = builder.msg_build(compiler, format_args!("codegen backend {backend}"), target);
let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false);
if builder.config.dry_run() {
return;
@@ -1260,6 +1248,7 @@ impl Step for Sysroot {
};
let sysroot = sysroot_dir(compiler.stage);
+ builder.verbose(&format!("Removing sysroot {} to avoid caching bugs", sysroot.display()));
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
@@ -1281,8 +1270,40 @@ impl Step for Sysroot {
}
// Copy the compiler into the correct sysroot.
- builder.cp_r(&builder.ci_rustc_dir(builder.build.build), &sysroot);
- return INTERNER.intern_path(sysroot);
+ // NOTE(#108767): We intentionally don't copy `rustc-dev` artifacts until they're requested with `builder.ensure(Rustc)`.
+ // This fixes an issue where we'd have multiple copies of libc in the sysroot with no way to tell which to load.
+ // There are a few quirks of bootstrap that interact to make this reliable:
+ // 1. The order `Step`s are run is hard-coded in `builder.rs` and not configurable. This
+ // avoids e.g. reordering `test::UiFulldeps` before `test::Ui` and causing the latter to
+ // fail because of duplicate metadata.
+ // 2. The sysroot is deleted and recreated between each invocation, so running `x test
+ // ui-fulldeps && x test ui` can't cause failures.
+ let mut filtered_files = Vec::new();
+ // Don't trim directories or files that aren't loaded per-target; they can't cause conflicts.
+ let suffix = format!("lib/rustlib/{}/lib", compiler.host);
+ for path in builder.config.rustc_dev_contents() {
+ let path = Path::new(&path);
+ if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
+ filtered_files.push(path.file_name().unwrap().to_owned());
+ }
+ }
+
+ let filtered_extensions = [OsStr::new("rmeta"), OsStr::new("rlib"), OsStr::new("so")];
+ let ci_rustc_dir = builder.ci_rustc_dir(builder.config.build);
+ builder.cp_filtered(&ci_rustc_dir, &sysroot, &|path| {
+ if path.extension().map_or(true, |ext| !filtered_extensions.contains(&ext)) {
+ return true;
+ }
+ if !path.parent().map_or(true, |p| p.ends_with(&suffix)) {
+ return true;
+ }
+ if !filtered_files.iter().all(|f| f != path.file_name().unwrap()) {
+ builder.verbose_than(1, &format!("ignoring {}", path.display()));
+ false
+ } else {
+ true
+ }
+ });
}
// Symlink the source root into the same location inside the sysroot,
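Note on the `cp_filtered` predicate above: a file is skipped only when it is simultaneously (a) an `rmeta`/`rlib`/`so`, (b) under the per-target `lib/rustlib/<host>/lib` directory, and (c) listed in the cached `rustc-dev` manifest; everything else is copied. A standalone sketch of that decision, with an illustrative helper name and signature that are not part of the patch:

    use std::ffi::{OsStr, OsString};
    use std::path::Path;

    /// Returns true if `path` should be copied into the sysroot.
    /// `suffix` is "lib/rustlib/<host>/lib"; `rustc_dev_files` holds the file
    /// names recorded from the rustc-dev tarball.
    fn should_copy(path: &Path, suffix: &str, rustc_dev_files: &[OsString]) -> bool {
        let filtered_extensions = [OsStr::new("rmeta"), OsStr::new("rlib"), OsStr::new("so")];
        let is_filtered_ext =
            path.extension().map_or(false, |ext| filtered_extensions.contains(&ext));
        let in_target_libdir = path.parent().map_or(false, |p| p.ends_with(suffix));
        let in_manifest =
            path.file_name().map_or(false, |name| rustc_dev_files.iter().any(|f| f == name));
        // Skip only when all three conditions hold; copy otherwise.
        !(is_filtered_ext && in_target_libdir && in_manifest)
    }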
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index cc3b3bc25..e192cda9a 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -21,9 +21,10 @@ use crate::cache::{Interned, INTERNER};
use crate::cc_detect::{ndk_compiler, Language};
use crate::channel::{self, GitInfo};
pub use crate::flags::Subcommand;
-use crate::flags::{Color, Flags};
+use crate::flags::{Color, Flags, Warnings};
use crate::util::{exe, output, t};
use once_cell::sync::OnceCell;
+use semver::Version;
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
@@ -237,6 +238,8 @@ pub struct Config {
initial_rustfmt: RefCell<RustfmtState>,
#[cfg(test)]
pub initial_rustfmt: RefCell<RustfmtState>,
+
+ pub paths: Vec<PathBuf>,
}
#[derive(Default, Deserialize, Clone)]
@@ -347,7 +350,7 @@ impl SplitDebuginfo {
}
/// LTO mode used for compiling rustc itself.
-#[derive(Default, Clone, PartialEq)]
+#[derive(Default, Clone, PartialEq, Debug)]
pub enum RustcLto {
Off,
#[default]
@@ -376,6 +379,16 @@ pub struct TargetSelection {
file: Option<Interned<String>>,
}
+/// Newtype over `Vec<TargetSelection>` so we can implement custom parsing logic
+#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+pub struct TargetSelectionList(Vec<TargetSelection>);
+
+pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {
+ Ok(TargetSelectionList(
+ s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(),
+ ))
+}
+
impl TargetSelection {
pub fn from_user(selection: &str) -> Self {
let path = Path::new(selection);
@@ -455,6 +468,7 @@ pub struct Target {
pub ndk: Option<PathBuf>,
pub sanitizers: Option<bool>,
pub profiler: Option<bool>,
+ pub rpath: Option<bool>,
pub crt_static: Option<bool>,
pub musl_root: Option<PathBuf>,
pub musl_libdir: Option<PathBuf>,
@@ -494,29 +508,42 @@ struct TomlConfig {
profile: Option<String>,
}
+/// Describes how to handle conflicts in merging two [`TomlConfig`]
+#[derive(Copy, Clone, Debug)]
+enum ReplaceOpt {
+ /// Silently ignore a duplicated value
+ IgnoreDuplicate,
+ /// Override the current value, even if it's `Some`
+ Override,
+ /// Exit with an error on duplicate values
+ ErrorOnDuplicate,
+}
+
trait Merge {
- fn merge(&mut self, other: Self);
+ fn merge(&mut self, other: Self, replace: ReplaceOpt);
}
impl Merge for TomlConfig {
fn merge(
&mut self,
- TomlConfig { build, install, llvm, rust, dist, target, profile: _, changelog_seen: _ }: Self,
+ TomlConfig { build, install, llvm, rust, dist, target, profile: _, changelog_seen }: Self,
+ replace: ReplaceOpt,
) {
- fn do_merge<T: Merge>(x: &mut Option<T>, y: Option<T>) {
+ fn do_merge<T: Merge>(x: &mut Option<T>, y: Option<T>, replace: ReplaceOpt) {
if let Some(new) = y {
if let Some(original) = x {
- original.merge(new);
+ original.merge(new, replace);
} else {
*x = Some(new);
}
}
}
- do_merge(&mut self.build, build);
- do_merge(&mut self.install, install);
- do_merge(&mut self.llvm, llvm);
- do_merge(&mut self.rust, rust);
- do_merge(&mut self.dist, dist);
+ self.changelog_seen.merge(changelog_seen, replace);
+ do_merge(&mut self.build, build, replace);
+ do_merge(&mut self.install, install, replace);
+ do_merge(&mut self.llvm, llvm, replace);
+ do_merge(&mut self.rust, rust, replace);
+ do_merge(&mut self.dist, dist, replace);
assert!(target.is_none(), "merging target-specific config is not currently supported");
}
}
@@ -533,10 +560,33 @@ macro_rules! define_config {
}
impl Merge for $name {
- fn merge(&mut self, other: Self) {
+ fn merge(&mut self, other: Self, replace: ReplaceOpt) {
$(
- if !self.$field.is_some() {
- self.$field = other.$field;
+ match replace {
+ ReplaceOpt::IgnoreDuplicate => {
+ if self.$field.is_none() {
+ self.$field = other.$field;
+ }
+ },
+ ReplaceOpt::Override => {
+ if other.$field.is_some() {
+ self.$field = other.$field;
+ }
+ }
+ ReplaceOpt::ErrorOnDuplicate => {
+ if other.$field.is_some() {
+ if self.$field.is_some() {
+ if cfg!(test) {
+ panic!("overriding existing option")
+ } else {
+ eprintln!("overriding existing option: `{}`", stringify!($field));
+ crate::detail_exit(2);
+ }
+ } else {
+ self.$field = other.$field;
+ }
+ }
+ }
}
)*
}
@@ -609,6 +659,37 @@ macro_rules! define_config {
}
}
+impl<T> Merge for Option<T> {
+ fn merge(&mut self, other: Self, replace: ReplaceOpt) {
+ match replace {
+ ReplaceOpt::IgnoreDuplicate => {
+ if self.is_none() {
+ *self = other;
+ }
+ }
+ ReplaceOpt::Override => {
+ if other.is_some() {
+ *self = other;
+ }
+ }
+ ReplaceOpt::ErrorOnDuplicate => {
+ if other.is_some() {
+ if self.is_some() {
+ if cfg!(test) {
+ panic!("overriding existing option")
+ } else {
+ eprintln!("overriding existing option");
+ crate::detail_exit(2);
+ }
+ } else {
+ *self = other;
+ }
+ }
+ }
+ }
+ }
+}
+
define_config! {
/// TOML representation of various global build decisions.
#[derive(Default)]
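To illustrate how the three `ReplaceOpt` modes behave on a single optional field, here is a minimal standalone sketch mirroring the impls above: included profile defaults are merged with `IgnoreDuplicate`, the accumulated `--set` overrides are applied with `Override`, and two `--set` flags for the same key hit `ErrorOnDuplicate`.

    #[derive(Copy, Clone)]
    enum ReplaceOpt {
        IgnoreDuplicate,
        Override,
        ErrorOnDuplicate,
    }

    fn merge_opt<T>(current: &mut Option<T>, incoming: Option<T>, replace: ReplaceOpt) {
        match replace {
            // First value wins (layering included profile defaults).
            ReplaceOpt::IgnoreDuplicate => {
                if current.is_none() {
                    *current = incoming;
                }
            }
            // Incoming value wins (applying `--set` overrides on top).
            ReplaceOpt::Override => {
                if incoming.is_some() {
                    *current = incoming;
                }
            }
            // Two values for the same key is an error (duplicate `--set` flags).
            ReplaceOpt::ErrorOnDuplicate => {
                if incoming.is_some() {
                    assert!(current.is_none(), "overriding existing option");
                    *current = incoming;
                }
            }
        }
    }

    fn main() {
        let mut lto = Some("off".to_string());
        merge_opt(&mut lto, Some("fat".to_string()), ReplaceOpt::IgnoreDuplicate);
        assert_eq!(lto.as_deref(), Some("off")); // existing value kept
        merge_opt(&mut lto, Some("fat".to_string()), ReplaceOpt::Override);
        assert_eq!(lto.as_deref(), Some("fat")); // override applied
    }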
@@ -800,6 +881,7 @@ define_config! {
android_ndk: Option<String> = "android-ndk",
sanitizers: Option<bool> = "sanitizers",
profiler: Option<bool> = "profiler",
+ rpath: Option<bool> = "rpath",
crt_static: Option<bool> = "crt-static",
musl_root: Option<String> = "musl-root",
musl_libdir: Option<String> = "musl-libdir",
@@ -849,48 +931,45 @@ impl Config {
pub fn parse(args: &[String]) -> Config {
#[cfg(test)]
- let get_toml = |_: &_| TomlConfig::default();
+ fn get_toml(_: &Path) -> TomlConfig {
+ TomlConfig::default()
+ }
+
#[cfg(not(test))]
- let get_toml = |file: &Path| {
+ fn get_toml(file: &Path) -> TomlConfig {
let contents =
t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
// Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
// TomlConfig and sub types to be monomorphized 5x by toml.
- match toml::from_str(&contents)
+ toml::from_str(&contents)
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
- {
- Ok(table) => table,
- Err(err) => {
- eprintln!("failed to parse TOML configuration '{}': {}", file.display(), err);
+ .unwrap_or_else(|err| {
+ eprintln!("failed to parse TOML configuration '{}': {err}", file.display());
crate::detail_exit(2);
- }
- }
- };
-
+ })
+ }
Self::parse_inner(args, get_toml)
}
- fn parse_inner<'a>(args: &[String], get_toml: impl 'a + Fn(&Path) -> TomlConfig) -> Config {
- let flags = Flags::parse(&args);
+ fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config {
+ let mut flags = Flags::parse(&args);
let mut config = Config::default_opts();
// Set flags.
+ config.paths = std::mem::take(&mut flags.paths);
config.exclude = flags.exclude.into_iter().map(|path| TaskPath::parse(path)).collect();
config.include_default_paths = flags.include_default_paths;
config.rustc_error_format = flags.rustc_error_format;
config.json_output = flags.json_output;
config.on_fail = flags.on_fail;
- config.jobs = flags.jobs.map(threads_from_config);
+ config.jobs = Some(threads_from_config(flags.jobs as u32));
config.cmd = flags.cmd;
config.incremental = flags.incremental;
config.dry_run = if flags.dry_run { DryRun::UserSelected } else { DryRun::Disabled };
config.keep_stage = flags.keep_stage;
config.keep_stage_std = flags.keep_stage_std;
config.color = flags.color;
- config.free_args = flags.free_args.clone().unwrap_or_default();
- if let Some(value) = flags.deny_warnings {
- config.deny_warnings = value;
- }
+ config.free_args = std::mem::take(&mut flags.free_args);
config.llvm_profile_use = flags.llvm_profile_use;
config.llvm_profile_generate = flags.llvm_profile_generate;
config.llvm_bolt_profile_generate = flags.llvm_bolt_profile_generate;
@@ -985,9 +1064,41 @@ impl Config {
include_path.push("defaults");
include_path.push(format!("config.{}.toml", include));
let included_toml = get_toml(&include_path);
- toml.merge(included_toml);
+ toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate);
}
+ let mut override_toml = TomlConfig::default();
+ for option in flags.set.iter() {
+ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
+ toml::from_str(&option)
+ .and_then(|table: toml::Value| TomlConfig::deserialize(table))
+ }
+
+ let mut err = match get_table(option) {
+ Ok(v) => {
+ override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate);
+ continue;
+ }
+ Err(e) => e,
+ };
+ // We want to be able to set string values without quotes,
+ // like in `configure.py`. Try adding quotes around the right hand side
+ if let Some((key, value)) = option.split_once("=") {
+ if !value.contains('"') {
+ match get_table(&format!(r#"{key}="{value}""#)) {
+ Ok(v) => {
+ override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate);
+ continue;
+ }
+ Err(e) => err = e,
+ }
+ }
+ }
+ eprintln!("failed to parse override `{option}`: `{err}`");
+ crate::detail_exit(2)
+ }
+ toml.merge(override_toml, ReplaceOpt::Override);
+
config.changelog_seen = toml.changelog_seen;
let build = toml.build.unwrap_or_default();
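The retry-with-quotes fallback above can be read as a small standalone helper (hypothetical name; same `toml` calls as in the patch): `--set rust.lto=fat` is not valid TOML because the value is an unquoted string, so it is re-parsed as `rust.lto="fat"` before the override is rejected.

    /// Parse a `section.option=value` override, retrying with the value quoted.
    fn parse_override(option: &str) -> Result<toml::Value, toml::de::Error> {
        toml::from_str(option).or_else(|err| {
            if let Some((key, value)) = option.split_once('=') {
                // Only retry when the user didn't quote the value themselves.
                if !value.contains('"') {
                    return toml::from_str(&format!(r#"{key}="{value}""#));
                }
            }
            Err(err)
        })
    }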
@@ -1007,9 +1118,12 @@ impl Config {
config.download_beta_toolchain();
config.out.join(config.build.triple).join("stage0/bin/rustc")
});
+
config.initial_cargo = build
.cargo
- .map(PathBuf::from)
+ .map(|cargo| {
+ t!(PathBuf::from(cargo).canonicalize(), "`initial_cargo` not found on disk")
+ })
.unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo"));
// NOTE: it's important this comes *after* we set `initial_rustc` just above.
@@ -1019,14 +1133,14 @@ impl Config {
config.out = dir;
}
- config.hosts = if let Some(arg_host) = flags.host {
+ config.hosts = if let Some(TargetSelectionList(arg_host)) = flags.host {
arg_host
} else if let Some(file_host) = build.host {
file_host.iter().map(|h| TargetSelection::from_user(h)).collect()
} else {
vec![config.build]
};
- config.targets = if let Some(arg_target) = flags.target {
+ config.targets = if let Some(TargetSelectionList(arg_target)) = flags.target {
arg_target
} else if let Some(file_target) = build.target {
file_target.iter().map(|h| TargetSelection::from_user(h)).collect()
@@ -1062,7 +1176,7 @@ impl Config {
set(&mut config.print_step_rusage, build.print_step_rusage);
set(&mut config.patch_binaries_for_nix, build.patch_binaries_for_nix);
- config.verbose = cmp::max(config.verbose, flags.verbose);
+ config.verbose = cmp::max(config.verbose, flags.verbose as usize);
if let Some(install) = toml.install {
config.prefix = install.prefix.map(PathBuf::from);
@@ -1135,7 +1249,14 @@ impl Config {
config.rustc_default_linker = rust.default_linker;
config.musl_root = rust.musl_root.map(PathBuf::from);
config.save_toolstates = rust.save_toolstates.map(PathBuf::from);
- set(&mut config.deny_warnings, flags.deny_warnings.or(rust.deny_warnings));
+ set(
+ &mut config.deny_warnings,
+ match flags.warnings {
+ Warnings::Deny => Some(true),
+ Warnings::Warn => Some(false),
+ Warnings::Default => rust.deny_warnings,
+ },
+ );
set(&mut config.backtrace_on_ice, rust.backtrace_on_ice);
set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir);
config.rust_thin_lto_import_instr_limit = rust.thin_lto_import_instr_limit;
@@ -1299,6 +1420,7 @@ impl Config {
target.qemu_rootfs = cfg.qemu_rootfs.map(PathBuf::from);
target.sanitizers = cfg.sanitizers;
target.profiler = cfg.profiler;
+ target.rpath = cfg.rpath;
config.target_config.insert(TargetSelection::from_user(&triple), target);
}
@@ -1307,7 +1429,7 @@ impl Config {
if config.llvm_from_ci {
let triple = &config.build.triple;
let ci_llvm_bin = config.ci_llvm_root().join("bin");
- let mut build_target = config
+ let build_target = config
.target_config
.entry(config.build)
.or_insert_with(|| Target::from_triple(&triple));
@@ -1440,6 +1562,28 @@ impl Config {
git
}
+ pub(crate) fn test_args(&self) -> Vec<&str> {
+ let mut test_args = match self.cmd {
+ Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => {
+ test_args.iter().flat_map(|s| s.split_whitespace()).collect()
+ }
+ _ => vec![],
+ };
+ test_args.extend(self.free_args.iter().map(|s| s.as_str()));
+ test_args
+ }
+
+ pub(crate) fn args(&self) -> Vec<&str> {
+ let mut args = match self.cmd {
+ Subcommand::Run { ref args, .. } => {
+ args.iter().flat_map(|s| s.split_whitespace()).collect()
+ }
+ _ => vec![],
+ };
+ args.extend(self.free_args.iter().map(|s| s.as_str()));
+ args
+ }
+
/// Bootstrap embeds a version number into the name of shared libraries it uploads in CI.
/// Return the version it would have used for the given commit.
pub(crate) fn artifact_version_part(&self, commit: &str) -> String {
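The two helpers above flatten whitespace-separated `--test-args`/`--args` values and then append anything passed after `--` (the free arguments). A minimal sketch of that flattening, with illustrative argument values:

    fn collect_args<'a>(test_args: &'a [String], free_args: &'a [String]) -> Vec<&'a str> {
        let mut args: Vec<&str> =
            test_args.iter().flat_map(|s| s.split_whitespace()).collect();
        args.extend(free_args.iter().map(|s| s.as_str()));
        args
    }

    fn main() {
        // e.g. `x test library/std --test-args "--quiet --exact" -- hash_map`
        let test_args = vec!["--quiet --exact".to_string()];
        let free_args = vec!["hash_map".to_string()];
        assert_eq!(collect_args(&test_args, &free_args), ["--quiet", "--exact", "hash_map"]);
    }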
@@ -1608,6 +1752,10 @@ impl Config {
self.target_config.values().any(|t| t.profiler == Some(true)) || self.profiler
}
+ pub fn rpath_enabled(&self, target: TargetSelection) -> bool {
+ self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath)
+ }
+
pub fn llvm_enabled(&self) -> bool {
self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm"))
}
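With `rpath_enabled` above, a per-target `rpath` key overrides the global `rust.rpath` default when present. A minimal sketch of the corresponding config.toml shape, written as a raw TOML string in the same style the config tests in this patch use (the target triple is only an example):

    fn main() {
        let cfg: toml::Value = toml::from_str(
            r#"
    [rust]
    rpath = true

    [target.x86_64-unknown-linux-musl]
    rpath = false
    "#,
        )
        .unwrap();
        assert_eq!(cfg["rust"]["rpath"].as_bool(), Some(true));
        assert_eq!(cfg["target"]["x86_64-unknown-linux-musl"]["rpath"].as_bool(), Some(false));
    }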
@@ -1632,6 +1780,42 @@ impl Config {
self.rust_codegen_backends.get(0).cloned()
}
+ pub fn check_build_rustc_version(&self) {
+ if self.dry_run() {
+ return;
+ }
+
+ // check that the rustc being used matches the source version exactly, or is exactly one minor version older
+ let mut cmd = Command::new(&self.initial_rustc);
+ cmd.arg("--version");
+ let rustc_output = output(&mut cmd)
+ .lines()
+ .next()
+ .unwrap()
+ .split(' ')
+ .nth(1)
+ .unwrap()
+ .split('-')
+ .next()
+ .unwrap()
+ .to_owned();
+ let rustc_version = Version::parse(&rustc_output.trim()).unwrap();
+ let source_version =
+ Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim())
+ .unwrap();
+ if !(source_version == rustc_version
+ || (source_version.major == rustc_version.major
+ && source_version.minor == rustc_version.minor + 1))
+ {
+ let prev_version = format!("{}.{}.x", source_version.major, source_version.minor - 1);
+ eprintln!(
+ "Unexpected rustc version: {}, we should use {}/{} to build source with {}",
+ rustc_version, prev_version, source_version, source_version
+ );
+ crate::detail_exit(1);
+ }
+ }
+
/// Returns the commit to download, or `None` if we shouldn't download CI artifacts.
fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Option<String> {
// If `download-rustc` is not set, default to rebuilding.
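A compact restatement of the rule `check_build_rustc_version` enforces (version numbers below are illustrative): the bootstrap rustc must report either exactly the version in `src/version`, or the same major version with a minor version one below it.

    use semver::Version;

    fn version_ok(rustc: &Version, source: &Version) -> bool {
        source == rustc
            || (source.major == rustc.major && source.minor == rustc.minor + 1)
    }

    fn main() {
        let source = Version::parse("1.72.0").unwrap();
        assert!(version_ok(&Version::parse("1.72.0").unwrap(), &source));  // same version
        assert!(version_ok(&Version::parse("1.71.0").unwrap(), &source));  // one minor older
        assert!(!version_ok(&Version::parse("1.70.0").unwrap(), &source)); // too old
    }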
diff --git a/src/bootstrap/config/tests.rs b/src/bootstrap/config/tests.rs
index 50569eb4f..4de84b543 100644
--- a/src/bootstrap/config/tests.rs
+++ b/src/bootstrap/config/tests.rs
@@ -1,12 +1,11 @@
-use super::{Config, TomlConfig};
+use super::{Config, Flags};
+use clap::CommandFactory;
use std::{env, path::Path};
-fn toml(config: &str) -> impl '_ + Fn(&Path) -> TomlConfig {
- |&_| toml::from_str(config).unwrap()
-}
-
fn parse(config: &str) -> Config {
- Config::parse_inner(&["check".to_owned(), "--config=/does/not/exist".to_owned()], toml(config))
+ Config::parse_inner(&["check".to_owned(), "--config=/does/not/exist".to_owned()], |&_| {
+ toml::from_str(config).unwrap()
+ })
}
#[test]
@@ -88,3 +87,75 @@ fn detect_src_and_out() {
test(parse("build.build-dir = \"/tmp\""), build_dir);
}
}
+
+#[test]
+fn clap_verify() {
+ Flags::command().debug_assert();
+}
+
+#[test]
+fn override_toml() {
+ let config = Config::parse_inner(
+ &[
+ "check".to_owned(),
+ "--config=/does/not/exist".to_owned(),
+ "--set=changelog-seen=1".to_owned(),
+ "--set=rust.lto=fat".to_owned(),
+ "--set=rust.deny-warnings=false".to_owned(),
+ "--set=build.gdb=\"bar\"".to_owned(),
+ "--set=build.tools=[\"cargo\"]".to_owned(),
+ "--set=llvm.build-config={\"foo\" = \"bar\"}".to_owned(),
+ ],
+ |&_| {
+ toml::from_str(
+ r#"
+changelog-seen = 0
+[rust]
+lto = "off"
+deny-warnings = true
+
+[build]
+gdb = "foo"
+tools = []
+
+[llvm]
+download-ci-llvm = false
+build-config = {}
+ "#,
+ )
+ .unwrap()
+ },
+ );
+ assert_eq!(config.changelog_seen, Some(1), "setting top-level value");
+ assert_eq!(
+ config.rust_lto,
+ crate::config::RustcLto::Fat,
+ "setting string value without quotes"
+ );
+ assert_eq!(config.gdb, Some("bar".into()), "setting string value with quotes");
+ assert_eq!(config.deny_warnings, false, "setting boolean value");
+ assert_eq!(
+ config.tools,
+ Some(["cargo".to_string()].into_iter().collect()),
+ "setting list value"
+ );
+ assert_eq!(
+ config.llvm_build_config,
+ [("foo".to_string(), "bar".to_string())].into_iter().collect(),
+ "setting dictionary value"
+ );
+}
+
+#[test]
+#[should_panic]
+fn override_toml_duplicate() {
+ Config::parse_inner(
+ &[
+ "check".to_owned(),
+ "--config=/does/not/exist".to_owned(),
+ "--set=changelog-seen=1".to_owned(),
+ "--set=changelog-seen=2".to_owned(),
+ ],
+ |&_| toml::from_str("changelog-seen = 0").unwrap(),
+ );
+}
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
index abd28b400..571062a3a 100755
--- a/src/bootstrap/configure.py
+++ b/src/bootstrap/configure.py
@@ -139,6 +139,10 @@ v("musl-root-mips64", "target.mips64-unknown-linux-muslabi64.musl-root",
"mips64-unknown-linux-muslabi64 install directory")
v("musl-root-mips64el", "target.mips64el-unknown-linux-muslabi64.musl-root",
"mips64el-unknown-linux-muslabi64 install directory")
+v("musl-root-riscv32gc", "target.riscv32gc-unknown-linux-musl.musl-root",
+ "riscv32gc-unknown-linux-musl install directory")
+v("musl-root-riscv64gc", "target.riscv64gc-unknown-linux-musl.musl-root",
+ "riscv64gc-unknown-linux-musl install directory")
v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs",
"rootfs in qemu testing, you probably don't want to use this")
v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs",
@@ -149,8 +153,7 @@ v("experimental-targets", "llvm.experimental-targets",
"experimental LLVM targets to build")
v("release-channel", "rust.channel", "the name of the release channel to build")
v("release-description", "rust.description", "optional descriptive string for version output")
-v("dist-compression-formats", None,
- "comma-separated list of compression formats to use")
+v("dist-compression-formats", None, "List of compression formats to use")
# Used on systems where "cc" is unavailable
v("default-linker", "rust.default-linker", "the default linker")
@@ -164,8 +167,8 @@ o("extended", "build.extended", "build an extended rust tool set")
v("tools", None, "List of extended tools will be installed")
v("codegen-backends", None, "List of codegen backends to build")
v("build", "build.build", "GNUs ./configure syntax LLVM build triple")
-v("host", None, "GNUs ./configure syntax LLVM host triples")
-v("target", None, "GNUs ./configure syntax LLVM target triples")
+v("host", None, "List of GNUs ./configure syntax LLVM host triples")
+v("target", None, "List of GNUs ./configure syntax LLVM target triples")
v("set", None, "set arbitrary key/value pairs in TOML configuration")
@@ -178,6 +181,11 @@ def err(msg):
print("configure: error: " + msg)
sys.exit(1)
+def is_value_list(key):
+ for option in options:
+ if option.name == key and option.desc.startswith('List of'):
+ return True
+ return False
if '--help' in sys.argv or '-h' in sys.argv:
print('Usage: ./configure [options]')
@@ -291,6 +299,8 @@ def set(key, value, config):
parts = key.split('.')
for i, part in enumerate(parts):
if i == len(parts) - 1:
+ if is_value_list(part) and isinstance(value, str):
+ value = value.split(',')
arr[part] = value
else:
if part not in arr:
@@ -417,6 +427,8 @@ def parse_example_config(known_args, config):
# Avoid using quotes unless it's necessary.
targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
+ if 'profile' not in config:
+ set('profile', 'user', config)
configure_file(sections, top_level_keys, targets, config)
return section_order, sections, targets
@@ -475,7 +487,7 @@ def configure_section(lines, config):
def configure_top_level_key(lines, top_level_key, value):
for i, line in enumerate(lines):
if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '):
- lines[i] = "{} = {}".format(top_level_key, value)
+ lines[i] = "{} = {}".format(top_level_key, to_toml(value))
return
raise RuntimeError("failed to find config line for {}".format(top_level_key))
@@ -521,8 +533,14 @@ def write_config_toml(writer, section_order, targets, sections):
else:
writer = write_uncommented(sections[section], writer)
+def quit_if_file_exists(file):
+ if os.path.isfile(file):
+ err("Existing '" + file + "' detected.")
if __name__ == "__main__":
+ # If 'config.toml' already exists, exit the script at this point
+ quit_if_file_exists('config.toml')
+
p("processing command line")
# Parse all known arguments into a configuration structure that reflects the
# TOML we're going to write out
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 76aad16c1..9cead7adc 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -822,6 +822,8 @@ fn copy_src_dirs(
"llvm-project\\compiler-rt",
"llvm-project/cmake",
"llvm-project\\cmake",
+ "llvm-project/runtimes",
+ "llvm-project\\runtimes",
];
if spath.contains("llvm-project")
&& !spath.ends_with("llvm-project")
@@ -997,12 +999,15 @@ impl Step for PlainSourceTarball {
// If we're building from git sources, we need to vendor a complete distribution.
if builder.rust_info().is_managed_git_subrepository() {
// Ensure we have the submodules checked out.
+ builder.update_submodule(Path::new("src/tools/cargo"));
builder.update_submodule(Path::new("src/tools/rust-analyzer"));
// Vendor all Cargo dependencies
let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("vendor")
.arg("--sync")
+ .arg(builder.src.join("./src/tools/cargo/Cargo.toml"))
+ .arg("--sync")
.arg(builder.src.join("./src/tools/rust-analyzer/Cargo.toml"))
.arg("--sync")
.arg(builder.src.join("./compiler/rustc_codegen_cranelift/Cargo.toml"))
@@ -1963,20 +1968,6 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
}
}
- // FIXME: for reasons I don't understand, the LLVM so in the `rustc` component is different than the one in `rust-dev`.
- // Only the one in `rustc` works with the downloaded compiler.
- if builder.download_rustc() && target == builder.build.build {
- let src_libdir = builder.ci_rustc_dir(target).join("lib");
- for entry in t!(std::fs::read_dir(&src_libdir)) {
- let entry = t!(entry);
- if entry.file_name().to_str().unwrap().starts_with("libLLVM-") {
- install_llvm_file(builder, &entry.path(), dst_libdir);
- return !builder.config.dry_run();
- }
- }
- panic!("libLLVM.so not found in src_libdir {}!", src_libdir.display());
- }
-
// On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib
// instead of libLLVM-11-rust-....dylib, as on linux. It's not entirely
// clear why this is the case, though. llvm-config will emit the versioned
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 9ad98eb57..b52c3b68c 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -438,7 +438,7 @@ impl Step for Std {
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.all_krates("test").path("library").default_condition(builder.config.docs)
+ run.all_krates("sysroot").path("library").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig<'_>) {
@@ -839,6 +839,8 @@ macro_rules! tool_doc {
)+
cargo.rustdocflag("--document-private-items");
+ // Since we always pass --document-private-items, there's no need to warn about linking to private items.
+ cargo.rustdocflag("-Arustdoc::private-intra-doc-links");
cargo.rustdocflag("--enable-index-page");
cargo.rustdocflag("--show-type-layout");
cargo.rustdocflag("--generate-link-to-definition");
@@ -882,7 +884,8 @@ tool_doc!(
// "cargo-credential-wincred",
]
);
-tool_doc!(Tidy, "tidy", "src/tools/tidy", ["tidy"]);
+tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, ["tidy"]);
+tool_doc!(Bootstrap, "bootstrap", "src/bootstrap", rustc_tool = false, ["bootstrap"]);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp
index 36f9aaa59..4111b7cc0 100644
--- a/src/bootstrap/download-ci-llvm-stamp
+++ b/src/bootstrap/download-ci-llvm-stamp
@@ -1,4 +1,4 @@
Change this file to make users of the `download-ci-llvm` configuration download
a new version of LLVM from CI, even if the LLVM submodule hasn’t changed.
-Last change is for: https://github.com/rust-lang/rust/pull/109373
+Last change is for: https://github.com/rust-lang/rust/pull/96971
diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs
index 242515565..c7969d2a2 100644
--- a/src/bootstrap/download.rs
+++ b/src/bootstrap/download.rs
@@ -2,7 +2,7 @@ use std::{
env,
ffi::{OsStr, OsString},
fs::{self, File},
- io::{BufRead, BufReader, ErrorKind},
+ io::{BufRead, BufReader, BufWriter, ErrorKind, Write},
path::{Path, PathBuf},
process::{Command, Stdio},
};
@@ -112,7 +112,7 @@ impl Config {
is_nixos && !Path::new("/lib").exists()
});
if val {
- println!("info: You seem to be using Nix.");
+ eprintln!("info: You seem to be using Nix.");
}
val
}
@@ -123,7 +123,7 @@ impl Config {
/// This is only required on NixOS and uses the PatchELF utility to
/// change the interpreter/RPATH of ELF executables.
///
- /// Please see https://nixos.org/patchelf.html for more information
+ /// Please see <https://nixos.org/patchelf.html> for more information
fn fix_bin_or_dylib(&self, fname: &Path) {
assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true));
println!("attempting to patch {}", fname.display());
@@ -219,14 +219,14 @@ impl Config {
"30", // timeout if cannot connect within 30 seconds
"--retry",
"3",
- "-Sf",
+ "-SRf",
]);
curl.arg(url);
let f = File::create(tempfile).unwrap();
curl.stdout(Stdio::from(f));
if !self.check_run(&mut curl) {
if self.build.contains("windows-msvc") {
- println!("Fallback to PowerShell");
+ eprintln!("Fallback to PowerShell");
for _ in 0..3 {
if self.try_run(Command::new("PowerShell.exe").args(&[
"/nologo",
@@ -239,7 +239,7 @@ impl Config {
])) {
return;
}
- println!("\nspurious failure, trying again");
+ eprintln!("\nspurious failure, trying again");
}
}
if !help_on_error.is_empty() {
@@ -250,7 +250,7 @@ impl Config {
}
fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
- println!("extracting {} to {}", tarball.display(), dst.display());
+ eprintln!("extracting {} to {}", tarball.display(), dst.display());
if !dst.exists() {
t!(fs::create_dir_all(dst));
}
@@ -262,10 +262,20 @@ impl Config {
let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
// decompress the file
- let data = t!(File::open(tarball));
+ let data = t!(File::open(tarball), format!("file {} not found", tarball.display()));
let decompressor = XzDecoder::new(BufReader::new(data));
let mut tar = tar::Archive::new(decompressor);
+
+ // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding
+ // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow.
+ // Cache the entries when we extract it so we only have to read it once.
+ let mut recorded_entries = if dst.ends_with("ci-rustc") && pattern == "rustc-dev" {
+ Some(BufWriter::new(t!(File::create(dst.join(".rustc-dev-contents")))))
+ } else {
+ None
+ };
+
for member in t!(tar.entries()) {
let mut member = t!(member);
let original_path = t!(member.path()).into_owned();
@@ -283,13 +293,19 @@ impl Config {
if !t!(member.unpack_in(dst)) {
panic!("path traversal attack ??");
}
+ if let Some(record) = &mut recorded_entries {
+ t!(writeln!(record, "{}", short_path.to_str().unwrap()));
+ }
let src_path = dst.join(original_path);
if src_path.is_dir() && dst_path.exists() {
continue;
}
t!(fs::rename(src_path, dst_path));
}
- t!(fs::remove_dir_all(dst.join(directory_prefix)));
+ let dst_dir = dst.join(directory_prefix);
+ if dst_dir.exists() {
+ t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display()));
+ }
}
/// Returns whether the SHA256 checksum of `path` matches `expected`.
@@ -365,6 +381,13 @@ impl Config {
Some(rustfmt_path)
}
+ pub(crate) fn rustc_dev_contents(&self) -> Vec<String> {
+ assert!(self.download_rustc());
+ let ci_rustc_dir = self.out.join(&*self.build.triple).join("ci-rustc");
+ let rustc_dev_contents_file = t!(File::open(ci_rustc_dir.join(".rustc-dev-contents")));
+ t!(BufReader::new(rustc_dev_contents_file).lines().collect())
+ }
+
pub(crate) fn download_ci_rustc(&self, commit: &str) {
self.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
@@ -404,7 +427,6 @@ impl Config {
fn download_toolchain(
&self,
- // FIXME(ozkanonur) use CompilerMetadata instead of `version: &str`
version: &str,
sysroot: &str,
stamp_key: &str,
@@ -518,7 +540,18 @@ impl Config {
None
};
- self.download_file(&format!("{base_url}/{url}"), &tarball, "");
+ let mut help_on_error = "";
+ if destination == "ci-rustc" {
+ help_on_error = "error: failed to download pre-built rustc from CI
+
+note: old builds get deleted after a certain time
+help: if trying to compile an old commit of rustc, disable `download-rustc` in config.toml:
+
+[rust]
+download-rustc = false
+";
+ }
+ self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error);
if let Some(sha256) = checksum {
if !self.verify(&tarball, sha256) {
panic!("failed to verify {}", tarball.display());
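The `.rustc-dev-contents` file introduced above is just a newline-separated list of the paths unpacked from the `rustc-dev` tarball, written once during extraction and read back by `rustc_dev_contents` so the large tarball never has to be parsed again. A self-contained sketch of that round trip, with illustrative helper names:

    use std::fs::File;
    use std::io::{BufRead, BufReader, BufWriter, Write};
    use std::path::Path;

    fn record_entries(ci_rustc_dir: &Path, entries: &[&str]) -> std::io::Result<()> {
        let mut record = BufWriter::new(File::create(ci_rustc_dir.join(".rustc-dev-contents"))?);
        for entry in entries {
            writeln!(record, "{entry}")?;
        }
        Ok(())
    }

    fn read_entries(ci_rustc_dir: &Path) -> std::io::Result<Vec<String>> {
        let file = File::open(ci_rustc_dir.join(".rustc-dev-contents"))?;
        BufReader::new(file).lines().collect()
    }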
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index b6f5f3103..80e715777 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -3,726 +3,415 @@
//! This module implements the command-line parsing of the build system which
//! has various flags to configure how it's run.
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
-use getopts::Options;
+use clap::{CommandFactory, Parser, ValueEnum};
use crate::builder::{Builder, Kind};
-use crate::config::{Config, TargetSelection};
+use crate::config::{target_selection_list, Config, TargetSelectionList};
use crate::setup::Profile;
-use crate::util::t;
use crate::{Build, DocTests};
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Default, Debug, ValueEnum)]
pub enum Color {
Always,
Never,
+ #[default]
Auto,
}
-impl Default for Color {
- fn default() -> Self {
- Self::Auto
- }
-}
-
-impl std::str::FromStr for Color {
- type Err = ();
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s.to_lowercase().as_str() {
- "always" => Ok(Self::Always),
- "never" => Ok(Self::Never),
- "auto" => Ok(Self::Auto),
- _ => Err(()),
- }
- }
+/// Whether to deny warnings, emit them as warnings, or use the default behavior
+#[derive(Copy, Clone, Default, Debug, ValueEnum)]
+pub enum Warnings {
+ Deny,
+ Warn,
+ #[default]
+ Default,
}
/// Deserialized version of all flags for this compile.
+#[derive(Debug, Parser)]
+#[clap(
+ override_usage = "x.py <subcommand> [options] [<paths>...]",
+ disable_help_subcommand(true),
+ about = "",
+ next_line_help(false)
+)]
pub struct Flags {
- pub verbose: usize, // number of -v args; each extra -v after the first is passed to Cargo
- pub on_fail: Option<String>,
- pub stage: Option<u32>,
- pub keep_stage: Vec<u32>,
- pub keep_stage_std: Vec<u32>,
+ #[command(subcommand)]
+ pub cmd: Subcommand,
- pub host: Option<Vec<TargetSelection>>,
- pub target: Option<Vec<TargetSelection>>,
+ #[arg(global(true), short, long, action = clap::ArgAction::Count)]
+ /// use verbose output (-vv for very verbose)
+ pub verbose: u8, // each extra -v after the first is passed to Cargo
+ #[arg(global(true), short, long)]
+ /// use incremental compilation
+ pub incremental: bool,
+ #[arg(global(true), long, value_hint = clap::ValueHint::FilePath, value_name = "FILE")]
+ /// TOML configuration file for build
pub config: Option<PathBuf>,
+ #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")]
+ /// Build directory, overrides `build.build-dir` in `config.toml`
pub build_dir: Option<PathBuf>,
- pub jobs: Option<u32>,
- pub cmd: Subcommand,
- pub incremental: bool,
+
+ #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "BUILD")]
+ /// build target of the stage0 compiler
+ pub build: Option<String>,
+
+ #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "HOST", value_parser = target_selection_list)]
+ /// host targets to build
+ pub host: Option<TargetSelectionList>,
+
+ #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "TARGET", value_parser = target_selection_list)]
+ /// target targets to build
+ pub target: Option<TargetSelectionList>,
+
+ #[arg(global(true), long, value_name = "PATH")]
+ /// build paths to exclude
pub exclude: Vec<PathBuf>,
+ #[arg(global(true), long)]
+ /// include default paths in addition to the provided ones
pub include_default_paths: bool,
+
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long)]
pub rustc_error_format: Option<String>,
- pub json_output: bool,
+
+ #[arg(global(true), long, value_hint = clap::ValueHint::CommandString, value_name = "CMD")]
+ /// command to run on failure
+ pub on_fail: Option<String>,
+ #[arg(global(true), long)]
+ /// dry run; don't build anything
pub dry_run: bool,
- pub color: Color,
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
+ /// stage to build (indicates compiler to use/test, e.g., stage 0 uses the
+ /// bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)
+ pub stage: Option<u32>,
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
+ /// stage(s) to keep without recompiling
+ /// (pass multiple times to keep e.g., both stages 0 and 1)
+ pub keep_stage: Vec<u32>,
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
+ /// stage(s) of the standard library to keep without recompiling
+ /// (pass multiple times to keep e.g., both stages 0 and 1)
+ pub keep_stage_std: Vec<u32>,
+ #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")]
+ /// path to the root of the rust checkout
+ pub src: Option<PathBuf>,
+
+ #[arg(
+ global(true),
+ short,
+ long,
+ value_hint = clap::ValueHint::Other,
+ default_value_t = std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get),
+ value_name = "JOBS"
+ )]
+ /// number of jobs to run in parallel
+ pub jobs: usize,
// This overrides the deny-warnings configuration option,
// which passes -Dwarnings to the compiler invocations.
- //
- // true => deny, false => warn
- pub deny_warnings: Option<bool>,
+ #[arg(global(true), long)]
+ #[clap(value_enum, default_value_t=Warnings::Default, value_name = "deny|warn")]
+ /// if value is deny, will deny warnings
+ /// if value is warn, will emit warnings
+ /// otherwise, use the default configured behaviour
+ pub warnings: Warnings,
+
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "FORMAT")]
+ /// rustc error format
+ pub error_format: Option<String>,
+ #[arg(global(true), long)]
+ /// use message-format=json
+ pub json_output: bool,
- pub rust_profile_use: Option<String>,
- pub rust_profile_generate: Option<String>,
+ #[arg(global(true), long, value_name = "STYLE")]
+ #[clap(value_enum, default_value_t = Color::Auto)]
+ /// whether to use color in cargo and rustc output
+ pub color: Color,
+ /// whether rebuilding llvm should be skipped, overriding `skip-rebuild` in config.toml
+ #[arg(global(true), long, value_name = "VALUE")]
+ pub llvm_skip_rebuild: Option<bool>,
+ /// generate PGO profile with rustc build
+ #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
+ pub rust_profile_generate: Option<String>,
+ /// use PGO profile for rustc build
+ #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
+ pub rust_profile_use: Option<String>,
+ /// use PGO profile for LLVM build
+ #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
pub llvm_profile_use: Option<String>,
// LLVM doesn't support a custom location for generating profile
// information.
//
// llvm_out/build/profiles/ is the location this writes to.
+ /// generate PGO profile with llvm built for rustc
+ #[arg(global(true), long)]
pub llvm_profile_generate: bool,
+ /// generate BOLT profile for LLVM build
+ #[arg(global(true), long)]
pub llvm_bolt_profile_generate: bool,
+ /// use BOLT profile for LLVM build
+ #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
pub llvm_bolt_profile_use: Option<String>,
+ #[arg(global(true))]
+ /// paths for the subcommand
+ pub paths: Vec<PathBuf>,
+ /// override options in config.toml
+ #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "section.option=value")]
+ pub set: Vec<String>,
+ /// arguments passed to subcommands
+ #[arg(global(true), last(true), value_name = "ARGS")]
+ pub free_args: Vec<String>,
+}
+
+impl Flags {
+ pub fn parse(args: &[String]) -> Self {
+ let first = String::from("x.py");
+ let it = std::iter::once(&first).chain(args.iter());
+ // We need to check for `<cmd> -h -v`, in which case we list the paths
+ #[derive(Parser)]
+ #[clap(disable_help_flag(true))]
+ struct HelpVerboseOnly {
+ #[arg(short, long)]
+ help: bool,
+ #[arg(global(true), short, long, action = clap::ArgAction::Count)]
+ pub verbose: u8,
+ #[arg(value_enum)]
+ cmd: Kind,
+ }
+ if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) =
+ HelpVerboseOnly::try_parse_from(it.clone())
+ {
+ println!("note: updating submodules before printing available paths");
+ let config = Config::parse(&[String::from("build")]);
+ let build = Build::new(config);
+ let paths = Builder::get_help(&build, subcommand);
+ if let Some(s) = paths {
+ println!("{}", s);
+ } else {
+ panic!("No paths available for subcommand `{}`", subcommand.as_str());
+ }
+ crate::detail_exit(0);
+ }
- /// Arguments appearing after `--` to be forwarded to tools,
- /// e.g. `--fix-broken` or test arguments.
- pub free_args: Option<Vec<String>>,
+ Flags::parse_from(it)
+ }
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Default, clap::Subcommand)]
pub enum Subcommand {
- Build {
- paths: Vec<PathBuf>,
- },
+ #[clap(aliases = ["b"], long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to compile. For example, for a quick build of a usable
+ compiler:
+ ./x.py build --stage 1 library/std
+ This will build a compiler and standard library from the local source code.
+ Once this is done, build/$ARCH/stage1 contains a usable compiler.
+ If no arguments are passed then the default artifacts for that stage are
+ compiled. For example:
+ ./x.py build --stage 0
+ ./x.py build ")]
+ /// Compile either the compiler or libraries
+ #[default]
+ Build,
+ #[clap(aliases = ["c"], long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to compile. For example:
+ ./x.py check library/std
+ If no arguments are passed then many artifacts are checked.")]
+ /// Compile either the compiler or libraries, using cargo check
Check {
- paths: Vec<PathBuf>,
+ #[arg(long)]
+ /// Check all targets
+ all_targets: bool,
},
+ /// Run Clippy (uses rustup/cargo-installed clippy binary)
+ #[clap(long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to run clippy against. For example:
+ ./x.py clippy library/core
+ ./x.py clippy library/core library/proc_macro")]
Clippy {
+ #[arg(long)]
fix: bool,
- paths: Vec<PathBuf>,
- clippy_lint_allow: Vec<String>,
- clippy_lint_deny: Vec<String>,
- clippy_lint_warn: Vec<String>,
- clippy_lint_forbid: Vec<String>,
- },
- Fix {
- paths: Vec<PathBuf>,
+ /// clippy lints to allow
+ #[arg(global(true), short = 'A', action = clap::ArgAction::Append, value_name = "LINT")]
+ allow: Vec<String>,
+ /// clippy lints to deny
+ #[arg(global(true), short = 'D', action = clap::ArgAction::Append, value_name = "LINT")]
+ deny: Vec<String>,
+ /// clippy lints to warn on
+ #[arg(global(true), short = 'W', action = clap::ArgAction::Append, value_name = "LINT")]
+ warn: Vec<String>,
+ /// clippy lints to forbid
+ #[arg(global(true), short = 'F', action = clap::ArgAction::Append, value_name = "LINT")]
+ forbid: Vec<String>,
},
+ /// Run cargo fix
+ #[clap(long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to run `cargo fix` against. For example:
+ ./x.py fix library/core
+ ./x.py fix library/core library/proc_macro")]
+ Fix,
+ #[clap(
+ name = "fmt",
+ long_about = "\n
+ Arguments:
+ This subcommand optionally accepts a `--check` flag which succeeds if formatting is correct and
+ fails if it is not. For example:
+ ./x.py fmt
+ ./x.py fmt --check"
+ )]
+ /// Run rustfmt
Format {
- paths: Vec<PathBuf>,
+ /// check formatting instead of applying
+ #[arg(long)]
check: bool,
},
+ #[clap(aliases = ["d"], long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to directories of documentation
+ to build. For example:
+ ./x.py doc src/doc/book
+ ./x.py doc src/doc/nomicon
+ ./x.py doc src/doc/book library/std
+ ./x.py doc library/std --json
+ ./x.py doc library/std --open
+ If no arguments are passed then everything is documented:
+ ./x.py doc
+ ./x.py doc --stage 1")]
+ /// Build documentation
Doc {
- paths: Vec<PathBuf>,
+ #[arg(long)]
+ /// open the docs in a browser
open: bool,
+ #[arg(long)]
+ /// render the documentation in JSON format in addition to the usual HTML format
json: bool,
},
+ #[clap(aliases = ["t"], long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to test directories that
+ should be compiled and run. For example:
+ ./x.py test tests/ui
+ ./x.py test library/std --test-args hash_map
+ ./x.py test library/std --stage 0 --no-doc
+ ./x.py test tests/ui --bless
+ ./x.py test tests/ui --compare-mode chalk
+ Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`;
+ just like `build library/std --stage N` it tests the compiler produced by the previous
+ stage.
+ Execute tool tests with a tool name argument:
+ ./x.py test tidy
+ If no arguments are passed then the complete artifacts for that stage are
+ compiled and tested.
+ ./x.py test
+ ./x.py test --stage 1")]
+ /// Build and run some test suites
Test {
- paths: Vec<PathBuf>,
- /// Whether to automatically update stderr/stdout files
+ #[arg(long)]
+ /// run all tests regardless of failure
+ no_fail_fast: bool,
+ #[arg(long, value_name = "SUBSTRING")]
+ /// skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times
+ skip: Vec<String>,
+ #[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
+ /// extra arguments to be passed for the test tool being used
+ /// (e.g. libtest, compiletest or rustdoc)
+ test_args: Vec<String>,
+ /// extra options to pass the compiler when running tests
+ #[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
+ rustc_args: Vec<String>,
+ #[arg(long)]
+ /// do not run doc tests
+ no_doc: bool,
+ #[arg(long)]
+ /// only run doc tests
+ doc: bool,
+ #[arg(long)]
+ /// whether to automatically update stderr/stdout files
bless: bool,
+ #[arg(long)]
+ /// rerun tests even if the inputs are unchanged
force_rerun: bool,
+ #[arg(long)]
+ /// only run tests whose results have changed
+ only_modified: bool,
+ #[arg(long, value_name = "COMPARE MODE")]
+ /// mode describing what file the actual ui output will be compared to
compare_mode: Option<String>,
+ #[arg(long, value_name = "check | build | run")]
+ /// force {check,build,run}-pass tests to this mode.
pass: Option<String>,
+ #[arg(long, value_name = "auto | always | never")]
+ /// whether to execute run-* tests
run: Option<String>,
- test_args: Vec<String>,
- rustc_args: Vec<String>,
- fail_fast: bool,
- doc_tests: DocTests,
+ #[arg(long)]
+ /// enable this to generate a Rustfix coverage file, which is saved in
+ /// `/<build_base>/rustfix_missing_coverage.txt`
rustfix_coverage: bool,
- only_modified: bool,
},
+ /// Build and run some benchmarks
Bench {
- paths: Vec<PathBuf>,
+ #[arg(long, allow_hyphen_values(true))]
test_args: Vec<String>,
},
+ /// Clean out build directories
Clean {
- paths: Vec<PathBuf>,
+ #[arg(long)]
all: bool,
},
- Dist {
- paths: Vec<PathBuf>,
- },
- Install {
- paths: Vec<PathBuf>,
- },
+ /// Build distribution artifacts
+ Dist,
+ /// Install distribution artifacts
+ Install,
+ #[clap(aliases = ["r"], long_about = "\n
+ Arguments:
+ This subcommand accepts a number of paths to tools to build and run. For
+ example:
+ ./x.py run src/tools/expand-yaml-anchors
+ At least one tool needs to be specified.")]
+ /// Run tools contained in this repository
Run {
- paths: Vec<PathBuf>,
+ /// arguments for the tool
+ #[arg(long, allow_hyphen_values(true))]
args: Vec<String>,
},
- Setup {
- profile: Option<PathBuf>,
- },
- Suggest {
- run: bool,
- },
-}
-
-impl Default for Subcommand {
- fn default() -> Subcommand {
- Subcommand::Build { paths: vec![PathBuf::from("nowhere")] }
- }
-}
-
-impl Flags {
- pub fn parse(args: &[String]) -> Flags {
- let (args, free_args) = if let Some(pos) = args.iter().position(|s| s == "--") {
- let (args, free) = args.split_at(pos);
- (args, Some(free[1..].to_vec()))
- } else {
- (args, None)
- };
- let mut subcommand_help = String::from(
- "\
-Usage: x.py <subcommand> [options] [<paths>...]
-
-Subcommands:
- build, b Compile either the compiler or libraries
- check, c Compile either the compiler or libraries, using cargo check
- clippy Run clippy (uses rustup/cargo-installed clippy binary)
- fix Run cargo fix
- fmt Run rustfmt
- test, t Build and run some test suites
- bench Build and run some benchmarks
- doc, d Build documentation
- clean Clean out build directories
- dist Build distribution artifacts
- install Install distribution artifacts
- run, r Run tools contained in this repository
- setup Create a config.toml (making it easier to use `x.py` itself)
- suggest Suggest a subset of tests to run, based on modified files
-
-To learn more about a subcommand, run `./x.py <subcommand> -h`",
- );
-
- let mut opts = Options::new();
- // Options common to all subcommands
- opts.optflagmulti("v", "verbose", "use verbose output (-vv for very verbose)");
- opts.optflag("i", "incremental", "use incremental compilation");
- opts.optopt("", "config", "TOML configuration file for build", "FILE");
- opts.optopt(
- "",
- "build-dir",
- "Build directory, overrides `build.build-dir` in `config.toml`",
- "DIR",
- );
- opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
- opts.optmulti("", "host", "host targets to build", "HOST");
- opts.optmulti("", "target", "target targets to build", "TARGET");
- opts.optmulti("", "exclude", "build paths to exclude", "PATH");
- opts.optflag(
- "",
- "include-default-paths",
- "include default paths in addition to the provided ones",
- );
- opts.optopt("", "on-fail", "command to run on failure", "CMD");
- opts.optflag("", "dry-run", "dry run; don't build anything");
- opts.optopt(
- "",
- "stage",
- "stage to build (indicates compiler to use/test, e.g., stage 0 uses the \
- bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)",
- "N",
- );
- opts.optmulti(
- "",
- "keep-stage",
- "stage(s) to keep without recompiling \
- (pass multiple times to keep e.g., both stages 0 and 1)",
- "N",
- );
- opts.optmulti(
- "",
- "keep-stage-std",
- "stage(s) of the standard library to keep without recompiling \
- (pass multiple times to keep e.g., both stages 0 and 1)",
- "N",
- );
- opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
- let j_msg = format!(
- "number of jobs to run in parallel; \
- defaults to {} (this host's logical CPU count)",
- std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get)
- );
- opts.optopt("j", "jobs", &j_msg, "JOBS");
- opts.optflag("h", "help", "print this help message");
- opts.optopt(
- "",
- "warnings",
- "if value is deny, will deny warnings, otherwise use default",
- "VALUE",
- );
- opts.optopt("", "error-format", "rustc error format", "FORMAT");
- opts.optflag("", "json-output", "use message-format=json");
- opts.optopt("", "color", "whether to use color in cargo and rustc output", "STYLE");
- opts.optopt(
- "",
- "rust-profile-generate",
- "generate PGO profile with rustc build",
- "PROFILE",
- );
- opts.optopt("", "rust-profile-use", "use PGO profile for rustc build", "PROFILE");
- opts.optflag("", "llvm-profile-generate", "generate PGO profile with llvm built for rustc");
- opts.optopt("", "llvm-profile-use", "use PGO profile for llvm build", "PROFILE");
- opts.optmulti("A", "", "allow certain clippy lints", "OPT");
- opts.optmulti("D", "", "deny certain clippy lints", "OPT");
- opts.optmulti("W", "", "warn about certain clippy lints", "OPT");
- opts.optmulti("F", "", "forbid certain clippy lints", "OPT");
- opts.optflag("", "llvm-bolt-profile-generate", "generate BOLT profile for LLVM build");
- opts.optopt("", "llvm-bolt-profile-use", "use BOLT profile for LLVM build", "PROFILE");
-
- // We can't use getopt to parse the options until we have completed specifying which
- // options are valid, but under the current implementation, some options are conditional on
- // the subcommand. Therefore we must manually identify the subcommand first, so that we can
- // complete the definition of the options. Then we can use the getopt::Matches object from
- // there on out.
- let subcommand = match args.iter().find_map(|s| Kind::parse(&s)) {
- Some(s) => s,
- None => {
- // No or an invalid subcommand -- show the general usage and subcommand help
- // An exit code will be 0 when no subcommand is given, and 1 in case of an invalid
- // subcommand.
- println!("{}\n", subcommand_help);
- let exit_code = if args.is_empty() { 0 } else { 1 };
- crate::detail_exit(exit_code);
- }
- };
-
- // Some subcommands get extra options
- match subcommand {
- Kind::Test => {
- opts.optflag("", "no-fail-fast", "Run all tests regardless of failure");
- opts.optmulti("", "skip", "skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times", "SUBSTRING");
- opts.optmulti(
- "",
- "test-args",
- "extra arguments to be passed for the test tool being used \
- (e.g. libtest, compiletest or rustdoc)",
- "ARGS",
- );
- opts.optmulti(
- "",
- "rustc-args",
- "extra options to pass the compiler when running tests",
- "ARGS",
- );
- opts.optflag("", "no-doc", "do not run doc tests");
- opts.optflag("", "doc", "only run doc tests");
- opts.optflag("", "bless", "update all stderr/stdout files of failing ui tests");
- opts.optflag("", "force-rerun", "rerun tests even if the inputs are unchanged");
- opts.optflag("", "only-modified", "only run tests that result has been changed");
- opts.optopt(
- "",
- "compare-mode",
- "mode describing what file the actual ui output will be compared to",
- "COMPARE MODE",
- );
- opts.optopt(
- "",
- "pass",
- "force {check,build,run}-pass tests to this mode.",
- "check | build | run",
- );
- opts.optopt("", "run", "whether to execute run-* tests", "auto | always | never");
- opts.optflag(
- "",
- "rustfix-coverage",
- "enable this to generate a Rustfix coverage file, which is saved in \
- `/<build_base>/rustfix_missing_coverage.txt`",
- );
- }
- Kind::Check => {
- opts.optflag("", "all-targets", "Check all targets");
- }
- Kind::Bench => {
- opts.optmulti("", "test-args", "extra arguments", "ARGS");
- }
- Kind::Clippy => {
- opts.optflag("", "fix", "automatically apply lint suggestions");
- }
- Kind::Doc => {
- opts.optflag("", "open", "open the docs in a browser");
- opts.optflag(
- "",
- "json",
- "render the documentation in JSON format in addition to the usual HTML format",
- );
- }
- Kind::Clean => {
- opts.optflag("", "all", "clean all build artifacts");
- }
- Kind::Format => {
- opts.optflag("", "check", "check formatting instead of applying.");
- }
- Kind::Run => {
- opts.optmulti("", "args", "arguments for the tool", "ARGS");
- }
- Kind::Suggest => {
- opts.optflag("", "run", "run suggested tests");
- }
- _ => {}
- };
-
- // fn usage()
- let usage = |exit_code: i32, opts: &Options, verbose: bool, subcommand_help: &str| -> ! {
- println!("{}", opts.usage(subcommand_help));
- if verbose {
- // We have an unfortunate situation here: some Steps use `builder.in_tree_crates` to determine their paths.
- // To determine those crates, we need to run `cargo metadata`, which means we need all submodules to be checked out.
- // That takes a while to run, so only do it when paths were explicitly requested, not on all CLI errors.
- // `Build::new` won't load submodules for the `setup` command.
- let cmd = if verbose {
- println!("note: updating submodules before printing available paths");
- "build"
- } else {
- "setup"
- };
- let config = Config::parse(&[cmd.to_string()]);
- let build = Build::new(config);
- let paths = Builder::get_help(&build, subcommand);
-
- if let Some(s) = paths {
- println!("{}", s);
- } else {
- panic!("No paths available for subcommand `{}`", subcommand.as_str());
- }
- } else {
- println!(
- "Run `./x.py {} -h -v` to see a list of available paths.",
- subcommand.as_str()
- );
- }
- crate::detail_exit(exit_code);
- };
-
- // Done specifying what options are possible, so do the getopts parsing
- let matches = opts.parse(args).unwrap_or_else(|e| {
- // Invalid argument/option format
- println!("\n{}\n", e);
- usage(1, &opts, false, &subcommand_help);
- });
-
- // Extra sanity check to make sure we didn't hit this crazy corner case:
- //
- // ./x.py --frobulate clean build
- // ^-- option ^ ^- actual subcommand
- // \_ arg to option could be mistaken as subcommand
- let mut pass_sanity_check = true;
- match matches.free.get(0).and_then(|s| Kind::parse(&s)) {
- Some(check_subcommand) => {
- if check_subcommand != subcommand {
- pass_sanity_check = false;
- }
- }
- None => {
- pass_sanity_check = false;
- }
- }
- if !pass_sanity_check {
- eprintln!("{}\n", subcommand_help);
- eprintln!(
- "Sorry, I couldn't figure out which subcommand you were trying to specify.\n\
- You may need to move some options to after the subcommand.\n"
- );
- crate::detail_exit(1);
- }
- // Extra help text for some commands
- match subcommand {
- Kind::Build => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to directories to the crates
- and/or artifacts to compile. For example, for a quick build of a usable
- compiler:
-
- ./x.py build --stage 1 library/std
-
- This will build a compiler and standard library from the local source code.
- Once this is done, build/$ARCH/stage1 contains a usable compiler.
-
- If no arguments are passed then the default artifacts for that stage are
- compiled. For example:
-
- ./x.py build --stage 0
- ./x.py build ",
- );
- }
- Kind::Check => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to directories to the crates
- and/or artifacts to compile. For example:
-
- ./x.py check library/std
-
- If no arguments are passed then many artifacts are checked.",
- );
- }
- Kind::Clippy => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to directories to the crates
- and/or artifacts to run clippy against. For example:
-
- ./x.py clippy library/core
- ./x.py clippy library/core library/proc_macro",
- );
- }
- Kind::Fix => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to directories to the crates
- and/or artifacts to run `cargo fix` against. For example:
-
- ./x.py fix library/core
- ./x.py fix library/core library/proc_macro",
- );
- }
- Kind::Format => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand optionally accepts a `--check` flag which succeeds if formatting is correct and
- fails if it is not. For example:
-
- ./x.py fmt
- ./x.py fmt --check",
- );
- }
- Kind::Test => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to test directories that
- should be compiled and run. For example:
-
- ./x.py test tests/ui
- ./x.py test library/std --test-args hash_map
- ./x.py test library/std --stage 0 --no-doc
- ./x.py test tests/ui --bless
- ./x.py test tests/ui --compare-mode chalk
-
- Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`;
- just like `build library/std --stage N` it tests the compiler produced by the previous
- stage.
-
- Execute tool tests with a tool name argument:
-
- ./x.py test tidy
-
- If no arguments are passed then the complete artifacts for that stage are
- compiled and tested.
-
- ./x.py test
- ./x.py test --stage 1",
- );
- }
- Kind::Doc => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to directories of documentation
- to build. For example:
-
- ./x.py doc src/doc/book
- ./x.py doc src/doc/nomicon
- ./x.py doc src/doc/book library/std
- ./x.py doc library/std --json
- ./x.py doc library/std --open
-
- If no arguments are passed then everything is documented:
-
- ./x.py doc
- ./x.py doc --stage 1",
- );
- }
- Kind::Run => {
- subcommand_help.push_str(
- "\n
-Arguments:
- This subcommand accepts a number of paths to tools to build and run. For
- example:
-
- ./x.py run src/tools/expand-yaml-anchors
-
- At least a tool needs to be called.",
- );
- }
- Kind::Setup => {
- subcommand_help.push_str(&format!(
- "\n
+ /// Set up the environment for development
+ #[clap(long_about = format!(
+ "\n
x.py setup creates a `config.toml` which changes the defaults for x.py itself,
-as well as setting up a git pre-push hook, VS code config and toolchain link.
-
+as well as setting up a git pre-push hook, VS Code config and toolchain link.
Arguments:
This subcommand accepts a 'profile' to use for builds. For example:
-
./x.py setup library
-
The profile is optional and you will be prompted interactively if it is not given.
The following profiles are available:
-
{}
-
- To only set up the git hook, VS code or toolchain link, you may use
+ To only set up the git hook, VS Code config or toolchain link, you may use
./x.py setup hook
./x.py setup vscode
- ./x.py setup link
-",
- Profile::all_for_help(" ").trim_end()
- ));
- }
- Kind::Bench | Kind::Clean | Kind::Dist | Kind::Install | Kind::Suggest => {}
- };
- // Get any optional paths which occur after the subcommand
- let mut paths = matches.free[1..].iter().map(|p| p.into()).collect::<Vec<PathBuf>>();
-
- let verbose = matches.opt_present("verbose");
-
- // User passed in -h/--help?
- if matches.opt_present("help") {
- usage(0, &opts, verbose, &subcommand_help);
- }
-
- let cmd = match subcommand {
- Kind::Build => Subcommand::Build { paths },
- Kind::Check => {
- if matches.opt_present("all-targets") {
- println!(
- "Warning: --all-targets is now on by default and does not need to be passed explicitly."
- );
- }
- Subcommand::Check { paths }
- }
- Kind::Clippy => Subcommand::Clippy {
- paths,
- fix: matches.opt_present("fix"),
- clippy_lint_allow: matches.opt_strs("A"),
- clippy_lint_warn: matches.opt_strs("W"),
- clippy_lint_deny: matches.opt_strs("D"),
- clippy_lint_forbid: matches.opt_strs("F"),
- },
- Kind::Fix => Subcommand::Fix { paths },
- Kind::Test => Subcommand::Test {
- paths,
- bless: matches.opt_present("bless"),
- force_rerun: matches.opt_present("force-rerun"),
- compare_mode: matches.opt_str("compare-mode"),
- pass: matches.opt_str("pass"),
- run: matches.opt_str("run"),
- test_args: matches.opt_strs("test-args"),
- rustc_args: matches.opt_strs("rustc-args"),
- fail_fast: !matches.opt_present("no-fail-fast"),
- rustfix_coverage: matches.opt_present("rustfix-coverage"),
- only_modified: matches.opt_present("only-modified"),
- doc_tests: if matches.opt_present("doc") {
- DocTests::Only
- } else if matches.opt_present("no-doc") {
- DocTests::No
- } else {
- DocTests::Yes
- },
- },
- Kind::Bench => Subcommand::Bench { paths, test_args: matches.opt_strs("test-args") },
- Kind::Doc => Subcommand::Doc {
- paths,
- open: matches.opt_present("open"),
- json: matches.opt_present("json"),
- },
- Kind::Clean => Subcommand::Clean { all: matches.opt_present("all"), paths },
- Kind::Format => Subcommand::Format { check: matches.opt_present("check"), paths },
- Kind::Dist => Subcommand::Dist { paths },
- Kind::Install => Subcommand::Install { paths },
- Kind::Suggest => Subcommand::Suggest { run: matches.opt_present("run") },
- Kind::Run => {
- if paths.is_empty() {
- println!("\nrun requires at least a path!\n");
- usage(1, &opts, verbose, &subcommand_help);
- }
- Subcommand::Run { paths, args: matches.opt_strs("args") }
- }
- Kind::Setup => {
- let profile = if paths.len() > 1 {
- eprintln!("\nerror: At most one option can be passed to setup\n");
- usage(1, &opts, verbose, &subcommand_help)
- } else if let Some(path) = paths.pop() {
- let profile_string = t!(path.into_os_string().into_string().map_err(
- |path| format!("{} is not a valid UTF8 string", path.to_string_lossy())
- ));
-
- let profile = profile_string.parse().unwrap_or_else(|err| {
- eprintln!("error: {}", err);
- eprintln!("help: the available profiles are:");
- eprint!("{}", Profile::all_for_help("- "));
- crate::detail_exit(1);
- });
- Some(profile)
- } else {
- None
- };
- Subcommand::Setup { profile }
- }
- };
-
- Flags {
- verbose: matches.opt_count("verbose"),
- stage: matches.opt_str("stage").map(|j| j.parse().expect("`stage` should be a number")),
- dry_run: matches.opt_present("dry-run"),
- on_fail: matches.opt_str("on-fail"),
- rustc_error_format: matches.opt_str("error-format"),
- json_output: matches.opt_present("json-output"),
- keep_stage: matches
- .opt_strs("keep-stage")
- .into_iter()
- .map(|j| j.parse().expect("`keep-stage` should be a number"))
- .collect(),
- keep_stage_std: matches
- .opt_strs("keep-stage-std")
- .into_iter()
- .map(|j| j.parse().expect("`keep-stage-std` should be a number"))
- .collect(),
- host: if matches.opt_present("host") {
- Some(
- split(&matches.opt_strs("host"))
- .into_iter()
- .map(|x| TargetSelection::from_user(&x))
- .collect::<Vec<_>>(),
- )
- } else {
- None
- },
- target: if matches.opt_present("target") {
- Some(
- split(&matches.opt_strs("target"))
- .into_iter()
- .map(|x| TargetSelection::from_user(&x))
- .collect::<Vec<_>>(),
- )
- } else {
- None
- },
- config: matches.opt_str("config").map(PathBuf::from),
- build_dir: matches.opt_str("build-dir").map(PathBuf::from),
- jobs: matches.opt_str("jobs").map(|j| j.parse().expect("`jobs` should be a number")),
- cmd,
- incremental: matches.opt_present("incremental"),
- exclude: split(&matches.opt_strs("exclude"))
- .into_iter()
- .map(|p| p.into())
- .collect::<Vec<_>>(),
- include_default_paths: matches.opt_present("include-default-paths"),
- deny_warnings: parse_deny_warnings(&matches),
- color: matches
- .opt_get_default("color", Color::Auto)
- .expect("`color` should be `always`, `never`, or `auto`"),
- rust_profile_use: matches.opt_str("rust-profile-use"),
- rust_profile_generate: matches.opt_str("rust-profile-generate"),
- llvm_profile_use: matches.opt_str("llvm-profile-use"),
- llvm_profile_generate: matches.opt_present("llvm-profile-generate"),
- llvm_bolt_profile_generate: matches.opt_present("llvm-bolt-profile-generate"),
- llvm_bolt_profile_use: matches.opt_str("llvm-bolt-profile-use"),
- free_args,
- }
- }
+ ./x.py setup link", Profile::all_for_help(" ").trim_end()))]
+ Setup {
+ /// Either the profile for `config.toml` or another setup action.
+ /// May be omitted to set up interactively
+ #[arg(value_name = "<PROFILE>|hook|vscode|link")]
+ profile: Option<PathBuf>,
+ },
+ /// Suggest a subset of tests to run, based on modified files
+ #[clap(long_about = "\n")]
+ Suggest {
+ /// run suggested tests
+ #[arg(long)]
+ run: bool,
+ },
}
impl Subcommand {
@@ -745,15 +434,6 @@ impl Subcommand {
}
}
- pub fn test_args(&self) -> Vec<&str> {
- match *self {
- Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => {
- test_args.iter().flat_map(|s| s.split_whitespace()).collect()
- }
- _ => vec![],
- }
- }
-
pub fn rustc_args(&self) -> Vec<&str> {
match *self {
Subcommand::Test { ref rustc_args, .. } => {
@@ -763,25 +443,24 @@ impl Subcommand {
}
}
- pub fn args(&self) -> Vec<&str> {
- match *self {
- Subcommand::Run { ref args, .. } => {
- args.iter().flat_map(|s| s.split_whitespace()).collect()
- }
- _ => vec![],
- }
- }
-
pub fn fail_fast(&self) -> bool {
match *self {
- Subcommand::Test { fail_fast, .. } => fail_fast,
+ Subcommand::Test { no_fail_fast, .. } => !no_fail_fast,
_ => false,
}
}
pub fn doc_tests(&self) -> DocTests {
match *self {
- Subcommand::Test { doc_tests, .. } => doc_tests,
+ Subcommand::Test { doc, no_doc, .. } => {
+ if doc {
+ DocTests::Only
+ } else if no_doc {
+ DocTests::No
+ } else {
+ DocTests::Yes
+ }
+ }
_ => DocTests::Yes,
}
}
@@ -850,18 +529,22 @@ impl Subcommand {
}
}
-fn split(s: &[String]) -> Vec<String> {
- s.iter().flat_map(|s| s.split(',')).filter(|s| !s.is_empty()).map(|s| s.to_string()).collect()
-}
-
-fn parse_deny_warnings(matches: &getopts::Matches) -> Option<bool> {
- match matches.opt_str("warnings").as_deref() {
- Some("deny") => Some(true),
- Some("warn") => Some(false),
- Some(value) => {
- eprintln!(r#"invalid value for --warnings: {:?}, expected "warn" or "deny""#, value,);
- crate::detail_exit(1);
- }
- None => None,
+/// Returns the shell completion for a given shell, if the result differs from the current
+/// content of `path`. If `path` does not exist, always returns `Some`.
+pub fn get_completion<G: clap_complete::Generator>(shell: G, path: &Path) -> Option<String> {
+ let mut cmd = Flags::command();
+ let current = if !path.exists() {
+ String::new()
+ } else {
+ std::fs::read_to_string(path).unwrap_or_else(|_| {
+ eprintln!("couldn't read {}", path.display());
+ crate::detail_exit(1)
+ })
+ };
+ let mut buf = Vec::new();
+ clap_complete::generate(shell, &mut cmd, "x.py", &mut buf);
+ if buf == current.as_bytes() {
+ return None;
}
+ Some(String::from_utf8(buf).expect("completion script should be UTF-8"))
}
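For reference, a minimal standalone sketch of the clap_complete flow that the new `get_completion` helper wraps, assuming (as elsewhere in this patch) that `Flags` derives `clap::Parser`; the helper name below is illustrative and not part of the patch:

    use clap::CommandFactory;

    // Illustrative only: report whether the generated Bash completion script
    // matches what is already on disk, mirroring the comparison that
    // `get_completion` performs before returning Some(..).
    fn bash_completion_up_to_date(path: &std::path::Path) -> bool {
        let mut cmd = Flags::command();
        let mut buf = Vec::new();
        clap_complete::generate(clap_complete::shells::Bash, &mut cmd, "x.py", &mut buf);
        std::fs::read(path).map_or(false, |on_disk| on_disk == buf)
    }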
diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs
index b79969663..d8d3f300a 100644
--- a/src/bootstrap/format.rs
+++ b/src/bootstrap/format.rs
@@ -145,10 +145,8 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
let untracked_paths = untracked_paths_output
.lines()
.filter(|entry| entry.starts_with("??"))
- .filter_map(|entry| {
- let path =
- entry.split(' ').nth(1).expect("every git status entry should list a path");
- path.ends_with(".rs").then_some(path)
+ .map(|entry| {
+ entry.split(' ').nth(1).expect("every git status entry should list a path")
});
for untracked_path in untracked_paths {
println!("skip untracked path {} during rustfmt invocations", untracked_path);
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index 42d895a34..b62aa9992 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -12,7 +12,7 @@ use crate::util::t;
use crate::dist;
use crate::tarball::GeneratedTarball;
-use crate::Compiler;
+use crate::{Compiler, Kind};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::config::{Config, TargetSelection};
@@ -52,7 +52,7 @@ fn install_sh(
host: Option<TargetSelection>,
tarball: &GeneratedTarball,
) {
- builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));
+ let _guard = builder.msg(Kind::Install, stage, package, host, host);
let prefix = default_path(&builder.config.prefix, "/usr/local");
let sysconfdir = prefix.join(default_path(&builder.config.sysconfdir, "/etc"));
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 419bcbc63..943f51341 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -19,13 +19,14 @@
use std::cell::{Cell, RefCell};
use std::collections::{HashMap, HashSet};
use std::env;
+use std::fmt::Display;
use std::fs::{self, File};
use std::io;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::str;
-use build_helper::ci::CiEnv;
+use build_helper::ci::{gha, CiEnv};
use channel::GitInfo;
use config::{DryRun, Target};
use filetime::FileTime;
@@ -130,8 +131,7 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
/* Extra values not defined in the built-in targets yet, but used in std */
(Some(Mode::Std), "target_env", Some(&["libnx"])),
// (Some(Mode::Std), "target_os", Some(&[])),
- // #[cfg(bootstrap)] loongarch64
- (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa", "loongarch64"])),
+ (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])),
/* Extra names used by dependencies */
// FIXME: Used by serde_json, but we should not be triggering on external dependencies.
(Some(Mode::Rustc), "no_btreemap_remove_entry", None),
@@ -151,8 +151,6 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
// Needed to avoid the need to copy windows.lib into the sysroot.
(Some(Mode::Rustc), "windows_raw_dylib", None),
(Some(Mode::ToolRustc), "windows_raw_dylib", None),
- // #[cfg(bootstrap)] ohos
- (Some(Mode::Std), "target_env", Some(&["ohos"])),
];
/// A structure representing a Rust compiler.
@@ -238,8 +236,6 @@ pub struct Build {
ci_env: CiEnv,
delayed_failures: RefCell<Vec<String>>,
prerelease_version: Cell<Option<u32>>,
- tool_artifacts:
- RefCell<HashMap<TargetSelection, HashMap<String, (&'static str, PathBuf, Vec<String>)>>>,
#[cfg(feature = "build-metrics")]
metrics: metrics::BuildMetrics,
@@ -250,6 +246,7 @@ struct Crate {
name: Interned<String>,
deps: HashSet<Interned<String>>,
path: PathBuf,
+ has_lib: bool,
}
impl Crate {
@@ -417,6 +414,7 @@ impl Build {
bootstrap_out.display()
)
}
+ config.check_build_rustc_version();
if rust_info.is_from_tarball() && config.description.is_none() {
config.description = Some("built from a source tarball".to_owned());
@@ -458,7 +456,6 @@ impl Build {
ci_env: CiEnv::current(),
delayed_failures: RefCell::new(Vec::new()),
prerelease_version: Cell::new(None),
- tool_artifacts: Default::default(),
#[cfg(feature = "build-metrics")]
metrics: metrics::BuildMetrics::init(),
@@ -664,8 +661,8 @@ impl Build {
// hardcoded subcommands
match &self.config.cmd {
- Subcommand::Format { check, paths } => {
- return format::format(&builder::Builder::new(&self), *check, &paths);
+ Subcommand::Format { check } => {
+ return format::format(&builder::Builder::new(&self), *check, &self.config.paths);
}
Subcommand::Suggest { run } => {
return suggest::suggest(&builder::Builder::new(&self), *run);
@@ -996,6 +993,89 @@ impl Build {
}
}
+ fn msg_check(
+ &self,
+ what: impl Display,
+ target: impl Into<Option<TargetSelection>>,
+ ) -> Option<gha::Group> {
+ self.msg(Kind::Check, self.config.stage, what, self.config.build, target)
+ }
+
+ fn msg_build(
+ &self,
+ compiler: Compiler,
+ what: impl Display,
+ target: impl Into<Option<TargetSelection>>,
+ ) -> Option<gha::Group> {
+ self.msg(Kind::Build, compiler.stage, what, compiler.host, target)
+ }
+
+ /// Return a `Group` guard for a [`Step`] that is built for each `--stage`.
+ ///
+ /// [`Step`]: crate::builder::Step
+ fn msg(
+ &self,
+ action: impl Into<Kind>,
+ stage: u32,
+ what: impl Display,
+ host: impl Into<Option<TargetSelection>>,
+ target: impl Into<Option<TargetSelection>>,
+ ) -> Option<gha::Group> {
+ let action = action.into();
+ let msg = |fmt| format!("{action:?}ing stage{stage} {what}{fmt}");
+ let msg = if let Some(target) = target.into() {
+ let host = host.into().unwrap();
+ if host == target {
+ msg(format_args!(" ({target})"))
+ } else {
+ msg(format_args!(" ({host} -> {target})"))
+ }
+ } else {
+ msg(format_args!(""))
+ };
+ self.group(&msg)
+ }
+
+ /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`.
+ ///
+ /// [`Step`]: crate::builder::Step
+ fn msg_unstaged(
+ &self,
+ action: impl Into<Kind>,
+ what: impl Display,
+ target: TargetSelection,
+ ) -> Option<gha::Group> {
+ let action = action.into();
+ let msg = format!("{action:?}ing {what} for {target}");
+ self.group(&msg)
+ }
+
+ fn msg_sysroot_tool(
+ &self,
+ action: impl Into<Kind>,
+ stage: u32,
+ what: impl Display,
+ host: TargetSelection,
+ target: TargetSelection,
+ ) -> Option<gha::Group> {
+ let action = action.into();
+ let msg = |fmt| format!("{action:?}ing {what} {fmt}");
+ let msg = if host == target {
+ msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1))
+ } else {
+ msg(format_args!("(stage{stage}:{host} -> stage{}:{target})", stage + 1))
+ };
+ self.group(&msg)
+ }
+
+ fn group(&self, msg: &str) -> Option<gha::Group> {
+ self.info(&msg);
+ match self.config.dry_run {
+ DryRun::SelfCheck => None,
+ DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)),
+ }
+ }
+
/// Returns the number of parallel jobs that have been configured for this
/// build.
fn jobs(&self) -> u32 {
@@ -1248,7 +1328,7 @@ impl Build {
match &self.config.channel[..] {
"stable" => num.to_string(),
"beta" => {
- if self.rust_info().is_managed_git_subrepository() && !self.config.omit_git_hash {
+ if !self.config.omit_git_hash {
format!("{}-beta.{}", num, self.beta_prerelease_version())
} else {
format!("{}-beta", num)
@@ -1260,18 +1340,28 @@ impl Build {
}
fn beta_prerelease_version(&self) -> u32 {
+ fn extract_beta_rev_from_file<P: AsRef<Path>>(version_file: P) -> Option<String> {
+ let version = fs::read_to_string(version_file).ok()?;
+
+ extract_beta_rev(&version)
+ }
+
if let Some(s) = self.prerelease_version.get() {
return s;
}
- // Figure out how many merge commits happened since we branched off master.
- // That's our beta number!
- // (Note that we use a `..` range, not the `...` symmetric difference.)
- let count =
+ // First check if there is a version file available.
+ // If available, we read the beta revision from that file.
+ // This only happens when building from a source tarball when Git should not be used.
+ let count = extract_beta_rev_from_file(self.src.join("version")).unwrap_or_else(|| {
+ // Figure out how many merge commits happened since we branched off master.
+ // That's our beta number!
+ // (Note that we use a `..` range, not the `...` symmetric difference.)
output(self.config.git().arg("rev-list").arg("--count").arg("--merges").arg(format!(
"refs/remotes/origin/{}..HEAD",
self.config.stage0_metadata.config.nightly_branch
- )));
+ )))
+ });
let n = count.trim().parse().unwrap();
self.prerelease_version.set(Some(n));
n
@@ -1631,6 +1721,17 @@ to download LLVM rather than building it.
}
}
+/// Extract the beta revision from the full version string.
+///
+/// The full version string looks like "a.b.c-beta.y". And we need to extract
+/// the "y" part from the string.
+pub fn extract_beta_rev(version: &str) -> Option<String> {
+ let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
+ let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));
+
+ count
+}
+
#[cfg(unix)]
fn chmod(path: &Path, perms: u32) {
use std::os::unix::fs::*;
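A quick worked example of the new `extract_beta_rev` helper; the version strings below are made-up samples of the "a.b.c-beta.y" format its doc comment describes:

    // Contents of the `version` file in a beta source tarball (hash/date are hypothetical):
    assert_eq!(
        extract_beta_rev("1.71.0-beta.3 (a2b1646c5 2023-05-06)"),
        Some("3".to_string())
    );
    // Stable/nightly version strings carry no "-beta." marker, so nothing is extracted:
    assert_eq!(extract_beta_rev("1.71.0 (a2b1646c5 2023-05-06)"), None);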
diff --git a/src/bootstrap/llvm.rs b/src/bootstrap/llvm.rs
index a893c3a47..040a12f5d 100644
--- a/src/bootstrap/llvm.rs
+++ b/src/bootstrap/llvm.rs
@@ -21,7 +21,7 @@ use crate::channel;
use crate::config::{Config, TargetSelection};
use crate::util::get_clang_cl_resource_dir;
use crate::util::{self, exe, output, t, up_to_date};
-use crate::{CLang, GitRepo};
+use crate::{CLang, GitRepo, Kind};
use build_helper::ci::CiEnv;
@@ -185,6 +185,7 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool {
("arm-unknown-linux-gnueabi", false),
("arm-unknown-linux-gnueabihf", false),
("armv7-unknown-linux-gnueabihf", false),
+ ("loongarch64-unknown-linux-gnu", false),
("mips-unknown-linux-gnu", false),
("mips64-unknown-linux-gnuabi64", false),
("mips64el-unknown-linux-gnuabi64", false),
@@ -271,7 +272,7 @@ impl Step for Llvm {
panic!("shared linking to LLVM is not currently supported on {}", target.triple);
}
- builder.info(&format!("Building LLVM for {}", target));
+ let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target);
t!(stamp.remove());
let _time = util::timeit(&builder);
t!(fs::create_dir_all(&out_dir));
@@ -813,7 +814,7 @@ impl Step for Lld {
return out_dir;
}
- builder.info(&format!("Building LLD for {}", target));
+ let _guard = builder.msg_unstaged(Kind::Build, "LLD", target);
let _time = util::timeit(&builder);
t!(fs::create_dir_all(&out_dir));
@@ -911,7 +912,7 @@ impl Step for Sanitizers {
return runtimes;
}
- builder.info(&format!("Building sanitizers for {}", self.target));
+ let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target);
t!(stamp.remove());
let _time = util::timeit(&builder);
@@ -1103,7 +1104,7 @@ impl Step for CrtBeginEnd {
return out_dir;
}
- builder.info("Building crtbegin.o and crtend.o");
+ let _guard = builder.msg_unstaged(Kind::Build, "crtbegin.o and crtend.o", self.target);
t!(fs::create_dir_all(&out_dir));
let mut cfg = cc::Build::new();
@@ -1153,7 +1154,7 @@ impl Step for Libunwind {
run.builder.ensure(Libunwind { target: run.target });
}
- /// Build linunwind.a
+ /// Build libunwind.a
fn run(self, builder: &Builder<'_>) -> Self::Output {
builder.update_submodule(&Path::new("src/llvm-project"));
@@ -1168,7 +1169,7 @@ impl Step for Libunwind {
return out_dir;
}
- builder.info(&format!("Building libunwind.a for {}", self.target.triple));
+ let _guard = builder.msg_unstaged(Kind::Build, "libunwind.a", self.target);
t!(fs::create_dir_all(&out_dir));
let mut cc_cfg = cc::Build::new();
diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs
index bba4d65e8..8f2c3faca 100644
--- a/src/bootstrap/metadata.rs
+++ b/src/bootstrap/metadata.rs
@@ -5,40 +5,43 @@ use serde_derive::Deserialize;
use crate::cache::INTERNER;
use crate::util::output;
-use crate::{Build, Crate};
+use crate::{t, Build, Crate};
-#[derive(Deserialize)]
+/// For more information, see the output of
+/// <https://doc.rust-lang.org/nightly/cargo/commands/cargo-metadata.html>
+#[derive(Debug, Deserialize)]
struct Output {
packages: Vec<Package>,
}
-#[derive(Deserialize)]
+/// For more information, see the output of
+/// <https://doc.rust-lang.org/nightly/cargo/commands/cargo-metadata.html>
+#[derive(Debug, Deserialize)]
struct Package {
name: String,
source: Option<String>,
manifest_path: String,
dependencies: Vec<Dependency>,
+ targets: Vec<Target>,
}
-#[derive(Deserialize)]
+/// For more information, see the output of
+/// <https://doc.rust-lang.org/nightly/cargo/commands/cargo-metadata.html>
+#[derive(Debug, Deserialize)]
struct Dependency {
name: String,
source: Option<String>,
}
+#[derive(Debug, Deserialize)]
+struct Target {
+ kind: Vec<String>,
+}
+
+/// Collects and stores the package metadata of each workspace member into `build`,
+/// by executing `cargo metadata` commands.
pub fn build(build: &mut Build) {
- // Run `cargo metadata` to figure out what crates we're testing.
- let mut cargo = Command::new(&build.initial_cargo);
- cargo
- .arg("metadata")
- .arg("--format-version")
- .arg("1")
- .arg("--no-deps")
- .arg("--manifest-path")
- .arg(build.src.join("Cargo.toml"));
- let output = output(&mut cargo);
- let output: Output = serde_json::from_str(&output).unwrap();
- for package in output.packages {
+ for package in workspace_members(build) {
if package.source.is_none() {
let name = INTERNER.intern_string(package.name);
let mut path = PathBuf::from(package.manifest_path);
@@ -49,11 +52,47 @@ pub fn build(build: &mut Build) {
.filter(|dep| dep.source.is_none())
.map(|dep| INTERNER.intern_string(dep.name))
.collect();
- let krate = Crate { name, deps, path };
+ let has_lib = package.targets.iter().any(|t| t.kind.iter().any(|k| k == "lib"));
+ let krate = Crate { name, deps, path, has_lib };
let relative_path = krate.local_path(build);
build.crates.insert(name, krate);
let existing_path = build.crate_paths.insert(relative_path, name);
- assert!(existing_path.is_none(), "multiple crates with the same path");
+ assert!(
+ existing_path.is_none(),
+ "multiple crates with the same path: {}",
+ existing_path.unwrap()
+ );
}
}
}
+
+/// Invokes `cargo metadata` to get package metadata of each workspace member.
+///
+/// Note that `src/tools/cargo` is no longer a workspace member but we still
+/// treat it as one here, by invoking an additional `cargo metadata` command.
+fn workspace_members(build: &Build) -> impl Iterator<Item = Package> {
+ let collect_metadata = |manifest_path| {
+ let mut cargo = Command::new(&build.initial_cargo);
+ cargo
+ .arg("metadata")
+ .arg("--format-version")
+ .arg("1")
+ .arg("--no-deps")
+ .arg("--manifest-path")
+ .arg(build.src.join(manifest_path));
+ let metadata_output = output(&mut cargo);
+ let Output { packages, .. } = t!(serde_json::from_str(&metadata_output));
+ packages
+ };
+
+ // Collects `metadata.packages` from all workspaces.
+ let packages = collect_metadata("Cargo.toml");
+ let cargo_packages = collect_metadata("src/tools/cargo/Cargo.toml");
+ let ra_packages = collect_metadata("src/tools/rust-analyzer/Cargo.toml");
+ let bootstrap_packages = collect_metadata("src/bootstrap/Cargo.toml");
+
+ // We only care about the root package from the `src/tools/cargo` workspace.
+ let cargo_package = cargo_packages.into_iter().find(|pkg| pkg.name == "cargo").into_iter();
+
+ packages.into_iter().chain(cargo_package).chain(ra_packages).chain(bootstrap_packages)
+}
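For orientation, an abbreviated and purely illustrative sketch of the `cargo metadata --format-version 1 --no-deps` output these structs deserialize (field values invented):

    // {
    //   "packages": [{
    //     "name": "bootstrap",
    //     "source": null,                          // null => in-tree path package
    //     "manifest_path": ".../src/bootstrap/Cargo.toml",
    //     "dependencies": [{ "name": "build_helper", "source": null }],
    //     "targets": [{ "kind": ["lib"] }, { "kind": ["bin"] }]
    //   }]
    // }
    //
    // `has_lib` becomes true whenever any entry in `targets` lists "lib" among its kinds.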
diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs
index 82b123ec8..5990f33b9 100644
--- a/src/bootstrap/metrics.rs
+++ b/src/bootstrap/metrics.rs
@@ -4,7 +4,7 @@
//! As this module requires additional dependencies not present during local builds, it's cfg'd
//! away whenever the `build.metrics` config option is not set to `true`.
-use crate::builder::Step;
+use crate::builder::{Builder, Step};
use crate::util::t;
use crate::Build;
use serde_derive::{Deserialize, Serialize};
@@ -14,6 +14,25 @@ use std::io::BufWriter;
use std::time::{Duration, Instant, SystemTime};
use sysinfo::{CpuExt, System, SystemExt};
+// Update this number whenever a breaking change is made to the build metrics.
+//
+// The output format is versioned for two reasons:
+//
+// - The metadata is intended to be consumed by external tooling, and exposing a format version
+// helps the tools determine whether they're compatible with a metrics file.
+//
+// - If a developer enables build metrics in their local checkout, making a breaking change to the
+// metrics format would result in a hard-to-diagnose error message when an existing metrics file
+// is not compatible with the new changes. With a format version number, bootstrap can discard
+// incompatible metrics files instead of appending metrics to them.
+//
+// Version changelog:
+//
+// - v0: initial version
+// - v1: replaced JsonNode::Test with JsonNode::TestSuite
+//
+const CURRENT_FORMAT_VERSION: usize = 1;
+
pub(crate) struct BuildMetrics {
state: RefCell<MetricsState>,
}
@@ -33,7 +52,12 @@ impl BuildMetrics {
BuildMetrics { state }
}
- pub(crate) fn enter_step<S: Step>(&self, step: &S) {
+ pub(crate) fn enter_step<S: Step>(&self, step: &S, builder: &Builder<'_>) {
+ // Do not record dry runs, as they'd be duplicates of the actual steps.
+ if builder.config.dry_run() {
+ return;
+ }
+
let mut state = self.state.borrow_mut();
// Consider all the stats gathered so far as the parent's.
@@ -52,11 +76,16 @@ impl BuildMetrics {
duration_excluding_children_sec: Duration::ZERO,
children: Vec::new(),
- tests: Vec::new(),
+ test_suites: Vec::new(),
});
}
- pub(crate) fn exit_step(&self) {
+ pub(crate) fn exit_step(&self, builder: &Builder<'_>) {
+ // Do not record dry runs, as they'd be duplicates of the actual steps.
+ if builder.config.dry_run() {
+ return;
+ }
+
let mut state = self.state.borrow_mut();
self.collect_stats(&mut *state);
@@ -74,14 +103,31 @@ impl BuildMetrics {
}
}
- pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome) {
+ pub(crate) fn begin_test_suite(&self, metadata: TestSuiteMetadata, builder: &Builder<'_>) {
+ // Do not record dry runs, as they'd be duplicates of the actual steps.
+ if builder.config.dry_run() {
+ return;
+ }
+
+ let mut state = self.state.borrow_mut();
+ let step = state.running_steps.last_mut().unwrap();
+ step.test_suites.push(TestSuite { metadata, tests: Vec::new() });
+ }
+
+ pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Builder<'_>) {
+ // Do not record dry runs, as they'd be duplicates of the actual steps.
+ if builder.config.dry_run() {
+ return;
+ }
+
let mut state = self.state.borrow_mut();
- state
- .running_steps
- .last_mut()
- .unwrap()
- .tests
- .push(Test { name: name.to_string(), outcome });
+ let step = state.running_steps.last_mut().unwrap();
+
+ if let Some(test_suite) = step.test_suites.last_mut() {
+ test_suite.tests.push(Test { name: name.to_string(), outcome });
+ } else {
+ panic!("metrics.record_test() called without calling metrics.begin_test_suite() first");
+ }
}
fn collect_stats(&self, state: &mut MetricsState) {
@@ -116,7 +162,20 @@ impl BuildMetrics {
// Some of our CI builds consist of multiple independent CI invocations. Ensure all the
// previous invocations are still present in the resulting file.
let mut invocations = match std::fs::read(&dest) {
- Ok(contents) => t!(serde_json::from_slice::<JsonRoot>(&contents)).invocations,
+ Ok(contents) => {
+ // We first parse just the format_version field to have the check succeed even if
+ // the rest of the contents are not valid anymore.
+ let version: OnlyFormatVersion = t!(serde_json::from_slice(&contents));
+ if version.format_version == CURRENT_FORMAT_VERSION {
+ t!(serde_json::from_slice::<JsonRoot>(&contents)).invocations
+ } else {
+ println!(
+ "warning: overriding existing build/metrics.json, as it's not \
+ compatible with build metrics format version {CURRENT_FORMAT_VERSION}."
+ );
+ Vec::new()
+ }
+ }
Err(err) => {
if err.kind() != std::io::ErrorKind::NotFound {
panic!("failed to open existing metrics file at {}: {err}", dest.display());
@@ -134,7 +193,7 @@ impl BuildMetrics {
children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(),
});
- let json = JsonRoot { system_stats, invocations };
+ let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations };
t!(std::fs::create_dir_all(dest.parent().unwrap()));
let mut file = BufWriter::new(t!(File::create(&dest)));
@@ -144,11 +203,7 @@ impl BuildMetrics {
fn prepare_json_step(&self, step: StepMetrics) -> JsonNode {
let mut children = Vec::new();
children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child)));
- children.extend(
- step.tests
- .into_iter()
- .map(|test| JsonNode::Test { name: test.name, outcome: test.outcome }),
- );
+ children.extend(step.test_suites.into_iter().map(JsonNode::TestSuite));
JsonNode::RustbuildStep {
type_: step.type_,
@@ -183,17 +238,14 @@ struct StepMetrics {
duration_excluding_children_sec: Duration,
children: Vec<StepMetrics>,
- tests: Vec<Test>,
-}
-
-struct Test {
- name: String,
- outcome: TestOutcome,
+ test_suites: Vec<TestSuite>,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
struct JsonRoot {
+ #[serde(default)] // For version 0 the field was not present.
+ format_version: usize,
system_stats: JsonInvocationSystemStats,
invocations: Vec<JsonInvocation>,
}
@@ -222,14 +274,42 @@ enum JsonNode {
children: Vec<JsonNode>,
},
- Test {
- name: String,
- #[serde(flatten)]
- outcome: TestOutcome,
+ TestSuite(TestSuite),
+}
+
+#[derive(Serialize, Deserialize)]
+struct TestSuite {
+ metadata: TestSuiteMetadata,
+ tests: Vec<Test>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub(crate) enum TestSuiteMetadata {
+ CargoPackage {
+ crates: Vec<String>,
+ target: String,
+ host: String,
+ stage: u32,
+ },
+ Compiletest {
+ suite: String,
+ mode: String,
+ compare_mode: Option<String>,
+ target: String,
+ host: String,
+ stage: u32,
},
}
#[derive(Serialize, Deserialize)]
+pub(crate) struct Test {
+ name: String,
+ #[serde(flatten)]
+ outcome: TestOutcome,
+}
+
+#[derive(Serialize, Deserialize)]
#[serde(tag = "outcome", rename_all = "snake_case")]
pub(crate) enum TestOutcome {
Passed,
@@ -251,3 +331,9 @@ struct JsonInvocationSystemStats {
struct JsonStepSystemStats {
cpu_utilization_percent: f64,
}
+
+#[derive(Deserialize)]
+struct OnlyFormatVersion {
+ #[serde(default)] // For version 0 the field was not present.
+ format_version: usize,
+}
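The version check above follows a small serde pattern: probe only the discriminating field first, and attempt the full parse only when the version matches. A standalone sketch under assumed names (these are not the bootstrap types):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct VersionProbe {
        #[serde(default)] // files written before the field existed parse as version 0
        format_version: usize,
    }

    fn parse_if_compatible<T: serde::de::DeserializeOwned>(
        contents: &[u8],
        expected: usize,
    ) -> Option<T> {
        let probe: VersionProbe = serde_json::from_slice(contents).ok()?;
        if probe.format_version != expected {
            return None; // caller falls back to an empty invocation list instead of erroring
        }
        serde_json::from_slice(contents).ok()
    }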
diff --git a/src/bootstrap/render_tests.rs b/src/bootstrap/render_tests.rs
index 19019ad2c..fa0a48066 100644
--- a/src/bootstrap/render_tests.rs
+++ b/src/bootstrap/render_tests.rs
@@ -1,7 +1,7 @@
//! This module renders the JSON output of libtest into a human-readable form, trying to be as
//! similar to libtest's native output as possible.
//!
-//! This is needed because we need to use libtest in JSON mode to extract granluar information
+//! This is needed because we need to use libtest in JSON mode to extract granular information
//! about the executed tests. Doing so suppresses the human-readable output, and (compared to Cargo
//! and rustc) libtest doesn't include the rendered human-readable output as a JSON field. We had
//! to reimplement all the rendering logic in this module because of that.
@@ -124,6 +124,7 @@ impl<'a> Renderer<'a> {
ignore_reason: reason.map(|s| s.to_string()),
},
},
+ self.builder,
);
if self.builder.config.verbose_tests {
@@ -167,9 +168,14 @@ impl<'a> Renderer<'a> {
if !self.failures.is_empty() {
println!("\nfailures:\n");
for failure in &self.failures {
- if let Some(stdout) = &failure.stdout {
+ if failure.stdout.is_some() || failure.message.is_some() {
println!("---- {} stdout ----", failure.name);
- println!("{stdout}");
+ if let Some(stdout) = &failure.stdout {
+ println!("{stdout}");
+ }
+ if let Some(message) = &failure.message {
+ println!("note: {message}");
+ }
}
}
diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs
index e14440f57..ec01f744b 100644
--- a/src/bootstrap/run.rs
+++ b/src/bootstrap/run.rs
@@ -1,9 +1,12 @@
use std::path::PathBuf;
use std::process::Command;
+use clap_complete::shells;
+
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::config::TargetSelection;
use crate::dist::distdir;
+use crate::flags::get_completion;
use crate::test;
use crate::tool::{self, SourceType, Tool};
use crate::util::output;
@@ -105,7 +108,7 @@ impl Step for BumpStage0 {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let mut cmd = builder.tool_cmd(Tool::BumpStage0);
- cmd.args(builder.config.cmd.args());
+ cmd.args(builder.config.args());
builder.run(&mut cmd);
}
}
@@ -182,8 +185,7 @@ impl Step for Miri {
miri.add_rustc_lib_path(builder, compiler);
// Forward arguments.
miri.arg("--").arg("--target").arg(target.rustc_target_arg());
- miri.args(builder.config.cmd.args());
- miri.args(&builder.config.free_args);
+ miri.args(builder.config.args());
// miri tests need to know about the stage sysroot
miri.env("MIRI_SYSROOT", &miri_sysroot);
@@ -254,3 +256,56 @@ impl Step for GenerateCopyright {
dest
}
}
+
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct GenerateWindowsSys;
+
+impl Step for GenerateWindowsSys {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/generate-windows-sys")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(GenerateWindowsSys);
+ }
+
+ fn run(self, builder: &Builder<'_>) {
+ let mut cmd = builder.tool_cmd(Tool::GenerateWindowsSys);
+ cmd.arg(&builder.src);
+ builder.run(&mut cmd);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct GenerateCompletions;
+
+impl Step for GenerateCompletions {
+ type Output = ();
+
+ /// Uses `clap_complete` to generate shell completions.
+ fn run(self, builder: &Builder<'_>) {
+ // FIXME(clubby789): enable zsh when clap#4898 is fixed
+ let [bash, fish, powershell] = ["x.py.sh", "x.py.fish", "x.py.ps1"]
+ .map(|filename| builder.src.join("src/etc/completions").join(filename));
+ if let Some(comp) = get_completion(shells::Bash, &bash) {
+ std::fs::write(&bash, comp).expect("writing bash completion");
+ }
+ if let Some(comp) = get_completion(shells::Fish, &fish) {
+ std::fs::write(&fish, comp).expect("writing fish completion");
+ }
+ if let Some(comp) = get_completion(shells::PowerShell, &powershell) {
+ std::fs::write(&powershell, comp).expect("writing powershell completion");
+ }
+ }
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.alias("generate-completions")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(GenerateCompletions);
+ }
+}
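Usage note: with the `GenerateCompletions` step registered under the `generate-completions` alias, `./x.py run generate-completions` regenerates the scripts under `src/etc/completions`, and the tidy check added later in this diff fails if they have gone stale.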
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index 8a40b0f64..140259b02 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -100,7 +100,7 @@ pub fn check(build: &mut Build) {
Couldn't find required command: cmake
You should install cmake, or set `download-ci-llvm = true` in the
-`[llvm]` section section of `config.toml` to download LLVM rather
+`[llvm]` section of `config.toml` to download LLVM rather
than building it.
"
);
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index aedf1ecab..44cd84be7 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -5,15 +5,17 @@
use std::env;
use std::ffi::OsString;
-use std::fmt;
use std::fs;
use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
+use clap_complete::shells;
+
use crate::builder::crate_description;
use crate::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
use crate::cache::Interned;
+use crate::cache::INTERNER;
use crate::compile;
use crate::config::TargetSelection;
use crate::dist;
@@ -28,44 +30,6 @@ use crate::{envify, CLang, DocTests, GitRepo, Mode};
const ADB_TEST_DIR: &str = "/data/local/tmp/work";
-/// The two modes of the test runner; tests or benchmarks.
-#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
-pub enum TestKind {
- /// Run `cargo test`.
- Test,
- /// Run `cargo bench`.
- Bench,
-}
-
-impl From<Kind> for TestKind {
- fn from(kind: Kind) -> Self {
- match kind {
- Kind::Test => TestKind::Test,
- Kind::Bench => TestKind::Bench,
- _ => panic!("unexpected kind in crate: {:?}", kind),
- }
- }
-}
-
-impl TestKind {
- // Return the cargo subcommand for this test kind
- fn subcommand(self) -> &'static str {
- match self {
- TestKind::Test => "test",
- TestKind::Bench => "bench",
- }
- }
-}
-
-impl fmt::Display for TestKind {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.write_str(match *self {
- TestKind::Test => "Testing",
- TestKind::Bench => "Benchmarking",
- })
- }
-}
-
fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> bool {
if !builder.fail_fast {
if !builder.try_run(cmd) {
@@ -93,26 +57,37 @@ fn try_run_quiet(builder: &Builder<'_>, cmd: &mut Command) -> bool {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct CrateJsonDocLint {
+pub struct CrateBootstrap {
+ path: Interned<PathBuf>,
host: TargetSelection,
}
-impl Step for CrateJsonDocLint {
+impl Step for CrateBootstrap {
type Output = ();
const ONLY_HOSTS: bool = true;
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/jsondoclint")
+ .path("src/tools/suggest-tests")
+ .path("src/tools/replace-version-placeholder")
+ .alias("tidyselftest")
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(CrateJsonDocLint { host: run.target });
+ for path in run.paths {
+ let path = INTERNER.intern_path(path.assert_single_path().path.clone());
+ run.builder.ensure(CrateBootstrap { host: run.target, path });
+ }
}
fn run(self, builder: &Builder<'_>) {
let bootstrap_host = builder.config.build;
let compiler = builder.compiler(0, bootstrap_host);
+ let mut path = self.path.to_str().unwrap();
+ if path == "tidyselftest" {
+ path = "src/tools/tidy";
+ }
let cargo = tool::prepare_tool_cargo(
builder,
@@ -120,47 +95,18 @@ impl Step for CrateJsonDocLint {
Mode::ToolBootstrap,
bootstrap_host,
"test",
- "src/tools/jsondoclint",
+ path,
SourceType::InTree,
&[],
);
- add_flags_and_try_run_tests(builder, &mut cargo.into());
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct SuggestTestsCrate {
- host: TargetSelection,
-}
-
-impl Step for SuggestTestsCrate {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.path("src/tools/suggest-tests")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(SuggestTestsCrate { host: run.target });
- }
-
- fn run(self, builder: &Builder<'_>) {
- let bootstrap_host = builder.config.build;
- let compiler = builder.compiler(0, bootstrap_host);
-
- let suggest_tests = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolBootstrap,
+ builder.info(&format!(
+ "{} {} stage0 ({})",
+ builder.kind.test_description(),
+ path,
bootstrap_host,
- "test",
- "src/tools/suggest-tests",
- SourceType::InTree,
- &[],
- );
- add_flags_and_try_run_tests(builder, &mut suggest_tests.into());
+ ));
+ let crate_name = path.rsplit_once('/').unwrap().1;
+ run_cargo_test(cargo, &[], &[], crate_name, compiler, bootstrap_host, builder);
}
}
@@ -209,7 +155,11 @@ You can skip linkcheck with --exclude src/tools/linkchecker"
SourceType::InTree,
&[],
);
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "linkchecker", compiler, bootstrap_host, builder);
+
+ if builder.doc_tests == DocTests::No {
+ return;
+ }
// Build all the default documentation.
builder.default_doc(&[]);
@@ -315,7 +265,7 @@ impl Step for Cargotest {
builder,
cmd.arg(&cargo)
.arg(&out_dir)
- .args(builder.config.cmd.test_args())
+ .args(builder.config.test_args())
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler)),
);
@@ -345,7 +295,7 @@ impl Step for Cargo {
let compiler = builder.compiler(self.stage, self.host);
builder.ensure(tool::Cargo { compiler, target: self.host });
- let mut cargo = tool::prepare_tool_cargo(
+ let cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
@@ -356,10 +306,8 @@ impl Step for Cargo {
&[],
);
- if !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
- cargo.arg("--").args(builder.config.cmd.test_args());
+ // NOTE: can't use `run_cargo_test` because we need to overwrite `PATH`
+ let mut cargo = prepare_cargo_test(cargo, &[], &[], "cargo", compiler, self.host, builder);
// Don't run cross-compile tests, we may not have cross-compiled libstd libs
// available.
@@ -367,10 +315,21 @@ impl Step for Cargo {
// Forcibly disable tests using nightly features since any changes to
// those features won't be able to land.
cargo.env("CARGO_TEST_DISABLE_NIGHTLY", "1");
-
cargo.env("PATH", &path_for_cargo(builder, compiler));
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::CargoPackage {
+ crates: vec!["cargo".into()],
+ target: self.host.triple.to_string(),
+ host: self.host.triple.to_string(),
+ stage: self.stage,
+ },
+ builder,
+ );
+
+ let _time = util::timeit(&builder);
+ add_flags_and_try_run_tests(builder, &mut cargo);
}
}
@@ -427,9 +386,7 @@ impl Step for RustAnalyzer {
cargo.env("SKIP_SLOW_TESTS", "1");
cargo.add_rustc_lib_path(builder, compiler);
- cargo.arg("--").args(builder.config.cmd.test_args());
-
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "rust-analyzer", compiler, host, builder);
}
}
@@ -472,17 +429,13 @@ impl Step for Rustfmt {
&[],
);
- if !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
let dir = testdir(builder, compiler.host);
t!(fs::create_dir_all(&dir));
cargo.env("RUSTFMT_TEST_DIR", dir);
cargo.add_rustc_lib_path(builder, compiler);
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "rustfmt", compiler, host, builder);
}
}
@@ -528,12 +481,9 @@ impl Step for RustDemangler {
t!(fs::create_dir_all(&dir));
cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler);
-
- cargo.arg("--").args(builder.config.cmd.test_args());
-
cargo.add_rustc_lib_path(builder, compiler);
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "rust-demangler", compiler, host, builder);
}
}
@@ -656,10 +606,6 @@ impl Step for Miri {
);
cargo.add_rustc_lib_path(builder, compiler);
- if !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
// miri tests need to know about the stage sysroot
cargo.env("MIRI_SYSROOT", &miri_sysroot);
cargo.env("MIRI_HOST_SYSROOT", sysroot);
@@ -671,13 +617,31 @@ impl Step for Miri {
// Set the target.
cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg());
- // Forward test filters.
- cargo.arg("--").args(builder.config.cmd.test_args());
- // This can NOT be `add_flags_and_try_run_tests` since the Miri test runner
- // does not understand those flags!
- let mut cargo = Command::from(cargo);
- builder.run(&mut cargo);
+ // This can NOT be `run_cargo_test` since the Miri test runner
+ // does not understand the flags added by `add_flags_and_try_run_tests`.
+ let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
+ {
+ let _time = util::timeit(&builder);
+ builder.run(&mut cargo);
+ }
+
+ // Run it again for mir-opt-level 4 to catch some miscompilations.
+ if builder.config.test_args().is_empty() {
+ cargo.env("MIRIFLAGS", "-O -Zmir-opt-level=4 -Cdebug-assertions=yes");
+ // Optimizations can change backtraces
+ cargo.env("MIRI_SKIP_UI_CHECKS", "1");
+ // `MIRI_SKIP_UI_CHECKS` and `MIRI_BLESS` are incompatible
+ cargo.env_remove("MIRI_BLESS");
+ // Optimizations can change error locations and remove UB so don't run `fail` tests.
+ cargo.args(&["tests/pass", "tests/panic"]);
+
+ let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
+ {
+ let _time = util::timeit(&builder);
+ builder.run(&mut cargo);
+ }
+ }
// # Run `cargo miri test`.
// This is just a smoke test (Miri's own CI invokes this in a bunch of different ways and ensures
@@ -700,7 +664,7 @@ impl Step for Miri {
.arg(builder.src.join("src/tools/miri/test-cargo-miri/Cargo.toml"));
cargo.arg("--target").arg(target.rustc_target_arg());
cargo.arg("--tests"); // don't run doctests, they are too confused by the staging
- cargo.arg("--").args(builder.config.cmd.test_args());
+ cargo.arg("--").args(builder.config.test_args());
// Tell `cargo miri` where to find things.
cargo.env("MIRI_SYSROOT", &miri_sysroot);
@@ -710,7 +674,10 @@ impl Step for Miri {
cargo.env("RUST_BACKTRACE", "1");
let mut cargo = Command::from(cargo);
- builder.run(&mut cargo);
+ {
+ let _time = util::timeit(&builder);
+ builder.run(&mut cargo);
+ }
}
}
@@ -749,8 +716,7 @@ impl Step for CompiletestTest {
&[],
);
cargo.allow_features("test");
-
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "compiletest", compiler, host, builder);
}
}
@@ -793,20 +759,15 @@ impl Step for Clippy {
&[],
);
- if !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir());
cargo.env("HOST_LIBS", host_libs);
- cargo.arg("--").args(builder.config.cmd.test_args());
-
cargo.add_rustc_lib_path(builder, compiler);
+ let mut cargo = prepare_cargo_test(cargo, &[], &[], "clippy", compiler, host, builder);
- if builder.try_run(&mut cargo.into()) {
+ if builder.try_run(&mut cargo) {
// The tests succeeded; nothing to do.
return;
}
@@ -994,28 +955,6 @@ fn get_browser_ui_test_version(npm: &Path) -> Option<String> {
.or_else(|| get_browser_ui_test_version_inner(npm, true))
}
-fn compare_browser_ui_test_version(installed_version: &str, src: &Path) {
- match fs::read_to_string(
- src.join("src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version"),
- ) {
- Ok(v) => {
- if v.trim() != installed_version {
- eprintln!(
- "⚠️ Installed version of browser-ui-test (`{}`) is different than the \
- one used in the CI (`{}`)",
- installed_version, v
- );
- eprintln!(
- "You can install this version using `npm update browser-ui-test` or by using \
- `npm install browser-ui-test@{}`",
- v,
- );
- }
- }
- Err(e) => eprintln!("Couldn't find the CI browser-ui-test version: {:?}", e),
- }
-}
-
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustdocGUI {
pub target: TargetSelection,
@@ -1047,79 +986,30 @@ impl Step for RustdocGUI {
}
fn run(self, builder: &Builder<'_>) {
- let nodejs = builder.config.nodejs.as_ref().expect("nodejs isn't available");
- let npm = builder.config.npm.as_ref().expect("npm isn't available");
-
builder.ensure(compile::Std::new(self.compiler, self.target));
- // The goal here is to check if the necessary packages are installed, and if not, we
- // panic.
- match get_browser_ui_test_version(&npm) {
- Some(version) => {
- // We also check the version currently used in CI and emit a warning if it's not the
- // same one.
- compare_browser_ui_test_version(&version, &builder.build.src);
- }
- None => {
- eprintln!(
- "error: rustdoc-gui test suite cannot be run because npm `browser-ui-test` \
- dependency is missing",
- );
- eprintln!(
- "If you want to install the `{0}` dependency, run `npm install {0}`",
- "browser-ui-test",
- );
- panic!("Cannot run rustdoc-gui tests");
- }
- }
+ let mut cmd = builder.tool_cmd(Tool::RustdocGUITest);
let out_dir = builder.test_out(self.target).join("rustdoc-gui");
-
- // We remove existing folder to be sure there won't be artifacts remaining.
builder.clear_if_dirty(&out_dir, &builder.rustdoc(self.compiler));
- let src_path = builder.build.src.join("tests/rustdoc-gui/src");
- // We generate docs for the libraries present in the rustdoc-gui's src folder.
- for entry in src_path.read_dir().expect("read_dir call failed") {
- if let Ok(entry) = entry {
- let path = entry.path();
+ if let Some(src) = builder.config.src.to_str() {
+ cmd.arg("--rust-src").arg(src);
+ }
- if !path.is_dir() {
- continue;
- }
+ if let Some(out_dir) = out_dir.to_str() {
+ cmd.arg("--out-dir").arg(out_dir);
+ }
- let mut cargo = Command::new(&builder.initial_cargo);
- cargo
- .arg("doc")
- .arg("--target-dir")
- .arg(&out_dir)
- .env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTDOC", builder.rustdoc(self.compiler))
- .env("RUSTC", builder.rustc(self.compiler))
- .current_dir(path);
- // FIXME: implement a `// compile-flags` command or similar
- // instead of hard-coding this test
- if entry.file_name() == "link_to_definition" {
- cargo.env("RUSTDOCFLAGS", "-Zunstable-options --generate-link-to-definition");
- } else if entry.file_name() == "scrape_examples" {
- cargo.arg("-Zrustdoc-scrape-examples");
- } else if entry.file_name() == "extend_css" {
- cargo.env("RUSTDOCFLAGS", &format!("--extend-css extra.css"));
- }
- builder.run(&mut cargo);
- }
+ if let Some(initial_cargo) = builder.config.initial_cargo.to_str() {
+ cmd.arg("--initial-cargo").arg(initial_cargo);
}
- // We now run GUI tests.
- let mut command = Command::new(&nodejs);
- command
- .arg(builder.build.src.join("src/tools/rustdoc-gui/tester.js"))
- .arg("--jobs")
- .arg(&builder.jobs().to_string())
- .arg("--doc-folder")
- .arg(out_dir.join("doc"))
- .arg("--tests-folder")
- .arg(builder.build.src.join("tests/rustdoc-gui"));
+ cmd.arg("--jobs").arg(builder.jobs().to_string());
+
+ cmd.env("RUSTDOC", builder.rustdoc(self.compiler))
+ .env("RUSTC", builder.rustc(self.compiler));
+
for path in &builder.paths {
if let Some(p) = util::is_valid_test_suite_arg(path, "tests/rustdoc-gui", builder) {
if !p.ends_with(".goml") {
@@ -1127,14 +1017,25 @@ impl Step for RustdocGUI {
panic!("Cannot run rustdoc-gui tests");
}
if let Some(name) = path.file_name().and_then(|f| f.to_str()) {
- command.arg("--file").arg(name);
+ cmd.arg("--goml-file").arg(name);
}
}
}
- for test_arg in builder.config.cmd.test_args() {
- command.arg(test_arg);
+
+ for test_arg in builder.config.test_args() {
+ cmd.arg("--test-arg").arg(test_arg);
}
- builder.run(&mut command);
+
+ if let Some(ref nodejs) = builder.config.nodejs {
+ cmd.arg("--nodejs").arg(nodejs);
+ }
+
+ if let Some(ref npm) = builder.config.npm {
+ cmd.arg("--npm").arg(npm);
+ }
+
+ let _time = util::timeit(&builder);
+ crate::render_tests::try_run_tests(builder, &mut cmd);
}
}
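
The rewritten step above stops driving node and npm directly: it builds the new `rustdoc-gui-test` tool and hands everything over through flags (`--rust-src`, `--out-dir`, `--initial-cargo`, `--jobs`, `--goml-file`, `--test-arg`, `--nodejs`, `--npm`) plus the `RUSTDOC`/`RUSTC` environment variables. A minimal sketch of what the receiving side of that interface could look like, assuming clap's derive API; the field names are illustrative and this is not the actual `rustdoc-gui-test` source:

    use clap::Parser;

    /// Hypothetical mirror of the flags the `RustdocGUI` step passes.
    #[derive(Parser)]
    struct Args {
        #[arg(long)]
        rust_src: Option<String>,
        #[arg(long)]
        out_dir: Option<String>,
        #[arg(long)]
        initial_cargo: Option<String>,
        #[arg(long)]
        jobs: String,
        /// Repeated once per selected `.goml` file.
        #[arg(long = "goml-file")]
        goml_files: Vec<String>,
        /// Extra arguments forwarded to the GUI tester.
        #[arg(long = "test-arg")]
        test_args: Vec<String>,
        #[arg(long)]
        nodejs: Option<String>,
        #[arg(long)]
        npm: Option<String>,
    }

    fn main() {
        let args = Args::parse();
        // The step also sets RUSTDOC and RUSTC in the environment.
        let rustdoc = std::env::var("RUSTDOC").ok();
        println!("{} goml file(s), jobs={}, rustdoc={:?}", args.goml_files.len(), args.jobs, rustdoc);
    }
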
@@ -1192,7 +1093,24 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy`
builder.info("tidy check");
try_run(builder, &mut cmd);
- builder.ensure(ExpandYamlAnchors {});
+ builder.ensure(ExpandYamlAnchors);
+
+ builder.info("x.py completions check");
+ let [bash, fish, powershell] = ["x.py.sh", "x.py.fish", "x.py.ps1"]
+ .map(|filename| builder.src.join("src/etc/completions").join(filename));
+ if builder.config.cmd.bless() {
+ builder.ensure(crate::run::GenerateCompletions);
+ } else {
+ if crate::flags::get_completion(shells::Bash, &bash).is_some()
+ || crate::flags::get_completion(shells::Fish, &fish).is_some()
+ || crate::flags::get_completion(shells::PowerShell, &powershell).is_some()
+ {
+ eprintln!(
+ "x.py completions were changed; run `x.py run generate-completions` to update them"
+ );
+ crate::detail_exit(1);
+ }
+ }
}
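
The completions check added above follows bootstrap's usual bless-or-verify pattern: `--bless` regenerates the checked-in `x.py.sh`/`x.py.fish`/`x.py.ps1` files, while a normal run fails if they drift from what would be generated. A self-contained sketch of that pattern with stand-in names (the real comparison lives in `crate::flags::get_completion`):

    use std::fs;
    use std::path::Path;

    /// Stand-in for `get_completion`: returns the generated text when it differs
    /// from (or is missing in) the checked-in file.
    fn outdated(generated: &str, checked_in: &Path) -> Option<String> {
        match fs::read_to_string(checked_in) {
            Ok(current) if current == generated => None,
            _ => Some(generated.to_string()),
        }
    }

    fn check_or_bless(bless: bool, generated: &str, checked_in: &Path) -> Result<(), String> {
        if bless {
            // The `x.py run generate-completions` path: rewrite the file in place.
            fs::write(checked_in, generated).map_err(|e| e.to_string())
        } else if outdated(generated, checked_in).is_some() {
            Err("x.py completions were changed; run `x.py run generate-completions`".into())
        } else {
            Ok(())
        }
    }

    fn main() {
        let path = std::env::temp_dir().join("x.py.sh");
        check_or_bless(true, "# generated", &path).unwrap();
        assert!(check_or_bless(false, "# generated", &path).is_ok());
        assert!(check_or_bless(false, "# edited by hand", &path).is_err());
    }
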
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -1204,40 +1122,6 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy`
}
}
-/// Runs tidy's own tests.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct TidySelfTest;
-
-impl Step for TidySelfTest {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.alias("tidyselftest")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(TidySelfTest);
- }
-
- fn run(self, builder: &Builder<'_>) {
- let bootstrap_host = builder.config.build;
- let compiler = builder.compiler(0, bootstrap_host);
- let cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolBootstrap,
- bootstrap_host,
- "test",
- "src/tools/tidy",
- SourceType::InTree,
- &[],
- );
- add_flags_and_try_run_tests(builder, &mut cargo.into());
- }
-}
-
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;
@@ -1448,7 +1332,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
crate::detail_exit(1);
}
- let compiler = self.compiler;
+ let mut compiler = self.compiler;
let target = self.target;
let mode = self.mode;
let suite = self.suite;
@@ -1461,15 +1345,28 @@ note: if you're sure you want to do this, please open an issue as to why. In the
return;
}
- if suite == "debuginfo" {
- builder
- .ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target });
- }
+ // Support stage 1 ui-fulldeps. This is somewhat complicated: ui-fulldeps tests for the most
+ // part test the *API* of the compiler, not how it compiles a given file. As a result, we
+ // can run them against the stage 1 sources as long as we build them with the stage 0
+ // bootstrap compiler.
+ // NOTE: Only stage 1 is special cased because we need the rustc_private artifacts to match the
+ // running compiler in stage 2 when plugins run.
+ let stage_id = if suite == "ui-fulldeps" && compiler.stage == 1 {
+ compiler = builder.compiler(compiler.stage - 1, target);
+ format!("stage{}-{}", compiler.stage + 1, target)
+ } else {
+ format!("stage{}-{}", compiler.stage, target)
+ };
if suite.ends_with("fulldeps") {
builder.ensure(compile::Rustc::new(compiler, target));
}
+ if suite == "debuginfo" {
+ builder
+ .ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target });
+ }
+
builder.ensure(compile::Std::new(compiler, target));
// ensure that `libproc_macro` is available on the host.
builder.ensure(compile::Std::new(compiler, compiler.host));
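
A worked example of the special case above: `x.py test --stage 1 tests/ui-fulldeps` enters this step with the stage 1 compiler, rebinds `compiler` to the stage 0 bootstrap compiler for building, and still reports `stage1-<target>` to compiletest. The same computation in isolation, with a stand-in `Compiler` type:

    // Illustrative stand-in for bootstrap's real `Compiler` type.
    #[derive(Clone, Copy)]
    struct Compiler { stage: u32 }

    fn stage_id(suite: &str, mut compiler: Compiler, target: &str) -> (Compiler, String) {
        let id = if suite == "ui-fulldeps" && compiler.stage == 1 {
            // Build with the previous-stage (bootstrap) compiler...
            compiler = Compiler { stage: compiler.stage - 1 };
            // ...but keep reporting the stage the user asked for.
            format!("stage{}-{}", compiler.stage + 1, target)
        } else {
            format!("stage{}-{}", compiler.stage, target)
        };
        (compiler, id)
    }

    fn main() {
        let (c, id) = stage_id("ui-fulldeps", Compiler { stage: 1 }, "x86_64-unknown-linux-gnu");
        assert_eq!(c.stage, 0);
        assert_eq!(id, "stage1-x86_64-unknown-linux-gnu");
    }
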
@@ -1528,7 +1425,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--src-base").arg(builder.src.join("tests").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
cmd.arg("--sysroot-base").arg(builder.sysroot(compiler));
- cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+ cmd.arg("--stage-id").arg(stage_id);
cmd.arg("--suite").arg(suite);
cmd.arg("--mode").arg(mode);
cmd.arg("--target").arg(target.rustc_target_arg());
@@ -1637,7 +1534,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
// Get paths from cmd args
let paths = match &builder.config.cmd {
- Subcommand::Test { ref paths, .. } => &paths[..],
+ Subcommand::Test { .. } => &builder.config.paths[..],
_ => &[],
};
@@ -1647,8 +1544,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
.filter_map(|p| util::is_valid_test_suite_arg(p, suite_path, builder))
.collect();
- test_args.append(&mut builder.config.cmd.test_args());
- test_args.extend(builder.config.free_args.iter().map(|s| s.as_str()));
+ test_args.append(&mut builder.config.test_args());
// On Windows, replace forward slashes in test-args by backslashes
// so the correct filters are passed to libtest
@@ -1804,12 +1700,29 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--channel").arg(&builder.config.channel);
+ if !builder.config.omit_git_hash {
+ cmd.arg("--git-hash");
+ }
+
if let Some(commit) = builder.config.download_rustc_commit() {
cmd.env("FAKE_DOWNLOAD_RUSTC_PREFIX", format!("/rustc/{commit}"));
}
builder.ci_env.force_coloring_in_ci(&mut cmd);
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::Compiletest {
+ suite: suite.into(),
+ mode: mode.into(),
+ compare_mode: None,
+ target: self.target.triple.to_string(),
+ host: self.compiler.host.triple.to_string(),
+ stage: self.compiler.stage,
+ },
+ builder,
+ );
+
builder.info(&format!(
"Check compiletest suite={} mode={} ({} -> {})",
suite, mode, &compiler.host, target
@@ -1819,6 +1732,20 @@ note: if you're sure you want to do this, please open an issue as to why. In the
if let Some(compare_mode) = compare_mode {
cmd.arg("--compare-mode").arg(compare_mode);
+
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::Compiletest {
+ suite: suite.into(),
+ mode: mode.into(),
+ compare_mode: Some(compare_mode.into()),
+ target: self.target.triple.to_string(),
+ host: self.compiler.host.triple.to_string(),
+ stage: self.compiler.stage,
+ },
+ builder,
+ );
+
builder.info(&format!(
"Check compiletest suite={} mode={} compare_mode={} ({} -> {})",
suite, mode, compare_mode, &compiler.host, target
@@ -1888,7 +1815,13 @@ impl BookTest {
rustbook_cmd.env("RUSTC_BOOTSTRAP", "1");
rustbook_cmd.env("PATH", new_path).arg("test").arg(path);
builder.add_rust_test_threads(&mut rustbook_cmd);
- builder.info(&format!("Testing rustbook {}", self.path.display()));
+ let _guard = builder.msg(
+ Kind::Test,
+ compiler.stage,
+ format_args!("rustbook {}", self.path.display()),
+ compiler.host,
+ compiler.host,
+ );
let _time = util::timeit(&builder);
let toolstate = if try_run(builder, &mut rustbook_cmd) {
ToolState::TestPass
@@ -2016,7 +1949,8 @@ impl Step for ErrorIndex {
let mut tool = tool::ErrorIndex::command(builder);
tool.arg("markdown").arg(&output);
- builder.info(&format!("Testing error-index stage{}", compiler.stage));
+ let _guard =
+ builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host);
let _time = util::timeit(&builder);
builder.run_quiet(&mut tool);
// The tests themselves need to link to std, so make sure it is
@@ -2043,7 +1977,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) ->
cmd.arg(markdown);
cmd.env("RUSTC_BOOTSTRAP", "1");
- let test_args = builder.config.cmd.test_args().join(" ");
+ let test_args = builder.config.test_args().join(" ");
cmd.arg("--test-args").arg(test_args);
if builder.config.verbose_tests {
@@ -2088,7 +2022,6 @@ impl Step for RustcGuide {
pub struct CrateLibrustc {
compiler: Compiler,
target: TargetSelection,
- test_kind: TestKind,
crates: Vec<Interned<String>>,
}
@@ -2110,9 +2043,8 @@ impl Step for CrateLibrustc {
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
- let test_kind = builder.kind.into();
- builder.ensure(CrateLibrustc { compiler, target: run.target, test_kind, crates });
+ builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
}
fn run(self, builder: &Builder<'_>) {
@@ -2120,18 +2052,117 @@ impl Step for CrateLibrustc {
compiler: self.compiler,
target: self.target,
mode: Mode::Rustc,
- test_kind: self.test_kind,
crates: self.crates,
});
}
}
+/// Given a `cargo test` subcommand, add the appropriate flags and run it.
+///
+/// Returns whether the test succeeded.
+fn run_cargo_test(
+ cargo: impl Into<Command>,
+ libtest_args: &[&str],
+ crates: &[Interned<String>],
+ primary_crate: &str,
+ compiler: Compiler,
+ target: TargetSelection,
+ builder: &Builder<'_>,
+) -> bool {
+ let mut cargo =
+ prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder);
+ let _time = util::timeit(&builder);
+
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::CargoPackage {
+ crates: crates.iter().map(|c| c.to_string()).collect(),
+ target: target.triple.to_string(),
+ host: compiler.host.triple.to_string(),
+ stage: compiler.stage,
+ },
+ builder,
+ );
+ add_flags_and_try_run_tests(builder, &mut cargo)
+}
+
+/// Given a `cargo test` subcommand, pass it the appropriate test flags given a `builder`.
+fn prepare_cargo_test(
+ cargo: impl Into<Command>,
+ libtest_args: &[&str],
+ crates: &[Interned<String>],
+ primary_crate: &str,
+ compiler: Compiler,
+ target: TargetSelection,
+ builder: &Builder<'_>,
+) -> Command {
+ let mut cargo = cargo.into();
+
+ // Pass in some standard flags then iterate over the graph we've discovered
+ // in `cargo metadata` with the maps above and figure out what `-p`
+ // arguments need to get passed.
+ if builder.kind == Kind::Test && !builder.fail_fast {
+ cargo.arg("--no-fail-fast");
+ }
+ match builder.doc_tests {
+ DocTests::Only => {
+ cargo.arg("--doc");
+ }
+ DocTests::No => {
+ let krate = &builder
+ .crates
+ .get(&INTERNER.intern_str(primary_crate))
+ .unwrap_or_else(|| panic!("missing crate {primary_crate}"));
+ if krate.has_lib {
+ cargo.arg("--lib");
+ }
+ cargo.args(&["--bins", "--examples", "--tests", "--benches"]);
+ }
+ DocTests::Yes => {}
+ }
+
+ for &krate in crates {
+ cargo.arg("-p").arg(krate);
+ }
+
+ cargo.arg("--").args(&builder.config.test_args()).args(libtest_args);
+ if !builder.config.verbose_tests {
+ cargo.arg("--quiet");
+ }
+
+ // The tests are going to run with the *target* libraries, so we need to
+ // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+ //
+ // Note that to run the compiler we need to run with the *host* libraries,
+ // but our wrapper scripts arrange for that to be the case anyway.
+ let mut dylib_path = dylib_path();
+ dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
+ cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+ if target.contains("emscripten") {
+ cargo.env(
+ format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
+ builder.config.nodejs.as_ref().expect("nodejs not configured"),
+ );
+ } else if target.starts_with("wasm32") {
+ let node = builder.config.nodejs.as_ref().expect("nodejs not configured");
+ let runner = format!("{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display());
+ cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), &runner);
+ } else if builder.remote_tested(target) {
+ cargo.env(
+ format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
+ format!("{} run 0", builder.tool_exe(Tool::RemoteTestClient).display()),
+ );
+ }
+
+ cargo
+}
+
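
The runner selection at the end of `prepare_cargo_test` relies on Cargo's per-target `CARGO_TARGET_<TRIPLE>_RUNNER` environment variables. A small illustration of how such a variable name is derived from a target triple; this reimplements the idea behind bootstrap's `envify` helper for the sketch rather than quoting it:

    // Illustration only: bootstrap has its own `envify`; this mirrors the same idea.
    fn envify(s: &str) -> String {
        s.chars()
            .map(|c| match c {
                '-' | '.' => '_',
                c => c.to_ascii_uppercase(),
            })
            .collect()
    }

    fn main() {
        let triple = "wasm32-unknown-unknown";
        let var = format!("CARGO_TARGET_{}_RUNNER", envify(triple));
        assert_eq!(var, "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUNNER");
        // Setting this variable makes `cargo test` launch each test binary through
        // the given command, e.g. a node wasm shim or the remote-test-client.
    }
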
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Crate {
pub compiler: Compiler,
pub target: TargetSelection,
pub mode: Mode,
- pub test_kind: TestKind,
pub crates: Vec<Interned<String>>,
}
@@ -2140,21 +2171,20 @@ impl Step for Crate {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.crate_or_deps("test")
+ run.crate_or_deps("sysroot")
}
fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
- let test_kind = builder.kind.into();
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
- builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, test_kind, crates });
+ builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates });
}
/// Runs all unit tests plus documentation tests for a given crate defined
@@ -2169,7 +2199,6 @@ impl Step for Crate {
let compiler = self.compiler;
let target = self.target;
let mode = self.mode;
- let test_kind = self.test_kind;
builder.ensure(compile::Std::new(compiler, target));
builder.ensure(RemoteCopyLibs { compiler, target });
@@ -2181,7 +2210,7 @@ impl Step for Crate {
let compiler = builder.compiler_for(compiler.stage, compiler.host, target);
let mut cargo =
- builder.cargo(compiler, mode, SourceType::InTree, target, test_kind.subcommand());
+ builder.cargo(compiler, mode, SourceType::InTree, target, builder.kind.as_str());
match mode {
Mode::Std => {
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
@@ -2192,70 +2221,14 @@ impl Step for Crate {
_ => panic!("can only test libraries"),
};
- // Build up the base `cargo test` command.
- //
- // Pass in some standard flags then iterate over the graph we've discovered
- // in `cargo metadata` with the maps above and figure out what `-p`
- // arguments need to get passed.
- if test_kind.subcommand() == "test" && !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
- match builder.doc_tests {
- DocTests::Only => {
- cargo.arg("--doc");
- }
- DocTests::No => {
- cargo.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]);
- }
- DocTests::Yes => {}
- }
-
- for krate in &self.crates {
- cargo.arg("-p").arg(krate);
- }
-
- // The tests are going to run with the *target* libraries, so we need to
- // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
- //
- // Note that to run the compiler we need to run with the *host* libraries,
- // but our wrapper scripts arrange for that to be the case anyway.
- let mut dylib_path = dylib_path();
- dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
- cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
- cargo.arg("--");
- cargo.args(&builder.config.cmd.test_args());
-
- cargo.arg("-Z").arg("unstable-options");
- cargo.arg("--format").arg("json");
-
- if target.contains("emscripten") {
- cargo.env(
- format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
- builder.config.nodejs.as_ref().expect("nodejs not configured"),
- );
- } else if target.starts_with("wasm32") {
- let node = builder.config.nodejs.as_ref().expect("nodejs not configured");
- let runner =
- format!("{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display());
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), &runner);
- } else if builder.remote_tested(target) {
- cargo.env(
- format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
- format!("{} run 0", builder.tool_exe(Tool::RemoteTestClient).display()),
- );
- }
-
- builder.info(&format!(
- "{}{} stage{} ({} -> {})",
- test_kind,
- crate_description(&self.crates),
+ let _guard = builder.msg(
+ builder.kind,
compiler.stage,
- &compiler.host,
- target
- ));
- let _time = util::timeit(&builder);
- crate::render_tests::try_run_tests(builder, &mut cargo.into());
+ crate_description(&self.crates),
+ compiler.host,
+ target,
+ );
+ run_cargo_test(cargo, &[], &self.crates, &self.crates[0], compiler, target, builder);
}
}
@@ -2263,7 +2236,6 @@ impl Step for Crate {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CrateRustdoc {
host: TargetSelection,
- test_kind: TestKind,
}
impl Step for CrateRustdoc {
@@ -2278,13 +2250,10 @@ impl Step for CrateRustdoc {
fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
- let test_kind = builder.kind.into();
-
- builder.ensure(CrateRustdoc { host: run.target, test_kind });
+ builder.ensure(CrateRustdoc { host: run.target });
}
fn run(self, builder: &Builder<'_>) {
- let test_kind = self.test_kind;
let target = self.host;
let compiler = if builder.download_rustc() {
@@ -2303,29 +2272,11 @@ impl Step for CrateRustdoc {
compiler,
Mode::ToolRustc,
target,
- test_kind.subcommand(),
+ builder.kind.as_str(),
"src/tools/rustdoc",
SourceType::InTree,
&[],
);
- if test_kind.subcommand() == "test" && !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
- match builder.doc_tests {
- DocTests::Only => {
- cargo.arg("--doc");
- }
- DocTests::No => {
- cargo.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]);
- }
- DocTests::Yes => {}
- }
-
- cargo.arg("-p").arg("rustdoc:0.0.0");
-
- cargo.arg("--");
- cargo.args(&builder.config.cmd.test_args());
-
if self.host.contains("musl") {
cargo.arg("'-Ctarget-feature=-crt-static'");
}
@@ -2365,24 +2316,22 @@ impl Step for CrateRustdoc {
dylib_path.insert(0, PathBuf::from(&*libdir));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
- if !builder.config.verbose_tests {
- cargo.arg("--quiet");
- }
-
- builder.info(&format!(
- "{} rustdoc stage{} ({} -> {})",
- test_kind, compiler.stage, &compiler.host, target
- ));
- let _time = util::timeit(&builder);
-
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ let _guard = builder.msg(builder.kind, compiler.stage, "rustdoc", compiler.host, target);
+ run_cargo_test(
+ cargo,
+ &[],
+ &[INTERNER.intern_str("rustdoc:0.0.0")],
+ "rustdoc",
+ compiler,
+ target,
+ builder,
+ );
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CrateRustdocJsonTypes {
host: TargetSelection,
- test_kind: TestKind,
}
impl Step for CrateRustdocJsonTypes {
@@ -2397,13 +2346,10 @@ impl Step for CrateRustdocJsonTypes {
fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
- let test_kind = builder.kind.into();
-
- builder.ensure(CrateRustdocJsonTypes { host: run.target, test_kind });
+ builder.ensure(CrateRustdocJsonTypes { host: run.target });
}
fn run(self, builder: &Builder<'_>) {
- let test_kind = self.test_kind;
let target = self.host;
// Use the previous stage compiler to reuse the artifacts that are
@@ -2413,36 +2359,35 @@ impl Step for CrateRustdocJsonTypes {
let compiler = builder.compiler_for(builder.top_stage, target, target);
builder.ensure(compile::Rustc::new(compiler, target));
- let mut cargo = tool::prepare_tool_cargo(
+ let cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
target,
- test_kind.subcommand(),
+ builder.kind.as_str(),
"src/rustdoc-json-types",
SourceType::InTree,
&[],
);
- if test_kind.subcommand() == "test" && !builder.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- cargo.arg("-p").arg("rustdoc-json-types");
- cargo.arg("--");
- cargo.args(&builder.config.cmd.test_args());
-
- if self.host.contains("musl") {
- cargo.arg("'-Ctarget-feature=-crt-static'");
- }
-
- builder.info(&format!(
- "{} rustdoc-json-types stage{} ({} -> {})",
- test_kind, compiler.stage, &compiler.host, target
- ));
- let _time = util::timeit(&builder);
+ // FIXME: this looks very wrong, libtest doesn't accept `-C` arguments and the quotes are fishy.
+ let libtest_args = if self.host.contains("musl") {
+ ["'-Ctarget-feature=-crt-static'"].as_slice()
+ } else {
+ &[]
+ };
- add_flags_and_try_run_tests(builder, &mut cargo.into());
+ let _guard =
+ builder.msg(builder.kind, compiler.stage, "rustdoc-json-types", compiler.host, target);
+ run_cargo_test(
+ cargo,
+ libtest_args,
+ &[INTERNER.intern_str("rustdoc-json-types")],
+ "rustdoc-json-types",
+ compiler,
+ target,
+ builder,
+ );
}
}
@@ -2580,13 +2525,15 @@ impl Step for Bootstrap {
check_bootstrap.arg("bootstrap_test.py").current_dir(builder.src.join("src/bootstrap/"));
try_run(builder, &mut check_bootstrap);
+ let host = builder.config.build;
+ let compiler = builder.compiler(0, host);
let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("test")
.current_dir(builder.src.join("src/bootstrap"))
.env("RUSTFLAGS", "-Cdebuginfo=2")
.env("CARGO_TARGET_DIR", builder.out.join("bootstrap"))
.env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTDOC", builder.rustdoc(builder.compiler(0, builder.build.build)))
+ .env("RUSTDOC", builder.rustdoc(compiler))
.env("RUSTC", &builder.initial_rustc);
if let Some(flags) = option_env!("RUSTFLAGS") {
// Use the same rustc flags for testing as for "normal" compilation,
@@ -2594,24 +2541,9 @@ impl Step for Bootstrap {
// https://github.com/rust-lang/rust/issues/49215
cmd.env("RUSTFLAGS", flags);
}
- if !builder.fail_fast {
- cmd.arg("--no-fail-fast");
- }
- match builder.doc_tests {
- DocTests::Only => {
- cmd.arg("--doc");
- }
- DocTests::No => {
- cmd.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]);
- }
- DocTests::Yes => {}
- }
-
- cmd.arg("--").args(&builder.config.cmd.test_args());
// rustbuild tests are racy on directory creation so just run them one at a time.
// Since there aren't many, this shouldn't be a problem.
- cmd.arg("--test-threads=1");
- add_flags_and_try_run_tests(builder, &mut cmd);
+ run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", compiler, host, builder);
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -2668,43 +2600,6 @@ impl Step for TierCheck {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct ReplacePlaceholderTest;
-
-impl Step for ReplacePlaceholderTest {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- /// Ensure the version placeholder replacement tool builds
- fn run(self, builder: &Builder<'_>) {
- builder.info("build check for version replacement placeholder");
-
- // Test the version placeholder replacement tool itself.
- let bootstrap_host = builder.config.build;
- let compiler = builder.compiler(0, bootstrap_host);
- let cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolBootstrap,
- bootstrap_host,
- "test",
- "src/tools/replace-version-placeholder",
- SourceType::InTree,
- &[],
- );
- add_flags_and_try_run_tests(builder, &mut cargo.into());
- }
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.path("src/tools/replace-version-placeholder")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Self);
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct LintDocs {
pub compiler: Compiler,
pub target: TargetSelection,
@@ -2761,7 +2656,7 @@ impl Step for RustInstaller {
SourceType::InTree,
&[],
);
- try_run(builder, &mut cargo.into());
+ run_cargo_test(cargo, &[], &[], "installer", compiler, bootstrap_host, builder);
// We currently don't support running the test.sh script outside linux(?) environments.
// Eventually this should likely migrate to #[test]s in rust-installer proper rather than a
@@ -2828,7 +2723,7 @@ impl Step for TestHelpers {
return;
}
- builder.info("Building test helpers");
+ let _guard = builder.msg_unstaged(Kind::Build, "test helpers", target);
t!(fs::create_dir_all(&dst));
let mut cfg = cc::Build::new();
// FIXME: Workaround for https://github.com/emscripten-core/emscripten/issues/9013
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index d1fd2e8c4..b3791efaf 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -1,4 +1,3 @@
-use std::collections::HashSet;
use std::env;
use std::fs;
use std::path::PathBuf;
@@ -12,6 +11,7 @@ use crate::toolstate::ToolState;
use crate::util::{add_dylib_path, exe, t};
use crate::Compiler;
use crate::Mode;
+use crate::{gha, Kind};
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum SourceType {
@@ -33,41 +33,27 @@ struct ToolBuild {
allow_features: &'static str,
}
-fn tooling_output(
- mode: Mode,
- tool: &str,
- build_stage: u32,
- host: &TargetSelection,
- target: &TargetSelection,
-) -> String {
- match mode {
- // depends on compiler stage, different to host compiler
- Mode::ToolRustc => {
- if host == target {
- format!("Building tool {} (stage{} -> stage{})", tool, build_stage, build_stage + 1)
- } else {
- format!(
- "Building tool {} (stage{}:{} -> stage{}:{})",
- tool,
- build_stage,
- host,
- build_stage + 1,
- target
- )
- }
+impl Builder<'_> {
+ fn msg_tool(
+ &self,
+ mode: Mode,
+ tool: &str,
+ build_stage: u32,
+ host: &TargetSelection,
+ target: &TargetSelection,
+ ) -> Option<gha::Group> {
+ match mode {
+ // depends on compiler stage, different to host compiler
+ Mode::ToolRustc => self.msg_sysroot_tool(
+ Kind::Build,
+ build_stage,
+ format_args!("tool {tool}"),
+ *host,
+ *target,
+ ),
+ // doesn't depend on compiler, same as host compiler
+ _ => self.msg(Kind::Build, build_stage, format_args!("tool {tool}"), *host, *target),
}
- // doesn't depend on compiler, same as host compiler
- Mode::ToolStd => {
- if host == target {
- format!("Building tool {} (stage{})", tool, build_stage)
- } else {
- format!(
- "Building tool {} (stage{}:{} -> stage{}:{})",
- tool, build_stage, host, build_stage, target
- )
- }
- }
- _ => format!("Building tool {} (stage{})", tool, build_stage),
}
}
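
`msg_tool` returns an `Option<gha::Group>`, which call sites bind to `_guard` so the GitHub Actions log group stays open for the duration of the build and closes when the guard is dropped. A minimal sketch of that RAII pattern, not the real `gha` module:

    struct Group;

    impl Group {
        fn open(name: &str) -> Self {
            // GitHub Actions folds everything between these markers under `name`.
            println!("::group::{name}");
            Group
        }
    }

    impl Drop for Group {
        fn drop(&mut self) {
            println!("::endgroup::");
        }
    }

    fn build_tool(tool: &str) {
        // Bind to `_guard`, not `_`: a plain `_` would drop (and close) the group immediately.
        let _guard = Group::open(&format!("Building tool {tool}"));
        println!("... cargo output grouped under the heading ...");
    } // group closes here when `_guard` is dropped

    fn main() {
        build_tool("rustdoc");
    }
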
@@ -112,143 +98,16 @@ impl Step for ToolBuild {
if !self.allow_features.is_empty() {
cargo.allow_features(self.allow_features);
}
- let msg = tooling_output(
+ let _guard = builder.msg_tool(
self.mode,
self.tool,
self.compiler.stage,
&self.compiler.host,
&self.target,
);
- builder.info(&msg);
- let mut duplicates = Vec::new();
- let is_expected = compile::stream_cargo(builder, cargo, vec![], &mut |msg| {
- // Only care about big things like the RLS/Cargo for now
- match tool {
- "rls" | "cargo" | "clippy-driver" | "miri" | "rustfmt" => {}
-
- _ => return,
- }
- let (id, features, filenames) = match msg {
- compile::CargoMessage::CompilerArtifact {
- package_id,
- features,
- filenames,
- target: _,
- } => (package_id, features, filenames),
- _ => return,
- };
- let features = features.iter().map(|s| s.to_string()).collect::<Vec<_>>();
-
- for path in filenames {
- let val = (tool, PathBuf::from(&*path), features.clone());
- // we're only interested in deduplicating rlibs for now
- if val.1.extension().and_then(|s| s.to_str()) != Some("rlib") {
- continue;
- }
-
- // Don't worry about compiles that turn out to be host
- // dependencies or build scripts. To skip these we look for
- // anything that goes in `.../release/deps` but *doesn't* go in
- // `$target/release/deps`. This ensures that outputs in
- // `$target/release` are still considered candidates for
- // deduplication.
- if let Some(parent) = val.1.parent() {
- if parent.ends_with("release/deps") {
- let maybe_target = parent
- .parent()
- .and_then(|p| p.parent())
- .and_then(|p| p.file_name())
- .and_then(|p| p.to_str())
- .unwrap();
- if maybe_target != &*target.triple {
- continue;
- }
- }
- }
-
- // Record that we've built an artifact for `id`, and if one was
- // already listed then we need to see if we reused the same
- // artifact or produced a duplicate.
- let mut artifacts = builder.tool_artifacts.borrow_mut();
- let prev_artifacts = artifacts.entry(target).or_default();
- let prev = match prev_artifacts.get(&*id) {
- Some(prev) => prev,
- None => {
- prev_artifacts.insert(id.to_string(), val);
- continue;
- }
- };
- if prev.1 == val.1 {
- return; // same path, same artifact
- }
- // If the paths are different and one of them *isn't* inside of
- // `release/deps`, then it means it's probably in
- // `$target/release`, or it's some final artifact like
- // `libcargo.rlib`. In these situations Cargo probably just
- // copied it up from `$target/release/deps/libcargo-xxxx.rlib`,
- // so if the features are equal we can just skip it.
- let prev_no_hash = prev.1.parent().unwrap().ends_with("release/deps");
- let val_no_hash = val.1.parent().unwrap().ends_with("release/deps");
- if prev.2 == val.2 || !prev_no_hash || !val_no_hash {
- return;
- }
-
- // ... and otherwise this looks like we duplicated some sort of
- // compilation, so record it to generate an error later.
- duplicates.push((id.to_string(), val, prev.clone()));
- }
- });
-
- if is_expected && !duplicates.is_empty() {
- eprintln!(
- "duplicate artifacts found when compiling a tool, this \
- typically means that something was recompiled because \
- a transitive dependency has different features activated \
- than in a previous build:\n"
- );
- let (same, different): (Vec<_>, Vec<_>) =
- duplicates.into_iter().partition(|(_, cur, prev)| cur.2 == prev.2);
- if !same.is_empty() {
- eprintln!(
- "the following dependencies are duplicated although they \
- have the same features enabled:"
- );
- for (id, cur, prev) in same {
- eprintln!(" {}", id);
- // same features
- eprintln!(" `{}` ({:?})\n `{}` ({:?})", cur.0, cur.1, prev.0, prev.1);
- }
- }
- if !different.is_empty() {
- eprintln!("the following dependencies have different features:");
- for (id, cur, prev) in different {
- eprintln!(" {}", id);
- let cur_features: HashSet<_> = cur.2.into_iter().collect();
- let prev_features: HashSet<_> = prev.2.into_iter().collect();
- eprintln!(
- " `{}` additionally enabled features {:?} at {:?}",
- cur.0,
- &cur_features - &prev_features,
- cur.1
- );
- eprintln!(
- " `{}` additionally enabled features {:?} at {:?}",
- prev.0,
- &prev_features - &cur_features,
- prev.1
- );
- }
- }
- eprintln!();
- eprintln!(
- "to fix this you will probably want to edit the local \
- src/tools/rustc-workspace-hack/Cargo.toml crate, as \
- that will update the dependency graph to ensure that \
- these crates all share the same feature set"
- );
- panic!("tools should not compile multiple copies of the same crate");
- }
+ let mut cargo = Command::from(cargo);
+ let is_expected = builder.try_run(&mut cargo);
builder.save_toolstate(
tool,
@@ -282,7 +141,7 @@ pub fn prepare_tool_cargo(
mode: Mode,
target: TargetSelection,
command: &'static str,
- path: &'static str,
+ path: &str,
source_type: SourceType,
extra_features: &[String],
) -> CargoCommand {
@@ -299,7 +158,9 @@ pub fn prepare_tool_cargo(
|| path.ends_with("rustfmt")
{
cargo.env("LIBZ_SYS_STATIC", "1");
- features.push("rustc-workspace-hack/all-static".to_string());
+ }
+ if path.ends_with("cargo") {
+ features.push("all-static".to_string());
}
}
@@ -319,6 +180,12 @@ pub fn prepare_tool_cargo(
cargo.env("CFG_VERSION", builder.rust_version());
cargo.env("CFG_RELEASE_NUM", &builder.version);
cargo.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel());
+ if let Some(ref ver_date) = builder.rust_info().commit_date() {
+ cargo.env("CFG_VER_DATE", ver_date);
+ }
+ if let Some(ref ver_hash) = builder.rust_info().sha() {
+ cargo.env("CFG_VER_HASH", ver_hash);
+ }
let info = GitInfo::new(builder.config.omit_git_hash, &dir);
if let Some(sha) = info.sha() {
@@ -434,6 +301,8 @@ bootstrap_tool!(
CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata";
GenerateCopyright, "src/tools/generate-copyright", "generate-copyright";
SuggestTests, "src/tools/suggest-tests", "suggest-tests";
+ GenerateWindowsSys, "src/tools/generate-windows-sys", "generate-windows-sys";
+ RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
@@ -611,14 +480,13 @@ impl Step for Rustdoc {
cargo.rustflag("--cfg=parallel_compiler");
}
- let msg = tooling_output(
+ let _guard = builder.msg_tool(
Mode::ToolRustc,
"rustdoc",
build_compiler.stage,
&self.compiler.host,
&target,
);
- builder.info(&msg);
builder.run(&mut cargo.into());
// Cargo adds a number of paths to the dylib search path on windows, which results in
@@ -707,18 +575,18 @@ impl Step for Cargo {
if self.target.contains("windows") {
build_cred(
"cargo-credential-wincred",
- "src/tools/cargo/crates/credential/cargo-credential-wincred",
+ "src/tools/cargo/credential/cargo-credential-wincred",
);
}
if self.target.contains("apple-darwin") {
build_cred(
"cargo-credential-macos-keychain",
- "src/tools/cargo/crates/credential/cargo-credential-macos-keychain",
+ "src/tools/cargo/credential/cargo-credential-macos-keychain",
);
}
build_cred(
"cargo-credential-1password",
- "src/tools/cargo/crates/credential/cargo-credential-1password",
+ "src/tools/cargo/credential/cargo-credential-1password",
);
cargo_bin_path
}
@@ -867,6 +735,7 @@ macro_rules! tool_extended {
stable = $stable:expr
$(,tool_std = $tool_std:literal)?
$(,allow_features = $allow_features:expr)?
+ $(,add_bins_to_sysroot = $add_bins_to_sysroot:expr)?
;)+) => {
$(
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
@@ -909,7 +778,7 @@ macro_rules! tool_extended {
#[allow(unused_mut)]
fn run(mut $sel, $builder: &Builder<'_>) -> Option<PathBuf> {
- $builder.ensure(ToolBuild {
+ let tool = $builder.ensure(ToolBuild {
compiler: $sel.compiler,
target: $sel.target,
tool: $tool_name,
@@ -919,7 +788,27 @@ macro_rules! tool_extended {
is_optional_tool: true,
source_type: SourceType::InTree,
allow_features: concat!($($allow_features)*),
- })
+ })?;
+
+ if (false $(|| !$add_bins_to_sysroot.is_empty())?) && $sel.compiler.stage > 0 {
+ let bindir = $builder.sysroot($sel.compiler).join("bin");
+ t!(fs::create_dir_all(&bindir));
+
+ #[allow(unused_variables)]
+ let tools_out = $builder
+ .cargo_out($sel.compiler, Mode::ToolRustc, $sel.target);
+
+ $(for add_bin in $add_bins_to_sysroot {
+ let bin_source = tools_out.join(exe(add_bin, $sel.target));
+ let bin_destination = bindir.join(exe(add_bin, $sel.compiler.host));
+ $builder.copy(&bin_source, &bin_destination);
+ })?
+
+ let tool = bindir.join(exe($tool_name, $sel.compiler.host));
+ Some(tool)
+ } else {
+ Some(tool)
+ }
}
}
)+
@@ -933,15 +822,15 @@ macro_rules! tool_extended {
tool_extended!((self, builder),
Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true;
CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true;
- Clippy, "src/tools/clippy", "clippy-driver", stable=true;
- Miri, "src/tools/miri", "miri", stable=false;
- CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=true;
+ Clippy, "src/tools/clippy", "clippy-driver", stable=true, add_bins_to_sysroot = ["clippy-driver", "cargo-clippy"];
+ Miri, "src/tools/miri", "miri", stable=false, add_bins_to_sysroot = ["miri"];
+ CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=true, add_bins_to_sysroot = ["cargo-miri"];
// FIXME: tool_std is not quite right, we shouldn't allow nightly features.
// But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0,
// and this is close enough for now.
Rls, "src/tools/rls", "rls", stable=true, tool_std=true;
RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, tool_std=true;
- Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true;
+ Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, add_bins_to_sysroot = ["rustfmt", "cargo-fmt"];
);
impl<'a> Builder<'a> {
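
The `(false $(|| !$add_bins_to_sysroot.is_empty())?)` expression in the macro above is the usual trick for folding an optional macro fragment into a boolean: when a caller omits `add_bins_to_sysroot`, the repetition expands to nothing and the whole condition collapses to `false`. A stripped-down, self-contained illustration of the same technique:

    macro_rules! with_optional_bins {
        ($name:ident $(, bins = $bins:expr)?) => {
            fn $name(stage: u32) -> bool {
                // Expands to plain `false` when `bins` is omitted, so the copy step is skipped.
                (false $(|| !$bins.is_empty())?) && stage > 0
            }
        };
    }

    with_optional_bins!(plain_tool);
    with_optional_bins!(tool_with_bins, bins = ["clippy-driver", "cargo-clippy"]);

    fn main() {
        assert!(!plain_tool(1));
        assert!(tool_with_bins(1));
        assert!(!tool_with_bins(0)); // stage 0 never copies into the sysroot
    }
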
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 2e1adbf63..9bfdc77e6 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -488,7 +488,7 @@ fn absolute_windows(path: &std::path::Path) -> std::io::Result<std::path::PathBu
}
}
-/// Adapted from https://github.com/llvm/llvm-project/blob/782e91224601e461c019e0a4573bbccc6094fbcd/llvm/cmake/modules/HandleLLVMOptions.cmake#L1058-L1079
+/// Adapted from <https://github.com/llvm/llvm-project/blob/782e91224601e461c019e0a4573bbccc6094fbcd/llvm/cmake/modules/HandleLLVMOptions.cmake#L1058-L1079>
///
/// When `clang-cl` is used with instrumentation, we need to add clang's runtime library resource
/// directory to the linker flags, otherwise there will be linker errors about the profiler runtime