author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:18:25 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:18:25 +0000
commit     5363f350887b1e5b5dd21a86f88c8af9d7fea6da (patch)
tree       35ca005eb6e0e9a1ba3bb5dbc033209ad445dc17 /src/bootstrap
parent     Adding debian version 1.66.0+dfsg1-1. (diff)
download   rustc-5363f350887b1e5b5dd21a86f88c8af9d7fea6da.tar.xz
           rustc-5363f350887b1e5b5dd21a86f88c8af9d7fea6da.zip

Merging upstream version 1.67.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/bootstrap')
-rw-r--r--  src/bootstrap/Cargo.lock        | 136
-rw-r--r--  src/bootstrap/Cargo.toml        |   7
-rw-r--r--  src/bootstrap/bin/main.rs       |   2
-rw-r--r--  src/bootstrap/bin/rustc.rs      |   8
-rw-r--r--  src/bootstrap/bin/rustdoc.rs    |   8
-rw-r--r--  src/bootstrap/bootstrap.py      |   1
-rw-r--r--  src/bootstrap/builder.rs        | 301
-rw-r--r--  src/bootstrap/builder/tests.rs  |   4
-rw-r--r--  src/bootstrap/cache.rs          |   8
-rw-r--r--  src/bootstrap/cc_detect.rs      |  21
-rw-r--r--  src/bootstrap/channel.rs        |   3
-rw-r--r--  src/bootstrap/check.rs          |   8
-rw-r--r--  src/bootstrap/compile.rs        |  61
-rw-r--r--  src/bootstrap/config.rs         | 418
-rw-r--r--  src/bootstrap/dist.rs           |  93
-rw-r--r--  src/bootstrap/doc.rs            | 194
-rw-r--r--  src/bootstrap/download.rs       | 520
-rw-r--r--  src/bootstrap/flags.rs          |  34
-rw-r--r--  src/bootstrap/format.rs         |   2
-rw-r--r--  src/bootstrap/install.rs        |  12
-rw-r--r--  src/bootstrap/lib.rs            | 215
-rw-r--r--  src/bootstrap/metrics.rs        |   2
-rw-r--r--  src/bootstrap/native.rs         | 102
-rw-r--r--  src/bootstrap/run.rs            | 131
-rw-r--r--  src/bootstrap/sanity.rs         |  29
-rw-r--r--  src/bootstrap/setup.rs          |  91
-rw-r--r--  src/bootstrap/tarball.rs        |   4
-rw-r--r--  src/bootstrap/test.rs           | 168
-rw-r--r--  src/bootstrap/tool.rs           |  34
-rw-r--r--  src/bootstrap/toolstate.rs      |   4
-rw-r--r--  src/bootstrap/util.rs           |  26
31 files changed, 1518 insertions(+), 1129 deletions(-)
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index baecca44c..efe8ae316 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -12,15 +12,6 @@ dependencies = [
]
[[package]]
-name = "ansi_term"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
-dependencies = [
- "winapi",
-]
-
-[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -53,6 +44,7 @@ dependencies = [
"hex",
"ignore",
"libc",
+ "object",
"once_cell",
"opener",
"pretty_assertions",
@@ -107,18 +99,18 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
[[package]]
name = "cpufeatures"
-version = "0.2.2"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
dependencies = [
"libc",
]
[[package]]
name = "crossbeam-channel"
-version = "0.5.4"
+version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
dependencies = [
"cfg-if",
"crossbeam-utils",
@@ -126,9 +118,9 @@ dependencies = [
[[package]]
name = "crossbeam-deque"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
dependencies = [
"cfg-if",
"crossbeam-epoch",
@@ -137,26 +129,24 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.8"
+version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
- "lazy_static",
"memoffset",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.8"
+version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if",
- "lazy_static",
]
[[package]]
@@ -224,9 +214,9 @@ dependencies = [
[[package]]
name = "fd-lock"
-version = "3.0.6"
+version = "3.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e11dcc7e4d79a8c89b9ab4c6f5c30b1fc4a83c420792da3542fd31179ed5f517"
+checksum = "bb21c69b9fea5e15dbc1049e4b77145dd0ba1c84019c488102de0dc4ea4b0a27"
dependencies = [
"cfg-if",
"rustix",
@@ -318,9 +308,13 @@ dependencies = [
[[package]]
name = "io-lifetimes"
-version = "0.7.2"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24c3f4eff5495aee4c0399d7b6a0dc2b6e81be84242ffbfcf253ebacccc1d0cb"
+checksum = "a7d367024b3f3414d8e01f437f704f41a9f64ab36f9067fa73e526ad4c763c87"
+dependencies = [
+ "libc",
+ "windows-sys",
+]
[[package]]
name = "itoa"
@@ -336,15 +330,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.126"
+version = "0.2.137"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
[[package]]
name = "linux-raw-sys"
-version = "0.0.46"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d"
+checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
[[package]]
name = "log"
@@ -374,18 +368,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
-version = "0.6.5"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "ntapi"
-version = "0.3.7"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f"
+checksum = "bc51db7b362b205941f71232e56c625156eb9a929f8cf74a428fd5bc094a4afc"
dependencies = [
"winapi",
]
@@ -401,6 +395,15 @@ dependencies = [
]
[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
name = "once_cell"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -433,14 +436,14 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
[[package]]
name = "pretty_assertions"
-version = "0.7.2"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cab0e7c02cf376875e9335e0ba1da535775beb5450d21e1dffca068818ed98b"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
dependencies = [
- "ansi_term",
"ctor",
"diff",
"output_vt100",
+ "yansi",
]
[[package]]
@@ -463,11 +466,10 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.5.3"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
dependencies = [
- "autocfg",
"crossbeam-deque",
"either",
"rayon-core",
@@ -475,9 +477,9 @@ dependencies = [
[[package]]
name = "rayon-core"
-version = "1.9.3"
+version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -519,9 +521,9 @@ checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
[[package]]
name = "rustix"
-version = "0.35.6"
+version = "0.36.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef258c11e17f5c01979a10543a30a4e12faef6aab217a74266e747eefa3aed88"
+checksum = "0b1fbb4dfc4eb1d390c02df47760bb19a84bb80b301ecc947ab5406394d8223e"
dependencies = [
"bitflags",
"errno",
@@ -607,9 +609,9 @@ dependencies = [
[[package]]
name = "sysinfo"
-version = "0.24.2"
+version = "0.26.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a2809487b962344ca55d9aea565f9ffbcb6929780802217acc82561f6746770"
+checksum = "c375d5fd899e32847b8566e10598d6e9f1d9b55ec6de3cdf9e7da4bdc51371bc"
dependencies = [
"cfg-if",
"core-foundation-sys",
@@ -717,46 +719,60 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
+ "windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+
+[[package]]
name = "windows_aarch64_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
[[package]]
name = "windows_i686_gnu"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
[[package]]
name = "windows_i686_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.36.1"
+version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.36.1"
+version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
[[package]]
name = "xattr"
@@ -775,3 +791,9 @@ checksum = "c179869f34fc7c01830d3ce7ea2086bc3a07e0d35289b667d0a8bf910258926c"
dependencies = [
"lzma-sys",
]
+
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 95e711737..ccc7ec1fc 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -36,12 +36,13 @@ test = false
[dependencies]
cmake = "0.1.38"
-fd-lock = "3.0.6"
+fd-lock = "3.0.8"
filetime = "0.2"
getopts = "0.2.19"
cc = "1.0.69"
libc = "0.2"
hex = "0.4"
+object = { version = "0.29.0", default-features = false, features = ["archive", "coff", "read_core", "unaligned"] }
serde = { version = "1.0.8", features = ["derive"] }
serde_json = "1.0.2"
sha2 = "0.10"
@@ -54,7 +55,7 @@ xz2 = "0.1"
walkdir = "2"
# Dependencies needed by the build-metrics feature
-sysinfo = { version = "0.24.1", optional = true }
+sysinfo = { version = "0.26.0", optional = true }
[target.'cfg(windows)'.dependencies.winapi]
version = "0.3"
@@ -71,7 +72,7 @@ features = [
]
[dev-dependencies]
-pretty_assertions = "0.7"
+pretty_assertions = "1.2"
[features]
build-metrics = ["sysinfo"]
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
index 9b4861ccd..be69f819c 100644
--- a/src/bootstrap/bin/main.rs
+++ b/src/bootstrap/bin/main.rs
@@ -35,7 +35,7 @@ fn main() {
// NOTE: Since `./configure` generates a `config.toml`, distro maintainers will see the
// changelog warning, not the `x.py setup` message.
- let suggest_setup = !config.config.exists() && !matches!(config.cmd, Subcommand::Setup { .. });
+ let suggest_setup = config.config.is_none() && !matches!(config.cmd, Subcommand::Setup { .. });
if suggest_setup {
println!("warning: you have not made a `config.toml`");
println!(
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index 776d73b98..1d5260228 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -97,13 +97,7 @@ fn main() {
// This... is a bit of a hack how we detect this. Ideally this
// information should be encoded in the crate I guess? Would likely
// require an RFC amendment to RFC 1513, however.
- //
- // `compiler_builtins` are unconditionally compiled with panic=abort to
- // workaround undefined references to `rust_eh_unwind_resume` generated
- // otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
- if crate_name == Some("panic_abort")
- || crate_name == Some("compiler_builtins") && stage != "0"
- {
+ if crate_name == Some("panic_abort") {
cmd.arg("-C").arg("panic=abort");
}
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
index e69cab956..23828f475 100644
--- a/src/bootstrap/bin/rustdoc.rs
+++ b/src/bootstrap/bin/rustdoc.rs
@@ -55,13 +55,9 @@ fn main() {
arg.push(&linker);
cmd.arg(arg);
}
- if env::var_os("RUSTDOC_FUSE_LD_LLD").is_some() {
+ if let Ok(no_threads) = env::var("RUSTDOC_LLD_NO_THREADS") {
cmd.arg("-Clink-arg=-fuse-ld=lld");
- if cfg!(windows) {
- cmd.arg("-Clink-arg=-Wl,/threads:1");
- } else {
- cmd.arg("-Clink-arg=-Wl,--threads=1");
- }
+ cmd.arg(format!("-Clink-arg=-Wl,{}", no_threads));
}
// Cargo doesn't pass RUSTDOCFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 57128685d..2d5018d93 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -441,6 +441,7 @@ class RustBuild(object):
self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root))
self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root))
+ self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root))
lib_dir = "{}/lib".format(bin_root)
for lib in os.listdir(lib_dir):
if lib.endswith(".so"):
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 8b144f146..8ee6d49da 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -2,14 +2,13 @@ use std::any::{type_name, Any};
use std::cell::{Cell, RefCell};
use std::collections::BTreeSet;
use std::env;
-use std::ffi::{OsStr, OsString};
+use std::ffi::OsStr;
use std::fmt::{Debug, Write};
-use std::fs::{self, File};
+use std::fs::{self};
use std::hash::Hash;
-use std::io::{BufRead, BufReader, ErrorKind};
use std::ops::Deref;
use std::path::{Component, Path, PathBuf};
-use std::process::{Command, Stdio};
+use std::process::Command;
use std::time::{Duration, Instant};
use crate::cache::{Cache, Interned, INTERNER};
@@ -24,14 +23,12 @@ use crate::test;
use crate::tool::{self, SourceType};
use crate::util::{self, add_dylib_path, add_link_lib_path, exe, libdir, output, t};
use crate::EXTRA_CHECK_CFGS;
-use crate::{check, Config};
-use crate::{compile, Crate};
+use crate::{check, compile, Crate};
use crate::{Build, CLang, DocTests, GitRepo, Mode};
pub use crate::Compiler;
// FIXME: replace with std::lazy after it gets stabilized and reaches beta
-use once_cell::sync::{Lazy, OnceCell};
-use xz2::bufread::XzDecoder;
+use once_cell::sync::Lazy;
pub struct Builder<'a> {
pub build: &'a Build,
@@ -621,6 +618,8 @@ impl<'a> Builder<'a> {
check::CodegenBackend,
check::Clippy,
check::Miri,
+ check::CargoMiri,
+ check::MiroptTestTools,
check::Rls,
check::RustAnalyzer,
check::Rustfmt,
@@ -645,6 +644,7 @@ impl<'a> Builder<'a> {
test::CrateLibrustc,
test::CrateRustdoc,
test::CrateRustdocJsonTypes,
+ test::CrateJsonDocLint,
test::Linkcheck,
test::TierCheck,
test::ReplacePlaceholderTest,
@@ -753,6 +753,9 @@ impl<'a> Builder<'a> {
run::BuildManifest,
run::BumpStage0,
run::ReplaceVersionPlaceholder,
+ run::Miri,
+ run::CollectLicenseMetadata,
+ run::GenerateCopyright,
),
// These commands either don't use paths, or they're special-cased in Build::build()
Kind::Clean | Kind::Format | Kind::Setup => vec![],
@@ -816,7 +819,7 @@ impl<'a> Builder<'a> {
Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
- Subcommand::Run { ref paths } => (Kind::Run, &paths[..]),
+ Subcommand::Run { ref paths, .. } => (Kind::Run, &paths[..]),
Subcommand::Format { .. } => (Kind::Format, &[][..]),
Subcommand::Clean { .. } | Subcommand::Setup { .. } => {
panic!()
@@ -850,241 +853,6 @@ impl<'a> Builder<'a> {
StepDescription::run(v, self, paths);
}
- /// Modifies the interpreter section of 'fname' to fix the dynamic linker,
- /// or the RPATH section, to fix the dynamic library search path
- ///
- /// This is only required on NixOS and uses the PatchELF utility to
- /// change the interpreter/RPATH of ELF executables.
- ///
- /// Please see https://nixos.org/patchelf.html for more information
- pub(crate) fn fix_bin_or_dylib(&self, fname: &Path) {
- // FIXME: cache NixOS detection?
- match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() {
- Err(_) => return,
- Ok(output) if !output.status.success() => return,
- Ok(output) => {
- let mut s = output.stdout;
- if s.last() == Some(&b'\n') {
- s.pop();
- }
- if s != b"Linux" {
- return;
- }
- }
- }
-
- // If the user has asked binaries to be patched for Nix, then
- // don't check for NixOS or `/lib`, just continue to the patching.
- // NOTE: this intentionally comes after the Linux check:
- // - patchelf only works with ELF files, so no need to run it on Mac or Windows
- // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc.
- if !self.config.patch_binaries_for_nix {
- // Use `/etc/os-release` instead of `/etc/NIXOS`.
- // The latter one does not exist on NixOS when using tmpfs as root.
- const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""];
- let os_release = match File::open("/etc/os-release") {
- Err(e) if e.kind() == ErrorKind::NotFound => return,
- Err(e) => panic!("failed to access /etc/os-release: {}", e),
- Ok(f) => f,
- };
- if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) {
- return;
- }
- if Path::new("/lib").exists() {
- return;
- }
- }
-
- // At this point we're pretty sure the user is running NixOS or using Nix
- println!("info: you seem to be using Nix. Attempting to patch {}", fname.display());
-
- // Only build `.nix-deps` once.
- static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new();
- let mut nix_build_succeeded = true;
- let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| {
- // Run `nix-build` to "build" each dependency (which will likely reuse
- // the existing `/nix/store` copy, or at most download a pre-built copy).
- //
- // Importantly, we create a gc-root called `.nix-deps` in the `build/`
- // directory, but still reference the actual `/nix/store` path in the rpath
- // as it makes it significantly more robust against changes to the location of
- // the `.nix-deps` location.
- //
- // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
- // zlib: Needed as a system dependency of `libLLVM-*.so`.
- // patchelf: Needed for patching ELF binaries (see doc comment above).
- let nix_deps_dir = self.out.join(".nix-deps");
- const NIX_EXPR: &str = "
- with (import <nixpkgs> {});
- symlinkJoin {
- name = \"rust-stage0-dependencies\";
- paths = [
- zlib
- patchelf
- stdenv.cc.bintools
- ];
- }
- ";
- nix_build_succeeded = self.try_run(Command::new("nix-build").args(&[
- Path::new("-E"),
- Path::new(NIX_EXPR),
- Path::new("-o"),
- &nix_deps_dir,
- ]));
- nix_deps_dir
- });
- if !nix_build_succeeded {
- return;
- }
-
- let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf"));
- let rpath_entries = {
- // ORIGIN is a relative default, all binary and dynamic libraries we ship
- // appear to have this (even when `../lib` is redundant).
- // NOTE: there are only two paths here, delimited by a `:`
- let mut entries = OsString::from("$ORIGIN/../lib:");
- entries.push(t!(fs::canonicalize(nix_deps_dir)));
- entries.push("/lib");
- entries
- };
- patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
- if !fname.extension().map_or(false, |ext| ext == "so") {
- // Finally, set the correct .interp for binaries
- let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
- // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
- let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
- patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
- }
-
- self.try_run(patchelf.arg(fname));
- }
-
- pub(crate) fn download_component(&self, url: &str, dest_path: &Path, help_on_error: &str) {
- self.verbose(&format!("download {url}"));
- // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/.
- let tempfile = self.tempdir().join(dest_path.file_name().unwrap());
- // While bootstrap itself only supports http and https downloads, downstream forks might
- // need to download components from other protocols. The match allows them adding more
- // protocols without worrying about merge conflicts if we change the HTTP implementation.
- match url.split_once("://").map(|(proto, _)| proto) {
- Some("http") | Some("https") => {
- self.download_http_with_retries(&tempfile, url, help_on_error)
- }
- Some(other) => panic!("unsupported protocol {other} in {url}"),
- None => panic!("no protocol in {url}"),
- }
- t!(std::fs::rename(&tempfile, dest_path));
- }
-
- fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) {
- println!("downloading {}", url);
- // Try curl. If that fails and we are on windows, fallback to PowerShell.
- let mut curl = Command::new("curl");
- curl.args(&[
- "-#",
- "-y",
- "30",
- "-Y",
- "10", // timeout if speed is < 10 bytes/sec for > 30 seconds
- "--connect-timeout",
- "30", // timeout if cannot connect within 30 seconds
- "--retry",
- "3",
- "-Sf",
- "-o",
- ]);
- curl.arg(tempfile);
- curl.arg(url);
- if !self.check_run(&mut curl) {
- if self.build.build.contains("windows-msvc") {
- println!("Fallback to PowerShell");
- for _ in 0..3 {
- if self.try_run(Command::new("PowerShell.exe").args(&[
- "/nologo",
- "-Command",
- "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
- &format!(
- "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
- url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"),
- ),
- ])) {
- return;
- }
- println!("\nspurious failure, trying again");
- }
- }
- if !help_on_error.is_empty() {
- eprintln!("{}", help_on_error);
- }
- crate::detail_exit(1);
- }
- }
-
- pub(crate) fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
- println!("extracting {} to {}", tarball.display(), dst.display());
- if !dst.exists() {
- t!(fs::create_dir_all(dst));
- }
-
- // `tarball` ends with `.tar.xz`; strip that suffix
- // example: `rust-dev-nightly-x86_64-unknown-linux-gnu`
- let uncompressed_filename =
- Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap();
- let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
-
- // decompress the file
- let data = t!(File::open(tarball));
- let decompressor = XzDecoder::new(BufReader::new(data));
-
- let mut tar = tar::Archive::new(decompressor);
- for member in t!(tar.entries()) {
- let mut member = t!(member);
- let original_path = t!(member.path()).into_owned();
- // skip the top-level directory
- if original_path == directory_prefix {
- continue;
- }
- let mut short_path = t!(original_path.strip_prefix(directory_prefix));
- if !short_path.starts_with(pattern) {
- continue;
- }
- short_path = t!(short_path.strip_prefix(pattern));
- let dst_path = dst.join(short_path);
- self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display()));
- if !t!(member.unpack_in(dst)) {
- panic!("path traversal attack ??");
- }
- let src_path = dst.join(original_path);
- if src_path.is_dir() && dst_path.exists() {
- continue;
- }
- t!(fs::rename(src_path, dst_path));
- }
- t!(fs::remove_dir_all(dst.join(directory_prefix)));
- }
-
- /// Returns whether the SHA256 checksum of `path` matches `expected`.
- pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool {
- use sha2::Digest;
-
- self.verbose(&format!("verifying {}", path.display()));
- let mut hasher = sha2::Sha256::new();
- // FIXME: this is ok for rustfmt (4.1 MB large at time of writing), but it seems memory-intensive for rustc and larger components.
- // Consider using streaming IO instead?
- let contents = if self.config.dry_run { vec![] } else { t!(fs::read(path)) };
- hasher.update(&contents);
- let found = hex::encode(hasher.finalize().as_slice());
- let verified = found == expected;
- if !verified && !self.config.dry_run {
- println!(
- "invalid checksum: \n\
- found: {found}\n\
- expected: {expected}",
- );
- }
- return verified;
- }
-
/// Obtain a compiler at a given stage and for a given host. Explicitly does
/// not take `Compiler` since all `Compiler` instances are meant to be
/// obtained through this function, since it ensures that they are valid
@@ -1289,7 +1057,7 @@ impl<'a> Builder<'a> {
/// Note that this returns `None` if LLVM is disabled, or if we're in a
/// check build or dry-run, where there's no need to build all of LLVM.
fn llvm_config(&self, target: TargetSelection) -> Option<PathBuf> {
- if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run {
+ if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run() {
let llvm_config = self.ensure(native::Llvm { target });
if llvm_config.is_file() {
return Some(llvm_config);
@@ -1298,19 +1066,6 @@ impl<'a> Builder<'a> {
None
}
- /// Convenience wrapper to allow `builder.llvm_link_shared()` instead of `builder.config.llvm_link_shared(&builder)`.
- pub(crate) fn llvm_link_shared(&self) -> bool {
- Config::llvm_link_shared(self)
- }
-
- pub(crate) fn download_rustc(&self) -> bool {
- Config::download_rustc(self)
- }
-
- pub(crate) fn initial_rustfmt(&self) -> Option<PathBuf> {
- Config::initial_rustfmt(self)
- }
-
/// Prepares an invocation of `cargo` to be run.
///
/// This will create a `Command` that represents a pending execution of
@@ -1342,7 +1097,13 @@ impl<'a> Builder<'a> {
let my_out = match mode {
// This is the intended out directory for compiler documentation.
Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target),
- Mode::Std => out_dir.join(target.triple).join("doc"),
+ Mode::Std => {
+ if self.config.cmd.json() {
+ out_dir.join(target.triple).join("json-doc")
+ } else {
+ out_dir.join(target.triple).join("doc")
+ }
+ }
_ => panic!("doc mode {:?} not expected", mode),
};
let rustdoc = self.rustdoc(compiler);
@@ -1641,7 +1402,7 @@ impl<'a> Builder<'a> {
//
// Only clear out the directory if we're compiling std; otherwise, we
// should let Cargo take care of things for us (via depdep info)
- if !self.config.dry_run && mode == Mode::Std && cmd == "build" {
+ if !self.config.dry_run() && mode == Mode::Std && cmd == "build" {
self.clear_if_dirty(&out_dir, &self.rustc(compiler));
}
@@ -2139,7 +1900,7 @@ impl<'a> Builder<'a> {
(out, dur - deps)
};
- if self.config.print_step_timings && !self.config.dry_run {
+ if self.config.print_step_timings && !self.config.dry_run() {
let step_string = format!("{:?}", step);
let brace_index = step_string.find("{").unwrap_or(0);
let type_string = type_name::<S>();
@@ -2205,6 +1966,24 @@ impl<'a> Builder<'a> {
false
}
+
+ pub(crate) fn maybe_open_in_browser<S: Step>(&self, path: impl AsRef<Path>) {
+ if self.was_invoked_explicitly::<S>(Kind::Doc) {
+ self.open_in_browser(path);
+ }
+ }
+
+ pub(crate) fn open_in_browser(&self, path: impl AsRef<Path>) {
+ if self.config.dry_run() || !self.config.cmd.open() {
+ return;
+ }
+
+ let path = path.as_ref();
+ self.info(&format!("Opening doc {}", path.display()));
+ if let Err(err) = opener::open(path) {
+ self.info(&format!("{}\n", err));
+ }
+ }
}
#[cfg(test)]
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index 88bbcc93d..5f21d2b00 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -1,5 +1,5 @@
use super::*;
-use crate::config::{Config, TargetSelection};
+use crate::config::{Config, DryRun, TargetSelection};
use std::thread;
fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config {
@@ -10,7 +10,7 @@ fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config
let mut config = Config::parse(cmd);
// don't save toolstates
config.save_toolstates = None;
- config.dry_run = true;
+ config.dry_run = DryRun::SelfCheck;
// Ignore most submodules, since we don't need them for a dry run.
// But make sure to check out the `doc` and `rust-analyzer` submodules, since some steps need them
diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs
index be5c9bb07..05f25af68 100644
--- a/src/bootstrap/cache.rs
+++ b/src/bootstrap/cache.rs
@@ -89,16 +89,16 @@ impl<T: Internable + Hash> Hash for Interned<T> {
impl<T: Internable + Deref> Deref for Interned<T> {
type Target = T::Target;
- fn deref(&self) -> &'static Self::Target {
+ fn deref(&self) -> &Self::Target {
let l = T::intern_cache().lock().unwrap();
- unsafe { mem::transmute::<&Self::Target, &'static Self::Target>(l.get(*self)) }
+ unsafe { mem::transmute::<&Self::Target, &Self::Target>(l.get(*self)) }
}
}
impl<T: Internable + AsRef<U>, U: ?Sized> AsRef<U> for Interned<T> {
- fn as_ref(&self) -> &'static U {
+ fn as_ref(&self) -> &U {
let l = T::intern_cache().lock().unwrap();
- unsafe { mem::transmute::<&U, &'static U>(l.get(*self).as_ref()) }
+ unsafe { mem::transmute::<&U, &U>(l.get(*self).as_ref()) }
}
}
diff --git a/src/bootstrap/cc_detect.rs b/src/bootstrap/cc_detect.rs
index 759a99c33..7128d542a 100644
--- a/src/bootstrap/cc_detect.rs
+++ b/src/bootstrap/cc_detect.rs
@@ -166,14 +166,7 @@ fn set_compiler(
// compiler already takes into account the triple in question.
t if t.contains("android") => {
if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
- let target = target
- .triple
- .replace("armv7neon", "arm")
- .replace("armv7", "arm")
- .replace("thumbv7neon", "arm")
- .replace("thumbv7", "arm");
- let compiler = format!("{}-{}", target, compiler.clang());
- cfg.compiler(ndk.join("bin").join(compiler));
+ cfg.compiler(ndk_compiler(compiler, &*target.triple, ndk));
}
}
@@ -225,8 +218,18 @@ fn set_compiler(
}
}
+pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf {
+ let triple_translated = triple
+ .replace("armv7neon", "arm")
+ .replace("armv7", "arm")
+ .replace("thumbv7neon", "arm")
+ .replace("thumbv7", "arm");
+ let compiler = format!("{}-{}", triple_translated, compiler.clang());
+ ndk.join("bin").join(compiler)
+}
+
/// The target programming language for a native compiler.
-enum Language {
+pub(crate) enum Language {
/// The compiler is targeting C.
C,
/// The compiler is targeting C++.
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 258352a21..eae81b9fc 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -13,8 +13,10 @@ use crate::util::output;
use crate::util::t;
use crate::Build;
+#[derive(Clone, Default)]
pub enum GitInfo {
/// This is not a git repository.
+ #[default]
Absent,
/// This is a git repository.
/// If the info should be used (`ignore_git` is false), this will be
@@ -25,6 +27,7 @@ pub enum GitInfo {
RecordedForTarball(Info),
}
+#[derive(Clone)]
pub struct Info {
pub commit_date: String,
pub sha: String,
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index 229851238..2e1bd8d6d 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -451,16 +451,16 @@ macro_rules! tool_check_step {
}
tool_check_step!(Rustdoc, "src/tools/rustdoc", "src/librustdoc", SourceType::InTree);
-// Clippy and Rustfmt are hybrids. They are external tools, but use a git subtree instead
+// Clippy, miri and Rustfmt are hybrids. They are external tools, but use a git subtree instead
// of a submodule. Since the SourceType only drives the deny-warnings
// behavior, treat it as in-tree so that any new warnings in clippy will be
// rejected.
tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree);
-// Miri on the other hand is treated as out of tree, since InTree also causes it to
-// be run as part of `check`, which can fail on platforms which libffi-sys has no support for.
-tool_check_step!(Miri, "src/tools/miri", SourceType::Submodule);
+tool_check_step!(Miri, "src/tools/miri", SourceType::InTree);
+tool_check_step!(CargoMiri, "src/tools/miri/cargo-miri", SourceType::InTree);
tool_check_step!(Rls, "src/tools/rls", SourceType::InTree);
tool_check_step!(Rustfmt, "src/tools/rustfmt", SourceType::InTree);
+tool_check_step!(MiroptTestTools, "src/tools/miropt-test-tools", SourceType::InTree);
tool_check_step!(Bootstrap, "src/bootstrap", SourceType::InTree, false);
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index e02a10b81..0deed3f99 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -206,7 +206,6 @@ fn copy_third_party_objects(
}
if target == "x86_64-fortanix-unknown-sgx"
- || target.contains("pc-windows-gnullvm")
|| builder.config.llvm_libunwind(target) == LlvmLibunwind::InTree
&& (target.contains("linux") || target.contains("fuchsia"))
{
@@ -299,7 +298,9 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// Determine if we're going to compile in optimized C intrinsics to
// the `compiler-builtins` crate. These intrinsics live in LLVM's
- // `compiler-rt` repository.
+ // `compiler-rt` repository, but our `src/llvm-project` submodule isn't
+ // always checked out, so we need to conditionally look for this. (e.g. if
+ // an external LLVM is used we skip the LLVM submodule checkout).
//
// Note that this shouldn't affect the correctness of `compiler-builtins`,
// but only its speed. Some intrinsics in C haven't been translated to Rust
@@ -310,15 +311,8 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// If `compiler-rt` is available ensure that the `c` feature of the
// `compiler-builtins` crate is enabled and it's configured to learn where
// `compiler-rt` is located.
- let compiler_builtins_c_feature = if builder.config.optimized_compiler_builtins {
- if !builder.is_rust_llvm(target) {
- panic!(
- "need a managed LLVM submodule for optimized intrinsics support; unset `llvm-config` or `optimized-compiler-builtins`"
- );
- }
-
- builder.update_submodule(&Path::new("src").join("llvm-project"));
- let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
+ let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
+ let compiler_builtins_c_feature = if compiler_builtins_root.exists() {
// Note that `libprofiler_builtins/build.rs` also computes this so if
// you're changing something here please also change that.
cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
@@ -452,7 +446,7 @@ fn copy_sanitizers(
) -> Vec<PathBuf> {
let runtimes: Vec<native::SanitizerRuntime> = builder.ensure(native::Sanitizers { target });
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return Vec::new();
}
@@ -769,10 +763,10 @@ pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetS
cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
- if let Some(ref ver_date) = builder.rust_info.commit_date() {
+ if let Some(ref ver_date) = builder.rust_info().commit_date() {
cargo.env("CFG_VER_DATE", ver_date);
}
- if let Some(ref ver_hash) = builder.rust_info.sha() {
+ if let Some(ref ver_hash) = builder.rust_info().sha() {
cargo.env("CFG_VER_HASH", ver_hash);
}
if !builder.unstable_features() {
@@ -991,7 +985,7 @@ impl Step for CodegenBackend {
compiler.stage, backend, &compiler.host, target
));
let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false);
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
let mut files = files.into_iter().filter(|f| {
@@ -1039,7 +1033,7 @@ fn copy_codegen_backends_to_sysroot(
let dst = builder.sysroot_codegen_backends(target_compiler);
t!(fs::create_dir_all(&dst), dst);
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
@@ -1127,13 +1121,18 @@ impl Step for Sysroot {
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
let host_dir = builder.out.join(&compiler.host.triple);
- let sysroot = if compiler.stage == 0 {
- host_dir.join("stage0-sysroot")
- } else if builder.download_rustc() {
- host_dir.join("ci-rustc-sysroot")
- } else {
- host_dir.join(format!("stage{}", compiler.stage))
+
+ let sysroot_dir = |stage| {
+ if stage == 0 {
+ host_dir.join("stage0-sysroot")
+ } else if builder.download_rustc() && compiler.stage != builder.top_stage {
+ host_dir.join("ci-rustc-sysroot")
+ } else {
+ host_dir.join(format!("stage{}", stage))
+ }
};
+ let sysroot = sysroot_dir(compiler.stage);
+
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
@@ -1144,9 +1143,15 @@ impl Step for Sysroot {
"Cross-compiling is not yet supported with `download-rustc`",
);
- // #102002, cleanup stage1 and stage0-sysroot folders when using download-rustc so people don't use old versions of the toolchain by accident.
- let _ = fs::remove_dir_all(host_dir.join("stage1"));
- let _ = fs::remove_dir_all(host_dir.join("stage0-sysroot"));
+ // #102002, cleanup old toolchain folders when using download-rustc so people don't use them by accident.
+ for stage in 0..=2 {
+ if stage != compiler.stage {
+ let dir = sysroot_dir(stage);
+ if !dir.ends_with("ci-rustc-sysroot") {
+ let _ = fs::remove_dir_all(dir);
+ }
+ }
+ }
// Copy the compiler into the correct sysroot.
let ci_rustc_dir =
@@ -1337,7 +1342,7 @@ impl Step for Assemble {
if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) {
let llvm_config_bin = builder.ensure(native::Llvm { target: target_compiler.host });
- if !builder.config.dry_run {
+ if !builder.config.dry_run() {
let llvm_bin_dir = output(Command::new(llvm_config_bin).arg("--bindir"));
let llvm_bin_dir = Path::new(llvm_bin_dir.trim());
@@ -1407,7 +1412,7 @@ pub fn run_cargo(
additional_target_deps: Vec<(PathBuf, DependencyType)>,
is_check: bool,
) -> Vec<PathBuf> {
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return Vec::new();
}
@@ -1547,7 +1552,7 @@ pub fn stream_cargo(
cb: &mut dyn FnMut(CargoMessage<'_>),
) -> bool {
let mut cargo = Command::from(cargo);
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return true;
}
// Instruct Cargo to give us json messages on stdout, critically leaving
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index a8c403675..d8c15c76e 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -7,19 +7,19 @@ use std::cell::{Cell, RefCell};
use std::cmp;
use std::collections::{HashMap, HashSet};
use std::env;
-use std::ffi::OsStr;
use std::fmt;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::FromStr;
-use crate::builder::{Builder, TaskPath};
+use crate::builder::TaskPath;
use crate::cache::{Interned, INTERNER};
-use crate::channel::GitInfo;
+use crate::cc_detect::{ndk_compiler, Language};
+use crate::channel::{self, GitInfo};
pub use crate::flags::Subcommand;
use crate::flags::{Color, Flags};
-use crate::util::{exe, output, program_out_of_date, t};
+use crate::util::{exe, output, t};
use once_cell::sync::OnceCell;
use serde::{Deserialize, Deserializer};
@@ -33,6 +33,17 @@ macro_rules! check_ci_llvm {
};
}
+#[derive(Clone, Default)]
+pub enum DryRun {
+ /// This isn't a dry run.
+ #[default]
+ Disabled,
+ /// This is a dry run enabled by bootstrap itself, so it can verify that no work is done.
+ SelfCheck,
+ /// This is a dry run enabled by the `--dry-run` flag.
+ UserSelected,
+}
+
/// Global configuration for the entire build and/or bootstrap.
///
/// This structure is derived from a combination of both `config.toml` and
@@ -73,8 +84,6 @@ pub struct Config {
pub color: Color,
pub patch_binaries_for_nix: bool,
pub stage0_metadata: Stage0Metadata,
- /// Whether to use the `c` feature of the `compiler_builtins` crate.
- pub optimized_compiler_builtins: bool,
pub on_fail: Option<String>,
pub stage: u32,
@@ -82,11 +91,11 @@ pub struct Config {
pub keep_stage_std: Vec<u32>,
pub src: PathBuf,
/// defaults to `config.toml`
- pub config: PathBuf,
+ pub config: Option<PathBuf>,
pub jobs: Option<u32>,
pub cmd: Subcommand,
pub incremental: bool,
- pub dry_run: bool,
+ pub dry_run: DryRun,
/// `None` if we shouldn't download CI compiler artifacts, or the commit to download if we should.
#[cfg(not(test))]
download_rustc_commit: Option<String>,
@@ -204,6 +213,7 @@ pub struct Config {
pub npm: Option<PathBuf>,
pub gdb: Option<PathBuf>,
pub python: Option<PathBuf>,
+ pub reuse: Option<PathBuf>,
pub cargo_native_static: bool,
pub configure_args: Vec<String>,
@@ -215,6 +225,7 @@ pub struct Config {
#[cfg(test)]
pub initial_rustfmt: RefCell<RustfmtState>,
pub out: PathBuf,
+ pub rust_info: channel::GitInfo,
}
#[derive(Default, Deserialize)]
@@ -601,6 +612,7 @@ define_config! {
nodejs: Option<String> = "nodejs",
npm: Option<String> = "npm",
python: Option<String> = "python",
+ reuse: Option<String> = "reuse",
locked_deps: Option<bool> = "locked-deps",
vendor: Option<bool> = "vendor",
full_bootstrap: Option<bool> = "full-bootstrap",
@@ -624,7 +636,6 @@ define_config! {
bench_stage: Option<u32> = "bench-stage",
patch_binaries_for_nix: Option<bool> = "patch-binaries-for-nix",
metrics: Option<bool> = "metrics",
- optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
}
}
@@ -784,7 +795,7 @@ impl Config {
config.llvm_optimize = true;
config.ninja_in_file = true;
config.llvm_version_check = true;
- config.llvm_static_stdcpp = true;
+ config.llvm_static_stdcpp = false;
config.backtrace = true;
config.rust_optimize = true;
config.rust_optimize_tests = true;
@@ -823,7 +834,7 @@ impl Config {
config.jobs = flags.jobs.map(threads_from_config);
config.cmd = flags.cmd;
config.incremental = flags.incremental;
- config.dry_run = flags.dry_run;
+ config.dry_run = if flags.dry_run { DryRun::UserSelected } else { DryRun::Disabled };
config.keep_stage = flags.keep_stage;
config.keep_stage_std = flags.keep_stage_std;
config.color = flags.color;
@@ -929,8 +940,10 @@ impl Config {
// Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path,
// but not if `config.toml` hasn't been created.
let mut toml = if !using_default_path || toml_path.exists() {
+ config.config = Some(toml_path.clone());
get_toml(&toml_path)
} else {
+ config.config = None;
TomlConfig::default()
};
@@ -945,7 +958,6 @@ impl Config {
}
config.changelog_seen = toml.changelog_seen;
- config.config = toml_path;
let build = toml.build.unwrap_or_default();
@@ -967,7 +979,7 @@ impl Config {
.unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo"));
// NOTE: it's important this comes *after* we set `initial_rustc` just above.
- if config.dry_run {
+ if config.dry_run() {
let dir = config.out.join("tmp-dry-run");
t!(fs::create_dir_all(&dir));
config.out = dir;
@@ -994,6 +1006,7 @@ impl Config {
config.npm = build.npm.map(PathBuf::from);
config.gdb = build.gdb.map(PathBuf::from);
config.python = build.python.map(PathBuf::from);
+ config.reuse = build.reuse.map(PathBuf::from);
config.submodules = build.submodules;
set(&mut config.low_priority, build.low_priority);
set(&mut config.compiler_docs, build.compiler_docs);
@@ -1013,7 +1026,6 @@ impl Config {
set(&mut config.print_step_timings, build.print_step_timings);
set(&mut config.print_step_rusage, build.print_step_rusage);
set(&mut config.patch_binaries_for_nix, build.patch_binaries_for_nix);
- set(&mut config.optimized_compiler_builtins, build.optimized_compiler_builtins);
config.verbose = cmp::max(config.verbose, flags.verbose);
@@ -1196,7 +1208,7 @@ impl Config {
config.rust_codegen_units_std = rust.codegen_units_std.map(threads_from_config);
config.rust_profile_use = flags.rust_profile_use.or(rust.profile_use);
config.rust_profile_generate = flags.rust_profile_generate.or(rust.profile_generate);
- config.download_rustc_commit = download_ci_rustc_commit(&config, rust.download_rustc);
+ config.download_rustc_commit = config.download_ci_rustc_commit(rust.download_rustc);
config.rust_lto = rust
.lto
@@ -1229,8 +1241,12 @@ impl Config {
if let Some(s) = cfg.no_std {
target.no_std = s;
}
- target.cc = cfg.cc.map(PathBuf::from);
- target.cxx = cfg.cxx.map(PathBuf::from);
+ target.cc = cfg.cc.map(PathBuf::from).or_else(|| {
+ target.ndk.as_ref().map(|ndk| ndk_compiler(Language::C, &triple, ndk))
+ });
+ target.cxx = cfg.cxx.map(PathBuf::from).or_else(|| {
+ target.ndk.as_ref().map(|ndk| ndk_compiler(Language::CPlusPlus, &triple, ndk))
+ });
target.ar = cfg.ar.map(PathBuf::from);
target.ranlib = cfg.ranlib.map(PathBuf::from);
target.linker = cfg.linker.map(PathBuf::from);
@@ -1318,6 +1334,7 @@ impl Config {
let default = config.channel == "dev";
config.ignore_git = ignore_git.unwrap_or(default);
+ config.rust_info = GitInfo::new(config.ignore_git, &config.src);
let download_rustc = config.download_rustc_commit.is_some();
// See https://github.com/rust-lang/compiler-team/issues/326
@@ -1375,6 +1392,13 @@ impl Config {
config
}
+ pub(crate) fn dry_run(&self) -> bool {
+ match self.dry_run {
+ DryRun::Disabled => false,
+ DryRun::SelfCheck | DryRun::UserSelected => true,
+ }
+ }
+
/// A git invocation which runs inside the source directory.
///
/// Use this rather than `Command::new("git")` in order to support out-of-tree builds.
@@ -1384,21 +1408,46 @@ impl Config {
git
}
- pub(crate) fn artifact_channel(&self, builder: &Builder<'_>, commit: &str) -> String {
- if builder.rust_info.is_managed_git_subrepository() {
+ /// Bootstrap embeds a version number into the name of shared libraries it uploads in CI.
+ /// Return the version it would have used for the given commit.
+ pub(crate) fn artifact_version_part(&self, commit: &str) -> String {
+ let (channel, version) = if self.rust_info.is_managed_git_subrepository() {
let mut channel = self.git();
channel.arg("show").arg(format!("{}:src/ci/channel", commit));
let channel = output(&mut channel);
- channel.trim().to_owned()
- } else if let Ok(channel) = fs::read_to_string(builder.src.join("src/ci/channel")) {
- channel.trim().to_owned()
+ let mut version = self.git();
+ version.arg("show").arg(format!("{}:src/version", commit));
+ let version = output(&mut version);
+ (channel.trim().to_owned(), version.trim().to_owned())
} else {
- let src = builder.src.display();
- eprintln!("error: failed to determine artifact channel");
- eprintln!(
- "help: either use git or ensure that {src}/src/ci/channel contains the name of the channel to use"
- );
- panic!();
+ let channel = fs::read_to_string(self.src.join("src/ci/channel"));
+ let version = fs::read_to_string(self.src.join("src/version"));
+ match (channel, version) {
+ (Ok(channel), Ok(version)) => {
+ (channel.trim().to_owned(), version.trim().to_owned())
+ }
+ (channel, version) => {
+ let src = self.src.display();
+ eprintln!("error: failed to determine artifact channel and/or version");
+ eprintln!(
+ "help: consider using a git checkout or ensure these files are readable"
+ );
+ if let Err(channel) = channel {
+ eprintln!("reading {}/src/ci/channel failed: {:?}", src, channel);
+ }
+ if let Err(version) = version {
+ eprintln!("reading {}/src/version failed: {:?}", src, version);
+ }
+ panic!();
+ }
+ }
+ };
+
+ match channel.as_str() {
+ "stable" => version,
+ "beta" => channel,
+ "nightly" => channel,
+ other => unreachable!("{:?} is not recognized as a valid channel", other),
}
}
@@ -1437,17 +1486,17 @@ impl Config {
///
/// If `false`, llvm should be linked statically.
/// This is computed on demand since LLVM might have to first be downloaded from CI.
- pub(crate) fn llvm_link_shared(builder: &Builder<'_>) -> bool {
- let mut opt = builder.config.llvm_link_shared.get();
- if opt.is_none() && builder.config.dry_run {
+ pub(crate) fn llvm_link_shared(&self) -> bool {
+ let mut opt = self.llvm_link_shared.get();
+ if opt.is_none() && self.dry_run() {
// just assume static for now - dynamic linking isn't supported on all platforms
return false;
}
let llvm_link_shared = *opt.get_or_insert_with(|| {
- if builder.config.llvm_from_ci {
- crate::native::maybe_download_ci_llvm(builder);
- let ci_llvm = builder.config.ci_llvm_root();
+ if self.llvm_from_ci {
+ self.maybe_download_ci_llvm();
+ let ci_llvm = self.ci_llvm_root();
let link_type = t!(
std::fs::read_to_string(ci_llvm.join("link-type.txt")),
format!("CI llvm missing: {}", ci_llvm.display())
@@ -1459,36 +1508,42 @@ impl Config {
false
}
});
- builder.config.llvm_link_shared.set(opt);
+ self.llvm_link_shared.set(opt);
llvm_link_shared
}
/// Return whether we will use a downloaded, pre-compiled version of rustc, or just build from source.
- pub(crate) fn download_rustc(builder: &Builder<'_>) -> bool {
- static DOWNLOAD_RUSTC: OnceCell<bool> = OnceCell::new();
- if builder.config.dry_run && DOWNLOAD_RUSTC.get().is_none() {
+ pub(crate) fn download_rustc(&self) -> bool {
+ self.download_rustc_commit().is_some()
+ }
+
+ pub(crate) fn download_rustc_commit(&self) -> Option<&'static str> {
+ static DOWNLOAD_RUSTC: OnceCell<Option<String>> = OnceCell::new();
+ if self.dry_run() && DOWNLOAD_RUSTC.get().is_none() {
// avoid trying to actually download the commit
- return false;
+ return None;
}
- *DOWNLOAD_RUSTC.get_or_init(|| match &builder.config.download_rustc_commit {
- None => false,
- Some(commit) => {
- download_ci_rustc(builder, commit);
- true
- }
- })
+ DOWNLOAD_RUSTC
+ .get_or_init(|| match &self.download_rustc_commit {
+ None => None,
+ Some(commit) => {
+ self.download_ci_rustc(commit);
+ Some(commit.clone())
+ }
+ })
+ .as_deref()
}
- pub(crate) fn initial_rustfmt(builder: &Builder<'_>) -> Option<PathBuf> {
- match &mut *builder.config.initial_rustfmt.borrow_mut() {
+ pub(crate) fn initial_rustfmt(&self) -> Option<PathBuf> {
+ match &mut *self.initial_rustfmt.borrow_mut() {
RustfmtState::SystemToolchain(p) | RustfmtState::Downloaded(p) => Some(p.clone()),
RustfmtState::Unavailable => None,
r @ RustfmtState::LazyEvaluated => {
- if builder.config.dry_run {
+ if self.dry_run() {
return Some(PathBuf::new());
}
- let path = maybe_download_rustfmt(builder);
+ let path = self.maybe_download_rustfmt();
*r = if let Some(p) = &path {
RustfmtState::Downloaded(p.clone())
} else {
@@ -1499,8 +1554,10 @@ impl Config {
}
}
- pub fn verbose(&self) -> bool {
- self.verbose > 0
+ pub fn verbose(&self, msg: &str) {
+ if self.verbose > 0 {
+ println!("{}", msg);
+ }
}
pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool {
@@ -1538,218 +1595,77 @@ impl Config {
pub fn submodules(&self, rust_info: &GitInfo) -> bool {
self.submodules.unwrap_or(rust_info.is_managed_git_subrepository())
}
-}
-fn set<T>(field: &mut T, val: Option<T>) {
- if let Some(v) = val {
- *field = v;
- }
-}
-
-fn threads_from_config(v: u32) -> u32 {
- match v {
- 0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32,
- n => n,
- }
-}
+ /// Returns the commit to download, or `None` if we shouldn't download CI artifacts.
+ fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Option<String> {
+ // If `download-rustc` is not set, default to rebuilding.
+ let if_unchanged = match download_rustc {
+ None | Some(StringOrBool::Bool(false)) => return None,
+ Some(StringOrBool::Bool(true)) => false,
+ Some(StringOrBool::String(s)) if s == "if-unchanged" => true,
+ Some(StringOrBool::String(other)) => {
+ panic!("unrecognized option for download-rustc: {}", other)
+ }
+ };
-/// Returns the commit to download, or `None` if we shouldn't download CI artifacts.
-fn download_ci_rustc_commit(
- config: &Config,
- download_rustc: Option<StringOrBool>,
-) -> Option<String> {
- // If `download-rustc` is not set, default to rebuilding.
- let if_unchanged = match download_rustc {
- None | Some(StringOrBool::Bool(false)) => return None,
- Some(StringOrBool::Bool(true)) => false,
- Some(StringOrBool::String(s)) if s == "if-unchanged" => true,
- Some(StringOrBool::String(other)) => {
- panic!("unrecognized option for download-rustc: {}", other)
+ // Handle running from a directory other than the top level
+ let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"]));
+ let top_level = top_level.trim_end();
+ let compiler = format!("{top_level}/compiler/");
+ let library = format!("{top_level}/library/");
+
+ // Look for a version to compare to based on the current commit.
+ // Only commits merged by bors will have CI artifacts.
+ let merge_base = output(
+ self.git()
+ .arg("rev-list")
+ .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
+ .args(&["-n1", "--first-parent", "HEAD"]),
+ );
+ let commit = merge_base.trim_end();
+ if commit.is_empty() {
+ println!("error: could not find commit hash for downloading rustc");
+ println!("help: maybe your repository history is too shallow?");
+ println!("help: consider disabling `download-rustc`");
+ println!("help: or fetch enough history to include one upstream commit");
+ crate::detail_exit(1);
}
- };
- // Handle running from a directory other than the top level
- let top_level = output(config.git().args(&["rev-parse", "--show-toplevel"]));
- let top_level = top_level.trim_end();
- let compiler = format!("{top_level}/compiler/");
- let library = format!("{top_level}/library/");
-
- // Look for a version to compare to based on the current commit.
- // Only commits merged by bors will have CI artifacts.
- let merge_base = output(
- config
+ // Warn if there were changes to the compiler or standard library since the ancestor commit.
+ let has_changes = !t!(self
.git()
- .arg("rev-list")
- .arg(format!("--author={}", config.stage0_metadata.config.git_merge_commit_email))
- .args(&["-n1", "--first-parent", "HEAD"]),
- );
- let commit = merge_base.trim_end();
- if commit.is_empty() {
- println!("error: could not find commit hash for downloading rustc");
- println!("help: maybe your repository history is too shallow?");
- println!("help: consider disabling `download-rustc`");
- println!("help: or fetch enough history to include one upstream commit");
- crate::detail_exit(1);
- }
-
- // Warn if there were changes to the compiler or standard library since the ancestor commit.
- let has_changes = !t!(config
- .git()
- .args(&["diff-index", "--quiet", &commit, "--", &compiler, &library])
- .status())
- .success();
- if has_changes {
- if if_unchanged {
- if config.verbose > 0 {
- println!(
- "warning: saw changes to compiler/ or library/ since {commit}; \
- ignoring `download-rustc`"
- );
+ .args(&["diff-index", "--quiet", &commit, "--", &compiler, &library])
+ .status())
+ .success();
+ if has_changes {
+ if if_unchanged {
+ if self.verbose > 0 {
+ println!(
+ "warning: saw changes to compiler/ or library/ since {commit}; \
+ ignoring `download-rustc`"
+ );
+ }
+ return None;
}
- return None;
+ println!(
+ "warning: `download-rustc` is enabled, but there are changes to \
+ compiler/ or library/"
+ );
}
- println!(
- "warning: `download-rustc` is enabled, but there are changes to \
- compiler/ or library/"
- );
- }
-
- Some(commit.to_string())
-}
-fn maybe_download_rustfmt(builder: &Builder<'_>) -> Option<PathBuf> {
- let RustfmtMetadata { date, version } = builder.config.stage0_metadata.rustfmt.as_ref()?;
- let channel = format!("{version}-{date}");
-
- let host = builder.config.build;
- let rustfmt_path = builder.config.initial_rustc.with_file_name(exe("rustfmt", host));
- let bin_root = builder.config.out.join(host.triple).join("stage0");
- let rustfmt_stamp = bin_root.join(".rustfmt-stamp");
- if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) {
- return Some(rustfmt_path);
+ Some(commit.to_string())
}
-
- let filename = format!("rustfmt-{version}-{build}.tar.xz", build = host.triple);
- download_component(builder, DownloadSource::Dist, filename, "rustfmt-preview", &date, "stage0");
-
- builder.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt"));
- builder.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt"));
-
- builder.create(&rustfmt_stamp, &channel);
- Some(rustfmt_path)
}
-fn download_ci_rustc(builder: &Builder<'_>, commit: &str) {
- builder.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
- let channel = builder.config.artifact_channel(builder, commit);
- let host = builder.config.build.triple;
- let bin_root = builder.out.join(host).join("ci-rustc");
- let rustc_stamp = bin_root.join(".rustc-stamp");
-
- if !bin_root.join("bin").join("rustc").exists() || program_out_of_date(&rustc_stamp, commit) {
- if bin_root.exists() {
- t!(fs::remove_dir_all(&bin_root));
- }
- let filename = format!("rust-std-{channel}-{host}.tar.xz");
- let pattern = format!("rust-std-{host}");
- download_ci_component(builder, filename, &pattern, commit);
- let filename = format!("rustc-{channel}-{host}.tar.xz");
- download_ci_component(builder, filename, "rustc", commit);
- // download-rustc doesn't need its own cargo, it can just use beta's.
- let filename = format!("rustc-dev-{channel}-{host}.tar.xz");
- download_ci_component(builder, filename, "rustc-dev", commit);
-
- builder.fix_bin_or_dylib(&bin_root.join("bin").join("rustc"));
- builder.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc"));
- let lib_dir = bin_root.join("lib");
- for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) {
- let lib = t!(lib);
- if lib.path().extension() == Some(OsStr::new("so")) {
- builder.fix_bin_or_dylib(&lib.path());
- }
- }
- t!(fs::write(rustc_stamp, commit));
+fn set<T>(field: &mut T, val: Option<T>) {
+ if let Some(v) = val {
+ *field = v;
}
}
-pub(crate) enum DownloadSource {
- CI,
- Dist,
-}
-
-/// Download a single component of a CI-built toolchain (not necessarily a published nightly).
-// NOTE: intentionally takes an owned string to avoid downloading multiple times by accident
-fn download_ci_component(builder: &Builder<'_>, filename: String, prefix: &str, commit: &str) {
- download_component(builder, DownloadSource::CI, filename, prefix, commit, "ci-rustc")
-}
-
-fn download_component(
- builder: &Builder<'_>,
- mode: DownloadSource,
- filename: String,
- prefix: &str,
- key: &str,
- destination: &str,
-) {
- let cache_dst = builder.out.join("cache");
- let cache_dir = cache_dst.join(key);
- if !cache_dir.exists() {
- t!(fs::create_dir_all(&cache_dir));
- }
-
- let bin_root = builder.out.join(builder.config.build.triple).join(destination);
- let tarball = cache_dir.join(&filename);
- let (base_url, url, should_verify) = match mode {
- DownloadSource::CI => (
- builder.config.stage0_metadata.config.artifacts_server.clone(),
- format!("{key}/{filename}"),
- false,
- ),
- DownloadSource::Dist => {
- let dist_server = env::var("RUSTUP_DIST_SERVER")
- .unwrap_or(builder.config.stage0_metadata.config.dist_server.to_string());
- // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0.json
- (dist_server, format!("dist/{key}/{filename}"), true)
- }
- };
-
- // For the beta compiler, put special effort into ensuring the checksums are valid.
- // FIXME: maybe we should do this for download-rustc as well? but it would be a pain to update
- // this on each and every nightly ...
- let checksum = if should_verify {
- let error = format!(
- "src/stage0.json doesn't contain a checksum for {url}. \
- Pre-built artifacts might not be available for this \
- target at this time, see https://doc.rust-lang.org/nightly\
- /rustc/platform-support.html for more information."
- );
- let sha256 = builder.config.stage0_metadata.checksums_sha256.get(&url).expect(&error);
- if tarball.exists() {
- if builder.verify(&tarball, sha256) {
- builder.unpack(&tarball, &bin_root, prefix);
- return;
- } else {
- builder.verbose(&format!(
- "ignoring cached file {} due to failed verification",
- tarball.display()
- ));
- builder.remove(&tarball);
- }
- }
- Some(sha256)
- } else if tarball.exists() {
- builder.unpack(&tarball, &bin_root, prefix);
- return;
- } else {
- None
- };
-
- builder.download_component(&format!("{base_url}/{url}"), &tarball, "");
- if let Some(sha256) = checksum {
- if !builder.verify(&tarball, sha256) {
- panic!("failed to verify {}", tarball.display());
- }
+fn threads_from_config(v: u32) -> u32 {
+ match v {
+ 0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32,
+ n => n,
}
-
- builder.unpack(&tarball, &bin_root, prefix);
}
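A minimal standalone sketch of the `threads = 0` fallback introduced above: zero resolves to the detected hardware parallelism, anything else passes through unchanged (illustration only, not part of the patch).

fn threads_from_config(v: u32) -> u32 {
    match v {
        // 0 means "auto": use the detected CPU count, falling back to 1.
        0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32,
        n => n,
    }
}

fn main() {
    println!("auto -> {}", threads_from_config(0)); // detected parallelism (or 1)
    println!("explicit -> {}", threads_from_config(8)); // explicit value wins
}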
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 12585e80e..3cb0eccd3 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -10,15 +10,20 @@
use std::collections::HashSet;
use std::env;
+use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
+use object::read::archive::ArchiveFile;
+use object::BinaryFormat;
+
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::channel;
use crate::compile;
use crate::config::TargetSelection;
+use crate::doc::DocumentationFormat;
use crate::tarball::{GeneratedTarball, OverlayKind, Tarball};
use crate::tool::{self, Tool};
use crate::util::{exe, is_dylib, output, t, timeit};
@@ -97,7 +102,11 @@ impl Step for JsonDocs {
/// Builds the `rust-docs-json` installer component.
fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let host = self.host;
- builder.ensure(crate::doc::JsonStd { stage: builder.top_stage, target: host });
+ builder.ensure(crate::doc::Std {
+ stage: builder.top_stage,
+ target: host,
+ format: DocumentationFormat::JSON,
+ });
let dest = "share/doc/rust/json";
@@ -550,6 +559,39 @@ fn skip_host_target_lib(builder: &Builder<'_>, compiler: Compiler) -> bool {
}
}
+/// Check that all objects in rlibs for UEFI targets are COFF. This
+/// ensures that the C compiler isn't producing ELF objects, which would
+/// not link correctly with the COFF objects.
+fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp: &Path) {
+ if !target.ends_with("-uefi") {
+ return;
+ }
+
+ for (path, _) in builder.read_stamp_file(stamp) {
+ if path.extension() != Some(OsStr::new("rlib")) {
+ continue;
+ }
+
+ let data = t!(fs::read(&path));
+ let data = data.as_slice();
+ let archive = t!(ArchiveFile::parse(data));
+ for member in archive.members() {
+ let member = t!(member);
+ let member_data = t!(member.data(data));
+
+ let is_coff = match object::File::parse(member_data) {
+ Ok(member_file) => member_file.format() == BinaryFormat::Coff,
+ Err(_) => false,
+ };
+
+ if !is_coff {
+ let member_name = String::from_utf8_lossy(member.name());
+ panic!("member {} in {} is not COFF", member_name, path.display());
+ }
+ }
+ }
+}
+
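For reference, a hedged standalone sketch of the COFF check added above, built on the same `object` crate calls (`ArchiveFile::parse`, `ArchiveMember::data`, `object::File::parse`); error handling is reduced to `expect` and the path is a placeholder.

use std::path::Path;

use object::read::archive::ArchiveFile;
use object::BinaryFormat;

fn rlib_is_all_coff(rlib: &Path) -> bool {
    let data = std::fs::read(rlib).expect("failed to read rlib");
    let archive = ArchiveFile::parse(&*data).expect("failed to parse archive");
    archive.members().all(|member| {
        let member = member.expect("failed to read archive member");
        let bytes = member.data(&*data).expect("failed to read member data");
        // Every member must parse as a COFF object for the rlib to link on UEFI targets.
        matches!(object::File::parse(bytes), Ok(f) if f.format() == BinaryFormat::Coff)
    })
}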
/// Copy stamped files into an image's `target/lib` directory.
fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) {
let dst = image.join("lib/rustlib").join(target.triple).join("lib");
@@ -605,6 +647,7 @@ impl Step for Std {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
+ verify_uefi_rlib_format(builder, target, &stamp);
copy_target_libs(builder, target, &tarball.image_dir(), &stamp);
Some(tarball.generate())
@@ -919,13 +962,13 @@ impl Step for PlainSourceTarball {
// Create the version file
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
- if let Some(info) = builder.rust_info.info() {
+ if let Some(info) = builder.rust_info().info() {
channel::write_commit_hash_file(&plain_dst_src, &info.sha);
channel::write_commit_info_file(&plain_dst_src, info);
}
// If we're building from git sources, we need to vendor a complete distribution.
- if builder.rust_info.is_managed_git_subrepository() {
+ if builder.rust_info().is_managed_git_subrepository() {
// Ensure we have the submodules checked out.
builder.update_submodule(Path::new("src/tools/rust-analyzer"));
@@ -940,7 +983,7 @@ impl Step for PlainSourceTarball {
.arg(builder.src.join("./src/bootstrap/Cargo.toml"))
.current_dir(&plain_dst_src);
- let config = if !builder.config.dry_run {
+ let config = if !builder.config.dry_run() {
t!(String::from_utf8(t!(cmd.output()).stdout))
} else {
String::new()
@@ -1381,7 +1424,7 @@ impl Step for Extended {
let etc = builder.src.join("src/etc/installer");
// Avoid producing tarballs during a dry run.
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
@@ -1819,7 +1862,7 @@ impl Step for Extended {
let _time = timeit(builder);
builder.run(&mut cmd);
- if !builder.config.dry_run {
+ if !builder.config.dry_run() {
t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename)));
}
}
@@ -1853,21 +1896,23 @@ fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
///
/// Returns whether the files were actually copied.
fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) -> bool {
- if !builder.is_rust_llvm(target) {
- // If the LLVM was externally provided, then we don't currently copy
- // artifacts into the sysroot. This is not necessarily the right
- // choice (in particular, it will require the LLVM dylib to be in
- // the linker's load path at runtime), but the common use case for
- // external LLVMs is distribution provided LLVMs, and in that case
- // they're usually in the standard search path (e.g., /usr/lib) and
- // copying them here is going to cause problems as we may end up
- // with the wrong files and isn't what distributions want.
- //
- // This behavior may be revisited in the future though.
- //
- // If the LLVM is coming from ourselves (just from CI) though, we
- // still want to install it, as it otherwise won't be available.
- return false;
+ if let Some(config) = builder.config.target_config.get(&target) {
+ if config.llvm_config.is_some() && !builder.config.llvm_from_ci {
+ // If the LLVM was externally provided, then we don't currently copy
+ // artifacts into the sysroot. This is not necessarily the right
+ // choice (in particular, it will require the LLVM dylib to be in
+ // the linker's load path at runtime), but the common use case for
+ // external LLVMs is distribution provided LLVMs, and in that case
+ // they're usually in the standard search path (e.g., /usr/lib) and
+ // copying them here is going to cause problems as we may end up
+ // with the wrong files and isn't what distributions want.
+ //
+ // This behavior may be revisited in the future though.
+ //
+ // If the LLVM is coming from ourselves (just from CI) though, we
+ // still want to install it, as it otherwise won't be available.
+ return false;
+ }
}
// On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib
@@ -1881,12 +1926,12 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
if llvm_dylib_path.exists() {
builder.install(&llvm_dylib_path, dst_libdir, 0o644);
}
- !builder.config.dry_run
+ !builder.config.dry_run()
} else if let Ok(llvm_config) = crate::native::prebuilt_llvm_config(builder, target) {
let mut cmd = Command::new(llvm_config);
cmd.arg("--libfiles");
builder.verbose(&format!("running {:?}", cmd));
- let files = if builder.config.dry_run { "".into() } else { output(&mut cmd) };
+ let files = if builder.config.dry_run() { "".into() } else { output(&mut cmd) };
let build_llvm_out = &builder.llvm_out(builder.config.build);
let target_llvm_out = &builder.llvm_out(target);
for file in files.trim_end().split(' ') {
@@ -1898,7 +1943,7 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
};
builder.install(&file, dst_libdir, 0o644);
}
- !builder.config.dry_run
+ !builder.config.dry_run()
} else {
false
}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index ea06caf9c..2c6fd1e1d 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -85,18 +85,6 @@ book!(
StyleGuide, "src/doc/style-guide", "style-guide";
);
-fn open(builder: &Builder<'_>, path: impl AsRef<Path>) {
- if builder.config.dry_run || !builder.config.cmd.open() {
- return;
- }
-
- let path = path.as_ref();
- builder.info(&format!("Opening doc {}", path.display()));
- if let Err(err) = opener::open(path) {
- builder.info(&format!("{}\n", err));
- }
-}
-
// "library/std" -> ["library", "std"]
//
// Used for deciding whether a particular step is one requested by the user on
@@ -163,7 +151,7 @@ impl Step for RustbookSrc {
let index = out.join("index.html");
let rustbook = builder.tool_exe(Tool::Rustbook);
let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
- if builder.config.dry_run || up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+ if builder.config.dry_run() || up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
return;
}
builder.info(&format!("Rustbook ({}) - {}", target, name));
@@ -240,11 +228,9 @@ impl Step for TheBook {
invoke_rustdoc(builder, compiler, &shared_assets, target, path);
}
- if builder.was_invoked_explicitly::<Self>(Kind::Doc) {
- let out = builder.doc_out(target);
- let index = out.join("book").join("index.html");
- open(builder, &index);
- }
+ let out = builder.doc_out(target);
+ let index = out.join("book").join("index.html");
+ builder.maybe_open_in_browser::<Self>(index);
}
}
@@ -345,8 +331,8 @@ impl Step for Standalone {
&& up_to_date(&footer, &html)
&& up_to_date(&favicon, &html)
&& up_to_date(&full_toc, &html)
- && (builder.config.dry_run || up_to_date(&version_info, &html))
- && (builder.config.dry_run || up_to_date(&rustdoc, &html))
+ && (builder.config.dry_run() || up_to_date(&version_info, &html))
+ && (builder.config.dry_run() || up_to_date(&rustdoc, &html))
{
continue;
}
@@ -386,7 +372,7 @@ impl Step for Standalone {
// with no particular explicit doc requested (e.g. library/core).
if builder.paths.is_empty() || builder.was_invoked_explicitly::<Self>(Kind::Doc) {
let index = out.join("index.html");
- open(builder, &index);
+ builder.open_in_browser(&index);
}
}
}
@@ -416,11 +402,11 @@ impl Step for SharedAssets {
let version_input = builder.src.join("src").join("doc").join("version_info.html.template");
let version_info = out.join("version_info.html");
- if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
+ if !builder.config.dry_run() && !up_to_date(&version_input, &version_info) {
let info = t!(fs::read_to_string(&version_input))
.replace("VERSION", &builder.rust_release())
- .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
- .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
+ .replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or(""))
+ .replace("STAMP", builder.rust_info().sha().unwrap_or(""));
t!(fs::write(&version_info, &info));
}
@@ -434,6 +420,7 @@ impl Step for SharedAssets {
pub struct Std {
pub stage: u32,
pub target: TargetSelection,
+ pub format: DocumentationFormat,
}
impl Step for Std {
@@ -446,7 +433,15 @@ impl Step for Std {
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Std { stage: run.builder.top_stage, target: run.target });
+ run.builder.ensure(Std {
+ stage: run.builder.top_stage,
+ target: run.target,
+ format: if run.builder.config.cmd.json() {
+ DocumentationFormat::JSON
+ } else {
+ DocumentationFormat::HTML
+ },
+ });
}
/// Compile all standard library documentation.
@@ -456,19 +451,28 @@ impl Step for Std {
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
- let out = builder.doc_out(target);
+ let out = match self.format {
+ DocumentationFormat::HTML => builder.doc_out(target),
+ DocumentationFormat::JSON => builder.json_doc_out(target),
+ };
+
t!(fs::create_dir_all(&out));
- builder.ensure(SharedAssets { target: self.target });
+ if self.format == DocumentationFormat::HTML {
+ builder.ensure(SharedAssets { target: self.target });
+ }
let index_page = builder.src.join("src/doc/index.md").into_os_string();
- let mut extra_args = vec![
- OsStr::new("--markdown-css"),
- OsStr::new("rust.css"),
- OsStr::new("--markdown-no-toc"),
- OsStr::new("--index-page"),
- &index_page,
- ];
+ let mut extra_args = match self.format {
+ DocumentationFormat::HTML => vec![
+ OsStr::new("--markdown-css"),
+ OsStr::new("rust.css"),
+ OsStr::new("--markdown-no-toc"),
+ OsStr::new("--index-page"),
+ &index_page,
+ ],
+ DocumentationFormat::JSON => vec![OsStr::new("--output-format"), OsStr::new("json")],
+ };
if !builder.config.docs_minification {
extra_args.push(OsStr::new("--disable-minification"));
@@ -492,59 +496,24 @@ impl Step for Std {
})
.collect::<Vec<_>>();
- doc_std(
- builder,
- DocumentationFormat::HTML,
- stage,
- target,
- &out,
- &extra_args,
- &requested_crates,
- );
+ doc_std(builder, self.format, stage, target, &out, &extra_args, &requested_crates);
+
+ // Don't open if the format is json
+ if let DocumentationFormat::JSON = self.format {
+ return;
+ }
// Look for library/std, library/core etc in the `x.py doc` arguments and
// open the corresponding rendered docs.
for requested_crate in requested_crates {
if STD_PUBLIC_CRATES.iter().any(|k| *k == requested_crate.as_str()) {
let index = out.join(requested_crate).join("index.html");
- open(builder, &index);
+ builder.open_in_browser(index);
}
}
}
}
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-pub struct JsonStd {
- pub stage: u32,
- pub target: TargetSelection,
-}
-
-impl Step for JsonStd {
- type Output = ();
- const DEFAULT: bool = false;
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- let default = run.builder.config.docs && run.builder.config.cmd.json();
- run.all_krates("test").path("library").default_condition(default)
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Std { stage: run.builder.top_stage, target: run.target });
- }
-
- /// Build JSON documentation for the standard library crates.
- ///
- /// This is largely just a wrapper around `cargo doc`.
- fn run(self, builder: &Builder<'_>) {
- let stage = self.stage;
- let target = self.target;
- let out = builder.json_doc_out(target);
- t!(fs::create_dir_all(&out));
- let extra_args = [OsStr::new("--output-format"), OsStr::new("json")];
- doc_std(builder, DocumentationFormat::JSON, stage, target, &out, &extra_args, &[])
- }
-}
-
/// Name of the crates that are visible to consumers of the standard library.
/// Documentation for internal crates is handled by the rustc step, so internal crates will show
/// up there.
@@ -557,7 +526,7 @@ impl Step for JsonStd {
const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-enum DocumentationFormat {
+pub enum DocumentationFormat {
HTML,
JSON,
}
@@ -597,27 +566,22 @@ fn doc_std(
);
}
let compiler = builder.compiler(stage, builder.config.build);
+
+ let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" };
+ let target_dir =
+ builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name);
+
     // This is the directory where the compiler will place the output of the command.
     // We will then copy the files from this directory into the final `out` directory, which was
     // specified as a function parameter.
- let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
- // `cargo` uses the same directory for both JSON docs and HTML docs.
- // This could lead to cross-contamination when copying files into the specified `out` directory.
- // For example:
- // ```bash
- // x doc std
- // x doc std --json
- // ```
- // could lead to HTML docs being copied into the JSON docs output directory.
- // To avoid this issue, we clean the doc folder before invoking `cargo`.
- if out_dir.exists() {
- builder.remove_dir(&out_dir);
- }
+ let out_dir = target_dir.join(target.triple).join("doc");
let run_cargo_rustdoc_for = |package: &str| {
let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
cargo
+ .arg("--target-dir")
+ .arg(&*target_dir.to_string_lossy())
.arg("-p")
.arg(package)
.arg("-Zskip-rustdoc-fingerprint")
@@ -759,13 +723,13 @@ impl Step for Rustc {
// Let's open the first crate documentation page:
if let Some(krate) = to_open {
let index = out.join(krate).join("index.html");
- open(builder, &index);
+ builder.open_in_browser(index);
}
}
}
macro_rules! tool_doc {
- ($tool: ident, $should_run: literal, $path: literal, [$($krate: literal),+ $(,)?], in_tree = $in_tree:expr $(,)?) => {
+ ($tool: ident, $should_run: literal, $path: literal, [$($krate: literal),+ $(,)?] $(,)?) => {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $tool {
target: TargetSelection,
@@ -821,12 +785,6 @@ macro_rules! tool_doc {
t!(fs::create_dir_all(&out_dir));
t!(symlink_dir_force(&builder.config, &out, &out_dir));
- let source_type = if $in_tree == true {
- SourceType::InTree
- } else {
- SourceType::Submodule
- };
-
// Build cargo command.
let mut cargo = prepare_tool_cargo(
builder,
@@ -835,7 +793,7 @@ macro_rules! tool_doc {
target,
"doc",
$path,
- source_type,
+ SourceType::InTree,
&[],
);
@@ -851,38 +809,21 @@ macro_rules! tool_doc {
cargo.rustdocflag("--show-type-layout");
cargo.rustdocflag("--generate-link-to-definition");
cargo.rustdocflag("-Zunstable-options");
- if $in_tree == true {
- builder.run(&mut cargo.into());
- } else {
- // Allow out-of-tree docs to fail (since the tool might be in a broken state).
- if !builder.try_run(&mut cargo.into()) {
- builder.info(&format!(
- "WARNING: tool {} failed to document; ignoring failure because it is an out-of-tree tool",
- stringify!($tool).to_lowercase(),
- ));
- }
- }
+ builder.run(&mut cargo.into());
}
}
}
}
-tool_doc!(
- Rustdoc,
- "rustdoc-tool",
- "src/tools/rustdoc",
- ["rustdoc", "rustdoc-json-types"],
- in_tree = true
-);
+tool_doc!(Rustdoc, "rustdoc-tool", "src/tools/rustdoc", ["rustdoc", "rustdoc-json-types"],);
tool_doc!(
Rustfmt,
"rustfmt-nightly",
"src/tools/rustfmt",
["rustfmt-nightly", "rustfmt-config_proc_macro"],
- in_tree = true
);
-tool_doc!(Clippy, "clippy", "src/tools/clippy", ["clippy_utils"], in_tree = true);
-tool_doc!(Miri, "miri", "src/tools/miri", ["miri"], in_tree = false);
+tool_doc!(Clippy, "clippy", "src/tools/clippy", ["clippy_utils"]);
+tool_doc!(Miri, "miri", "src/tools/miri", ["miri"]);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
@@ -956,7 +897,7 @@ impl Step for UnstableBookGen {
}
fn symlink_dir_force(config: &Config, src: &Path, dst: &Path) -> io::Result<()> {
- if config.dry_run {
+ if config.dry_run() {
return Ok(());
}
if let Ok(m) = fs::symlink_metadata(dst) {
@@ -1021,7 +962,7 @@ impl Step for RustcBook {
cmd.arg("--rustc");
cmd.arg(&rustc);
cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg());
- if builder.config.verbose() {
+ if builder.is_verbose() {
cmd.arg("--verbose");
}
if self.validate {
@@ -1042,10 +983,9 @@ impl Step for RustcBook {
name: INTERNER.intern_str("rustc"),
src: INTERNER.intern_path(out_base),
});
- if builder.was_invoked_explicitly::<Self>(Kind::Doc) {
- let out = builder.doc_out(self.target);
- let index = out.join("rustc").join("index.html");
- open(builder, &index);
- }
+
+ let out = builder.doc_out(self.target);
+ let index = out.join("rustc").join("index.html");
+ builder.maybe_open_in_browser::<Self>(index);
}
}
diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs
new file mode 100644
index 000000000..6ae283f32
--- /dev/null
+++ b/src/bootstrap/download.rs
@@ -0,0 +1,520 @@
+use std::{
+ env,
+ ffi::{OsStr, OsString},
+ fs::{self, File},
+ io::{BufRead, BufReader, ErrorKind},
+ path::{Path, PathBuf},
+ process::{Command, Stdio},
+};
+
+use once_cell::sync::OnceCell;
+use xz2::bufread::XzDecoder;
+
+use crate::{
+ config::RustfmtMetadata,
+ native::detect_llvm_sha,
+ t,
+ util::{check_run, exe, program_out_of_date, try_run},
+ Config,
+};
+
+/// Generic helpers that are useful anywhere in bootstrap.
+impl Config {
+ pub fn is_verbose(&self) -> bool {
+ self.verbose > 0
+ }
+
+ pub(crate) fn create(&self, path: &Path, s: &str) {
+ if self.dry_run() {
+ return;
+ }
+ t!(fs::write(path, s));
+ }
+
+ pub(crate) fn remove(&self, f: &Path) {
+ if self.dry_run() {
+ return;
+ }
+ fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f));
+ }
+
+ /// Create a temporary directory in `out` and return its path.
+ ///
+ /// NOTE: this temporary directory is shared between all steps;
+ /// if you need an empty directory, create a new subdirectory inside it.
+ pub(crate) fn tempdir(&self) -> PathBuf {
+ let tmp = self.out.join("tmp");
+ t!(fs::create_dir_all(&tmp));
+ tmp
+ }
+
+ /// Runs a command, printing out nice contextual information if it fails.
+ /// Exits if the command failed to execute at all, otherwise returns its
+ /// `status.success()`.
+ pub(crate) fn try_run(&self, cmd: &mut Command) -> bool {
+ if self.dry_run() {
+ return true;
+ }
+ self.verbose(&format!("running: {:?}", cmd));
+ try_run(cmd, self.is_verbose())
+ }
+
+ /// Runs a command, printing out nice contextual information if it fails.
+    /// Returns false if the command did not execute at all, otherwise returns its
+ /// `status.success()`.
+ pub(crate) fn check_run(&self, cmd: &mut Command) -> bool {
+ if self.dry_run() {
+ return true;
+ }
+ self.verbose(&format!("running: {:?}", cmd));
+ check_run(cmd, self.is_verbose())
+ }
+
+ /// Modifies the interpreter section of 'fname' to fix the dynamic linker,
+ /// or the RPATH section, to fix the dynamic library search path
+ ///
+ /// This is only required on NixOS and uses the PatchELF utility to
+ /// change the interpreter/RPATH of ELF executables.
+ ///
+ /// Please see https://nixos.org/patchelf.html for more information
+ fn fix_bin_or_dylib(&self, fname: &Path) {
+ // FIXME: cache NixOS detection?
+ match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() {
+ Err(_) => return,
+ Ok(output) if !output.status.success() => return,
+ Ok(output) => {
+ let mut s = output.stdout;
+ if s.last() == Some(&b'\n') {
+ s.pop();
+ }
+ if s != b"Linux" {
+ return;
+ }
+ }
+ }
+
+ // If the user has asked binaries to be patched for Nix, then
+ // don't check for NixOS or `/lib`, just continue to the patching.
+ // NOTE: this intentionally comes after the Linux check:
+ // - patchelf only works with ELF files, so no need to run it on Mac or Windows
+ // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc.
+ if !self.patch_binaries_for_nix {
+ // Use `/etc/os-release` instead of `/etc/NIXOS`.
+ // The latter one does not exist on NixOS when using tmpfs as root.
+ const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""];
+ let os_release = match File::open("/etc/os-release") {
+ Err(e) if e.kind() == ErrorKind::NotFound => return,
+ Err(e) => panic!("failed to access /etc/os-release: {}", e),
+ Ok(f) => f,
+ };
+ if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) {
+ return;
+ }
+ if Path::new("/lib").exists() {
+ return;
+ }
+ }
+
+ // At this point we're pretty sure the user is running NixOS or using Nix
+ println!("info: you seem to be using Nix. Attempting to patch {}", fname.display());
+
+ // Only build `.nix-deps` once.
+ static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new();
+ let mut nix_build_succeeded = true;
+ let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| {
+ // Run `nix-build` to "build" each dependency (which will likely reuse
+ // the existing `/nix/store` copy, or at most download a pre-built copy).
+ //
+ // Importantly, we create a gc-root called `.nix-deps` in the `build/`
+ // directory, but still reference the actual `/nix/store` path in the rpath
+            // as it makes it significantly more robust against changes to the
+            // location of the `.nix-deps` directory.
+ //
+ // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
+ // zlib: Needed as a system dependency of `libLLVM-*.so`.
+ // patchelf: Needed for patching ELF binaries (see doc comment above).
+ let nix_deps_dir = self.out.join(".nix-deps");
+ const NIX_EXPR: &str = "
+ with (import <nixpkgs> {});
+ symlinkJoin {
+ name = \"rust-stage0-dependencies\";
+ paths = [
+ zlib
+ patchelf
+ stdenv.cc.bintools
+ ];
+ }
+ ";
+ nix_build_succeeded = self.try_run(Command::new("nix-build").args(&[
+ Path::new("-E"),
+ Path::new(NIX_EXPR),
+ Path::new("-o"),
+ &nix_deps_dir,
+ ]));
+ nix_deps_dir
+ });
+ if !nix_build_succeeded {
+ return;
+ }
+
+ let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf"));
+ let rpath_entries = {
+ // ORIGIN is a relative default, all binary and dynamic libraries we ship
+ // appear to have this (even when `../lib` is redundant).
+ // NOTE: there are only two paths here, delimited by a `:`
+ let mut entries = OsString::from("$ORIGIN/../lib:");
+ entries.push(t!(fs::canonicalize(nix_deps_dir)));
+ entries.push("/lib");
+ entries
+ };
+ patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
+ if !fname.extension().map_or(false, |ext| ext == "so") {
+ // Finally, set the correct .interp for binaries
+ let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
+ // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
+ let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
+ patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
+ }
+
+ self.try_run(patchelf.arg(fname));
+ }
+
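A hedged sketch of the patchelf invocations that `fix_bin_or_dylib` performs above; the flags mirror the diff, while the patchelf binary path and target file are placeholders.

use std::path::Path;
use std::process::Command;

fn set_rpath(patchelf: &Path, rpath: &str, file: &Path) -> std::io::Result<bool> {
    // Equivalent to: patchelf --set-rpath <rpath> <file>
    Ok(Command::new(patchelf)
        .args(["--set-rpath", rpath])
        .arg(file)
        .status()?
        .success())
}

fn set_interpreter(patchelf: &Path, interp: &str, file: &Path) -> std::io::Result<bool> {
    // Equivalent to: patchelf --set-interpreter <interp> <file>
    Ok(Command::new(patchelf)
        .args(["--set-interpreter", interp])
        .arg(file)
        .status()?
        .success())
}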
+ fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) {
+ self.verbose(&format!("download {url}"));
+ // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/.
+ let tempfile = self.tempdir().join(dest_path.file_name().unwrap());
+ // While bootstrap itself only supports http and https downloads, downstream forks might
+        // need to download components from other protocols. The match allows them to add more
+ // protocols without worrying about merge conflicts if we change the HTTP implementation.
+ match url.split_once("://").map(|(proto, _)| proto) {
+ Some("http") | Some("https") => {
+ self.download_http_with_retries(&tempfile, url, help_on_error)
+ }
+ Some(other) => panic!("unsupported protocol {other} in {url}"),
+ None => panic!("no protocol in {url}"),
+ }
+ t!(std::fs::rename(&tempfile, dest_path));
+ }
+
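The protocol dispatch above hinges on `str::split_once`; a tiny sketch of how it classifies URLs (illustration only).

fn scheme(url: &str) -> Option<&str> {
    url.split_once("://").map(|(proto, _)| proto)
}

fn main() {
    assert_eq!(scheme("https://static.rust-lang.org/dist/x.tar.xz"), Some("https"));
    assert_eq!(scheme("ftp://example.com/file"), Some("ftp")); // would hit the panic arm above
    assert_eq!(scheme("no-protocol"), None);
}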
+ fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) {
+ println!("downloading {}", url);
+ // Try curl. If that fails and we are on windows, fallback to PowerShell.
+ let mut curl = Command::new("curl");
+ curl.args(&[
+ "-#",
+ "-y",
+ "30",
+ "-Y",
+ "10", // timeout if speed is < 10 bytes/sec for > 30 seconds
+ "--connect-timeout",
+ "30", // timeout if cannot connect within 30 seconds
+ "--retry",
+ "3",
+ "-Sf",
+ "-o",
+ ]);
+ curl.arg(tempfile);
+ curl.arg(url);
+ if !self.check_run(&mut curl) {
+ if self.build.contains("windows-msvc") {
+ println!("Fallback to PowerShell");
+ for _ in 0..3 {
+ if self.try_run(Command::new("PowerShell.exe").args(&[
+ "/nologo",
+ "-Command",
+ "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
+ &format!(
+ "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
+ url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"),
+ ),
+ ])) {
+ return;
+ }
+ println!("\nspurious failure, trying again");
+ }
+ }
+ if !help_on_error.is_empty() {
+ eprintln!("{}", help_on_error);
+ }
+ crate::detail_exit(1);
+ }
+ }
+
+ fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
+ println!("extracting {} to {}", tarball.display(), dst.display());
+ if !dst.exists() {
+ t!(fs::create_dir_all(dst));
+ }
+
+ // `tarball` ends with `.tar.xz`; strip that suffix
+ // example: `rust-dev-nightly-x86_64-unknown-linux-gnu`
+ let uncompressed_filename =
+ Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap();
+ let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
+
+ // decompress the file
+ let data = t!(File::open(tarball));
+ let decompressor = XzDecoder::new(BufReader::new(data));
+
+ let mut tar = tar::Archive::new(decompressor);
+ for member in t!(tar.entries()) {
+ let mut member = t!(member);
+ let original_path = t!(member.path()).into_owned();
+ // skip the top-level directory
+ if original_path == directory_prefix {
+ continue;
+ }
+ let mut short_path = t!(original_path.strip_prefix(directory_prefix));
+ if !short_path.starts_with(pattern) {
+ continue;
+ }
+ short_path = t!(short_path.strip_prefix(pattern));
+ let dst_path = dst.join(short_path);
+ self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display()));
+ if !t!(member.unpack_in(dst)) {
+ panic!("path traversal attack ??");
+ }
+ let src_path = dst.join(original_path);
+ if src_path.is_dir() && dst_path.exists() {
+ continue;
+ }
+ t!(fs::rename(src_path, dst_path));
+ }
+ t!(fs::remove_dir_all(dst.join(directory_prefix)));
+ }
+
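A hedged sketch of the xz-plus-tar unpacking pattern used by `unpack`, with the same `xz2` and `tar` crates; unlike the bootstrap version it keeps the top-level directory and does no prefix filtering.

use std::fs::File;
use std::io::BufReader;
use std::path::Path;

use tar::Archive;
use xz2::bufread::XzDecoder;

fn unpack_all(tarball: &Path, dst: &Path) -> std::io::Result<()> {
    let file = File::open(tarball)?;
    // Stream-decompress the .tar.xz and extract every entry under `dst`.
    let decompressor = XzDecoder::new(BufReader::new(file));
    let mut archive = Archive::new(decompressor);
    archive.unpack(dst)
}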
+ /// Returns whether the SHA256 checksum of `path` matches `expected`.
+ fn verify(&self, path: &Path, expected: &str) -> bool {
+ use sha2::Digest;
+
+ self.verbose(&format!("verifying {}", path.display()));
+ let mut hasher = sha2::Sha256::new();
+ // FIXME: this is ok for rustfmt (4.1 MB large at time of writing), but it seems memory-intensive for rustc and larger components.
+ // Consider using streaming IO instead?
+ let contents = if self.dry_run() { vec![] } else { t!(fs::read(path)) };
+ hasher.update(&contents);
+ let found = hex::encode(hasher.finalize().as_slice());
+ let verified = found == expected;
+ if !verified && !self.dry_run() {
+ println!(
+ "invalid checksum: \n\
+ found: {found}\n\
+ expected: {expected}",
+ );
+ }
+ return verified;
+ }
+}
+
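A minimal sketch of the SHA-256 comparison performed by `verify`, using the `sha2` and `hex` crates the code above already relies on (illustration only).

use sha2::{Digest, Sha256};

fn sha256_hex(contents: &[u8]) -> String {
    let mut hasher = Sha256::new();
    hasher.update(contents);
    hex::encode(hasher.finalize())
}

fn checksum_matches(contents: &[u8], expected: &str) -> bool {
    sha256_hex(contents) == expected
}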
+enum DownloadSource {
+ CI,
+ Dist,
+}
+
+/// Functions that are only ever called once, but named for clarity and to avoid thousand-line functions.
+impl Config {
+ pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> {
+ let RustfmtMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?;
+ let channel = format!("{version}-{date}");
+
+ let host = self.build;
+ let rustfmt_path = self.initial_rustc.with_file_name(exe("rustfmt", host));
+ let bin_root = self.out.join(host.triple).join("stage0");
+ let rustfmt_stamp = bin_root.join(".rustfmt-stamp");
+ if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) {
+ return Some(rustfmt_path);
+ }
+
+ let filename = format!("rustfmt-{version}-{build}.tar.xz", build = host.triple);
+ self.download_component(DownloadSource::Dist, filename, "rustfmt-preview", &date, "stage0");
+
+ self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt"));
+ self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt"));
+
+ self.create(&rustfmt_stamp, &channel);
+ Some(rustfmt_path)
+ }
+
+ pub(crate) fn download_ci_rustc(&self, commit: &str) {
+ self.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
+ let version = self.artifact_version_part(commit);
+ let host = self.build.triple;
+ let bin_root = self.out.join(host).join("ci-rustc");
+ let rustc_stamp = bin_root.join(".rustc-stamp");
+
+ if !bin_root.join("bin").join("rustc").exists() || program_out_of_date(&rustc_stamp, commit)
+ {
+ if bin_root.exists() {
+ t!(fs::remove_dir_all(&bin_root));
+ }
+ let filename = format!("rust-std-{version}-{host}.tar.xz");
+ let pattern = format!("rust-std-{host}");
+ self.download_ci_component(filename, &pattern, commit);
+ let filename = format!("rustc-{version}-{host}.tar.xz");
+ self.download_ci_component(filename, "rustc", commit);
+ // download-rustc doesn't need its own cargo, it can just use beta's.
+ let filename = format!("rustc-dev-{version}-{host}.tar.xz");
+ self.download_ci_component(filename, "rustc-dev", commit);
+ let filename = format!("rust-src-{version}.tar.xz");
+ self.download_ci_component(filename, "rust-src", commit);
+
+ self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc"));
+ self.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc"));
+ self.fix_bin_or_dylib(&bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"));
+ let lib_dir = bin_root.join("lib");
+ for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) {
+ let lib = t!(lib);
+ if lib.path().extension() == Some(OsStr::new("so")) {
+ self.fix_bin_or_dylib(&lib.path());
+ }
+ }
+ t!(fs::write(rustc_stamp, commit));
+ }
+ }
+
+ /// Download a single component of a CI-built toolchain (not necessarily a published nightly).
+ // NOTE: intentionally takes an owned string to avoid downloading multiple times by accident
+ fn download_ci_component(&self, filename: String, prefix: &str, commit: &str) {
+ Self::download_component(self, DownloadSource::CI, filename, prefix, commit, "ci-rustc")
+ }
+
+ fn download_component(
+ &self,
+ mode: DownloadSource,
+ filename: String,
+ prefix: &str,
+ key: &str,
+ destination: &str,
+ ) {
+ let cache_dst = self.out.join("cache");
+ let cache_dir = cache_dst.join(key);
+ if !cache_dir.exists() {
+ t!(fs::create_dir_all(&cache_dir));
+ }
+
+ let bin_root = self.out.join(self.build.triple).join(destination);
+ let tarball = cache_dir.join(&filename);
+ let (base_url, url, should_verify) = match mode {
+ DownloadSource::CI => (
+ self.stage0_metadata.config.artifacts_server.clone(),
+ format!("{key}/{filename}"),
+ false,
+ ),
+ DownloadSource::Dist => {
+ let dist_server = env::var("RUSTUP_DIST_SERVER")
+ .unwrap_or(self.stage0_metadata.config.dist_server.to_string());
+ // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0.json
+ (dist_server, format!("dist/{key}/{filename}"), true)
+ }
+ };
+
+ // For the beta compiler, put special effort into ensuring the checksums are valid.
+ // FIXME: maybe we should do this for download-rustc as well? but it would be a pain to update
+ // this on each and every nightly ...
+ let checksum = if should_verify {
+ let error = format!(
+ "src/stage0.json doesn't contain a checksum for {url}. \
+ Pre-built artifacts might not be available for this \
+ target at this time, see https://doc.rust-lang.org/nightly\
+ /rustc/platform-support.html for more information."
+ );
+ let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error);
+ if tarball.exists() {
+ if self.verify(&tarball, sha256) {
+ self.unpack(&tarball, &bin_root, prefix);
+ return;
+ } else {
+ self.verbose(&format!(
+ "ignoring cached file {} due to failed verification",
+ tarball.display()
+ ));
+ self.remove(&tarball);
+ }
+ }
+ Some(sha256)
+ } else if tarball.exists() {
+ self.unpack(&tarball, &bin_root, prefix);
+ return;
+ } else {
+ None
+ };
+
+ self.download_file(&format!("{base_url}/{url}"), &tarball, "");
+ if let Some(sha256) = checksum {
+ if !self.verify(&tarball, sha256) {
+ panic!("failed to verify {}", tarball.display());
+ }
+ }
+
+ self.unpack(&tarball, &bin_root, prefix);
+ }
+
+ pub(crate) fn maybe_download_ci_llvm(&self) {
+ if !self.llvm_from_ci {
+ return;
+ }
+ let llvm_root = self.ci_llvm_root();
+ let llvm_stamp = llvm_root.join(".llvm-stamp");
+ let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository());
+ let key = format!("{}{}", llvm_sha, self.llvm_assertions);
+ if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() {
+ self.download_ci_llvm(&llvm_sha);
+ for entry in t!(fs::read_dir(llvm_root.join("bin"))) {
+ self.fix_bin_or_dylib(&t!(entry).path());
+ }
+
+ // Update the timestamp of llvm-config to force rustc_llvm to be
+ // rebuilt. This is a hacky workaround for a deficiency in Cargo where
+ // the rerun-if-changed directive doesn't handle changes very well.
+ // https://github.com/rust-lang/cargo/issues/10791
+ // Cargo only compares the timestamp of the file relative to the last
+ // time `rustc_llvm` build script ran. However, the timestamps of the
+ // files in the tarball are in the past, so it doesn't trigger a
+ // rebuild.
+ let now = filetime::FileTime::from_system_time(std::time::SystemTime::now());
+ let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build));
+ t!(filetime::set_file_times(&llvm_config, now, now));
+
+ let llvm_lib = llvm_root.join("lib");
+ for entry in t!(fs::read_dir(&llvm_lib)) {
+ let lib = t!(entry).path();
+ if lib.extension().map_or(false, |ext| ext == "so") {
+ self.fix_bin_or_dylib(&lib);
+ }
+ }
+ t!(fs::write(llvm_stamp, key));
+ }
+ }
+
+ fn download_ci_llvm(&self, llvm_sha: &str) {
+ let llvm_assertions = self.llvm_assertions;
+
+ let cache_prefix = format!("llvm-{}-{}", llvm_sha, llvm_assertions);
+ let cache_dst = self.out.join("cache");
+ let rustc_cache = cache_dst.join(cache_prefix);
+ if !rustc_cache.exists() {
+ t!(fs::create_dir_all(&rustc_cache));
+ }
+ let base = if llvm_assertions {
+ &self.stage0_metadata.config.artifacts_with_llvm_assertions_server
+ } else {
+ &self.stage0_metadata.config.artifacts_server
+ };
+ let version = self.artifact_version_part(llvm_sha);
+ let filename = format!("rust-dev-{}-{}.tar.xz", version, self.build.triple);
+ let tarball = rustc_cache.join(&filename);
+ if !tarball.exists() {
+ let help_on_error = "error: failed to download llvm from ci
+
+ help: old builds get deleted after a certain time
+ help: if trying to compile an old commit of rustc, disable `download-ci-llvm` in config.toml:
+
+ [llvm]
+ download-ci-llvm = false
+ ";
+ self.download_file(&format!("{base}/{llvm_sha}/{filename}"), &tarball, help_on_error);
+ }
+ let llvm_root = self.ci_llvm_root();
+ self.unpack(&tarball, &llvm_root, "rust-dev");
+ }
+}
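The comment in `maybe_download_ci_llvm` above describes bumping llvm-config's mtime so Cargo rebuilds rustc_llvm; a hedged sketch of that touch operation with the `filetime` crate (the path is a placeholder).

use std::path::Path;
use std::time::SystemTime;

fn touch(path: &Path) -> std::io::Result<()> {
    // Set both atime and mtime to "now" so change detection sees the file as fresh.
    let now = filetime::FileTime::from_system_time(SystemTime::now());
    filetime::set_file_times(path, now, now)
}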
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index ee341a353..37a8eb884 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -140,9 +140,10 @@ pub enum Subcommand {
},
Run {
paths: Vec<PathBuf>,
+ args: Vec<String>,
},
Setup {
- profile: Profile,
+ profile: Option<Profile>,
},
}
@@ -342,6 +343,9 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
Kind::Format => {
opts.optflag("", "check", "check formatting instead of applying.");
}
+ Kind::Run => {
+ opts.optmulti("", "args", "arguments for the tool", "ARGS");
+ }
_ => {}
};
@@ -613,7 +617,7 @@ Arguments:
println!("\nrun requires at least a path!\n");
usage(1, &opts, verbose, &subcommand_help);
}
- Subcommand::Run { paths }
+ Subcommand::Run { paths, args: matches.opt_strs("args") }
}
Kind::Setup => {
let profile = if paths.len() > 1 {
@@ -624,14 +628,15 @@ Arguments:
|path| format!("{} is not a valid UTF8 string", path.to_string_lossy())
));
- profile_string.parse().unwrap_or_else(|err| {
+ let profile = profile_string.parse().unwrap_or_else(|err| {
eprintln!("error: {}", err);
eprintln!("help: the available profiles are:");
eprint!("{}", Profile::all_for_help("- "));
crate::detail_exit(1);
- })
+ });
+ Some(profile)
} else {
- t!(crate::setup::interactive_path())
+ None
};
Subcommand::Setup { profile }
}
@@ -721,16 +726,12 @@ impl Subcommand {
}
pub fn test_args(&self) -> Vec<&str> {
- let mut args = vec![];
-
match *self {
Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => {
- args.extend(test_args.iter().flat_map(|s| s.split_whitespace()))
+ test_args.iter().flat_map(|s| s.split_whitespace()).collect()
}
- _ => (),
+ _ => vec![],
}
-
- args
}
pub fn rustc_args(&self) -> Vec<&str> {
@@ -738,7 +739,16 @@ impl Subcommand {
Subcommand::Test { ref rustc_args, .. } => {
rustc_args.iter().flat_map(|s| s.split_whitespace()).collect()
}
- _ => Vec::new(),
+ _ => vec![],
+ }
+ }
+
+ pub fn args(&self) -> Vec<&str> {
+ match *self {
+ Subcommand::Run { ref args, .. } => {
+ args.iter().flat_map(|s| s.split_whitespace()).collect()
+ }
+ _ => vec![],
}
}
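The `test_args`/`rustc_args`/`args` accessors above all flatten whitespace-separated values; a tiny standalone sketch of that behaviour.

fn flatten_args(raw: &[String]) -> Vec<&str> {
    raw.iter().flat_map(|s| s.split_whitespace()).collect()
}

fn main() {
    let raw = vec!["--foo bar".to_string(), "baz".to_string()];
    assert_eq!(flatten_args(&raw), ["--foo", "bar", "baz"]);
}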
diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs
index 37322670e..5e7264fe7 100644
--- a/src/bootstrap/format.rs
+++ b/src/bootstrap/format.rs
@@ -43,7 +43,7 @@ struct RustfmtConfig {
}
pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
- if build.config.dry_run {
+ if build.config.dry_run() {
return;
}
let mut builder = ignore::types::TypesBuilder::new();
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index 7672b7c91..38426f3a4 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -200,10 +200,14 @@ install!((self, builder, _config),
install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball);
};
Miri, alias = "miri", Self::should_build(_config), only_hosts: true, {
- let tarball = builder
- .ensure(dist::Miri { compiler: self.compiler, target: self.target })
- .expect("missing miri");
- install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
+ if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) {
+ install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
+ } else {
+ // Miri is only available on nightly
+ builder.info(
+ &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target),
+ );
+ }
};
Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, {
if let Some(tarball) = builder.ensure(dist::Rustfmt {
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 7e70e99bb..3ed534523 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -112,15 +112,14 @@ use std::path::{Path, PathBuf};
use std::process::Command;
use std::str;
-use config::Target;
+use channel::GitInfo;
+use config::{DryRun, Target};
use filetime::FileTime;
use once_cell::sync::OnceCell;
use crate::builder::Kind;
use crate::config::{LlvmLibunwind, TargetSelection};
-use crate::util::{
- check_run, exe, libdir, mtime, output, run, run_suppressed, try_run, try_run_suppressed, CiEnv,
-};
+use crate::util::{exe, libdir, mtime, output, run, run_suppressed, try_run_suppressed, CiEnv};
mod bolt;
mod builder;
@@ -133,6 +132,7 @@ mod compile;
mod config;
mod dist;
mod doc;
+mod download;
mod flags;
mod format;
mod install;
@@ -281,7 +281,6 @@ pub struct Build {
src: PathBuf,
out: PathBuf,
bootstrap_out: PathBuf,
- rust_info: channel::GitInfo,
cargo_info: channel::GitInfo,
rust_analyzer_info: channel::GitInfo,
clippy_info: channel::GitInfo,
@@ -396,6 +395,28 @@ pub enum CLang {
Cxx,
}
+macro_rules! forward {
+ ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => {
+ impl Build {
+ $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? {
+ self.config.$fn( $($param),* )
+ } )+
+ }
+ }
+}
+
+forward! {
+ verbose(msg: &str),
+ is_verbose() -> bool,
+ create(path: &Path, s: &str),
+ remove(f: &Path),
+ tempdir() -> PathBuf,
+ try_run(cmd: &mut Command) -> bool,
+ llvm_link_shared() -> bool,
+ download_rustc() -> bool,
+ initial_rustfmt() -> Option<PathBuf>,
+}
+
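A self-contained sketch of the delegation pattern the `forward!` macro above implements: generate thin wrapper methods on `Build` that forward to the inner `config` field (the types here are stand-ins, not the real bootstrap ones).

struct Config { verbosity: usize }
struct Build { config: Config }

impl Config {
    fn is_verbose(&self) -> bool { self.verbosity > 0 }
    fn verbose(&self, msg: &str) { if self.is_verbose() { println!("{}", msg); } }
}

macro_rules! forward {
    ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => {
        impl Build {
            $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? {
                self.config.$fn( $($param),* )
            } )+
        }
    }
}

forward! {
    verbose(msg: &str),
    is_verbose() -> bool,
}

fn main() {
    let b = Build { config: Config { verbosity: 1 } };
    assert!(b.is_verbose());                  // delegated to Config::is_verbose
    b.verbose("forwarded through the macro"); // delegated to Config::verbose
}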
impl Build {
/// Creates a new set of build configuration from the `flags` on the command
/// line and the filesystem `config`.
@@ -430,7 +451,7 @@ impl Build {
// we always try to use git for LLVM builds
let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
- let initial_target_libdir_str = if config.dry_run {
+ let initial_target_libdir_str = if config.dry_run() {
"/dummy/lib/path/to/lib/".to_string()
} else {
output(
@@ -444,7 +465,7 @@ impl Build {
let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap();
let initial_lld = initial_target_dir.join("bin").join("rust-lld");
- let initial_sysroot = if config.dry_run {
+ let initial_sysroot = if config.dry_run() {
"/dummy".to_string()
} else {
output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot"))
@@ -499,7 +520,6 @@ impl Build {
out,
bootstrap_out,
- rust_info,
cargo_info,
rust_analyzer_info,
clippy_info,
@@ -522,16 +542,6 @@ impl Build {
metrics: metrics::BuildMetrics::init(),
};
- build.verbose("finding compilers");
- cc_detect::find(&mut build);
- // When running `setup`, the profile is about to change, so any requirements we have now may
- // be different on the next invocation. Don't check for them until the next time x.py is
- // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
- if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
- build.verbose("running sanity check");
- sanity::check(&mut build);
- }
-
// If local-rust is the same major.minor as the current version, then force a
// local-rebuild
let local_version_verbose =
@@ -547,16 +557,34 @@ impl Build {
build.local_rebuild = true;
}
- // Make sure we update these before gathering metadata so we don't get an error about missing
- // Cargo.toml files.
- let rust_submodules =
- ["src/tools/rust-installer", "src/tools/cargo", "library/backtrace", "library/stdarch"];
- for s in rust_submodules {
- build.update_submodule(Path::new(s));
- }
+ build.verbose("finding compilers");
+ cc_detect::find(&mut build);
+ // When running `setup`, the profile is about to change, so any requirements we have now may
+ // be different on the next invocation. Don't check for them until the next time x.py is
+ // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
+ //
+ // Similarly, for `setup` we don't actually need submodules or cargo metadata.
+ if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
+ build.verbose("running sanity check");
+ sanity::check(&mut build);
+
+ // Make sure we update these before gathering metadata so we don't get an error about missing
+ // Cargo.toml files.
+ let rust_submodules = [
+ "src/tools/rust-installer",
+ "src/tools/cargo",
+ "library/backtrace",
+ "library/stdarch",
+ ];
+ for s in rust_submodules {
+ build.update_submodule(Path::new(s));
+ }
+ // Now, update all existing submodules.
+ build.update_existing_submodules();
- build.verbose("learning about cargo");
- metadata::build(&mut build);
+ build.verbose("learning about cargo");
+ metadata::build(&mut build);
+ }
build
}
@@ -570,7 +598,7 @@ impl Build {
t!(std::fs::read_dir(dir)).next().is_none()
}
- if !self.config.submodules(&self.rust_info) {
+ if !self.config.submodules(&self.rust_info()) {
return;
}
@@ -628,15 +656,29 @@ impl Build {
self.run(&mut update(false));
}
+ // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error).
+ let has_local_modifications = !self.try_run(
+ Command::new("git")
+ .args(&["diff-index", "--quiet", "HEAD"])
+ .current_dir(&absolute_path),
+ );
+ if has_local_modifications {
+ self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
+ }
+
self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
- self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(absolute_path));
+ self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path));
+
+ if has_local_modifications {
+ self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path));
+ }
}
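A hedged sketch of the dirty-tree check used above: `git diff-index --quiet HEAD` exits non-zero when there are local modifications, so the stash push/pop pair only runs when needed.

use std::path::Path;
use std::process::Command;

fn has_local_modifications(repo: &Path) -> bool {
    // Non-zero exit status (or failure to spawn git) is treated as "dirty".
    !Command::new("git")
        .args(["diff-index", "--quiet", "HEAD"])
        .current_dir(repo)
        .status()
        .map(|s| s.success())
        .unwrap_or(false)
}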
/// If any submodule has been initialized already, sync it unconditionally.
/// This avoids contributors checking in a submodule change by accident.
- pub fn maybe_update_submodules(&self) {
+ pub fn update_existing_submodules(&self) {
// Avoid running git when there isn't a git checkout.
- if !self.config.submodules(&self.rust_info) {
+ if !self.config.submodules(&self.rust_info()) {
return;
}
let output = output(
@@ -663,8 +705,6 @@ impl Build {
job::setup(self);
}
- self.maybe_update_submodules();
-
if let Subcommand::Format { check, paths } = &self.config.cmd {
return format::format(&builder::Builder::new(&self), *check, &paths);
}
@@ -689,13 +729,13 @@ impl Build {
}
}
- if !self.config.dry_run {
+ if !self.config.dry_run() {
{
- self.config.dry_run = true;
+ self.config.dry_run = DryRun::SelfCheck;
let builder = builder::Builder::new(&self);
builder.execute_cli();
}
- self.config.dry_run = false;
+ self.config.dry_run = DryRun::Disabled;
let builder = builder::Builder::new(&self);
builder.execute_cli();
} else {
@@ -735,6 +775,10 @@ impl Build {
cleared
}
+ fn rust_info(&self) -> &GitInfo {
+ &self.config.rust_info
+ }
+
/// Gets the space-separated set of activated features for the standard
/// library.
fn std_features(&self, target: TargetSelection) -> String {
@@ -947,7 +991,7 @@ impl Build {
/// Runs a command, printing out nice contextual information if it fails.
fn run(&self, cmd: &mut Command) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
self.verbose(&format!("running: {:?}", cmd));
@@ -956,7 +1000,7 @@ impl Build {
/// Runs a command, printing out nice contextual information if it fails.
fn run_quiet(&self, cmd: &mut Command) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
self.verbose(&format!("running: {:?}", cmd));
@@ -966,47 +1010,14 @@ impl Build {
/// Runs a command, printing out nice contextual information if it fails.
/// Exits if the command failed to execute at all, otherwise returns its
/// `status.success()`.
- fn try_run(&self, cmd: &mut Command) -> bool {
- if self.config.dry_run {
- return true;
- }
- self.verbose(&format!("running: {:?}", cmd));
- try_run(cmd, self.is_verbose())
- }
-
- /// Runs a command, printing out nice contextual information if it fails.
- /// Exits if the command failed to execute at all, otherwise returns its
- /// `status.success()`.
fn try_run_quiet(&self, cmd: &mut Command) -> bool {
- if self.config.dry_run {
+ if self.config.dry_run() {
return true;
}
self.verbose(&format!("running: {:?}", cmd));
try_run_suppressed(cmd)
}
- /// Runs a command, printing out nice contextual information if it fails.
- /// Returns false if do not execute at all, otherwise returns its
- /// `status.success()`.
- fn check_run(&self, cmd: &mut Command) -> bool {
- if self.config.dry_run {
- return true;
- }
- self.verbose(&format!("running: {:?}", cmd));
- check_run(cmd, self.is_verbose())
- }
-
- pub fn is_verbose(&self) -> bool {
- self.verbosity > 0
- }
-
- /// Prints a message if this build is configured in verbose mode.
- fn verbose(&self, msg: &str) {
- if self.is_verbose() {
- println!("{}", msg);
- }
- }
-
pub fn is_verbose_than(&self, level: usize) -> bool {
self.verbosity > level
}
@@ -1019,10 +1030,12 @@ impl Build {
}
fn info(&self, msg: &str) {
- if self.config.dry_run {
- return;
+ match self.config.dry_run {
+ DryRun::SelfCheck => return,
+ DryRun::Disabled | DryRun::UserSelected => {
+ println!("{}", msg);
+ }
}
- println!("{}", msg);
}
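The three-way `dry_run` match above relies on a `DryRun` enum defined in config.rs (not shown in this hunk); a sketch with the variant names taken from the diff, purely for illustration.

#[derive(Clone, Copy, PartialEq, Eq)]
enum DryRun {
    /// Actually run commands.
    Disabled,
    /// Internal self-check pass; suppress `info` output.
    SelfCheck,
    /// The user asked for a dry run; still print `info` messages.
    UserSelected,
}

fn should_print_info(dry_run: DryRun) -> bool {
    !matches!(dry_run, DryRun::SelfCheck)
}

fn main() {
    assert!(should_print_info(DryRun::Disabled));
    assert!(should_print_info(DryRun::UserSelected));
    assert!(!should_print_info(DryRun::SelfCheck));
}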
/// Returns the number of parallel jobs that have been configured for this
@@ -1152,8 +1165,8 @@ impl Build {
options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
}
- let threads = if target.contains("windows") { "/threads:1" } else { "--threads=1" };
- options[1] = Some(format!("-Clink-arg=-Wl,{}", threads));
+ let no_threads = util::lld_flag_no_threads(target.contains("windows"));
+ options[1] = Some(format!("-Clink-arg=-Wl,{}", no_threads));
}
IntoIterator::into_iter(options).flatten()
@@ -1267,7 +1280,7 @@ impl Build {
match &self.config.channel[..] {
"stable" => num.to_string(),
"beta" => {
- if self.rust_info.is_managed_git_subrepository() && !self.config.ignore_git {
+ if self.rust_info().is_managed_git_subrepository() && !self.config.ignore_git {
format!("{}-beta.{}", num, self.beta_prerelease_version())
} else {
format!("{}-beta", num)
@@ -1327,7 +1340,7 @@ impl Build {
/// Note that this is a descriptive string which includes the commit date,
/// sha, version, etc.
fn rust_version(&self) -> String {
- let mut version = self.rust_info.version(self, &self.version);
+ let mut version = self.rust_info().version(self, &self.version);
if let Some(ref s) = self.config.description {
version.push_str(" (");
version.push_str(s);
@@ -1338,7 +1351,7 @@ impl Build {
/// Returns the full commit hash.
fn rust_sha(&self) -> Option<&str> {
- self.rust_info.sha()
+ self.rust_info().sha()
}
/// Returns the `a.b.c` version that the given package is at.
@@ -1400,7 +1413,7 @@ impl Build {
}
fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> {
- if self.config.dry_run {
+ if self.config.dry_run() {
return Vec::new();
}
@@ -1424,23 +1437,13 @@ impl Build {
paths
}
- /// Create a temporary directory in `out` and return its path.
- ///
- /// NOTE: this temporary directory is shared between all steps;
- /// if you need an empty directory, create a new subdirectory inside it.
- fn tempdir(&self) -> PathBuf {
- let tmp = self.out.join("tmp");
- t!(fs::create_dir_all(&tmp));
- tmp
- }
-
/// Copies a file from `src` to `dst`
pub fn copy(&self, src: &Path, dst: &Path) {
self.copy_internal(src, dst, false);
}
fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
self.verbose_than(1, &format!("Copy {:?} to {:?}", src, dst));
@@ -1477,7 +1480,7 @@ impl Build {
/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called.
pub fn cp_r(&self, src: &Path, dst: &Path) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
for f in self.read_dir(src) {
@@ -1530,7 +1533,7 @@ impl Build {
}
fn install(&self, src: &Path, dstdir: &Path, perms: u32) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
let dst = dstdir.join(src.file_name().unwrap());
@@ -1543,29 +1546,22 @@ impl Build {
chmod(&dst, perms);
}
- fn create(&self, path: &Path, s: &str) {
- if self.config.dry_run {
- return;
- }
- t!(fs::write(path, s));
- }
-
fn read(&self, path: &Path) -> String {
- if self.config.dry_run {
+ if self.config.dry_run() {
return String::new();
}
t!(fs::read_to_string(path))
}
fn create_dir(&self, dir: &Path) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
t!(fs::create_dir_all(dir))
}
fn remove_dir(&self, dir: &Path) {
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
t!(fs::remove_dir_all(dir))
@@ -1574,7 +1570,7 @@ impl Build {
fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> {
let iter = match fs::read_dir(dir) {
Ok(v) => v,
- Err(_) if self.config.dry_run => return vec![].into_iter(),
+ Err(_) if self.config.dry_run() => return vec![].into_iter(),
Err(err) => panic!("could not read dir {:?}: {:?}", dir, err),
};
iter.map(|e| t!(e)).collect::<Vec<_>>().into_iter()
@@ -1585,14 +1581,7 @@ impl Build {
use std::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
- if !self.config.dry_run { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) }
- }
-
- fn remove(&self, f: &Path) {
- if self.config.dry_run {
- return;
- }
- fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f));
+ if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) }
}
     /// Returns whether config.ninja is enabled, and checks for ninja existence,
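
The lib.rs hunks above replace the boolean `config.dry_run` field with a `config.dry_run()` accessor; the `info` hunk shows the backing enum variants `Disabled`, `SelfCheck`, and `UserSelected`. A minimal sketch of what that shape could look like — field and method names are inferred from the call sites in this diff, not copied from the real config.rs:

    // Sketch only; the real definitions live in bootstrap's Config.
    #[derive(Clone, Copy, Default, PartialEq, Eq)]
    pub enum DryRun {
        /// No dry run requested.
        #[default]
        Disabled,
        /// Dry run triggered internally by bootstrap's self-check.
        SelfCheck,
        /// Dry run requested on the command line.
        UserSelected,
    }

    pub struct Config {
        pub dry_run: DryRun,
        // ... remaining fields elided
    }

    impl Config {
        /// Most call sites only need "should side effects be skipped?",
        /// which is why the mechanical `dry_run` -> `dry_run()` rename
        /// appears throughout this patch.
        pub fn dry_run(&self) -> bool {
            match self.dry_run {
                DryRun::Disabled => false,
                DryRun::SelfCheck | DryRun::UserSelected => true,
            }
        }
    }

The three-state enum lets `info()` above stay silent only during the internal self-check while still printing during a user-requested dry run.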
diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs
index 451febddc..c823dc796 100644
--- a/src/bootstrap/metrics.rs
+++ b/src/bootstrap/metrics.rs
@@ -97,7 +97,7 @@ impl BuildMetrics {
cpu_threads_count: system.cpus().len(),
cpu_model: system.cpus()[0].brand().into(),
- memory_total_bytes: system.total_memory() * 1024,
+ memory_total_bytes: system.total_memory(),
};
let steps = std::mem::take(&mut state.finished_steps);
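
The metrics change above drops the `* 1024` scaling because newer releases of the `sysinfo` crate report memory in bytes rather than KiB. A small sketch of the call, assuming a sysinfo release in the 0.2x line where the `SystemExt` trait provides these methods:

    use sysinfo::{System, SystemExt};

    fn total_memory_bytes() -> u64 {
        let mut system = System::new();
        system.refresh_memory();
        // Recent sysinfo returns bytes directly; older releases returned KiB,
        // which is what the removed `* 1024` conversion compensated for.
        system.total_memory()
    }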
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 2f856c276..f6c453ebe 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -19,9 +19,9 @@ use std::process::Command;
use crate::bolt::{instrument_with_bolt_inplace, optimize_library_with_bolt_inplace};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::channel;
-use crate::config::TargetSelection;
+use crate::config::{Config, TargetSelection};
use crate::util::get_clang_cl_resource_dir;
-use crate::util::{self, exe, output, program_out_of_date, t, up_to_date};
+use crate::util::{self, exe, output, t, up_to_date};
use crate::{CLang, GitRepo};
pub struct Meta {
@@ -65,7 +65,7 @@ pub fn prebuilt_llvm_config(
builder: &Builder<'_>,
target: TargetSelection,
) -> Result<PathBuf, Meta> {
- maybe_download_ci_llvm(builder);
+ builder.config.maybe_download_ci_llvm();
// If we're using a custom LLVM bail out here, but we can only use a
// custom LLVM for the build triple.
@@ -117,7 +117,7 @@ pub fn prebuilt_llvm_config(
}
/// This retrieves the LLVM sha we *want* to use, according to git history.
-pub(crate) fn detect_llvm_sha(config: &crate::config::Config, is_git: bool) -> String {
+pub(crate) fn detect_llvm_sha(config: &Config, is_git: bool) -> String {
let llvm_sha = if is_git {
let mut rev_list = config.git();
rev_list.args(&[
@@ -155,7 +155,7 @@ pub(crate) fn detect_llvm_sha(config: &crate::config::Config, is_git: bool) -> S
/// This checks both the build triple platform to confirm we're usable at all,
/// and then verifies if the current HEAD matches the detected LLVM SHA head,
/// in which case LLVM is indicated as not available.
-pub(crate) fn is_ci_llvm_available(config: &crate::config::Config, asserts: bool) -> bool {
+pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool {
// This is currently all tier 1 targets and tier 2 targets with host tools
// (since others may not have CI artifacts)
// https://doc.rust-lang.org/rustc/platform-support.html#tier-1
@@ -217,80 +217,6 @@ pub(crate) fn is_ci_llvm_available(config: &crate::config::Config, asserts: bool
true
}
-pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
- let config = &builder.config;
- if !config.llvm_from_ci {
- return;
- }
- let llvm_root = config.ci_llvm_root();
- let llvm_stamp = llvm_root.join(".llvm-stamp");
- let llvm_sha = detect_llvm_sha(&config, builder.rust_info.is_managed_git_subrepository());
- let key = format!("{}{}", llvm_sha, config.llvm_assertions);
- if program_out_of_date(&llvm_stamp, &key) && !config.dry_run {
- download_ci_llvm(builder, &llvm_sha);
- for entry in t!(fs::read_dir(llvm_root.join("bin"))) {
- builder.fix_bin_or_dylib(&t!(entry).path());
- }
-
- // Update the timestamp of llvm-config to force rustc_llvm to be
- // rebuilt. This is a hacky workaround for a deficiency in Cargo where
- // the rerun-if-changed directive doesn't handle changes very well.
- // https://github.com/rust-lang/cargo/issues/10791
- // Cargo only compares the timestamp of the file relative to the last
- // time `rustc_llvm` build script ran. However, the timestamps of the
- // files in the tarball are in the past, so it doesn't trigger a
- // rebuild.
- let now = filetime::FileTime::from_system_time(std::time::SystemTime::now());
- let llvm_config = llvm_root.join("bin").join(exe("llvm-config", builder.config.build));
- t!(filetime::set_file_times(&llvm_config, now, now));
-
- let llvm_lib = llvm_root.join("lib");
- for entry in t!(fs::read_dir(&llvm_lib)) {
- let lib = t!(entry).path();
- if lib.extension().map_or(false, |ext| ext == "so") {
- builder.fix_bin_or_dylib(&lib);
- }
- }
- t!(fs::write(llvm_stamp, key));
- }
-}
-
-fn download_ci_llvm(builder: &Builder<'_>, llvm_sha: &str) {
- let llvm_assertions = builder.config.llvm_assertions;
-
- let cache_prefix = format!("llvm-{}-{}", llvm_sha, llvm_assertions);
- let cache_dst = builder.out.join("cache");
- let rustc_cache = cache_dst.join(cache_prefix);
- if !rustc_cache.exists() {
- t!(fs::create_dir_all(&rustc_cache));
- }
- let base = if llvm_assertions {
- &builder.config.stage0_metadata.config.artifacts_with_llvm_assertions_server
- } else {
- &builder.config.stage0_metadata.config.artifacts_server
- };
- let channel = builder.config.artifact_channel(builder, llvm_sha);
- let filename = format!("rust-dev-{}-{}.tar.xz", channel, builder.build.build.triple);
- let tarball = rustc_cache.join(&filename);
- if !tarball.exists() {
- let help_on_error = "error: failed to download llvm from ci
-
-help: old builds get deleted after a certain time
-help: if trying to compile an old commit of rustc, disable `download-ci-llvm` in config.toml:
-
-[llvm]
-download-ci-llvm = false
-";
- builder.download_component(
- &format!("{base}/{llvm_sha}/{filename}"),
- &tarball,
- help_on_error,
- );
- }
- let llvm_root = builder.config.ci_llvm_root();
- builder.unpack(&tarball, &llvm_root, "rust-dev");
-}
-
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Llvm {
pub target: TargetSelection,
@@ -505,7 +431,7 @@ impl Step for Llvm {
// https://llvm.org/docs/HowToCrossCompileLLVM.html
if target != builder.config.build {
let llvm_config = builder.ensure(Llvm { target: builder.config.build });
- if !builder.config.dry_run {
+ if !builder.config.dry_run() {
let llvm_bindir = output(Command::new(&llvm_config).arg("--bindir"));
let host_bin = Path::new(llvm_bindir.trim());
cfg.define(
@@ -519,7 +445,7 @@ impl Step for Llvm {
if builder.config.llvm_clang {
let build_bin = builder.llvm_out(builder.config.build).join("build").join("bin");
let clang_tblgen = build_bin.join("clang-tblgen").with_extension(EXE_EXTENSION);
- if !builder.config.dry_run && !clang_tblgen.exists() {
+ if !builder.config.dry_run() && !clang_tblgen.exists() {
panic!("unable to find {}", clang_tblgen.display());
}
cfg.define("CLANG_TABLEGEN", clang_tblgen);
@@ -553,7 +479,7 @@ impl Step for Llvm {
// tools. Figure out how to filter them down and only build the right
// tools and libs on all platforms.
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return build_llvm_config;
}
@@ -611,7 +537,7 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
return;
}
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
@@ -872,7 +798,7 @@ impl Step for Lld {
/// Compile LLD for `target`.
fn run(self, builder: &Builder<'_>) -> PathBuf {
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return PathBuf::from("lld-out-dir-test-gen");
}
let target = self.target;
@@ -990,7 +916,7 @@ impl Step for TestHelpers {
/// Compiles the `rust_test_helpers.c` library which we used in various
/// `run-pass` tests for ABI testing.
fn run(self, builder: &Builder<'_>) {
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
// The x86_64-fortanix-unknown-sgx target doesn't have a working C
@@ -1066,7 +992,7 @@ impl Step for Sanitizers {
}
let llvm_config = builder.ensure(Llvm { target: builder.config.build });
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return runtimes;
}
@@ -1240,7 +1166,7 @@ impl Step for CrtBeginEnd {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let out_dir = builder.native_dir(self.target).join("crt");
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return out_dir;
}
@@ -1304,7 +1230,7 @@ impl Step for Libunwind {
     /// Build libunwind.a
fn run(self, builder: &Builder<'_>) -> Self::Output {
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return PathBuf::new();
}
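
The `maybe_download_ci_llvm`/`download_ci_llvm` pair removed above moved behind `Config::maybe_download_ci_llvm()` (called near the top of this file's hunks). The core idea is stamp-based caching: re-download only when the desired LLVM sha plus assertion setting differs from what a stamp file records. A condensed sketch of that pattern with the download itself stubbed out; `stamp_out_of_date` and `maybe_download_ci_llvm` here are illustrative helpers, not the real bootstrap functions:

    use std::fs;
    use std::path::Path;

    /// Returns true when `stamp` is missing or records a different key.
    fn stamp_out_of_date(stamp: &Path, key: &str) -> bool {
        match fs::read_to_string(stamp) {
            Ok(prev) => prev != key,
            Err(_) => true,
        }
    }

    fn maybe_download_ci_llvm(llvm_root: &Path, llvm_sha: &str, llvm_assertions: bool, dry_run: bool) {
        let llvm_stamp = llvm_root.join(".llvm-stamp");
        // Same key shape as the removed code: sha concatenated with the assertions flag.
        let key = format!("{}{}", llvm_sha, llvm_assertions);
        if stamp_out_of_date(&llvm_stamp, &key) && !dry_run {
            // download and unpack the `rust-dev` tarball here (elided)...
            fs::write(&llvm_stamp, key).expect("failed to write LLVM stamp");
        }
    }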
diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs
index 511872903..05de51f8c 100644
--- a/src/bootstrap/run.rs
+++ b/src/bootstrap/run.rs
@@ -1,8 +1,13 @@
+use std::path::PathBuf;
+use std::process::Command;
+
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
+use crate::config::TargetSelection;
use crate::dist::distdir;
-use crate::tool::Tool;
+use crate::test;
+use crate::tool::{self, SourceType, Tool};
use crate::util::output;
-use std::process::Command;
+use crate::Mode;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;
@@ -125,3 +130,125 @@ impl Step for ReplaceVersionPlaceholder {
builder.run(&mut cmd);
}
}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Miri {
+ stage: u32,
+ host: TargetSelection,
+ target: TargetSelection,
+}
+
+impl Step for Miri {
+ type Output = ();
+ const ONLY_HOSTS: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/miri")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Miri {
+ stage: run.builder.top_stage,
+ host: run.build_triple(),
+ target: run.target,
+ });
+ }
+
+ fn run(self, builder: &Builder<'_>) {
+ let stage = self.stage;
+ let host = self.host;
+ let target = self.target;
+ let compiler = builder.compiler(stage, host);
+
+ let miri = builder
+ .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
+ let miri_sysroot = test::Miri::build_miri_sysroot(builder, compiler, &miri, target);
+
+ // # Run miri.
+ // Running it via `cargo run` as that figures out the right dylib path.
+ // add_rustc_lib_path does not add the path that contains librustc_driver-<...>.so.
+ let mut miri = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "run",
+ "src/tools/miri",
+ SourceType::InTree,
+ &[],
+ );
+ miri.add_rustc_lib_path(builder, compiler);
+ // Forward arguments.
+ miri.arg("--").arg("--target").arg(target.rustc_target_arg());
+ miri.args(builder.config.cmd.args());
+
+ // miri tests need to know about the stage sysroot
+ miri.env("MIRI_SYSROOT", &miri_sysroot);
+
+ let mut miri = Command::from(miri);
+ builder.run(&mut miri);
+ }
+}
+
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct CollectLicenseMetadata;
+
+impl Step for CollectLicenseMetadata {
+ type Output = PathBuf;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/collect-license-metadata")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(CollectLicenseMetadata);
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let Some(reuse) = &builder.config.reuse else {
+ panic!("REUSE is required to collect the license metadata");
+ };
+
+ // Temporary location, it will be moved to src/etc once it's accurate.
+ let dest = builder.out.join("license-metadata.json");
+
+ let mut cmd = builder.tool_cmd(Tool::CollectLicenseMetadata);
+ cmd.env("REUSE_EXE", reuse);
+ cmd.env("DEST", &dest);
+ builder.run(&mut cmd);
+
+ dest
+ }
+}
+
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct GenerateCopyright;
+
+impl Step for GenerateCopyright {
+ type Output = PathBuf;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/generate-copyright")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(GenerateCopyright);
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let license_metadata = builder.ensure(CollectLicenseMetadata);
+
+ // Temporary location, it will be moved to the proper one once it's accurate.
+ let dest = builder.out.join("COPYRIGHT.md");
+
+ let mut cmd = builder.tool_cmd(Tool::GenerateCopyright);
+ cmd.env("LICENSE_METADATA", &license_metadata);
+ cmd.env("DEST", &dest);
+ builder.run(&mut cmd);
+
+ dest
+ }
+}
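
The three steps added above (`Miri`, `CollectLicenseMetadata`, `GenerateCopyright`) all follow bootstrap's `Step` contract: `should_run` declares which paths select the step, `make_run` constructs it from the CLI invocation, and `run` does the work and returns `Self::Output`. A stripped-down, hypothetical step showing just that shape; the stand-in types and the `MyTool` name are illustrative and not part of this diff:

    // Minimal stand-ins so the shape compiles on its own; the real Step,
    // Builder, ShouldRun, and RunConfig live in src/bootstrap/builder.rs.
    struct Builder;
    struct ShouldRun;
    struct RunConfig;

    impl ShouldRun {
        fn path(self, _p: &str) -> Self { self }
    }

    trait Step: Sized {
        type Output;
        const ONLY_HOSTS: bool;
        fn should_run(run: ShouldRun) -> ShouldRun;
        fn make_run(run: RunConfig);
        fn run(self, builder: &Builder) -> Self::Output;
    }

    /// Hypothetical step, only here to show the pattern.
    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
    struct MyTool;

    impl Step for MyTool {
        type Output = ();
        const ONLY_HOSTS: bool = true;

        fn should_run(run: ShouldRun) -> ShouldRun {
            // Selected when the user passes this path to `x.py run`.
            run.path("src/tools/my-tool")
        }

        fn make_run(_run: RunConfig) {
            // The real code calls `run.builder.ensure(MyTool)` here.
        }

        fn run(self, _builder: &Builder) -> Self::Output {
            // Build and invoke the tool here.
        }
    }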
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index e90551725..8a40b0f64 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -74,7 +74,7 @@ pub fn check(build: &mut Build) {
let mut cmd_finder = Finder::new();
// If we've got a git directory we're gonna need git to update
// submodules and learn about various other aspects.
- if build.rust_info.is_managed_git_subrepository() {
+ if build.rust_info().is_managed_git_subrepository() {
cmd_finder.must_have("git");
}
@@ -140,6 +140,13 @@ than building it.
.map(|p| cmd_finder.must_have(p))
.or_else(|| cmd_finder.maybe_have("gdb"));
+ build.config.reuse = build
+ .config
+ .reuse
+ .take()
+ .map(|p| cmd_finder.must_have(p))
+ .or_else(|| cmd_finder.maybe_have("reuse"));
+
// We're gonna build some custom C code here and there, host triples
// also build some C++ shims for LLVM so we need a C++ compiler.
for target in &build.targets {
@@ -155,7 +162,15 @@ than building it.
continue;
}
- if !build.config.dry_run {
+ // Some environments don't want or need these tools, such as when testing Miri.
+ // FIXME: it would be better to refactor this code to split necessary setup from pure sanity
+ // checks, and have a regular flag for skipping the latter. Also see
+ // <https://github.com/rust-lang/rust/pull/103569#discussion_r1008741742>.
+ if env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some() {
+ continue;
+ }
+
+ if !build.config.dry_run() {
cmd_finder.must_have(build.cc(*target));
if let Some(ar) = build.ar(*target) {
cmd_finder.must_have(ar);
@@ -164,7 +179,7 @@ than building it.
}
for host in &build.hosts {
- if !build.config.dry_run {
+ if !build.config.dry_run() {
cmd_finder.must_have(build.cxx(*host).unwrap());
}
}
@@ -212,6 +227,14 @@ than building it.
}
}
+ // Some environments don't want or need these tools, such as when testing Miri.
+ // FIXME: it would be better to refactor this code to split necessary setup from pure sanity
+ // checks, and have a regular flag for skipping the latter. Also see
+ // <https://github.com/rust-lang/rust/pull/103569#discussion_r1008741742>.
+ if env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some() {
+ continue;
+ }
+
if need_cmake && target.contains("msvc") {
// There are three builds of cmake on windows: MSVC, MinGW, and
// Cygwin. The Cygwin build does not have generators for Visual
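
The sanity hunks above register the optional `reuse` tool the same way `gdb` is handled: `must_have` if the user configured a path, otherwise `maybe_have` to probe `PATH`. The `Finder` internals are not part of this diff; a rough sketch of that probe pattern, assuming a plain `PATH` walk (Windows extensions such as `.exe` are ignored to keep it short):

    use std::env;
    use std::path::PathBuf;

    /// Probe PATH for `cmd`; `None` if it is not found.
    fn maybe_have(cmd: &str) -> Option<PathBuf> {
        let path = env::var_os("PATH")?;
        env::split_paths(&path).map(|dir| dir.join(cmd)).find(|candidate| candidate.is_file())
    }

    /// Like `maybe_have`, but fails the sanity check when the tool is missing.
    fn must_have(cmd: &str) -> PathBuf {
        maybe_have(cmd).unwrap_or_else(|| panic!("`{}` is required but was not found in PATH", cmd))
    }

In the hunk above the registration reads `config.reuse.take().map(must_have).or_else(maybe_have("reuse"))`-style, so an explicitly configured path is mandatory while the PATH fallback stays optional.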
diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs
index eb7da1bda..c7f98a7d0 100644
--- a/src/bootstrap/setup.rs
+++ b/src/bootstrap/setup.rs
@@ -1,15 +1,13 @@
+use crate::Config;
use crate::{t, VERSION};
-use crate::{Config, TargetSelection};
use std::env::consts::EXE_SUFFIX;
use std::fmt::Write as _;
use std::fs::File;
+use std::io::Write;
use std::path::{Path, PathBuf, MAIN_SEPARATOR};
use std::process::Command;
use std::str::FromStr;
-use std::{
- env, fmt, fs,
- io::{self, Write},
-};
+use std::{fmt, fs, io};
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Profile {
@@ -81,38 +79,10 @@ impl fmt::Display for Profile {
}
}
-pub fn setup(config: &Config, profile: Profile) {
- let path = &config.config;
-
- if path.exists() {
- eprintln!(
- "error: you asked `x.py` to setup a new config file, but one already exists at `{}`",
- path.display()
- );
- eprintln!("help: try adding `profile = \"{}\"` at the top of {}", profile, path.display());
- eprintln!(
- "note: this will use the configuration in {}",
- profile.include_path(&config.src).display()
- );
- crate::detail_exit(1);
- }
-
- let settings = format!(
- "# Includes one of the default files in src/bootstrap/defaults\n\
- profile = \"{}\"\n\
- changelog-seen = {}\n",
- profile, VERSION
- );
- t!(fs::write(path, settings));
-
- let include_path = profile.include_path(&config.src);
- println!("`x.py` will now use the configuration at {}", include_path.display());
-
- let build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
+pub fn setup(config: &Config, profile: Option<Profile>) {
+ let profile = profile.unwrap_or_else(|| t!(interactive_path()));
let stage_path =
- ["build", build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
-
- println!();
+ ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
if !rustup_installed() && profile != Profile::User {
eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
@@ -134,8 +104,6 @@ pub fn setup(config: &Config, profile: Profile) {
Profile::User => &["dist", "build"],
};
- println!();
-
t!(install_git_hook_maybe(&config));
println!();
@@ -150,6 +118,36 @@ pub fn setup(config: &Config, profile: Profile) {
"For more suggestions, see https://rustc-dev-guide.rust-lang.org/building/suggested.html"
);
}
+
+ let path = &config.config.clone().unwrap_or(PathBuf::from("config.toml"));
+ setup_config_toml(path, profile, config);
+}
+
+fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
+ if path.exists() {
+ eprintln!();
+ eprintln!(
+ "error: you asked `x.py` to setup a new config file, but one already exists at `{}`",
+ path.display()
+ );
+ eprintln!("help: try adding `profile = \"{}\"` at the top of {}", profile, path.display());
+ eprintln!(
+ "note: this will use the configuration in {}",
+ profile.include_path(&config.src).display()
+ );
+ crate::detail_exit(1);
+ }
+
+ let settings = format!(
+ "# Includes one of the default files in src/bootstrap/defaults\n\
+ profile = \"{}\"\n\
+ changelog-seen = {}\n",
+ profile, VERSION
+ );
+ t!(fs::write(path, settings));
+
+ let include_path = profile.include_path(&config.src);
+ println!("`x.py` will now use the configuration at {}", include_path.display());
}
fn rustup_installed() -> bool {
@@ -303,7 +301,18 @@ pub fn interactive_path() -> io::Result<Profile> {
// install a git hook to automatically run tidy --bless, if they want
fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
+ let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
+ assert!(output.status.success(), "failed to run `git`");
+ PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
+ }));
+ let dst = git.join("hooks").join("pre-push");
+ if dst.exists() {
+ // The git hook has already been set up, or the user already has a custom hook.
+ return Ok(());
+ }
+
let mut input = String::new();
+ println!();
println!(
"Rust's CI will automatically fail if it doesn't pass `tidy`, the internal tool for ensuring code quality.
If you'd like, x.py can install a git hook for you that will automatically run `tidy --bless` before
@@ -329,12 +338,6 @@ undesirable, simply delete the `pre-push` file from .git/hooks."
if should_install {
let src = config.src.join("src").join("etc").join("pre-push.sh");
- let git =
- t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
- assert!(output.status.success(), "failed to run `git`");
- PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
- }));
- let dst = git.join("hooks").join("pre-push");
match fs::hard_link(src, &dst) {
Err(e) => eprintln!(
"error: could not create hook {}: do you already have the git hook installed?\n{}",
diff --git a/src/bootstrap/tarball.rs b/src/bootstrap/tarball.rs
index d999b6c15..fc850a22b 100644
--- a/src/bootstrap/tarball.rs
+++ b/src/bootstrap/tarball.rs
@@ -298,7 +298,7 @@ impl<'a> Tarball<'a> {
fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball {
t!(std::fs::create_dir_all(&self.overlay_dir));
self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder));
- if let Some(info) = self.builder.rust_info.info() {
+ if let Some(info) = self.builder.rust_info().info() {
channel::write_commit_hash_file(&self.overlay_dir, &info.sha);
channel::write_commit_info_file(&self.overlay_dir, info);
}
@@ -323,7 +323,7 @@ impl<'a> Tarball<'a> {
// Ensure there are no symbolic links in the tarball. In particular,
// rustup-toolchain-install-master and most versions of Windows can't handle symbolic links.
let decompressed_output = self.temp_dir.join(&package_name);
- if !self.builder.config.dry_run && !self.permit_symlinks {
+ if !self.builder.config.dry_run() && !self.permit_symlinks {
for entry in walkdir::WalkDir::new(&decompressed_output) {
let entry = t!(entry);
if entry.path_is_symlink() {
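
The tarball hunk above only changes the dry-run guard, but the symlink scan it guards is a useful pattern in its own right: walk the decompressed output and reject any entry that is a symbolic link, since rustup-toolchain-install-master and most Windows setups can't handle them. A hedged standalone version using the same `walkdir` crate the original code calls:

    use std::path::{Path, PathBuf};

    /// Returns the paths of all symlinks under `root` (empty if there are none).
    fn find_symlinks(root: &Path) -> Vec<PathBuf> {
        walkdir::WalkDir::new(root)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| entry.path_is_symlink())
            .map(|entry| entry.into_path())
            .collect()
    }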
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index 791c35c36..39cedfdac 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -16,6 +16,7 @@ use crate::cache::Interned;
use crate::compile;
use crate::config::TargetSelection;
use crate::dist;
+use crate::doc::DocumentationFormat;
use crate::flags::Subcommand;
use crate::native;
use crate::tool::{self, SourceType, Tool};
@@ -90,6 +91,42 @@ fn try_run_quiet(builder: &Builder<'_>, cmd: &mut Command) -> bool {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CrateJsonDocLint {
+ host: TargetSelection,
+}
+
+impl Step for CrateJsonDocLint {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/jsondoclint")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(CrateJsonDocLint { host: run.target });
+ }
+
+ fn run(self, builder: &Builder<'_>) {
+ let bootstrap_host = builder.config.build;
+ let compiler = builder.compiler(0, bootstrap_host);
+
+ let cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolBootstrap,
+ bootstrap_host,
+ "test",
+ "src/tools/jsondoclint",
+ SourceType::InTree,
+ &[],
+ );
+ try_run(builder, &mut cargo.into());
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Linkcheck {
host: TargetSelection,
}
@@ -464,52 +501,23 @@ pub struct Miri {
target: TargetSelection,
}
-impl Step for Miri {
- type Output = ();
- const ONLY_HOSTS: bool = false;
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.path("src/tools/miri")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Miri {
- stage: run.builder.top_stage,
- host: run.build_triple(),
- target: run.target,
- });
- }
-
- /// Runs `cargo test` for miri.
- fn run(self, builder: &Builder<'_>) {
- let stage = self.stage;
- let host = self.host;
- let target = self.target;
- let compiler = builder.compiler(stage, host);
- // We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri.
- // Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage.
- let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host);
-
- let miri = builder
- .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() })
- .expect("in-tree tool");
- let _cargo_miri = builder
- .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() })
- .expect("in-tree tool");
- // The stdlib we need might be at a different stage. And just asking for the
- // sysroot does not seem to populate it, so we do that first.
- builder.ensure(compile::Std::new(compiler_std, host));
- let sysroot = builder.sysroot(compiler_std);
-
- // # Run `cargo miri setup` for the given target.
+impl Miri {
+ /// Run `cargo miri setup` for the given target, return where the Miri sysroot was put.
+ pub fn build_miri_sysroot(
+ builder: &Builder<'_>,
+ compiler: Compiler,
+ miri: &Path,
+ target: TargetSelection,
+ ) -> String {
+ let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot");
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
- host,
+ compiler.host,
"run",
"src/tools/miri/cargo-miri",
- SourceType::Submodule,
+ SourceType::InTree,
&[],
);
cargo.add_rustc_lib_path(builder, compiler);
@@ -520,6 +528,8 @@ impl Step for Miri {
cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
// Tell it where to find Miri.
cargo.env("MIRI", &miri);
+ // Tell it where to put the sysroot.
+ cargo.env("MIRI_SYSROOT", &miri_sysroot);
// Debug things.
cargo.env("RUST_BACKTRACE", "1");
@@ -534,7 +544,7 @@ impl Step for Miri {
cargo.arg("--print-sysroot");
// FIXME: Is there a way in which we can re-use the usual `run` helpers?
- let miri_sysroot = if builder.config.dry_run {
+ if builder.config.dry_run() {
String::new()
} else {
builder.verbose(&format!("running: {:?}", cargo));
@@ -547,7 +557,48 @@ impl Step for Miri {
let sysroot = stdout.trim_end();
builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
sysroot.to_owned()
- };
+ }
+ }
+}
+
+impl Step for Miri {
+ type Output = ();
+ const ONLY_HOSTS: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/miri")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Miri {
+ stage: run.builder.top_stage,
+ host: run.build_triple(),
+ target: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for miri.
+ fn run(self, builder: &Builder<'_>) {
+ let stage = self.stage;
+ let host = self.host;
+ let target = self.target;
+ let compiler = builder.compiler(stage, host);
+ // We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri.
+ // Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage.
+ let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host);
+
+ let miri = builder
+ .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
+ let _cargo_miri = builder
+ .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
+ // The stdlib we need might be at a different stage. And just asking for the
+ // sysroot does not seem to populate it, so we do that first.
+ builder.ensure(compile::Std::new(compiler_std, host));
+ let sysroot = builder.sysroot(compiler_std);
+ // We also need a Miri sysroot.
+ let miri_sysroot = Miri::build_miri_sysroot(builder, compiler, &miri, target);
// # Run `cargo test`.
let mut cargo = tool::prepare_tool_cargo(
@@ -557,7 +608,7 @@ impl Step for Miri {
host,
"test",
"src/tools/miri",
- SourceType::Submodule,
+ SourceType::InTree,
&[],
);
cargo.add_rustc_lib_path(builder, compiler);
@@ -565,7 +616,6 @@ impl Step for Miri {
// miri tests need to know about the stage sysroot
cargo.env("MIRI_SYSROOT", &miri_sysroot);
cargo.env("MIRI_HOST_SYSROOT", sysroot);
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
cargo.env("MIRI", &miri);
// propagate --bless
if builder.config.cmd.bless() {
@@ -606,7 +656,6 @@ impl Step for Miri {
// Tell `cargo miri` where to find things.
cargo.env("MIRI_SYSROOT", &miri_sysroot);
cargo.env("MIRI_HOST_SYSROOT", sysroot);
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
cargo.env("MIRI", &miri);
// Debug things.
cargo.env("RUST_BACKTRACE", "1");
@@ -771,7 +820,10 @@ impl Step for RustdocTheme {
cmd.env("RUSTDOC_LINKER", linker);
}
if builder.is_fuse_ld_lld(self.compiler.host) {
- cmd.env("RUSTDOC_FUSE_LD_LLD", "1");
+ cmd.env(
+ "RUSTDOC_LLD_NO_THREADS",
+ util::lld_flag_no_threads(self.compiler.host.contains("windows")),
+ );
}
try_run(builder, &mut cmd);
}
@@ -819,7 +871,11 @@ impl Step for RustdocJSStd {
command.arg("--test-file").arg(path);
}
}
- builder.ensure(crate::doc::Std { target: self.target, stage: builder.top_stage });
+ builder.ensure(crate::doc::Std {
+ target: self.target,
+ stage: builder.top_stage,
+ format: DocumentationFormat::HTML,
+ });
builder.run(&mut command);
} else {
builder.info("No nodejs found, skipping \"src/test/rustdoc-js-std\" tests");
@@ -991,6 +1047,8 @@ impl Step for RustdocGUI {
// instead of hard-coding this test
if entry.file_name() == "link_to_definition" {
cargo.env("RUSTDOCFLAGS", "-Zunstable-options --generate-link-to-definition");
+ } else if entry.file_name() == "scrape_examples" {
+ cargo.arg("-Zrustdoc-scrape-examples=examples");
}
builder.run(&mut cargo);
}
@@ -1049,6 +1107,9 @@ impl Step for Tidy {
if builder.is_verbose() {
cmd.arg("--verbose");
}
+ if builder.config.cmd.bless() {
+ cmd.arg("--bless");
+ }
builder.info("tidy check");
try_run(builder, &mut cmd);
@@ -1378,6 +1439,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--src-base").arg(builder.src.join("src/test").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
+ cmd.arg("--sysroot-base").arg(builder.sysroot(compiler));
cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
cmd.arg("--suite").arg(suite);
cmd.arg("--mode").arg(mode);
@@ -1514,7 +1576,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
let mut copts_passed = false;
if builder.config.llvm_enabled() {
let llvm_config = builder.ensure(native::Llvm { target: builder.config.build });
- if !builder.config.dry_run {
+ if !builder.config.dry_run() {
let llvm_version = output(Command::new(&llvm_config).arg("--version"));
let llvm_components = output(Command::new(&llvm_config).arg("--components"));
// Remove trailing newline from llvm-config output.
@@ -1532,14 +1594,14 @@ note: if you're sure you want to do this, please open an issue as to why. In the
// requirement, but the `-L` library path is not propagated across
// separate compilations. We can add LLVM's library path to the
// platform-specific environment variable as a workaround.
- if !builder.config.dry_run && suite.ends_with("fulldeps") {
+ if !builder.config.dry_run() && suite.ends_with("fulldeps") {
let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir"));
add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd);
}
// Only pass correct values for these flags for the `run-make` suite as it
// requires that a C++ compiler was configured which isn't always the case.
- if !builder.config.dry_run && matches!(suite, "run-make" | "run-make-fulldeps") {
+ if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
// The llvm/bin directory contains many useful cross-platform
// tools. Pass the path to run-make tests so they can use them.
let llvm_bin_path = llvm_config
@@ -1567,7 +1629,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
// Only pass correct values for these flags for the `run-make` suite as it
// requires that a C++ compiler was configured which isn't always the case.
- if !builder.config.dry_run && matches!(suite, "run-make" | "run-make-fulldeps") {
+ if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
cmd.arg("--cc")
.arg(builder.cc(target))
.arg("--cxx")
@@ -1647,6 +1709,10 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--channel").arg(&builder.config.channel);
+ if let Some(commit) = builder.config.download_rustc_commit() {
+ cmd.env("FAKE_DOWNLOAD_RUSTC_PREFIX", format!("/rustc/{commit}"));
+ }
+
builder.ci_env.force_coloring_in_ci(&mut cmd);
builder.info(&format!(
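
The test.rs refactor above extracts `Miri::build_miri_sysroot`, which runs `cargo miri setup --print-sysroot` and treats the command's trimmed stdout as the sysroot path, skipping the work entirely under `dry_run()`. A reduced sketch of that capture step, with the command construction elided and `capture_miri_sysroot` as an illustrative name:

    use std::process::Command;

    /// Run an already-configured `cargo miri setup --print-sysroot` command and
    /// return the sysroot path it prints. Returns an empty string on dry runs.
    fn capture_miri_sysroot(mut cargo: Command, dry_run: bool) -> String {
        if dry_run {
            return String::new();
        }
        let out = cargo.output().expect("failed to run `cargo miri setup`");
        assert!(out.status.success(), "`cargo miri setup` failed");
        let stdout = String::from_utf8(out.stdout).expect("sysroot path is not valid UTF-8");
        stdout.trim_end().to_owned()
    }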
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index eec74b267..e0be4c432 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -380,6 +380,8 @@ bootstrap_tool!(
HtmlChecker, "src/tools/html-checker", "html-checker";
BumpStage0, "src/tools/bump-stage0", "bump-stage0";
ReplaceVersionPlaceholder, "src/tools/replace-version-placeholder", "replace-version-placeholder";
+ CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata";
+ GenerateCopyright, "src/tools/generate-copyright", "generate-copyright";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
@@ -522,7 +524,7 @@ impl Step for Rustdoc {
builder.ensure(compile::Rustc::new(build_compiler, target_compiler.host));
// NOTE: this implies that `download-rustc` is pretty useless when compiling with the stage0
// compiler, since you do just as much work.
- if !builder.config.dry_run && builder.download_rustc() && build_compiler.stage == 0 {
+ if !builder.config.dry_run() && builder.download_rustc() && build_compiler.stage == 0 {
println!(
"warning: `download-rustc` does nothing when building stage1 tools; consider using `--stage 2` instead"
);
@@ -794,10 +796,9 @@ macro_rules! tool_extended {
$($name:ident,
$path:expr,
$tool_name:expr,
- stable = $stable:expr,
- $(in_tree = $in_tree:expr,)?
- $(tool_std = $tool_std:literal,)?
- $extra_deps:block;)+) => {
+ stable = $stable:expr
+ $(,tool_std = $tool_std:literal)?
+ ;)+) => {
$(
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
@@ -839,7 +840,6 @@ macro_rules! tool_extended {
#[allow(unused_mut)]
fn run(mut $sel, $builder: &Builder<'_>) -> Option<PathBuf> {
- $extra_deps
$builder.ensure(ToolBuild {
compiler: $sel.compiler,
target: $sel.target,
@@ -848,11 +848,7 @@ macro_rules! tool_extended {
path: $path,
extra_features: $sel.extra_features,
is_optional_tool: true,
- source_type: if false $(|| $in_tree)* {
- SourceType::InTree
- } else {
- SourceType::Submodule
- },
+ source_type: SourceType::InTree,
})
}
}
@@ -865,17 +861,17 @@ macro_rules! tool_extended {
// Note: Most submodule updates for tools are handled by bootstrap.py, since they're needed just to
// invoke Cargo to build bootstrap. See the comment there for more details.
tool_extended!((self, builder),
- Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true, in_tree=true, {};
- CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true, in_tree=true, {};
- Clippy, "src/tools/clippy", "clippy-driver", stable=true, in_tree=true, {};
- Miri, "src/tools/miri", "miri", stable=false, in_tree=true, {};
- CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, in_tree=true, {};
+ Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true;
+ CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true;
+ Clippy, "src/tools/clippy", "clippy-driver", stable=true;
+ Miri, "src/tools/miri", "miri", stable=false;
+ CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=true;
// FIXME: tool_std is not quite right, we shouldn't allow nightly features.
// But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0,
// and this is close enough for now.
- Rls, "src/tools/rls", "rls", stable=true, in_tree=true, tool_std=true, {};
- RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, in_tree=true, tool_std=true, {};
- Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, in_tree=true, {};
+ Rls, "src/tools/rls", "rls", stable=true, tool_std=true;
+ RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, tool_std=true;
+ Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true;
);
impl<'a> Builder<'a> {
diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs
index 1a1774432..1969e0b6f 100644
--- a/src/bootstrap/toolstate.rs
+++ b/src/bootstrap/toolstate.rs
@@ -158,7 +158,7 @@ impl Step for ToolStateCheck {
/// stable tool. That is, the status is not allowed to get worse
/// (test-pass to test-fail or build-fail).
fn run(self, builder: &Builder<'_>) {
- if builder.config.dry_run {
+ if builder.config.dry_run() {
return;
}
@@ -265,7 +265,7 @@ impl Builder<'_> {
// If we're in a dry run setting we don't want to save toolstates as
// that means if we e.g. panic down the line it'll look like we tested
// everything (but we actually haven't).
- if self.config.dry_run {
+ if self.config.dry_run() {
return;
}
// Toolstate isn't tracked for clippy or rustfmt, but since most tools do, we avoid checking
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 0ebabbd5c..582207832 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -13,6 +13,7 @@ use std::time::{Instant, SystemTime, UNIX_EPOCH};
use crate::builder::Builder;
use crate::config::{Config, TargetSelection};
+use crate::OnceCell;
/// A helper macro to `unwrap` a result except also print out details like:
///
@@ -43,7 +44,13 @@ pub use t;
/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
pub fn exe(name: &str, target: TargetSelection) -> String {
- if target.contains("windows") { format!("{}.exe", name) } else { name.to_string() }
+ if target.contains("windows") {
+ format!("{}.exe", name)
+ } else if target.contains("uefi") {
+ format!("{}.efi", name)
+ } else {
+ name.to_string()
+ }
}
/// Returns `true` if the file name given looks like a dynamic library.
@@ -104,7 +111,7 @@ pub struct TimeIt(bool, Instant);
/// Returns an RAII structure that prints out how long it took to drop.
pub fn timeit(builder: &Builder<'_>) -> TimeIt {
- TimeIt(builder.config.dry_run, Instant::now())
+ TimeIt(builder.config.dry_run(), Instant::now())
}
impl Drop for TimeIt {
@@ -127,7 +134,7 @@ pub(crate) fn program_out_of_date(stamp: &Path, key: &str) -> bool {
/// Symlinks two directories, using junctions on Windows and normal symlinks on
/// Unix.
pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
- if config.dry_run {
+ if config.dry_run() {
return Ok(());
}
let _ = fs::remove_dir(dest);
@@ -607,3 +614,16 @@ pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf {
let clang_rt_dir = clang_rt_builtins.parent().expect("The clang lib folder should exist");
clang_rt_dir.to_path_buf()
}
+
+pub fn lld_flag_no_threads(is_windows: bool) -> &'static str {
+ static LLD_NO_THREADS: OnceCell<(&'static str, &'static str)> = OnceCell::new();
+ let (windows, other) = LLD_NO_THREADS.get_or_init(|| {
+ let out = output(Command::new("lld").arg("-flavor").arg("ld").arg("--version"));
+ let newer = match (out.find(char::is_numeric), out.find('.')) {
+ (Some(b), Some(e)) => out.as_str()[b..e].parse::<i32>().ok().unwrap_or(14) > 10,
+ _ => true,
+ };
+ if newer { ("/threads:1", "--threads=1") } else { ("/no-threads", "--no-threads") }
+ });
+ if is_windows { windows } else { other }
+}
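
Both call sites added earlier in this diff consume this helper the same way: lib.rs wraps the returned flag in a `-Clink-arg=-Wl,...` rustc option, and test.rs exports it to rustdoc through `RUSTDOC_LLD_NO_THREADS`. A small usage sketch of the first form; it relies on the `lld_flag_no_threads` function shown directly above, and the wrapper name is illustrative:

    /// Builds the rustc linker argument used when `-fuse-ld=lld` is enabled,
    /// matching the lib.rs hunk earlier in this diff.
    fn lld_no_threads_link_arg(target_triple: &str) -> String {
        let flag = lld_flag_no_threads(target_triple.contains("windows"));
        format!("-Clink-arg=-Wl,{}", flag)
    }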