Diffstat (limited to 'src/bootstrap/builder.rs')
-rw-r--r--  src/bootstrap/builder.rs | 301
1 file changed, 40 insertions, 261 deletions
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 8b144f146..8ee6d49da 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -2,14 +2,13 @@ use std::any::{type_name, Any};
use std::cell::{Cell, RefCell};
use std::collections::BTreeSet;
use std::env;
-use std::ffi::{OsStr, OsString};
+use std::ffi::OsStr;
use std::fmt::{Debug, Write};
-use std::fs::{self, File};
+use std::fs::{self};
use std::hash::Hash;
-use std::io::{BufRead, BufReader, ErrorKind};
use std::ops::Deref;
use std::path::{Component, Path, PathBuf};
-use std::process::{Command, Stdio};
+use std::process::Command;
use std::time::{Duration, Instant};
use crate::cache::{Cache, Interned, INTERNER};
@@ -24,14 +23,12 @@ use crate::test;
use crate::tool::{self, SourceType};
use crate::util::{self, add_dylib_path, add_link_lib_path, exe, libdir, output, t};
use crate::EXTRA_CHECK_CFGS;
-use crate::{check, Config};
-use crate::{compile, Crate};
+use crate::{check, compile, Crate};
use crate::{Build, CLang, DocTests, GitRepo, Mode};
pub use crate::Compiler;
// FIXME: replace with std::lazy after it gets stabilized and reaches beta
-use once_cell::sync::{Lazy, OnceCell};
-use xz2::bufread::XzDecoder;
+use once_cell::sync::Lazy;
pub struct Builder<'a> {
pub build: &'a Build,
@@ -621,6 +618,8 @@ impl<'a> Builder<'a> {
check::CodegenBackend,
check::Clippy,
check::Miri,
+ check::CargoMiri,
+ check::MiroptTestTools,
check::Rls,
check::RustAnalyzer,
check::Rustfmt,
@@ -645,6 +644,7 @@ impl<'a> Builder<'a> {
test::CrateLibrustc,
test::CrateRustdoc,
test::CrateRustdocJsonTypes,
+ test::CrateJsonDocLint,
test::Linkcheck,
test::TierCheck,
test::ReplacePlaceholderTest,
@@ -753,6 +753,9 @@ impl<'a> Builder<'a> {
run::BuildManifest,
run::BumpStage0,
run::ReplaceVersionPlaceholder,
+ run::Miri,
+ run::CollectLicenseMetadata,
+ run::GenerateCopyright,
),
// These commands either don't use paths, or they're special-cased in Build::build()
Kind::Clean | Kind::Format | Kind::Setup => vec![],
@@ -816,7 +819,7 @@ impl<'a> Builder<'a> {
Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
- Subcommand::Run { ref paths } => (Kind::Run, &paths[..]),
+ Subcommand::Run { ref paths, .. } => (Kind::Run, &paths[..]),
Subcommand::Format { .. } => (Kind::Format, &[][..]),
Subcommand::Clean { .. } | Subcommand::Setup { .. } => {
panic!()
@@ -850,241 +853,6 @@ impl<'a> Builder<'a> {
StepDescription::run(v, self, paths);
}
- /// Modifies the interpreter section of 'fname' to fix the dynamic linker,
- /// or the RPATH section, to fix the dynamic library search path
- ///
- /// This is only required on NixOS and uses the PatchELF utility to
- /// change the interpreter/RPATH of ELF executables.
- ///
- /// Please see https://nixos.org/patchelf.html for more information
- pub(crate) fn fix_bin_or_dylib(&self, fname: &Path) {
- // FIXME: cache NixOS detection?
- match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() {
- Err(_) => return,
- Ok(output) if !output.status.success() => return,
- Ok(output) => {
- let mut s = output.stdout;
- if s.last() == Some(&b'\n') {
- s.pop();
- }
- if s != b"Linux" {
- return;
- }
- }
- }
-
- // If the user has asked binaries to be patched for Nix, then
- // don't check for NixOS or `/lib`, just continue to the patching.
- // NOTE: this intentionally comes after the Linux check:
- // - patchelf only works with ELF files, so no need to run it on Mac or Windows
- // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc.
- if !self.config.patch_binaries_for_nix {
- // Use `/etc/os-release` instead of `/etc/NIXOS`.
- // The latter one does not exist on NixOS when using tmpfs as root.
- const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""];
- let os_release = match File::open("/etc/os-release") {
- Err(e) if e.kind() == ErrorKind::NotFound => return,
- Err(e) => panic!("failed to access /etc/os-release: {}", e),
- Ok(f) => f,
- };
- if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) {
- return;
- }
- if Path::new("/lib").exists() {
- return;
- }
- }
-
- // At this point we're pretty sure the user is running NixOS or using Nix
- println!("info: you seem to be using Nix. Attempting to patch {}", fname.display());
-
- // Only build `.nix-deps` once.
- static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new();
- let mut nix_build_succeeded = true;
- let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| {
- // Run `nix-build` to "build" each dependency (which will likely reuse
- // the existing `/nix/store` copy, or at most download a pre-built copy).
- //
- // Importantly, we create a gc-root called `.nix-deps` in the `build/`
- // directory, but still reference the actual `/nix/store` path in the rpath
- // as it makes it significantly more robust against changes to the location of
- // the `.nix-deps` location.
- //
- // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
- // zlib: Needed as a system dependency of `libLLVM-*.so`.
- // patchelf: Needed for patching ELF binaries (see doc comment above).
- let nix_deps_dir = self.out.join(".nix-deps");
- const NIX_EXPR: &str = "
- with (import <nixpkgs> {});
- symlinkJoin {
- name = \"rust-stage0-dependencies\";
- paths = [
- zlib
- patchelf
- stdenv.cc.bintools
- ];
- }
- ";
- nix_build_succeeded = self.try_run(Command::new("nix-build").args(&[
- Path::new("-E"),
- Path::new(NIX_EXPR),
- Path::new("-o"),
- &nix_deps_dir,
- ]));
- nix_deps_dir
- });
- if !nix_build_succeeded {
- return;
- }
-
- let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf"));
- let rpath_entries = {
- // ORIGIN is a relative default, all binary and dynamic libraries we ship
- // appear to have this (even when `../lib` is redundant).
- // NOTE: there are only two paths here, delimited by a `:`
- let mut entries = OsString::from("$ORIGIN/../lib:");
- entries.push(t!(fs::canonicalize(nix_deps_dir)));
- entries.push("/lib");
- entries
- };
- patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
- if !fname.extension().map_or(false, |ext| ext == "so") {
- // Finally, set the correct .interp for binaries
- let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
- // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
- let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
- patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
- }
-
- self.try_run(patchelf.arg(fname));
- }
-
- pub(crate) fn download_component(&self, url: &str, dest_path: &Path, help_on_error: &str) {
- self.verbose(&format!("download {url}"));
- // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/.
- let tempfile = self.tempdir().join(dest_path.file_name().unwrap());
- // While bootstrap itself only supports http and https downloads, downstream forks might
- // need to download components from other protocols. The match allows them adding more
- // protocols without worrying about merge conflicts if we change the HTTP implementation.
- match url.split_once("://").map(|(proto, _)| proto) {
- Some("http") | Some("https") => {
- self.download_http_with_retries(&tempfile, url, help_on_error)
- }
- Some(other) => panic!("unsupported protocol {other} in {url}"),
- None => panic!("no protocol in {url}"),
- }
- t!(std::fs::rename(&tempfile, dest_path));
- }
-
- fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) {
- println!("downloading {}", url);
- // Try curl. If that fails and we are on windows, fallback to PowerShell.
- let mut curl = Command::new("curl");
- curl.args(&[
- "-#",
- "-y",
- "30",
- "-Y",
- "10", // timeout if speed is < 10 bytes/sec for > 30 seconds
- "--connect-timeout",
- "30", // timeout if cannot connect within 30 seconds
- "--retry",
- "3",
- "-Sf",
- "-o",
- ]);
- curl.arg(tempfile);
- curl.arg(url);
- if !self.check_run(&mut curl) {
- if self.build.build.contains("windows-msvc") {
- println!("Fallback to PowerShell");
- for _ in 0..3 {
- if self.try_run(Command::new("PowerShell.exe").args(&[
- "/nologo",
- "-Command",
- "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
- &format!(
- "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
- url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"),
- ),
- ])) {
- return;
- }
- println!("\nspurious failure, trying again");
- }
- }
- if !help_on_error.is_empty() {
- eprintln!("{}", help_on_error);
- }
- crate::detail_exit(1);
- }
- }
-
- pub(crate) fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
- println!("extracting {} to {}", tarball.display(), dst.display());
- if !dst.exists() {
- t!(fs::create_dir_all(dst));
- }
-
- // `tarball` ends with `.tar.xz`; strip that suffix
- // example: `rust-dev-nightly-x86_64-unknown-linux-gnu`
- let uncompressed_filename =
- Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap();
- let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
-
- // decompress the file
- let data = t!(File::open(tarball));
- let decompressor = XzDecoder::new(BufReader::new(data));
-
- let mut tar = tar::Archive::new(decompressor);
- for member in t!(tar.entries()) {
- let mut member = t!(member);
- let original_path = t!(member.path()).into_owned();
- // skip the top-level directory
- if original_path == directory_prefix {
- continue;
- }
- let mut short_path = t!(original_path.strip_prefix(directory_prefix));
- if !short_path.starts_with(pattern) {
- continue;
- }
- short_path = t!(short_path.strip_prefix(pattern));
- let dst_path = dst.join(short_path);
- self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display()));
- if !t!(member.unpack_in(dst)) {
- panic!("path traversal attack ??");
- }
- let src_path = dst.join(original_path);
- if src_path.is_dir() && dst_path.exists() {
- continue;
- }
- t!(fs::rename(src_path, dst_path));
- }
- t!(fs::remove_dir_all(dst.join(directory_prefix)));
- }
-
- /// Returns whether the SHA256 checksum of `path` matches `expected`.
- pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool {
- use sha2::Digest;
-
- self.verbose(&format!("verifying {}", path.display()));
- let mut hasher = sha2::Sha256::new();
- // FIXME: this is ok for rustfmt (4.1 MB large at time of writing), but it seems memory-intensive for rustc and larger components.
- // Consider using streaming IO instead?
- let contents = if self.config.dry_run { vec![] } else { t!(fs::read(path)) };
- hasher.update(&contents);
- let found = hex::encode(hasher.finalize().as_slice());
- let verified = found == expected;
- if !verified && !self.config.dry_run {
- println!(
- "invalid checksum: \n\
- found: {found}\n\
- expected: {expected}",
- );
- }
- return verified;
- }
-
/// Obtain a compiler at a given stage and for a given host. Explicitly does
/// not take `Compiler` since all `Compiler` instances are meant to be
/// obtained through this function, since it ensures that they are valid
@@ -1289,7 +1057,7 @@ impl<'a> Builder<'a> {
/// Note that this returns `None` if LLVM is disabled, or if we're in a
/// check build or dry-run, where there's no need to build all of LLVM.
fn llvm_config(&self, target: TargetSelection) -> Option<PathBuf> {
- if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run {
+ if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run() {
let llvm_config = self.ensure(native::Llvm { target });
if llvm_config.is_file() {
return Some(llvm_config);
@@ -1298,19 +1066,6 @@ impl<'a> Builder<'a> {
None
}
- /// Convenience wrapper to allow `builder.llvm_link_shared()` instead of `builder.config.llvm_link_shared(&builder)`.
- pub(crate) fn llvm_link_shared(&self) -> bool {
- Config::llvm_link_shared(self)
- }
-
- pub(crate) fn download_rustc(&self) -> bool {
- Config::download_rustc(self)
- }
-
- pub(crate) fn initial_rustfmt(&self) -> Option<PathBuf> {
- Config::initial_rustfmt(self)
- }
-
/// Prepares an invocation of `cargo` to be run.
///
/// This will create a `Command` that represents a pending execution of
@@ -1342,7 +1097,13 @@ impl<'a> Builder<'a> {
let my_out = match mode {
// This is the intended out directory for compiler documentation.
Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target),
- Mode::Std => out_dir.join(target.triple).join("doc"),
+ Mode::Std => {
+ if self.config.cmd.json() {
+ out_dir.join(target.triple).join("json-doc")
+ } else {
+ out_dir.join(target.triple).join("doc")
+ }
+ }
_ => panic!("doc mode {:?} not expected", mode),
};
let rustdoc = self.rustdoc(compiler);
@@ -1641,7 +1402,7 @@ impl<'a> Builder<'a> {
//
// Only clear out the directory if we're compiling std; otherwise, we
// should let Cargo take care of things for us (via depdep info)
- if !self.config.dry_run && mode == Mode::Std && cmd == "build" {
+ if !self.config.dry_run() && mode == Mode::Std && cmd == "build" {
self.clear_if_dirty(&out_dir, &self.rustc(compiler));
}
@@ -2139,7 +1900,7 @@ impl<'a> Builder<'a> {
(out, dur - deps)
};
- if self.config.print_step_timings && !self.config.dry_run {
+ if self.config.print_step_timings && !self.config.dry_run() {
let step_string = format!("{:?}", step);
let brace_index = step_string.find("{").unwrap_or(0);
let type_string = type_name::<S>();
@@ -2205,6 +1966,24 @@ impl<'a> Builder<'a> {
false
}
+
+ pub(crate) fn maybe_open_in_browser<S: Step>(&self, path: impl AsRef<Path>) {
+ if self.was_invoked_explicitly::<S>(Kind::Doc) {
+ self.open_in_browser(path);
+ }
+ }
+
+ pub(crate) fn open_in_browser(&self, path: impl AsRef<Path>) {
+ if self.config.dry_run() || !self.config.cmd.open() {
+ return;
+ }
+
+ let path = path.as_ref();
+ self.info(&format!("Opening doc {}", path.display()));
+ if let Err(err) = opener::open(path) {
+ self.info(&format!("{}\n", err));
+ }
+ }
}
#[cfg(test)]