Diffstat (limited to 'src/bootstrap')
-rw-r--r--  src/bootstrap/CHANGELOG.md           |   2
-rw-r--r--  src/bootstrap/Cargo.lock             |   8
-rw-r--r--  src/bootstrap/README.md              |  64
-rw-r--r--  src/bootstrap/bin/rustc.rs           |   2
-rw-r--r--  src/bootstrap/bolt.rs                |  71
-rw-r--r--  src/bootstrap/bootstrap.py           |  18
-rw-r--r--  src/bootstrap/build.rs               |  36
-rw-r--r--  src/bootstrap/builder.rs             |  24
-rw-r--r--  src/bootstrap/builder/tests.rs       |   4
-rw-r--r--  src/bootstrap/channel.rs             |  68
-rw-r--r--  src/bootstrap/check.rs               |   2
-rw-r--r--  src/bootstrap/compile.rs             |  67
-rw-r--r--  src/bootstrap/config.rs              | 153
-rw-r--r--  src/bootstrap/dist.rs                | 200
-rw-r--r--  src/bootstrap/doc.rs                 | 285
-rw-r--r--  src/bootstrap/download-ci-llvm-stamp |   2
-rw-r--r--  src/bootstrap/flags.rs               |  26
-rw-r--r--  src/bootstrap/install.rs             |  11
-rw-r--r--  src/bootstrap/lib.rs                 |  66
-rw-r--r--  src/bootstrap/native.rs              | 116
-rw-r--r--  src/bootstrap/sanity.rs              |   2
-rw-r--r--  src/bootstrap/tarball.rs             |   6
-rw-r--r--  src/bootstrap/test.rs                | 237
-rw-r--r--  src/bootstrap/tool.rs                |  42
-rw-r--r--  src/bootstrap/toolstate.rs           |   1
25 files changed, 1022 insertions, 491 deletions
diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md
index 85afc1f5f..64b74ecc9 100644
--- a/src/bootstrap/CHANGELOG.md
+++ b/src/bootstrap/CHANGELOG.md
@@ -13,6 +13,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Change the names for `dist` commands to match the component they generate. [#90684](https://github.com/rust-lang/rust/pull/90684)
- The `build.fast-submodules` option has been removed. Fast submodule checkouts are enabled unconditionally. Automatic submodule handling can still be disabled with `build.submodules = false`.
- Several unsupported `./configure` options have been removed: `optimize`, `parallel-compiler`. These can still be enabled with `--set`, although it isn't recommended.
+- `remote-test-server`'s `verbose` argument has been removed in favor of the `--verbose` flag
+- `remote-test-server`'s `remote` argument has been removed in favor of the `--bind` flag. Use `--bind 0.0.0.0:12345` to replicate the behavior of the `remote` argument.
### Non-breaking changes
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index 84c06fdce..baecca44c 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -445,9 +445,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.39"
+version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
+checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
"unicode-ident",
]
@@ -596,9 +596,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.95"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
"proc-macro2",
"quote",
diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md
index a2e596bf4..985727bdd 100644
--- a/src/bootstrap/README.md
+++ b/src/bootstrap/README.md
@@ -1,7 +1,7 @@
# rustbuild - Bootstrapping Rust
This is an in-progress README which is targeted at helping to explain how Rust
-is bootstrapped and in general some of the technical details of the build
+is bootstrapped and in general, some of the technical details of the build
system.
## Using rustbuild
@@ -12,7 +12,7 @@ The rustbuild build system has a primary entry point, a top level `x.py` script:
$ python ./x.py build
```
-Note that if you're on Unix you should be able to execute the script directly:
+Note that if you're on Unix, you should be able to execute the script directly:
```sh
$ ./x.py build
@@ -20,8 +20,8 @@ $ ./x.py build
The script accepts commands, flags, and arguments to determine what to do:
-* `build` - a general purpose command for compiling code. Alone `build` will
- bootstrap the entire compiler, and otherwise arguments passed indicate what to
+* `build` - a general purpose command for compiling code. Alone, `build` will
+ bootstrap the entire compiler, and otherwise, arguments passed indicate what to
build. For example:
```
@@ -38,7 +38,7 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py build --stage 0 library/test
```
- If files are dirty that would normally be rebuilt from stage 0, that can be
+ If files that would normally be rebuilt from stage 0 are dirty, the rebuild can be
overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
that belong to stage n or earlier:
@@ -47,8 +47,8 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py build --keep-stage 0
```
-* `test` - a command for executing unit tests. Like the `build` command this
- will execute the entire test suite by default, and otherwise it can be used to
+* `test` - a command for executing unit tests. Like the `build` command, this
+ will execute the entire test suite by default, and otherwise, it can be used to
select which test suite is run:
```
@@ -75,7 +75,7 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py test src/doc
```
-* `doc` - a command for building documentation. Like above can take arguments
+* `doc` - a command for building documentation. Like above, it can take arguments
for what to document.
## Configuring rustbuild
@@ -110,12 +110,12 @@ compiler. What actually happens when you invoke rustbuild is:
compiles the build system itself (this folder). Finally, it then invokes the
actual `bootstrap` binary build system.
2. In Rust, `bootstrap` will slurp up all configuration, perform a number of
- sanity checks (compilers exist for example), and then start building the
+ sanity checks (whether compilers exist, for example), and then start building the
stage0 artifacts.
-3. The stage0 `cargo` downloaded earlier is used to build the standard library
+3. The stage0 `cargo`, downloaded earlier, is used to build the standard library
and the compiler, and then these binaries are then copied to the `stage1`
directory. That compiler is then used to generate the stage1 artifacts which
- are then copied to the stage2 directory, and then finally the stage2
+ are then copied to the stage2 directory, and then finally, the stage2
artifacts are generated using that compiler.
The goal of each stage is to (a) leverage Cargo as much as possible and failing
@@ -149,7 +149,7 @@ like this:
build/
# Location where the stage0 compiler downloads are all cached. This directory
- # only contains the tarballs themselves as they're extracted elsewhere.
+ # only contains the tarballs themselves, as they're extracted elsewhere.
cache/
2015-12-19/
2016-01-15/
@@ -172,10 +172,10 @@ build/
# hand.
x86_64-unknown-linux-gnu/
- # The build artifacts for the `compiler-rt` library for the target this
- # folder is under. The exact layout here will likely depend on the platform,
- # and this is also built with CMake so the build system is also likely
- # different.
+ # The build artifacts for the `compiler-rt` library for the target that
+ # this folder is under. The exact layout here will likely depend on the
+ # platform, and this is also built with CMake, so the build system is
+ # also likely different.
compiler-rt/
build/
@@ -183,11 +183,11 @@ build/
llvm/
# build folder (e.g. the platform-specific build system). Like with
- # compiler-rt this is compiled with CMake
+ # compiler-rt, this is compiled with CMake
build/
# Installation of LLVM. Note that we run the equivalent of 'make install'
- # for LLVM to setup these folders.
+ # for LLVM, to set up these folders.
bin/
lib/
include/
@@ -206,18 +206,18 @@ build/
# Location where the stage0 Cargo and Rust compiler are unpacked. This
# directory is purely an extracted and overlaid tarball of these two (done
- # by the bootstrapy python script). In theory the build system does not
+ # by the bootstrap python script). In theory, the build system does not
# modify anything under this directory afterwards.
stage0/
- # These to build directories are the cargo output directories for builds of
- # the standard library and compiler, respectively. Internally these may also
+ # These two build directories are the cargo output directories for builds of
+ # the standard library and compiler, respectively. Internally, these may also
# have other target directories, which represent artifacts being compiled
# from the host to the specified target.
#
# Essentially, each of these directories is filled in by one `cargo`
# invocation. The build system instruments calling Cargo in the right order
- # with the right variables to ensure these are filled in correctly.
+ # with the right variables to ensure that these are filled in correctly.
stageN-std/
stageN-test/
stageN-rustc/
@@ -232,8 +232,8 @@ build/
# being compiled (e.g. after libstd has been built), *this* is used as the
# sysroot for the stage0 compiler being run.
#
- # Basically this directory is just a temporary artifact use to configure the
- # stage0 compiler to ensure that the libstd we just built is used to
+ # Basically, this directory is just a temporary artifact used to configure the
+ # stage0 compiler to ensure that the libstd that we just built is used to
# compile the stage1 compiler.
stage0-sysroot/lib/
@@ -242,7 +242,7 @@ build/
# system will link (using hard links) output from stageN-{std,rustc} into
# each of these directories.
#
- # In theory there is no extra build output in these directories.
+ # In theory, there is no extra build output in these directories.
stage1/
stage2/
stage3/
@@ -265,14 +265,14 @@ structure here serves two goals:
depend on `std`, so libstd is a separate project compiled ahead of time
before the actual compiler builds.
2. Splitting "host artifacts" from "target artifacts". That is, when building
- code for an arbitrary target you don't need the entire compiler, but you'll
+ code for an arbitrary target, you don't need the entire compiler, but you'll
end up needing libraries like libtest that depend on std but also want to use
crates.io dependencies. Hence, libtest is split out as its own project that
is sequenced after `std` but before `rustc`. This project is built for all
targets.
There is some loss in build parallelism here because libtest can be compiled in
-parallel with a number of rustc artifacts, but in theory the loss isn't too bad!
+parallel with a number of rustc artifacts, but in theory, the loss isn't too bad!
## Build tools
@@ -285,13 +285,13 @@ appropriate libstd/libtest/librustc compile above.
## Extending rustbuild
-So you'd like to add a feature to the rustbuild build system or just fix a bug.
+So, you'd like to add a feature to the rustbuild build system or just fix a bug.
Great! One of the major motivational factors for moving away from `make` is that
Rust is in theory much easier to read, modify, and write. If you find anything
-excessively confusing, please open an issue on this and we'll try to get it
-documented or simplified pronto.
+excessively confusing, please open an issue on this, and we'll try to get it
+documented or simplified, pronto.
-First up, you'll probably want to read over the documentation above as that'll
+First up, you'll probably want to read over the documentation above, as that'll
give you a high level overview of what rustbuild is doing. You also probably
want to play around a bit yourself by just getting it up and running before you
dive too much into the actual build system itself.
@@ -326,7 +326,7 @@ A 'major change' includes
Changes that do not affect contributors to the compiler or users
building rustc from source don't need an update to `VERSION`.
-If you have any questions feel free to reach out on the `#t-infra` channel in
+If you have any questions, feel free to reach out on the `#t-infra` channel in
the [Rust Zulip server][rust-zulip] or ask on internals.rust-lang.org. When
you encounter bugs, please file issues on the rust-lang/rust issue tracker.
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index e96f8b0d3..776d73b98 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -67,7 +67,7 @@ fn main() {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
- cmd.arg("-Ztime");
+ cmd.arg("-Ztime-passes");
}
}
}
diff --git a/src/bootstrap/bolt.rs b/src/bootstrap/bolt.rs
new file mode 100644
index 000000000..ea37cd470
--- /dev/null
+++ b/src/bootstrap/bolt.rs
@@ -0,0 +1,71 @@
+use std::path::Path;
+use std::process::Command;
+
+/// Uses the `llvm-bolt` binary to instrument the binary/library at the given `path` with BOLT.
+/// When the instrumented artifact is executed, it will generate BOLT profiles into
+/// `/tmp/prof.fdata.<pid>.fdata`.
+pub fn instrument_with_bolt_inplace(path: &Path) {
+ let dir = std::env::temp_dir();
+ let instrumented_path = dir.join("instrumented.so");
+
+ let status = Command::new("llvm-bolt")
+ .arg("-instrument")
+ .arg(&path)
+ // Make sure that each process will write its profiles into a separate file
+ .arg("--instrumentation-file-append-pid")
+ .arg("-o")
+ .arg(&instrumented_path)
+ .status()
+ .expect("Could not instrument artifact using BOLT");
+
+ if !status.success() {
+ panic!("Could not instrument {} with BOLT, exit code {:?}", path.display(), status.code());
+ }
+
+ std::fs::copy(&instrumented_path, path).expect("Cannot copy instrumented artifact");
+ std::fs::remove_file(instrumented_path).expect("Cannot delete instrumented artifact");
+}
+
+/// Uses the `llvm-bolt` binary to optimize the binary/library at the given `path` with BOLT,
+/// using merged profiles from `profile_path`.
+///
+/// The recorded profiles have to be merged using the `merge-fdata` tool from LLVM and the merged
+/// profile path should then be passed to this function.
+pub fn optimize_library_with_bolt_inplace(path: &Path, profile_path: &Path) {
+ let dir = std::env::temp_dir();
+ let optimized_path = dir.join("optimized.so");
+
+ let status = Command::new("llvm-bolt")
+ .arg(&path)
+ .arg("-data")
+ .arg(&profile_path)
+ .arg("-o")
+ .arg(&optimized_path)
+ // Reorder basic blocks within functions
+ .arg("-reorder-blocks=ext-tsp")
+ // Reorder functions within the binary
+ .arg("-reorder-functions=hfsort+")
+ // Split function code into hot and cold regions
+ .arg("-split-functions=2")
+ // Split as many basic blocks as possible
+ .arg("-split-all-cold")
+ // Move jump tables to a separate section
+ .arg("-jump-tables=move")
+ // Use GNU_STACK program header for new segment (workaround for issues with strip/objcopy)
+ .arg("-use-gnu-stack")
+ // Fold functions with identical code
+ .arg("-icf=1")
+ // Update DWARF debug info in the final binary
+ .arg("-update-debug-sections")
+ // Print optimization statistics
+ .arg("-dyno-stats")
+ .status()
+ .expect("Could not optimize artifact using BOLT");
+
+ if !status.success() {
+ panic!("Could not optimize {} with BOLT, exit code {:?}", path.display(), status.code());
+ }
+
+ std::fs::copy(&optimized_path, path).expect("Cannot copy optimized artifact");
+ std::fs::remove_file(optimized_path).expect("Cannot delete optimized artifact");
+}
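
The two helpers above are meant to bracket a profile-collection run: instrument in place, exercise the artifact, merge the per-process `.fdata` files with LLVM's `merge-fdata`, then optimize in place. The following is a minimal sketch of that sequence, assuming the functions above are in scope; `bolt_pgo_cycle`, the workload, the `/tmp` scan, and the output path are illustrative, not what bootstrap's CI actually runs:

```rust
use std::fs;
use std::path::Path;
use std::process::Command;

/// Hypothetical driver showing the intended call order of the helpers above.
fn bolt_pgo_cycle(artifact: &Path, workload: &mut Command) {
    // 1. Rewrite the artifact in place so that executing it emits BOLT profiles
    //    into `/tmp/prof.fdata.<pid>.fdata`.
    instrument_with_bolt_inplace(artifact);

    // 2. Run a representative workload against the instrumented artifact.
    assert!(workload.status().expect("workload did not start").success());

    // 3. Merge the per-process profiles with LLVM's `merge-fdata`, which prints
    //    the merged profile on stdout.
    let profiles: Vec<_> = fs::read_dir("/tmp")
        .expect("cannot read /tmp")
        .filter_map(|entry| entry.ok().map(|e| e.path()))
        .filter(|p| {
            p.file_name().map_or(false, |n| n.to_string_lossy().starts_with("prof.fdata"))
        })
        .collect();
    let merged = Command::new("merge-fdata").args(&profiles).output().expect("merge-fdata failed");
    assert!(merged.status.success());
    let merged_path = Path::new("/tmp/merged.fdata");
    fs::write(merged_path, &merged.stdout).expect("cannot write merged profile");

    // 4. Apply the merged profile to produce the optimized artifact, again in place.
    optimize_library_with_bolt_inplace(artifact, merged_path);
}
```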
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index cc08ae5f9..57128685d 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -732,9 +732,19 @@ class RustBuild(object):
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
+ # Export Stage0 snapshot compiler related env variables
+ build_section = "target.{}".format(self.build)
+ host_triple_sanitized = self.build.replace("-", "_")
+ var_data = {
+ "CC": "cc", "CXX": "cxx", "LD": "linker", "AR": "ar", "RANLIB": "ranlib"
+ }
+ for var_name, toml_key in var_data.items():
+ toml_val = self.get_toml(toml_key, build_section)
+ if toml_val is not None:
+ env["{}_{}".format(var_name, host_triple_sanitized)] = toml_val
+
# preserve existing RUSTFLAGS
env.setdefault("RUSTFLAGS", "")
- build_section = "target.{}".format(self.build)
target_features = []
if self.get_toml("crt-static", build_section) == "true":
target_features += ["+crt-static"]
@@ -742,9 +752,6 @@ class RustBuild(object):
target_features += ["-crt-static"]
if target_features:
env["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features))
- target_linker = self.get_toml("linker", build_section)
- if target_linker is not None:
- env["RUSTFLAGS"] += " -C linker=" + target_linker
env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
env["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros"
if self.get_toml("deny-warnings", "rust") != "false":
@@ -771,7 +778,8 @@ class RustBuild(object):
elif color == "never":
args.append("--color=never")
- run(args, env=env, verbose=self.verbose)
+ # Run this from the source directory so cargo finds .cargo/config
+ run(args, env=env, verbose=self.verbose, cwd=self.rust_root)
def build_triple(self):
"""Build triple as in LLVM
diff --git a/src/bootstrap/build.rs b/src/bootstrap/build.rs
index ab34d5c1e..cd1f41802 100644
--- a/src/bootstrap/build.rs
+++ b/src/bootstrap/build.rs
@@ -1,43 +1,7 @@
-use env::consts::{EXE_EXTENSION, EXE_SUFFIX};
use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-/// Given an executable called `name`, return the filename for the
-/// executable for a particular target.
-pub fn exe(name: &PathBuf) -> PathBuf {
- if EXE_EXTENSION != "" && name.extension() != Some(EXE_EXTENSION.as_ref()) {
- let mut name: OsString = name.clone().into();
- name.push(EXE_SUFFIX);
- name.into()
- } else {
- name.clone()
- }
-}
fn main() {
let host = env::var("HOST").unwrap();
println!("cargo:rerun-if-changed=build.rs");
- println!("cargo:rerun-if-env-changed=RUSTC");
println!("cargo:rustc-env=BUILD_TRIPLE={}", host);
-
- // This may not be a canonicalized path.
- let mut rustc = PathBuf::from(env::var_os("RUSTC").unwrap());
-
- if rustc.is_relative() {
- println!("cargo:rerun-if-env-changed=PATH");
- for dir in env::split_paths(&env::var_os("PATH").unwrap_or_default()) {
- let absolute = dir.join(&exe(&rustc));
- if absolute.exists() {
- rustc = absolute;
- break;
- }
- }
- }
- assert!(rustc.is_absolute());
-
- // FIXME: if the path is not utf-8, this is going to break. Unfortunately
- // Cargo doesn't have a way for us to specify non-utf-8 paths easily, so
- // we'll need to invent some encoding scheme if this becomes a problem.
- println!("cargo:rustc-env=RUSTC={}", rustc.to_str().unwrap());
}
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 14e8ebd68..8b144f146 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -704,10 +704,12 @@ impl<'a> Builder<'a> {
doc::Miri,
doc::EmbeddedBook,
doc::EditionGuide,
+ doc::StyleGuide,
),
Kind::Dist => describe!(
dist::Docs,
dist::RustcDocs,
+ dist::JsonDocs,
dist::Mingw,
dist::Rustc,
dist::Std,
@@ -723,6 +725,7 @@ impl<'a> Builder<'a> {
dist::Miri,
dist::LlvmTools,
dist::RustDev,
+ dist::Bootstrap,
dist::Extended,
// It seems that PlainSourceTarball somehow changes how some of the tools
// perceive their dependencies (see #93033) which would invalidate fingerprints
@@ -1325,6 +1328,9 @@ impl<'a> Builder<'a> {
) -> Cargo {
let mut cargo = Command::new(&self.initial_cargo);
let out_dir = self.stage_out(compiler, mode);
+ // Run cargo from the source root so it can find .cargo/config.
+ // This matters when using vendoring and the working directory is outside the repository.
+ cargo.current_dir(&self.src);
// Codegen backends are not yet tracked by -Zbinary-dep-depinfo,
// so we need to explicitly clear out if they've been updated.
@@ -1552,13 +1558,12 @@ impl<'a> Builder<'a> {
match mode {
Mode::ToolBootstrap => {
// Restrict the allowed features to those passed by rustbuild, so we don't depend on nightly accidentally.
- // HACK: because anyhow does feature detection in build.rs, we need to allow the backtrace feature too.
- rustflags.arg("-Zallow-features=binary-dep-depinfo,backtrace");
+ rustflags.arg("-Zallow-features=binary-dep-depinfo");
}
Mode::ToolStd => {
// Right now this is just compiletest and a few other tools that build on stable.
// Allow them to use `feature(test)`, but nothing else.
- rustflags.arg("-Zallow-features=binary-dep-depinfo,test,backtrace,proc_macro_internals,proc_macro_diagnostic,proc_macro_span");
+ rustflags.arg("-Zallow-features=binary-dep-depinfo,test,proc_macro_internals,proc_macro_diagnostic,proc_macro_span");
}
Mode::Std | Mode::Rustc | Mode::Codegen | Mode::ToolRustc => {}
}
@@ -1940,25 +1945,26 @@ impl<'a> Builder<'a> {
_ => s.display().to_string(),
}
};
+ let triple_underscored = target.triple.replace("-", "_");
let cc = ccacheify(&self.cc(target));
- cargo.env(format!("CC_{}", target.triple), &cc);
+ cargo.env(format!("CC_{}", triple_underscored), &cc);
let cflags = self.cflags(target, GitRepo::Rustc, CLang::C).join(" ");
- cargo.env(format!("CFLAGS_{}", target.triple), &cflags);
+ cargo.env(format!("CFLAGS_{}", triple_underscored), &cflags);
if let Some(ar) = self.ar(target) {
let ranlib = format!("{} s", ar.display());
cargo
- .env(format!("AR_{}", target.triple), ar)
- .env(format!("RANLIB_{}", target.triple), ranlib);
+ .env(format!("AR_{}", triple_underscored), ar)
+ .env(format!("RANLIB_{}", triple_underscored), ranlib);
}
if let Ok(cxx) = self.cxx(target) {
let cxx = ccacheify(&cxx);
let cxxflags = self.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" ");
cargo
- .env(format!("CXX_{}", target.triple), &cxx)
- .env(format!("CXXFLAGS_{}", target.triple), cxxflags);
+ .env(format!("CXX_{}", triple_underscored), &cxx)
+ .env(format!("CXXFLAGS_{}", triple_underscored), cxxflags);
}
}
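
Both this hunk and the `bootstrap.py` change above switch the per-target C toolchain environment variables to the underscore-sanitized form of the triple. A tiny sketch of the naming rule follows; `toolchain_env_var` is invented for illustration (bootstrap inlines the `replace` call):

```rust
/// The per-target toolchain variables use the triple with dashes replaced by
/// underscores, e.g. `CC_x86_64_unknown_linux_gnu`.
fn toolchain_env_var(var: &str, triple: &str) -> String {
    format!("{}_{}", var, triple.replace('-', "_"))
}

fn main() {
    // Illustrative triples; bootstrap uses the actual build/target triples.
    assert_eq!(
        toolchain_env_var("CC", "x86_64-unknown-linux-gnu"),
        "CC_x86_64_unknown_linux_gnu"
    );
    assert_eq!(
        toolchain_env_var("CXXFLAGS", "aarch64-apple-darwin"),
        "CXXFLAGS_aarch64_apple_darwin"
    );
}
```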
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index 280eba75f..88bbcc93d 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -236,7 +236,7 @@ mod defaults {
fn doc_default() {
let mut config = configure("doc", &["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false };
+ config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
let mut cache = run_build(&[], config);
let a = TargetSelection::from_user("A");
@@ -587,7 +587,7 @@ mod dist {
fn doc_ci() {
let mut config = configure(&["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false };
+ config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]);
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 1932a0017..258352a21 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -5,10 +5,12 @@
//! `package_vers`, and otherwise indicating to the compiler what it should
//! print out as part of its version information.
+use std::fs;
use std::path::Path;
use std::process::Command;
use crate::util::output;
+use crate::util::t;
use crate::Build;
pub enum GitInfo {
@@ -18,19 +20,25 @@ pub enum GitInfo {
/// If the info should be used (`ignore_git` is false), this will be
/// `Some`, otherwise it will be `None`.
Present(Option<Info>),
+ /// This is not a git repository, but the info can be fetched from the
+ /// `git-commit-info` file.
+ RecordedForTarball(Info),
}
pub struct Info {
- commit_date: String,
- sha: String,
- short_sha: String,
+ pub commit_date: String,
+ pub sha: String,
+ pub short_sha: String,
}
impl GitInfo {
pub fn new(ignore_git: bool, dir: &Path) -> GitInfo {
// See if this even begins to look like a git dir
if !dir.join(".git").exists() {
- return GitInfo::Absent;
+ match read_commit_info_file(dir) {
+ Some(info) => return GitInfo::RecordedForTarball(info),
+ None => return GitInfo::Absent,
+ }
}
// Make sure git commands work
@@ -65,10 +73,11 @@ impl GitInfo {
}))
}
- fn info(&self) -> Option<&Info> {
+ pub fn info(&self) -> Option<&Info> {
match self {
- GitInfo::Present(info) => info.as_ref(),
GitInfo::Absent => None,
+ GitInfo::Present(info) => info.as_ref(),
+ GitInfo::RecordedForTarball(info) => Some(info),
}
}
@@ -96,10 +105,53 @@ impl GitInfo {
version
}
- pub fn is_git(&self) -> bool {
+ /// Returns whether this directory has a `.git` directory which should be managed by bootstrap.
+ pub fn is_managed_git_subrepository(&self) -> bool {
match self {
- GitInfo::Absent => false,
+ GitInfo::Absent | GitInfo::RecordedForTarball(_) => false,
GitInfo::Present(_) => true,
}
}
+
+ /// Returns whether this is being built from a tarball.
+ pub fn is_from_tarball(&self) -> bool {
+ match self {
+ GitInfo::Absent | GitInfo::Present(_) => false,
+ GitInfo::RecordedForTarball(_) => true,
+ }
+ }
+}
+
+/// Read the commit information from the `git-commit-info` file given the
+/// project root.
+pub fn read_commit_info_file(root: &Path) -> Option<Info> {
+ if let Ok(contents) = fs::read_to_string(root.join("git-commit-info")) {
+ let mut lines = contents.lines();
+ let sha = lines.next();
+ let short_sha = lines.next();
+ let commit_date = lines.next();
+ let info = match (commit_date, sha, short_sha) {
+ (Some(commit_date), Some(sha), Some(short_sha)) => Info {
+ commit_date: commit_date.to_owned(),
+ sha: sha.to_owned(),
+ short_sha: short_sha.to_owned(),
+ },
+ _ => panic!("the `git-commit-info` file is malformed"),
+ };
+ Some(info)
+ } else {
+ None
+ }
+}
+
+/// Write the commit information to the `git-commit-info` file given the project
+/// root.
+pub fn write_commit_info_file(root: &Path, info: &Info) {
+ let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date);
+ t!(fs::write(root.join("git-commit-info"), &commit_info));
+}
+
+/// Write the commit hash to the `git-commit-hash` file given the project root.
+pub fn write_commit_hash_file(root: &Path, sha: &str) {
+ t!(fs::write(root.join("git-commit-hash"), sha));
}
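
The new `git-commit-info` file written into source tarballs holds three newline-terminated lines: full SHA, short SHA, and commit date, in that order. Below is a minimal round-trip sketch, assuming the `Info`, `read_commit_info_file`, and `write_commit_info_file` items above are in scope; the commit data is made up:

```rust
use std::fs;

fn main() {
    // Made-up values, only to show the on-disk layout.
    let info = Info {
        sha: "0123456789abcdef0123456789abcdef01234567".to_owned(),
        short_sha: "012345678".to_owned(),
        commit_date: "2022-10-01".to_owned(),
    };

    let root = std::env::temp_dir();
    write_commit_info_file(&root, &info);

    // The file holds exactly three lines: full SHA, short SHA, commit date.
    let on_disk = fs::read_to_string(root.join("git-commit-info")).unwrap();
    assert_eq!(on_disk, format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date));

    // Reading it back through the parser reproduces the same values.
    let read_back = read_commit_info_file(&root).expect("file should exist");
    assert_eq!(read_back.sha, info.sha);
    assert_eq!(read_back.short_sha, info.short_sha);
    assert_eq!(read_back.commit_date, info.commit_date);
}
```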
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index 5c085bedf..229851238 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -456,6 +456,8 @@ tool_check_step!(Rustdoc, "src/tools/rustdoc", "src/librustdoc", SourceType::InT
// behavior, treat it as in-tree so that any new warnings in clippy will be
// rejected.
tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree);
+// Miri, on the other hand, is treated as out-of-tree, since InTree also causes it to
+// be run as part of `check`, which can fail on platforms that libffi-sys does not support.
tool_check_step!(Miri, "src/tools/miri", SourceType::Submodule);
tool_check_step!(Rls, "src/tools/rls", SourceType::InTree);
tool_check_step!(Rustfmt, "src/tools/rustfmt", SourceType::InTree);
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index c13e83f6c..e02a10b81 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -21,7 +21,7 @@ use serde::Deserialize;
use crate::builder::Cargo;
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
-use crate::config::{LlvmLibunwind, TargetSelection};
+use crate::config::{LlvmLibunwind, RustcLto, TargetSelection};
use crate::dist;
use crate::native;
use crate::tool::SourceType;
@@ -299,9 +299,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// Determine if we're going to compile in optimized C intrinsics to
// the `compiler-builtins` crate. These intrinsics live in LLVM's
- // `compiler-rt` repository, but our `src/llvm-project` submodule isn't
- // always checked out, so we need to conditionally look for this. (e.g. if
- // an external LLVM is used we skip the LLVM submodule checkout).
+ // `compiler-rt` repository.
//
// Note that this shouldn't affect the correctness of `compiler-builtins`,
// but only its speed. Some intrinsics in C haven't been translated to Rust
@@ -312,8 +310,15 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// If `compiler-rt` is available ensure that the `c` feature of the
// `compiler-builtins` crate is enabled and it's configured to learn where
// `compiler-rt` is located.
- let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
- let compiler_builtins_c_feature = if compiler_builtins_root.exists() {
+ let compiler_builtins_c_feature = if builder.config.optimized_compiler_builtins {
+ if !builder.is_rust_llvm(target) {
+ panic!(
+ "need a managed LLVM submodule for optimized intrinsics support; unset `llvm-config` or `optimized-compiler-builtins`"
+ );
+ }
+
+ builder.update_submodule(&Path::new("src").join("llvm-project"));
+ let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
// Note that `libprofiler_builtins/build.rs` also computes this so if
// you're changing something here please also change that.
cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
@@ -459,7 +464,7 @@ fn copy_sanitizers(
builder.copy(&runtime.path, &dst);
if target == "x86_64-apple-darwin" || target == "aarch64-apple-darwin" {
- // Update the library’s install name to reflect that it has has been renamed.
+ // Update the library’s install name to reflect that it has been renamed.
apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name));
// Upon renaming the install name, the code signature of the file will invalidate,
// so we will sign it again.
@@ -696,6 +701,28 @@ impl Step for Rustc {
));
}
+ // cfg(bootstrap): remove if condition once the bootstrap compiler supports dylib LTO
+ if compiler.stage != 0 {
+ match builder.config.rust_lto {
+ RustcLto::Thin | RustcLto::Fat => {
+ // Since using LTO for optimizing dylibs is currently experimental,
+ // we need to pass -Zdylib-lto.
+ cargo.rustflag("-Zdylib-lto");
+ // Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when
+ // compiling dylibs (and their dependencies), even when LTO is enabled for the
+ // crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here.
+ let lto_type = match builder.config.rust_lto {
+ RustcLto::Thin => "thin",
+ RustcLto::Fat => "fat",
+ _ => unreachable!(),
+ };
+ cargo.rustflag(&format!("-Clto={}", lto_type));
+ cargo.rustflag("-Cembed-bitcode=yes");
+ }
+ RustcLto::ThinLocal => { /* Do nothing, this is the default */ }
+ }
+ }
+
builder.info(&format!(
"Building stage{} compiler artifacts ({} -> {})",
compiler.stage, &compiler.host, target
@@ -1099,10 +1126,13 @@ impl Step for Sysroot {
/// 1-3.
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
+ let host_dir = builder.out.join(&compiler.host.triple);
let sysroot = if compiler.stage == 0 {
- builder.out.join(&compiler.host.triple).join("stage0-sysroot")
+ host_dir.join("stage0-sysroot")
+ } else if builder.download_rustc() {
+ host_dir.join("ci-rustc-sysroot")
} else {
- builder.out.join(&compiler.host.triple).join(format!("stage{}", compiler.stage))
+ host_dir.join(format!("stage{}", compiler.stage))
};
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
@@ -1113,6 +1143,11 @@ impl Step for Sysroot {
builder.config.build, compiler.host,
"Cross-compiling is not yet supported with `download-rustc`",
);
+
+ // #102002: clean up the stage1 and stage0-sysroot folders when using download-rustc, so people don't use old versions of the toolchain by accident.
+ let _ = fs::remove_dir_all(host_dir.join("stage1"));
+ let _ = fs::remove_dir_all(host_dir.join("stage0-sysroot"));
+
// Copy the compiler into the correct sysroot.
let ci_rustc_dir =
builder.config.out.join(&*builder.config.build.triple).join("ci-rustc");
@@ -1142,6 +1177,20 @@ impl Step for Sysroot {
);
}
}
+ // Same for the rustc-src component.
+ let sysroot_lib_rustlib_rustcsrc = sysroot.join("lib/rustlib/rustc-src");
+ t!(fs::create_dir_all(&sysroot_lib_rustlib_rustcsrc));
+ let sysroot_lib_rustlib_rustcsrc_rust = sysroot_lib_rustlib_rustcsrc.join("rust");
+ if let Err(e) =
+ symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_rustcsrc_rust)
+ {
+ eprintln!(
+ "warning: creating symbolic link `{}` to `{}` failed with {}",
+ sysroot_lib_rustlib_rustcsrc_rust.display(),
+ builder.src.display(),
+ e,
+ );
+ }
INTERNER.intern_path(sysroot)
}
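
For reference, these are the extra rustc flags the LTO hunk above adds when building the stage > 0 compiler, expressed as a hedged helper; `dylib_lto_rustflags` is invented, and bootstrap pushes these flags through `cargo.rustflag` inline:

```rust
/// Illustrative mapping from the `rust.lto` setting to the extra flags added
/// when building the rustc dylibs; bootstrap inlines this logic above.
fn dylib_lto_rustflags(lto: &RustcLto) -> Vec<&'static str> {
    match lto {
        // Default: no extra flags; keep cargo's usual thin-local LTO.
        RustcLto::ThinLocal => vec![],
        // Opt into the experimental dylib LTO support and re-enable bitcode,
        // since cargo passes `-Cembed-bitcode=no` for dylibs by default.
        RustcLto::Thin => vec!["-Zdylib-lto", "-Clto=thin", "-Cembed-bitcode=yes"],
        RustcLto::Fat => vec!["-Zdylib-lto", "-Clto=fat", "-Cembed-bitcode=yes"],
    }
}
```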
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index f1a150e0f..a8c403675 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -73,6 +73,8 @@ pub struct Config {
pub color: Color,
pub patch_binaries_for_nix: bool,
pub stage0_metadata: Stage0Metadata,
+ /// Whether to use the `c` feature of the `compiler_builtins` crate.
+ pub optimized_compiler_builtins: bool,
pub on_fail: Option<String>,
pub stage: u32,
@@ -156,9 +158,12 @@ pub struct Config {
pub rust_new_symbol_mangling: Option<bool>,
pub rust_profile_use: Option<String>,
pub rust_profile_generate: Option<String>,
+ pub rust_lto: RustcLto,
pub llvm_profile_use: Option<String>,
pub llvm_profile_generate: bool,
pub llvm_libunwind_default: Option<LlvmLibunwind>,
+ pub llvm_bolt_profile_generate: bool,
+ pub llvm_bolt_profile_use: Option<String>,
pub build: TargetSelection,
pub hosts: Vec<TargetSelection>,
@@ -315,6 +320,28 @@ impl SplitDebuginfo {
}
}
+/// LTO mode used for compiling rustc itself.
+#[derive(Default, Clone)]
+pub enum RustcLto {
+ #[default]
+ ThinLocal,
+ Thin,
+ Fat,
+}
+
+impl std::str::FromStr for RustcLto {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "thin-local" => Ok(RustcLto::ThinLocal),
+ "thin" => Ok(RustcLto::Thin),
+ "fat" => Ok(RustcLto::Fat),
+ _ => Err(format!("Invalid value for rustc LTO: {}", s)),
+ }
+ }
+}
+
#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TargetSelection {
pub triple: Interned<String>,
@@ -597,6 +624,7 @@ define_config! {
bench_stage: Option<u32> = "bench-stage",
patch_binaries_for_nix: Option<bool> = "patch-binaries-for-nix",
metrics: Option<bool> = "metrics",
+ optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
}
}
@@ -721,6 +749,7 @@ define_config! {
profile_use: Option<String> = "profile-use",
// ignored; this is set from an env var set by bootstrap.py
download_rustc: Option<StringOrBool> = "download-rustc",
+ lto: Option<String> = "lto",
}
}
@@ -772,21 +801,20 @@ impl Config {
// set by build.rs
config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
+
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// Undo `src/bootstrap`
config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned();
config.out = PathBuf::from("build");
- config.initial_cargo = PathBuf::from(env!("CARGO"));
- config.initial_rustc = PathBuf::from(env!("RUSTC"));
-
config
}
pub fn parse(args: &[String]) -> Config {
let flags = Flags::parse(&args);
-
let mut config = Config::default_opts();
+
+ // Set flags.
config.exclude = flags.exclude.into_iter().map(|path| TaskPath::parse(path)).collect();
config.include_default_paths = flags.include_default_paths;
config.rustc_error_format = flags.rustc_error_format;
@@ -804,8 +832,68 @@ impl Config {
}
config.llvm_profile_use = flags.llvm_profile_use;
config.llvm_profile_generate = flags.llvm_profile_generate;
+ config.llvm_bolt_profile_generate = flags.llvm_bolt_profile_generate;
+ config.llvm_bolt_profile_use = flags.llvm_bolt_profile_use;
+
+ if config.llvm_bolt_profile_generate && config.llvm_bolt_profile_use.is_some() {
+ eprintln!(
+ "Cannot use both `llvm_bolt_profile_generate` and `llvm_bolt_profile_use` at the same time"
+ );
+ crate::detail_exit(1);
+ }
+
+ // Infer the rest of the configuration.
+
+ // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary,
+ // running on a completely different machine from where it was compiled.
+ let mut cmd = Command::new("git");
+ // NOTE: we cannot support running from outside the repository because the only path we have available
+ // is set at compile time, which can be wrong if bootstrap was downloaded from source.
+ // We still support running outside the repository if we find we aren't in a git directory.
+ cmd.arg("rev-parse").arg("--show-toplevel");
+ // Discard stderr because we expect this to fail when building from a tarball.
+ let output = cmd
+ .stderr(std::process::Stdio::null())
+ .output()
+ .ok()
+ .and_then(|output| if output.status.success() { Some(output) } else { None });
+ if let Some(output) = output {
+ let git_root = String::from_utf8(output.stdout).unwrap();
+ // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes.
+ let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap();
+ let s = git_root.to_str().unwrap();
+
+ // Bootstrap is quite bad at handling the verbatim `\\?\` prefix in front of paths
+ let src = match s.strip_prefix("\\\\?\\") {
+ Some(p) => PathBuf::from(p),
+ None => PathBuf::from(git_root),
+ };
+ // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when,
+ // for example, the build directory is inside of another unrelated git directory.
+ // In that case keep the original `CARGO_MANIFEST_DIR` handling.
+ //
+ // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside
+ // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1.
+ if src.join("src").join("stage0.json").exists() {
+ config.src = src;
+ }
+ } else {
+ // We're building from a tarball, not git sources.
+ // We don't support pre-downloaded bootstrap in this case.
+ }
+
+ if cfg!(test) {
+ // Use the build directory of the original x.py invocation, so that we can set `initial_rustc` properly.
+ config.out = Path::new(
+ &env::var_os("CARGO_TARGET_DIR").expect("cargo test directly is not supported"),
+ )
+ .parent()
+ .unwrap()
+ .to_path_buf();
+ }
let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json")));
+
config.stage0_metadata = t!(serde_json::from_slice::<Stage0Metadata>(&stage0_json));
#[cfg(test)]
@@ -861,7 +949,6 @@ impl Config {
let build = toml.build.unwrap_or_default();
- set(&mut config.initial_rustc, build.rustc.map(PathBuf::from));
set(&mut config.out, flags.build_dir.or_else(|| build.build_dir.map(PathBuf::from)));
// NOTE: Bootstrap spawns various commands with different working directories.
// To avoid writing to random places on the file system, `config.out` needs to be an absolute path.
@@ -870,6 +957,16 @@ impl Config {
config.out = crate::util::absolute(&config.out);
}
+ config.initial_rustc = build
+ .rustc
+ .map(PathBuf::from)
+ .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/rustc"));
+ config.initial_cargo = build
+ .cargo
+ .map(PathBuf::from)
+ .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo"));
+
+ // NOTE: it's important this comes *after* we set `initial_rustc` just above.
if config.dry_run {
let dir = config.out.join("tmp-dry-run");
t!(fs::create_dir_all(&dir));
@@ -916,6 +1013,7 @@ impl Config {
set(&mut config.print_step_timings, build.print_step_timings);
set(&mut config.print_step_rusage, build.print_step_rusage);
set(&mut config.patch_binaries_for_nix, build.patch_binaries_for_nix);
+ set(&mut config.optimized_compiler_builtins, build.optimized_compiler_builtins);
config.verbose = cmp::max(config.verbose, flags.verbose);
@@ -1099,6 +1197,12 @@ impl Config {
config.rust_profile_use = flags.rust_profile_use.or(rust.profile_use);
config.rust_profile_generate = flags.rust_profile_generate.or(rust.profile_generate);
config.download_rustc_commit = download_ci_rustc_commit(&config, rust.download_rustc);
+
+ config.rust_lto = rust
+ .lto
+ .as_deref()
+ .map(|value| RustcLto::from_str(value).unwrap())
+ .unwrap_or_default();
} else {
config.rust_profile_use = flags.rust_profile_use;
config.rust_profile_generate = flags.rust_profile_generate;
@@ -1280,20 +1384,21 @@ impl Config {
git
}
- pub(crate) fn artifact_version_part(&self, commit: &str) -> String {
- let mut channel = self.git();
- channel.arg("show").arg(format!("{}:src/ci/channel", commit));
- let channel = output(&mut channel);
-
- let mut version = self.git();
- version.arg("show").arg(format!("{}:src/version", commit));
- let version = output(&mut version);
-
- match channel.trim() {
- "stable" => version.trim().to_owned(),
- "beta" => channel.trim().to_owned(),
- "nightly" => channel.trim().to_owned(),
- other => unreachable!("{:?} is not recognized as a valid channel", other),
+ pub(crate) fn artifact_channel(&self, builder: &Builder<'_>, commit: &str) -> String {
+ if builder.rust_info.is_managed_git_subrepository() {
+ let mut channel = self.git();
+ channel.arg("show").arg(format!("{}:src/ci/channel", commit));
+ let channel = output(&mut channel);
+ channel.trim().to_owned()
+ } else if let Ok(channel) = fs::read_to_string(builder.src.join("src/ci/channel")) {
+ channel.trim().to_owned()
+ } else {
+ let src = builder.src.display();
+ eprintln!("error: failed to determine artifact channel");
+ eprintln!(
+ "help: either use git or ensure that {src}/src/ci/channel contains the name of the channel to use"
+ );
+ panic!();
}
}
@@ -1431,7 +1536,7 @@ impl Config {
}
pub fn submodules(&self, rust_info: &GitInfo) -> bool {
- self.submodules.unwrap_or(rust_info.is_git())
+ self.submodules.unwrap_or(rust_info.is_managed_git_subrepository())
}
}
@@ -1536,7 +1641,7 @@ fn maybe_download_rustfmt(builder: &Builder<'_>) -> Option<PathBuf> {
fn download_ci_rustc(builder: &Builder<'_>, commit: &str) {
builder.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
- let version = builder.config.artifact_version_part(commit);
+ let channel = builder.config.artifact_channel(builder, commit);
let host = builder.config.build.triple;
let bin_root = builder.out.join(host).join("ci-rustc");
let rustc_stamp = bin_root.join(".rustc-stamp");
@@ -1545,13 +1650,13 @@ fn download_ci_rustc(builder: &Builder<'_>, commit: &str) {
if bin_root.exists() {
t!(fs::remove_dir_all(&bin_root));
}
- let filename = format!("rust-std-{version}-{host}.tar.xz");
+ let filename = format!("rust-std-{channel}-{host}.tar.xz");
let pattern = format!("rust-std-{host}");
download_ci_component(builder, filename, &pattern, commit);
- let filename = format!("rustc-{version}-{host}.tar.xz");
+ let filename = format!("rustc-{channel}-{host}.tar.xz");
download_ci_component(builder, filename, "rustc", commit);
// download-rustc doesn't need its own cargo, it can just use beta's.
- let filename = format!("rustc-dev-{version}-{host}.tar.xz");
+ let filename = format!("rustc-dev-{channel}-{host}.tar.xz");
download_ci_component(builder, filename, "rustc-dev", commit);
builder.fix_bin_or_dylib(&bin_root.join("bin").join("rustc"));
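
The accepted values for the new `rust.lto` option map onto `RustcLto` via its `FromStr` impl, with `ThinLocal` as the default when the key is absent. A short sketch, assuming the enum above is in scope (`matches!` is used because the enum only derives `Default` and `Clone`):

```rust
use std::str::FromStr;

fn main() {
    // The same strings accepted by `lto = "..."` under `[rust]` in config.toml.
    assert!(matches!(RustcLto::from_str("thin-local"), Ok(RustcLto::ThinLocal)));
    assert!(matches!(RustcLto::from_str("thin"), Ok(RustcLto::Thin)));
    assert!(matches!(RustcLto::from_str("fat"), Ok(RustcLto::Fat)));
    assert!(RustcLto::from_str("full").is_err());

    // With no `rust.lto` key, the default keeps the existing thin-local behavior.
    assert!(matches!(RustcLto::default(), RustcLto::ThinLocal));
}
```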
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 1a59b3958..12585e80e 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -16,6 +16,7 @@ use std::process::Command;
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
+use crate::channel;
use crate::compile;
use crate::config::TargetSelection;
use crate::tarball::{GeneratedTarball, OverlayKind, Tarball};
@@ -35,18 +36,6 @@ pub fn tmpdir(builder: &Builder<'_>) -> PathBuf {
builder.out.join("tmp/dist")
}
-fn missing_tool(tool_name: &str, skip: bool) {
- if skip {
- println!("Unable to build {}, skipping dist", tool_name)
- } else {
- let help = "note: not all tools are available on all nightlies\nhelp: see https://forge.rust-lang.org/infra/toolstate.html for more information";
- panic!(
- "Unable to build submodule tool {} (use `missing-tools = true` to ignore this failure)\n{}",
- tool_name, help
- )
- }
-}
-
fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool {
if !builder.config.extended {
return false;
@@ -87,6 +76,39 @@ impl Step for Docs {
}
}
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct JsonDocs {
+ pub host: TargetSelection,
+}
+
+impl Step for JsonDocs {
+ type Output = Option<GeneratedTarball>;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ let default = run.builder.config.docs;
+ run.alias("rust-docs-json").default_condition(default)
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(JsonDocs { host: run.target });
+ }
+
+ /// Builds the `rust-docs-json` installer component.
+ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+ let host = self.host;
+ builder.ensure(crate::doc::JsonStd { stage: builder.top_stage, target: host });
+
+ let dest = "share/doc/rust/json";
+
+ let mut tarball = Tarball::new(builder, "rust-docs-json", &host.triple);
+ tarball.set_product_name("Rust Documentation In JSON Format");
+ tarball.is_preview(true);
+ tarball.add_bulk_dir(&builder.json_doc_out(host), dest);
+ Some(tarball.generate())
+ }
+}
+
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustcDocs {
pub host: TargetSelection,
@@ -897,12 +919,13 @@ impl Step for PlainSourceTarball {
// Create the version file
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
- if let Some(sha) = builder.rust_sha() {
- builder.create(&plain_dst_src.join("git-commit-hash"), &sha);
+ if let Some(info) = builder.rust_info.info() {
+ channel::write_commit_hash_file(&plain_dst_src, &info.sha);
+ channel::write_commit_info_file(&plain_dst_src, info);
}
// If we're building from git sources, we need to vendor a complete distribution.
- if builder.rust_info.is_git() {
+ if builder.rust_info.is_managed_git_subrepository() {
// Ensure we have the submodules checked out.
builder.update_submodule(Path::new("src/tools/rust-analyzer"));
@@ -1170,18 +1193,9 @@ impl Step for Miri {
let compiler = self.compiler;
let target = self.target;
- let miri = builder
- .ensure(tool::Miri { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("miri", builder.build.config.missing_tools);
- None
- })?;
- let cargomiri = builder
- .ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("cargo miri", builder.build.config.missing_tools);
- None
- })?;
+ let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() })?;
+ let cargomiri =
+ builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })?;
let mut tarball = Tarball::new(builder, "miri", &target.triple);
tarball.set_overlay(OverlayKind::Miri);
@@ -1353,6 +1367,7 @@ impl Step for Extended {
}
add_component!("rust-docs" => Docs { host: target });
+ add_component!("rust-json-docs" => JsonDocs { host: target });
add_component!("rust-demangler"=> RustDemangler { compiler, target });
add_component!("cargo" => Cargo { compiler, target });
add_component!("rustfmt" => Rustfmt { compiler, target });
@@ -1412,7 +1427,7 @@ impl Step for Extended {
let xform = |p: &Path| {
let mut contents = t!(fs::read_to_string(p));
- for tool in &["rust-demangler", "rust-analyzer", "miri", "rustfmt"] {
+ for tool in &["rust-demangler", "miri"] {
if !built_tools.contains(tool) {
contents = filter(&contents, tool);
}
@@ -1452,7 +1467,8 @@ impl Step for Extended {
prepare("rust-std");
prepare("rust-analysis");
prepare("clippy");
- for tool in &["rust-docs", "rust-demangler", "rust-analyzer", "miri"] {
+ prepare("rust-analyzer");
+ for tool in &["rust-docs", "rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1511,7 +1527,8 @@ impl Step for Extended {
prepare("rust-docs");
prepare("rust-std");
prepare("clippy");
- for tool in &["rust-demangler", "rust-analyzer", "miri"] {
+ prepare("rust-analyzer");
+ for tool in &["rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1595,25 +1612,23 @@ impl Step for Extended {
.arg("-out")
.arg(exe.join("StdGroup.wxs")),
);
- if built_tools.contains("rust-analyzer") {
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("rust-analyzer")
- .args(&heat_flags)
- .arg("-cg")
- .arg("RustAnalyzerGroup")
- .arg("-dr")
- .arg("RustAnalyzer")
- .arg("-var")
- .arg("var.RustAnalyzerDir")
- .arg("-out")
- .arg(exe.join("RustAnalyzerGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/remove-duplicates.xsl")),
- );
- }
+ builder.run(
+ Command::new(&heat)
+ .current_dir(&exe)
+ .arg("dir")
+ .arg("rust-analyzer")
+ .args(&heat_flags)
+ .arg("-cg")
+ .arg("RustAnalyzerGroup")
+ .arg("-dr")
+ .arg("RustAnalyzer")
+ .arg("-var")
+ .arg("var.RustAnalyzerDir")
+ .arg("-out")
+ .arg(exe.join("RustAnalyzerGroup.wxs"))
+ .arg("-t")
+ .arg(etc.join("msi/remove-duplicates.xsl")),
+ );
builder.run(
Command::new(&heat)
.current_dir(&exe)
@@ -1745,15 +1760,15 @@ impl Step for Extended {
candle("CargoGroup.wxs".as_ref());
candle("StdGroup.wxs".as_ref());
candle("ClippyGroup.wxs".as_ref());
+ if built_tools.contains("miri") {
+ candle("MiriGroup.wxs".as_ref());
+ }
if built_tools.contains("rust-demangler") {
candle("RustDemanglerGroup.wxs".as_ref());
}
if built_tools.contains("rust-analyzer") {
candle("RustAnalyzerGroup.wxs".as_ref());
}
- if built_tools.contains("miri") {
- candle("MiriGroup.wxs".as_ref());
- }
candle("AnalysisGroup.wxs".as_ref());
if target.ends_with("windows-gnu") {
@@ -1785,15 +1800,15 @@ impl Step for Extended {
.arg("ClippyGroup.wixobj")
.current_dir(&exe);
+ if built_tools.contains("miri") {
+ cmd.arg("MiriGroup.wixobj");
+ }
if built_tools.contains("rust-analyzer") {
cmd.arg("RustAnalyzerGroup.wixobj");
}
if built_tools.contains("rust-demangler") {
cmd.arg("RustDemanglerGroup.wixobj");
}
- if built_tools.contains("miri") {
- cmd.arg("MiriGroup.wixobj");
- }
if target.ends_with("windows-gnu") {
cmd.arg("GccGroup.wixobj");
@@ -1838,23 +1853,21 @@ fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
///
/// Returns whether the files were actually copied.
fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) -> bool {
- if let Some(config) = builder.config.target_config.get(&target) {
- if config.llvm_config.is_some() && !builder.config.llvm_from_ci {
- // If the LLVM was externally provided, then we don't currently copy
- // artifacts into the sysroot. This is not necessarily the right
- // choice (in particular, it will require the LLVM dylib to be in
- // the linker's load path at runtime), but the common use case for
- // external LLVMs is distribution provided LLVMs, and in that case
- // they're usually in the standard search path (e.g., /usr/lib) and
- // copying them here is going to cause problems as we may end up
- // with the wrong files and isn't what distributions want.
- //
- // This behavior may be revisited in the future though.
- //
- // If the LLVM is coming from ourselves (just from CI) though, we
- // still want to install it, as it otherwise won't be available.
- return false;
- }
+ if !builder.is_rust_llvm(target) {
+ // If the LLVM was externally provided, then we don't currently copy
+ // artifacts into the sysroot. This is not necessarily the right
+ // choice (in particular, it will require the LLVM dylib to be in
+ // the linker's load path at runtime), but the common use case for
+ // external LLVMs is distribution provided LLVMs, and in that case
+ // they're usually in the standard search path (e.g., /usr/lib) and
+ // copying them here is going to cause problems as we may end up
+ // with the wrong files and isn't what distributions want.
+ //
+ // This behavior may be revisited in the future though.
+ //
+ // If the LLVM is coming from ourselves (just from CI) though, we
+ // still want to install it, as it otherwise won't be available.
+ return false;
}
// On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib
@@ -1873,7 +1886,7 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
let mut cmd = Command::new(llvm_config);
cmd.arg("--libfiles");
builder.verbose(&format!("running {:?}", cmd));
- let files = output(&mut cmd);
+ let files = if builder.config.dry_run { "".into() } else { output(&mut cmd) };
let build_llvm_out = &builder.llvm_out(builder.config.build);
let target_llvm_out = &builder.llvm_out(target);
for file in files.trim_end().split(' ') {
@@ -2020,6 +2033,8 @@ impl Step for RustDev {
"llvm-dwp",
"llvm-nm",
"llvm-dwarfdump",
+ "llvm-dis",
+ "llvm-tblgen",
] {
tarball.add_file(src_bindir.join(exe(bin, target)), "bin", 0o755);
}
@@ -2050,6 +2065,41 @@ impl Step for RustDev {
}
}
+// Tarball intended for internal consumption to ease rustc/std development.
+//
+// Should not be considered stable by end users.
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub struct Bootstrap {
+ pub target: TargetSelection,
+}
+
+impl Step for Bootstrap {
+ type Output = Option<GeneratedTarball>;
+ const DEFAULT: bool = false;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.alias("bootstrap")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Bootstrap { target: run.target });
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+ let target = self.target;
+
+ let tarball = Tarball::new(builder, "bootstrap", &target.triple);
+
+ let bootstrap_outdir = &builder.bootstrap_out;
+ for file in &["bootstrap", "llvm-config-wrapper", "rustc", "rustdoc", "sccache-plus-cl"] {
+ tarball.add_file(bootstrap_outdir.join(exe(file, target)), "bootstrap/bin", 0o755);
+ }
+
+ Some(tarball.generate())
+ }
+}
+
/// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the
/// release process to avoid cloning the monorepo and building stuff.
///
@@ -2115,6 +2165,10 @@ impl Step for ReproducibleArtifacts {
tarball.add_file(path, ".", 0o644);
added_anything = true;
}
+ if let Some(path) = builder.config.llvm_bolt_profile_use.as_ref() {
+ tarball.add_file(path, ".", 0o644);
+ added_anything = true;
+ }
if added_anything { Some(tarball.generate()) } else { None }
}
}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index f909ecc0a..ea06caf9c 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -7,6 +7,7 @@
//! Everything here is basically just a shim around calling either `rustbook` or
//! `rustdoc`.
+use std::ffi::OsStr;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
@@ -81,6 +82,7 @@ book!(
Reference, "src/doc/reference", "reference", submodule;
RustByExample, "src/doc/rust-by-example", "rust-by-example", submodule;
RustdocBook, "src/doc/rustdoc", "rustdoc";
+ StyleGuide, "src/doc/style-guide", "style-guide";
);
fn open(builder: &Builder<'_>, path: impl AsRef<Path>) {
@@ -226,7 +228,7 @@ impl Step for TheBook {
}
// build the version info page and CSS
- builder.ensure(Standalone { compiler, target });
+ let shared_assets = builder.ensure(SharedAssets { target });
// build the redirect pages
builder.info(&format!("Documenting book redirect pages ({})", target));
@@ -235,7 +237,7 @@ impl Step for TheBook {
let path = file.path();
let path = path.to_str().unwrap();
- invoke_rustdoc(builder, compiler, target, path);
+ invoke_rustdoc(builder, compiler, &shared_assets, target, path);
}
if builder.was_invoked_explicitly::<Self>(Kind::Doc) {
@@ -249,6 +251,7 @@ impl Step for TheBook {
fn invoke_rustdoc(
builder: &Builder<'_>,
compiler: Compiler,
+ shared_assets: &SharedAssetsPaths,
target: TargetSelection,
markdown: &str,
) {
@@ -258,7 +261,6 @@ fn invoke_rustdoc(
let header = builder.src.join("src/doc/redirect.inc");
let footer = builder.src.join("src/doc/footer.inc");
- let version_info = out.join("version_info.html");
let mut cmd = builder.rustdoc_cmd(compiler);
@@ -267,7 +269,7 @@ fn invoke_rustdoc(
cmd.arg("--html-after-content")
.arg(&footer)
.arg("--html-before-content")
- .arg(&version_info)
+ .arg(&shared_assets.version_info)
.arg("--html-in-header")
.arg(&header)
.arg("--markdown-no-toc")
@@ -298,7 +300,7 @@ impl Step for Standalone {
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.path("src/doc").default_condition(builder.config.docs)
+ run.path("src/doc").alias("standalone").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig<'_>) {
@@ -323,21 +325,11 @@ impl Step for Standalone {
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
+ let version_info = builder.ensure(SharedAssets { target: self.target }).version_info;
+
let favicon = builder.src.join("src/doc/favicon.inc");
let footer = builder.src.join("src/doc/footer.inc");
let full_toc = builder.src.join("src/doc/full-toc.inc");
- t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
-
- let version_input = builder.src.join("src/doc/version_info.html.template");
- let version_info = out.join("version_info.html");
-
- if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
- let info = t!(fs::read_to_string(&version_input))
- .replace("VERSION", &builder.rust_release())
- .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
- .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
- t!(fs::write(&version_info, &info));
- }
for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
let file = t!(file);
@@ -383,15 +375,9 @@ impl Step for Standalone {
}
if filename == "not_found.md" {
- cmd.arg("--markdown-css")
- .arg(format!("https://doc.rust-lang.org/rustdoc{}.css", &builder.version))
- .arg("--markdown-css")
- .arg("https://doc.rust-lang.org/rust.css");
+ cmd.arg("--markdown-css").arg("https://doc.rust-lang.org/rust.css");
} else {
- cmd.arg("--markdown-css")
- .arg(format!("rustdoc{}.css", &builder.version))
- .arg("--markdown-css")
- .arg("rust.css");
+ cmd.arg("--markdown-css").arg("rust.css");
}
builder.run(&mut cmd);
}
@@ -405,6 +391,45 @@ impl Step for Standalone {
}
}
+#[derive(Debug, Clone)]
+pub struct SharedAssetsPaths {
+ pub version_info: PathBuf,
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct SharedAssets {
+ target: TargetSelection,
+}
+
+impl Step for SharedAssets {
+ type Output = SharedAssetsPaths;
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ // Other tasks depend on this, no need to execute it on its own
+ run.never()
+ }
+
+ // Generate shared resources used by other pieces of documentation.
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let out = builder.doc_out(self.target);
+
+ let version_input = builder.src.join("src").join("doc").join("version_info.html.template");
+ let version_info = out.join("version_info.html");
+ if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
+ let info = t!(fs::read_to_string(&version_input))
+ .replace("VERSION", &builder.rust_release())
+ .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
+ .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
+ t!(fs::write(&version_info, &info));
+ }
+
+ builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css"));
+
+ SharedAssetsPaths { version_info }
+ }
+}
+
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub stage: u32,
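The new `SharedAssets` step above is mostly plain template substitution: `version_info.html.template` carries `VERSION`, `SHORT_HASH` and `STAMP` placeholders that are filled in from the release string and git metadata, guarded by an `up_to_date` check. A minimal, self-contained sketch of just the substitution (not part of the patch; the template and hashes are invented for illustration):

```rust
/// Same placeholder substitution that `SharedAssets::run` performs on the
/// contents of `version_info.html.template`.
fn render_version_info(template: &str, release: &str, short_hash: &str, sha: &str) -> String {
    template
        .replace("VERSION", release)
        .replace("SHORT_HASH", short_hash)
        .replace("STAMP", sha)
}

fn main() {
    // Invented inputs, purely to show the shape of the output.
    let template = "Rust VERSION (SHORT_HASH STAMP)";
    let rendered = render_version_info(template, "1.67.0", "abcdef123", "abcdef1234567890");
    assert_eq!(rendered, "Rust 1.67.0 (abcdef123 abcdef1234567890)");
    println!("{rendered}");
}
```

Dependent steps such as `TheBook` and `Standalone` now call `builder.ensure(SharedAssets { target })` and reuse the returned `version_info` path instead of regenerating the file themselves.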
@@ -431,49 +456,25 @@ impl Step for Std {
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
- builder.info(&format!("Documenting stage{} std ({})", stage, target));
- if builder.no_std(target) == Some(true) {
- panic!(
- "building std documentation for no_std target {target} is not supported\n\
- Set `docs = false` in the config to disable documentation."
- );
- }
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, builder.config.build);
-
- let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
-
- t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
-
- let run_cargo_rustdoc_for = |package: &str| {
- let mut cargo =
- builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
- compile::std_cargo(builder, target, compiler.stage, &mut cargo);
- cargo
- .arg("-p")
- .arg(package)
- .arg("-Zskip-rustdoc-fingerprint")
- .arg("--")
- .arg("--markdown-css")
- .arg("rust.css")
- .arg("--markdown-no-toc")
- .arg("-Z")
- .arg("unstable-options")
- .arg("--resource-suffix")
- .arg(&builder.version)
- .arg("--index-page")
- .arg(&builder.src.join("src/doc/index.md"));
+ builder.ensure(SharedAssets { target: self.target });
- if !builder.config.docs_minification {
- cargo.arg("--disable-minification");
- }
+ let index_page = builder.src.join("src/doc/index.md").into_os_string();
+ let mut extra_args = vec![
+ OsStr::new("--markdown-css"),
+ OsStr::new("rust.css"),
+ OsStr::new("--markdown-no-toc"),
+ OsStr::new("--index-page"),
+ &index_page,
+ ];
- builder.run(&mut cargo.into());
- };
+ if !builder.config.docs_minification {
+ extra_args.push(OsStr::new("--disable-minification"));
+ }
- let paths = builder
+ let requested_crates = builder
.paths
.iter()
.map(components_simplified)
@@ -491,30 +492,20 @@ impl Step for Std {
})
.collect::<Vec<_>>();
- // Only build the following crates. While we could just iterate over the
- // folder structure, that would also build internal crates that we do
- // not want to show in documentation. These crates will later be visited
- // by the rustc step, so internal documentation will show them.
- //
- // Note that the order here is important! The crates need to be
- // processed starting from the leaves, otherwise rustdoc will not
- // create correct links between crates because rustdoc depends on the
- // existence of the output directories to know if it should be a local
- // or remote link.
- let krates = ["core", "alloc", "std", "proc_macro", "test"];
- for krate in &krates {
- run_cargo_rustdoc_for(krate);
- if paths.iter().any(|p| p == krate) {
- // No need to document more of the libraries if we have the one we want.
- break;
- }
- }
- builder.cp_r(&out_dir, &out);
+ doc_std(
+ builder,
+ DocumentationFormat::HTML,
+ stage,
+ target,
+ &out,
+ &extra_args,
+ &requested_crates,
+ );
// Look for library/std, library/core etc in the `x.py doc` arguments and
// open the corresponding rendered docs.
- for requested_crate in paths {
- if krates.iter().any(|k| *k == requested_crate.as_str()) {
+ for requested_crate in requested_crates {
+ if STD_PUBLIC_CRATES.iter().any(|k| *k == requested_crate.as_str()) {
let index = out.join(requested_crate).join("index.html");
open(builder, &index);
}
@@ -523,6 +514,134 @@ impl Step for Std {
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct JsonStd {
+ pub stage: u32,
+ pub target: TargetSelection,
+}
+
+impl Step for JsonStd {
+ type Output = ();
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ let default = run.builder.config.docs && run.builder.config.cmd.json();
+ run.all_krates("test").path("library").default_condition(default)
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(JsonStd { stage: run.builder.top_stage, target: run.target });
+ }
+
+ /// Build JSON documentation for the standard library crates.
+ ///
+ /// This is largely just a wrapper around `cargo doc`.
+ fn run(self, builder: &Builder<'_>) {
+ let stage = self.stage;
+ let target = self.target;
+ let out = builder.json_doc_out(target);
+ t!(fs::create_dir_all(&out));
+ let extra_args = [OsStr::new("--output-format"), OsStr::new("json")];
+ doc_std(builder, DocumentationFormat::JSON, stage, target, &out, &extra_args, &[])
+ }
+}
+
+/// Names of the crates that are visible to consumers of the standard library.
+/// Documentation for internal crates is handled by the rustc step, so internal crates will show
+/// up there.
+///
+/// Order here is important!
+/// Crates need to be processed starting from the leaves, otherwise rustdoc will not
+/// create correct links between crates because rustdoc depends on the
+/// existence of the output directories to know if it should be a local
+/// or remote link.
+const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+enum DocumentationFormat {
+ HTML,
+ JSON,
+}
+
+impl DocumentationFormat {
+ fn as_str(&self) -> &str {
+ match self {
+ DocumentationFormat::HTML => "HTML",
+ DocumentationFormat::JSON => "JSON",
+ }
+ }
+}
+
+/// Build the documentation for public standard library crates.
+///
+/// `requested_crates` can be used to build only a subset of the crates. If empty, all crates will
+/// be built.
+fn doc_std(
+ builder: &Builder<'_>,
+ format: DocumentationFormat,
+ stage: u32,
+ target: TargetSelection,
+ out: &Path,
+ extra_args: &[&OsStr],
+ requested_crates: &[String],
+) {
+ builder.info(&format!(
+ "Documenting stage{} std ({}) in {} format",
+ stage,
+ target,
+ format.as_str()
+ ));
+ if builder.no_std(target) == Some(true) {
+ panic!(
+ "building std documentation for no_std target {target} is not supported\n\
+ Set `docs = false` in the config to disable documentation."
+ );
+ }
+ let compiler = builder.compiler(stage, builder.config.build);
+ // This is the directory where the compiler will place the output of the command.
+ // We will then copy the files from this directory into the final `out` directory
+ // specified as a function parameter.
+ let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
+ // `cargo` uses the same directory for both JSON docs and HTML docs.
+ // This could lead to cross-contamination when copying files into the specified `out` directory.
+ // For example:
+ // ```bash
+ // x doc std
+ // x doc std --json
+ // ```
+ // could lead to HTML docs being copied into the JSON docs output directory.
+ // To avoid this issue, we clean the doc folder before invoking `cargo`.
+ if out_dir.exists() {
+ builder.remove_dir(&out_dir);
+ }
+
+ let run_cargo_rustdoc_for = |package: &str| {
+ let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
+ compile::std_cargo(builder, target, compiler.stage, &mut cargo);
+ cargo
+ .arg("-p")
+ .arg(package)
+ .arg("-Zskip-rustdoc-fingerprint")
+ .arg("--")
+ .arg("-Z")
+ .arg("unstable-options")
+ .arg("--resource-suffix")
+ .arg(&builder.version)
+ .args(extra_args);
+ builder.run(&mut cargo.into());
+ };
+
+ for krate in STD_PUBLIC_CRATES {
+ run_cargo_rustdoc_for(krate);
+ if requested_crates.iter().any(|p| p == krate) {
+ // No need to document more of the libraries if we have the one we want.
+ break;
+ }
+ }
+
+ builder.cp_r(&out_dir, &out);
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
pub stage: u32,
pub target: TargetSelection,
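The ordering constraint documented on `STD_PUBLIC_CRATES` and the early `break` in `doc_std` are easier to see in isolation: crates are documented leaves-first, and documentation stops as soon as a crate the user explicitly requested has been produced. A standalone sketch of that selection logic (not part of the patch; it only mirrors the loop above):

```rust
const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];

/// Returns the crates `doc_std` would document for the given explicitly requested crates.
fn crates_to_document(requested: &[String]) -> Vec<&'static str> {
    let mut selected = Vec::new();
    for krate in STD_PUBLIC_CRATES {
        selected.push(krate);
        if requested.iter().any(|r| r == krate) {
            // No need to document more of the libraries if we have the one we want.
            break;
        }
    }
    selected
}

fn main() {
    // `x doc library/std` documents core and alloc first (so links resolve), then std, then stops.
    assert_eq!(crates_to_document(&["std".to_owned()]), ["core", "alloc", "std"]);
    // With no explicit request, every public crate is documented.
    assert_eq!(crates_to_document(&[]), STD_PUBLIC_CRATES);
}
```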
diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp
index 19504a51a..d19a1ae95 100644
--- a/src/bootstrap/download-ci-llvm-stamp
+++ b/src/bootstrap/download-ci-llvm-stamp
@@ -1,4 +1,4 @@
Change this file to make users of the `download-ci-llvm` configuration download
a new version of LLVM from CI, even if the LLVM submodule hasn’t changed.
-Last change is for: https://github.com/rust-lang/rust/pull/96867
+Last change is for: https://github.com/rust-lang/rust/pull/102790
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 789da7481..ee341a353 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -78,6 +78,8 @@ pub struct Flags {
//
// llvm_out/build/profiles/ is the location this writes to.
pub llvm_profile_generate: bool,
+ pub llvm_bolt_profile_generate: bool,
+ pub llvm_bolt_profile_use: Option<String>,
}
#[derive(Debug)]
@@ -107,6 +109,7 @@ pub enum Subcommand {
Doc {
paths: Vec<PathBuf>,
open: bool,
+ json: bool,
},
Test {
paths: Vec<PathBuf>,
@@ -254,6 +257,8 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
opts.optmulti("D", "", "deny certain clippy lints", "OPT");
opts.optmulti("W", "", "warn about certain clippy lints", "OPT");
opts.optmulti("F", "", "forbid certain clippy lints", "OPT");
+ opts.optflag("", "llvm-bolt-profile-generate", "generate BOLT profile for LLVM build");
+ opts.optopt("", "llvm-bolt-profile-use", "use BOLT profile for LLVM build", "PROFILE");
// We can't use getopt to parse the options until we have completed specifying which
// options are valid, but under the current implementation, some options are conditional on
@@ -325,6 +330,11 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
}
Kind::Doc => {
opts.optflag("", "open", "open the docs in a browser");
+ opts.optflag(
+ "",
+ "json",
+ "render the documentation in JSON format in addition to the usual HTML format",
+ );
}
Kind::Clean => {
opts.optflag("", "all", "clean all build artifacts");
@@ -493,6 +503,7 @@ Arguments:
./x.py doc src/doc/book
./x.py doc src/doc/nomicon
./x.py doc src/doc/book library/std
+ ./x.py doc library/std --json
./x.py doc library/std --open
If no arguments are passed then everything is documented:
@@ -581,7 +592,11 @@ Arguments:
},
},
Kind::Bench => Subcommand::Bench { paths, test_args: matches.opt_strs("test-args") },
- Kind::Doc => Subcommand::Doc { paths, open: matches.opt_present("open") },
+ Kind::Doc => Subcommand::Doc {
+ paths,
+ open: matches.opt_present("open"),
+ json: matches.opt_present("json"),
+ },
Kind::Clean => {
if !paths.is_empty() {
println!("\nclean does not take a path argument\n");
@@ -680,6 +695,8 @@ Arguments:
rust_profile_generate: matches.opt_str("rust-profile-generate"),
llvm_profile_use: matches.opt_str("llvm-profile-use"),
llvm_profile_generate: matches.opt_present("llvm-profile-generate"),
+ llvm_bolt_profile_generate: matches.opt_present("llvm-bolt-profile-generate"),
+ llvm_bolt_profile_use: matches.opt_str("llvm-bolt-profile-use"),
}
}
}
@@ -787,6 +804,13 @@ impl Subcommand {
_ => false,
}
}
+
+ pub fn json(&self) -> bool {
+ match *self {
+ Subcommand::Doc { json, .. } => json,
+ _ => false,
+ }
+ }
}
fn split(s: &[String]) -> Vec<String> {
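The new flags above follow the existing `getopts` pattern in `flags.rs`. A minimal sketch of how `--json` and the two BOLT options surface as values (assuming the `getopts` crate that bootstrap already uses; the profile filename is made up):

```rust
fn main() {
    let mut opts = getopts::Options::new();
    opts.optflag("", "json", "render the documentation in JSON format in addition to the usual HTML format");
    opts.optflag("", "llvm-bolt-profile-generate", "generate BOLT profile for LLVM build");
    opts.optopt("", "llvm-bolt-profile-use", "use BOLT profile for LLVM build", "PROFILE");

    // e.g. `./x.py doc library/std --json --llvm-bolt-profile-use bolt.profdata`
    let args = ["--json", "--llvm-bolt-profile-use", "bolt.profdata"];
    let matches = opts.parse(args).unwrap();

    assert!(matches.opt_present("json"));
    assert!(!matches.opt_present("llvm-bolt-profile-generate"));
    assert_eq!(matches.opt_str("llvm-bolt-profile-use").as_deref(), Some("bolt.profdata"));
}
```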
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index d34aa15c5..7672b7c91 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -200,13 +200,10 @@ install!((self, builder, _config),
install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball);
};
Miri, alias = "miri", Self::should_build(_config), only_hosts: true, {
- if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) {
- install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
- } else {
- builder.info(
- &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target),
- );
- }
+ let tarball = builder
+ .ensure(dist::Miri { compiler: self.compiler, target: self.target })
+ .expect("missing miri");
+ install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
};
Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, {
if let Some(tarball) = builder.ensure(dist::Rustfmt {
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index cc0cf12bd..7e70e99bb 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -122,6 +122,7 @@ use crate::util::{
check_run, exe, libdir, mtime, output, run, run_suppressed, try_run, try_run_suppressed, CiEnv,
};
+mod bolt;
mod builder;
mod cache;
mod cc_detect;
@@ -198,9 +199,12 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
(None, "bootstrap", None),
(Some(Mode::Rustc), "parallel_compiler", None),
(Some(Mode::ToolRustc), "parallel_compiler", None),
+ (Some(Mode::Codegen), "parallel_compiler", None),
(Some(Mode::Std), "stdarch_intel_sde", None),
(Some(Mode::Std), "no_fp_fmt_parse", None),
(Some(Mode::Std), "no_global_oom_handling", None),
+ (Some(Mode::Std), "no_rc", None),
+ (Some(Mode::Std), "no_sync", None),
(Some(Mode::Std), "freebsd12", None),
(Some(Mode::Std), "backtrace_in_libstd", None),
/* Extra values not defined in the built-in targets yet, but used in std */
@@ -226,6 +230,8 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
// FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
(Some(Mode::Rustc), "span_locations", None),
(Some(Mode::ToolRustc), "span_locations", None),
+ // Can be passed in RUSTFLAGS to prevent direct syscalls in rustix.
+ (None, "rustix_use_libc", None),
];
/// A structure representing a Rust compiler.
@@ -395,7 +401,7 @@ impl Build {
/// line and the filesystem `config`.
///
/// By default all build output will be placed in the current directory.
- pub fn new(config: Config) -> Build {
+ pub fn new(mut config: Config) -> Build {
let src = config.src.clone();
let out = config.out.clone();
@@ -456,19 +462,22 @@ impl Build {
.expect("failed to read src/version");
let version = version.trim();
- let bootstrap_out = if std::env::var("BOOTSTRAP_PYTHON").is_ok() {
- out.join("bootstrap").join("debug")
- } else {
- let workspace_target_dir = std::env::var("CARGO_TARGET_DIR")
- .map(PathBuf::from)
- .unwrap_or_else(|_| src.join("target"));
- let bootstrap_out = workspace_target_dir.join("debug");
- if !bootstrap_out.join("rustc").exists() && !cfg!(test) {
- // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
- panic!("run `cargo build --bins` before `cargo run`")
- }
- bootstrap_out
- };
+ let bootstrap_out = std::env::current_exe()
+ .expect("could not determine path to running process")
+ .parent()
+ .unwrap()
+ .to_path_buf();
+ if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) {
+ // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
+ panic!(
+ "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
+ bootstrap_out.display()
+ )
+ }
+
+ if rust_info.is_from_tarball() && config.description.is_none() {
+ config.description = Some("built from a source tarball".to_owned());
+ }
let mut build = Build {
initial_rustc: config.initial_rustc.clone(),
@@ -540,13 +549,8 @@ impl Build {
// Make sure we update these before gathering metadata so we don't get an error about missing
// Cargo.toml files.
- let rust_submodules = [
- "src/tools/rust-installer",
- "src/tools/cargo",
- "src/tools/miri",
- "library/backtrace",
- "library/stdarch",
- ];
+ let rust_submodules =
+ ["src/tools/rust-installer", "src/tools/cargo", "library/backtrace", "library/stdarch"];
for s in rust_submodules {
build.update_submodule(Path::new(s));
}
@@ -574,7 +578,9 @@ impl Build {
// NOTE: The check for the empty directory is here because when running x.py the first time,
// the submodule won't be checked out. Check it out now so we can build it.
- if !channel::GitInfo::new(false, &absolute_path).is_git() && !dir_is_empty(&absolute_path) {
+ if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
+ && !dir_is_empty(&absolute_path)
+ {
return;
}
@@ -645,7 +651,7 @@ impl Build {
// Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
// Don't update the submodule unless it's already been cloned.
- if channel::GitInfo::new(false, submodule).is_git() {
+ if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() {
self.update_submodule(submodule);
}
}
@@ -671,6 +677,9 @@ impl Build {
return setup::setup(&self.config, *profile);
}
+ // Download rustfmt early so that it can be used in rust-analyzer configs.
+ let _ = &builder::Builder::new(&self).initial_rustfmt();
+
{
let builder = builder::Builder::new(&self);
if let Some(path) = builder.paths.get(0) {
@@ -825,6 +834,11 @@ impl Build {
self.out.join(&*target.triple).join("doc")
}
+ /// Output directory for all JSON-formatted documentation for a target
+ fn json_doc_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("json-doc")
+ }
+
fn test_out(&self, target: TargetSelection) -> PathBuf {
self.out.join(&*target.triple).join("test")
}
@@ -1253,7 +1267,7 @@ impl Build {
match &self.config.channel[..] {
"stable" => num.to_string(),
"beta" => {
- if self.rust_info.is_git() && !self.config.ignore_git {
+ if self.rust_info.is_managed_git_subrepository() && !self.config.ignore_git {
format!("{}-beta.{}", num, self.beta_prerelease_version())
} else {
format!("{}-beta", num)
@@ -1307,10 +1321,6 @@ impl Build {
self.package_vers(&self.version)
}
- fn llvm_link_tools_dynamically(&self, target: TargetSelection) -> bool {
- target.contains("linux-gnu") || target.contains("apple-darwin")
- }
-
/// Returns the `version` string associated with this compiler for Rust
/// itself.
///
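The replacement of the `BOOTSTRAP_PYTHON`/`CARGO_TARGET_DIR` guessing above boils down to "wherever the running `bootstrap` binary lives is where its sibling binaries must live". A self-contained sketch of that lookup (not part of the patch; the platform-specific `.exe` suffix handled by `exe()` is omitted for brevity):

```rust
use std::env;
use std::path::PathBuf;

/// Directory containing the currently running executable; bootstrap expects the
/// `rustc` shim to have been built next to it (via `cargo build --bins`).
fn bootstrap_out() -> PathBuf {
    env::current_exe()
        .expect("could not determine path to running process")
        .parent()
        .expect("executable has no parent directory")
        .to_path_buf()
}

fn main() {
    let out = bootstrap_out();
    println!("bootstrap binaries live in {}", out.display());
    println!("rustc shim present: {}", out.join("rustc").exists());
}
```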
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index fc3bfaf1b..2f856c276 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -16,7 +16,9 @@ use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
+use crate::bolt::{instrument_with_bolt_inplace, optimize_library_with_bolt_inplace};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
+use crate::channel;
use crate::config::TargetSelection;
use crate::util::get_clang_cl_resource_dir;
use crate::util::{self, exe, output, program_out_of_date, t, up_to_date};
@@ -115,24 +117,29 @@ pub fn prebuilt_llvm_config(
}
/// This retrieves the LLVM sha we *want* to use, according to git history.
-pub(crate) fn detect_llvm_sha(config: &crate::config::Config) -> String {
- let mut rev_list = config.git();
- rev_list.args(&[
- PathBuf::from("rev-list"),
- format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(),
- "-n1".into(),
- "--first-parent".into(),
- "HEAD".into(),
- "--".into(),
- config.src.join("src/llvm-project"),
- config.src.join("src/bootstrap/download-ci-llvm-stamp"),
- // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly`
- config.src.join("src/version"),
- ]);
- let llvm_sha = output(&mut rev_list);
- let llvm_sha = llvm_sha.trim();
-
- if llvm_sha == "" {
+pub(crate) fn detect_llvm_sha(config: &crate::config::Config, is_git: bool) -> String {
+ let llvm_sha = if is_git {
+ let mut rev_list = config.git();
+ rev_list.args(&[
+ PathBuf::from("rev-list"),
+ format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(),
+ "-n1".into(),
+ "--first-parent".into(),
+ "HEAD".into(),
+ "--".into(),
+ config.src.join("src/llvm-project"),
+ config.src.join("src/bootstrap/download-ci-llvm-stamp"),
+ // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly`
+ config.src.join("src/version"),
+ ]);
+ output(&mut rev_list).trim().to_owned()
+ } else if let Some(info) = channel::read_commit_info_file(&config.src) {
+ info.sha.trim().to_owned()
+ } else {
+ "".to_owned()
+ };
+
+ if llvm_sha.is_empty() {
eprintln!("error: could not find commit hash for downloading LLVM");
eprintln!("help: maybe your repository history is too shallow?");
eprintln!("help: consider disabling `download-ci-llvm`");
@@ -140,7 +147,7 @@ pub(crate) fn detect_llvm_sha(config: &crate::config::Config) -> String {
panic!();
}
- llvm_sha.to_owned()
+ llvm_sha
}
/// Returns whether the CI-found LLVM is currently usable.
@@ -194,7 +201,9 @@ pub(crate) fn is_ci_llvm_available(config: &crate::config::Config, asserts: bool
}
if crate::util::CiEnv::is_ci() {
- let llvm_sha = detect_llvm_sha(config);
+ // We assume we have access to git, so it's okay to unconditionally pass
+ // `true` here.
+ let llvm_sha = detect_llvm_sha(config, true);
let head_sha = output(config.git().arg("rev-parse").arg("HEAD"));
let head_sha = head_sha.trim();
if llvm_sha == head_sha {
@@ -215,7 +224,7 @@ pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
}
let llvm_root = config.ci_llvm_root();
let llvm_stamp = llvm_root.join(".llvm-stamp");
- let llvm_sha = detect_llvm_sha(&config);
+ let llvm_sha = detect_llvm_sha(&config, builder.rust_info.is_managed_git_subrepository());
let key = format!("{}{}", llvm_sha, config.llvm_assertions);
if program_out_of_date(&llvm_stamp, &key) && !config.dry_run {
download_ci_llvm(builder, &llvm_sha);
@@ -260,8 +269,8 @@ fn download_ci_llvm(builder: &Builder<'_>, llvm_sha: &str) {
} else {
&builder.config.stage0_metadata.config.artifacts_server
};
- let version = builder.config.artifact_version_part(llvm_sha);
- let filename = format!("rust-dev-{}-{}.tar.xz", version, builder.build.build.triple);
+ let channel = builder.config.artifact_channel(builder, llvm_sha);
+ let filename = format!("rust-dev-{}-{}.tar.xz", channel, builder.build.build.triple);
let tarball = rustc_cache.join(&filename);
if !tarball.exists() {
let help_on_error = "error: failed to download llvm from ci
@@ -395,6 +404,12 @@ impl Step for Llvm {
if let Some(path) = builder.config.llvm_profile_use.as_ref() {
cfg.define("LLVM_PROFDATA_FILE", &path);
}
+ if builder.config.llvm_bolt_profile_generate
+ || builder.config.llvm_bolt_profile_use.is_some()
+ {
+ // Relocations are required for BOLT to work.
+ ldflags.push_all("-Wl,-q");
+ }
// Disable zstd to avoid a dependency on libzstd.so.
cfg.define("LLVM_ENABLE_ZSTD", "OFF");
@@ -423,12 +438,7 @@ impl Step for Llvm {
// which saves both memory during parallel links and overall disk space
// for the tools. We don't do this on every platform as it doesn't work
// equally well everywhere.
- //
- // If we're not linking rustc to a dynamic LLVM, though, then don't link
- // tools to it.
- let llvm_link_shared =
- builder.llvm_link_tools_dynamically(target) && builder.llvm_link_shared();
- if llvm_link_shared {
+ if builder.llvm_link_shared() {
cfg.define("LLVM_LINK_LLVM_DYLIB", "ON");
}
@@ -494,18 +504,18 @@ impl Step for Llvm {
// https://llvm.org/docs/HowToCrossCompileLLVM.html
if target != builder.config.build {
- builder.ensure(Llvm { target: builder.config.build });
- // FIXME: if the llvm root for the build triple is overridden then we
- // should use llvm-tblgen from there, also should verify that it
- // actually exists most of the time in normal installs of LLVM.
- let host_bin = builder.llvm_out(builder.config.build).join("bin");
- cfg.define("LLVM_TABLEGEN", host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION));
- // LLVM_NM is required for cross compiling using MSVC
- cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION));
- cfg.define(
- "LLVM_CONFIG_PATH",
- host_bin.join("llvm-config").with_extension(EXE_EXTENSION),
- );
+ let llvm_config = builder.ensure(Llvm { target: builder.config.build });
+ if !builder.config.dry_run {
+ let llvm_bindir = output(Command::new(&llvm_config).arg("--bindir"));
+ let host_bin = Path::new(llvm_bindir.trim());
+ cfg.define(
+ "LLVM_TABLEGEN",
+ host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION),
+ );
+ // LLVM_NM is required for cross compiling using MSVC
+ cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION));
+ }
+ cfg.define("LLVM_CONFIG_PATH", llvm_config);
if builder.config.llvm_clang {
let build_bin = builder.llvm_out(builder.config.build).join("build").join("bin");
let clang_tblgen = build_bin.join("clang-tblgen").with_extension(EXE_EXTENSION);
@@ -553,7 +563,7 @@ impl Step for Llvm {
// libLLVM.dylib will be built. However, llvm-config will still look
// for a versioned path like libLLVM-14.dylib. Manually create a symbolic
// link to make llvm-config happy.
- if llvm_link_shared && target.contains("apple-darwin") {
+ if builder.llvm_link_shared() && target.contains("apple-darwin") {
let mut cmd = Command::new(&build_llvm_config);
let version = output(cmd.arg("--version"));
let major = version.split('.').next().unwrap();
@@ -568,12 +578,34 @@ impl Step for Llvm {
}
}
+ // After LLVM is built, we modify (instrument or optimize) the libLLVM.so library file
+ // in place. This is fine, because currently we do not support incrementally rebuilding
+ // LLVM after a configuration change, so to rebuild it the build files have to be removed,
+ // which will also remove these modified files.
+ if builder.config.llvm_bolt_profile_generate {
+ instrument_with_bolt_inplace(&get_built_llvm_lib_path(&build_llvm_config));
+ }
+ if let Some(path) = &builder.config.llvm_bolt_profile_use {
+ optimize_library_with_bolt_inplace(
+ &get_built_llvm_lib_path(&build_llvm_config),
+ &Path::new(path),
+ );
+ }
+
t!(stamp.write());
build_llvm_config
}
}
+/// Returns path to a built LLVM library (libLLVM.so).
+/// Assumes that we have built LLVM into a single library file.
+fn get_built_llvm_lib_path(llvm_config_path: &Path) -> PathBuf {
+ let mut cmd = Command::new(llvm_config_path);
+ cmd.arg("--libfiles");
+ PathBuf::from(output(&mut cmd).trim())
+}
+
fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
if !builder.config.llvm_version_check {
return;
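`bolt.rs` appears in the diffstat but not in this excerpt, so here is only a hedged sketch of the kind of in-place instrumentation and optimization the `Llvm` step invokes above. It assumes `llvm-bolt` is on `PATH` and accepts the `-instrument`, `-o` and `-data=` arguments; the real module may differ in structure and flags:

```rust
use std::path::Path;
use std::process::Command;

/// Produce a BOLT-instrumented copy of `lib` and move it over the original file.
fn instrument_with_bolt_inplace(lib: &Path) {
    let tmp = lib.with_extension("bolt-instrumented");
    let status = Command::new("llvm-bolt")
        .arg(lib)
        .arg("-instrument")
        .arg("-o")
        .arg(&tmp)
        .status()
        .expect("failed to spawn llvm-bolt");
    assert!(status.success(), "BOLT instrumentation failed");
    std::fs::rename(&tmp, lib).expect("failed to replace library with instrumented copy");
}

/// Optimize `lib` in place using a previously collected BOLT profile.
fn optimize_library_with_bolt_inplace(lib: &Path, profile: &Path) {
    let tmp = lib.with_extension("bolt-optimized");
    let status = Command::new("llvm-bolt")
        .arg(lib)
        .arg("-o")
        .arg(&tmp)
        .arg(format!("-data={}", profile.display()))
        .status()
        .expect("failed to spawn llvm-bolt");
    assert!(status.success(), "BOLT optimization failed");
    std::fs::rename(&tmp, lib).expect("failed to replace library with optimized copy");
}
```

Because the modified file overwrites the library returned by `llvm-config --libfiles`, rebuilding LLVM requires removing the build files, as the comment in the `Llvm` step notes.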
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index cae41286f..e90551725 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -74,7 +74,7 @@ pub fn check(build: &mut Build) {
let mut cmd_finder = Finder::new();
// If we've got a git directory we're gonna need git to update
// submodules and learn about various other aspects.
- if build.rust_info.is_git() {
+ if build.rust_info.is_managed_git_subrepository() {
cmd_finder.must_have("git");
}
diff --git a/src/bootstrap/tarball.rs b/src/bootstrap/tarball.rs
index e30067a5c..d999b6c15 100644
--- a/src/bootstrap/tarball.rs
+++ b/src/bootstrap/tarball.rs
@@ -4,6 +4,7 @@ use std::{
};
use crate::builder::Builder;
+use crate::channel;
use crate::util::t;
#[derive(Copy, Clone)]
@@ -297,8 +298,9 @@ impl<'a> Tarball<'a> {
fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball {
t!(std::fs::create_dir_all(&self.overlay_dir));
self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder));
- if let Some(sha) = self.builder.rust_sha() {
- self.builder.create(&self.overlay_dir.join("git-commit-hash"), &sha);
+ if let Some(info) = self.builder.rust_info.info() {
+ channel::write_commit_hash_file(&self.overlay_dir, &info.sha);
+ channel::write_commit_info_file(&self.overlay_dir, info);
}
for file in self.overlay.legal_and_readme() {
self.builder.install(&self.builder.src.join(file), &self.overlay_dir, 0o644);
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index dd41f8453..791c35c36 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -23,7 +23,7 @@ use crate::toolstate::ToolState;
use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t};
use crate::{envify, CLang, DocTests, GitRepo, Mode};
-const ADB_TEST_DIR: &str = "/data/tmp/work";
+const ADB_TEST_DIR: &str = "/data/local/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
@@ -461,139 +461,158 @@ impl Step for RustDemangler {
pub struct Miri {
stage: u32,
host: TargetSelection,
+ target: TargetSelection,
}
impl Step for Miri {
type Output = ();
- const ONLY_HOSTS: bool = true;
+ const ONLY_HOSTS: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/miri")
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Miri { stage: run.builder.top_stage, host: run.target });
+ run.builder.ensure(Miri {
+ stage: run.builder.top_stage,
+ host: run.build_triple(),
+ target: run.target,
+ });
}
/// Runs `cargo test` for miri.
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let host = self.host;
+ let target = self.target;
let compiler = builder.compiler(stage, host);
// We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri.
// Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage.
let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host);
- let miri =
- builder.ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() });
- let cargo_miri = builder.ensure(tool::CargoMiri {
- compiler,
- target: self.host,
- extra_features: Vec::new(),
- });
+ let miri = builder
+ .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
+ let _cargo_miri = builder
+ .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
// The stdlib we need might be at a different stage. And just asking for the
// sysroot does not seem to populate it, so we do that first.
builder.ensure(compile::Std::new(compiler_std, host));
let sysroot = builder.sysroot(compiler_std);
- if let (Some(miri), Some(_cargo_miri)) = (miri, cargo_miri) {
- let mut cargo =
- builder.cargo(compiler, Mode::ToolRustc, SourceType::Submodule, host, "install");
- cargo.arg("xargo");
- // Configure `cargo install` path. cargo adds a `bin/`.
- cargo.env("CARGO_INSTALL_ROOT", &builder.out);
-
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
- // # Run `cargo miri setup`.
- let mut cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolRustc,
- host,
- "run",
- "src/tools/miri/cargo-miri",
- SourceType::Submodule,
- &[],
- );
- cargo.add_rustc_lib_path(builder, compiler);
- cargo.arg("--").arg("miri").arg("setup");
-
- // Tell `cargo miri setup` where to find the sources.
- cargo.env("XARGO_RUST_SRC", builder.src.join("library"));
- // Tell it where to find Miri.
- cargo.env("MIRI", &miri);
- // Debug things.
- cargo.env("RUST_BACKTRACE", "1");
- // Let cargo-miri know where xargo ended up.
- cargo.env("XARGO_CHECK", builder.out.join("bin").join("xargo-check"));
-
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
+ // # Run `cargo miri setup` for the given target.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "run",
+ "src/tools/miri/cargo-miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
+ cargo.arg("--").arg("miri").arg("setup");
+ cargo.arg("--target").arg(target.rustc_target_arg());
+
+ // Tell `cargo miri setup` where to find the sources.
+ cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
+ // Tell it where to find Miri.
+ cargo.env("MIRI", &miri);
+ // Debug things.
+ cargo.env("RUST_BACKTRACE", "1");
+
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
+
+ // # Determine where Miri put its sysroot.
+ // To this end, we run `cargo miri setup --print-sysroot` and capture the output.
+ // (We do this separately from the above so that when the setup actually
+ // happens we get some output.)
+ // We re-use the `cargo` from above.
+ cargo.arg("--print-sysroot");
+
+ // FIXME: Is there a way in which we can re-use the usual `run` helpers?
+ let miri_sysroot = if builder.config.dry_run {
+ String::new()
+ } else {
+ builder.verbose(&format!("running: {:?}", cargo));
+ let out =
+ cargo.output().expect("We already ran `cargo miri setup` before and that worked");
+ assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
+ // Output is "<sysroot>\n".
+ let stdout = String::from_utf8(out.stdout)
+ .expect("`cargo miri setup` stdout is not valid UTF-8");
+ let sysroot = stdout.trim_end();
+ builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
+ sysroot.to_owned()
+ };
- // # Determine where Miri put its sysroot.
- // To this end, we run `cargo miri setup --print-sysroot` and capture the output.
- // (We do this separately from the above so that when the setup actually
- // happens we get some output.)
- // We re-use the `cargo` from above.
- cargo.arg("--print-sysroot");
-
- // FIXME: Is there a way in which we can re-use the usual `run` helpers?
- let miri_sysroot = if builder.config.dry_run {
- String::new()
- } else {
- builder.verbose(&format!("running: {:?}", cargo));
- let out = cargo
- .output()
- .expect("We already ran `cargo miri setup` before and that worked");
- assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
- // Output is "<sysroot>\n".
- let stdout = String::from_utf8(out.stdout)
- .expect("`cargo miri setup` stdout is not valid UTF-8");
- let sysroot = stdout.trim_end();
- builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
- sysroot.to_owned()
- };
-
- // # Run `cargo test`.
- let mut cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolRustc,
- host,
- "test",
- "src/tools/miri",
- SourceType::Submodule,
- &[],
- );
- cargo.add_rustc_lib_path(builder, compiler);
-
- // miri tests need to know about the stage sysroot
- cargo.env("MIRI_SYSROOT", miri_sysroot);
- cargo.env("MIRI_HOST_SYSROOT", sysroot);
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- cargo.env("MIRI", miri);
- // propagate --bless
- if builder.config.cmd.bless() {
- cargo.env("MIRI_BLESS", "Gesundheit");
- }
+ // # Run `cargo test`.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "test",
+ "src/tools/miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
- cargo.arg("--").args(builder.config.cmd.test_args());
+ // miri tests need to know about the stage sysroot
+ cargo.env("MIRI_SYSROOT", &miri_sysroot);
+ cargo.env("MIRI_HOST_SYSROOT", sysroot);
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI", &miri);
+ // propagate --bless
+ if builder.config.cmd.bless() {
+ cargo.env("MIRI_BLESS", "Gesundheit");
+ }
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
+ // Set the target.
+ cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg());
+ // Forward test filters.
+ cargo.arg("--").args(builder.config.cmd.test_args());
- // # Done!
- builder.save_toolstate("miri", ToolState::TestPass);
- } else {
- eprintln!("failed to test miri: could not build");
- }
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
+
+ // # Run `cargo miri test`.
+ // This is just a smoke test (Miri's own CI invokes this in a bunch of different ways and ensures
+ // that we get the desired output), but that is sufficient to make sure that the libtest harness
+ // itself executes properly under Miri.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "run",
+ "src/tools/miri/cargo-miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
+ cargo.arg("--").arg("miri").arg("test");
+ cargo
+ .arg("--manifest-path")
+ .arg(builder.src.join("src/tools/miri/test-cargo-miri/Cargo.toml"));
+ cargo.arg("--target").arg(target.rustc_target_arg());
+ cargo.arg("--tests"); // don't run doctests, they are too confused by the staging
+ cargo.arg("--").args(builder.config.cmd.test_args());
+
+ // Tell `cargo miri` where to find things.
+ cargo.env("MIRI_SYSROOT", &miri_sysroot);
+ cargo.env("MIRI_HOST_SYSROOT", sysroot);
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI", &miri);
+ // Debug things.
+ cargo.env("RUST_BACKTRACE", "1");
+
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
}
}
@@ -1400,7 +1419,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
}
let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] };
flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests));
- flags.push(builder.config.cmd.rustc_args().join(" "));
+ flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string()));
if let Some(linker) = builder.linker(target) {
cmd.arg("--linker").arg(linker);
@@ -1409,12 +1428,16 @@ note: if you're sure you want to do this, please open an issue as to why. In the
let mut hostflags = flags.clone();
hostflags.push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display()));
hostflags.extend(builder.lld_flags(compiler.host));
- cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+ for flag in hostflags {
+ cmd.arg("--host-rustcflags").arg(flag);
+ }
let mut targetflags = flags;
targetflags.push(format!("-Lnative={}", builder.test_helpers_out(target).display()));
targetflags.extend(builder.lld_flags(target));
- cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+ for flag in targetflags {
+ cmd.arg("--target-rustcflags").arg(flag);
+ }
cmd.arg("--python").arg(builder.python());
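The sysroot discovery in the Miri step above runs the same `cargo miri setup` command twice: once for its side effect, then again with `--print-sysroot` to capture the path it prints. A self-contained sketch of the capture half (not part of the patch; actually running it requires a toolchain with `cargo miri` installed):

```rust
use std::process::Command;

/// Run the given command and return its trimmed stdout, mirroring how the test step
/// reads the output of `cargo miri setup --print-sysroot`.
fn capture_sysroot(mut cmd: Command) -> String {
    let out = cmd.output().expect("failed to run `cargo miri setup`");
    assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
    // Output is "<sysroot>\n".
    String::from_utf8(out.stdout)
        .expect("`cargo miri setup` stdout is not valid UTF-8")
        .trim_end()
        .to_owned()
}

fn main() {
    let mut cmd = Command::new("cargo");
    cmd.args(["miri", "setup", "--print-sysroot"]);
    println!("Miri sysroot: {}", capture_sysroot(cmd));
}
```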
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index 7d4ed24b6..eec74b267 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -698,7 +698,7 @@ pub struct RustAnalyzer {
impl Step for RustAnalyzer {
type Output = Option<PathBuf>;
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = false;
+ const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
@@ -742,18 +742,22 @@ pub struct RustAnalyzerProcMacroSrv {
impl Step for RustAnalyzerProcMacroSrv {
type Output = Option<PathBuf>;
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = false;
+ const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.path("src/tools/rust-analyzer").default_condition(
- builder.config.extended
- && builder
- .config
- .tools
- .as_ref()
- .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")),
- )
+
+ // Allow building `rust-analyzer-proc-macro-srv` both as part of `rust-analyzer` and as a stand-alone tool.
+ run.path("src/tools/rust-analyzer")
+ .path("src/tools/rust-analyzer/crates/proc-macro-srv-cli")
+ .default_condition(
+ builder.config.extended
+ && builder.config.tools.as_ref().map_or(true, |tools| {
+ tools.iter().any(|tool| {
+ tool == "rust-analyzer" || tool == "rust-analyzer-proc-macro-srv"
+ })
+ }),
+ )
}
fn make_run(run: RunConfig<'_>) {
@@ -764,7 +768,7 @@ impl Step for RustAnalyzerProcMacroSrv {
}
fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
- builder.ensure(ToolBuild {
+ let path = builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: "rust-analyzer-proc-macro-srv",
@@ -773,7 +777,15 @@ impl Step for RustAnalyzerProcMacroSrv {
extra_features: vec!["proc-macro-srv/sysroot-abi".to_owned()],
is_optional_tool: false,
source_type: SourceType::InTree,
- })
+ })?;
+
+ // Copy `rust-analyzer-proc-macro-srv` to `<sysroot>/libexec/`
+ // so that rust-analyzer can use it.
+ let libexec_path = builder.sysroot(self.compiler).join("libexec");
+ t!(fs::create_dir_all(&libexec_path));
+ builder.copy(&path, &libexec_path.join("rust-analyzer-proc-macro-srv"));
+
+ Some(path)
}
}
@@ -856,12 +868,12 @@ tool_extended!((self, builder),
Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true, in_tree=true, {};
CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true, in_tree=true, {};
Clippy, "src/tools/clippy", "clippy-driver", stable=true, in_tree=true, {};
- Miri, "src/tools/miri", "miri", stable=false, {};
- CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, {};
- Rls, "src/tools/rls", "rls", stable=true, {};
+ Miri, "src/tools/miri", "miri", stable=false, in_tree=true, {};
+ CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, in_tree=true, {};
// FIXME: tool_std is not quite right, we shouldn't allow nightly features.
// But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0,
// and this is close enough for now.
+ Rls, "src/tools/rls", "rls", stable=true, in_tree=true, tool_std=true, {};
RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, in_tree=true, tool_std=true, {};
Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, in_tree=true, {};
);
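The `libexec` copy added to `RustAnalyzerProcMacroSrv::run` above is a plain file copy into the sysroot. A sketch of the same operation outside the builder (not part of the patch; paths are illustrative):

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Place a built `rust-analyzer-proc-macro-srv` binary where rust-analyzer looks for it:
/// `<sysroot>/libexec/rust-analyzer-proc-macro-srv`.
fn install_proc_macro_srv(built_binary: &Path, sysroot: &Path) -> io::Result<()> {
    let libexec = sysroot.join("libexec");
    fs::create_dir_all(&libexec)?;
    fs::copy(built_binary, libexec.join("rust-analyzer-proc-macro-srv"))?;
    Ok(())
}

fn main() -> io::Result<()> {
    // Illustrative paths only.
    install_proc_macro_srv(
        Path::new("target/release/rust-analyzer-proc-macro-srv"),
        Path::new("/tmp/example-sysroot"),
    )
}
```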
diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs
index f3a6759ab..1a1774432 100644
--- a/src/bootstrap/toolstate.rs
+++ b/src/bootstrap/toolstate.rs
@@ -77,7 +77,6 @@ static STABLE_TOOLS: &[(&str, &str)] = &[
// though, as otherwise we will be unable to file an issue if they start
// failing.
static NIGHTLY_TOOLS: &[(&str, &str)] = &[
- ("miri", "src/tools/miri"),
("embedded-book", "src/doc/embedded-book"),
// ("rustc-dev-guide", "src/doc/rustc-dev-guide"),
];