path: root/src/bootstrap
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:11:28 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:11:28 +0000
commit    94a0819fe3a0d679c3042a77bfe6a2afc505daea (patch)
tree      2b827afe6a05f3538db3f7803a88c4587fe85648 /src/bootstrap
parent    Adding upstream version 1.64.0+dfsg1. (diff)
download  rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.tar.xz
          rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.zip
Adding upstream version 1.66.0+dfsg1. (upstream/1.66.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/bootstrap')
-rw-r--r--   src/bootstrap/CHANGELOG.md             |    2
-rw-r--r--   src/bootstrap/Cargo.lock               |    9
-rw-r--r--   src/bootstrap/Cargo.toml               |    2
-rw-r--r--   src/bootstrap/README.md                |   64
-rw-r--r--   src/bootstrap/bin/rustc.rs             |   23
-rw-r--r--   src/bootstrap/bin/rustdoc.rs           |   11
-rw-r--r--   src/bootstrap/bolt.rs                  |   71
-rw-r--r--   src/bootstrap/bootstrap.py             |   20
-rw-r--r--   src/bootstrap/build.rs                 |   36
-rw-r--r--   src/bootstrap/builder.rs               |   68
-rw-r--r--   src/bootstrap/builder/tests.rs         |    6
-rw-r--r--   src/bootstrap/channel.rs               |   68
-rw-r--r--   src/bootstrap/check.rs                 |    4
-rw-r--r--   src/bootstrap/compile.rs               |   78
-rw-r--r--   src/bootstrap/config.rs                |  189
-rw-r--r--   src/bootstrap/dist.rs                  |  251
-rw-r--r--   src/bootstrap/doc.rs                   |  287
-rw-r--r--   src/bootstrap/download-ci-llvm-stamp   |    2
-rw-r--r--   src/bootstrap/flags.rs                 |   41
-rw-r--r--   src/bootstrap/install.rs               |   20
-rw-r--r--   src/bootstrap/lib.rs                   |   99
-rw-r--r--   src/bootstrap/mk/Makefile.in           |   11
-rw-r--r--   src/bootstrap/native.rs                |  201
-rw-r--r--   src/bootstrap/run.rs                   |   22
-rw-r--r--   src/bootstrap/sanity.rs                |    2
-rw-r--r--   src/bootstrap/setup.rs                 |    2
-rw-r--r--   src/bootstrap/tarball.rs               |   14
-rw-r--r--   src/bootstrap/test.rs                  |  346
-rw-r--r--   src/bootstrap/tool.rs                  |   64
-rw-r--r--   src/bootstrap/toolstate.rs             |    2
-rw-r--r--   src/bootstrap/util.rs                  |   14
31 files changed, 1309 insertions, 720 deletions
diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md
index 85afc1f5f..64b74ecc9 100644
--- a/src/bootstrap/CHANGELOG.md
+++ b/src/bootstrap/CHANGELOG.md
@@ -13,6 +13,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Change the names for `dist` commands to match the component they generate. [#90684](https://github.com/rust-lang/rust/pull/90684)
- The `build.fast-submodules` option has been removed. Fast submodule checkouts are enabled unconditionally. Automatic submodule handling can still be disabled with `build.submodules = false`.
- Several unsupported `./configure` options have been removed: `optimize`, `parallel-compiler`. These can still be enabled with `--set`, although it isn't recommended.
+- `remote-test-server`'s `verbose` argument has been removed in favor of the `--verbose` flag
+- `remote-test-server`'s `remote` argument has been removed in favor of the `--bind` flag. Use `--bind 0.0.0.0:12345` to replicate the behavior of the `remote` argument.
### Non-breaking changes
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index 664ffa1dd..baecca44c 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -53,7 +53,6 @@ dependencies = [
"hex",
"ignore",
"libc",
- "num_cpus",
"once_cell",
"opener",
"pretty_assertions",
@@ -446,9 +445,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.39"
+version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
+checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
"unicode-ident",
]
@@ -597,9 +596,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.95"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
"proc-macro2",
"quote",
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 84f6aaf99..95e711737 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -38,7 +38,6 @@ test = false
cmake = "0.1.38"
fd-lock = "3.0.6"
filetime = "0.2"
-num_cpus = "1.0"
getopts = "0.2.19"
cc = "1.0.69"
libc = "0.2"
@@ -68,6 +67,7 @@ features = [
"psapi",
"impl-default",
"timezoneapi",
+ "winbase",
]
[dev-dependencies]
diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md
index a2e596bf4..985727bdd 100644
--- a/src/bootstrap/README.md
+++ b/src/bootstrap/README.md
@@ -1,7 +1,7 @@
# rustbuild - Bootstrapping Rust
This is an in-progress README which is targeted at helping to explain how Rust
-is bootstrapped and in general some of the technical details of the build
+is bootstrapped and in general, some of the technical details of the build
system.
## Using rustbuild
@@ -12,7 +12,7 @@ The rustbuild build system has a primary entry point, a top level `x.py` script:
$ python ./x.py build
```
-Note that if you're on Unix you should be able to execute the script directly:
+Note that if you're on Unix, you should be able to execute the script directly:
```sh
$ ./x.py build
@@ -20,8 +20,8 @@ $ ./x.py build
The script accepts commands, flags, and arguments to determine what to do:
-* `build` - a general purpose command for compiling code. Alone `build` will
- bootstrap the entire compiler, and otherwise arguments passed indicate what to
+* `build` - a general purpose command for compiling code. Alone, `build` will
+ bootstrap the entire compiler, and otherwise, arguments passed indicate what to
build. For example:
```
@@ -38,7 +38,7 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py build --stage 0 library/test
```
- If files are dirty that would normally be rebuilt from stage 0, that can be
+ If files that would normally be rebuilt from stage 0 are dirty, the rebuild can be
overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
that belong to stage n or earlier:
@@ -47,8 +47,8 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py build --keep-stage 0
```
-* `test` - a command for executing unit tests. Like the `build` command this
- will execute the entire test suite by default, and otherwise it can be used to
+* `test` - a command for executing unit tests. Like the `build` command, this
+ will execute the entire test suite by default, and otherwise, it can be used to
select which test suite is run:
```
@@ -75,7 +75,7 @@ The script accepts commands, flags, and arguments to determine what to do:
./x.py test src/doc
```
-* `doc` - a command for building documentation. Like above can take arguments
+* `doc` - a command for building documentation. Like above, can take arguments
for what to document.
## Configuring rustbuild
@@ -110,12 +110,12 @@ compiler. What actually happens when you invoke rustbuild is:
compiles the build system itself (this folder). Finally, it then invokes the
actual `bootstrap` binary build system.
2. In Rust, `bootstrap` will slurp up all configuration, perform a number of
- sanity checks (compilers exist for example), and then start building the
+ sanity checks (whether compilers exist, for example), and then start building the
stage0 artifacts.
-3. The stage0 `cargo` downloaded earlier is used to build the standard library
+3. The stage0 `cargo`, downloaded earlier, is used to build the standard library
and the compiler, and then these binaries are then copied to the `stage1`
directory. That compiler is then used to generate the stage1 artifacts which
- are then copied to the stage2 directory, and then finally the stage2
+ are then copied to the stage2 directory, and then finally, the stage2
artifacts are generated using that compiler.
The goal of each stage is to (a) leverage Cargo as much as possible and failing
@@ -149,7 +149,7 @@ like this:
build/
# Location where the stage0 compiler downloads are all cached. This directory
- # only contains the tarballs themselves as they're extracted elsewhere.
+ # only contains the tarballs themselves, as they're extracted elsewhere.
cache/
2015-12-19/
2016-01-15/
@@ -172,10 +172,10 @@ build/
# hand.
x86_64-unknown-linux-gnu/
- # The build artifacts for the `compiler-rt` library for the target this
- # folder is under. The exact layout here will likely depend on the platform,
- # and this is also built with CMake so the build system is also likely
- # different.
+ # The build artifacts for the `compiler-rt` library for the target that
+ # this folder is under. The exact layout here will likely depend on the
+ # platform, and this is also built with CMake, so the build system is
+ # also likely different.
compiler-rt/
build/
@@ -183,11 +183,11 @@ build/
llvm/
# build folder (e.g. the platform-specific build system). Like with
- # compiler-rt this is compiled with CMake
+ # compiler-rt, this is compiled with CMake
build/
# Installation of LLVM. Note that we run the equivalent of 'make install'
- # for LLVM to setup these folders.
+ # for LLVM, to setup these folders.
bin/
lib/
include/
@@ -206,18 +206,18 @@ build/
# Location where the stage0 Cargo and Rust compiler are unpacked. This
# directory is purely an extracted and overlaid tarball of these two (done
- # by the bootstrapy python script). In theory the build system does not
+ # by the bootstrap python script). In theory, the build system does not
# modify anything under this directory afterwards.
stage0/
- # These to build directories are the cargo output directories for builds of
- # the standard library and compiler, respectively. Internally these may also
+ # These to-build directories are the cargo output directories for builds of
+ # the standard library and compiler, respectively. Internally, these may also
# have other target directories, which represent artifacts being compiled
# from the host to the specified target.
#
# Essentially, each of these directories is filled in by one `cargo`
# invocation. The build system instruments calling Cargo in the right order
- # with the right variables to ensure these are filled in correctly.
+ # with the right variables to ensure that these are filled in correctly.
stageN-std/
stageN-test/
stageN-rustc/
@@ -232,8 +232,8 @@ build/
# being compiled (e.g. after libstd has been built), *this* is used as the
# sysroot for the stage0 compiler being run.
#
- # Basically this directory is just a temporary artifact use to configure the
- # stage0 compiler to ensure that the libstd we just built is used to
+ # Basically, this directory is just a temporary artifact used to configure the
+ # stage0 compiler to ensure that the libstd that we just built is used to
# compile the stage1 compiler.
stage0-sysroot/lib/
@@ -242,7 +242,7 @@ build/
# system will link (using hard links) output from stageN-{std,rustc} into
# each of these directories.
#
- # In theory there is no extra build output in these directories.
+ # In theory, there is no extra build output in these directories.
stage1/
stage2/
stage3/
@@ -265,14 +265,14 @@ structure here serves two goals:
depend on `std`, so libstd is a separate project compiled ahead of time
before the actual compiler builds.
2. Splitting "host artifacts" from "target artifacts". That is, when building
- code for an arbitrary target you don't need the entire compiler, but you'll
+ code for an arbitrary target, you don't need the entire compiler, but you'll
end up needing libraries like libtest that depend on std but also want to use
crates.io dependencies. Hence, libtest is split out as its own project that
is sequenced after `std` but before `rustc`. This project is built for all
targets.
There is some loss in build parallelism here because libtest can be compiled in
-parallel with a number of rustc artifacts, but in theory the loss isn't too bad!
+parallel with a number of rustc artifacts, but in theory, the loss isn't too bad!
## Build tools
@@ -285,13 +285,13 @@ appropriate libstd/libtest/librustc compile above.
## Extending rustbuild
-So you'd like to add a feature to the rustbuild build system or just fix a bug.
+So, you'd like to add a feature to the rustbuild build system or just fix a bug.
Great! One of the major motivational factors for moving away from `make` is that
Rust is in theory much easier to read, modify, and write. If you find anything
-excessively confusing, please open an issue on this and we'll try to get it
-documented or simplified pronto.
+excessively confusing, please open an issue on this, and we'll try to get it
+documented or simplified, pronto.
-First up, you'll probably want to read over the documentation above as that'll
+First up, you'll probably want to read over the documentation above, as that'll
give you a high level overview of what rustbuild is doing. You also probably
want to play around a bit yourself by just getting it up and running before you
dive too much into the actual build system itself.
@@ -326,7 +326,7 @@ A 'major change' includes
Changes that do not affect contributors to the compiler or users
building rustc from source don't need an update to `VERSION`.
-If you have any questions feel free to reach out on the `#t-infra` channel in
+If you have any questions, feel free to reach out on the `#t-infra` channel in
the [Rust Zulip server][rust-zulip] or ask on internals.rust-lang.org. When
you encounter bugs, please file issues on the rust-lang/rust issue tracker.
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index 40a3cc6d1..776d73b98 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -25,10 +25,11 @@ use std::time::Instant;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
+ let arg = |name| args.windows(2).find(|args| args[0] == name).and_then(|args| args[1].to_str());
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
- let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
+ let target = arg("--target");
let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
@@ -59,15 +60,14 @@ fn main() {
cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
- let crate_name =
- args.windows(2).find(|args| args[0] == "--crate-name").and_then(|args| args[1].to_str());
+ let crate_name = arg("--crate-name");
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
- cmd.arg("-Ztime");
+ cmd.arg("-Ztime-passes");
}
}
}
@@ -106,6 +106,15 @@ fn main() {
{
cmd.arg("-C").arg("panic=abort");
}
+
+ // `-Ztls-model=initial-exec` must not be applied to proc-macros, see
+ // issue https://github.com/rust-lang/rust/issues/100530
+ if env::var("RUSTC_TLS_MODEL_INITIAL_EXEC").is_ok()
+ && arg("--crate-type") != Some("proc-macro")
+ && !matches!(crate_name, Some("proc_macro2" | "quote" | "syn" | "synstructure"))
+ {
+ cmd.arg("-Ztls-model=initial-exec");
+ }
} else {
// FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars
// here, but rather Cargo should know what flags to pass rustc itself.
@@ -130,10 +139,8 @@ fn main() {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`.
- // We also declare that the flag is expected, which is mainly needed for
- // later stages so that they don't warn about #[cfg(bootstrap)],
- // but enabling it for stage 0 too lets any warnings, if they occur,
- // occur more early on, e.g. about #[cfg(bootstrap = "foo")].
+ // We also declare that the flag is expected, which we need to do to not
+ // get warnings about it being unexpected.
if stage == "0" {
cmd.arg("--cfg=bootstrap");
}
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
index 87c1d22e7..e69cab956 100644
--- a/src/bootstrap/bin/rustdoc.rs
+++ b/src/bootstrap/bin/rustdoc.rs
@@ -11,6 +11,7 @@ include!("../dylib_util.rs");
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
+ let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
@@ -62,6 +63,16 @@ fn main() {
cmd.arg("-Clink-arg=-Wl,--threads=1");
}
}
+ // Cargo doesn't pass RUSTDOCFLAGS to proc_macros:
+ // https://github.com/rust-lang/cargo/issues/4423
+ // Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`.
+ // We also declare that the flag is expected, which we need to do to not
+ // get warnings about it being unexpected.
+ if stage == "0" {
+ cmd.arg("--cfg=bootstrap");
+ }
+ cmd.arg("-Zunstable-options");
+ cmd.arg("--check-cfg=values(bootstrap)");
if verbose > 1 {
eprintln!(
diff --git a/src/bootstrap/bolt.rs b/src/bootstrap/bolt.rs
new file mode 100644
index 000000000..ea37cd470
--- /dev/null
+++ b/src/bootstrap/bolt.rs
@@ -0,0 +1,71 @@
+use std::path::Path;
+use std::process::Command;
+
+/// Uses the `llvm-bolt` binary to instrument the binary/library at the given `path` with BOLT.
+/// When the instrumented artifact is executed, it will generate BOLT profiles into
+/// `/tmp/prof.fdata.<pid>.fdata`.
+pub fn instrument_with_bolt_inplace(path: &Path) {
+ let dir = std::env::temp_dir();
+ let instrumented_path = dir.join("instrumented.so");
+
+ let status = Command::new("llvm-bolt")
+ .arg("-instrument")
+ .arg(&path)
+ // Make sure that each process will write its profiles into a separate file
+ .arg("--instrumentation-file-append-pid")
+ .arg("-o")
+ .arg(&instrumented_path)
+ .status()
+ .expect("Could not instrument artifact using BOLT");
+
+ if !status.success() {
+ panic!("Could not instrument {} with BOLT, exit code {:?}", path.display(), status.code());
+ }
+
+ std::fs::copy(&instrumented_path, path).expect("Cannot copy instrumented artifact");
+ std::fs::remove_file(instrumented_path).expect("Cannot delete instrumented artifact");
+}
+
+/// Uses the `llvm-bolt` binary to optimize the binary/library at the given `path` with BOLT,
+/// using merged profiles from `profile_path`.
+///
+/// The recorded profiles have to be merged using the `merge-fdata` tool from LLVM and the merged
+/// profile path should be then passed to this function.
+pub fn optimize_library_with_bolt_inplace(path: &Path, profile_path: &Path) {
+ let dir = std::env::temp_dir();
+ let optimized_path = dir.join("optimized.so");
+
+ let status = Command::new("llvm-bolt")
+ .arg(&path)
+ .arg("-data")
+ .arg(&profile_path)
+ .arg("-o")
+ .arg(&optimized_path)
+ // Reorder basic blocks within functions
+ .arg("-reorder-blocks=ext-tsp")
+ // Reorder functions within the binary
+ .arg("-reorder-functions=hfsort+")
+ // Split function code into hot and cold regions
+ .arg("-split-functions=2")
+ // Split as many basic blocks as possible
+ .arg("-split-all-cold")
+ // Move jump tables to a separate section
+ .arg("-jump-tables=move")
+ // Use GNU_STACK program header for new segment (workaround for issues with strip/objcopy)
+ .arg("-use-gnu-stack")
+ // Fold functions with identical code
+ .arg("-icf=1")
+ // Update DWARF debug info in the final binary
+ .arg("-update-debug-sections")
+ // Print optimization statistics
+ .arg("-dyno-stats")
+ .status()
+ .expect("Could not optimize artifact using BOLT");
+
+ if !status.success() {
+ panic!("Could not optimize {} with BOLT, exit code {:?}", path.display(), status.code());
+ }
+
+ std::fs::copy(&optimized_path, path).expect("Cannot copy optimized artifact");
+ std::fs::remove_file(optimized_path).expect("Cannot delete optimized artifact");
+}
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 03eec02a8..57128685d 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -85,7 +85,7 @@ def _download(path, url, probably_big, verbose, exception):
option = "-#"
else:
option = "-s"
- # If curl is not present on Win32, we shoud not sys.exit
+ # If curl is not present on Win32, we should not sys.exit
# but raise `CalledProcessError` or `OSError` instead
require(["curl", "--version"], exception=platform_is_win32)
run(["curl", option,
@@ -732,9 +732,19 @@ class RustBuild(object):
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
+ # Export Stage0 snapshot compiler related env variables
+ build_section = "target.{}".format(self.build)
+ host_triple_sanitized = self.build.replace("-", "_")
+ var_data = {
+ "CC": "cc", "CXX": "cxx", "LD": "linker", "AR": "ar", "RANLIB": "ranlib"
+ }
+ for var_name, toml_key in var_data.items():
+ toml_val = self.get_toml(toml_key, build_section)
+ if toml_val != None:
+ env["{}_{}".format(var_name, host_triple_sanitized)] = toml_val
+
# preserve existing RUSTFLAGS
env.setdefault("RUSTFLAGS", "")
- build_section = "target.{}".format(self.build)
target_features = []
if self.get_toml("crt-static", build_section) == "true":
target_features += ["+crt-static"]
@@ -742,9 +752,6 @@ class RustBuild(object):
target_features += ["-crt-static"]
if target_features:
env["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features))
- target_linker = self.get_toml("linker", build_section)
- if target_linker is not None:
- env["RUSTFLAGS"] += " -C linker=" + target_linker
env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
env["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros"
if self.get_toml("deny-warnings", "rust") != "false":
@@ -771,7 +778,8 @@ class RustBuild(object):
elif color == "never":
args.append("--color=never")
- run(args, env=env, verbose=self.verbose)
+ # Run this from the source directory so cargo finds .cargo/config
+ run(args, env=env, verbose=self.verbose, cwd=self.rust_root)
def build_triple(self):
"""Build triple as in LLVM
diff --git a/src/bootstrap/build.rs b/src/bootstrap/build.rs
index ab34d5c1e..cd1f41802 100644
--- a/src/bootstrap/build.rs
+++ b/src/bootstrap/build.rs
@@ -1,43 +1,7 @@
-use env::consts::{EXE_EXTENSION, EXE_SUFFIX};
use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-/// Given an executable called `name`, return the filename for the
-/// executable for a particular target.
-pub fn exe(name: &PathBuf) -> PathBuf {
- if EXE_EXTENSION != "" && name.extension() != Some(EXE_EXTENSION.as_ref()) {
- let mut name: OsString = name.clone().into();
- name.push(EXE_SUFFIX);
- name.into()
- } else {
- name.clone()
- }
-}
fn main() {
let host = env::var("HOST").unwrap();
println!("cargo:rerun-if-changed=build.rs");
- println!("cargo:rerun-if-env-changed=RUSTC");
println!("cargo:rustc-env=BUILD_TRIPLE={}", host);
-
- // This may not be a canonicalized path.
- let mut rustc = PathBuf::from(env::var_os("RUSTC").unwrap());
-
- if rustc.is_relative() {
- println!("cargo:rerun-if-env-changed=PATH");
- for dir in env::split_paths(&env::var_os("PATH").unwrap_or_default()) {
- let absolute = dir.join(&exe(&rustc));
- if absolute.exists() {
- rustc = absolute;
- break;
- }
- }
- }
- assert!(rustc.is_absolute());
-
- // FIXME: if the path is not utf-8, this is going to break. Unfortunately
- // Cargo doesn't have a way for us to specify non-utf-8 paths easily, so
- // we'll need to invent some encoding scheme if this becomes a problem.
- println!("cargo:rustc-env=RUSTC={}", rustc.to_str().unwrap());
}
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 0ab4824ac..8b144f146 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -647,9 +647,9 @@ impl<'a> Builder<'a> {
test::CrateRustdocJsonTypes,
test::Linkcheck,
test::TierCheck,
+ test::ReplacePlaceholderTest,
test::Cargotest,
test::Cargo,
- test::Rls,
test::RustAnalyzer,
test::ErrorIndex,
test::Distcheck,
@@ -704,10 +704,12 @@ impl<'a> Builder<'a> {
doc::Miri,
doc::EmbeddedBook,
doc::EditionGuide,
+ doc::StyleGuide,
),
Kind::Dist => describe!(
dist::Docs,
dist::RustcDocs,
+ dist::JsonDocs,
dist::Mingw,
dist::Rustc,
dist::Std,
@@ -723,6 +725,7 @@ impl<'a> Builder<'a> {
dist::Miri,
dist::LlvmTools,
dist::RustDev,
+ dist::Bootstrap,
dist::Extended,
// It seems that PlainSourceTarball somehow changes how some of the tools
// perceive their dependencies (see #93033) which would invalidate fingerprints
@@ -736,7 +739,6 @@ impl<'a> Builder<'a> {
install::Docs,
install::Std,
install::Cargo,
- install::Rls,
install::RustAnalyzer,
install::Rustfmt,
install::RustDemangler,
@@ -746,7 +748,12 @@ impl<'a> Builder<'a> {
install::Src,
install::Rustc
),
- Kind::Run => describe!(run::ExpandYamlAnchors, run::BuildManifest, run::BumpStage0),
+ Kind::Run => describe!(
+ run::ExpandYamlAnchors,
+ run::BuildManifest,
+ run::BumpStage0,
+ run::ReplaceVersionPlaceholder,
+ ),
// These commands either don't use paths, or they're special-cased in Build::build()
Kind::Clean | Kind::Format | Kind::Setup => vec![],
}
@@ -942,7 +949,7 @@ impl<'a> Builder<'a> {
};
patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
if !fname.extension().map_or(false, |ext| ext == "so") {
- // Finally, set the corret .interp for binaries
+ // Finally, set the correct .interp for binaries
let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
// FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
@@ -958,7 +965,7 @@ impl<'a> Builder<'a> {
let tempfile = self.tempdir().join(dest_path.file_name().unwrap());
// While bootstrap itself only supports http and https downloads, downstream forks might
// need to download components from other protocols. The match allows them adding more
- // protocols without worrying about merge conficts if we change the HTTP implementation.
+ // protocols without worrying about merge conflicts if we change the HTTP implementation.
match url.split_once("://").map(|(proto, _)| proto) {
Some("http") | Some("https") => {
self.download_http_with_retries(&tempfile, url, help_on_error)
@@ -1321,6 +1328,9 @@ impl<'a> Builder<'a> {
) -> Cargo {
let mut cargo = Command::new(&self.initial_cargo);
let out_dir = self.stage_out(compiler, mode);
+ // Run cargo from the source root so it can find .cargo/config.
+ // This matters when using vendoring and the working directory is outside the repository.
+ cargo.current_dir(&self.src);
// Codegen backends are not yet tracked by -Zbinary-dep-depinfo,
// so we need to explicitly clear out if they've been updated.
@@ -1548,13 +1558,12 @@ impl<'a> Builder<'a> {
match mode {
Mode::ToolBootstrap => {
// Restrict the allowed features to those passed by rustbuild, so we don't depend on nightly accidentally.
- // HACK: because anyhow does feature detection in build.rs, we need to allow the backtrace feature too.
- rustflags.arg("-Zallow-features=binary-dep-depinfo,backtrace");
+ rustflags.arg("-Zallow-features=binary-dep-depinfo");
}
Mode::ToolStd => {
// Right now this is just compiletest and a few other tools that build on stable.
// Allow them to use `feature(test)`, but nothing else.
- rustflags.arg("-Zallow-features=binary-dep-depinfo,test,backtrace,proc_macro_internals,proc_macro_diagnostic,proc_macro_span");
+ rustflags.arg("-Zallow-features=binary-dep-depinfo,test,proc_macro_internals,proc_macro_diagnostic,proc_macro_span");
}
Mode::Std | Mode::Rustc | Mode::Codegen | Mode::ToolRustc => {}
}
@@ -1759,23 +1768,21 @@ impl<'a> Builder<'a> {
},
);
- if !target.contains("windows") {
- let needs_unstable_opts = target.contains("linux")
- || target.contains("solaris")
- || target.contains("windows")
- || target.contains("bsd")
- || target.contains("dragonfly")
- || target.contains("illumos");
+ let split_debuginfo_is_stable = target.contains("linux")
+ || target.contains("apple")
+ || (target.contains("msvc")
+ && self.config.rust_split_debuginfo == SplitDebuginfo::Packed)
+ || (target.contains("windows")
+ && self.config.rust_split_debuginfo == SplitDebuginfo::Off);
- if needs_unstable_opts {
- rustflags.arg("-Zunstable-options");
- }
- match self.config.rust_split_debuginfo {
- SplitDebuginfo::Packed => rustflags.arg("-Csplit-debuginfo=packed"),
- SplitDebuginfo::Unpacked => rustflags.arg("-Csplit-debuginfo=unpacked"),
- SplitDebuginfo::Off => rustflags.arg("-Csplit-debuginfo=off"),
- };
+ if !split_debuginfo_is_stable {
+ rustflags.arg("-Zunstable-options");
}
+ match self.config.rust_split_debuginfo {
+ SplitDebuginfo::Packed => rustflags.arg("-Csplit-debuginfo=packed"),
+ SplitDebuginfo::Unpacked => rustflags.arg("-Csplit-debuginfo=unpacked"),
+ SplitDebuginfo::Off => rustflags.arg("-Csplit-debuginfo=off"),
+ };
if self.config.cmd.bless() {
// Bless `expect!` tests.
@@ -1850,7 +1857,7 @@ impl<'a> Builder<'a> {
// so we can't use it by default in general, but we can use it for tools
// and our own internal libraries.
if !mode.must_support_dlopen() && !target.triple.starts_with("powerpc-") {
- rustflags.arg("-Ztls-model=initial-exec");
+ cargo.env("RUSTC_TLS_MODEL_INITIAL_EXEC", "1");
}
if self.config.incremental {
@@ -1938,25 +1945,26 @@ impl<'a> Builder<'a> {
_ => s.display().to_string(),
}
};
+ let triple_underscored = target.triple.replace("-", "_");
let cc = ccacheify(&self.cc(target));
- cargo.env(format!("CC_{}", target.triple), &cc);
+ cargo.env(format!("CC_{}", triple_underscored), &cc);
let cflags = self.cflags(target, GitRepo::Rustc, CLang::C).join(" ");
- cargo.env(format!("CFLAGS_{}", target.triple), &cflags);
+ cargo.env(format!("CFLAGS_{}", triple_underscored), &cflags);
if let Some(ar) = self.ar(target) {
let ranlib = format!("{} s", ar.display());
cargo
- .env(format!("AR_{}", target.triple), ar)
- .env(format!("RANLIB_{}", target.triple), ranlib);
+ .env(format!("AR_{}", triple_underscored), ar)
+ .env(format!("RANLIB_{}", triple_underscored), ranlib);
}
if let Ok(cxx) = self.cxx(target) {
let cxx = ccacheify(&cxx);
let cxxflags = self.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" ");
cargo
- .env(format!("CXX_{}", target.triple), &cxx)
- .env(format!("CXXFLAGS_{}", target.triple), cxxflags);
+ .env(format!("CXX_{}", triple_underscored), &cxx)
+ .env(format!("CXXFLAGS_{}", triple_underscored), cxxflags);
}
}
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index c084e77d3..88bbcc93d 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -236,7 +236,7 @@ mod defaults {
fn doc_default() {
let mut config = configure("doc", &["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false };
+ config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
let mut cache = run_build(&[], config);
let a = TargetSelection::from_user("A");
@@ -547,7 +547,6 @@ mod dist {
config.stage = 0;
config.cmd = Subcommand::Test {
paths: vec!["library/std".into()],
- skip: vec![],
test_args: vec![],
rustc_args: vec![],
fail_fast: true,
@@ -588,7 +587,7 @@ mod dist {
fn doc_ci() {
let mut config = configure(&["A"], &["A"]);
config.compiler_docs = true;
- config.cmd = Subcommand::Doc { paths: Vec::new(), open: false };
+ config.cmd = Subcommand::Doc { paths: Vec::new(), open: false, json: false };
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]);
@@ -618,7 +617,6 @@ mod dist {
let mut config = configure(&["A"], &["A"]);
config.cmd = Subcommand::Test {
paths: vec![],
- skip: vec![],
test_args: vec![],
rustc_args: vec![],
fail_fast: true,
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 1932a0017..258352a21 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -5,10 +5,12 @@
//! `package_vers`, and otherwise indicating to the compiler what it should
//! print out as part of its version information.
+use std::fs;
use std::path::Path;
use std::process::Command;
use crate::util::output;
+use crate::util::t;
use crate::Build;
pub enum GitInfo {
@@ -18,19 +20,25 @@ pub enum GitInfo {
/// If the info should be used (`ignore_git` is false), this will be
/// `Some`, otherwise it will be `None`.
Present(Option<Info>),
+ /// This is not a git repostory, but the info can be fetched from the
+ /// `git-commit-info` file.
+ RecordedForTarball(Info),
}
pub struct Info {
- commit_date: String,
- sha: String,
- short_sha: String,
+ pub commit_date: String,
+ pub sha: String,
+ pub short_sha: String,
}
impl GitInfo {
pub fn new(ignore_git: bool, dir: &Path) -> GitInfo {
// See if this even begins to look like a git dir
if !dir.join(".git").exists() {
- return GitInfo::Absent;
+ match read_commit_info_file(dir) {
+ Some(info) => return GitInfo::RecordedForTarball(info),
+ None => return GitInfo::Absent,
+ }
}
// Make sure git commands work
@@ -65,10 +73,11 @@ impl GitInfo {
}))
}
- fn info(&self) -> Option<&Info> {
+ pub fn info(&self) -> Option<&Info> {
match self {
- GitInfo::Present(info) => info.as_ref(),
GitInfo::Absent => None,
+ GitInfo::Present(info) => info.as_ref(),
+ GitInfo::RecordedForTarball(info) => Some(info),
}
}
@@ -96,10 +105,53 @@ impl GitInfo {
version
}
- pub fn is_git(&self) -> bool {
+ /// Returns whether this directory has a `.git` directory which should be managed by bootstrap.
+ pub fn is_managed_git_subrepository(&self) -> bool {
match self {
- GitInfo::Absent => false,
+ GitInfo::Absent | GitInfo::RecordedForTarball(_) => false,
GitInfo::Present(_) => true,
}
}
+
+ /// Returns whether this is being built from a tarball.
+ pub fn is_from_tarball(&self) -> bool {
+ match self {
+ GitInfo::Absent | GitInfo::Present(_) => false,
+ GitInfo::RecordedForTarball(_) => true,
+ }
+ }
+}
+
+/// Read the commit information from the `git-commit-info` file given the
+/// project root.
+pub fn read_commit_info_file(root: &Path) -> Option<Info> {
+ if let Ok(contents) = fs::read_to_string(root.join("git-commit-info")) {
+ let mut lines = contents.lines();
+ let sha = lines.next();
+ let short_sha = lines.next();
+ let commit_date = lines.next();
+ let info = match (commit_date, sha, short_sha) {
+ (Some(commit_date), Some(sha), Some(short_sha)) => Info {
+ commit_date: commit_date.to_owned(),
+ sha: sha.to_owned(),
+ short_sha: short_sha.to_owned(),
+ },
+ _ => panic!("the `git-comit-info` file is malformed"),
+ };
+ Some(info)
+ } else {
+ None
+ }
+}
+
+/// Write the commit information to the `git-commit-info` file given the project
+/// root.
+pub fn write_commit_info_file(root: &Path, info: &Info) {
+ let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date);
+ t!(fs::write(root.join("git-commit-info"), &commit_info));
+}
+
+/// Write the commit hash to the `git-commit-hash` file given the project root.
+pub fn write_commit_hash_file(root: &Path, sha: &str) {
+ t!(fs::write(root.join("git-commit-hash"), sha));
}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index 4e1e8ef9d..229851238 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -456,8 +456,10 @@ tool_check_step!(Rustdoc, "src/tools/rustdoc", "src/librustdoc", SourceType::InT
// behavior, treat it as in-tree so that any new warnings in clippy will be
// rejected.
tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree);
+// Miri on the other hand is treated as out of tree, since InTree also causes it to
+// be run as part of `check`, which can fail on platforms which libffi-sys has no support for.
tool_check_step!(Miri, "src/tools/miri", SourceType::Submodule);
-tool_check_step!(Rls, "src/tools/rls", SourceType::Submodule);
+tool_check_step!(Rls, "src/tools/rls", SourceType::InTree);
tool_check_step!(Rustfmt, "src/tools/rustfmt", SourceType::InTree);
tool_check_step!(Bootstrap, "src/bootstrap", SourceType::InTree, false);
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index dd2b9d593..e02a10b81 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -21,7 +21,7 @@ use serde::Deserialize;
use crate::builder::Cargo;
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
-use crate::config::{LlvmLibunwind, TargetSelection};
+use crate::config::{LlvmLibunwind, RustcLto, TargetSelection};
use crate::dist;
use crate::native;
use crate::tool::SourceType;
@@ -299,9 +299,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// Determine if we're going to compile in optimized C intrinsics to
// the `compiler-builtins` crate. These intrinsics live in LLVM's
- // `compiler-rt` repository, but our `src/llvm-project` submodule isn't
- // always checked out, so we need to conditionally look for this. (e.g. if
- // an external LLVM is used we skip the LLVM submodule checkout).
+ // `compiler-rt` repository.
//
// Note that this shouldn't affect the correctness of `compiler-builtins`,
// but only its speed. Some intrinsics in C haven't been translated to Rust
@@ -312,8 +310,15 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// If `compiler-rt` is available ensure that the `c` feature of the
// `compiler-builtins` crate is enabled and it's configured to learn where
// `compiler-rt` is located.
- let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
- let compiler_builtins_c_feature = if compiler_builtins_root.exists() {
+ let compiler_builtins_c_feature = if builder.config.optimized_compiler_builtins {
+ if !builder.is_rust_llvm(target) {
+ panic!(
+ "need a managed LLVM submodule for optimized intrinsics support; unset `llvm-config` or `optimized-compiler-builtins`"
+ );
+ }
+
+ builder.update_submodule(&Path::new("src").join("llvm-project"));
+ let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
// Note that `libprofiler_builtins/build.rs` also computes this so if
// you're changing something here please also change that.
cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
@@ -459,7 +464,7 @@ fn copy_sanitizers(
builder.copy(&runtime.path, &dst);
if target == "x86_64-apple-darwin" || target == "aarch64-apple-darwin" {
- // Update the library’s install name to reflect that it has has been renamed.
+ // Update the library’s install name to reflect that it has been renamed.
apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name));
// Upon renaming the install name, the code signature of the file will invalidate,
// so we will sign it again.
@@ -658,7 +663,12 @@ impl Step for Rustc {
// With LLD, we can use ICF (identical code folding) to reduce the executable size
// of librustc_driver/rustc and to improve i-cache utilization.
- if builder.config.use_lld {
+ //
+ // -Wl,[link options] doesn't work on MSVC. However, /OPT:ICF (technically /OPT:REF,ICF)
+ // is already on by default in MSVC optimized builds, which is interpreted as --icf=all:
+ // https://github.com/llvm/llvm-project/blob/3329cec2f79185bafd678f310fafadba2a8c76d2/lld/COFF/Driver.cpp#L1746
+ // https://github.com/rust-lang/rust/blob/f22819bcce4abaff7d1246a56eec493418f9f4ee/compiler/rustc_codegen_ssa/src/back/linker.rs#L827
+ if builder.config.use_lld && !compiler.host.contains("msvc") {
cargo.rustflag("-Clink-args=-Wl,--icf=all");
}
@@ -691,6 +701,28 @@ impl Step for Rustc {
));
}
+ // cfg(bootstrap): remove if condition once the bootstrap compiler supports dylib LTO
+ if compiler.stage != 0 {
+ match builder.config.rust_lto {
+ RustcLto::Thin | RustcLto::Fat => {
+ // Since using LTO for optimizing dylibs is currently experimental,
+ // we need to pass -Zdylib-lto.
+ cargo.rustflag("-Zdylib-lto");
+ // Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when
+ // compiling dylibs (and their dependencies), even when LTO is enabled for the
+ // crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here.
+ let lto_type = match builder.config.rust_lto {
+ RustcLto::Thin => "thin",
+ RustcLto::Fat => "fat",
+ _ => unreachable!(),
+ };
+ cargo.rustflag(&format!("-Clto={}", lto_type));
+ cargo.rustflag("-Cembed-bitcode=yes");
+ }
+ RustcLto::ThinLocal => { /* Do nothing, this is the default */ }
+ }
+ }
+
builder.info(&format!(
"Building stage{} compiler artifacts ({} -> {})",
compiler.stage, &compiler.host, target
@@ -1094,10 +1126,13 @@ impl Step for Sysroot {
/// 1-3.
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
+ let host_dir = builder.out.join(&compiler.host.triple);
let sysroot = if compiler.stage == 0 {
- builder.out.join(&compiler.host.triple).join("stage0-sysroot")
+ host_dir.join("stage0-sysroot")
+ } else if builder.download_rustc() {
+ host_dir.join("ci-rustc-sysroot")
} else {
- builder.out.join(&compiler.host.triple).join(format!("stage{}", compiler.stage))
+ host_dir.join(format!("stage{}", compiler.stage))
};
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
@@ -1108,6 +1143,11 @@ impl Step for Sysroot {
builder.config.build, compiler.host,
"Cross-compiling is not yet supported with `download-rustc`",
);
+
+ // #102002, cleanup stage1 and stage0-sysroot folders when using download-rustc so people don't use old versions of the toolchain by accident.
+ let _ = fs::remove_dir_all(host_dir.join("stage1"));
+ let _ = fs::remove_dir_all(host_dir.join("stage0-sysroot"));
+
// Copy the compiler into the correct sysroot.
let ci_rustc_dir =
builder.config.out.join(&*builder.config.build.triple).join("ci-rustc");
@@ -1137,6 +1177,20 @@ impl Step for Sysroot {
);
}
}
+ // Same for the rustc-src component.
+ let sysroot_lib_rustlib_rustcsrc = sysroot.join("lib/rustlib/rustc-src");
+ t!(fs::create_dir_all(&sysroot_lib_rustlib_rustcsrc));
+ let sysroot_lib_rustlib_rustcsrc_rust = sysroot_lib_rustlib_rustcsrc.join("rust");
+ if let Err(e) =
+ symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_rustcsrc_rust)
+ {
+ eprintln!(
+ "warning: creating symbolic link `{}` to `{}` failed with {}",
+ sysroot_lib_rustlib_rustcsrc_rust.display(),
+ builder.src.display(),
+ e,
+ );
+ }
INTERNER.intern_path(sysroot)
}
@@ -1276,7 +1330,9 @@ impl Step for Assemble {
compiler: build_compiler,
target: target_compiler.host,
});
- builder.copy(&lld_wrapper_exe, &gcc_ld_dir.join(exe("ld", target_compiler.host)));
+ for name in crate::LLD_FILE_NAMES {
+ builder.copy(&lld_wrapper_exe, &gcc_ld_dir.join(exe(name, target_compiler.host)));
+ }
}
if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) {
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 4325a237c..a8c403675 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -73,6 +73,8 @@ pub struct Config {
pub color: Color,
pub patch_binaries_for_nix: bool,
pub stage0_metadata: Stage0Metadata,
+ /// Whether to use the `c` feature of the `compiler_builtins` crate.
+ pub optimized_compiler_builtins: bool,
pub on_fail: Option<String>,
pub stage: u32,
@@ -156,9 +158,12 @@ pub struct Config {
pub rust_new_symbol_mangling: Option<bool>,
pub rust_profile_use: Option<String>,
pub rust_profile_generate: Option<String>,
+ pub rust_lto: RustcLto,
pub llvm_profile_use: Option<String>,
pub llvm_profile_generate: bool,
pub llvm_libunwind_default: Option<LlvmLibunwind>,
+ pub llvm_bolt_profile_generate: bool,
+ pub llvm_bolt_profile_use: Option<String>,
pub build: TargetSelection,
pub hosts: Vec<TargetSelection>,
@@ -315,6 +320,28 @@ impl SplitDebuginfo {
}
}
+/// LTO mode used for compiling rustc itself.
+#[derive(Default, Clone)]
+pub enum RustcLto {
+ #[default]
+ ThinLocal,
+ Thin,
+ Fat,
+}
+
+impl std::str::FromStr for RustcLto {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "thin-local" => Ok(RustcLto::ThinLocal),
+ "thin" => Ok(RustcLto::Thin),
+ "fat" => Ok(RustcLto::Fat),
+ _ => Err(format!("Invalid value for rustc LTO: {}", s)),
+ }
+ }
+}
+
#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TargetSelection {
pub triple: Interned<String>,
@@ -388,6 +415,7 @@ impl PartialEq<&str> for TargetSelection {
pub struct Target {
/// Some(path to llvm-config) if using an external LLVM.
pub llvm_config: Option<PathBuf>,
+ pub llvm_has_rust_patches: Option<bool>,
/// Some(path to FileCheck) if one was specified.
pub llvm_filecheck: Option<PathBuf>,
pub llvm_libunwind: Option<LlvmLibunwind>,
@@ -596,6 +624,7 @@ define_config! {
bench_stage: Option<u32> = "bench-stage",
patch_binaries_for_nix: Option<bool> = "patch-binaries-for-nix",
metrics: Option<bool> = "metrics",
+ optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
}
}
@@ -720,6 +749,7 @@ define_config! {
profile_use: Option<String> = "profile-use",
// ignored; this is set from an env var set by bootstrap.py
download_rustc: Option<StringOrBool> = "download-rustc",
+ lto: Option<String> = "lto",
}
}
@@ -733,6 +763,7 @@ define_config! {
default_linker: Option<PathBuf> = "default-linker",
linker: Option<String> = "linker",
llvm_config: Option<String> = "llvm-config",
+ llvm_has_rust_patches: Option<bool> = "llvm-has-rust-patches",
llvm_filecheck: Option<String> = "llvm-filecheck",
llvm_libunwind: Option<String> = "llvm-libunwind",
android_ndk: Option<String> = "android-ndk",
@@ -770,21 +801,20 @@ impl Config {
// set by build.rs
config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
+
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// Undo `src/bootstrap`
config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned();
config.out = PathBuf::from("build");
- config.initial_cargo = PathBuf::from(env!("CARGO"));
- config.initial_rustc = PathBuf::from(env!("RUSTC"));
-
config
}
pub fn parse(args: &[String]) -> Config {
let flags = Flags::parse(&args);
-
let mut config = Config::default_opts();
+
+ // Set flags.
config.exclude = flags.exclude.into_iter().map(|path| TaskPath::parse(path)).collect();
config.include_default_paths = flags.include_default_paths;
config.rustc_error_format = flags.rustc_error_format;
@@ -802,8 +832,68 @@ impl Config {
}
config.llvm_profile_use = flags.llvm_profile_use;
config.llvm_profile_generate = flags.llvm_profile_generate;
+ config.llvm_bolt_profile_generate = flags.llvm_bolt_profile_generate;
+ config.llvm_bolt_profile_use = flags.llvm_bolt_profile_use;
+
+ if config.llvm_bolt_profile_generate && config.llvm_bolt_profile_use.is_some() {
+ eprintln!(
+ "Cannot use both `llvm_bolt_profile_generate` and `llvm_bolt_profile_use` at the same time"
+ );
+ crate::detail_exit(1);
+ }
+
+ // Infer the rest of the configuration.
+
+ // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary,
+ // running on a completely machine from where it was compiled.
+ let mut cmd = Command::new("git");
+ // NOTE: we cannot support running from outside the repository because the only path we have available
+ // is set at compile time, which can be wrong if bootstrap was downloaded from source.
+ // We still support running outside the repository if we find we aren't in a git directory.
+ cmd.arg("rev-parse").arg("--show-toplevel");
+ // Discard stderr because we expect this to fail when building from a tarball.
+ let output = cmd
+ .stderr(std::process::Stdio::null())
+ .output()
+ .ok()
+ .and_then(|output| if output.status.success() { Some(output) } else { None });
+ if let Some(output) = output {
+ let git_root = String::from_utf8(output.stdout).unwrap();
+ // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes.
+ let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap();
+ let s = git_root.to_str().unwrap();
+
+ // Bootstrap is quite bad at handling /? in front of paths
+ let src = match s.strip_prefix("\\\\?\\") {
+ Some(p) => PathBuf::from(p),
+ None => PathBuf::from(git_root),
+ };
+ // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when,
+ // for example, the build directory is inside of another unrelated git directory.
+ // In that case keep the original `CARGO_MANIFEST_DIR` handling.
+ //
+ // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside
+ // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1.
+ if src.join("src").join("stage0.json").exists() {
+ config.src = src;
+ }
+ } else {
+ // We're building from a tarball, not git sources.
+ // We don't support pre-downloaded bootstrap in this case.
+ }
+
+ if cfg!(test) {
+ // Use the build directory of the original x.py invocation, so that we can set `initial_rustc` properly.
+ config.out = Path::new(
+ &env::var_os("CARGO_TARGET_DIR").expect("cargo test directly is not supported"),
+ )
+ .parent()
+ .unwrap()
+ .to_path_buf();
+ }
let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json")));
+
config.stage0_metadata = t!(serde_json::from_slice::<Stage0Metadata>(&stage0_json));
#[cfg(test)]
@@ -859,7 +949,6 @@ impl Config {
let build = toml.build.unwrap_or_default();
- set(&mut config.initial_rustc, build.rustc.map(PathBuf::from));
set(&mut config.out, flags.build_dir.or_else(|| build.build_dir.map(PathBuf::from)));
// NOTE: Bootstrap spawns various commands with different working directories.
// To avoid writing to random places on the file system, `config.out` needs to be an absolute path.
@@ -868,6 +957,16 @@ impl Config {
config.out = crate::util::absolute(&config.out);
}
+ config.initial_rustc = build
+ .rustc
+ .map(PathBuf::from)
+ .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/rustc"));
+ config.initial_cargo = build
+ .cargo
+ .map(PathBuf::from)
+ .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo"));
+
+ // NOTE: it's important this comes *after* we set `initial_rustc` just above.
if config.dry_run {
let dir = config.out.join("tmp-dry-run");
t!(fs::create_dir_all(&dir));
@@ -914,6 +1013,7 @@ impl Config {
set(&mut config.print_step_timings, build.print_step_timings);
set(&mut config.print_step_rusage, build.print_step_rusage);
set(&mut config.patch_binaries_for_nix, build.patch_binaries_for_nix);
+ set(&mut config.optimized_compiler_builtins, build.optimized_compiler_builtins);
config.verbose = cmp::max(config.verbose, flags.verbose);
@@ -990,42 +1090,7 @@ impl Config {
config.llvm_from_ci = match llvm.download_ci_llvm {
Some(StringOrBool::String(s)) => {
assert!(s == "if-available", "unknown option `{}` for download-ci-llvm", s);
- // This is currently all tier 1 targets and tier 2 targets with host tools
- // (since others may not have CI artifacts)
- // https://doc.rust-lang.org/rustc/platform-support.html#tier-1
- // FIXME: this is duplicated in bootstrap.py
- let supported_platforms = [
- // tier 1
- "aarch64-unknown-linux-gnu",
- "i686-pc-windows-gnu",
- "i686-pc-windows-msvc",
- "i686-unknown-linux-gnu",
- "x86_64-unknown-linux-gnu",
- "x86_64-apple-darwin",
- "x86_64-pc-windows-gnu",
- "x86_64-pc-windows-msvc",
- // tier 2 with host tools
- "aarch64-apple-darwin",
- "aarch64-pc-windows-msvc",
- "aarch64-unknown-linux-musl",
- "arm-unknown-linux-gnueabi",
- "arm-unknown-linux-gnueabihf",
- "armv7-unknown-linux-gnueabihf",
- "mips-unknown-linux-gnu",
- "mips64-unknown-linux-gnuabi64",
- "mips64el-unknown-linux-gnuabi64",
- "mipsel-unknown-linux-gnu",
- "powerpc-unknown-linux-gnu",
- "powerpc64-unknown-linux-gnu",
- "powerpc64le-unknown-linux-gnu",
- "riscv64gc-unknown-linux-gnu",
- "s390x-unknown-linux-gnu",
- "x86_64-unknown-freebsd",
- "x86_64-unknown-illumos",
- "x86_64-unknown-linux-musl",
- "x86_64-unknown-netbsd",
- ];
- supported_platforms.contains(&&*config.build.triple)
+ crate::native::is_ci_llvm_available(&config, llvm_assertions.unwrap_or(false))
}
Some(StringOrBool::Bool(b)) => b,
None => false,
@@ -1132,6 +1197,12 @@ impl Config {
config.rust_profile_use = flags.rust_profile_use.or(rust.profile_use);
config.rust_profile_generate = flags.rust_profile_generate.or(rust.profile_generate);
config.download_rustc_commit = download_ci_rustc_commit(&config, rust.download_rustc);
+
+ config.rust_lto = rust
+ .lto
+ .as_deref()
+ .map(|value| RustcLto::from_str(value).unwrap())
+ .unwrap_or_default();
} else {
config.rust_profile_use = flags.rust_profile_use;
config.rust_profile_generate = flags.rust_profile_generate;
@@ -1144,6 +1215,7 @@ impl Config {
if let Some(ref s) = cfg.llvm_config {
target.llvm_config = Some(config.src.join(s));
}
+ target.llvm_has_rust_patches = cfg.llvm_has_rust_patches;
if let Some(ref s) = cfg.llvm_filecheck {
target.llvm_filecheck = Some(config.src.join(s));
}
@@ -1176,6 +1248,7 @@ impl Config {
if config.llvm_from_ci {
let triple = &config.build.triple;
+ let ci_llvm_bin = config.ci_llvm_root().join("bin");
let mut build_target = config
.target_config
.entry(config.build)
@@ -1183,7 +1256,6 @@ impl Config {
check_ci_llvm!(build_target.llvm_config);
check_ci_llvm!(build_target.llvm_filecheck);
- let ci_llvm_bin = config.out.join(&*config.build.triple).join("ci-llvm/bin");
build_target.llvm_config = Some(ci_llvm_bin.join(exe("llvm-config", config.build)));
build_target.llvm_filecheck = Some(ci_llvm_bin.join(exe("FileCheck", config.build)));
}
@@ -1312,11 +1384,22 @@ impl Config {
git
}
- pub(crate) fn artifact_channel(&self, commit: &str) -> String {
- let mut channel = self.git();
- channel.arg("show").arg(format!("{}:src/ci/channel", commit));
- let channel = output(&mut channel);
- channel.trim().to_owned()
+ pub(crate) fn artifact_channel(&self, builder: &Builder<'_>, commit: &str) -> String {
+ if builder.rust_info.is_managed_git_subrepository() {
+ let mut channel = self.git();
+ channel.arg("show").arg(format!("{}:src/ci/channel", commit));
+ let channel = output(&mut channel);
+ channel.trim().to_owned()
+ } else if let Ok(channel) = fs::read_to_string(builder.src.join("src/ci/channel")) {
+ channel.trim().to_owned()
+ } else {
+ let src = builder.src.display();
+ eprintln!("error: failed to determine artifact channel");
+ eprintln!(
+ "help: either use git or ensure that {src}/src/ci/channel contains the name of the channel to use"
+ );
+ panic!();
+ }
}
/// Try to find the relative path of `bindir`, otherwise return it in full.
@@ -1445,11 +1528,15 @@ impl Config {
.get(&target)
.and_then(|t| t.llvm_libunwind)
.or(self.llvm_libunwind_default)
- .unwrap_or(LlvmLibunwind::No)
+ .unwrap_or(if target.contains("fuchsia") {
+ LlvmLibunwind::InTree
+ } else {
+ LlvmLibunwind::No
+ })
}
pub fn submodules(&self, rust_info: &GitInfo) -> bool {
- self.submodules.unwrap_or(rust_info.is_git())
+ self.submodules.unwrap_or(rust_info.is_managed_git_subrepository())
}
}
@@ -1461,7 +1548,7 @@ fn set<T>(field: &mut T, val: Option<T>) {
fn threads_from_config(v: u32) -> u32 {
match v {
- 0 => num_cpus::get() as u32,
+ 0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32,
n => n,
}
}
@@ -1554,7 +1641,7 @@ fn maybe_download_rustfmt(builder: &Builder<'_>) -> Option<PathBuf> {
fn download_ci_rustc(builder: &Builder<'_>, commit: &str) {
builder.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})"));
- let channel = builder.config.artifact_channel(commit);
+ let channel = builder.config.artifact_channel(builder, commit);
let host = builder.config.build.triple;
let bin_root = builder.out.join(host).join("ci-rustc");
let rustc_stamp = bin_root.join(".rustc-stamp");
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 6291b204e..12585e80e 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -16,6 +16,7 @@ use std::process::Command;
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
+use crate::channel;
use crate::compile;
use crate::config::TargetSelection;
use crate::tarball::{GeneratedTarball, OverlayKind, Tarball};
@@ -35,18 +36,6 @@ pub fn tmpdir(builder: &Builder<'_>) -> PathBuf {
builder.out.join("tmp/dist")
}
-fn missing_tool(tool_name: &str, skip: bool) {
- if skip {
- println!("Unable to build {}, skipping dist", tool_name)
- } else {
- let help = "note: not all tools are available on all nightlies\nhelp: see https://forge.rust-lang.org/infra/toolstate.html for more information";
- panic!(
- "Unable to build submodule tool {} (use `missing-tools = true` to ignore this failure)\n{}",
- tool_name, help
- )
- }
-}
-
fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool {
if !builder.config.extended {
return false;
@@ -87,6 +76,39 @@ impl Step for Docs {
}
}
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct JsonDocs {
+ pub host: TargetSelection,
+}
+
+impl Step for JsonDocs {
+ type Output = Option<GeneratedTarball>;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ let default = run.builder.config.docs;
+ run.alias("rust-docs-json").default_condition(default)
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(JsonDocs { host: run.target });
+ }
+
+ /// Builds the `rust-docs-json` installer component.
+ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+ let host = self.host;
+ builder.ensure(crate::doc::JsonStd { stage: builder.top_stage, target: host });
+
+ let dest = "share/doc/rust/json";
+
+ let mut tarball = Tarball::new(builder, "rust-docs-json", &host.triple);
+ tarball.set_product_name("Rust Documentation In JSON Format");
+ tarball.is_preview(true);
+ tarball.add_bulk_dir(&builder.json_doc_out(host), dest);
+ Some(tarball.generate())
+ }
+}
+
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustcDocs {
pub host: TargetSelection,
@@ -423,8 +445,11 @@ impl Step for Rustc {
let gcc_lld_src_dir = src_dir.join("gcc-ld");
let gcc_lld_dst_dir = dst_dir.join("gcc-ld");
t!(fs::create_dir(&gcc_lld_dst_dir));
- let exe_name = exe("ld", compiler.host);
- builder.copy(&gcc_lld_src_dir.join(&exe_name), &gcc_lld_dst_dir.join(&exe_name));
+ for name in crate::LLD_FILE_NAMES {
+ let exe_name = exe(name, compiler.host);
+ builder
+ .copy(&gcc_lld_src_dir.join(&exe_name), &gcc_lld_dst_dir.join(&exe_name));
+ }
}
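
With LLD_FILE_NAMES (added to lib.rs later in this patch), every LLD flavor is now shipped under gcc-ld instead of a single ld binary. A self-contained sketch of that loop, where the exe closure stands in for bootstrap's helper that appends .exe on Windows:

// Hedged sketch; the paths and the `exe` closure are placeholders, not bootstrap's API.
use std::fs;
use std::io;
use std::path::Path;

const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];

fn copy_lld_flavors(src: &Path, dst: &Path, exe: impl Fn(&str) -> String) -> io::Result<()> {
    for name in LLD_FILE_NAMES.iter().copied() {
        let file = exe(name);
        fs::copy(src.join(&file), dst.join(&file))?;
    }
    Ok(())
}
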
// Man pages
@@ -894,12 +919,13 @@ impl Step for PlainSourceTarball {
// Create the version file
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
- if let Some(sha) = builder.rust_sha() {
- builder.create(&plain_dst_src.join("git-commit-hash"), &sha);
+ if let Some(info) = builder.rust_info.info() {
+ channel::write_commit_hash_file(&plain_dst_src, &info.sha);
+ channel::write_commit_info_file(&plain_dst_src, info);
}
// If we're building from git sources, we need to vendor a complete distribution.
- if builder.rust_info.is_git() {
+ if builder.rust_info.is_managed_git_subrepository() {
// Ensure we have the submodules checked out.
builder.update_submodule(Path::new("src/tools/rust-analyzer"));
@@ -1018,10 +1044,7 @@ impl Step for Rls {
let rls = builder
.ensure(tool::Rls { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("RLS", builder.build.config.missing_tools);
- None
- })?;
+ .expect("rls expected to build");
let mut tarball = Tarball::new(builder, "rls", &target.triple);
tarball.set_overlay(OverlayKind::RLS);
@@ -1170,18 +1193,9 @@ impl Step for Miri {
let compiler = self.compiler;
let target = self.target;
- let miri = builder
- .ensure(tool::Miri { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("miri", builder.build.config.missing_tools);
- None
- })?;
- let cargomiri = builder
- .ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("cargo miri", builder.build.config.missing_tools);
- None
- })?;
+ let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() })?;
+ let cargomiri =
+ builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })?;
let mut tarball = Tarball::new(builder, "miri", &target.triple);
tarball.set_overlay(OverlayKind::Miri);
@@ -1226,17 +1240,10 @@ impl Step for Rustfmt {
let rustfmt = builder
.ensure(tool::Rustfmt { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("Rustfmt", builder.build.config.missing_tools);
- None
- })?;
+ .expect("rustfmt expected to build - essential tool");
let cargofmt = builder
.ensure(tool::Cargofmt { compiler, target, extra_features: Vec::new() })
- .or_else(|| {
- missing_tool("Cargofmt", builder.build.config.missing_tools);
- None
- })?;
-
+ .expect("cargo fmt expected to build - essential tool");
let mut tarball = Tarball::new(builder, "rustfmt", &target.triple);
tarball.set_overlay(OverlayKind::Rustfmt);
tarball.is_preview(true);
@@ -1360,6 +1367,7 @@ impl Step for Extended {
}
add_component!("rust-docs" => Docs { host: target });
+ add_component!("rust-json-docs" => JsonDocs { host: target });
add_component!("rust-demangler"=> RustDemangler { compiler, target });
add_component!("cargo" => Cargo { compiler, target });
add_component!("rustfmt" => Rustfmt { compiler, target });
@@ -1419,7 +1427,7 @@ impl Step for Extended {
let xform = |p: &Path| {
let mut contents = t!(fs::read_to_string(p));
- for tool in &["rust-demangler", "rls", "rust-analyzer", "miri", "rustfmt"] {
+ for tool in &["rust-demangler", "miri"] {
if !built_tools.contains(tool) {
contents = filter(&contents, tool);
}
@@ -1459,7 +1467,8 @@ impl Step for Extended {
prepare("rust-std");
prepare("rust-analysis");
prepare("clippy");
- for tool in &["rust-docs", "rust-demangler", "rls", "rust-analyzer", "miri"] {
+ prepare("rust-analyzer");
+ for tool in &["rust-docs", "rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1495,8 +1504,6 @@ impl Step for Extended {
builder.create_dir(&exe.join(name));
let dir = if name == "rust-std" || name == "rust-analysis" {
format!("{}-{}", name, target.triple)
- } else if name == "rls" {
- "rls-preview".to_string()
} else if name == "rust-analyzer" {
"rust-analyzer-preview".to_string()
} else if name == "clippy" {
@@ -1520,7 +1527,8 @@ impl Step for Extended {
prepare("rust-docs");
prepare("rust-std");
prepare("clippy");
- for tool in &["rust-demangler", "rls", "rust-analyzer", "miri"] {
+ prepare("rust-analyzer");
+ for tool in &["rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1604,44 +1612,23 @@ impl Step for Extended {
.arg("-out")
.arg(exe.join("StdGroup.wxs")),
);
- if built_tools.contains("rls") {
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("rls")
- .args(&heat_flags)
- .arg("-cg")
- .arg("RlsGroup")
- .arg("-dr")
- .arg("Rls")
- .arg("-var")
- .arg("var.RlsDir")
- .arg("-out")
- .arg(exe.join("RlsGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/remove-duplicates.xsl")),
- );
- }
- if built_tools.contains("rust-analyzer") {
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("rust-analyzer")
- .args(&heat_flags)
- .arg("-cg")
- .arg("RustAnalyzerGroup")
- .arg("-dr")
- .arg("RustAnalyzer")
- .arg("-var")
- .arg("var.RustAnalyzerDir")
- .arg("-out")
- .arg(exe.join("RustAnalyzerGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/remove-duplicates.xsl")),
- );
- }
+ builder.run(
+ Command::new(&heat)
+ .current_dir(&exe)
+ .arg("dir")
+ .arg("rust-analyzer")
+ .args(&heat_flags)
+ .arg("-cg")
+ .arg("RustAnalyzerGroup")
+ .arg("-dr")
+ .arg("RustAnalyzer")
+ .arg("-var")
+ .arg("var.RustAnalyzerDir")
+ .arg("-out")
+ .arg(exe.join("RustAnalyzerGroup.wxs"))
+ .arg("-t")
+ .arg(etc.join("msi/remove-duplicates.xsl")),
+ );
builder.run(
Command::new(&heat)
.current_dir(&exe)
@@ -1754,9 +1741,6 @@ impl Step for Extended {
if built_tools.contains("rust-demangler") {
cmd.arg("-dRustDemanglerDir=rust-demangler");
}
- if built_tools.contains("rls") {
- cmd.arg("-dRlsDir=rls");
- }
if built_tools.contains("rust-analyzer") {
cmd.arg("-dRustAnalyzerDir=rust-analyzer");
}
@@ -1776,18 +1760,15 @@ impl Step for Extended {
candle("CargoGroup.wxs".as_ref());
candle("StdGroup.wxs".as_ref());
candle("ClippyGroup.wxs".as_ref());
+ if built_tools.contains("miri") {
+ candle("MiriGroup.wxs".as_ref());
+ }
if built_tools.contains("rust-demangler") {
candle("RustDemanglerGroup.wxs".as_ref());
}
- if built_tools.contains("rls") {
- candle("RlsGroup.wxs".as_ref());
- }
if built_tools.contains("rust-analyzer") {
candle("RustAnalyzerGroup.wxs".as_ref());
}
- if built_tools.contains("miri") {
- candle("MiriGroup.wxs".as_ref());
- }
candle("AnalysisGroup.wxs".as_ref());
if target.ends_with("windows-gnu") {
@@ -1819,8 +1800,8 @@ impl Step for Extended {
.arg("ClippyGroup.wixobj")
.current_dir(&exe);
- if built_tools.contains("rls") {
- cmd.arg("RlsGroup.wixobj");
+ if built_tools.contains("miri") {
+ cmd.arg("MiriGroup.wixobj");
}
if built_tools.contains("rust-analyzer") {
cmd.arg("RustAnalyzerGroup.wixobj");
@@ -1828,9 +1809,6 @@ impl Step for Extended {
if built_tools.contains("rust-demangler") {
cmd.arg("RustDemanglerGroup.wixobj");
}
- if built_tools.contains("miri") {
- cmd.arg("MiriGroup.wixobj");
- }
if target.ends_with("windows-gnu") {
cmd.arg("GccGroup.wixobj");
@@ -1875,23 +1853,21 @@ fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
///
/// Returns whether the files were actually copied.
fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) -> bool {
- if let Some(config) = builder.config.target_config.get(&target) {
- if config.llvm_config.is_some() && !builder.config.llvm_from_ci {
- // If the LLVM was externally provided, then we don't currently copy
- // artifacts into the sysroot. This is not necessarily the right
- // choice (in particular, it will require the LLVM dylib to be in
- // the linker's load path at runtime), but the common use case for
- // external LLVMs is distribution provided LLVMs, and in that case
- // they're usually in the standard search path (e.g., /usr/lib) and
- // copying them here is going to cause problems as we may end up
- // with the wrong files and isn't what distributions want.
- //
- // This behavior may be revisited in the future though.
- //
- // If the LLVM is coming from ourselves (just from CI) though, we
- // still want to install it, as it otherwise won't be available.
- return false;
- }
+ if !builder.is_rust_llvm(target) {
+ // If the LLVM was externally provided, then we don't currently copy
+ // artifacts into the sysroot. This is not necessarily the right
+ // choice (in particular, it will require the LLVM dylib to be in
+ // the linker's load path at runtime), but the common use case for
+ // external LLVMs is distribution provided LLVMs, and in that case
+ // they're usually in the standard search path (e.g., /usr/lib) and
+ // copying them here is going to cause problems as we may end up
+ // with the wrong files and isn't what distributions want.
+ //
+ // This behavior may be revisited in the future though.
+ //
+ // If the LLVM is coming from ourselves (just from CI) though, we
+ // still want to install it, as it otherwise won't be available.
+ return false;
}
// On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib
@@ -1910,7 +1886,7 @@ fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir
let mut cmd = Command::new(llvm_config);
cmd.arg("--libfiles");
builder.verbose(&format!("running {:?}", cmd));
- let files = output(&mut cmd);
+ let files = if builder.config.dry_run { "".into() } else { output(&mut cmd) };
let build_llvm_out = &builder.llvm_out(builder.config.build);
let target_llvm_out = &builder.llvm_out(target);
for file in files.trim_end().split(' ') {
@@ -2057,6 +2033,8 @@ impl Step for RustDev {
"llvm-dwp",
"llvm-nm",
"llvm-dwarfdump",
+ "llvm-dis",
+ "llvm-tblgen",
] {
tarball.add_file(src_bindir.join(exe(bin, target)), "bin", 0o755);
}
@@ -2087,6 +2065,41 @@ impl Step for RustDev {
}
}
+// Tarball intended for internal consumption to ease rustc/std development.
+//
+// Should not be considered stable by end users.
+#[derive(Clone, Debug, Eq, Hash, PartialEq)]
+pub struct Bootstrap {
+ pub target: TargetSelection,
+}
+
+impl Step for Bootstrap {
+ type Output = Option<GeneratedTarball>;
+ const DEFAULT: bool = false;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.alias("bootstrap")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Bootstrap { target: run.target });
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+ let target = self.target;
+
+ let tarball = Tarball::new(builder, "bootstrap", &target.triple);
+
+ let bootstrap_outdir = &builder.bootstrap_out;
+ for file in &["bootstrap", "llvm-config-wrapper", "rustc", "rustdoc", "sccache-plus-cl"] {
+ tarball.add_file(bootstrap_outdir.join(exe(file, target)), "bootstrap/bin", 0o755);
+ }
+
+ Some(tarball.generate())
+ }
+}
+
/// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the
/// release process to avoid cloning the monorepo and building stuff.
///
@@ -2152,6 +2165,10 @@ impl Step for ReproducibleArtifacts {
tarball.add_file(path, ".", 0o644);
added_anything = true;
}
+ if let Some(path) = builder.config.llvm_bolt_profile_use.as_ref() {
+ tarball.add_file(path, ".", 0o644);
+ added_anything = true;
+ }
if added_anything { Some(tarball.generate()) } else { None }
}
}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 2852442d0..ea06caf9c 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -7,6 +7,7 @@
//! Everything here is basically just a shim around calling either `rustbook` or
//! `rustdoc`.
+use std::ffi::OsStr;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
@@ -81,6 +82,7 @@ book!(
Reference, "src/doc/reference", "reference", submodule;
RustByExample, "src/doc/rust-by-example", "rust-by-example", submodule;
RustdocBook, "src/doc/rustdoc", "rustdoc";
+ StyleGuide, "src/doc/style-guide", "style-guide";
);
fn open(builder: &Builder<'_>, path: impl AsRef<Path>) {
@@ -226,7 +228,7 @@ impl Step for TheBook {
}
// build the version info page and CSS
- builder.ensure(Standalone { compiler, target });
+ let shared_assets = builder.ensure(SharedAssets { target });
// build the redirect pages
builder.info(&format!("Documenting book redirect pages ({})", target));
@@ -235,7 +237,7 @@ impl Step for TheBook {
let path = file.path();
let path = path.to_str().unwrap();
- invoke_rustdoc(builder, compiler, target, path);
+ invoke_rustdoc(builder, compiler, &shared_assets, target, path);
}
if builder.was_invoked_explicitly::<Self>(Kind::Doc) {
@@ -249,6 +251,7 @@ impl Step for TheBook {
fn invoke_rustdoc(
builder: &Builder<'_>,
compiler: Compiler,
+ shared_assets: &SharedAssetsPaths,
target: TargetSelection,
markdown: &str,
) {
@@ -258,7 +261,6 @@ fn invoke_rustdoc(
let header = builder.src.join("src/doc/redirect.inc");
let footer = builder.src.join("src/doc/footer.inc");
- let version_info = out.join("version_info.html");
let mut cmd = builder.rustdoc_cmd(compiler);
@@ -267,7 +269,7 @@ fn invoke_rustdoc(
cmd.arg("--html-after-content")
.arg(&footer)
.arg("--html-before-content")
- .arg(&version_info)
+ .arg(&shared_assets.version_info)
.arg("--html-in-header")
.arg(&header)
.arg("--markdown-no-toc")
@@ -298,7 +300,7 @@ impl Step for Standalone {
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.path("src/doc").default_condition(builder.config.docs)
+ run.path("src/doc").alias("standalone").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig<'_>) {
@@ -323,21 +325,11 @@ impl Step for Standalone {
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
+ let version_info = builder.ensure(SharedAssets { target: self.target }).version_info;
+
let favicon = builder.src.join("src/doc/favicon.inc");
let footer = builder.src.join("src/doc/footer.inc");
let full_toc = builder.src.join("src/doc/full-toc.inc");
- t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
-
- let version_input = builder.src.join("src/doc/version_info.html.template");
- let version_info = out.join("version_info.html");
-
- if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
- let info = t!(fs::read_to_string(&version_input))
- .replace("VERSION", &builder.rust_release())
- .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
- .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
- t!(fs::write(&version_info, &info));
- }
for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
let file = t!(file);
@@ -383,15 +375,9 @@ impl Step for Standalone {
}
if filename == "not_found.md" {
- cmd.arg("--markdown-css")
- .arg(format!("https://doc.rust-lang.org/rustdoc{}.css", &builder.version))
- .arg("--markdown-css")
- .arg("https://doc.rust-lang.org/rust.css");
+ cmd.arg("--markdown-css").arg("https://doc.rust-lang.org/rust.css");
} else {
- cmd.arg("--markdown-css")
- .arg(format!("rustdoc{}.css", &builder.version))
- .arg("--markdown-css")
- .arg("rust.css");
+ cmd.arg("--markdown-css").arg("rust.css");
}
builder.run(&mut cmd);
}
@@ -405,6 +391,45 @@ impl Step for Standalone {
}
}
+#[derive(Debug, Clone)]
+pub struct SharedAssetsPaths {
+ pub version_info: PathBuf,
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct SharedAssets {
+ target: TargetSelection,
+}
+
+impl Step for SharedAssets {
+ type Output = SharedAssetsPaths;
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ // Other tasks depend on this, no need to execute it on its own
+ run.never()
+ }
+
+ // Generate shared resources used by other pieces of documentation.
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let out = builder.doc_out(self.target);
+
+ let version_input = builder.src.join("src").join("doc").join("version_info.html.template");
+ let version_info = out.join("version_info.html");
+ if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
+ let info = t!(fs::read_to_string(&version_input))
+ .replace("VERSION", &builder.rust_release())
+ .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
+ .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
+ t!(fs::write(&version_info, &info));
+ }
+
+ builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css"));
+
+ SharedAssetsPaths { version_info }
+ }
+}
+
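
SharedAssets centralizes what Standalone and TheBook previously did separately: rendering version_info.html from its template and copying rust.css. A reduced sketch of the substitution it performs, assuming sha is an ASCII commit hash (dry-run handling and the up_to_date check are omitted):

// Minimal sketch; the real step goes through the builder's helpers.
use std::fs;
use std::io;
use std::path::Path;

fn render_version_info(template: &Path, out: &Path, release: &str, sha: &str) -> io::Result<()> {
    let short = &sha[..sha.len().min(9)];
    let info = fs::read_to_string(template)?
        .replace("VERSION", release)
        .replace("SHORT_HASH", short)
        .replace("STAMP", sha);
    fs::write(out, info)
}
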
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub stage: u32,
@@ -431,49 +456,25 @@ impl Step for Std {
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
- builder.info(&format!("Documenting stage{} std ({})", stage, target));
- if builder.no_std(target) == Some(true) {
- panic!(
- "building std documentation for no_std target {target} is not supported\n\
- Set `docs = false` in the config to disable documentation."
- );
- }
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, builder.config.build);
-
- let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
-
- t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
-
- let run_cargo_rustdoc_for = |package: &str| {
- let mut cargo =
- builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
- compile::std_cargo(builder, target, compiler.stage, &mut cargo);
- cargo
- .arg("-p")
- .arg(package)
- .arg("-Zskip-rustdoc-fingerprint")
- .arg("--")
- .arg("--markdown-css")
- .arg("rust.css")
- .arg("--markdown-no-toc")
- .arg("-Z")
- .arg("unstable-options")
- .arg("--resource-suffix")
- .arg(&builder.version)
- .arg("--index-page")
- .arg(&builder.src.join("src/doc/index.md"));
+ builder.ensure(SharedAssets { target: self.target });
- if !builder.config.docs_minification {
- cargo.arg("--disable-minification");
- }
+ let index_page = builder.src.join("src/doc/index.md").into_os_string();
+ let mut extra_args = vec![
+ OsStr::new("--markdown-css"),
+ OsStr::new("rust.css"),
+ OsStr::new("--markdown-no-toc"),
+ OsStr::new("--index-page"),
+ &index_page,
+ ];
- builder.run(&mut cargo.into());
- };
+ if !builder.config.docs_minification {
+ extra_args.push(OsStr::new("--disable-minification"));
+ }
- let paths = builder
+ let requested_crates = builder
.paths
.iter()
.map(components_simplified)
@@ -491,30 +492,20 @@ impl Step for Std {
})
.collect::<Vec<_>>();
- // Only build the following crates. While we could just iterate over the
- // folder structure, that would also build internal crates that we do
- // not want to show in documentation. These crates will later be visited
- // by the rustc step, so internal documentation will show them.
- //
- // Note that the order here is important! The crates need to be
- // processed starting from the leaves, otherwise rustdoc will not
- // create correct links between crates because rustdoc depends on the
- // existence of the output directories to know if it should be a local
- // or remote link.
- let krates = ["core", "alloc", "std", "proc_macro", "test"];
- for krate in &krates {
- run_cargo_rustdoc_for(krate);
- if paths.iter().any(|p| p == krate) {
- // No need to document more of the libraries if we have the one we want.
- break;
- }
- }
- builder.cp_r(&out_dir, &out);
+ doc_std(
+ builder,
+ DocumentationFormat::HTML,
+ stage,
+ target,
+ &out,
+ &extra_args,
+ &requested_crates,
+ );
// Look for library/std, library/core etc in the `x.py doc` arguments and
// open the corresponding rendered docs.
- for requested_crate in paths {
- if krates.iter().any(|k| *k == requested_crate.as_str()) {
+ for requested_crate in requested_crates {
+ if STD_PUBLIC_CRATES.iter().any(|k| *k == requested_crate.as_str()) {
let index = out.join(requested_crate).join("index.html");
open(builder, &index);
}
@@ -523,6 +514,134 @@ impl Step for Std {
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct JsonStd {
+ pub stage: u32,
+ pub target: TargetSelection,
+}
+
+impl Step for JsonStd {
+ type Output = ();
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ let default = run.builder.config.docs && run.builder.config.cmd.json();
+ run.all_krates("test").path("library").default_condition(default)
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Std { stage: run.builder.top_stage, target: run.target });
+ }
+
+ /// Build JSON documentation for the standard library crates.
+ ///
+ /// This is largely just a wrapper around `cargo doc`.
+ fn run(self, builder: &Builder<'_>) {
+ let stage = self.stage;
+ let target = self.target;
+ let out = builder.json_doc_out(target);
+ t!(fs::create_dir_all(&out));
+ let extra_args = [OsStr::new("--output-format"), OsStr::new("json")];
+ doc_std(builder, DocumentationFormat::JSON, stage, target, &out, &extra_args, &[])
+ }
+}
+
+/// Names of the crates that are visible to consumers of the standard library.
+/// Documentation for internal crates is handled by the rustc step, so internal crates will show
+/// up there.
+///
+/// Order here is important!
+/// Crates need to be processed starting from the leaves, otherwise rustdoc will not
+/// create correct links between crates because rustdoc depends on the
+/// existence of the output directories to know if it should be a local
+/// or remote link.
+const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+enum DocumentationFormat {
+ HTML,
+ JSON,
+}
+
+impl DocumentationFormat {
+ fn as_str(&self) -> &str {
+ match self {
+ DocumentationFormat::HTML => "HTML",
+ DocumentationFormat::JSON => "JSON",
+ }
+ }
+}
+
+/// Build the documentation for public standard library crates.
+///
+/// `requested_crates` can be used to build only a subset of the crates. If empty, all crates will
+/// be built.
+fn doc_std(
+ builder: &Builder<'_>,
+ format: DocumentationFormat,
+ stage: u32,
+ target: TargetSelection,
+ out: &Path,
+ extra_args: &[&OsStr],
+ requested_crates: &[String],
+) {
+ builder.info(&format!(
+ "Documenting stage{} std ({}) in {} format",
+ stage,
+ target,
+ format.as_str()
+ ));
+ if builder.no_std(target) == Some(true) {
+ panic!(
+ "building std documentation for no_std target {target} is not supported\n\
+ Set `docs = false` in the config to disable documentation."
+ );
+ }
+ let compiler = builder.compiler(stage, builder.config.build);
+    // This is the directory where the compiler will place the output of the command.
+    // We will then copy the files from this directory into the final `out` directory
+    // specified as a function parameter.
+ let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
+ // `cargo` uses the same directory for both JSON docs and HTML docs.
+ // This could lead to cross-contamination when copying files into the specified `out` directory.
+ // For example:
+ // ```bash
+ // x doc std
+ // x doc std --json
+ // ```
+ // could lead to HTML docs being copied into the JSON docs output directory.
+ // To avoid this issue, we clean the doc folder before invoking `cargo`.
+ if out_dir.exists() {
+ builder.remove_dir(&out_dir);
+ }
+
+ let run_cargo_rustdoc_for = |package: &str| {
+ let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
+ compile::std_cargo(builder, target, compiler.stage, &mut cargo);
+ cargo
+ .arg("-p")
+ .arg(package)
+ .arg("-Zskip-rustdoc-fingerprint")
+ .arg("--")
+ .arg("-Z")
+ .arg("unstable-options")
+ .arg("--resource-suffix")
+ .arg(&builder.version)
+ .args(extra_args);
+ builder.run(&mut cargo.into());
+ };
+
+ for krate in STD_PUBLIC_CRATES {
+ run_cargo_rustdoc_for(krate);
+ if requested_crates.iter().any(|p| p == krate) {
+ // No need to document more of the libraries if we have the one we want.
+ break;
+ }
+ }
+
+ builder.cp_r(&out_dir, &out);
+}
+
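
Because cargo writes HTML and JSON documentation into the same stage_out(...)/doc directory, doc_std wipes that directory before each invocation, as the comment above explains; a standalone sketch of that guard (bootstrap uses its own remove_dir helper rather than std directly):

use std::fs;
use std::io;
use std::path::Path;

// Clean the shared cargo doc output so `x doc std` and `x doc std --json`
// cannot contaminate each other's final output directories.
fn clean_cargo_doc_dir(out_dir: &Path) -> io::Result<()> {
    if out_dir.exists() {
        fs::remove_dir_all(out_dir)?;
    }
    Ok(())
}
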
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
pub stage: u32,
pub target: TargetSelection,
@@ -793,7 +912,7 @@ impl Step for ErrorIndex {
t!(fs::create_dir_all(&out));
let mut index = tool::ErrorIndex::command(builder);
index.arg("html");
- index.arg(out.join("error-index.html"));
+ index.arg(out);
index.arg(&builder.version);
builder.run(&mut index);
diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp
index 19504a51a..d19a1ae95 100644
--- a/src/bootstrap/download-ci-llvm-stamp
+++ b/src/bootstrap/download-ci-llvm-stamp
@@ -1,4 +1,4 @@
Change this file to make users of the `download-ci-llvm` configuration download
a new version of LLVM from CI, even if the LLVM submodule hasn’t changed.
-Last change is for: https://github.com/rust-lang/rust/pull/96867
+Last change is for: https://github.com/rust-lang/rust/pull/102790
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 80b3bcce8..ee341a353 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -78,8 +78,11 @@ pub struct Flags {
//
// llvm_out/build/profiles/ is the location this writes to.
pub llvm_profile_generate: bool,
+ pub llvm_bolt_profile_generate: bool,
+ pub llvm_bolt_profile_use: Option<String>,
}
+#[derive(Debug)]
#[cfg_attr(test, derive(Clone))]
pub enum Subcommand {
Build {
@@ -106,6 +109,7 @@ pub enum Subcommand {
Doc {
paths: Vec<PathBuf>,
open: bool,
+ json: bool,
},
Test {
paths: Vec<PathBuf>,
@@ -115,7 +119,6 @@ pub enum Subcommand {
compare_mode: Option<String>,
pass: Option<String>,
run: Option<String>,
- skip: Vec<String>,
test_args: Vec<String>,
rustc_args: Vec<String>,
fail_fast: bool,
@@ -220,7 +223,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
let j_msg = format!(
"number of jobs to run in parallel; \
defaults to {} (this host's logical CPU count)",
- num_cpus::get()
+ std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get)
);
opts.optopt("j", "jobs", &j_msg, "JOBS");
opts.optflag("h", "help", "print this help message");
@@ -254,6 +257,8 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
opts.optmulti("D", "", "deny certain clippy lints", "OPT");
opts.optmulti("W", "", "warn about certain clippy lints", "OPT");
opts.optmulti("F", "", "forbid certain clippy lints", "OPT");
+ opts.optflag("", "llvm-bolt-profile-generate", "generate BOLT profile for LLVM build");
+ opts.optopt("", "llvm-bolt-profile-use", "use BOLT profile for LLVM build", "PROFILE");
// We can't use getopt to parse the options until we have completed specifying which
// options are valid, but under the current implementation, some options are conditional on
@@ -325,6 +330,11 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
}
Kind::Doc => {
opts.optflag("", "open", "open the docs in a browser");
+ opts.optflag(
+ "",
+ "json",
+ "render the documentation in JSON format in addition to the usual HTML format",
+ );
}
Kind::Clean => {
opts.optflag("", "all", "clean all build artifacts");
@@ -493,6 +503,7 @@ Arguments:
./x.py doc src/doc/book
./x.py doc src/doc/nomicon
./x.py doc src/doc/book library/std
+ ./x.py doc library/std --json
./x.py doc library/std --open
If no arguments are passed then everything is documented:
@@ -568,7 +579,6 @@ Arguments:
compare_mode: matches.opt_str("compare-mode"),
pass: matches.opt_str("pass"),
run: matches.opt_str("run"),
- skip: matches.opt_strs("skip"),
test_args: matches.opt_strs("test-args"),
rustc_args: matches.opt_strs("rustc-args"),
fail_fast: !matches.opt_present("no-fail-fast"),
@@ -582,7 +592,11 @@ Arguments:
},
},
Kind::Bench => Subcommand::Bench { paths, test_args: matches.opt_strs("test-args") },
- Kind::Doc => Subcommand::Doc { paths, open: matches.opt_present("open") },
+ Kind::Doc => Subcommand::Doc {
+ paths,
+ open: matches.opt_present("open"),
+ json: matches.opt_present("json"),
+ },
Kind::Clean => {
if !paths.is_empty() {
println!("\nclean does not take a path argument\n");
@@ -681,6 +695,8 @@ Arguments:
rust_profile_generate: matches.opt_str("rust-profile-generate"),
llvm_profile_use: matches.opt_str("llvm-profile-use"),
llvm_profile_generate: matches.opt_present("llvm-profile-generate"),
+ llvm_bolt_profile_generate: matches.opt_present("llvm-bolt-profile-generate"),
+ llvm_bolt_profile_use: matches.opt_str("llvm-bolt-profile-use"),
}
}
}
@@ -708,16 +724,6 @@ impl Subcommand {
let mut args = vec![];
match *self {
- Subcommand::Test { ref skip, .. } => {
- for s in skip {
- args.push("--skip");
- args.push(s.as_str());
- }
- }
- _ => (),
- };
-
- match *self {
Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => {
args.extend(test_args.iter().flat_map(|s| s.split_whitespace()))
}
@@ -798,6 +804,13 @@ impl Subcommand {
_ => false,
}
}
+
+ pub fn json(&self) -> bool {
+ match *self {
+ Subcommand::Doc { json, .. } => json,
+ _ => false,
+ }
+ }
}
fn split(s: &[String]) -> Vec<String> {
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index 6e49f39ff..7672b7c91 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -182,15 +182,6 @@ install!((self, builder, _config),
.expect("missing cargo");
install_sh(builder, "cargo", self.compiler.stage, Some(self.target), &tarball);
};
- Rls, alias = "rls", Self::should_build(_config), only_hosts: true, {
- if let Some(tarball) = builder.ensure(dist::Rls { compiler: self.compiler, target: self.target }) {
- install_sh(builder, "rls", self.compiler.stage, Some(self.target), &tarball);
- } else {
- builder.info(
- &format!("skipping Install RLS stage{} ({})", self.compiler.stage, self.target),
- );
- }
- };
RustAnalyzer, alias = "rust-analyzer", Self::should_build(_config), only_hosts: true, {
if let Some(tarball) =
builder.ensure(dist::RustAnalyzer { compiler: self.compiler, target: self.target })
@@ -209,13 +200,10 @@ install!((self, builder, _config),
install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball);
};
Miri, alias = "miri", Self::should_build(_config), only_hosts: true, {
- if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) {
- install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
- } else {
- builder.info(
- &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target),
- );
- }
+ let tarball = builder
+ .ensure(dist::Miri { compiler: self.compiler, target: self.target })
+ .expect("missing miri");
+ install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
};
Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, {
if let Some(tarball) = builder.ensure(dist::Rustfmt {
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index d265277b4..7e70e99bb 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -112,6 +112,7 @@ use std::path::{Path, PathBuf};
use std::process::Command;
use std::str;
+use config::Target;
use filetime::FileTime;
use once_cell::sync::OnceCell;
@@ -121,6 +122,7 @@ use crate::util::{
check_run, exe, libdir, mtime, output, run, run_suppressed, try_run, try_run_suppressed, CiEnv,
};
+mod bolt;
mod builder;
mod cache;
mod cc_detect;
@@ -186,6 +188,9 @@ const LLVM_TOOLS: &[&str] = &[
"opt", // used to optimize LLVM bytecode
];
+/// LLD file names for all flavors.
+const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];
+
pub const VERSION: usize = 2;
/// Extra --check-cfg to add when building
@@ -194,9 +199,12 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
(None, "bootstrap", None),
(Some(Mode::Rustc), "parallel_compiler", None),
(Some(Mode::ToolRustc), "parallel_compiler", None),
+ (Some(Mode::Codegen), "parallel_compiler", None),
(Some(Mode::Std), "stdarch_intel_sde", None),
(Some(Mode::Std), "no_fp_fmt_parse", None),
(Some(Mode::Std), "no_global_oom_handling", None),
+ (Some(Mode::Std), "no_rc", None),
+ (Some(Mode::Std), "no_sync", None),
(Some(Mode::Std), "freebsd12", None),
(Some(Mode::Std), "backtrace_in_libstd", None),
/* Extra values not defined in the built-in targets yet, but used in std */
@@ -222,6 +230,8 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
// FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
(Some(Mode::Rustc), "span_locations", None),
(Some(Mode::ToolRustc), "span_locations", None),
+ // Can be passed in RUSTFLAGS to prevent direct syscalls in rustix.
+ (None, "rustix_use_libc", None),
];
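
The new no_rc, no_sync, and rustix_use_libc entries above only register cfg names with rustc's --check-cfg; using them looks like any other cfg gate. A hypothetical illustration (the gated function is made up, not standard library code):

// Hypothetical example; `uses_sync_primitives` does not exist in std.
#[cfg(not(no_sync))]
pub fn uses_sync_primitives() {
    // code that relies on `std::sync` would live here
}
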
/// A structure representing a Rust compiler.
@@ -273,7 +283,6 @@ pub struct Build {
bootstrap_out: PathBuf,
rust_info: channel::GitInfo,
cargo_info: channel::GitInfo,
- rls_info: channel::GitInfo,
rust_analyzer_info: channel::GitInfo,
clippy_info: channel::GitInfo,
miri_info: channel::GitInfo,
@@ -392,7 +401,7 @@ impl Build {
/// line and the filesystem `config`.
///
/// By default all build output will be placed in the current directory.
- pub fn new(config: Config) -> Build {
+ pub fn new(mut config: Config) -> Build {
let src = config.src.clone();
let out = config.out.clone();
@@ -412,7 +421,6 @@ impl Build {
let ignore_git = config.ignore_git;
let rust_info = channel::GitInfo::new(ignore_git, &src);
let cargo_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/cargo"));
- let rls_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/rls"));
let rust_analyzer_info =
channel::GitInfo::new(ignore_git, &src.join("src/tools/rust-analyzer"));
let clippy_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/clippy"));
@@ -454,19 +462,22 @@ impl Build {
.expect("failed to read src/version");
let version = version.trim();
- let bootstrap_out = if std::env::var("BOOTSTRAP_PYTHON").is_ok() {
- out.join("bootstrap").join("debug")
- } else {
- let workspace_target_dir = std::env::var("CARGO_TARGET_DIR")
- .map(PathBuf::from)
- .unwrap_or_else(|_| src.join("target"));
- let bootstrap_out = workspace_target_dir.join("debug");
- if !bootstrap_out.join("rustc").exists() && !cfg!(test) {
- // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
- panic!("run `cargo build --bins` before `cargo run`")
- }
- bootstrap_out
- };
+ let bootstrap_out = std::env::current_exe()
+ .expect("could not determine path to running process")
+ .parent()
+ .unwrap()
+ .to_path_buf();
+ if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) {
+ // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
+ panic!(
+ "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
+ bootstrap_out.display()
+ )
+ }
+
+ if rust_info.is_from_tarball() && config.description.is_none() {
+ config.description = Some("built from a source tarball".to_owned());
+ }
let mut build = Build {
initial_rustc: config.initial_rustc.clone(),
@@ -490,7 +501,6 @@ impl Build {
rust_info,
cargo_info,
- rls_info,
rust_analyzer_info,
clippy_info,
miri_info,
@@ -539,14 +549,8 @@ impl Build {
// Make sure we update these before gathering metadata so we don't get an error about missing
// Cargo.toml files.
- let rust_submodules = [
- "src/tools/rust-installer",
- "src/tools/cargo",
- "src/tools/rls",
- "src/tools/miri",
- "library/backtrace",
- "library/stdarch",
- ];
+ let rust_submodules =
+ ["src/tools/rust-installer", "src/tools/cargo", "library/backtrace", "library/stdarch"];
for s in rust_submodules {
build.update_submodule(Path::new(s));
}
@@ -574,7 +578,9 @@ impl Build {
// NOTE: The check for the empty directory is here because when running x.py the first time,
// the submodule won't be checked out. Check it out now so we can build it.
- if !channel::GitInfo::new(false, &absolute_path).is_git() && !dir_is_empty(&absolute_path) {
+ if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
+ && !dir_is_empty(&absolute_path)
+ {
return;
}
@@ -645,7 +651,7 @@ impl Build {
// Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
// Don't update the submodule unless it's already been cloned.
- if channel::GitInfo::new(false, submodule).is_git() {
+ if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() {
self.update_submodule(submodule);
}
}
@@ -671,6 +677,9 @@ impl Build {
return setup::setup(&self.config, *profile);
}
+ // Download rustfmt early so that it can be used in rust-analyzer configs.
+ let _ = &builder::Builder::new(&self).initial_rustfmt();
+
{
let builder = builder::Builder::new(&self);
if let Some(path) = builder.paths.get(0) {
@@ -825,6 +834,11 @@ impl Build {
self.out.join(&*target.triple).join("doc")
}
+ /// Output directory for all JSON-formatted documentation for a target
+ fn json_doc_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("json-doc")
+ }
+
fn test_out(&self, target: TargetSelection) -> PathBuf {
self.out.join(&*target.triple).join("test")
}
@@ -843,12 +857,13 @@ impl Build {
///
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
fn is_rust_llvm(&self, target: TargetSelection) -> bool {
- if self.config.llvm_from_ci && target == self.config.build {
- return true;
- }
-
match self.config.target_config.get(&target) {
- Some(ref c) => c.llvm_config.is_none(),
+ Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched,
+ Some(Target { llvm_config, .. }) => {
+ // If the user set llvm-config we assume Rust is not patched,
+ // but first check to see if it was configured by llvm-from-ci.
+ (self.config.llvm_from_ci && target == self.config.build) || llvm_config.is_none()
+ }
None => true,
}
}
@@ -1013,7 +1028,9 @@ impl Build {
/// Returns the number of parallel jobs that have been configured for this
/// build.
fn jobs(&self) -> u32 {
- self.config.jobs.unwrap_or_else(|| num_cpus::get() as u32)
+ self.config.jobs.unwrap_or_else(|| {
+ std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
+ })
}
fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> {
@@ -1250,7 +1267,7 @@ impl Build {
match &self.config.channel[..] {
"stable" => num.to_string(),
"beta" => {
- if self.rust_info.is_git() && !self.config.ignore_git {
+ if self.rust_info.is_managed_git_subrepository() && !self.config.ignore_git {
format!("{}-beta.{}", num, self.beta_prerelease_version())
} else {
format!("{}-beta", num)
@@ -1304,10 +1321,6 @@ impl Build {
self.package_vers(&self.version)
}
- fn llvm_link_tools_dynamically(&self, target: TargetSelection) -> bool {
- target.contains("linux-gnu") || target.contains("apple-darwin")
- }
-
/// Returns the `version` string associated with this compiler for Rust
/// itself.
///
@@ -1636,14 +1649,12 @@ fn chmod(_path: &Path, _perms: u32) {}
/// If code is not 0 (successful exit status), exit status is 101 (rust's default error code.)
/// If the test is running and code is an error code, it will cause a panic.
fn detail_exit(code: i32) -> ! {
- // Successful exit
- if code == 0 {
- std::process::exit(0);
- }
- if cfg!(test) {
+ // if in test and code is an error code, panic with status code provided
+ if cfg!(test) && code != 0 {
panic!("status code: {}", code);
} else {
- std::panic::resume_unwind(Box::new(code));
+        // otherwise, exit with the provided status code
+ std::process::exit(code);
}
}
diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in
index 5a1f2e704..9a08a7be0 100644
--- a/src/bootstrap/mk/Makefile.in
+++ b/src/bootstrap/mk/Makefile.in
@@ -66,16 +66,21 @@ TESTS_IN_2 := \
src/test/ui \
src/tools/linkchecker
+## MSVC native builders
+
+# these intentionally don't use `$(BOOTSTRAP)` so we can test the shebang on Windows
ci-subset-1:
- $(Q)$(BOOTSTRAP) test --stage 2 $(TESTS_IN_2:%=--exclude %)
+ $(Q)$(CFG_SRC_DIR)/x.py test --stage 2 $(TESTS_IN_2:%=--exclude %)
ci-subset-2:
- $(Q)$(BOOTSTRAP) test --stage 2 $(TESTS_IN_2)
+ $(Q)$(CFG_SRC_DIR)/x.ps1 test --stage 2 $(TESTS_IN_2)
+
+## MingW native builders
TESTS_IN_MINGW_2 := \
src/test/ui
ci-mingw-subset-1:
- $(Q)$(BOOTSTRAP) test --stage 2 $(TESTS_IN_MINGW_2:%=--exclude %)
+ $(Q)$(CFG_SRC_DIR)/x test --stage 2 $(TESTS_IN_MINGW_2:%=--exclude %)
ci-mingw-subset-2:
$(Q)$(BOOTSTRAP) test --stage 2 $(TESTS_IN_MINGW_2)
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 4d548dbb6..2f856c276 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -16,7 +16,9 @@ use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
+use crate::bolt::{instrument_with_bolt_inplace, optimize_library_with_bolt_inplace};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
+use crate::channel;
use crate::config::TargetSelection;
use crate::util::get_clang_cl_resource_dir;
use crate::util::{self, exe, output, program_out_of_date, t, up_to_date};
@@ -114,28 +116,30 @@ pub fn prebuilt_llvm_config(
Err(Meta { stamp, build_llvm_config, out_dir, root: root.into() })
}
-pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
- let config = &builder.config;
- if !config.llvm_from_ci {
- return;
- }
- let mut rev_list = config.git();
- rev_list.args(&[
- PathBuf::from("rev-list"),
- format!("--author={}", builder.config.stage0_metadata.config.git_merge_commit_email).into(),
- "-n1".into(),
- "--first-parent".into(),
- "HEAD".into(),
- "--".into(),
- builder.src.join("src/llvm-project"),
- builder.src.join("src/bootstrap/download-ci-llvm-stamp"),
- // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly`
- builder.src.join("src/version"),
- ]);
- let llvm_sha = output(&mut rev_list);
- let llvm_sha = llvm_sha.trim();
-
- if llvm_sha == "" {
+/// This retrieves the LLVM sha we *want* to use, according to git history.
+pub(crate) fn detect_llvm_sha(config: &crate::config::Config, is_git: bool) -> String {
+ let llvm_sha = if is_git {
+ let mut rev_list = config.git();
+ rev_list.args(&[
+ PathBuf::from("rev-list"),
+ format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(),
+ "-n1".into(),
+ "--first-parent".into(),
+ "HEAD".into(),
+ "--".into(),
+ config.src.join("src/llvm-project"),
+ config.src.join("src/bootstrap/download-ci-llvm-stamp"),
+ // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly`
+ config.src.join("src/version"),
+ ]);
+ output(&mut rev_list).trim().to_owned()
+ } else if let Some(info) = channel::read_commit_info_file(&config.src) {
+ info.sha.trim().to_owned()
+ } else {
+ "".to_owned()
+ };
+
+ if &llvm_sha == "" {
eprintln!("error: could not find commit hash for downloading LLVM");
eprintln!("help: maybe your repository history is too shallow?");
eprintln!("help: consider disabling `download-ci-llvm`");
@@ -143,8 +147,84 @@ pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
panic!();
}
+ llvm_sha
+}
+
+/// Returns whether the CI-found LLVM is currently usable.
+///
+/// This checks both that the build triple is a platform with CI artifacts at all,
+/// and whether the current HEAD matches the detected LLVM SHA;
+/// if it does, LLVM is reported as not available.
+pub(crate) fn is_ci_llvm_available(config: &crate::config::Config, asserts: bool) -> bool {
+ // This is currently all tier 1 targets and tier 2 targets with host tools
+ // (since others may not have CI artifacts)
+ // https://doc.rust-lang.org/rustc/platform-support.html#tier-1
+ let supported_platforms = [
+ // tier 1
+ "aarch64-unknown-linux-gnu",
+ "i686-pc-windows-gnu",
+ "i686-pc-windows-msvc",
+ "i686-unknown-linux-gnu",
+ "x86_64-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-pc-windows-gnu",
+ "x86_64-pc-windows-msvc",
+ // tier 2 with host tools
+ "aarch64-apple-darwin",
+ "aarch64-pc-windows-msvc",
+ "aarch64-unknown-linux-musl",
+ "arm-unknown-linux-gnueabi",
+ "arm-unknown-linux-gnueabihf",
+ "armv7-unknown-linux-gnueabihf",
+ "mips-unknown-linux-gnu",
+ "mips64-unknown-linux-gnuabi64",
+ "mips64el-unknown-linux-gnuabi64",
+ "mipsel-unknown-linux-gnu",
+ "powerpc-unknown-linux-gnu",
+ "powerpc64-unknown-linux-gnu",
+ "powerpc64le-unknown-linux-gnu",
+ "riscv64gc-unknown-linux-gnu",
+ "s390x-unknown-linux-gnu",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-illumos",
+ "x86_64-unknown-linux-musl",
+ "x86_64-unknown-netbsd",
+ ];
+ if !supported_platforms.contains(&&*config.build.triple) {
+ return false;
+ }
+
+ let triple = &*config.build.triple;
+ if (triple == "aarch64-unknown-linux-gnu" || triple.contains("i686")) && asserts {
+ // No alt builder for aarch64-unknown-linux-gnu today.
+ return false;
+ }
+
+ if crate::util::CiEnv::is_ci() {
+ // We assume we have access to git, so it's okay to unconditionally pass
+ // `true` here.
+ let llvm_sha = detect_llvm_sha(config, true);
+ let head_sha = output(config.git().arg("rev-parse").arg("HEAD"));
+ let head_sha = head_sha.trim();
+ if llvm_sha == head_sha {
+ eprintln!(
+ "Detected LLVM as non-available: running in CI and modified LLVM in this change"
+ );
+ return false;
+ }
+ }
+
+ true
+}
+
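
The CI-availability check above refuses to reuse prebuilt LLVM when running in CI and the change under test has itself modified LLVM, i.e. when HEAD equals the detected LLVM sha. An isolated sketch of that comparison, assuming git is on PATH (bootstrap goes through its configured git wrapper instead):

use std::process::Command;

// Returns true when HEAD is the same commit the CI LLVM would be downloaded for.
fn head_matches_llvm_sha(llvm_sha: &str) -> bool {
    let out = Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .expect("failed to run git");
    String::from_utf8_lossy(&out.stdout).trim() == llvm_sha
}
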
+pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
+ let config = &builder.config;
+ if !config.llvm_from_ci {
+ return;
+ }
let llvm_root = config.ci_llvm_root();
let llvm_stamp = llvm_root.join(".llvm-stamp");
+ let llvm_sha = detect_llvm_sha(&config, builder.rust_info.is_managed_git_subrepository());
let key = format!("{}{}", llvm_sha, config.llvm_assertions);
if program_out_of_date(&llvm_stamp, &key) && !config.dry_run {
download_ci_llvm(builder, &llvm_sha);
@@ -189,7 +269,7 @@ fn download_ci_llvm(builder: &Builder<'_>, llvm_sha: &str) {
} else {
&builder.config.stage0_metadata.config.artifacts_server
};
- let channel = builder.config.artifact_channel(llvm_sha);
+ let channel = builder.config.artifact_channel(builder, llvm_sha);
let filename = format!("rust-dev-{}-{}.tar.xz", channel, builder.build.build.triple);
let tarball = rustc_cache.join(&filename);
if !tarball.exists() {
@@ -324,6 +404,15 @@ impl Step for Llvm {
if let Some(path) = builder.config.llvm_profile_use.as_ref() {
cfg.define("LLVM_PROFDATA_FILE", &path);
}
+ if builder.config.llvm_bolt_profile_generate
+ || builder.config.llvm_bolt_profile_use.is_some()
+ {
+ // Relocations are required for BOLT to work.
+ ldflags.push_all("-Wl,-q");
+ }
+
+ // Disable zstd to avoid a dependency on libzstd.so.
+ cfg.define("LLVM_ENABLE_ZSTD", "OFF");
if target != "aarch64-apple-darwin" && !target.contains("windows") {
cfg.define("LLVM_ENABLE_ZLIB", "ON");
@@ -349,21 +438,17 @@ impl Step for Llvm {
// which saves both memory during parallel links and overall disk space
// for the tools. We don't do this on every platform as it doesn't work
// equally well everywhere.
- //
- // If we're not linking rustc to a dynamic LLVM, though, then don't link
- // tools to it.
- let llvm_link_shared =
- builder.llvm_link_tools_dynamically(target) && builder.llvm_link_shared();
- if llvm_link_shared {
+ if builder.llvm_link_shared() {
cfg.define("LLVM_LINK_LLVM_DYLIB", "ON");
}
- if target.starts_with("riscv") && !target.contains("freebsd") {
+ if target.starts_with("riscv") && !target.contains("freebsd") && !target.contains("openbsd")
+ {
// RISC-V GCC erroneously requires linking against
// `libatomic` when using 1-byte and 2-byte C++
// atomics but the LLVM build system check cannot
// detect this. Therefore it is set manually here.
- // FreeBSD uses Clang as its system compiler and
+ // Some BSD uses Clang as its system compiler and
// provides no libatomic in its base system so does
// not want this.
ldflags.exe.push(" -latomic");
@@ -419,18 +504,18 @@ impl Step for Llvm {
// https://llvm.org/docs/HowToCrossCompileLLVM.html
if target != builder.config.build {
- builder.ensure(Llvm { target: builder.config.build });
- // FIXME: if the llvm root for the build triple is overridden then we
- // should use llvm-tblgen from there, also should verify that it
- // actually exists most of the time in normal installs of LLVM.
- let host_bin = builder.llvm_out(builder.config.build).join("bin");
- cfg.define("LLVM_TABLEGEN", host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION));
- // LLVM_NM is required for cross compiling using MSVC
- cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION));
- cfg.define(
- "LLVM_CONFIG_PATH",
- host_bin.join("llvm-config").with_extension(EXE_EXTENSION),
- );
+ let llvm_config = builder.ensure(Llvm { target: builder.config.build });
+ if !builder.config.dry_run {
+ let llvm_bindir = output(Command::new(&llvm_config).arg("--bindir"));
+ let host_bin = Path::new(llvm_bindir.trim());
+ cfg.define(
+ "LLVM_TABLEGEN",
+ host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION),
+ );
+ // LLVM_NM is required for cross compiling using MSVC
+ cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION));
+ }
+ cfg.define("LLVM_CONFIG_PATH", llvm_config);
if builder.config.llvm_clang {
let build_bin = builder.llvm_out(builder.config.build).join("build").join("bin");
let clang_tblgen = build_bin.join("clang-tblgen").with_extension(EXE_EXTENSION);
@@ -478,7 +563,7 @@ impl Step for Llvm {
// libLLVM.dylib will be built. However, llvm-config will still look
// for a versioned path like libLLVM-14.dylib. Manually create a symbolic
// link to make llvm-config happy.
- if llvm_link_shared && target.contains("apple-darwin") {
+ if builder.llvm_link_shared() && target.contains("apple-darwin") {
let mut cmd = Command::new(&build_llvm_config);
let version = output(cmd.arg("--version"));
let major = version.split('.').next().unwrap();
@@ -493,12 +578,34 @@ impl Step for Llvm {
}
}
+ // After LLVM is built, we modify (instrument or optimize) the libLLVM.so library file
+ // in place. This is fine, because currently we do not support incrementally rebuilding
+ // LLVM after a configuration change, so to rebuild it the build files have to be removed,
+ // which will also remove these modified files.
+ if builder.config.llvm_bolt_profile_generate {
+ instrument_with_bolt_inplace(&get_built_llvm_lib_path(&build_llvm_config));
+ }
+ if let Some(path) = &builder.config.llvm_bolt_profile_use {
+ optimize_library_with_bolt_inplace(
+ &get_built_llvm_lib_path(&build_llvm_config),
+ &Path::new(path),
+ );
+ }
+
t!(stamp.write());
build_llvm_config
}
}
+/// Returns path to a built LLVM library (libLLVM.so).
+/// Assumes that we have built LLVM into a single library file.
+fn get_built_llvm_lib_path(llvm_config_path: &Path) -> PathBuf {
+ let mut cmd = Command::new(llvm_config_path);
+ cmd.arg("--libfiles");
+ PathBuf::from(output(&mut cmd).trim())
+}
+
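
Querying --libfiles to locate the dylib assumes LLVM was built into a single library file, as the doc comment above notes. A standalone equivalent of the query, assuming an llvm-config on PATH (the real helper is handed the path to the freshly built llvm-config):

use std::path::PathBuf;
use std::process::Command;

// Ask llvm-config for the (single) built library file, e.g. libLLVM.so.
fn built_llvm_lib_path() -> PathBuf {
    let out = Command::new("llvm-config")
        .arg("--libfiles")
        .output()
        .expect("failed to run llvm-config");
    PathBuf::from(String::from_utf8_lossy(&out.stdout).trim())
}
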
fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
if !builder.config.llvm_version_check {
return;
@@ -512,11 +619,11 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
let version = output(cmd.arg("--version"));
let mut parts = version.split('.').take(2).filter_map(|s| s.parse::<u32>().ok());
if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) {
- if major >= 12 {
+ if major >= 13 {
return;
}
}
- panic!("\n\nbad LLVM version: {}, need >=12.0\n\n", version)
+ panic!("\n\nbad LLVM version: {}, need >=13.0\n\n", version)
}
fn configure_cmake(
@@ -563,7 +670,7 @@ fn configure_cmake(
if target.contains("darwin") {
// Make sure that CMake does not build universal binaries on macOS.
- // Explicitly specifiy the one single target architecture.
+ // Explicitly specify the one single target architecture.
if target.starts_with("aarch64") {
// macOS uses a different name for building arm64
cfg.define("CMAKE_OSX_ARCHITECTURES", "arm64");
diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs
index 25abe7a72..511872903 100644
--- a/src/bootstrap/run.rs
+++ b/src/bootstrap/run.rs
@@ -103,3 +103,25 @@ impl Step for BumpStage0 {
builder.run(&mut cmd);
}
}
+
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct ReplaceVersionPlaceholder;
+
+impl Step for ReplaceVersionPlaceholder {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/replace-version-placeholder")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(ReplaceVersionPlaceholder);
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let mut cmd = builder.tool_cmd(Tool::ReplaceVersionPlaceholder);
+ cmd.arg(&builder.src);
+ builder.run(&mut cmd);
+ }
+}
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index cae41286f..e90551725 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -74,7 +74,7 @@ pub fn check(build: &mut Build) {
let mut cmd_finder = Finder::new();
// If we've got a git directory we're gonna need git to update
// submodules and learn about various other aspects.
- if build.rust_info.is_git() {
+ if build.rust_info.is_managed_git_subrepository() {
cmd_finder.must_have("git");
}
diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs
index a5a39a5a3..eb7da1bda 100644
--- a/src/bootstrap/setup.rs
+++ b/src/bootstrap/setup.rs
@@ -11,7 +11,7 @@ use std::{
io::{self, Write},
};
-#[derive(Clone, Copy, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Profile {
Compiler,
Codegen,
diff --git a/src/bootstrap/tarball.rs b/src/bootstrap/tarball.rs
index 7b0c029c1..d999b6c15 100644
--- a/src/bootstrap/tarball.rs
+++ b/src/bootstrap/tarball.rs
@@ -4,6 +4,7 @@ use std::{
};
use crate::builder::Builder;
+use crate::channel;
use crate::util::t;
#[derive(Copy, Clone)]
@@ -50,11 +51,7 @@ impl OverlayKind {
OverlayKind::RustDemangler => {
&["src/tools/rust-demangler/README.md", "LICENSE-APACHE", "LICENSE-MIT"]
}
- OverlayKind::RLS => &[
- "src/tools/rls/README.md",
- "src/tools/rls/LICENSE-APACHE",
- "src/tools/rls/LICENSE-MIT",
- ],
+ OverlayKind::RLS => &["src/tools/rls/README.md", "LICENSE-APACHE", "LICENSE-MIT"],
OverlayKind::RustAnalyzer => &[
"src/tools/rust-analyzer/README.md",
"src/tools/rust-analyzer/LICENSE-APACHE",
@@ -78,7 +75,7 @@ impl OverlayKind {
OverlayKind::Rustfmt => {
builder.rustfmt_info.version(builder, &builder.release_num("rustfmt"))
}
- OverlayKind::RLS => builder.rls_info.version(builder, &builder.release_num("rls")),
+ OverlayKind::RLS => builder.release(&builder.release_num("rls")),
OverlayKind::RustAnalyzer => builder
.rust_analyzer_info
.version(builder, &builder.release_num("rust-analyzer/crates/rust-analyzer")),
@@ -301,8 +298,9 @@ impl<'a> Tarball<'a> {
fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball {
t!(std::fs::create_dir_all(&self.overlay_dir));
self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder));
- if let Some(sha) = self.builder.rust_sha() {
- self.builder.create(&self.overlay_dir.join("git-commit-hash"), &sha);
+ if let Some(info) = self.builder.rust_info.info() {
+ channel::write_commit_hash_file(&self.overlay_dir, &info.sha);
+ channel::write_commit_info_file(&self.overlay_dir, info);
}
for file in self.overlay.legal_and_readme() {
self.builder.install(&self.builder.src.join(file), &self.overlay_dir, 0o644);
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index c0fa8c9ac..791c35c36 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -23,7 +23,7 @@ use crate::toolstate::ToolState;
use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t};
use crate::{envify, CLang, DocTests, GitRepo, Mode};
-const ADB_TEST_DIR: &str = "/data/tmp/work";
+const ADB_TEST_DIR: &str = "/data/local/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
@@ -300,57 +300,6 @@ impl Step for Cargo {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rls {
- stage: u32,
- host: TargetSelection,
-}
-
-impl Step for Rls {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.path("src/tools/rls")
- }
-
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Rls { stage: run.builder.top_stage, host: run.target });
- }
-
- /// Runs `cargo test` for the rls.
- fn run(self, builder: &Builder<'_>) {
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- let build_result =
- builder.ensure(tool::Rls { compiler, target: self.host, extra_features: Vec::new() });
- if build_result.is_none() {
- eprintln!("failed to test rls: could not build");
- return;
- }
-
- let mut cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolRustc,
- host,
- "test",
- "src/tools/rls",
- SourceType::Submodule,
- &[],
- );
-
- cargo.add_rustc_lib_path(builder, compiler);
- cargo.arg("--").args(builder.config.cmd.test_args());
-
- if try_run(builder, &mut cargo.into()) {
- builder.save_toolstate("rls", ToolState::TestPass);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct RustAnalyzer {
stage: u32,
host: TargetSelection,
@@ -512,135 +461,158 @@ impl Step for RustDemangler {
pub struct Miri {
stage: u32,
host: TargetSelection,
+ target: TargetSelection,
}
impl Step for Miri {
type Output = ();
- const ONLY_HOSTS: bool = true;
+ const ONLY_HOSTS: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/miri")
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Miri { stage: run.builder.top_stage, host: run.target });
+ run.builder.ensure(Miri {
+ stage: run.builder.top_stage,
+ host: run.build_triple(),
+ target: run.target,
+ });
}
/// Runs `cargo test` for miri.
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let host = self.host;
+ let target = self.target;
let compiler = builder.compiler(stage, host);
// We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri.
// Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage.
let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host);
- let miri =
- builder.ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() });
- let cargo_miri = builder.ensure(tool::CargoMiri {
- compiler,
- target: self.host,
- extra_features: Vec::new(),
- });
+ let miri = builder
+ .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
+ let _cargo_miri = builder
+ .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() })
+ .expect("in-tree tool");
// The stdlib we need might be at a different stage. And just asking for the
// sysroot does not seem to populate it, so we do that first.
builder.ensure(compile::Std::new(compiler_std, host));
let sysroot = builder.sysroot(compiler_std);
- if let (Some(miri), Some(_cargo_miri)) = (miri, cargo_miri) {
- let mut cargo =
- builder.cargo(compiler, Mode::ToolRustc, SourceType::Submodule, host, "install");
- cargo.arg("xargo");
- // Configure `cargo install` path. cargo adds a `bin/`.
- cargo.env("CARGO_INSTALL_ROOT", &builder.out);
-
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
- // # Run `cargo miri setup`.
- let mut cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolRustc,
- host,
- "run",
- "src/tools/miri/cargo-miri",
- SourceType::Submodule,
- &[],
- );
- cargo.add_rustc_lib_path(builder, compiler);
- cargo.arg("--").arg("miri").arg("setup");
-
- // Tell `cargo miri setup` where to find the sources.
- cargo.env("XARGO_RUST_SRC", builder.src.join("library"));
- // Tell it where to find Miri.
- cargo.env("MIRI", &miri);
- // Debug things.
- cargo.env("RUST_BACKTRACE", "1");
- // Let cargo-miri know where xargo ended up.
- cargo.env("XARGO_CHECK", builder.out.join("bin").join("xargo-check"));
-
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
+ // # Run `cargo miri setup` for the given target.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "run",
+ "src/tools/miri/cargo-miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
+ cargo.arg("--").arg("miri").arg("setup");
+ cargo.arg("--target").arg(target.rustc_target_arg());
+
+ // Tell `cargo miri setup` where to find the sources.
+ cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
+ // Tell it where to find Miri.
+ cargo.env("MIRI", &miri);
+ // Debug things.
+ cargo.env("RUST_BACKTRACE", "1");
+
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
+
+ // # Determine where Miri put its sysroot.
+ // To this end, we run `cargo miri setup --print-sysroot` and capture the output.
+ // (We do this separately from the above so that when the setup actually
+ // happens we get some output.)
+ // We re-use the `cargo` from above.
+ cargo.arg("--print-sysroot");
+
+ // FIXME: Is there a way in which we can re-use the usual `run` helpers?
+ let miri_sysroot = if builder.config.dry_run {
+ String::new()
+ } else {
+ builder.verbose(&format!("running: {:?}", cargo));
+ let out =
+ cargo.output().expect("We already ran `cargo miri setup` before and that worked");
+ assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
+ // Output is "<sysroot>\n".
+ let stdout = String::from_utf8(out.stdout)
+ .expect("`cargo miri setup` stdout is not valid UTF-8");
+ let sysroot = stdout.trim_end();
+ builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
+ sysroot.to_owned()
+ };
- // # Determine where Miri put its sysroot.
- // To this end, we run `cargo miri setup --print-sysroot` and capture the output.
- // (We do this separately from the above so that when the setup actually
- // happens we get some output.)
- // We re-use the `cargo` from above.
- cargo.arg("--print-sysroot");
-
- // FIXME: Is there a way in which we can re-use the usual `run` helpers?
- let miri_sysroot = if builder.config.dry_run {
- String::new()
- } else {
- builder.verbose(&format!("running: {:?}", cargo));
- let out = cargo
- .output()
- .expect("We already ran `cargo miri setup` before and that worked");
- assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
- // Output is "<sysroot>\n".
- let stdout = String::from_utf8(out.stdout)
- .expect("`cargo miri setup` stdout is not valid UTF-8");
- let sysroot = stdout.trim_end();
- builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
- sysroot.to_owned()
- };
-
- // # Run `cargo test`.
- let mut cargo = tool::prepare_tool_cargo(
- builder,
- compiler,
- Mode::ToolRustc,
- host,
- "test",
- "src/tools/miri",
- SourceType::Submodule,
- &[],
- );
- cargo.add_rustc_lib_path(builder, compiler);
+ // # Run `cargo test`.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "test",
+ "src/tools/miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
- // miri tests need to know about the stage sysroot
- cargo.env("MIRI_SYSROOT", miri_sysroot);
- cargo.env("MIRI_HOST_SYSROOT", sysroot);
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- cargo.env("MIRI", miri);
+ // miri tests need to know about the stage sysroot
+ cargo.env("MIRI_SYSROOT", &miri_sysroot);
+ cargo.env("MIRI_HOST_SYSROOT", sysroot);
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI", &miri);
+ // propagate --bless
+ if builder.config.cmd.bless() {
+ cargo.env("MIRI_BLESS", "Gesundheit");
+ }
- cargo.arg("--").args(builder.config.cmd.test_args());
+ // Set the target.
+ cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg());
+ // Forward test filters.
+ cargo.arg("--").args(builder.config.cmd.test_args());
- let mut cargo = Command::from(cargo);
- if !try_run(builder, &mut cargo) {
- return;
- }
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
- // # Done!
- builder.save_toolstate("miri", ToolState::TestPass);
- } else {
- eprintln!("failed to test miri: could not build");
- }
+ // # Run `cargo miri test`.
+ // This is just a smoke test (Miri's own CI invokes this in a bunch of different ways and ensures
+ // that we get the desired output), but that is sufficient to make sure that the libtest harness
+ // itself executes properly under Miri.
+ let mut cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolRustc,
+ host,
+ "run",
+ "src/tools/miri/cargo-miri",
+ SourceType::Submodule,
+ &[],
+ );
+ cargo.add_rustc_lib_path(builder, compiler);
+ cargo.arg("--").arg("miri").arg("test");
+ cargo
+ .arg("--manifest-path")
+ .arg(builder.src.join("src/tools/miri/test-cargo-miri/Cargo.toml"));
+ cargo.arg("--target").arg(target.rustc_target_arg());
+ cargo.arg("--tests"); // don't run doctests, they are too confused by the staging
+ cargo.arg("--").args(builder.config.cmd.test_args());
+
+ // Tell `cargo miri` where to find things.
+ cargo.env("MIRI_SYSROOT", &miri_sysroot);
+ cargo.env("MIRI_HOST_SYSROOT", sysroot);
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI", &miri);
+ // Debug things.
+ cargo.env("RUST_BACKTRACE", "1");
+
+ let mut cargo = Command::from(cargo);
+ builder.run(&mut cargo);
}
}
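
The rewritten Miri step above runs `cargo miri setup` twice: once for its side effects (with normal output), then again with `--print-sysroot` so the sysroot path can be captured from stdout. A hedged sketch of that capture pattern in isolation, using a plain `Command` and `rustc --print sysroot` as a stand-in for the configured cargo-miri invocation (the real step goes through the bootstrap `Builder` helpers and a dry-run check):

    use std::process::Command;

    // Sketch: capture a single-line path printed by a child process.
    fn capture_single_line(mut cmd: Command) -> String {
        let out = cmd.output().expect("failed to run command");
        assert!(out.status.success(), "command returned a non-zero exit code");
        // Output is "<path>\n", so trim the trailing newline.
        let stdout = String::from_utf8(out.stdout).expect("stdout is not valid UTF-8");
        stdout.trim_end().to_owned()
    }

    fn main() {
        let mut cmd = Command::new("rustc");
        cmd.arg("--print").arg("sysroot");
        println!("sysroot: {}", capture_single_line(cmd));
    }
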
@@ -901,7 +873,10 @@ fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String>
.output()
.map(|output| String::from_utf8_lossy(&output.stdout).into_owned())
.unwrap_or(String::new());
- lines.lines().find_map(|l| l.split(":browser-ui-test@").skip(1).next()).map(|v| v.to_owned())
+ lines
+ .lines()
+ .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
+ .map(|v| v.to_owned())
}
fn get_browser_ui_test_version(npm: &Path) -> Option<String> {
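
The parsing change above replaces a split on the literal ":browser-ui-test@" marker with a split on ':' followed by `strip_prefix`, which rejects lines where the package name merely appears somewhere after the colon. A small sketch of the new logic applied to a made-up npm output line (the exact shape of real npm output may differ):

    fn find_version(lines: &str) -> Option<String> {
        lines
            .lines()
            .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
            .map(|v| v.to_owned())
    }

    fn main() {
        // Hypothetical npm list output line, for illustration only.
        let sample = "/tmp/node_modules/browser-ui-test:browser-ui-test@0.14.1";
        assert_eq!(find_version(sample).as_deref(), Some("0.14.1"));
        println!("parsed version: {:?}", find_version(sample));
    }
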
@@ -920,6 +895,11 @@ fn compare_browser_ui_test_version(installed_version: &str, src: &Path) {
one used in the CI (`{}`)",
installed_version, v
);
+ eprintln!(
+ "You can install this version using `npm update browser-ui-test` or by using \
+ `npm install browser-ui-test@{}`",
+ v,
+ );
}
}
Err(e) => eprintln!("Couldn't find the CI browser-ui-test version: {:?}", e),
@@ -1381,6 +1361,8 @@ note: if you're sure you want to do this, please open an issue as to why. In the
let json_compiler = compiler.with_stage(0);
cmd.arg("--jsondocck-path")
.arg(builder.ensure(tool::JsonDocCk { compiler: json_compiler, target }));
+ cmd.arg("--jsondoclint-path")
+ .arg(builder.ensure(tool::JsonDocLint { compiler: json_compiler, target }));
}
if mode == "run-make" {
@@ -1437,7 +1419,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
}
let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] };
flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests));
- flags.push(builder.config.cmd.rustc_args().join(" "));
+ flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string()));
if let Some(linker) = builder.linker(target) {
cmd.arg("--linker").arg(linker);
@@ -1446,12 +1428,16 @@ note: if you're sure you want to do this, please open an issue as to why. In the
let mut hostflags = flags.clone();
hostflags.push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display()));
hostflags.extend(builder.lld_flags(compiler.host));
- cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+ for flag in hostflags {
+ cmd.arg("--host-rustcflags").arg(flag);
+ }
let mut targetflags = flags;
targetflags.push(format!("-Lnative={}", builder.test_helpers_out(target).display()));
targetflags.extend(builder.lld_flags(target));
- cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+ for flag in targetflags {
+ cmd.arg("--target-rustcflags").arg(flag);
+ }
cmd.arg("--python").arg(builder.python());
@@ -1487,6 +1473,11 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--run-clang-based-tests-with").arg(clang_exe);
}
+ for exclude in &builder.config.exclude {
+ cmd.arg("--skip");
+ cmd.arg(&exclude.path);
+ }
+
// Get paths from cmd args
let paths = match &builder.config.cmd {
Subcommand::Test { ref paths, .. } => &paths[..],
@@ -1501,7 +1492,15 @@ note: if you're sure you want to do this, please open an issue as to why. In the
test_args.append(&mut builder.config.cmd.test_args());
- cmd.args(&test_args);
+ // On Windows, replace forward slashes in test-args by backslashes
+ // so the correct filters are passed to libtest
+ if cfg!(windows) {
+ let test_args_win: Vec<String> =
+ test_args.iter().map(|s| s.replace("/", "\\")).collect();
+ cmd.args(&test_args_win);
+ } else {
+ cmd.args(&test_args);
+ }
if builder.is_verbose() {
cmd.arg("--verbose");
@@ -2509,6 +2508,43 @@ impl Step for TierCheck {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ReplacePlaceholderTest;
+
+impl Step for ReplacePlaceholderTest {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ /// Ensure the version placeholder replacement tool builds
+ fn run(self, builder: &Builder<'_>) {
+ builder.info("build check for version replacement placeholder");
+
+ // Test the version placeholder replacement tool itself.
+ let bootstrap_host = builder.config.build;
+ let compiler = builder.compiler(0, bootstrap_host);
+ let cargo = tool::prepare_tool_cargo(
+ builder,
+ compiler,
+ Mode::ToolBootstrap,
+ bootstrap_host,
+ "test",
+ "src/tools/replace-version-placeholder",
+ SourceType::InTree,
+ &[],
+ );
+ try_run(builder, &mut cargo.into());
+ }
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/tools/replace-version-placeholder")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Self);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct LintDocs {
pub compiler: Compiler,
pub target: TargetSelection,
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index 06fa5039f..eec74b267 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -376,8 +376,10 @@ bootstrap_tool!(
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";
LintDocs, "src/tools/lint-docs", "lint-docs";
JsonDocCk, "src/tools/jsondocck", "jsondocck";
+ JsonDocLint, "src/tools/jsondoclint", "jsondoclint";
HtmlChecker, "src/tools/html-checker", "html-checker";
BumpStage0, "src/tools/bump-stage0", "bump-stage0";
+ ReplaceVersionPlaceholder, "src/tools/replace-version-placeholder", "replace-version-placeholder";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
@@ -696,7 +698,7 @@ pub struct RustAnalyzer {
impl Step for RustAnalyzer {
type Output = Option<PathBuf>;
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = false;
+ const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
@@ -740,18 +742,22 @@ pub struct RustAnalyzerProcMacroSrv {
impl Step for RustAnalyzerProcMacroSrv {
type Output = Option<PathBuf>;
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = false;
+ const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.path("src/tools/rust-analyzer").default_condition(
- builder.config.extended
- && builder
- .config
- .tools
- .as_ref()
- .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")),
- )
+
+ // Allow building `rust-analyzer-proc-macro-srv` both as part of `rust-analyzer` and as a stand-alone tool.
+ run.path("src/tools/rust-analyzer")
+ .path("src/tools/rust-analyzer/crates/proc-macro-srv-cli")
+ .default_condition(
+ builder.config.extended
+ && builder.config.tools.as_ref().map_or(true, |tools| {
+ tools.iter().any(|tool| {
+ tool == "rust-analyzer" || tool == "rust-analyzer-proc-macro-srv"
+ })
+ }),
+ )
}
fn make_run(run: RunConfig<'_>) {
@@ -762,7 +768,7 @@ impl Step for RustAnalyzerProcMacroSrv {
}
fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
- builder.ensure(ToolBuild {
+ let path = builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: "rust-analyzer-proc-macro-srv",
@@ -771,19 +777,25 @@ impl Step for RustAnalyzerProcMacroSrv {
extra_features: vec!["proc-macro-srv/sysroot-abi".to_owned()],
is_optional_tool: false,
source_type: SourceType::InTree,
- })
+ })?;
+
+ // Copy `rust-analyzer-proc-macro-srv` to `<sysroot>/libexec/`
+ // so that r-a can use it.
+ let libexec_path = builder.sysroot(self.compiler).join("libexec");
+ t!(fs::create_dir_all(&libexec_path));
+ builder.copy(&path, &libexec_path.join("rust-analyzer-proc-macro-srv"));
+
+ Some(path)
}
}
macro_rules! tool_extended {
(($sel:ident, $builder:ident),
$($name:ident,
- $toolstate:ident,
$path:expr,
$tool_name:expr,
stable = $stable:expr,
$(in_tree = $in_tree:expr,)?
- $(submodule = $submodule:literal,)?
$(tool_std = $tool_std:literal,)?
$extra_deps:block;)+) => {
$(
@@ -828,7 +840,6 @@ macro_rules! tool_extended {
#[allow(unused_mut)]
fn run(mut $sel, $builder: &Builder<'_>) -> Option<PathBuf> {
$extra_deps
- $( $builder.update_submodule(&Path::new("src").join("tools").join($submodule)); )?
$builder.ensure(ToolBuild {
compiler: $sel.compiler,
target: $sel.target,
@@ -854,24 +865,17 @@ macro_rules! tool_extended {
// Note: Most submodule updates for tools are handled by bootstrap.py, since they're needed just to
// invoke Cargo to build bootstrap. See the comment there for more details.
tool_extended!((self, builder),
- Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", stable=true, in_tree=true, {};
- CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", stable=true, in_tree=true, {};
- Clippy, clippy, "src/tools/clippy", "clippy-driver", stable=true, in_tree=true, {};
- Miri, miri, "src/tools/miri", "miri", stable=false, {};
- CargoMiri, miri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, {};
- Rls, rls, "src/tools/rls", "rls", stable=true, {
- builder.ensure(Clippy {
- compiler: self.compiler,
- target: self.target,
- extra_features: Vec::new(),
- });
- self.extra_features.push("clippy".to_owned());
- };
+ Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true, in_tree=true, {};
+ CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true, in_tree=true, {};
+ Clippy, "src/tools/clippy", "clippy-driver", stable=true, in_tree=true, {};
+ Miri, "src/tools/miri", "miri", stable=false, in_tree=true, {};
+ CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, in_tree=true, {};
// FIXME: tool_std is not quite right, we shouldn't allow nightly features.
// But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0,
// and this is close enough for now.
- RustDemangler, rust_demangler, "src/tools/rust-demangler", "rust-demangler", stable=false, in_tree=true, tool_std=true, {};
- Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, in_tree=true, {};
+ Rls, "src/tools/rls", "rls", stable=true, in_tree=true, tool_std=true, {};
+ RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, in_tree=true, tool_std=true, {};
+ Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, in_tree=true, {};
);
impl<'a> Builder<'a> {
diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs
index 2cfeae7dc..1a1774432 100644
--- a/src/bootstrap/toolstate.rs
+++ b/src/bootstrap/toolstate.rs
@@ -69,7 +69,6 @@ static STABLE_TOOLS: &[(&str, &str)] = &[
("reference", "src/doc/reference"),
("rust-by-example", "src/doc/rust-by-example"),
("edition-guide", "src/doc/edition-guide"),
- ("rls", "src/tools/rls"),
];
// These tools are permitted to not build on the beta/stable channels.
@@ -78,7 +77,6 @@ static STABLE_TOOLS: &[(&str, &str)] = &[
// though, as otherwise we will be unable to file an issue if they start
// failing.
static NIGHTLY_TOOLS: &[(&str, &str)] = &[
- ("miri", "src/tools/miri"),
("embedded-book", "src/doc/embedded-book"),
// ("rustc-dev-guide", "src/doc/rustc-dev-guide"),
];
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 1895e2901..0ebabbd5c 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -197,9 +197,11 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
ptr::null_mut(),
);
- let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE as usize];
- let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
- let buf = &mut (*db).ReparseTarget as *mut u16;
+ #[repr(C, align(8))]
+ struct Align8<T>(T);
+ let mut data = Align8([0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE as usize]);
+ let db = data.0.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
+ let buf = core::ptr::addr_of_mut!((*db).ReparseTarget) as *mut u16;
let mut i = 0;
// FIXME: this conversion is very hacky
let v = br"\??\";
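
The util.rs hunk above wraps the raw reparse-point byte buffer in an `#[repr(C, align(8))]` newtype so the bytes are suitably aligned before the pointer is reinterpreted as a `REPARSE_MOUNTPOINT_DATA_BUFFER`, and it takes the field address with `addr_of_mut!` instead of forming an intermediate reference. A portable sketch of the alignment trick itself, with no Windows APIs involved:

    use std::mem::align_of_val;

    // Same pattern as in the hunk: a wrapper that raises the alignment of
    // whatever it contains to at least 8 bytes.
    #[repr(C, align(8))]
    struct Align8<T>(T);

    fn main() {
        let plain = [0u8; 32];
        let aligned = Align8([0u8; 32]);

        // A bare byte array only requires 1-byte alignment ...
        assert_eq!(align_of_val(&plain), 1);
        // ... while the wrapper guarantees 8-byte alignment, at least as strict
        // as any #[repr(C)] struct the buffer might be cast to here.
        assert_eq!(align_of_val(&aligned), 8);
        println!("plain: {}, aligned: {}", align_of_val(&plain), align_of_val(&aligned));
    }
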
@@ -219,7 +221,7 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
let res = DeviceIoControl(
h as *mut _,
FSCTL_SET_REPARSE_POINT,
- data.as_ptr() as *mut _,
+ db.cast(),
(*db).ReparseDataLength + 8,
ptr::null_mut(),
0,
@@ -258,6 +260,10 @@ impl CiEnv {
}
}
+ pub fn is_ci() -> bool {
+ Self::current() != CiEnv::None
+ }
+
/// If in a CI environment, forces the command to run with colors.
pub fn force_coloring_in_ci(self, cmd: &mut Command) {
if self != CiEnv::None {