summaryrefslogtreecommitdiffstats
path: root/src/bootstrap
diff options
context:
space:
mode:
Diffstat (limited to 'src/bootstrap')
-rw-r--r--src/bootstrap/CHANGELOG.md1
-rw-r--r--src/bootstrap/Cargo.lock17
-rw-r--r--src/bootstrap/Cargo.toml1
-rw-r--r--src/bootstrap/bin/main.rs61
-rw-r--r--src/bootstrap/bootstrap.py172
-rw-r--r--src/bootstrap/bootstrap_test.py103
-rw-r--r--src/bootstrap/builder.rs122
-rw-r--r--src/bootstrap/builder/tests.rs51
-rw-r--r--src/bootstrap/cc_detect.rs104
-rw-r--r--src/bootstrap/check.rs81
-rw-r--r--src/bootstrap/clean.rs4
-rw-r--r--src/bootstrap/compile.rs192
-rw-r--r--src/bootstrap/config.rs193
-rw-r--r--src/bootstrap/config/tests.rs34
-rwxr-xr-xsrc/bootstrap/configure.py31
-rw-r--r--src/bootstrap/defaults/config.dist.toml (renamed from src/bootstrap/defaults/config.user.toml)3
-rw-r--r--src/bootstrap/defaults/config.tools.toml2
-rw-r--r--src/bootstrap/dist.rs101
-rw-r--r--src/bootstrap/doc.rs300
-rw-r--r--src/bootstrap/download-ci-llvm-stamp2
-rw-r--r--src/bootstrap/download.rs60
-rw-r--r--src/bootstrap/flags.rs6
-rw-r--r--src/bootstrap/format.rs4
-rw-r--r--src/bootstrap/lib.rs137
-rw-r--r--src/bootstrap/llvm.rs46
-rw-r--r--src/bootstrap/metadata.rs3
-rw-r--r--src/bootstrap/mk/Makefile.in29
-rw-r--r--src/bootstrap/render_tests.rs47
-rw-r--r--src/bootstrap/run.rs11
-rw-r--r--src/bootstrap/sanity.rs2
-rw-r--r--src/bootstrap/setup.rs29
-rw-r--r--src/bootstrap/synthetic_targets.rs82
-rw-r--r--src/bootstrap/test.rs301
-rw-r--r--src/bootstrap/tool.rs10
-rw-r--r--src/bootstrap/toolstate.rs8
-rw-r--r--src/bootstrap/util.rs39
36 files changed, 1557 insertions, 832 deletions
diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md
index d6924cf2c..1aba07138 100644
--- a/src/bootstrap/CHANGELOG.md
+++ b/src/bootstrap/CHANGELOG.md
@@ -18,6 +18,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- `x.py fmt` now formats only files modified between the merge-base of HEAD and the last commit in the master branch of the rust-lang repository and the current working directory. To restore old behaviour, use `x.py fmt .`. The check mode is not affected by this change. [#105702](https://github.com/rust-lang/rust/pull/105702)
- The `llvm.version-check` config option has been removed. Older versions were never supported. If you still need to support older versions (e.g. you are applying custom patches), patch `check_llvm_version` in bootstrap to change the minimum version. [#108619](https://github.com/rust-lang/rust/pull/108619)
- The `rust.ignore-git` option has been renamed to `rust.omit-git-hash`. [#110059](https://github.com/rust-lang/rust/pull/110059)
+- `--exclude` no longer accepts a `Kind` as part of a Step; instead it uses the top-level Kind of the subcommand. If this matches how you were already using --exclude (e.g. `x test --exclude test::std`), simply remove the kind: `--exclude std`. If you were using a kind that did not match the top-level subcommand, please open an issue explaining why you wanted this feature.
### Non-breaking changes
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index 8f8778efe..2b2e9e9f9 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -51,7 +51,6 @@ dependencies = [
"filetime",
"hex",
"ignore",
- "is-terminal",
"junction",
"libc",
"object",
@@ -387,18 +386,6 @@ dependencies = [
]
[[package]]
-name = "is-terminal"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "256017f749ab3117e93acb91063009e1f1bb56d03965b14c2c8df4eb02c524d8"
-dependencies = [
- "hermit-abi 0.3.1",
- "io-lifetimes",
- "rustix",
- "windows-sys",
-]
-
-[[package]]
name = "itoa"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -540,9 +527,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 367c61909..85eb543e4 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -30,7 +30,6 @@ path = "bin/sccache-plus-cl.rs"
test = false
[dependencies]
-is-terminal = "0.4"
build_helper = { path = "../tools/build_helper" }
cmake = "0.1.38"
filetime = "0.2"
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
index a80379e85..30dfa81c6 100644
--- a/src/bootstrap/bin/main.rs
+++ b/src/bootstrap/bin/main.rs
@@ -5,7 +5,11 @@
//! parent directory, and otherwise documentation can be found throughout the `build`
//! directory in each respective module.
-use std::env;
+#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+use std::io::Write;
+#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+use std::process;
+use std::{env, fs};
#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
use bootstrap::t;
@@ -20,22 +24,32 @@ fn main() {
#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
let _build_lock_guard;
#[cfg(all(any(unix, windows), not(target_os = "solaris")))]
+ // Display PID of process holding the lock
+ // PID will be stored in a lock file
{
let path = config.out.join("lock");
- build_lock = fd_lock::RwLock::new(t!(std::fs::File::create(&path)));
+ let pid = match fs::read_to_string(&path) {
+ Ok(contents) => contents,
+ Err(_) => String::new(),
+ };
+
+ build_lock =
+ fd_lock::RwLock::new(t!(fs::OpenOptions::new().write(true).create(true).open(&path)));
_build_lock_guard = match build_lock.try_write() {
- Ok(lock) => lock,
+ Ok(mut lock) => {
+ t!(lock.write(&process::id().to_string().as_ref()));
+ lock
+ }
err => {
drop(err);
- if let Some(pid) = get_lock_owner(&path) {
- println!("warning: build directory locked by process {pid}, waiting for lock");
- } else {
- println!("warning: build directory locked, waiting for lock");
- }
- t!(build_lock.write())
+ println!("warning: build directory locked by process {pid}, waiting for lock");
+ let mut lock = t!(build_lock.write());
+ t!(lock.write(&process::id().to_string().as_ref()));
+ lock
}
};
}
+
#[cfg(any(not(any(unix, windows)), target_os = "solaris"))]
println!("warning: file locking not supported for target, not locking build directory");
@@ -73,7 +87,7 @@ fn main() {
// HACK: Since the commit script uses hard links, we can't actually tell if it was installed by x.py setup or not.
// We could see if it's identical to src/etc/pre-push.sh, but pre-push may have been modified in the meantime.
// Instead, look for this comment, which is almost certainly not in any custom hook.
- if std::fs::read_to_string(pre_commit).map_or(false, |contents| {
+ if fs::read_to_string(pre_commit).map_or(false, |contents| {
contents.contains("https://github.com/rust-lang/rust/issues/77620#issuecomment-705144570")
}) {
println!(
@@ -108,30 +122,3 @@ fn check_version(config: &Config) -> Option<String> {
Some(msg)
}
-
-/// Get the PID of the process which took the write lock by
-/// parsing `/proc/locks`.
-#[cfg(target_os = "linux")]
-fn get_lock_owner(f: &std::path::Path) -> Option<u64> {
- use std::fs::File;
- use std::io::{BufRead, BufReader};
- use std::os::unix::fs::MetadataExt;
-
- let lock_inode = std::fs::metadata(f).ok()?.ino();
- let lockfile = File::open("/proc/locks").ok()?;
- BufReader::new(lockfile).lines().find_map(|line| {
- // pid--vvvvvv vvvvvvv--- inode
- // 21: FLOCK ADVISORY WRITE 359238 08:02:3719774 0 EOF
- let line = line.ok()?;
- let parts = line.split_whitespace().collect::<Vec<_>>();
- let (pid, inode) = (parts[4].parse::<u64>().ok()?, &parts[5]);
- let inode = inode.rsplit_once(':')?.1.parse::<u64>().ok()?;
- if inode == lock_inode { Some(pid) } else { None }
- })
-}
-
-#[cfg(not(any(target_os = "linux", target_os = "solaris")))]
-fn get_lock_owner(_: &std::path::Path) -> Option<u64> {
- // FIXME: Implement on other OS's
- None
-}
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 58d1926ad..149350e62 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -323,6 +323,7 @@ def default_build_triple(verbose):
cputype_mapper = {
'BePC': 'i686',
'aarch64': 'aarch64',
+ 'aarch64eb': 'aarch64',
'amd64': 'x86_64',
'arm64': 'aarch64',
'i386': 'i686',
@@ -458,23 +459,51 @@ def unpack_component(download_info):
verbose=download_info.verbose,
)
-class RustBuild(object):
- """Provide all the methods required to build Rust"""
+class FakeArgs:
+ """Used for unit tests to avoid updating all call sites"""
def __init__(self):
- self.checksums_sha256 = {}
- self.stage0_compiler = None
- self.download_url = ''
self.build = ''
self.build_dir = ''
self.clean = False
- self.config_toml = ''
- self.rust_root = ''
- self.use_locked_deps = False
- self.use_vendored_sources = False
self.verbose = False
+ self.json_output = False
+ self.color = 'auto'
+ self.warnings = 'default'
+
+class RustBuild(object):
+ """Provide all the methods required to build Rust"""
+ def __init__(self, config_toml="", args=FakeArgs()):
self.git_version = None
self.nix_deps_dir = None
self._should_fix_bins_and_dylibs = None
+ self.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
+
+ self.config_toml = config_toml
+
+ self.clean = args.clean
+ self.json_output = args.json_output
+ self.verbose = args.verbose
+ self.color = args.color
+ self.warnings = args.warnings
+
+ config_verbose_count = self.get_toml('verbose', 'build')
+ if config_verbose_count is not None:
+ self.verbose = max(self.verbose, int(config_verbose_count))
+
+ self.use_vendored_sources = self.get_toml('vendor', 'build') == 'true'
+ self.use_locked_deps = self.get_toml('locked-deps', 'build') == 'true'
+
+ build_dir = args.build_dir or self.get_toml('build-dir', 'build') or 'build'
+ self.build_dir = os.path.abspath(build_dir)
+
+ with open(os.path.join(self.rust_root, "src", "stage0.json")) as f:
+ data = json.load(f)
+ self.checksums_sha256 = data["checksums_sha256"]
+ self.stage0_compiler = Stage0Toolchain(data["compiler"])
+ self.download_url = os.getenv("RUSTUP_DIST_SERVER") or data["config"]["dist_server"]
+
+ self.build = args.build or self.build_triple()
+
def download_toolchain(self):
"""Fetch the build system for Rust, written in Rust
@@ -620,7 +649,7 @@ class RustBuild(object):
# The latter one does not exist on NixOS when using tmpfs as root.
try:
with open("/etc/os-release", "r") as f:
- if not any(l.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"') for l in f):
+ if not any(ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"') for ln in f):
return False
except FileNotFoundError:
return False
@@ -704,9 +733,10 @@ class RustBuild(object):
"""Return the path for .rustc-stamp at the given stage
>>> rb = RustBuild()
+ >>> rb.build = "host"
>>> rb.build_dir = "build"
- >>> rb.rustc_stamp() == os.path.join("build", "stage0", ".rustc-stamp")
- True
+ >>> expected = os.path.join("build", "host", "stage0", ".rustc-stamp")
+ >>> assert rb.rustc_stamp() == expected, rb.rustc_stamp()
"""
return os.path.join(self.bin_root(), '.rustc-stamp')
@@ -721,15 +751,9 @@ class RustBuild(object):
"""Return the binary root directory for the given stage
>>> rb = RustBuild()
- >>> rb.build_dir = "build"
- >>> rb.bin_root() == os.path.join("build", "stage0")
- True
-
- When the 'build' property is given should be a nested directory:
-
>>> rb.build = "devel"
- >>> rb.bin_root() == os.path.join("build", "devel", "stage0")
- True
+ >>> expected = os.path.abspath(os.path.join("build", "devel", "stage0"))
+ >>> assert rb.bin_root() == expected, rb.bin_root()
"""
subdir = "stage0"
return os.path.join(self.build_dir, self.build, subdir)
@@ -761,9 +785,12 @@ class RustBuild(object):
>>> rb.get_toml("key1")
'true'
"""
+ return RustBuild.get_toml_static(self.config_toml, key, section)
+ @staticmethod
+ def get_toml_static(config_toml, key, section=None):
cur_section = None
- for line in self.config_toml.splitlines():
+ for line in config_toml.splitlines():
section_match = re.match(r'^\s*\[(.*)\]\s*$', line)
if section_match is not None:
cur_section = section_match.group(1)
@@ -772,7 +799,7 @@ class RustBuild(object):
if match is not None:
value = match.group(1)
if section is None or section == cur_section:
- return self.get_string(value) or value.strip()
+ return RustBuild.get_string(value) or value.strip()
return None
def cargo(self):
@@ -835,13 +862,23 @@ class RustBuild(object):
"""
return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap")
- def build_bootstrap(self, color, verbose_count):
+ def build_bootstrap(self):
"""Build bootstrap"""
env = os.environ.copy()
if "GITHUB_ACTIONS" in env:
print("::group::Building bootstrap")
else:
print("Building bootstrap", file=sys.stderr)
+
+ args = self.build_bootstrap_cmd(env)
+ # Run this from the source directory so cargo finds .cargo/config
+ run(args, env=env, verbose=self.verbose, cwd=self.rust_root)
+
+ if "GITHUB_ACTIONS" in env:
+ print("::endgroup::")
+
+ def build_bootstrap_cmd(self, env):
+ """For tests."""
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)
@@ -872,11 +909,17 @@ class RustBuild(object):
}
for var_name, toml_key in var_data.items():
toml_val = self.get_toml(toml_key, build_section)
- if toml_val != None:
+ if toml_val is not None:
env["{}_{}".format(var_name, host_triple_sanitized)] = toml_val
# preserve existing RUSTFLAGS
env.setdefault("RUSTFLAGS", "")
+ # we need to explicitly add +xgot here so that we can successfully bootstrap
+ # a usable stage1 compiler
+ # FIXME: remove this if condition on the next bootstrap bump
+ # cfg(bootstrap)
+ if self.build_triple().startswith('mips'):
+ env["RUSTFLAGS"] += " -Ctarget-feature=+xgot"
target_features = []
if self.get_toml("crt-static", build_section) == "true":
target_features += ["+crt-static"]
@@ -888,7 +931,11 @@ class RustBuild(object):
if target_linker is not None:
env["RUSTFLAGS"] += " -C linker=" + target_linker
env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
- if self.get_toml("deny-warnings", "rust") != "false":
+ if self.warnings == "default":
+ deny_warnings = self.get_toml("deny-warnings", "rust") != "false"
+ else:
+ deny_warnings = self.warnings == "deny"
+ if deny_warnings:
env["RUSTFLAGS"] += " -Dwarnings"
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
@@ -898,7 +945,7 @@ class RustBuild(object):
self.cargo()))
args = [self.cargo(), "build", "--manifest-path",
os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
- args.extend("--verbose" for _ in range(verbose_count))
+ args.extend("--verbose" for _ in range(self.verbose))
if self.use_locked_deps:
args.append("--locked")
if self.use_vendored_sources:
@@ -908,16 +955,16 @@ class RustBuild(object):
args.append("build-metrics")
if self.json_output:
args.append("--message-format=json")
- if color == "always":
+ if self.color == "always":
args.append("--color=always")
- elif color == "never":
+ elif self.color == "never":
args.append("--color=never")
+ try:
+ args += env["CARGOFLAGS"].split()
+ except KeyError:
+ pass
- # Run this from the source directory so cargo finds .cargo/config
- run(args, env=env, verbose=self.verbose, cwd=self.rust_root)
-
- if "GITHUB_ACTIONS" in env:
- print("::endgroup::")
+ return args
def build_triple(self):
"""Build triple as in LLVM
@@ -967,7 +1014,7 @@ class RustBuild(object):
if os.path.exists(cargo_dir):
shutil.rmtree(cargo_dir)
-def parse_args():
+def parse_args(args):
"""Parse the command line arguments that the python script needs."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-h', '--help', action='store_true')
@@ -977,18 +1024,14 @@ def parse_args():
parser.add_argument('--color', choices=['always', 'never', 'auto'])
parser.add_argument('--clean', action='store_true')
parser.add_argument('--json-output', action='store_true')
+ parser.add_argument('--warnings', choices=['deny', 'warn', 'default'], default='default')
parser.add_argument('-v', '--verbose', action='count', default=0)
- return parser.parse_known_args(sys.argv)[0]
+ return parser.parse_known_args(args)[0]
def bootstrap(args):
"""Configure, fetch, build and run the initial bootstrap"""
- # Configure initial bootstrap
- build = RustBuild()
- build.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
- build.verbose = args.verbose != 0
- build.clean = args.clean
- build.json_output = args.json_output
+ rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
# Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`,
# then `config.toml` in the root directory.
@@ -997,52 +1040,43 @@ def bootstrap(args):
if using_default_path:
toml_path = 'config.toml'
if not os.path.exists(toml_path):
- toml_path = os.path.join(build.rust_root, toml_path)
+ toml_path = os.path.join(rust_root, toml_path)
# Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path,
# but not if `config.toml` hasn't been created.
if not using_default_path or os.path.exists(toml_path):
with open(toml_path) as config:
- build.config_toml = config.read()
+ config_toml = config.read()
+ else:
+ config_toml = ''
- profile = build.get_toml('profile')
+ profile = RustBuild.get_toml_static(config_toml, 'profile')
if profile is not None:
- include_file = 'config.{}.toml'.format(profile)
- include_dir = os.path.join(build.rust_root, 'src', 'bootstrap', 'defaults')
+ # Allows creating alias for profile names, allowing
+ # profiles to be renamed while maintaining back compatibility
+ # Keep in sync with `profile_aliases` in config.rs
+ profile_aliases = {
+ "user": "dist"
+ }
+ include_file = 'config.{}.toml'.format(profile_aliases.get(profile) or profile)
+ include_dir = os.path.join(rust_root, 'src', 'bootstrap', 'defaults')
include_path = os.path.join(include_dir, include_file)
- # HACK: This works because `build.get_toml()` returns the first match it finds for a
+ # HACK: This works because `self.get_toml()` returns the first match it finds for a
# specific key, so appending our defaults at the end allows the user to override them
with open(include_path) as included_toml:
- build.config_toml += os.linesep + included_toml.read()
-
- verbose_count = args.verbose
- config_verbose_count = build.get_toml('verbose', 'build')
- if config_verbose_count is not None:
- verbose_count = max(args.verbose, int(config_verbose_count))
-
- build.use_vendored_sources = build.get_toml('vendor', 'build') == 'true'
- build.use_locked_deps = build.get_toml('locked-deps', 'build') == 'true'
+ config_toml += os.linesep + included_toml.read()
+ # Configure initial bootstrap
+ build = RustBuild(config_toml, args)
build.check_vendored_status()
- build_dir = args.build_dir or build.get_toml('build-dir', 'build') or 'build'
- build.build_dir = os.path.abspath(build_dir)
-
- with open(os.path.join(build.rust_root, "src", "stage0.json")) as f:
- data = json.load(f)
- build.checksums_sha256 = data["checksums_sha256"]
- build.stage0_compiler = Stage0Toolchain(data["compiler"])
- build.download_url = os.getenv("RUSTUP_DIST_SERVER") or data["config"]["dist_server"]
-
- build.build = args.build or build.build_triple()
-
if not os.path.exists(build.build_dir):
os.makedirs(build.build_dir)
# Fetch/build the bootstrap
build.download_toolchain()
sys.stdout.flush()
- build.build_bootstrap(args.color, verbose_count)
+ build.build_bootstrap()
sys.stdout.flush()
# Run the bootstrap
@@ -1062,7 +1096,7 @@ def main():
if len(sys.argv) > 1 and sys.argv[1] == 'help':
sys.argv[1] = '-h'
- args = parse_args()
+ args = parse_args(sys.argv)
help_triggered = args.help or len(sys.argv) == 1
# If the user is asking for help, let them know that the whole download-and-build
diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py
index 5ecda83ee..3c91e403d 100644
--- a/src/bootstrap/bootstrap_test.py
+++ b/src/bootstrap/bootstrap_test.py
@@ -1,4 +1,6 @@
-"""Bootstrap tests"""
+"""Bootstrap tests
+
+Run these with `x test bootstrap`, or `python -m unittest src/bootstrap/bootstrap_test.py`."""
from __future__ import absolute_import, division, print_function
import os
@@ -10,9 +12,29 @@ import sys
from shutil import rmtree
+# Allow running this from the top-level directory.
+bootstrap_dir = os.path.dirname(os.path.abspath(__file__))
+# For the import below, have Python search in src/bootstrap first.
+sys.path.insert(0, bootstrap_dir)
import bootstrap
import configure
+def serialize_and_parse(configure_args, bootstrap_args=bootstrap.FakeArgs()):
+ from io import StringIO
+
+ section_order, sections, targets = configure.parse_args(configure_args)
+ buffer = StringIO()
+ configure.write_config_toml(buffer, section_order, targets, sections)
+ build = bootstrap.RustBuild(config_toml=buffer.getvalue(), args=bootstrap_args)
+
+ try:
+ import tomllib
+ # Verify this is actually valid TOML.
+ tomllib.loads(build.config_toml)
+ except ImportError:
+ print("warning: skipping TOML validation, need at least python 3.11", file=sys.stderr)
+ return build
+
class VerifyTestCase(unittest.TestCase):
"""Test Case for verify"""
@@ -77,58 +99,63 @@ class ProgramOutOfDate(unittest.TestCase):
class GenerateAndParseConfig(unittest.TestCase):
"""Test that we can serialize and deserialize a config.toml file"""
- def serialize_and_parse(self, args):
- from io import StringIO
-
- section_order, sections, targets = configure.parse_args(args)
- buffer = StringIO()
- configure.write_config_toml(buffer, section_order, targets, sections)
- build = bootstrap.RustBuild()
- build.config_toml = buffer.getvalue()
-
- try:
- import tomllib
- # Verify this is actually valid TOML.
- tomllib.loads(build.config_toml)
- except ImportError:
- print("warning: skipping TOML validation, need at least python 3.11", file=sys.stderr)
- return build
-
def test_no_args(self):
- build = self.serialize_and_parse([])
+ build = serialize_and_parse([])
self.assertEqual(build.get_toml("changelog-seen"), '2')
- self.assertEqual(build.get_toml("profile"), 'user')
+ self.assertEqual(build.get_toml("profile"), 'dist')
self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))
def test_set_section(self):
- build = self.serialize_and_parse(["--set", "llvm.download-ci-llvm"])
+ build = serialize_and_parse(["--set", "llvm.download-ci-llvm"])
self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true')
def test_set_target(self):
- build = self.serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
+ build = serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
def test_set_top_level(self):
- build = self.serialize_and_parse(["--set", "profile=compiler"])
+ build = serialize_and_parse(["--set", "profile=compiler"])
self.assertEqual(build.get_toml("profile"), 'compiler')
def test_set_codegen_backends(self):
- build = self.serialize_and_parse(["--set", "rust.codegen-backends=cranelift"])
+ build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift"])
self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift']"), -1)
- build = self.serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"])
+ build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"])
self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1)
- build = self.serialize_and_parse(["--enable-full-tools"])
+ build = serialize_and_parse(["--enable-full-tools"])
self.assertNotEqual(build.config_toml.find("codegen-backends = ['llvm']"), -1)
-if __name__ == '__main__':
- SUITE = unittest.TestSuite()
- TEST_LOADER = unittest.TestLoader()
- SUITE.addTest(doctest.DocTestSuite(bootstrap))
- SUITE.addTests([
- TEST_LOADER.loadTestsFromTestCase(VerifyTestCase),
- TEST_LOADER.loadTestsFromTestCase(GenerateAndParseConfig),
- TEST_LOADER.loadTestsFromTestCase(ProgramOutOfDate)])
-
- RUNNER = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
- result = RUNNER.run(SUITE)
- sys.exit(0 if result.wasSuccessful() else 1)
+
+class BuildBootstrap(unittest.TestCase):
+ """Test that we generate the appropriate arguments when building bootstrap"""
+
+ def build_args(self, configure_args=[], args=[], env={}):
+ env = env.copy()
+ env["PATH"] = os.environ["PATH"]
+
+ parsed = bootstrap.parse_args(args)
+ build = serialize_and_parse(configure_args, parsed)
+ # Make these optional so that `python -m unittest` works when run manually.
+ build_dir = os.environ.get("BUILD_DIR")
+ if build_dir is not None:
+ build.build_dir = build_dir
+ build_platform = os.environ.get("BUILD_PLATFORM")
+ if build_platform is not None:
+ build.build = build_platform
+ return build.build_bootstrap_cmd(env), env
+
+ def test_cargoflags(self):
+ args, _ = self.build_args(env={"CARGOFLAGS": "--timings"})
+ self.assertTrue("--timings" in args)
+
+ def test_warnings(self):
+ for toml_warnings in ['false', 'true', None]:
+ configure_args = []
+ if toml_warnings is not None:
+ configure_args = ["--set", "rust.deny-warnings=" + toml_warnings]
+
+ _, env = self.build_args(configure_args, args=["--warnings=warn"])
+ self.assertFalse("-Dwarnings" in env["RUSTFLAGS"])
+
+ _, env = self.build_args(configure_args, args=["--warnings=deny"])
+ self.assertTrue("-Dwarnings" in env["RUSTFLAGS"])
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 2fa445506..05b66f947 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -8,7 +8,7 @@ use std::fs::{self, File};
use std::hash::Hash;
use std::io::{BufRead, BufReader};
use std::ops::Deref;
-use std::path::{Component, Path, PathBuf};
+use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{Duration, Instant};
@@ -103,11 +103,14 @@ impl RunConfig<'_> {
}
/// Return a list of crate names selected by `run.paths`.
+ #[track_caller]
pub fn cargo_crates_in_set(&self) -> Interned<Vec<String>> {
let mut crates = Vec::new();
for krate in &self.paths {
let path = krate.assert_single_path();
- let crate_name = self.builder.crate_paths[&path.path];
+ let Some(crate_name) = self.builder.crate_paths.get(&path.path) else {
+ panic!("missing crate for path {}", path.path.display())
+ };
crates.push(crate_name.to_string());
}
INTERNER.intern_list(crates)
@@ -147,29 +150,6 @@ pub struct TaskPath {
pub kind: Option<Kind>,
}
-impl TaskPath {
- pub fn parse(path: impl Into<PathBuf>) -> TaskPath {
- let mut kind = None;
- let mut path = path.into();
-
- let mut components = path.components();
- if let Some(Component::Normal(os_str)) = components.next() {
- if let Some(str) = os_str.to_str() {
- if let Some((found_kind, found_prefix)) = str.split_once("::") {
- if found_kind.is_empty() {
- panic!("empty kind in task path {}", path.display());
- }
- kind = Kind::parse(found_kind);
- assert!(kind.is_some());
- path = Path::new(found_prefix).join(components.as_path());
- }
- }
- }
-
- TaskPath { path, kind }
- }
-}
-
impl Debug for TaskPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(kind) = &self.kind {
@@ -213,7 +193,7 @@ impl PathSet {
PathSet::Set(set)
}
- fn has(&self, needle: &Path, module: Option<Kind>) -> bool {
+ fn has(&self, needle: &Path, module: Kind) -> bool {
match self {
PathSet::Set(set) => set.iter().any(|p| Self::check(p, needle, module)),
PathSet::Suite(suite) => Self::check(suite, needle, module),
@@ -221,9 +201,9 @@ impl PathSet {
}
// internal use only
- fn check(p: &TaskPath, needle: &Path, module: Option<Kind>) -> bool {
- if let (Some(p_kind), Some(kind)) = (&p.kind, module) {
- p.path.ends_with(needle) && *p_kind == kind
+ fn check(p: &TaskPath, needle: &Path, module: Kind) -> bool {
+ if let Some(p_kind) = &p.kind {
+ p.path.ends_with(needle) && *p_kind == module
} else {
p.path.ends_with(needle)
}
@@ -235,11 +215,7 @@ impl PathSet {
/// This is used for `StepDescription::krate`, which passes all matching crates at once to
/// `Step::make_run`, rather than calling it many times with a single crate.
/// See `tests.rs` for examples.
- fn intersection_removing_matches(
- &self,
- needles: &mut Vec<&Path>,
- module: Option<Kind>,
- ) -> PathSet {
+ fn intersection_removing_matches(&self, needles: &mut Vec<&Path>, module: Kind) -> PathSet {
let mut check = |p| {
for (i, n) in needles.iter().enumerate() {
let matched = Self::check(p, n, module);
@@ -304,7 +280,7 @@ impl StepDescription {
}
fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool {
- if builder.config.exclude.iter().any(|e| pathset.has(&e.path, e.kind)) {
+ if builder.config.exclude.iter().any(|e| pathset.has(&e, builder.kind)) {
println!("Skipping {:?} because it is excluded", pathset);
return true;
}
@@ -378,7 +354,7 @@ impl StepDescription {
eprintln!(
"note: if you are adding a new Step to bootstrap itself, make sure you register it with `describe!`"
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
}
}
@@ -430,25 +406,6 @@ impl<'a> ShouldRun<'a> {
/// Indicates it should run if the command-line selects the given crate or
/// any of its (local) dependencies.
///
- /// Compared to `krate`, this treats the dependencies as aliases for the
- /// same job. Generally it is preferred to use `krate`, and treat each
- /// individual path separately. For example `./x.py test src/liballoc`
- /// (which uses `krate`) will test just `liballoc`. However, `./x.py check
- /// src/liballoc` (which uses `all_krates`) will check all of `libtest`.
- /// `all_krates` should probably be removed at some point.
- pub fn all_krates(mut self, name: &str) -> Self {
- let mut set = BTreeSet::new();
- for krate in self.builder.in_tree_crates(name, None) {
- let path = krate.local_path(self.builder);
- set.insert(TaskPath { path, kind: Some(self.kind) });
- }
- self.paths.insert(PathSet::Set(set));
- self
- }
-
- /// Indicates it should run if the command-line selects the given crate or
- /// any of its (local) dependencies.
- ///
/// `make_run` will be called a single time with all matching command-line paths.
pub fn crate_or_deps(self, name: &str) -> Self {
let crates = self.builder.in_tree_crates(name, None);
@@ -458,6 +415,8 @@ impl<'a> ShouldRun<'a> {
/// Indicates it should run if the command-line selects any of the given crates.
///
/// `make_run` will be called a single time with all matching command-line paths.
+ ///
+ /// Prefer [`ShouldRun::crate_or_deps`] to this function where possible.
pub(crate) fn crates(mut self, crates: Vec<&Crate>) -> Self {
for krate in crates {
let path = krate.local_path(self.builder);
@@ -487,7 +446,15 @@ impl<'a> ShouldRun<'a> {
self.paths(&[path])
}
- // multiple aliases for the same job
+ /// Multiple aliases for the same job.
+ ///
+ /// This differs from [`path`] in that multiple calls to path will end up calling `make_run`
+ /// multiple times, whereas a single call to `paths` will only ever generate a single call to
+ /// `paths`.
+ ///
+ /// This is analogous to `all_krates`, although `all_krates` is gone now. Prefer [`path`] where possible.
+ ///
+ /// [`path`]: ShouldRun::path
pub fn paths(mut self, paths: &[&str]) -> Self {
static SUBMODULES_PATHS: OnceCell<Vec<String>> = OnceCell::new();
@@ -568,7 +535,7 @@ impl<'a> ShouldRun<'a> {
) -> Vec<PathSet> {
let mut sets = vec![];
for pathset in &self.paths {
- let subset = pathset.intersection_removing_matches(paths, Some(kind));
+ let subset = pathset.intersection_removing_matches(paths, kind);
if subset != PathSet::empty() {
sets.push(subset);
}
@@ -577,7 +544,7 @@ impl<'a> ShouldRun<'a> {
}
}
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, ValueEnum)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum Kind {
#[clap(alias = "b")]
Build,
@@ -641,12 +608,19 @@ impl Kind {
}
}
- pub fn test_description(&self) -> &'static str {
+ pub fn description(&self) -> String {
match self {
Kind::Test => "Testing",
Kind::Bench => "Benchmarking",
- _ => panic!("not a test command: {}!", self.as_str()),
+ Kind::Doc => "Documenting",
+ Kind::Run => "Running",
+ Kind::Suggest => "Suggesting",
+ _ => {
+ let title_letter = self.as_str()[0..1].to_ascii_uppercase();
+ return format!("{title_letter}{}ing", &self.as_str()[1..]);
+ }
}
+ .to_owned()
}
}
@@ -702,8 +676,8 @@ impl<'a> Builder<'a> {
check::CargoMiri,
check::MiroptTestTools,
check::Rls,
- check::RustAnalyzer,
check::Rustfmt,
+ check::RustAnalyzer,
check::Bootstrap
),
Kind::Test => describe!(
@@ -712,6 +686,7 @@ impl<'a> Builder<'a> {
test::Tidy,
test::Ui,
test::RunPassValgrind,
+ test::RunCoverage,
test::MirOpt,
test::Codegen,
test::CodegenUnits,
@@ -720,6 +695,7 @@ impl<'a> Builder<'a> {
test::Debuginfo,
test::UiFullDeps,
test::Rustdoc,
+ test::RunCoverageRustdoc,
test::Pretty,
test::Crate,
test::CrateLibrustc,
@@ -994,7 +970,7 @@ impl<'a> Builder<'a> {
}
pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> {
- self.ensure(compile::Sysroot { compiler })
+ self.ensure(compile::Sysroot::new(compiler))
}
/// Returns the libdir where the standard library and other artifacts are
@@ -1231,7 +1207,7 @@ impl<'a> Builder<'a> {
assert_eq!(target, compiler.host);
}
- if self.config.rust_optimize {
+ if self.config.rust_optimize.is_release() {
// FIXME: cargo bench/install do not accept `--release`
if cmd != "bench" && cmd != "install" {
cargo.arg("--release");
@@ -1287,7 +1263,7 @@ impl<'a> Builder<'a> {
}
let profile_var = |name: &str| {
- let profile = if self.config.rust_optimize { "RELEASE" } else { "DEV" };
+ let profile = if self.config.rust_optimize.is_release() { "RELEASE" } else { "DEV" };
format!("CARGO_PROFILE_{}_{}", profile, name)
};
@@ -1357,7 +1333,7 @@ impl<'a> Builder<'a> {
"error: `x.py clippy` requires a host `rustc` toolchain with the `clippy` component"
);
eprintln!("help: try `rustup component add clippy`");
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
});
if !t!(std::str::from_utf8(&output.stdout)).contains("nightly") {
rustflags.arg("--cfg=bootstrap");
@@ -1634,7 +1610,7 @@ impl<'a> Builder<'a> {
// flesh out rpath support more fully in the future.
rustflags.arg("-Zosx-rpath-install-name");
Some("-Wl,-rpath,@loader_path/../lib")
- } else if !target.contains("windows") {
+ } else if !target.contains("windows") && !target.contains("aix") {
rustflags.arg("-Clink-args=-Wl,-z,origin");
Some("-Wl,-rpath,$ORIGIN/../lib")
} else {
@@ -1676,6 +1652,13 @@ impl<'a> Builder<'a> {
}
};
cargo.env(profile_var("DEBUG"), debuginfo_level.to_string());
+ if let Some(opt_level) = &self.config.rust_optimize.get_opt_level() {
+ cargo.env(profile_var("OPT_LEVEL"), opt_level);
+ }
+ if !self.config.dry_run() && self.cc.borrow()[&target].args().iter().any(|arg| arg == "-gz")
+ {
+ rustflags.arg("-Clink-arg=-gz");
+ }
cargo.env(
profile_var("DEBUG_ASSERTIONS"),
if mode == Mode::Std {
@@ -1785,7 +1768,10 @@ impl<'a> Builder<'a> {
cargo.env("RUSTC_TLS_MODEL_INITIAL_EXEC", "1");
}
- if self.config.incremental {
+ // Ignore incremental modes except for stage0, since we're
+ // not guaranteeing correctness across builds if the compiler
+ // is changing under your feet.
+ if self.config.incremental && compiler.stage == 0 {
cargo.env("CARGO_INCREMENTAL", "1");
} else {
// Don't rely on any default setting for incr. comp. in Cargo
@@ -2126,7 +2112,7 @@ impl<'a> Builder<'a> {
let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind));
for path in &self.paths {
- if should_run.paths.iter().any(|s| s.has(path, Some(desc.kind)))
+ if should_run.paths.iter().any(|s| s.has(path, desc.kind))
&& !desc.is_excluded(
self,
&PathSet::Suite(TaskPath { path: path.clone(), kind: Some(desc.kind) }),
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index edca8fe9b..31dcee582 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -1,5 +1,6 @@
use super::*;
use crate::config::{Config, DryRun, TargetSelection};
+use crate::doc::DocumentationFormat;
use std::thread;
fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config {
@@ -66,6 +67,16 @@ macro_rules! std {
};
}
+macro_rules! doc_std {
+ ($host:ident => $target:ident, stage = $stage:literal) => {
+ doc::Std::new(
+ $stage,
+ TargetSelection::from_user(stringify!($target)),
+ DocumentationFormat::HTML,
+ )
+ };
+}
+
macro_rules! rustc {
($host:ident => $target:ident, stage = $stage:literal) => {
compile::Rustc::new(
@@ -90,23 +101,21 @@ fn test_invalid() {
#[test]
fn test_intersection() {
- let set = PathSet::Set(
- ["library/core", "library/alloc", "library/std"].into_iter().map(TaskPath::parse).collect(),
- );
+ let set = |paths: &[&str]| {
+ PathSet::Set(paths.into_iter().map(|p| TaskPath { path: p.into(), kind: None }).collect())
+ };
+ let library_set = set(&["library/core", "library/alloc", "library/std"]);
let mut command_paths =
vec![Path::new("library/core"), Path::new("library/alloc"), Path::new("library/stdarch")];
- let subset = set.intersection_removing_matches(&mut command_paths, None);
- assert_eq!(
- subset,
- PathSet::Set(["library/core", "library/alloc"].into_iter().map(TaskPath::parse).collect())
- );
+ let subset = library_set.intersection_removing_matches(&mut command_paths, Kind::Build);
+ assert_eq!(subset, set(&["library/core", "library/alloc"]),);
assert_eq!(command_paths, vec![Path::new("library/stdarch")]);
}
#[test]
fn test_exclude() {
let mut config = configure("test", &["A"], &["A"]);
- config.exclude = vec![TaskPath::parse("src/tools/tidy")];
+ config.exclude = vec!["src/tools/tidy".into()];
let cache = run_build(&[], config);
// Ensure we have really excluded tidy
@@ -118,21 +127,16 @@ fn test_exclude() {
#[test]
fn test_exclude_kind() {
- let path = PathBuf::from("src/tools/cargotest");
- let exclude = TaskPath::parse("test::src/tools/cargotest");
- assert_eq!(exclude, TaskPath { kind: Some(Kind::Test), path: path.clone() });
+ let path = PathBuf::from("compiler/rustc_data_structures");
let mut config = configure("test", &["A"], &["A"]);
- // Ensure our test is valid, and `test::Cargotest` would be run without the exclude.
- assert!(run_build(&[path.clone()], config.clone()).contains::<test::Cargotest>());
- // Ensure tests for cargotest are skipped.
- config.exclude = vec![exclude.clone()];
- assert!(!run_build(&[path.clone()], config).contains::<test::Cargotest>());
-
- // Ensure builds for cargotest are not skipped.
- let mut config = configure("build", &["A"], &["A"]);
- config.exclude = vec![exclude];
- assert!(run_build(&[path], config).contains::<tool::CargoTest>());
+ // Ensure our test is valid, and `test::Rustc` would be run without the exclude.
+ assert!(run_build(&[], config.clone()).contains::<test::CrateLibrustc>());
+ // Ensure tests for rustc are skipped.
+ config.exclude = vec![path.clone()];
+ assert!(!run_build(&[], config.clone()).contains::<test::CrateLibrustc>());
+ // Ensure builds for rustc are not skipped.
+ assert!(run_build(&[], config).contains::<compile::Rustc>());
}
/// Ensure that if someone passes both a single crate and `library`, all library crates get built.
@@ -144,6 +148,9 @@ fn alias_and_path_for_library() {
first(cache.all::<compile::Std>()),
&[std!(A => A, stage = 0), std!(A => A, stage = 1)]
);
+
+ let mut cache = run_build(&["library".into(), "core".into()], configure("doc", &["A"], &["A"]));
+ assert_eq!(first(cache.all::<doc::Std>()), &[doc_std!(A => A, stage = 0)]);
}
#[test]
diff --git a/src/bootstrap/cc_detect.rs b/src/bootstrap/cc_detect.rs
index 65c882fb8..ade3bfed1 100644
--- a/src/bootstrap/cc_detect.rs
+++ b/src/bootstrap/cc_detect.rs
@@ -69,6 +69,8 @@ fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build {
.opt_level(2)
.warnings(false)
.debug(false)
+ // Compress debuginfo
+ .flag_if_supported("-gz")
.target(&target.triple)
.host(&build.build.triple);
match build.crt_static(target) {
@@ -87,7 +89,7 @@ fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build {
cfg
}
-pub fn find(build: &mut Build) {
+pub fn find(build: &Build) {
// For all targets we're going to need a C compiler for building some shims
// and such as well as for being a linker for Rust code.
let targets = build
@@ -98,60 +100,64 @@ pub fn find(build: &mut Build) {
.chain(iter::once(build.build))
.collect::<HashSet<_>>();
for target in targets.into_iter() {
- let mut cfg = new_cc_build(build, target);
- let config = build.config.target_config.get(&target);
- if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
- cfg.compiler(cc);
- } else {
- set_compiler(&mut cfg, Language::C, target, config, build);
- }
+ find_target(build, target);
+ }
+}
- let compiler = cfg.get_compiler();
- let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
- ar
- } else {
- cc2ar(compiler.path(), target)
- };
+pub fn find_target(build: &Build, target: TargetSelection) {
+ let mut cfg = new_cc_build(build, target);
+ let config = build.config.target_config.get(&target);
+ if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
+ cfg.compiler(cc);
+ } else {
+ set_compiler(&mut cfg, Language::C, target, config, build);
+ }
- build.cc.insert(target, compiler.clone());
- let cflags = build.cflags(target, GitRepo::Rustc, CLang::C);
+ let compiler = cfg.get_compiler();
+ let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
+ ar
+ } else {
+ cc2ar(compiler.path(), target)
+ };
- // If we use llvm-libunwind, we will need a C++ compiler as well for all targets
- // We'll need one anyways if the target triple is also a host triple
- let mut cfg = new_cc_build(build, target);
- cfg.cpp(true);
- let cxx_configured = if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
- cfg.compiler(cxx);
- true
- } else if build.hosts.contains(&target) || build.build == target {
- set_compiler(&mut cfg, Language::CPlusPlus, target, config, build);
- true
- } else {
- // Use an auto-detected compiler (or one configured via `CXX_target_triple` env vars).
- cfg.try_get_compiler().is_ok()
- };
+ build.cc.borrow_mut().insert(target, compiler.clone());
+ let cflags = build.cflags(target, GitRepo::Rustc, CLang::C);
- // for VxWorks, record CXX compiler which will be used in lib.rs:linker()
- if cxx_configured || target.contains("vxworks") {
- let compiler = cfg.get_compiler();
- build.cxx.insert(target, compiler);
- }
+ // If we use llvm-libunwind, we will need a C++ compiler as well for all targets
+ // We'll need one anyways if the target triple is also a host triple
+ let mut cfg = new_cc_build(build, target);
+ cfg.cpp(true);
+ let cxx_configured = if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
+ cfg.compiler(cxx);
+ true
+ } else if build.hosts.contains(&target) || build.build == target {
+ set_compiler(&mut cfg, Language::CPlusPlus, target, config, build);
+ true
+ } else {
+ // Use an auto-detected compiler (or one configured via `CXX_target_triple` env vars).
+ cfg.try_get_compiler().is_ok()
+ };
- build.verbose(&format!("CC_{} = {:?}", &target.triple, build.cc(target)));
- build.verbose(&format!("CFLAGS_{} = {:?}", &target.triple, cflags));
- if let Ok(cxx) = build.cxx(target) {
- let cxxflags = build.cflags(target, GitRepo::Rustc, CLang::Cxx);
- build.verbose(&format!("CXX_{} = {:?}", &target.triple, cxx));
- build.verbose(&format!("CXXFLAGS_{} = {:?}", &target.triple, cxxflags));
- }
- if let Some(ar) = ar {
- build.verbose(&format!("AR_{} = {:?}", &target.triple, ar));
- build.ar.insert(target, ar);
- }
+ // for VxWorks, record CXX compiler which will be used in lib.rs:linker()
+ if cxx_configured || target.contains("vxworks") {
+ let compiler = cfg.get_compiler();
+ build.cxx.borrow_mut().insert(target, compiler);
+ }
- if let Some(ranlib) = config.and_then(|c| c.ranlib.clone()) {
- build.ranlib.insert(target, ranlib);
- }
+ build.verbose(&format!("CC_{} = {:?}", &target.triple, build.cc(target)));
+ build.verbose(&format!("CFLAGS_{} = {:?}", &target.triple, cflags));
+ if let Ok(cxx) = build.cxx(target) {
+ let cxxflags = build.cflags(target, GitRepo::Rustc, CLang::Cxx);
+ build.verbose(&format!("CXX_{} = {:?}", &target.triple, cxx));
+ build.verbose(&format!("CXXFLAGS_{} = {:?}", &target.triple, cxxflags));
+ }
+ if let Some(ar) = ar {
+ build.verbose(&format!("AR_{} = {:?}", &target.triple, ar));
+ build.ar.borrow_mut().insert(target, ar);
+ }
+
+ if let Some(ranlib) = config.and_then(|c| c.ranlib.clone()) {
+ build.ranlib.borrow_mut().insert(target, ranlib);
}
}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index b11be96ce..1a0f00478 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -1,8 +1,10 @@
//! Implementation of compiling the compiler and standard library, in "check"-based modes.
-use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
+use crate::builder::{crate_description, Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::Interned;
-use crate::compile::{add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo};
+use crate::compile::{
+ add_to_sysroot, make_run_crates, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo,
+};
use crate::config::TargetSelection;
use crate::tool::{prepare_tool_cargo, SourceType};
use crate::INTERNER;
@@ -12,6 +14,12 @@ use std::path::{Path, PathBuf};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Std {
pub target: TargetSelection,
+ /// Whether to build only a subset of crates.
+ ///
+ /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`].
+ ///
+ /// [`compile::Rustc`]: crate::compile::Rustc
+ crates: Interned<Vec<String>>,
}
/// Returns args for the subcommand itself (not for cargo)
@@ -66,16 +74,23 @@ fn cargo_subcommand(kind: Kind) -> &'static str {
}
}
+impl Std {
+ pub fn new(target: TargetSelection) -> Self {
+ Self { target, crates: INTERNER.intern_list(vec![]) }
+ }
+}
+
impl Step for Std {
type Output = ();
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.all_krates("sysroot").path("library")
+ run.crate_or_deps("sysroot").path("library")
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Std { target: run.target });
+ let crates = make_run_crates(&run, "library");
+ run.builder.ensure(Std { target: run.target, crates });
}
fn run(self, builder: &Builder<'_>) {
@@ -97,7 +112,14 @@ impl Step for Std {
cargo.arg("--lib");
}
- let _guard = builder.msg_check("library artifacts", target);
+ for krate in &*self.crates {
+ cargo.arg("-p").arg(krate);
+ }
+
+ let _guard = builder.msg_check(
+ format_args!("library artifacts{}", crate_description(&self.crates)),
+ target,
+ );
run_cargo(
builder,
cargo,
@@ -117,7 +139,8 @@ impl Step for Std {
}
// don't run on std twice with x.py clippy
- if builder.kind == Kind::Clippy {
+ // don't check test dependencies if we haven't built libtest
+ if builder.kind == Kind::Clippy || !self.crates.is_empty() {
return;
}
@@ -147,8 +170,8 @@ impl Step for Std {
// Explicitly pass -p for all dependencies krates -- this will force cargo
// to also check the tests/benches/examples for these crates, rather
// than just the leaf crate.
- for krate in builder.in_tree_crates("test", Some(target)) {
- cargo.arg("-p").arg(krate.name);
+ for krate in &*self.crates {
+ cargo.arg("-p").arg(krate);
}
let _guard = builder.msg_check("library test/bench/example targets", target);
@@ -167,6 +190,22 @@ impl Step for Std {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
pub target: TargetSelection,
+ /// Whether to build only a subset of crates.
+ ///
+ /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`].
+ ///
+ /// [`compile::Rustc`]: crate::compile::Rustc
+ crates: Interned<Vec<String>>,
+}
+
+impl Rustc {
+ pub fn new(target: TargetSelection, builder: &Builder<'_>) -> Self {
+ let mut crates = vec![];
+ for krate in builder.in_tree_crates("rustc-main", None) {
+ crates.push(krate.name.to_string());
+ }
+ Self { target, crates: INTERNER.intern_list(crates) }
+ }
}
impl Step for Rustc {
@@ -175,11 +214,12 @@ impl Step for Rustc {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.all_krates("rustc-main").path("compiler")
+ run.crate_or_deps("rustc-main").path("compiler")
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Rustc { target: run.target });
+ let crates = make_run_crates(&run, "compiler");
+ run.builder.ensure(Rustc { target: run.target, crates });
}
/// Builds the compiler.
@@ -200,7 +240,7 @@ impl Step for Rustc {
builder.ensure(crate::compile::Std::new(compiler, compiler.host));
builder.ensure(crate::compile::Std::new(compiler, target));
} else {
- builder.ensure(Std { target });
+ builder.ensure(Std::new(target));
}
let mut cargo = builder.cargo(
@@ -218,14 +258,17 @@ impl Step for Rustc {
cargo.arg("--all-targets");
}
- // Explicitly pass -p for all compiler krates -- this will force cargo
+ // Explicitly pass -p for all compiler crates -- this will force cargo
// to also check the tests/benches/examples for these crates, rather
// than just the leaf crate.
- for krate in builder.in_tree_crates("rustc-main", Some(target)) {
- cargo.arg("-p").arg(krate.name);
+ for krate in &*self.crates {
+ cargo.arg("-p").arg(krate);
}
- let _guard = builder.msg_check("compiler artifacts", target);
+ let _guard = builder.msg_check(
+ format_args!("compiler artifacts{}", crate_description(&self.crates)),
+ target,
+ );
run_cargo(
builder,
cargo,
@@ -268,7 +311,7 @@ impl Step for CodegenBackend {
let target = self.target;
let backend = self.backend;
- builder.ensure(Rustc { target });
+ builder.ensure(Rustc::new(target, builder));
let mut cargo = builder.cargo(
compiler,
@@ -304,7 +347,7 @@ pub struct RustAnalyzer {
impl Step for RustAnalyzer {
type Output = ();
const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
+ const DEFAULT: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/rust-analyzer")
@@ -318,7 +361,7 @@ impl Step for RustAnalyzer {
let compiler = builder.compiler(builder.top_stage, builder.config.build);
let target = self.target;
- builder.ensure(Std { target });
+ builder.ensure(Std::new(target));
let mut cargo = prepare_tool_cargo(
builder,
@@ -386,7 +429,7 @@ macro_rules! tool_check_step {
let compiler = builder.compiler(builder.top_stage, builder.config.build);
let target = self.target;
- builder.ensure(Rustc { target });
+ builder.ensure(Rustc::new(target, builder));
let mut cargo = prepare_tool_cargo(
builder,
diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs
index 0d9fd56b0..c1d867a0b 100644
--- a/src/bootstrap/clean.rs
+++ b/src/bootstrap/clean.rs
@@ -85,6 +85,10 @@ clean_crate_tree! {
}
fn clean_default(build: &Build, all: bool) {
+ if build.config.dry_run() {
+ return;
+ }
+
rm_rf("tmp".as_ref());
if all {
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index 33addb90d..14c3ef79a 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -40,14 +40,32 @@ pub struct Std {
///
/// This shouldn't be used from other steps; see the comment on [`Rustc`].
crates: Interned<Vec<String>>,
+ /// When using download-rustc, we need to use a new build of `std` for running unit tests of Std itself,
+ /// but we need to use the downloaded copy of std for linking to rustdoc. Allow this to be overridden by `builder.ensure` from other steps.
+ force_recompile: bool,
}
impl Std {
pub fn new(compiler: Compiler, target: TargetSelection) -> Self {
- Self { target, compiler, crates: Default::default() }
+ Self { target, compiler, crates: Default::default(), force_recompile: false }
+ }
+
+ pub fn force_recompile(compiler: Compiler, target: TargetSelection) -> Self {
+ Self { target, compiler, crates: Default::default(), force_recompile: true }
}
}
+/// Given an `alias` selected by the `Step` and the paths passed on the command line,
+/// return a list of the crates that should be built.
+///
+/// Normally, people will pass *just* `library` if they pass it.
+/// But it's possible (although strange) to pass something like `library std core`.
+/// Build all crates anyway, as if they hadn't passed the other args.
+pub(crate) fn make_run_crates(run: &RunConfig<'_>, alias: &str) -> Interned<Vec<String>> {
+ let has_alias = run.paths.iter().any(|set| set.assert_single_path().path.ends_with(alias));
+ if has_alias { Default::default() } else { run.cargo_crates_in_set() }
+}
+
impl Step for Std {
type Output = ();
const DEFAULT: bool = true;
@@ -62,16 +80,11 @@ impl Step for Std {
}
fn make_run(run: RunConfig<'_>) {
- // Normally, people will pass *just* library if they pass it.
- // But it's possible (although strange) to pass something like `library std core`.
- // Build all crates anyway, as if they hadn't passed the other args.
- let has_library =
- run.paths.iter().any(|set| set.assert_single_path().path.ends_with("library"));
- let crates = if has_library { Default::default() } else { run.cargo_crates_in_set() };
run.builder.ensure(Std {
compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
target: run.target,
- crates,
+ crates: make_run_crates(&run, "library"),
+ force_recompile: false,
});
}
@@ -84,11 +97,20 @@ impl Step for Std {
let target = self.target;
let compiler = self.compiler;
- // When using `download-rustc`, we already have artifacts for the host available
- // (they were copied in `impl Step for Sysroot`). Don't recompile them.
- // NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler,
- // so its artifacts can't be reused.
- if builder.download_rustc() && compiler.stage != 0 && target == builder.build.build {
+ // When using `download-rustc`, we already have artifacts for the host available. Don't
+ // recompile them.
+ if builder.download_rustc() && target == builder.build.build
+ // NOTE: the beta compiler may generate different artifacts than the downloaded compiler, so
+ // its artifacts can't be reused.
+ && compiler.stage != 0
+ // This check is specific to testing std itself; see `test::Std` for more details.
+ && !self.force_recompile
+ {
+ cp_rustc_component_to_ci_sysroot(
+ builder,
+ compiler,
+ builder.config.ci_rust_std_contents(),
+ );
return;
}
@@ -96,6 +118,10 @@ impl Step for Std {
|| builder.config.keep_stage_std.contains(&compiler.stage)
{
builder.info("Warning: Using a potentially old libstd. This may not behave well.");
+
+ copy_third_party_objects(builder, &compiler, target);
+ copy_self_contained_objects(builder, &compiler, target);
+
builder.ensure(StdLink::from_std(self, compiler));
return;
}
@@ -143,6 +169,11 @@ impl Step for Std {
cargo.arg("-p").arg(krate);
}
+ // See src/bootstrap/synthetic_targets.rs
+ if target.is_synthetic() {
+ cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1");
+ }
+
let _guard = builder.msg(
Kind::Build,
compiler.stage,
@@ -288,7 +319,7 @@ fn copy_self_contained_objects(
}
} else if target.ends_with("windows-gnu") {
for obj in ["crt2.o", "dllcrt2.o"].iter() {
- let src = compiler_file(builder, builder.cc(target), target, CLang::C, obj);
+ let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj);
let target = libdir_self_contained.join(obj);
builder.copy(&src, &target);
target_deps.push((target, DependencyType::TargetSelfContained));
@@ -423,6 +454,8 @@ struct StdLink {
pub target: TargetSelection,
/// Not actually used; only present to make sure the cache invalidation is correct.
crates: Interned<Vec<String>>,
+ /// See [`Std::force_recompile`].
+ force_recompile: bool,
}
impl StdLink {
@@ -432,6 +465,7 @@ impl StdLink {
target_compiler: std.compiler,
target: std.target,
crates: std.crates,
+ force_recompile: std.force_recompile,
}
}
}
@@ -455,8 +489,24 @@ impl Step for StdLink {
let compiler = self.compiler;
let target_compiler = self.target_compiler;
let target = self.target;
- let libdir = builder.sysroot_libdir(target_compiler, target);
- let hostdir = builder.sysroot_libdir(target_compiler, compiler.host);
+
+ // NOTE: intentionally does *not* check `target == builder.build` to avoid having to add the same check in `test::Crate`.
+ let (libdir, hostdir) = if self.force_recompile && builder.download_rustc() {
+ // NOTE: copies part of `sysroot_libdir` to avoid having to add a new `force_recompile` argument there too
+ let lib = builder.sysroot_libdir_relative(self.compiler);
+ let sysroot = builder.ensure(crate::compile::Sysroot {
+ compiler: self.compiler,
+ force_recompile: self.force_recompile,
+ });
+ let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib");
+ let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib");
+ (INTERNER.intern_path(libdir), INTERNER.intern_path(hostdir))
+ } else {
+ let libdir = builder.sysroot_libdir(target_compiler, target);
+ let hostdir = builder.sysroot_libdir(target_compiler, compiler.host);
+ (libdir, hostdir)
+ };
+
add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
}
}
@@ -589,6 +639,25 @@ impl Step for StartupObjects {
}
}
+fn cp_rustc_component_to_ci_sysroot(
+ builder: &Builder<'_>,
+ compiler: Compiler,
+ contents: Vec<String>,
+) {
+ let sysroot = builder.ensure(Sysroot { compiler, force_recompile: false });
+
+ let ci_rustc_dir = builder.out.join(&*builder.build.build.triple).join("ci-rustc");
+ for file in contents {
+ let src = ci_rustc_dir.join(&file);
+ let dst = sysroot.join(file);
+ if src.is_dir() {
+ t!(fs::create_dir_all(dst));
+ } else {
+ builder.copy(&src, &dst);
+ }
+ }
+}
+
#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
pub target: TargetSelection,
@@ -615,6 +684,8 @@ impl Step for Rustc {
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let mut crates = run.builder.in_tree_crates("rustc-main", None);
for (i, krate) in crates.iter().enumerate() {
+ // We can't allow `build rustc` as an alias for this Step, because that's reserved by `Assemble`.
+ // Ideally Assemble would use `build compiler` instead, but that seems too confusing to be worth the breaking change.
if krate.name == "rustc-main" {
crates.swap_remove(i);
break;
@@ -646,18 +717,11 @@ impl Step for Rustc {
if builder.download_rustc() && compiler.stage != 0 {
// Copy the existing artifacts instead of rebuilding them.
// NOTE: this path is only taken for tools linking to rustc-dev (including ui-fulldeps tests).
- let sysroot = builder.ensure(Sysroot { compiler });
-
- let ci_rustc_dir = builder.out.join(&*builder.build.build.triple).join("ci-rustc");
- for file in builder.config.rustc_dev_contents() {
- let src = ci_rustc_dir.join(&file);
- let dst = sysroot.join(file);
- if src.is_dir() {
- t!(fs::create_dir_all(dst));
- } else {
- builder.copy(&src, &dst);
- }
- }
+ cp_rustc_component_to_ci_sysroot(
+ builder,
+ compiler,
+ builder.config.ci_rustc_dev_contents(),
+ );
return;
}
@@ -936,8 +1000,13 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
&& !target.contains("apple")
&& !target.contains("solaris")
{
- let file =
- compiler_file(builder, builder.cxx(target).unwrap(), target, CLang::Cxx, "libstdc++.a");
+ let file = compiler_file(
+ builder,
+ &builder.cxx(target).unwrap(),
+ target,
+ CLang::Cxx,
+ "libstdc++.a",
+ );
cargo.env("LLVM_STATIC_STDCPP", file);
}
if builder.llvm_link_shared() {
@@ -1208,6 +1277,9 @@ pub fn compiler_file(
c: CLang,
file: &str,
) -> PathBuf {
+ if builder.config.dry_run() {
+ return PathBuf::new();
+ }
let mut cmd = Command::new(compiler);
cmd.args(builder.cflags(target, GitRepo::Rustc, c));
cmd.arg(format!("-print-file-name={}", file));
@@ -1218,6 +1290,14 @@ pub fn compiler_file(
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Sysroot {
pub compiler: Compiler,
+ /// See [`Std::force_recompile`].
+ force_recompile: bool,
+}
+
+impl Sysroot {
+ pub(crate) fn new(compiler: Compiler) -> Self {
+ Sysroot { compiler, force_recompile: false }
+ }
}
impl Step for Sysroot {
@@ -1240,6 +1320,8 @@ impl Step for Sysroot {
let sysroot_dir = |stage| {
if stage == 0 {
host_dir.join("stage0-sysroot")
+ } else if self.force_recompile && stage == compiler.stage {
+ host_dir.join(format!("stage{stage}-test-sysroot"))
} else if builder.download_rustc() && compiler.stage != builder.top_stage {
host_dir.join("ci-rustc-sysroot")
} else {
@@ -1279,16 +1361,26 @@ impl Step for Sysroot {
// 2. The sysroot is deleted and recreated between each invocation, so running `x test
// ui-fulldeps && x test ui` can't cause failures.
let mut filtered_files = Vec::new();
- // Don't trim directories or files that aren't loaded per-target; they can't cause conflicts.
- let suffix = format!("lib/rustlib/{}/lib", compiler.host);
- for path in builder.config.rustc_dev_contents() {
- let path = Path::new(&path);
- if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
- filtered_files.push(path.file_name().unwrap().to_owned());
+ let mut add_filtered_files = |suffix, contents| {
+ for path in contents {
+ let path = Path::new(&path);
+ if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
+ filtered_files.push(path.file_name().unwrap().to_owned());
+ }
}
- }
-
- let filtered_extensions = [OsStr::new("rmeta"), OsStr::new("rlib"), OsStr::new("so")];
+ };
+ let suffix = format!("lib/rustlib/{}/lib", compiler.host);
+ add_filtered_files(suffix.as_str(), builder.config.ci_rustc_dev_contents());
+ // NOTE: we can't copy std eagerly because `stage2-test-sysroot` needs to have only the
+ // newly compiled std, not the downloaded std.
+ add_filtered_files("lib", builder.config.ci_rust_std_contents());
+
+ let filtered_extensions = [
+ OsStr::new("rmeta"),
+ OsStr::new("rlib"),
+ // FIXME: this is wrong when compiler.host != build, but we don't support that today
+ OsStr::new(std::env::consts::DLL_EXTENSION),
+ ];
let ci_rustc_dir = builder.ci_rustc_dir(builder.config.build);
builder.cp_filtered(&ci_rustc_dir, &sysroot, &|path| {
if path.extension().map_or(true, |ext| !filtered_extensions.contains(&ext)) {
@@ -1404,10 +1496,15 @@ impl Step for Assemble {
// If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0.
if builder.download_rustc() {
- let sysroot = builder.ensure(Sysroot { compiler: target_compiler });
+ let sysroot =
+ builder.ensure(Sysroot { compiler: target_compiler, force_recompile: false });
// Ensure that `libLLVM.so` ends up in the newly created target directory,
// so that tools using `rustc_private` can use it.
dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot);
+ // Lower stages use `ci-rustc-sysroot`, not stageN
+ if target_compiler.stage == builder.top_stage {
+ builder.info(&format!("Creating a sysroot for stage{stage} compiler (use `rustup toolchain link 'name' build/host/stage{stage}`)", stage=target_compiler.stage));
+ }
return target_compiler;
}
@@ -1445,11 +1542,18 @@ impl Step for Assemble {
let stage = target_compiler.stage;
let host = target_compiler.host;
- let msg = if build_compiler.host == host {
- format!("Assembling stage{} compiler", stage)
+ let (host_info, dir_name) = if build_compiler.host == host {
+ ("".into(), "host".into())
} else {
- format!("Assembling stage{} compiler ({})", stage, host)
+ (format!(" ({host})"), host.to_string())
};
+ // NOTE: "Creating a sysroot" is somewhat inconsistent with our internal terminology, since
+ // sysroots can temporarily be empty until we put the compiler inside. However,
+ // `ensure(Sysroot)` isn't really something that's user facing, so there shouldn't be any
+ // ambiguity.
+ let msg = format!(
+ "Creating a sysroot for stage{stage} compiler{host_info} (use `rustup toolchain link 'name' build/{dir_name}/stage{stage}`)"
+ );
builder.info(&msg);
// Link in all dylibs to the libdir
@@ -1679,7 +1783,7 @@ pub fn run_cargo(
});
if !ok {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
// Ok now we need to actually find all the files listed in `toplevel`. We've
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index e192cda9a..fe932fd6b 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -10,19 +10,20 @@ use std::cell::{Cell, RefCell};
use std::cmp;
use std::collections::{HashMap, HashSet};
use std::env;
-use std::fmt;
+use std::fmt::{self, Display};
use std::fs;
+use std::io::IsTerminal;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::FromStr;
-use crate::builder::TaskPath;
use crate::cache::{Interned, INTERNER};
use crate::cc_detect::{ndk_compiler, Language};
use crate::channel::{self, GitInfo};
pub use crate::flags::Subcommand;
use crate::flags::{Color, Flags, Warnings};
use crate::util::{exe, output, t};
+use build_helper::detail_exit_macro;
use once_cell::sync::OnceCell;
use semver::Version;
use serde::{Deserialize, Deserializer};
@@ -49,6 +50,57 @@ pub enum DryRun {
UserSelected,
}
+#[derive(Copy, Clone, Default)]
+pub enum DebuginfoLevel {
+ #[default]
+ None,
+ LineTablesOnly,
+ Limited,
+ Full,
+}
+
+// NOTE: can't derive(Deserialize) because the intermediate trip through toml::Value only
+// deserializes i64, and derive() only generates visit_u64
+impl<'de> Deserialize<'de> for DebuginfoLevel {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ use serde::de::Error;
+
+ Ok(match Deserialize::deserialize(deserializer)? {
+ StringOrInt::String("none") | StringOrInt::Int(0) => DebuginfoLevel::None,
+ StringOrInt::String("line-tables-only") => DebuginfoLevel::LineTablesOnly,
+ StringOrInt::String("limited") | StringOrInt::Int(1) => DebuginfoLevel::Limited,
+ StringOrInt::String("full") | StringOrInt::Int(2) => DebuginfoLevel::Full,
+ StringOrInt::Int(n) => {
+ let other = serde::de::Unexpected::Signed(n);
+ return Err(D::Error::invalid_value(other, &"expected 0, 1, or 2"));
+ }
+ StringOrInt::String(s) => {
+ let other = serde::de::Unexpected::Str(s);
+ return Err(D::Error::invalid_value(
+ other,
+ &"expected none, line-tables-only, limited, or full",
+ ));
+ }
+ })
+ }
+}
+
+/// Suitable for passing to `-C debuginfo`
+impl Display for DebuginfoLevel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use DebuginfoLevel::*;
+ f.write_str(match self {
+ None => "0",
+ LineTablesOnly => "line-tables-only",
+ Limited => "1",
+ Full => "2",
+ })
+ }
+}
+
/// Global configuration for the entire build and/or bootstrap.
///
/// This structure is parsed from `config.toml`, and some of the fields are inferred from `git` or build-time parameters.
@@ -78,7 +130,7 @@ pub struct Config {
pub sanitizers: bool,
pub profiler: bool,
pub omit_git_hash: bool,
- pub exclude: Vec<TaskPath>,
+ pub exclude: Vec<PathBuf>,
pub include_default_paths: bool,
pub rustc_error_format: Option<String>,
pub json_output: bool,
@@ -150,7 +202,7 @@ pub struct Config {
pub llvm_use_libcxx: bool,
// rust codegen options
- pub rust_optimize: bool,
+ pub rust_optimize: RustOptimize,
pub rust_codegen_units: Option<u32>,
pub rust_codegen_units_std: Option<u32>,
pub rust_debug_assertions: bool,
@@ -158,10 +210,10 @@ pub struct Config {
pub rust_overflow_checks: bool,
pub rust_overflow_checks_std: bool,
pub rust_debug_logging: bool,
- pub rust_debuginfo_level_rustc: u32,
- pub rust_debuginfo_level_std: u32,
- pub rust_debuginfo_level_tools: u32,
- pub rust_debuginfo_level_tests: u32,
+ pub rust_debuginfo_level_rustc: DebuginfoLevel,
+ pub rust_debuginfo_level_std: DebuginfoLevel,
+ pub rust_debuginfo_level_tools: DebuginfoLevel,
+ pub rust_debuginfo_level_tests: DebuginfoLevel,
pub rust_split_debuginfo: SplitDebuginfo,
pub rust_rpath: bool,
pub rustc_parallel: bool,
@@ -377,6 +429,7 @@ impl std::str::FromStr for RustcLto {
pub struct TargetSelection {
pub triple: Interned<String>,
file: Option<Interned<String>>,
+ synthetic: bool,
}
/// Newtype over `Vec<TargetSelection>` so we can implement custom parsing logic
@@ -408,7 +461,15 @@ impl TargetSelection {
let triple = INTERNER.intern_str(triple);
let file = file.map(|f| INTERNER.intern_str(f));
- Self { triple, file }
+ Self { triple, file, synthetic: false }
+ }
+
+ pub fn create_synthetic(triple: &str, file: &str) -> Self {
+ Self {
+ triple: INTERNER.intern_str(triple),
+ file: Some(INTERNER.intern_str(file)),
+ synthetic: true,
+ }
}
pub fn rustc_target_arg(&self) -> &str {
@@ -426,6 +487,11 @@ impl TargetSelection {
pub fn ends_with(&self, needle: &str) -> bool {
self.triple.ends_with(needle)
}
+
+ // See src/bootstrap/synthetic_targets.rs
+ pub fn is_synthetic(&self) -> bool {
+ self.synthetic
+ }
}
impl fmt::Display for TargetSelection {
@@ -580,7 +646,7 @@ macro_rules! define_config {
panic!("overriding existing option")
} else {
eprintln!("overriding existing option: `{}`", stringify!($field));
- crate::detail_exit(2);
+ detail_exit_macro!(2);
}
} else {
self.$field = other.$field;
@@ -679,7 +745,7 @@ impl<T> Merge for Option<T> {
panic!("overriding existing option")
} else {
eprintln!("overriding existing option");
- crate::detail_exit(2);
+ detail_exit_macro!(2);
}
} else {
*self = other;
@@ -809,10 +875,55 @@ impl Default for StringOrBool {
}
}
+#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
+#[serde(untagged)]
+pub enum RustOptimize {
+ #[serde(deserialize_with = "deserialize_and_validate_opt_level")]
+ String(String),
+ Bool(bool),
+}
+
+impl Default for RustOptimize {
+ fn default() -> RustOptimize {
+ RustOptimize::Bool(false)
+ }
+}
+
+fn deserialize_and_validate_opt_level<'de, D>(d: D) -> Result<String, D::Error>
+where
+ D: serde::de::Deserializer<'de>,
+{
+ let v = String::deserialize(d)?;
+ if ["0", "1", "2", "3", "s", "z"].iter().find(|x| **x == v).is_some() {
+ Ok(v)
+ } else {
+ Err(format!(r#"unrecognized option for rust optimize: "{}", expected one of "0", "1", "2", "3", "s", "z""#, v)).map_err(serde::de::Error::custom)
+ }
+}
+
+impl RustOptimize {
+ pub(crate) fn is_release(&self) -> bool {
+ if let RustOptimize::Bool(true) | RustOptimize::String(_) = &self { true } else { false }
+ }
+
+ pub(crate) fn get_opt_level(&self) -> Option<String> {
+ match &self {
+ RustOptimize::String(s) => Some(s.clone()),
+ RustOptimize::Bool(_) => None,
+ }
+ }
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum StringOrInt<'a> {
+ String(&'a str),
+ Int(i64),
+}
define_config! {
/// TOML representation of how the Rust build is configured.
struct Rust {
- optimize: Option<bool> = "optimize",
+ optimize: Option<RustOptimize> = "optimize",
debug: Option<bool> = "debug",
codegen_units: Option<u32> = "codegen-units",
codegen_units_std: Option<u32> = "codegen-units-std",
@@ -821,11 +932,11 @@ define_config! {
overflow_checks: Option<bool> = "overflow-checks",
overflow_checks_std: Option<bool> = "overflow-checks-std",
debug_logging: Option<bool> = "debug-logging",
- debuginfo_level: Option<u32> = "debuginfo-level",
- debuginfo_level_rustc: Option<u32> = "debuginfo-level-rustc",
- debuginfo_level_std: Option<u32> = "debuginfo-level-std",
- debuginfo_level_tools: Option<u32> = "debuginfo-level-tools",
- debuginfo_level_tests: Option<u32> = "debuginfo-level-tests",
+ debuginfo_level: Option<DebuginfoLevel> = "debuginfo-level",
+ debuginfo_level_rustc: Option<DebuginfoLevel> = "debuginfo-level-rustc",
+ debuginfo_level_std: Option<DebuginfoLevel> = "debuginfo-level-std",
+ debuginfo_level_tools: Option<DebuginfoLevel> = "debuginfo-level-tools",
+ debuginfo_level_tests: Option<DebuginfoLevel> = "debuginfo-level-tests",
split_debuginfo: Option<String> = "split-debuginfo",
run_dsymutil: Option<bool> = "run-dsymutil",
backtrace: Option<bool> = "backtrace",
@@ -893,14 +1004,12 @@ define_config! {
impl Config {
pub fn default_opts() -> Config {
- use is_terminal::IsTerminal;
-
let mut config = Config::default();
config.llvm_optimize = true;
config.ninja_in_file = true;
config.llvm_static_stdcpp = false;
config.backtrace = true;
- config.rust_optimize = true;
+ config.rust_optimize = RustOptimize::Bool(true);
config.rust_optimize_tests = true;
config.submodules = None;
config.docs = true;
@@ -945,7 +1054,7 @@ impl Config {
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
.unwrap_or_else(|err| {
eprintln!("failed to parse TOML configuration '{}': {err}", file.display());
- crate::detail_exit(2);
+ detail_exit_macro!(2);
})
}
Self::parse_inner(args, get_toml)
@@ -957,7 +1066,7 @@ impl Config {
// Set flags.
config.paths = std::mem::take(&mut flags.paths);
- config.exclude = flags.exclude.into_iter().map(|path| TaskPath::parse(path)).collect();
+ config.exclude = flags.exclude;
config.include_default_paths = flags.include_default_paths;
config.rustc_error_format = flags.rustc_error_format;
config.json_output = flags.json_output;
@@ -979,7 +1088,7 @@ impl Config {
eprintln!(
"Cannot use both `llvm_bolt_profile_generate` and `llvm_bolt_profile_use` at the same time"
);
- crate::detail_exit(1);
+ detail_exit_macro!(1);
}
// Infer the rest of the configuration.
@@ -1058,6 +1167,14 @@ impl Config {
};
if let Some(include) = &toml.profile {
+ // Allows creating alias for profile names, allowing
+ // profiles to be renamed while maintaining back compatibility
+ // Keep in sync with `profile_aliases` in bootstrap.py
+ let profile_aliases = HashMap::from([("user", "dist")]);
+ let include = match profile_aliases.get(include.as_str()) {
+ Some(alias) => alias,
+ None => include.as_str(),
+ };
let mut include_path = config.src.clone();
include_path.push("src");
include_path.push("bootstrap");
@@ -1095,7 +1212,7 @@ impl Config {
}
}
eprintln!("failed to parse override `{option}`: `{err}");
- crate::detail_exit(2)
+ detail_exit_macro!(2)
}
toml.merge(override_toml, ReplaceOpt::Override);
@@ -1114,10 +1231,13 @@ impl Config {
config.out = crate::util::absolute(&config.out);
}
- config.initial_rustc = build.rustc.map(PathBuf::from).unwrap_or_else(|| {
+ config.initial_rustc = if let Some(rustc) = build.rustc {
+ config.check_build_rustc_version(&rustc);
+ PathBuf::from(rustc)
+ } else {
config.download_beta_toolchain();
config.out.join(config.build.triple).join("stage0/bin/rustc")
- });
+ };
config.initial_cargo = build
.cargo
@@ -1464,7 +1584,7 @@ impl Config {
config.llvm_assertions = llvm_assertions.unwrap_or(false);
config.llvm_tests = llvm_tests.unwrap_or(false);
config.llvm_plugins = llvm_plugins.unwrap_or(false);
- config.rust_optimize = optimize.unwrap_or(true);
+ config.rust_optimize = optimize.unwrap_or(RustOptimize::Bool(true));
let default = debug == Some(true);
config.rust_debug_assertions = debug_assertions.unwrap_or(default);
@@ -1476,17 +1596,17 @@ impl Config {
config.rust_debug_logging = debug_logging.unwrap_or(config.rust_debug_assertions);
- let with_defaults = |debuginfo_level_specific: Option<u32>| {
+ let with_defaults = |debuginfo_level_specific: Option<_>| {
debuginfo_level_specific.or(debuginfo_level).unwrap_or(if debug == Some(true) {
- 1
+ DebuginfoLevel::Limited
} else {
- 0
+ DebuginfoLevel::None
})
};
config.rust_debuginfo_level_rustc = with_defaults(debuginfo_level_rustc);
config.rust_debuginfo_level_std = with_defaults(debuginfo_level_std);
config.rust_debuginfo_level_tools = with_defaults(debuginfo_level_tools);
- config.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(0);
+ config.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(DebuginfoLevel::None);
let download_rustc = config.download_rustc_commit.is_some();
// See https://github.com/rust-lang/compiler-team/issues/326
@@ -1780,13 +1900,13 @@ impl Config {
self.rust_codegen_backends.get(0).cloned()
}
- pub fn check_build_rustc_version(&self) {
+ pub fn check_build_rustc_version(&self, rustc_path: &str) {
if self.dry_run() {
return;
}
// check rustc version is same or lower with 1 apart from the building one
- let mut cmd = Command::new(&self.initial_rustc);
+ let mut cmd = Command::new(rustc_path);
cmd.arg("--version");
let rustc_output = output(&mut cmd)
.lines()
@@ -1805,14 +1925,15 @@ impl Config {
.unwrap();
if !(source_version == rustc_version
|| (source_version.major == rustc_version.major
- && source_version.minor == rustc_version.minor + 1))
+ && (source_version.minor == rustc_version.minor
+ || source_version.minor == rustc_version.minor + 1)))
{
let prev_version = format!("{}.{}.x", source_version.major, source_version.minor - 1);
eprintln!(
"Unexpected rustc version: {}, we should use {}/{} to build source with {}",
rustc_version, prev_version, source_version, source_version
);
- crate::detail_exit(1);
+ detail_exit_macro!(1);
}
}
@@ -1848,7 +1969,7 @@ impl Config {
println!("help: maybe your repository history is too shallow?");
println!("help: consider disabling `download-rustc`");
println!("help: or fetch enough history to include one upstream commit");
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
// Warn if there were changes to the compiler or standard library since the ancestor commit.
diff --git a/src/bootstrap/config/tests.rs b/src/bootstrap/config/tests.rs
index 4de84b543..732df54cd 100644
--- a/src/bootstrap/config/tests.rs
+++ b/src/bootstrap/config/tests.rs
@@ -1,5 +1,8 @@
+use crate::config::TomlConfig;
+
use super::{Config, Flags};
use clap::CommandFactory;
+use serde::Deserialize;
use std::{env, path::Path};
fn parse(config: &str) -> Config {
@@ -159,3 +162,34 @@ fn override_toml_duplicate() {
|&_| toml::from_str("changelog-seen = 0").unwrap(),
);
}
+
+#[test]
+fn profile_user_dist() {
+ fn get_toml(file: &Path) -> TomlConfig {
+ let contents = if file.ends_with("config.toml") {
+ "profile = \"user\"".to_owned()
+ } else {
+ assert!(file.ends_with("config.dist.toml"));
+ std::fs::read_to_string(dbg!(file)).unwrap()
+ };
+ toml::from_str(&contents)
+ .and_then(|table: toml::Value| TomlConfig::deserialize(table))
+ .unwrap()
+ }
+ Config::parse_inner(&["check".to_owned()], get_toml);
+}
+
+#[test]
+fn rust_optimize() {
+ assert_eq!(parse("").rust_optimize.is_release(), true);
+ assert_eq!(parse("rust.optimize = false").rust_optimize.is_release(), false);
+ assert_eq!(parse("rust.optimize = true").rust_optimize.is_release(), true);
+ assert_eq!(parse("rust.optimize = \"1\"").rust_optimize.get_opt_level(), Some("1".to_string()));
+ assert_eq!(parse("rust.optimize = \"s\"").rust_optimize.get_opt_level(), Some("s".to_string()));
+}
+
+#[test]
+#[should_panic]
+fn invalid_rust_optimize() {
+ parse("rust.optimize = \"a\"");
+}
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
index 571062a3a..e8eebdfb5 100755
--- a/src/bootstrap/configure.py
+++ b/src/bootstrap/configure.py
@@ -9,7 +9,7 @@ rust_dir = os.path.dirname(os.path.abspath(__file__))
rust_dir = os.path.dirname(rust_dir)
rust_dir = os.path.dirname(rust_dir)
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
-import bootstrap
+import bootstrap # noqa: E402
class Option(object):
@@ -45,7 +45,6 @@ o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ f
o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)")
o("rpath", "rust.rpath", "build rpaths into rustc itself")
o("codegen-tests", "rust.codegen-tests", "run the tests/codegen tests")
-o("option-checking", None, "complain about unrecognized options in this configure script")
o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)")
o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date")
o("vendor", "build.vendor", "enable usage of vendored Rust crates")
@@ -170,6 +169,9 @@ v("build", "build.build", "GNUs ./configure syntax LLVM build triple")
v("host", None, "List of GNUs ./configure syntax LLVM host triples")
v("target", None, "List of GNUs ./configure syntax LLVM target triples")
+# Options specific to this configure script
+o("option-checking", None, "complain about unrecognized options in this configure script")
+o("verbose-configure", None, "don't truncate options when printing them in this configure script")
v("set", None, "set arbitrary key/value pairs in TOML configuration")
@@ -211,6 +213,8 @@ if '--help' in sys.argv or '-h' in sys.argv:
print('be passed with `--disable-foo` to forcibly disable the option')
sys.exit(0)
+VERBOSE = False
+
# Parse all command line arguments into one of these three lists, handling
# boolean and value-based options separately
def parse_args(args):
@@ -271,9 +275,12 @@ def parse_args(args):
if len(need_value_args) > 0:
err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))
+ global VERBOSE
+ VERBOSE = 'verbose-configure' in known_args
+
config = {}
- set('build.configure-args', sys.argv[1:], config)
+ set('build.configure-args', args, config)
apply_args(known_args, option_checking, config)
return parse_example_config(known_args, config)
@@ -290,7 +297,7 @@ def set(key, value, config):
value = [v for v in value if v]
s = "{:20} := {}".format(key, value)
- if len(s) < 70:
+ if len(s) < 70 or VERBOSE:
p(s)
else:
p(s[:70] + " ...")
@@ -312,7 +319,7 @@ def apply_args(known_args, option_checking, config):
for key in known_args:
# The `set` option is special and can be passed a bunch of times
if key == 'set':
- for option, value in known_args[key]:
+ for _option, value in known_args[key]:
keyval = value.split('=', 1)
if len(keyval) == 1 or keyval[1] == "true":
value = True
@@ -371,7 +378,7 @@ def apply_args(known_args, option_checking, config):
set('rust.lld', True, config)
set('rust.llvm-tools', True, config)
set('build.extended', True, config)
- elif option.name == 'option-checking':
+ elif option.name in ['option-checking', 'verbose-configure']:
# this was handled above
pass
elif option.name == 'dist-compression-formats':
@@ -393,8 +400,10 @@ def parse_example_config(known_args, config):
targets = {}
top_level_keys = []
- for line in open(rust_dir + '/config.example.toml').read().split("\n"):
- if cur_section == None:
+ with open(rust_dir + '/config.example.toml') as example_config:
+ example_lines = example_config.read().split("\n")
+ for line in example_lines:
+ if cur_section is None:
if line.count('=') == 1:
top_level_key = line.split('=')[0]
top_level_key = top_level_key.strip(' #')
@@ -428,7 +437,7 @@ def parse_example_config(known_args, config):
targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
if 'profile' not in config:
- set('profile', 'user', config)
+ set('profile', 'dist', config)
configure_file(sections, top_level_keys, targets, config)
return section_order, sections, targets
@@ -516,8 +525,8 @@ def write_uncommented(target, f):
block.append(line)
if len(line) == 0:
if not is_comment:
- for l in block:
- f.write(l + "\n")
+ for ln in block:
+ f.write(ln + "\n")
block = []
is_comment = True
continue
diff --git a/src/bootstrap/defaults/config.user.toml b/src/bootstrap/defaults/config.dist.toml
index 25d9e649f..44efdf50b 100644
--- a/src/bootstrap/defaults/config.user.toml
+++ b/src/bootstrap/defaults/config.dist.toml
@@ -12,6 +12,9 @@ extended = true
[llvm]
download-ci-llvm = false
[rust]
+# We have several defaults in bootstrap that depend on whether the channel is `dev` (e.g. `omit-git-hash` and `download-ci-llvm`).
+# Make sure they don't get set when installing from source.
+channel = "nightly"
download-rustc = false
[dist]
diff --git a/src/bootstrap/defaults/config.tools.toml b/src/bootstrap/defaults/config.tools.toml
index 6b6625342..79424f28d 100644
--- a/src/bootstrap/defaults/config.tools.toml
+++ b/src/bootstrap/defaults/config.tools.toml
@@ -9,6 +9,8 @@ debug-logging = true
incremental = true
# Download rustc from CI instead of building it from source.
# This cuts compile times by almost 60x, but means you can't modify the compiler.
+# Using these defaults will download the stage2 compiler (see `download-rustc`
+# setting) and the stage2 toolchain should therefore be used for these defaults.
download-rustc = "if-unchanged"
[build]
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 9cead7adc..b34a4b2dc 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -106,11 +106,7 @@ impl Step for JsonDocs {
/// Builds the `rust-docs-json` installer component.
fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let host = self.host;
- builder.ensure(crate::doc::Std {
- stage: builder.top_stage,
- target: host,
- format: DocumentationFormat::JSON,
- });
+ builder.ensure(crate::doc::Std::new(builder.top_stage, host, DocumentationFormat::JSON));
let dest = "share/doc/rust/json";
@@ -174,6 +170,10 @@ fn make_win_dist(
target: TargetSelection,
builder: &Builder<'_>,
) {
+ if builder.config.dry_run() {
+ return;
+ }
+
//Ask gcc where it keeps its stuff
let mut cmd = Command::new(builder.cc(target));
cmd.arg("-print-search-dirs");
@@ -1013,6 +1013,9 @@ impl Step for PlainSourceTarball {
.arg(builder.src.join("./compiler/rustc_codegen_cranelift/Cargo.toml"))
.arg("--sync")
.arg(builder.src.join("./src/bootstrap/Cargo.toml"))
+ // Will read the libstd Cargo.toml
+ // which uses the unstable `public-dependency` feature.
+ .env("RUSTC_BOOTSTRAP", "1")
.current_dir(&plain_dst_src);
let config = if !builder.config.dry_run() {
@@ -1594,9 +1597,7 @@ impl Step for Extended {
prepare("cargo");
prepare("rust-analysis");
prepare("rust-std");
- prepare("clippy");
- prepare("rust-analyzer");
- for tool in &["rust-docs", "rust-demangler", "miri"] {
+ for tool in &["clippy", "rust-analyzer", "rust-docs", "rust-demangler", "miri"] {
if built_tools.contains(tool) {
prepare(tool);
}
@@ -1682,40 +1683,44 @@ impl Step for Extended {
.arg("-out")
.arg(exe.join("StdGroup.wxs")),
);
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("rust-analyzer")
- .args(&heat_flags)
- .arg("-cg")
- .arg("RustAnalyzerGroup")
- .arg("-dr")
- .arg("RustAnalyzer")
- .arg("-var")
- .arg("var.RustAnalyzerDir")
- .arg("-out")
- .arg(exe.join("RustAnalyzerGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/remove-duplicates.xsl")),
- );
- builder.run(
- Command::new(&heat)
- .current_dir(&exe)
- .arg("dir")
- .arg("clippy")
- .args(&heat_flags)
- .arg("-cg")
- .arg("ClippyGroup")
- .arg("-dr")
- .arg("Clippy")
- .arg("-var")
- .arg("var.ClippyDir")
- .arg("-out")
- .arg(exe.join("ClippyGroup.wxs"))
- .arg("-t")
- .arg(etc.join("msi/remove-duplicates.xsl")),
- );
+ if built_tools.contains("rust-analyzer") {
+ builder.run(
+ Command::new(&heat)
+ .current_dir(&exe)
+ .arg("dir")
+ .arg("rust-analyzer")
+ .args(&heat_flags)
+ .arg("-cg")
+ .arg("RustAnalyzerGroup")
+ .arg("-dr")
+ .arg("RustAnalyzer")
+ .arg("-var")
+ .arg("var.RustAnalyzerDir")
+ .arg("-out")
+ .arg(exe.join("RustAnalyzerGroup.wxs"))
+ .arg("-t")
+ .arg(etc.join("msi/remove-duplicates.xsl")),
+ );
+ }
+ if built_tools.contains("clippy") {
+ builder.run(
+ Command::new(&heat)
+ .current_dir(&exe)
+ .arg("dir")
+ .arg("clippy")
+ .args(&heat_flags)
+ .arg("-cg")
+ .arg("ClippyGroup")
+ .arg("-dr")
+ .arg("Clippy")
+ .arg("-var")
+ .arg("var.ClippyDir")
+ .arg("-out")
+ .arg(exe.join("ClippyGroup.wxs"))
+ .arg("-t")
+ .arg(etc.join("msi/remove-duplicates.xsl")),
+ );
+ }
if built_tools.contains("rust-demangler") {
builder.run(
Command::new(&heat)
@@ -1799,7 +1804,6 @@ impl Step for Extended {
.arg("-dCargoDir=cargo")
.arg("-dStdDir=rust-std")
.arg("-dAnalysisDir=rust-analysis")
- .arg("-dClippyDir=clippy")
.arg("-arch")
.arg(&arch)
.arg("-out")
@@ -1807,6 +1811,9 @@ impl Step for Extended {
.arg(&input);
add_env(builder, &mut cmd, target);
+ if built_tools.contains("clippy") {
+ cmd.arg("-dClippyDir=clippy");
+ }
if built_tools.contains("rust-docs") {
cmd.arg("-dDocsDir=rust-docs");
}
@@ -1833,7 +1840,9 @@ impl Step for Extended {
}
candle("CargoGroup.wxs".as_ref());
candle("StdGroup.wxs".as_ref());
- candle("ClippyGroup.wxs".as_ref());
+ if built_tools.contains("clippy") {
+ candle("ClippyGroup.wxs".as_ref());
+ }
if built_tools.contains("miri") {
candle("MiriGroup.wxs".as_ref());
}
@@ -1870,9 +1879,11 @@ impl Step for Extended {
.arg("CargoGroup.wixobj")
.arg("StdGroup.wixobj")
.arg("AnalysisGroup.wixobj")
- .arg("ClippyGroup.wixobj")
.current_dir(&exe);
+ if built_tools.contains("clippy") {
+ cmd.arg("ClippyGroup.wixobj");
+ }
if built_tools.contains("miri") {
cmd.arg("MiriGroup.wixobj");
}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index b52c3b68c..5ebfe0995 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -7,15 +7,14 @@
//! Everything here is basically just a shim around calling either `rustbook` or
//! `rustdoc`.
-use std::ffi::OsStr;
use std::fs;
-use std::io;
use std::path::{Path, PathBuf};
use crate::builder::crate_description;
use crate::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::compile;
+use crate::compile::make_run_crates;
use crate::config::{Config, TargetSelection};
use crate::tool::{self, prepare_tool_cargo, SourceType, Tool};
use crate::util::{symlink_dir, t, up_to_date};
@@ -87,15 +86,6 @@ book!(
StyleGuide, "src/doc/style-guide", "style-guide";
);
-// "library/std" -> ["library", "std"]
-//
-// Used for deciding whether a particular step is one requested by the user on
-// the `x.py doc` command line, which determines whether `--open` will open that
-// page.
-pub(crate) fn components_simplified(path: &PathBuf) -> Vec<&str> {
- path.iter().map(|component| component.to_str().unwrap_or("???")).collect()
-}
-
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct UnstableBook {
target: TargetSelection,
@@ -232,7 +222,7 @@ impl Step for TheBook {
let shared_assets = builder.ensure(SharedAssets { target });
// build the redirect pages
- builder.info(&format!("Documenting book redirect pages ({})", target));
+ builder.msg_doc(compiler, "book redirect pages", target);
for file in t!(fs::read_dir(builder.src.join(&relative_path).join("redirects"))) {
let file = t!(file);
let path = file.path();
@@ -316,7 +306,7 @@ impl Step for Standalone {
fn run(self, builder: &Builder<'_>) {
let target = self.target;
let compiler = self.compiler;
- builder.info(&format!("Documenting standalone ({})", target));
+ builder.msg_doc(compiler, "standalone", target);
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
@@ -425,11 +415,18 @@ impl Step for SharedAssets {
}
}
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Std {
pub stage: u32,
pub target: TargetSelection,
pub format: DocumentationFormat,
+ crates: Interned<Vec<String>>,
+}
+
+impl Std {
+ pub(crate) fn new(stage: u32, target: TargetSelection, format: DocumentationFormat) -> Self {
+ Std { stage, target, format, crates: INTERNER.intern_list(vec![]) }
+ }
}
impl Step for Std {
@@ -438,7 +435,7 @@ impl Step for Std {
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.all_krates("sysroot").path("library").default_condition(builder.config.docs)
+ run.crate_or_deps("sysroot").path("library").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig<'_>) {
@@ -450,6 +447,7 @@ impl Step for Std {
} else {
DocumentationFormat::HTML
},
+ crates: make_run_crates(&run, "library"),
});
}
@@ -457,7 +455,7 @@ impl Step for Std {
///
/// This will generate all documentation for the standard library and its
/// dependencies. This is largely just a wrapper around `cargo doc`.
- fn run(self, builder: &Builder<'_>) {
+ fn run(mut self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
let out = match self.format {
@@ -471,41 +469,24 @@ impl Step for Std {
builder.ensure(SharedAssets { target: self.target });
}
- let index_page = builder.src.join("src/doc/index.md").into_os_string();
+ let index_page = builder
+ .src
+ .join("src/doc/index.md")
+ .into_os_string()
+ .into_string()
+ .expect("non-utf8 paths are unsupported");
let mut extra_args = match self.format {
- DocumentationFormat::HTML => vec![
- OsStr::new("--markdown-css"),
- OsStr::new("rust.css"),
- OsStr::new("--markdown-no-toc"),
- OsStr::new("--index-page"),
- &index_page,
- ],
- DocumentationFormat::JSON => vec![OsStr::new("--output-format"), OsStr::new("json")],
+ DocumentationFormat::HTML => {
+ vec!["--markdown-css", "rust.css", "--markdown-no-toc", "--index-page", &index_page]
+ }
+ DocumentationFormat::JSON => vec!["--output-format", "json"],
};
if !builder.config.docs_minification {
- extra_args.push(OsStr::new("--disable-minification"));
+ extra_args.push("--disable-minification");
}
- let requested_crates = builder
- .paths
- .iter()
- .map(components_simplified)
- .filter_map(|path| {
- if path.len() >= 2 && path.get(0) == Some(&"library") {
- // single crate
- Some(path[1].to_owned())
- } else if !path.is_empty() {
- // ??
- Some(path[0].to_owned())
- } else {
- // all library crates
- None
- }
- })
- .collect::<Vec<_>>();
-
- doc_std(builder, self.format, stage, target, &out, &extra_args, &requested_crates);
+ doc_std(builder, self.format, stage, target, &out, &extra_args, &self.crates);
// Don't open if the format is json
if let DocumentationFormat::JSON = self.format {
@@ -514,7 +495,11 @@ impl Step for Std {
// Look for library/std, library/core etc in the `x.py doc` arguments and
// open the corresponding rendered docs.
- for requested_crate in requested_crates {
+ if self.crates.is_empty() {
+ self.crates = INTERNER.intern_list(vec!["library".to_owned()]);
+ };
+
+ for requested_crate in &*self.crates {
if requested_crate == "library" {
// For `x.py doc library --open`, open `std` by default.
let index = out.join("std").join("index.html");
@@ -538,7 +523,7 @@ impl Step for Std {
/// or remote link.
const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"];
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum DocumentationFormat {
HTML,
JSON,
@@ -563,24 +548,22 @@ fn doc_std(
stage: u32,
target: TargetSelection,
out: &Path,
- extra_args: &[&OsStr],
+ extra_args: &[&str],
requested_crates: &[String],
) {
- builder.info(&format!(
- "Documenting{} stage{} library ({}) in {} format",
- crate_description(requested_crates),
- stage,
- target,
- format.as_str()
- ));
if builder.no_std(target) == Some(true) {
panic!(
"building std documentation for no_std target {target} is not supported\n\
- Set `docs = false` in the config to disable documentation."
+ Set `docs = false` in the config to disable documentation, or pass `--exclude doc::library`."
);
}
+
let compiler = builder.compiler(stage, builder.config.build);
+ let description =
+ format!("library{} in {} format", crate_description(&requested_crates), format.as_str());
+ let _guard = builder.msg_doc(compiler, &description, target);
+
let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" };
let target_dir =
builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name);
@@ -590,35 +573,42 @@ fn doc_std(
// as a function parameter.
let out_dir = target_dir.join(target.triple).join("doc");
- let run_cargo_rustdoc_for = |package: &str| {
- let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
- compile::std_cargo(builder, target, compiler.stage, &mut cargo);
- cargo
- .arg("--target-dir")
- .arg(&*target_dir.to_string_lossy())
- .arg("-p")
- .arg(package)
- .arg("-Zskip-rustdoc-fingerprint")
- .arg("--")
- .arg("-Z")
- .arg("unstable-options")
- .arg("--resource-suffix")
- .arg(&builder.version)
- .args(extra_args);
- if builder.config.library_docs_private_items {
- cargo.arg("--document-private-items").arg("--document-hidden-items");
- }
- builder.run(&mut cargo.into());
+ let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "doc");
+ compile::std_cargo(builder, target, compiler.stage, &mut cargo);
+ cargo
+ .arg("--no-deps")
+ .arg("--target-dir")
+ .arg(&*target_dir.to_string_lossy())
+ .arg("-Zskip-rustdoc-fingerprint")
+ .rustdocflag("-Z")
+ .rustdocflag("unstable-options")
+ .rustdocflag("--resource-suffix")
+ .rustdocflag(&builder.version);
+ for arg in extra_args {
+ cargo.rustdocflag(arg);
+ }
+
+ if builder.config.library_docs_private_items {
+ cargo.rustdocflag("--document-private-items").rustdocflag("--document-hidden-items");
+ }
+
+ // HACK: because we use `--manifest-path library/sysroot/Cargo.toml`, cargo thinks we only want to document that specific crate, not its dependencies.
+ // Override its default.
+ let built_crates = if requested_crates.is_empty() {
+ builder
+ .in_tree_crates("sysroot", None)
+ .into_iter()
+ .map(|krate| krate.name.to_string())
+ .collect()
+ } else {
+ requested_crates.to_vec()
};
- for krate in STD_PUBLIC_CRATES {
- run_cargo_rustdoc_for(krate);
- if requested_crates.iter().any(|p| p == krate) {
- // No need to document more of the libraries if we have the one we want.
- break;
- }
+ for krate in built_crates {
+ cargo.arg("-p").arg(krate);
}
+ builder.run(&mut cargo.into());
builder.cp_r(&out_dir, &out);
}
@@ -626,6 +616,28 @@ fn doc_std(
pub struct Rustc {
pub stage: u32,
pub target: TargetSelection,
+ crates: Interned<Vec<String>>,
+}
+
+impl Rustc {
+ pub(crate) fn new(stage: u32, target: TargetSelection, builder: &Builder<'_>) -> Self {
+ // Find dependencies for top level crates.
+ let root_crates = vec![
+ INTERNER.intern_str("rustc_driver"),
+ INTERNER.intern_str("rustc_codegen_llvm"),
+ INTERNER.intern_str("rustc_codegen_ssa"),
+ ];
+ let crates: Vec<_> = root_crates
+ .iter()
+ .flat_map(|krate| {
+ builder
+ .in_tree_crates(krate, Some(target))
+ .into_iter()
+ .map(|krate| krate.name.to_string())
+ })
+ .collect();
+ Self { stage, target, crates: INTERNER.intern_list(crates) }
+ }
}
impl Step for Rustc {
@@ -641,7 +653,11 @@ impl Step for Rustc {
}
fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Rustc { stage: run.builder.top_stage, target: run.target });
+ run.builder.ensure(Rustc {
+ stage: run.builder.top_stage,
+ target: run.target,
+ crates: make_run_crates(&run, "compiler"),
+ });
}
/// Generates compiler documentation.
@@ -650,19 +666,10 @@ impl Step for Rustc {
/// Compiler documentation is distributed separately, so we make sure
/// we do not merge it with the other documentation from std, test and
/// proc_macros. This is largely just a wrapper around `cargo doc`.
- fn run(self, builder: &Builder<'_>) {
+ fn run(mut self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
- let paths = builder
- .paths
- .iter()
- .filter(|path| {
- let components = components_simplified(path);
- components.len() >= 2 && components[0] == "compiler"
- })
- .collect::<Vec<_>>();
-
// This is the intended out directory for compiler documentation.
let out = builder.compiler_doc_out(target);
t!(fs::create_dir_all(&out));
@@ -672,7 +679,13 @@ impl Step for Rustc {
let compiler = builder.compiler(stage, builder.config.build);
builder.ensure(compile::Std::new(compiler, builder.config.build));
- builder.info(&format!("Documenting stage{} compiler ({})", stage, target));
+ let _guard = builder.msg_sysroot_tool(
+ Kind::Doc,
+ stage,
+ &format!("compiler{}", crate_description(&self.crates)),
+ compiler.host,
+ target,
+ );
// This uses a shared directory so that librustdoc documentation gets
// correctly built and merged with the rustc documentation. This is
@@ -680,11 +693,12 @@ impl Step for Rustc {
// rustc. rustdoc needs to be able to see everything, for example when
// merging the search index, or generating local (relative) links.
let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc");
- t!(symlink_dir_force(&builder.config, &out, &out_dir));
+ t!(fs::create_dir_all(out_dir.parent().unwrap()));
+ symlink_dir_force(&builder.config, &out, &out_dir);
// Cargo puts proc macros in `target/doc` even if you pass `--target`
// explicitly (https://github.com/rust-lang/cargo/issues/7677).
let proc_macro_out_dir = builder.stage_out(compiler, Mode::Rustc).join("doc");
- t!(symlink_dir_force(&builder.config, &out, &proc_macro_out_dir));
+ symlink_dir_force(&builder.config, &out, &proc_macro_out_dir);
// Build cargo command.
let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "doc");
@@ -710,22 +724,13 @@ impl Step for Rustc {
cargo.rustdocflag("--extern-html-root-url");
cargo.rustdocflag("ena=https://docs.rs/ena/latest/");
- let root_crates = if paths.is_empty() {
- vec![
- INTERNER.intern_str("rustc_driver"),
- INTERNER.intern_str("rustc_codegen_llvm"),
- INTERNER.intern_str("rustc_codegen_ssa"),
- ]
- } else {
- paths.into_iter().map(|p| builder.crate_paths[p]).collect()
+ let mut to_open = None;
+
+ if self.crates.is_empty() {
+ self.crates = INTERNER.intern_list(vec!["rustc_driver".to_owned()]);
};
- // Find dependencies for top level crates.
- let compiler_crates = root_crates.iter().flat_map(|krate| {
- builder.in_tree_crates(krate, Some(target)).into_iter().map(|krate| krate.name)
- });
- let mut to_open = None;
- for krate in compiler_crates {
+ for krate in &*self.crates {
// Create all crate output directories first to make sure rustdoc uses
// relative links.
// FIXME: Cargo should probably do this itself.
@@ -746,7 +751,15 @@ impl Step for Rustc {
}
macro_rules! tool_doc {
- ($tool: ident, $should_run: literal, $path: literal, $(rustc_tool = $rustc_tool:literal, )? $(in_tree = $in_tree:literal, )? [$($krate: literal),+ $(,)?] $(,)?) => {
+ (
+ $tool: ident,
+ $should_run: literal,
+ $path: literal,
+ $(rustc_tool = $rustc_tool:literal, )?
+ $(in_tree = $in_tree:literal, )?
+ [$($extra_arg: literal),+ $(,)?]
+ $(,)?
+ ) => {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $tool {
target: TargetSelection,
@@ -785,7 +798,7 @@ macro_rules! tool_doc {
if true $(&& $rustc_tool)? {
// Build rustc docs so that we generate relative links.
- builder.ensure(Rustc { stage, target });
+ builder.ensure(Rustc::new(stage, target, builder));
// Rustdoc needs the rustc sysroot available to build.
// FIXME: is there a way to only ensure `check::Rustc` here? Last time I tried it failed
@@ -799,14 +812,7 @@ macro_rules! tool_doc {
SourceType::Submodule
};
- builder.info(
- &format!(
- "Documenting stage{} {} ({})",
- stage,
- stringify!($tool).to_lowercase(),
- target,
- ),
- );
+ builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target);
// Symlink compiler docs to the output directory of rustdoc documentation.
let out_dirs = [
@@ -816,7 +822,7 @@ macro_rules! tool_doc {
];
for out_dir in out_dirs {
t!(fs::create_dir_all(&out_dir));
- t!(symlink_dir_force(&builder.config, &out, &out_dir));
+ symlink_dir_force(&builder.config, &out, &out_dir);
}
// Build cargo command.
@@ -834,8 +840,9 @@ macro_rules! tool_doc {
cargo.arg("-Zskip-rustdoc-fingerprint");
// Only include compiler crates, no dependencies of those, such as `libc`.
cargo.arg("--no-deps");
+
$(
- cargo.arg("-p").arg($krate);
+ cargo.arg($extra_arg);
)+
cargo.rustdocflag("--document-private-items");
@@ -851,15 +858,20 @@ macro_rules! tool_doc {
}
}
-tool_doc!(Rustdoc, "rustdoc-tool", "src/tools/rustdoc", ["rustdoc", "rustdoc-json-types"],);
+tool_doc!(
+ Rustdoc,
+ "rustdoc-tool",
+ "src/tools/rustdoc",
+ ["-p", "rustdoc", "-p", "rustdoc-json-types"]
+);
tool_doc!(
Rustfmt,
"rustfmt-nightly",
"src/tools/rustfmt",
- ["rustfmt-nightly", "rustfmt-config_proc_macro"],
+ ["-p", "rustfmt-nightly", "-p", "rustfmt-config_proc_macro"],
);
-tool_doc!(Clippy, "clippy", "src/tools/clippy", ["clippy_utils"]);
-tool_doc!(Miri, "miri", "src/tools/miri", ["miri"]);
+tool_doc!(Clippy, "clippy", "src/tools/clippy", ["-p", "clippy_utils"]);
+tool_doc!(Miri, "miri", "src/tools/miri", ["-p", "miri"]);
tool_doc!(
Cargo,
"cargo",
@@ -867,25 +879,44 @@ tool_doc!(
rustc_tool = false,
in_tree = false,
[
+ "-p",
"cargo",
+ "-p",
"cargo-platform",
+ "-p",
"cargo-util",
+ "-p",
"crates-io",
+ "-p",
"cargo-test-macro",
+ "-p",
"cargo-test-support",
+ "-p",
"cargo-credential",
+ "-p",
"cargo-credential-1password",
+ "-p",
"mdman",
// FIXME: this trips a license check in tidy.
+ // "-p",
// "resolver-tests",
// FIXME: we should probably document these, but they're different per-platform so we can't use `tool_doc`.
+ // "-p",
// "cargo-credential-gnome-secret",
+ // "-p",
// "cargo-credential-macos-keychain",
+ // "-p",
// "cargo-credential-wincred",
]
);
-tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, ["tidy"]);
-tool_doc!(Bootstrap, "bootstrap", "src/bootstrap", rustc_tool = false, ["bootstrap"]);
+tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, ["-p", "tidy"]);
+tool_doc!(
+ Bootstrap,
+ "bootstrap",
+ "src/bootstrap",
+ rustc_tool = false,
+ ["--lib", "-p", "bootstrap"]
+);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
@@ -958,21 +989,24 @@ impl Step for UnstableBookGen {
}
}
-fn symlink_dir_force(config: &Config, src: &Path, dst: &Path) -> io::Result<()> {
+fn symlink_dir_force(config: &Config, original: &Path, link: &Path) {
if config.dry_run() {
- return Ok(());
+ return;
}
- if let Ok(m) = fs::symlink_metadata(dst) {
+ if let Ok(m) = fs::symlink_metadata(link) {
if m.file_type().is_dir() {
- fs::remove_dir_all(dst)?;
+ t!(fs::remove_dir_all(link));
} else {
// handle directory junctions on windows by falling back to
// `remove_dir`.
- fs::remove_file(dst).or_else(|_| fs::remove_dir(dst))?;
+ t!(fs::remove_file(link).or_else(|_| fs::remove_dir(link)));
}
}
- symlink_dir(config, src, dst)
+ t!(
+ symlink_dir(config, original, link),
+ format!("failed to create link from {} -> {}", link.display(), original.display())
+ );
}
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp
index 4111b7cc0..120b3c9c4 100644
--- a/src/bootstrap/download-ci-llvm-stamp
+++ b/src/bootstrap/download-ci-llvm-stamp
@@ -1,4 +1,4 @@
Change this file to make users of the `download-ci-llvm` configuration download
a new version of LLVM from CI, even if the LLVM submodule hasn’t changed.
-Last change is for: https://github.com/rust-lang/rust/pull/96971
+Last change is for: https://github.com/rust-lang/rust/pull/112931
diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs
index c7969d2a2..cb40521dd 100644
--- a/src/bootstrap/download.rs
+++ b/src/bootstrap/download.rs
@@ -7,6 +7,7 @@ use std::{
process::{Command, Stdio},
};
+use build_helper::util::try_run;
use once_cell::sync::OnceCell;
use xz2::bufread::XzDecoder;
@@ -14,7 +15,7 @@ use crate::{
config::RustfmtMetadata,
llvm::detect_llvm_sha,
t,
- util::{check_run, exe, program_out_of_date, try_run},
+ util::{check_run, exe, program_out_of_date},
Config,
};
@@ -53,9 +54,9 @@ impl Config {
/// Runs a command, printing out nice contextual information if it fails.
/// Exits if the command failed to execute at all, otherwise returns its
/// `status.success()`.
- pub(crate) fn try_run(&self, cmd: &mut Command) -> bool {
+ pub(crate) fn try_run(&self, cmd: &mut Command) -> Result<(), ()> {
if self.dry_run() {
- return true;
+ return Ok(());
}
self.verbose(&format!("running: {:?}", cmd));
try_run(cmd, self.is_verbose())
@@ -155,12 +156,14 @@ impl Config {
];
}
";
- nix_build_succeeded = self.try_run(Command::new("nix-build").args(&[
- Path::new("-E"),
- Path::new(NIX_EXPR),
- Path::new("-o"),
- &nix_deps_dir,
- ]));
+ nix_build_succeeded = self
+ .try_run(Command::new("nix-build").args(&[
+ Path::new("-E"),
+ Path::new(NIX_EXPR),
+ Path::new("-o"),
+ &nix_deps_dir,
+ ]))
+ .is_ok();
nix_deps_dir
});
if !nix_build_succeeded {
@@ -185,7 +188,7 @@ impl Config {
patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
}
- self.try_run(patchelf.arg(fname));
+ self.try_run(patchelf.arg(fname)).unwrap();
}
fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) {
@@ -236,7 +239,7 @@ impl Config {
"(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"),
),
- ])) {
+ ])).is_err() {
return;
}
eprintln!("\nspurious failure, trying again");
@@ -245,7 +248,7 @@ impl Config {
if !help_on_error.is_empty() {
eprintln!("{}", help_on_error);
}
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
}
@@ -270,11 +273,8 @@ impl Config {
// `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding
// it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow.
// Cache the entries when we extract it so we only have to read it once.
- let mut recorded_entries = if dst.ends_with("ci-rustc") && pattern == "rustc-dev" {
- Some(BufWriter::new(t!(File::create(dst.join(".rustc-dev-contents")))))
- } else {
- None
- };
+ let mut recorded_entries =
+ if dst.ends_with("ci-rustc") { recorded_entries(dst, pattern) } else { None };
for member in t!(tar.entries()) {
let mut member = t!(member);
@@ -331,6 +331,17 @@ impl Config {
}
}
+fn recorded_entries(dst: &Path, pattern: &str) -> Option<BufWriter<File>> {
+ let name = if pattern == "rustc-dev" {
+ ".rustc-dev-contents"
+ } else if pattern.starts_with("rust-std") {
+ ".rust-std-contents"
+ } else {
+ return None;
+ };
+ Some(BufWriter::new(t!(File::create(dst.join(name)))))
+}
+
enum DownloadSource {
CI,
Dist,
@@ -381,11 +392,20 @@ impl Config {
Some(rustfmt_path)
}
- pub(crate) fn rustc_dev_contents(&self) -> Vec<String> {
+ pub(crate) fn ci_rust_std_contents(&self) -> Vec<String> {
+ self.ci_component_contents(".rust-std-contents")
+ }
+
+ pub(crate) fn ci_rustc_dev_contents(&self) -> Vec<String> {
+ self.ci_component_contents(".rustc-dev-contents")
+ }
+
+ fn ci_component_contents(&self, stamp_file: &str) -> Vec<String> {
assert!(self.download_rustc());
let ci_rustc_dir = self.out.join(&*self.build.triple).join("ci-rustc");
- let rustc_dev_contents_file = t!(File::open(ci_rustc_dir.join(".rustc-dev-contents")));
- t!(BufReader::new(rustc_dev_contents_file).lines().collect())
+ let stamp_file = ci_rustc_dir.join(stamp_file);
+ let contents_file = t!(File::open(&stamp_file), stamp_file.display().to_string());
+ t!(BufReader::new(contents_file).lines().collect())
}
pub(crate) fn download_ci_rustc(&self, commit: &str) {
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 80e715777..a882336c3 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -193,7 +193,7 @@ impl Flags {
} else {
panic!("No paths available for subcommand `{}`", subcommand.as_str());
}
- crate::detail_exit(0);
+ crate::detail_exit_macro!(0);
}
Flags::parse_from(it)
@@ -304,7 +304,7 @@ pub enum Subcommand {
./x.py test library/std --test-args hash_map
./x.py test library/std --stage 0 --no-doc
./x.py test tests/ui --bless
- ./x.py test tests/ui --compare-mode chalk
+ ./x.py test tests/ui --compare-mode next-solver
Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`;
just like `build library/std --stage N` it tests the compiler produced by the previous
stage.
@@ -538,7 +538,7 @@ pub fn get_completion<G: clap_complete::Generator>(shell: G, path: &Path) -> Opt
} else {
std::fs::read_to_string(path).unwrap_or_else(|_| {
eprintln!("couldn't read {}", path.display());
- crate::detail_exit(1)
+ crate::detail_exit_macro!(1)
})
};
let mut buf = Vec::new();
diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs
index d8d3f300a..ebf068b2c 100644
--- a/src/bootstrap/format.rs
+++ b/src/bootstrap/format.rs
@@ -40,7 +40,7 @@ fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl F
code, run `./x.py fmt` instead.",
cmd_debug,
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
true
}
@@ -196,7 +196,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
let rustfmt_path = build.initial_rustfmt().unwrap_or_else(|| {
eprintln!("./x.py fmt is not supported on this channel");
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
});
assert!(rustfmt_path.exists(), "{}", rustfmt_path.display());
let src = build.src.clone();
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 943f51341..0a7aff622 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -27,6 +27,7 @@ use std::process::{Command, Stdio};
use std::str;
use build_helper::ci::{gha, CiEnv};
+use build_helper::detail_exit_macro;
use channel::GitInfo;
use config::{DryRun, Target};
use filetime::FileTime;
@@ -60,6 +61,7 @@ mod run;
mod sanity;
mod setup;
mod suggest;
+mod synthetic_targets;
mod tarball;
mod test;
mod tool;
@@ -221,13 +223,14 @@ pub struct Build {
initial_cargo: PathBuf,
initial_lld: PathBuf,
initial_libdir: PathBuf,
+ initial_sysroot: PathBuf,
// Runtime state filled in later on
// C/C++ compilers and archiver for all targets
- cc: HashMap<TargetSelection, cc::Tool>,
- cxx: HashMap<TargetSelection, cc::Tool>,
- ar: HashMap<TargetSelection, PathBuf>,
- ranlib: HashMap<TargetSelection, PathBuf>,
+ cc: RefCell<HashMap<TargetSelection, cc::Tool>>,
+ cxx: RefCell<HashMap<TargetSelection, cc::Tool>>,
+ ar: RefCell<HashMap<TargetSelection, PathBuf>>,
+ ranlib: RefCell<HashMap<TargetSelection, PathBuf>>,
// Miscellaneous
// allow bidirectional lookups: both name -> path and path -> name
crates: HashMap<Interned<String>, Crate>,
@@ -330,7 +333,7 @@ forward! {
create(path: &Path, s: &str),
remove(f: &Path),
tempdir() -> PathBuf,
- try_run(cmd: &mut Command) -> bool,
+ try_run(cmd: &mut Command) -> Result<(), ()>,
llvm_link_shared() -> bool,
download_rustc() -> bool,
initial_rustfmt() -> Option<PathBuf>,
@@ -388,13 +391,16 @@ impl Build {
"/dummy".to_string()
} else {
output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot"))
- };
+ }
+ .trim()
+ .to_string();
+
let initial_libdir = initial_target_dir
.parent()
.unwrap()
.parent()
.unwrap()
- .strip_prefix(initial_sysroot.trim())
+ .strip_prefix(&initial_sysroot)
.unwrap()
.to_path_buf();
@@ -414,7 +420,6 @@ impl Build {
bootstrap_out.display()
)
}
- config.check_build_rustc_version();
if rust_info.is_from_tarball() && config.description.is_none() {
config.description = Some("built from a source tarball".to_owned());
@@ -425,6 +430,7 @@ impl Build {
initial_cargo: config.initial_cargo.clone(),
initial_lld,
initial_libdir,
+ initial_sysroot: initial_sysroot.into(),
local_rebuild: config.local_rebuild,
fail_fast: config.cmd.fail_fast(),
doc_tests: config.cmd.doc_tests(),
@@ -446,10 +452,10 @@ impl Build {
miri_info,
rustfmt_info,
in_tree_llvm_info,
- cc: HashMap::new(),
- cxx: HashMap::new(),
- ar: HashMap::new(),
- ranlib: HashMap::new(),
+ cc: RefCell::new(HashMap::new()),
+ cxx: RefCell::new(HashMap::new()),
+ ar: RefCell::new(HashMap::new()),
+ ranlib: RefCell::new(HashMap::new()),
crates: HashMap::new(),
crate_paths: HashMap::new(),
is_sudo,
@@ -477,7 +483,7 @@ impl Build {
}
build.verbose("finding compilers");
- cc_detect::find(&mut build);
+ cc_detect::find(&build);
// When running `setup`, the profile is about to change, so any requirements we have now may
// be different on the next invocation. Don't check for them until the next time x.py is
// run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
@@ -593,6 +599,9 @@ impl Build {
let mut git = self.config.git();
if let Some(branch) = current_branch {
+ // If there is a tag named after the current branch, git will try to disambiguate by prepending `heads/` to the branch name.
+ // This syntax isn't accepted by `branch.{branch}`. Strip it.
+ let branch = branch.strip_prefix("heads/").unwrap_or(&branch);
git.arg("-c").arg(format!("branch.{branch}.remote=origin"));
}
git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
@@ -608,11 +617,13 @@ impl Build {
}
// Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error).
- let has_local_modifications = !self.try_run(
- Command::new("git")
- .args(&["diff-index", "--quiet", "HEAD"])
- .current_dir(&absolute_path),
- );
+ let has_local_modifications = self
+ .try_run(
+ Command::new("git")
+ .args(&["diff-index", "--quiet", "HEAD"])
+ .current_dir(&absolute_path),
+ )
+ .is_err();
if has_local_modifications {
self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
}
@@ -700,7 +711,7 @@ impl Build {
for failure in failures.iter() {
eprintln!(" - {}\n", failure);
}
- detail_exit(1);
+ detail_exit_macro!(1);
}
#[cfg(feature = "build-metrics")]
@@ -777,7 +788,7 @@ impl Build {
/// Component directory that Cargo will produce output into (e.g.
/// release/debug)
fn cargo_dir(&self) -> &'static str {
- if self.config.rust_optimize { "release" } else { "debug" }
+ if self.config.rust_optimize.is_release() { "release" } else { "debug" }
}
fn tools_dir(&self, compiler: Compiler) -> PathBuf {
@@ -1001,6 +1012,15 @@ impl Build {
self.msg(Kind::Check, self.config.stage, what, self.config.build, target)
}
+ fn msg_doc(
+ &self,
+ compiler: Compiler,
+ what: impl Display,
+ target: impl Into<Option<TargetSelection>> + Copy,
+ ) -> Option<gha::Group> {
+ self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into())
+ }
+
fn msg_build(
&self,
compiler: Compiler,
@@ -1021,8 +1041,8 @@ impl Build {
host: impl Into<Option<TargetSelection>>,
target: impl Into<Option<TargetSelection>>,
) -> Option<gha::Group> {
- let action = action.into();
- let msg = |fmt| format!("{action:?}ing stage{stage} {what}{fmt}");
+ let action = action.into().description();
+ let msg = |fmt| format!("{action} stage{stage} {what}{fmt}");
let msg = if let Some(target) = target.into() {
let host = host.into().unwrap();
if host == target {
@@ -1045,8 +1065,8 @@ impl Build {
what: impl Display,
target: TargetSelection,
) -> Option<gha::Group> {
- let action = action.into();
- let msg = format!("{action:?}ing {what} for {target}");
+ let action = action.into().description();
+ let msg = format!("{action} {what} for {target}");
self.group(&msg)
}
@@ -1058,8 +1078,8 @@ impl Build {
host: TargetSelection,
target: TargetSelection,
) -> Option<gha::Group> {
- let action = action.into();
- let msg = |fmt| format!("{action:?}ing {what} {fmt}");
+ let action = action.into().description();
+ let msg = |fmt| format!("{action} {what} {fmt}");
let msg = if host == target {
msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1))
} else {
@@ -1069,7 +1089,6 @@ impl Build {
}
fn group(&self, msg: &str) -> Option<gha::Group> {
- self.info(&msg);
match self.config.dry_run {
DryRun::SelfCheck => None,
DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)),
@@ -1099,16 +1118,22 @@ impl Build {
}
/// Returns the path to the C compiler for the target specified.
- fn cc(&self, target: TargetSelection) -> &Path {
- self.cc[&target].path()
+ fn cc(&self, target: TargetSelection) -> PathBuf {
+ if self.config.dry_run() {
+ return PathBuf::new();
+ }
+ self.cc.borrow()[&target].path().into()
}
/// Returns a list of flags to pass to the C compiler for the target
/// specified.
fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec<String> {
+ if self.config.dry_run() {
+ return Vec::new();
+ }
let base = match c {
- CLang::C => &self.cc[&target],
- CLang::Cxx => &self.cxx[&target],
+ CLang::C => self.cc.borrow()[&target].clone(),
+ CLang::Cxx => self.cxx.borrow()[&target].clone(),
};
// Filter out -O and /O (the optimization flags) that we picked up from
@@ -1149,19 +1174,28 @@ impl Build {
}
/// Returns the path to the `ar` archive utility for the target specified.
- fn ar(&self, target: TargetSelection) -> Option<&Path> {
- self.ar.get(&target).map(|p| &**p)
+ fn ar(&self, target: TargetSelection) -> Option<PathBuf> {
+ if self.config.dry_run() {
+ return None;
+ }
+ self.ar.borrow().get(&target).cloned()
}
/// Returns the path to the `ranlib` utility for the target specified.
- fn ranlib(&self, target: TargetSelection) -> Option<&Path> {
- self.ranlib.get(&target).map(|p| &**p)
+ fn ranlib(&self, target: TargetSelection) -> Option<PathBuf> {
+ if self.config.dry_run() {
+ return None;
+ }
+ self.ranlib.borrow().get(&target).cloned()
}
/// Returns the path to the C++ compiler for the target specified.
- fn cxx(&self, target: TargetSelection) -> Result<&Path, String> {
- match self.cxx.get(&target) {
- Some(p) => Ok(p.path()),
+ fn cxx(&self, target: TargetSelection) -> Result<PathBuf, String> {
+ if self.config.dry_run() {
+ return Ok(PathBuf::new());
+ }
+ match self.cxx.borrow().get(&target) {
+ Some(p) => Ok(p.path().into()),
None => {
Err(format!("target `{}` is not configured as a host, only as a target", target))
}
@@ -1169,21 +1203,24 @@ impl Build {
}
/// Returns the path to the linker for the given target if it needs to be overridden.
- fn linker(&self, target: TargetSelection) -> Option<&Path> {
- if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
+ fn linker(&self, target: TargetSelection) -> Option<PathBuf> {
+ if self.config.dry_run() {
+ return Some(PathBuf::new());
+ }
+ if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.clone())
{
Some(linker)
} else if target.contains("vxworks") {
// need to use CXX compiler as linker to resolve the exception functions
// that are only existed in CXX libraries
- Some(self.cxx[&target].path())
+ Some(self.cxx.borrow()[&target].path().into())
} else if target != self.config.build
&& util::use_host_linker(target)
&& !target.contains("msvc")
{
Some(self.cc(target))
} else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target {
- Some(&self.initial_lld)
+ Some(self.initial_lld.clone())
} else {
None
}
@@ -1483,7 +1520,7 @@ impl Build {
"Error: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?",
stamp.display()
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
let mut paths = Vec::new();
@@ -1675,7 +1712,7 @@ Alternatively, set `download-ci-llvm = true` in that `[llvm]` section
to download LLVM rather than building it.
"
);
- detail_exit(1);
+ detail_exit_macro!(1);
}
}
@@ -1740,18 +1777,6 @@ fn chmod(path: &Path, perms: u32) {
#[cfg(windows)]
fn chmod(_path: &Path, _perms: u32) {}
-/// If code is not 0 (successful exit status), exit status is 101 (rust's default error code.)
-/// If the test is running and code is an error code, it will cause a panic.
-fn detail_exit(code: i32) -> ! {
- // if in test and code is an error code, panic with status code provided
- if cfg!(test) {
- panic!("status code: {}", code);
- } else {
- // otherwise,exit with provided status code
- std::process::exit(code);
- }
-}
-
impl Compiler {
pub fn with_stage(mut self, stage: u32) -> Compiler {
self.stage = stage;
diff --git a/src/bootstrap/llvm.rs b/src/bootstrap/llvm.rs
index 040a12f5d..7e27960f3 100644
--- a/src/bootstrap/llvm.rs
+++ b/src/bootstrap/llvm.rs
@@ -352,7 +352,7 @@ impl Step for Llvm {
// Disable zstd to avoid a dependency on libzstd.so.
cfg.define("LLVM_ENABLE_ZSTD", "OFF");
- if target != "aarch64-apple-darwin" && !target.contains("windows") {
+ if !target.contains("windows") {
cfg.define("LLVM_ENABLE_ZLIB", "ON");
} else {
cfg.define("LLVM_ENABLE_ZLIB", "OFF");
@@ -380,7 +380,10 @@ impl Step for Llvm {
cfg.define("LLVM_LINK_LLVM_DYLIB", "ON");
}
- if target.starts_with("riscv") && !target.contains("freebsd") && !target.contains("openbsd")
+ if target.starts_with("riscv")
+ && !target.contains("freebsd")
+ && !target.contains("openbsd")
+ && !target.contains("netbsd")
{
// RISC-V GCC erroneously requires linking against
// `libatomic` when using 1-byte and 2-byte C++
@@ -605,7 +608,7 @@ fn configure_cmake(
}
let (cc, cxx) = match builder.config.llvm_clang_cl {
- Some(ref cl) => (cl.as_ref(), cl.as_ref()),
+ Some(ref cl) => (cl.into(), cl.into()),
None => (builder.cc(target), builder.cxx(target).unwrap()),
};
@@ -656,9 +659,9 @@ fn configure_cmake(
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
}
}
- cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
- .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx))
- .define("CMAKE_ASM_COMPILER", sanitize_cc(cc));
+ cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
+ .define("CMAKE_CXX_COMPILER", sanitize_cc(&cxx))
+ .define("CMAKE_ASM_COMPILER", sanitize_cc(&cc));
}
cfg.build_arg("-j").build_arg(builder.jobs().to_string());
@@ -698,7 +701,7 @@ fn configure_cmake(
if ar.is_absolute() {
// LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
- cfg.define("CMAKE_AR", sanitize_cc(ar));
+ cfg.define("CMAKE_AR", sanitize_cc(&ar));
}
}
@@ -706,7 +709,7 @@ fn configure_cmake(
if ranlib.is_absolute() {
// LLVM build breaks if `CMAKE_RANLIB` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
- cfg.define("CMAKE_RANLIB", sanitize_cc(ranlib));
+ cfg.define("CMAKE_RANLIB", sanitize_cc(&ranlib));
}
}
@@ -834,6 +837,31 @@ impl Step for Lld {
}
}
+ // LLD is built as an LLVM tool, but is distributed outside of the `llvm-tools` component,
+ // which impacts where it expects to find LLVM's shared library. This causes #80703.
+ //
+ // LLD is distributed at "$root/lib/rustlib/$host/bin/rust-lld", but the `libLLVM-*.so` it
+ // needs is distributed at "$root/lib". The default rpath of "$ORIGIN/../lib" points at the
+ // lib path for LLVM tools, not the one for rust binaries.
+ //
+ // (The `llvm-tools` component copies the .so there for the other tools, and with that
+ // component installed, one can successfully invoke `rust-lld` directly without rustup's
+ // `LD_LIBRARY_PATH` overrides)
+ //
+ if builder.config.rpath_enabled(target)
+ && util::use_host_linker(target)
+ && builder.config.llvm_link_shared()
+ && target.contains("linux")
+ {
+ // So we inform LLD where it can find LLVM's libraries by adding an rpath entry to the
+ // expected parent `lib` directory.
+ //
+ // Be careful when changing this path, we need to ensure it's quoted or escaped:
+ // `$ORIGIN` would otherwise be expanded when the `LdFlags` are passed verbatim to
+ // cmake.
+ ldflags.push_all("-Wl,-rpath,'$ORIGIN/../../../'");
+ }
+
configure_cmake(builder, target, &mut cfg, true, ldflags, &[]);
configure_llvm(builder, target, &mut cfg);
@@ -1017,7 +1045,7 @@ fn supported_sanitizers(
"x86_64-unknown-illumos" => common_libs("illumos", "x86_64", &["asan"]),
"x86_64-pc-solaris" => common_libs("solaris", "x86_64", &["asan"]),
"x86_64-unknown-linux-gnu" => {
- common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
+ common_libs("linux", "x86_64", &["asan", "lsan", "msan", "safestack", "tsan"])
}
"x86_64-unknown-linux-musl" => {
common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs
index 8f2c3faca..3b20ceac8 100644
--- a/src/bootstrap/metadata.rs
+++ b/src/bootstrap/metadata.rs
@@ -74,6 +74,9 @@ fn workspace_members(build: &Build) -> impl Iterator<Item = Package> {
let collect_metadata = |manifest_path| {
let mut cargo = Command::new(&build.initial_cargo);
cargo
+ // Will read the libstd Cargo.toml
+ // which uses the unstable `public-dependency` feature.
+ .env("RUSTC_BOOTSTRAP", "1")
.arg("metadata")
.arg("--format-version")
.arg("1")
diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in
index d54a21b9f..947613796 100644
--- a/src/bootstrap/mk/Makefile.in
+++ b/src/bootstrap/mk/Makefile.in
@@ -57,27 +57,22 @@ tidy:
prepare:
$(Q)$(BOOTSTRAP) build --stage 2 nonexistent/path/to/trigger/cargo/metadata
-TESTS_IN_2 := \
- tests/ui \
- src/tools/linkchecker
-
## MSVC native builders
-# these intentionally don't use `$(BOOTSTRAP)` so we can test the shebang on Windows
-ci-subset-1:
- $(Q)$(CFG_SRC_DIR)/x.py test --stage 2 $(TESTS_IN_2:%=--exclude %)
-ci-subset-2:
- $(Q)$(CFG_SRC_DIR)/x.ps1 test --stage 2 $(TESTS_IN_2)
+# this intentionally doesn't use `$(BOOTSTRAP)` so we can test the shebang on Windows
+ci-msvc-py:
+ $(Q)$(CFG_SRC_DIR)/x.py test --stage 2 tidy
+ci-msvc-ps1:
+ $(Q)$(CFG_SRC_DIR)/x.ps1 test --stage 2 --exclude tidy
+ci-msvc: ci-msvc-py ci-msvc-ps1
## MingW native builders
-TESTS_IN_MINGW_2 := \
- tests/ui
-
-ci-mingw-subset-1:
- $(Q)$(CFG_SRC_DIR)/x test --stage 2 $(TESTS_IN_MINGW_2:%=--exclude %)
-ci-mingw-subset-2:
- $(Q)$(BOOTSTRAP) test --stage 2 $(TESTS_IN_MINGW_2)
-
+# test both x and bootstrap entrypoints
+ci-mingw-x:
+ $(Q)$(CFG_SRC_DIR)/x test --stage 2 tidy
+ci-mingw-bootstrap:
+ $(Q)$(BOOTSTRAP) test --stage 2 --exclude tidy
+ci-mingw: ci-mingw-x ci-mingw-bootstrap
.PHONY: dist
diff --git a/src/bootstrap/render_tests.rs b/src/bootstrap/render_tests.rs
index fa0a48066..98a468c88 100644
--- a/src/bootstrap/render_tests.rs
+++ b/src/bootstrap/render_tests.rs
@@ -7,7 +7,7 @@
//! to reimplement all the rendering logic in this module because of that.
use crate::builder::Builder;
-use std::io::{BufRead, BufReader, Write};
+use std::io::{BufRead, BufReader, Read, Write};
use std::process::{ChildStdout, Command, Stdio};
use std::time::Duration;
use termcolor::{Color, ColorSpec, WriteColor};
@@ -20,17 +20,17 @@ pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Comma
}
cmd.args(&["-Z", "unstable-options", "--format", "json"]);
- try_run_tests(builder, cmd)
+ try_run_tests(builder, cmd, false)
}
-pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool {
if builder.config.dry_run() {
return true;
}
- if !run_tests(builder, cmd) {
+ if !run_tests(builder, cmd, stream) {
if builder.fail_fast {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
} else {
let mut failures = builder.delayed_failures.borrow_mut();
failures.push(format!("{cmd:?}"));
@@ -41,7 +41,7 @@ pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
}
}
-fn run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+fn run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool {
cmd.stdout(Stdio::piped());
builder.verbose(&format!("running: {cmd:?}"));
@@ -50,7 +50,12 @@ fn run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
// This runs until the stdout of the child is closed, which means the child exited. We don't
// run this on another thread since the builder is not Sync.
- Renderer::new(process.stdout.take().unwrap(), builder).render_all();
+ let renderer = Renderer::new(process.stdout.take().unwrap(), builder);
+ if stream {
+ renderer.stream_all();
+ } else {
+ renderer.render_all();
+ }
let result = process.wait_with_output().unwrap();
if !result.status.success() && builder.is_verbose() {
@@ -88,10 +93,10 @@ impl<'a> Renderer<'a> {
}
fn render_all(mut self) {
- let mut line = String::new();
+ let mut line = Vec::new();
loop {
line.clear();
- match self.stdout.read_line(&mut line) {
+ match self.stdout.read_until(b'\n', &mut line) {
Ok(_) => {}
Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break,
Err(err) => panic!("failed to read output of test runner: {err}"),
@@ -100,13 +105,31 @@ impl<'a> Renderer<'a> {
break;
}
- match serde_json::from_str(&line) {
+ match serde_json::from_slice(&line) {
Ok(parsed) => self.render_message(parsed),
Err(_err) => {
// Handle non-JSON output, for example when --nocapture is passed.
- print!("{line}");
- let _ = std::io::stdout().flush();
+ let mut stdout = std::io::stdout();
+ stdout.write_all(&line).unwrap();
+ let _ = stdout.flush();
+ }
+ }
+ }
+ }
+
+ /// Renders the stdout characters one by one
+ fn stream_all(mut self) {
+ let mut buffer = [0; 1];
+ loop {
+ match self.stdout.read(&mut buffer) {
+ Ok(0) => break,
+ Ok(_) => {
+ let mut stdout = std::io::stdout();
+ stdout.write_all(&buffer).unwrap();
+ let _ = stdout.flush();
}
+ Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break,
+ Err(err) => panic!("failed to read output of test runner: {err}"),
}
}
}
diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs
index ec01f744b..c97b75927 100644
--- a/src/bootstrap/run.rs
+++ b/src/bootstrap/run.rs
@@ -27,7 +27,8 @@ impl Step for ExpandYamlAnchors {
try_run(
builder,
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
- );
+ )
+ .unwrap();
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -39,17 +40,17 @@ impl Step for ExpandYamlAnchors {
}
}
-fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> Result<(), ()> {
if !builder.fail_fast {
- if !builder.try_run(cmd) {
+ if let Err(e) = builder.try_run(cmd) {
let mut failures = builder.delayed_failures.borrow_mut();
failures.push(format!("{:?}", cmd));
- return false;
+ return Err(e);
}
} else {
builder.run(cmd);
}
- true
+ Ok(())
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index 140259b02..8f5ba4273 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -104,7 +104,7 @@ You should install cmake, or set `download-ci-llvm = true` in the
than building it.
"
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
}
diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs
index 09f26862b..34c6ccf13 100644
--- a/src/bootstrap/setup.rs
+++ b/src/bootstrap/setup.rs
@@ -20,7 +20,7 @@ pub enum Profile {
Codegen,
Library,
Tools,
- User,
+ Dist,
None,
}
@@ -31,6 +31,7 @@ static SETTINGS_HASHES: &[&str] = &[
"ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8",
"56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922",
"af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0",
+ "3468fea433c25fff60be6b71e8a215a732a7b1268b6a83bf10d024344e140541",
];
static RUST_ANALYZER_SETTINGS: &str = include_str!("../etc/rust_analyzer_settings.json");
@@ -42,7 +43,7 @@ impl Profile {
pub fn all() -> impl Iterator<Item = Self> {
use Profile::*;
// N.B. these are ordered by how they are displayed, not alphabetically
- [Library, Compiler, Codegen, Tools, User, None].iter().copied()
+ [Library, Compiler, Codegen, Tools, Dist, None].iter().copied()
}
pub fn purpose(&self) -> String {
@@ -52,7 +53,7 @@ impl Profile {
Compiler => "Contribute to the compiler itself",
Codegen => "Contribute to the compiler, and also modify LLVM or codegen",
Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. rustdoc, clippy, miri)",
- User => "Install Rust from source",
+ Dist => "Install Rust from source",
None => "Do not modify `config.toml`"
}
.to_string()
@@ -72,7 +73,7 @@ impl Profile {
Profile::Codegen => "codegen",
Profile::Library => "library",
Profile::Tools => "tools",
- Profile::User => "user",
+ Profile::Dist => "dist",
Profile::None => "none",
}
}
@@ -86,7 +87,7 @@ impl FromStr for Profile {
"lib" | "library" => Ok(Profile::Library),
"compiler" => Ok(Profile::Compiler),
"llvm" | "codegen" => Ok(Profile::Codegen),
- "maintainer" | "user" => Ok(Profile::User),
+ "maintainer" | "dist" | "user" => Ok(Profile::Dist),
"tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => {
Ok(Profile::Tools)
}
@@ -159,7 +160,7 @@ pub fn setup(config: &Config, profile: Profile) {
"test src/tools/rustfmt",
],
Profile::Library => &["check", "build", "test library/std", "doc"],
- Profile::User => &["dist", "build"],
+ Profile::Dist => &["dist", "build"],
};
println!();
@@ -169,12 +170,20 @@ pub fn setup(config: &Config, profile: Profile) {
println!("- `x.py {}`", cmd);
}
- if profile != Profile::User {
+ if profile != Profile::Dist {
println!(
"For more suggestions, see https://rustc-dev-guide.rust-lang.org/building/suggested.html"
);
}
+ if profile == Profile::Tools {
+ eprintln!();
+ eprintln!(
+ "note: the `tools` profile sets up the `stage2` toolchain (use \
+ `rustup toolchain link 'name' host/build/stage2` to use rustc)"
+ )
+ }
+
let path = &config.config.clone().unwrap_or(PathBuf::from("config.toml"));
setup_config_toml(path, profile, config);
}
@@ -194,7 +203,7 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
"note: this will use the configuration in {}",
profile.include_path(&config.src).display()
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
let settings = format!(
@@ -380,7 +389,7 @@ pub fn interactive_path() -> io::Result<Profile> {
io::stdin().read_line(&mut input)?;
if input.is_empty() {
eprintln!("EOF on stdin, when expecting answer to question. Giving up.");
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
break match parse_with_abbrev(&input) {
Ok(profile) => profile,
@@ -573,7 +582,7 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> {
Some(false) => {
// exists and is not current version or outdated, so back it up
let mut backup = vscode_settings.clone();
- backup.set_extension("bak");
+ backup.set_extension("json.bak");
eprintln!("warning: copying `settings.json` to `settings.json.bak`");
fs::copy(&vscode_settings, &backup)?;
"Updated"
diff --git a/src/bootstrap/synthetic_targets.rs b/src/bootstrap/synthetic_targets.rs
new file mode 100644
index 000000000..7eeac9025
--- /dev/null
+++ b/src/bootstrap/synthetic_targets.rs
@@ -0,0 +1,82 @@
+//! In some cases, parts of bootstrap need to change part of a target spec just for one or a few
+//! steps. Adding these targets to rustc proper would "leak" this implementation detail of
+//! bootstrap, and would make it more complex to apply additional changes if the need arises.
+//!
+//! To address that problem, this module implements support for "synthetic targets". Synthetic
+//! targets are custom target specs generated using builtin target specs as their base. You can use
+//! one of the target specs already defined in this module, or create new ones by adding a new step
+//! that calls create_synthetic_target.
+
+use crate::builder::{Builder, ShouldRun, Step};
+use crate::config::TargetSelection;
+use crate::Compiler;
+use std::process::{Command, Stdio};
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct MirOptPanicAbortSyntheticTarget {
+ pub(crate) compiler: Compiler,
+ pub(crate) base: TargetSelection,
+}
+
+impl Step for MirOptPanicAbortSyntheticTarget {
+ type Output = TargetSelection;
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.never()
+ }
+
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ create_synthetic_target(builder, self.compiler, "miropt-abort", self.base, |spec| {
+ spec.insert("panic-strategy".into(), "abort".into());
+ })
+ }
+}
+
+fn create_synthetic_target(
+ builder: &Builder<'_>,
+ compiler: Compiler,
+ suffix: &str,
+ base: TargetSelection,
+ customize: impl FnOnce(&mut serde_json::Map<String, serde_json::Value>),
+) -> TargetSelection {
+ if base.contains("synthetic") {
+ // This check is not strictly needed, but nothing currently needs recursive synthetic
+ // targets. If the need arises, removing this in the future *SHOULD* be safe.
+ panic!("cannot create synthetic targets with other synthetic targets as their base");
+ }
+
+ let name = format!("{base}-synthetic-{suffix}");
+ let path = builder.out.join("synthetic-target-specs").join(format!("{name}.json"));
+ std::fs::create_dir_all(path.parent().unwrap()).unwrap();
+
+ if builder.config.dry_run() {
+ std::fs::write(&path, b"dry run\n").unwrap();
+ return TargetSelection::create_synthetic(&name, path.to_str().unwrap());
+ }
+
+ let mut cmd = Command::new(builder.rustc(compiler));
+ cmd.arg("--target").arg(base.rustc_target_arg());
+ cmd.args(["-Zunstable-options", "--print", "target-spec-json"]);
+ cmd.stdout(Stdio::piped());
+
+ let output = cmd.spawn().unwrap().wait_with_output().unwrap();
+ if !output.status.success() {
+ panic!("failed to gather the target spec for {base}");
+ }
+
+ let mut spec: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap();
+ let spec_map = spec.as_object_mut().unwrap();
+
+ // The `is-builtin` attribute of a spec needs to be removed, otherwise rustc will complain.
+ spec_map.remove("is-builtin");
+
+ customize(spec_map);
+
+ std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
+ let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap());
+ crate::cc_detect::find_target(builder, target);
+
+ target
+}
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index 44cd84be7..eed7a584b 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -23,6 +23,7 @@ use crate::doc::DocumentationFormat;
use crate::flags::Subcommand;
use crate::llvm;
use crate::render_tests::add_flags_and_try_run_tests;
+use crate::synthetic_targets::MirOptPanicAbortSyntheticTarget;
use crate::tool::{self, SourceType, Tool};
use crate::toolstate::ToolState;
use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t, up_to_date};
@@ -30,17 +31,34 @@ use crate::{envify, CLang, DocTests, GitRepo, Mode};
const ADB_TEST_DIR: &str = "/data/local/tmp/work";
-fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> bool {
+// mir-opt tests have different variants depending on whether a target is 32bit or 64bit, and
+// blessing them requires blessing with each target. To aid developers, when blessing the mir-opt
+// test suite the corresponding target of the opposite pointer size is also blessed.
+//
+// This array serves as the known mappings between 32bit and 64bit targets. If you're developing on
+// a target where a target with the opposite pointer size exists, feel free to add it here.
+const MIR_OPT_BLESS_TARGET_MAPPING: &[(&str, &str)] = &[
+ // (32bit, 64bit)
+ ("i686-unknown-linux-gnu", "x86_64-unknown-linux-gnu"),
+ ("i686-unknown-linux-musl", "x86_64-unknown-linux-musl"),
+ ("i686-pc-windows-msvc", "x86_64-pc-windows-msvc"),
+ ("i686-pc-windows-gnu", "x86_64-pc-windows-gnu"),
+ ("i686-apple-darwin", "x86_64-apple-darwin"),
+ // ARM Macs don't have a corresponding 32-bit target that they can (easily)
+ // build for, so there is no entry for "aarch64-apple-darwin" here.
+];
+
+fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> Result<(), ()> {
if !builder.fail_fast {
- if !builder.try_run(cmd) {
+ if let Err(e) = builder.try_run(cmd) {
let mut failures = builder.delayed_failures.borrow_mut();
failures.push(format!("{:?}", cmd));
- return false;
+ return Err(e);
}
} else {
builder.run(cmd);
}
- true
+ Ok(())
}
fn try_run_quiet(builder: &Builder<'_>, cmd: &mut Command) -> bool {
@@ -101,7 +119,7 @@ impl Step for CrateBootstrap {
);
builder.info(&format!(
"{} {} stage0 ({})",
- builder.kind.test_description(),
+ builder.kind.description(),
path,
bootstrap_host,
));
@@ -169,7 +187,8 @@ You can skip linkcheck with --exclude src/tools/linkchecker"
try_run(
builder,
builder.tool_cmd(Tool::Linkchecker).arg(builder.out.join(host.triple).join("doc")),
- );
+ )
+ .unwrap();
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -220,9 +239,10 @@ impl Step for HtmlCheck {
}
// Ensure that a few different kinds of documentation are available.
builder.default_doc(&[]);
- builder.ensure(crate::doc::Rustc { target: self.target, stage: builder.top_stage });
+ builder.ensure(crate::doc::Rustc::new(builder.top_stage, self.target, builder));
- try_run(builder, builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)));
+ try_run(builder, builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)))
+ .unwrap();
}
}
@@ -268,7 +288,8 @@ impl Step for Cargotest {
.args(builder.config.test_args())
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler)),
- );
+ )
+ .unwrap();
}
}
@@ -358,7 +379,9 @@ impl Step for RustAnalyzer {
let host = self.host;
let compiler = builder.compiler(stage, host);
- builder.ensure(tool::RustAnalyzer { compiler, target: self.host }).expect("in-tree tool");
+ // We don't need to build the whole Rust Analyzer for the proc-macro-srv test suite,
+ // but we do need the standard library to be present.
+ builder.ensure(compile::Std::new(compiler, host));
let workspace_path = "src/tools/rust-analyzer";
// until the whole RA test suite runs on `i686`, we only run
@@ -700,7 +723,7 @@ impl Step for CompiletestTest {
/// Runs `cargo test` for compiletest.
fn run(self, builder: &Builder<'_>) {
let host = self.host;
- let compiler = builder.compiler(1, host);
+ let compiler = builder.compiler(builder.top_stage, host);
// We need `ToolStd` for the locally-built sysroot because
// compiletest uses unstable features of the `test` crate.
@@ -767,27 +790,19 @@ impl Step for Clippy {
cargo.add_rustc_lib_path(builder, compiler);
let mut cargo = prepare_cargo_test(cargo, &[], &[], "clippy", compiler, host, builder);
- if builder.try_run(&mut cargo) {
+ // propagate --bless
+ if builder.config.cmd.bless() {
+ cargo.env("BLESS", "Gesundheit");
+ }
+
+ if builder.try_run(&mut cargo).is_ok() {
// The tests succeeded; nothing to do.
return;
}
if !builder.config.cmd.bless() {
- crate::detail_exit(1);
- }
-
- let mut cargo = builder.cargo(compiler, Mode::ToolRustc, SourceType::InTree, host, "run");
- cargo.arg("-p").arg("clippy_dev");
- // clippy_dev gets confused if it can't find `clippy/Cargo.toml`
- cargo.current_dir(&builder.src.join("src").join("tools").join("clippy"));
- if builder.config.rust_optimize {
- cargo.env("PROFILE", "release");
- } else {
- cargo.env("PROFILE", "debug");
+ crate::detail_exit_macro!(1);
}
- cargo.arg("--");
- cargo.arg("bless");
- builder.run(&mut cargo.into());
}
}
@@ -840,7 +855,7 @@ impl Step for RustdocTheme {
util::lld_flag_no_threads(self.compiler.host.contains("windows")),
);
}
- try_run(builder, &mut cmd);
+ try_run(builder, &mut cmd).unwrap();
}
}
@@ -886,11 +901,11 @@ impl Step for RustdocJSStd {
command.arg("--test-file").arg(path);
}
}
- builder.ensure(crate::doc::Std {
- target: self.target,
- stage: builder.top_stage,
- format: DocumentationFormat::HTML,
- });
+ builder.ensure(crate::doc::Std::new(
+ builder.top_stage,
+ self.target,
+ DocumentationFormat::HTML,
+ ));
builder.run(&mut command);
} else {
builder.info("No nodejs found, skipping \"tests/rustdoc-js-std\" tests");
@@ -1035,7 +1050,7 @@ impl Step for RustdocGUI {
}
let _time = util::timeit(&builder);
- crate::render_tests::try_run_tests(builder, &mut cmd);
+ crate::render_tests::try_run_tests(builder, &mut cmd, true);
}
}
@@ -1085,13 +1100,13 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy`
PATH = inferred_rustfmt_dir.display(),
CHAN = builder.config.channel,
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
crate::format::format(&builder, !builder.config.cmd.bless(), &[]);
}
builder.info("tidy check");
- try_run(builder, &mut cmd);
+ try_run(builder, &mut cmd).unwrap();
builder.ensure(ExpandYamlAnchors);
@@ -1108,7 +1123,7 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy`
eprintln!(
"x.py completions were changed; run `x.py run generate-completions` to update them"
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
}
}
@@ -1139,7 +1154,8 @@ impl Step for ExpandYamlAnchors {
try_run(
builder,
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
- );
+ )
+ .unwrap();
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -1261,8 +1277,6 @@ default_test!(RunPassValgrind {
suite: "run-pass-valgrind"
});
-default_test!(MirOpt { path: "tests/mir-opt", mode: "mir-opt", suite: "mir-opt" });
-
default_test!(Codegen { path: "tests/codegen", mode: "codegen", suite: "codegen" });
default_test!(CodegenUnits {
@@ -1299,6 +1313,98 @@ host_test!(RunMakeFullDeps {
default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly" });
+host_test!(RunCoverage { path: "tests/run-coverage", mode: "run-coverage", suite: "run-coverage" });
+host_test!(RunCoverageRustdoc {
+ path: "tests/run-coverage-rustdoc",
+ mode: "run-coverage",
+ suite: "run-coverage-rustdoc"
+});
+
+// For the mir-opt suite we do not use macros, as we need custom behavior when blessing.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct MirOpt {
+ pub compiler: Compiler,
+ pub target: TargetSelection,
+}
+
+impl Step for MirOpt {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = false;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.suite_path("tests/mir-opt")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple());
+ run.builder.ensure(MirOpt { compiler, target: run.target });
+ }
+
+ fn run(self, builder: &Builder<'_>) {
+ let run = |target| {
+ builder.ensure(Compiletest {
+ compiler: self.compiler,
+ target: target,
+ mode: "mir-opt",
+ suite: "mir-opt",
+ path: "tests/mir-opt",
+ compare_mode: None,
+ })
+ };
+
+ // We use custom logic to bless the mir-opt suite: mir-opt tests have multiple variants
+ // (32bit vs 64bit, and panic=abort vs panic=unwind), and all of them need to be blessed.
+ // When blessing, we try best-effort to also bless the other variants, to aid developers.
+ if builder.config.cmd.bless() {
+ let targets = MIR_OPT_BLESS_TARGET_MAPPING
+ .iter()
+ .filter(|(target_32bit, target_64bit)| {
+ *target_32bit == &*self.target.triple || *target_64bit == &*self.target.triple
+ })
+ .next()
+ .map(|(target_32bit, target_64bit)| {
+ let target_32bit = TargetSelection::from_user(target_32bit);
+ let target_64bit = TargetSelection::from_user(target_64bit);
+
+ // Running compiletest requires a C compiler to be available, but it might not
+ // have been detected by bootstrap if the target we're testing wasn't in the
+ // --target flags.
+ if !builder.cc.borrow().contains_key(&target_32bit) {
+ crate::cc_detect::find_target(builder, target_32bit);
+ }
+ if !builder.cc.borrow().contains_key(&target_64bit) {
+ crate::cc_detect::find_target(builder, target_64bit);
+ }
+
+ vec![target_32bit, target_64bit]
+ })
+ .unwrap_or_else(|| {
+ eprintln!(
+ "\
+Note that not all variants of mir-opt tests are going to be blessed, as no mapping between
+a 32bit and a 64bit target was found for {target}.
+You can add that mapping by changing MIR_OPT_BLESS_TARGET_MAPPING in src/bootstrap/test.rs",
+ target = self.target,
+ );
+ vec![self.target]
+ });
+
+ for target in targets {
+ run(target);
+
+ let panic_abort_target = builder.ensure(MirOptPanicAbortSyntheticTarget {
+ compiler: self.compiler,
+ base: target,
+ });
+ run(panic_abort_target);
+ }
+ } else {
+ run(self.target);
+ }
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
struct Compiletest {
compiler: Compiler,
@@ -1329,7 +1435,7 @@ help: to test the compiler, use `--stage 1` instead
help: to test the standard library, use `--stage 0 library/std` instead
note: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`."
);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
let mut compiler = self.compiler;
@@ -1398,6 +1504,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
|| (mode == "ui" && is_rustdoc)
|| mode == "js-doc-test"
|| mode == "rustdoc-json"
+ || suite == "run-coverage-rustdoc"
{
cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler));
}
@@ -1411,7 +1518,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
.arg(builder.ensure(tool::JsonDocLint { compiler: json_compiler, target }));
}
- if mode == "run-make" {
+ if mode == "run-make" || mode == "run-coverage" {
let rust_demangler = builder
.ensure(tool::RustDemangler {
compiler,
@@ -1424,7 +1531,15 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--src-base").arg(builder.src.join("tests").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
- cmd.arg("--sysroot-base").arg(builder.sysroot(compiler));
+
+ // When top stage is 0, that means that we're testing an externally provided compiler.
+ // In that case we need to use its specific sysroot for tests to pass.
+ let sysroot = if builder.top_stage == 0 {
+ builder.initial_sysroot.clone()
+ } else {
+ builder.sysroot(compiler).to_path_buf()
+ };
+ cmd.arg("--sysroot-base").arg(sysroot);
cmd.arg("--stage-id").arg(stage_id);
cmd.arg("--suite").arg(suite);
cmd.arg("--mode").arg(mode);
@@ -1529,7 +1644,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
for exclude in &builder.config.exclude {
cmd.arg("--skip");
- cmd.arg(&exclude.path);
+ cmd.arg(&exclude);
}
// Get paths from cmd args
@@ -1590,17 +1705,21 @@ note: if you're sure you want to do this, please open an issue as to why. In the
add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd);
}
- // Only pass correct values for these flags for the `run-make` suite as it
- // requires that a C++ compiler was configured which isn't always the case.
- if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
+ if !builder.config.dry_run()
+ && (matches!(suite, "run-make" | "run-make-fulldeps") || mode == "run-coverage")
+ {
// The llvm/bin directory contains many useful cross-platform
// tools. Pass the path to run-make tests so they can use them.
+ // (The run-coverage tests also need these tools to process
+ // coverage reports.)
let llvm_bin_path = llvm_config
.parent()
.expect("Expected llvm-config to be contained in directory");
assert!(llvm_bin_path.is_dir());
cmd.arg("--llvm-bin-dir").arg(llvm_bin_path);
+ }
+ if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
// If LLD is available, add it to the PATH
if builder.config.lld_enabled {
let lld_install_root =
@@ -1658,8 +1777,8 @@ note: if you're sure you want to do this, please open an issue as to why. In the
//
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
- if target.contains("msvc") {
- for &(ref k, ref v) in builder.cc[&target].env() {
+ if !builder.config.dry_run() && target.contains("msvc") {
+ for &(ref k, ref v) in builder.cc.borrow()[&target].env() {
if k != "PATH" {
cmd.env(k, v);
}
@@ -1684,7 +1803,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--adb-path").arg("adb");
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
- if target.contains("android") {
+ if target.contains("android") && !builder.config.dry_run() {
// Assume that cc for this target comes from the android sysroot
cmd.arg("--android-cross-path")
.arg(builder.cc(target).parent().unwrap().parent().unwrap());
@@ -1704,10 +1823,6 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--git-hash");
}
- if let Some(commit) = builder.config.download_rustc_commit() {
- cmd.env("FAKE_DOWNLOAD_RUSTC_PREFIX", format!("/rustc/{commit}"));
- }
-
builder.ci_env.force_coloring_in_ci(&mut cmd);
#[cfg(feature = "build-metrics")]
@@ -1723,12 +1838,14 @@ note: if you're sure you want to do this, please open an issue as to why. In the
builder,
);
- builder.info(&format!(
- "Check compiletest suite={} mode={} ({} -> {})",
- suite, mode, &compiler.host, target
- ));
- let _time = util::timeit(&builder);
- crate::render_tests::try_run_tests(builder, &mut cmd);
+ let _group = builder.msg(
+ Kind::Test,
+ compiler.stage,
+ &format!("compiletest suite={suite} mode={mode}"),
+ compiler.host,
+ target,
+ );
+ crate::render_tests::try_run_tests(builder, &mut cmd, false);
if let Some(compare_mode) = compare_mode {
cmd.arg("--compare-mode").arg(compare_mode);
@@ -1751,7 +1868,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the
suite, mode, compare_mode, &compiler.host, target
));
let _time = util::timeit(&builder);
- crate::render_tests::try_run_tests(builder, &mut cmd);
+ crate::render_tests::try_run_tests(builder, &mut cmd, false);
}
}
}
@@ -1776,6 +1893,14 @@ impl Step for BookTest {
///
/// This uses the `rustdoc` that sits next to `compiler`.
fn run(self, builder: &Builder<'_>) {
+ let host = self.compiler.host;
+ let _guard = builder.msg(
+ Kind::Test,
+ self.compiler.stage,
+ &format!("book {}", self.name),
+ host,
+ host,
+ );
// External docs are different from local because:
// - Some books need pre-processing by mdbook before being tested.
// - They need to save their state to toolstate.
@@ -1823,7 +1948,7 @@ impl BookTest {
compiler.host,
);
let _time = util::timeit(&builder);
- let toolstate = if try_run(builder, &mut rustbook_cmd) {
+ let toolstate = if try_run(builder, &mut rustbook_cmd).is_ok() {
ToolState::TestPass
} else {
ToolState::TestFail
@@ -1967,7 +2092,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) ->
}
}
- builder.info(&format!("doc tests for: {}", markdown.display()));
+ builder.verbose(&format!("doc tests for: {}", markdown.display()));
let mut cmd = builder.rustdoc_cmd(compiler);
builder.add_rust_test_threads(&mut cmd);
// allow for unstable options such as new editions
@@ -1981,7 +2106,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) ->
cmd.arg("--test-args").arg(test_args);
if builder.config.verbose_tests {
- try_run(builder, &mut cmd)
+ try_run(builder, &mut cmd).is_ok()
} else {
try_run_quiet(builder, &mut cmd)
}
@@ -2009,7 +2134,7 @@ impl Step for RustcGuide {
let src = builder.src.join(relative_path);
let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
- let toolstate = if try_run(builder, rustbook_cmd.arg("linkcheck").arg(&src)) {
+ let toolstate = if try_run(builder, rustbook_cmd.arg("linkcheck").arg(&src)).is_ok() {
ToolState::TestPass
} else {
ToolState::TestFail
@@ -2098,6 +2223,11 @@ fn prepare_cargo_test(
) -> Command {
let mut cargo = cargo.into();
+ // If bless is passed, give downstream crates a way to use it
+ if builder.config.cmd.bless() {
+ cargo.env("RUSTC_BLESS", "1");
+ }
+
// Pass in some standard flags then iterate over the graph we've discovered
// in `cargo metadata` with the maps above and figure out what `-p`
// arguments need to get passed.
@@ -2200,7 +2330,8 @@ impl Step for Crate {
let target = self.target;
let mode = self.mode;
- builder.ensure(compile::Std::new(compiler, target));
+ // See [field@compile::Std::force_recompile].
+ builder.ensure(compile::Std::force_recompile(compiler, target));
builder.ensure(RemoteCopyLibs { compiler, target });
// If we're not doing a full bootstrap but we're testing a stage2
@@ -2214,6 +2345,16 @@ impl Step for Crate {
match mode {
Mode::Std => {
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
+ // `std_cargo` actually does the wrong thing: it passes `--sysroot build/host/stage2`,
+ // but we want to use the force-recompile std we just built in `build/host/stage2-test-sysroot`.
+ // Override it.
+ if builder.download_rustc() {
+ let sysroot = builder
+ .out
+ .join(compiler.host.triple)
+ .join(format!("stage{}-test-sysroot", compiler.stage));
+ cargo.env("RUSTC_SYSROOT", sysroot);
+ }
}
Mode::Rustc => {
compile::rustc_cargo(builder, &mut cargo, target, compiler.stage);
@@ -2265,6 +2406,11 @@ impl Step for CrateRustdoc {
// isn't really necessary.
builder.compiler_for(builder.top_stage, target, target)
};
+ // NOTE: normally `ensure(Rustc)` automatically runs `ensure(Std)` for us. However, when
+ // using `download-rustc`, the rustc_private artifacts may be in a *different sysroot* from
+ // the target rustdoc (`ci-rustc-sysroot` vs `stage2`). In that case, we need to ensure this
+ // explicitly to make sure it ends up in the stage2 sysroot.
+ builder.ensure(compile::Std::new(compiler, target));
builder.ensure(compile::Rustc::new(compiler, target));
let mut cargo = tool::prepare_tool_cargo(
@@ -2316,7 +2462,13 @@ impl Step for CrateRustdoc {
dylib_path.insert(0, PathBuf::from(&*libdir));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
- let _guard = builder.msg(builder.kind, compiler.stage, "rustdoc", compiler.host, target);
+ let _guard = builder.msg_sysroot_tool(
+ builder.kind,
+ compiler.stage,
+ "rustdoc",
+ compiler.host,
+ target,
+ );
run_cargo_test(
cargo,
&[],
@@ -2503,6 +2655,9 @@ impl Step for Distcheck {
let toml = dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml");
builder.run(
Command::new(&builder.initial_cargo)
+ // Will read the libstd Cargo.toml
+ // which uses the unstable `public-dependency` feature.
+ .env("RUSTC_BOOTSTRAP", "1")
.arg("generate-lockfile")
.arg("--manifest-path")
.arg(&toml)
@@ -2522,8 +2677,14 @@ impl Step for Bootstrap {
/// Tests the build system itself.
fn run(self, builder: &Builder<'_>) {
let mut check_bootstrap = Command::new(&builder.python());
- check_bootstrap.arg("bootstrap_test.py").current_dir(builder.src.join("src/bootstrap/"));
- try_run(builder, &mut check_bootstrap);
+ check_bootstrap
+ .args(["-m", "unittest", "bootstrap_test.py"])
+ .env("BUILD_DIR", &builder.out)
+ .env("BUILD_PLATFORM", &builder.build.build.triple)
+ .current_dir(builder.src.join("src/bootstrap/"));
+ // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible.
+ // Use `python -m unittest` manually if you want to pass arguments.
+ try_run(builder, &mut check_bootstrap).unwrap();
let host = builder.config.build;
let compiler = builder.compiler(0, host);
@@ -2595,7 +2756,7 @@ impl Step for TierCheck {
}
builder.info("platform support check");
- try_run(builder, &mut cargo.into());
+ try_run(builder, &mut cargo.into()).unwrap();
}
}
@@ -2675,7 +2836,7 @@ impl Step for RustInstaller {
cmd.env("CARGO", &builder.initial_cargo);
cmd.env("RUSTC", &builder.initial_rustc);
cmd.env("TMP_DIR", &tmpdir);
- try_run(builder, &mut cmd);
+ try_run(builder, &mut cmd).unwrap();
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index b3791efaf..06c031788 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -107,7 +107,7 @@ impl Step for ToolBuild {
);
let mut cargo = Command::from(cargo);
- let is_expected = builder.try_run(&mut cargo);
+ let is_expected = builder.try_run(&mut cargo).is_ok();
builder.save_toolstate(
tool,
@@ -116,7 +116,7 @@ impl Step for ToolBuild {
if !is_expected {
if !is_optional_tool {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
} else {
None
}
@@ -289,7 +289,7 @@ bootstrap_tool!(
Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true, allow_features = "test";
BuildManifest, "src/tools/build-manifest", "build-manifest";
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client";
- RustInstaller, "src/tools/rust-installer", "rust-installer", is_external_tool = true;
+ RustInstaller, "src/tools/rust-installer", "rust-installer";
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";
LintDocs, "src/tools/lint-docs", "lint-docs";
@@ -711,7 +711,7 @@ impl Step for RustAnalyzerProcMacroSrv {
tool: "rust-analyzer-proc-macro-srv",
mode: Mode::ToolStd,
path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli",
- extra_features: vec!["proc-macro-srv/sysroot-abi".to_owned()],
+ extra_features: vec!["sysroot-abi".to_owned()],
is_optional_tool: false,
source_type: SourceType::InTree,
allow_features: RustAnalyzer::ALLOW_FEATURES,
@@ -855,7 +855,7 @@ impl<'a> Builder<'a> {
if compiler.host.contains("msvc") {
let curpaths = env::var_os("PATH").unwrap_or_default();
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
- for &(ref k, ref v) in self.cc[&compiler.host].env() {
+ for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() {
if k != "PATH" {
continue;
}
diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs
index 7aab88a1a..9c4d0ea26 100644
--- a/src/bootstrap/toolstate.rs
+++ b/src/bootstrap/toolstate.rs
@@ -91,7 +91,7 @@ fn print_error(tool: &str, submodule: &str) {
eprintln!("If you do NOT intend to update '{}', please ensure you did not accidentally", tool);
eprintln!("change the submodule at '{}'. You may ask your reviewer for the", submodule);
eprintln!("proper steps.");
- crate::detail_exit(3);
+ crate::detail_exit_macro!(3);
}
fn check_changed_files(toolstates: &HashMap<Box<str>, ToolState>) {
@@ -106,7 +106,7 @@ fn check_changed_files(toolstates: &HashMap<Box<str>, ToolState>) {
Ok(o) => o,
Err(e) => {
eprintln!("Failed to get changed files: {:?}", e);
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
};
@@ -177,7 +177,7 @@ impl Step for ToolStateCheck {
}
if did_error {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
check_changed_files(&toolstates);
@@ -223,7 +223,7 @@ impl Step for ToolStateCheck {
}
if did_error {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
if builder.config.channel == "nightly" && env::var_os("TOOLSTATE_PUBLISH").is_some() {
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 9bfdc77e6..b291584b3 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -3,6 +3,7 @@
//! Simple things like testing the various filesystem operations here and there,
//! not a lot of interesting happenings here unfortunately.
+use build_helper::util::{fail, try_run};
use std::env;
use std::fs;
use std::io;
@@ -133,17 +134,17 @@ pub(crate) fn program_out_of_date(stamp: &Path, key: &str) -> bool {
/// Symlinks two directories, using junctions on Windows and normal symlinks on
/// Unix.
-pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
+pub fn symlink_dir(config: &Config, original: &Path, link: &Path) -> io::Result<()> {
if config.dry_run() {
return Ok(());
}
- let _ = fs::remove_dir(dest);
- return symlink_dir_inner(src, dest);
+ let _ = fs::remove_dir(link);
+ return symlink_dir_inner(original, link);
#[cfg(not(windows))]
- fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> {
+ fn symlink_dir_inner(original: &Path, link: &Path) -> io::Result<()> {
use std::os::unix::fs;
- fs::symlink(src, dest)
+ fs::symlink(original, link)
}
#[cfg(windows)]
@@ -158,8 +159,6 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
pub enum CiEnv {
/// Not a CI environment.
None,
- /// The Azure Pipelines environment, for Linux (including Docker), Windows, and macOS builds.
- AzurePipelines,
/// The GitHub Actions environment, for Linux (including Docker), Windows and macOS builds.
GitHubActions,
}
@@ -229,26 +228,11 @@ pub fn is_valid_test_suite_arg<'a, P: AsRef<Path>>(
}
pub fn run(cmd: &mut Command, print_cmd_on_fail: bool) {
- if !try_run(cmd, print_cmd_on_fail) {
- crate::detail_exit(1);
+ if try_run(cmd, print_cmd_on_fail).is_err() {
+ crate::detail_exit_macro!(1);
}
}
-pub fn try_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool {
- let status = match cmd.status() {
- Ok(status) => status,
- Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}", cmd, e)),
- };
- if !status.success() && print_cmd_on_fail {
- println!(
- "\n\ncommand did not execute successfully: {:?}\n\
- expected success, got: {}\n\n",
- cmd, status
- );
- }
- status.success()
-}
-
pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool {
let status = match cmd.status() {
Ok(status) => status,
@@ -269,7 +253,7 @@ pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool {
pub fn run_suppressed(cmd: &mut Command) {
if !try_run_suppressed(cmd) {
- crate::detail_exit(1);
+ crate::detail_exit_macro!(1);
}
}
@@ -374,11 +358,6 @@ fn dir_up_to_date(src: &Path, threshold: SystemTime) -> bool {
})
}
-fn fail(s: &str) -> ! {
- eprintln!("\n\n{}\n\n", s);
- crate::detail_exit(1);
-}
-
/// Copied from `std::path::absolute` until it stabilizes.
///
/// FIXME: this shouldn't exist.