Diffstat (limited to 'src/bootstrap')
31 files changed, 1100 insertions, 707 deletions
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock index 4a0ba5925..e861d520c 100644 --- a/src/bootstrap/Cargo.lock +++ b/src/bootstrap/Cargo.lock @@ -50,6 +50,7 @@ dependencies = [ "opener", "pretty_assertions", "serde", + "serde_derive", "serde_json", "sha2", "sysinfo", @@ -564,9 +565,6 @@ name = "serde" version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" -dependencies = [ - "serde_derive", -] [[package]] name = "serde_derive" diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 22ceeca94..663987f11 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -39,7 +39,10 @@ cc = "1.0.69" libc = "0.2" hex = "0.4" object = { version = "0.29.0", default-features = false, features = ["archive", "coff", "read_core", "unaligned"] } -serde = { version = "1.0.8", features = ["derive"] } +serde = "1.0.137" +# Directly use serde_derive rather than through the derive feature of serde to allow building both +# in parallel and to allow serde_json and toml to start building as soon as serde has been built. +serde_derive = "1.0.137" serde_json = "1.0.2" sha2 = "0.10" tar = "0.4" diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 8dce9e79e..71eee8968 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -4,105 +4,31 @@ This is an in-progress README which is targeted at helping to explain how Rust is bootstrapped and in general, some of the technical details of the build system. -## Using rustbuild +Note that this README only covers internal information, not how to use the tool. +Please check [bootstrapping dev guide][bootstrapping-dev-guide] for further information. -The rustbuild build system has a primary entry point, a top level `x.py` script: +[bootstrapping-dev-guide]: https://rustc-dev-guide.rust-lang.org/building/bootstrapping.html -```sh -$ python ./x.py build -``` - -Note that if you're on Unix, you should be able to execute the script directly: - -```sh -$ ./x.py build -``` - -The script accepts commands, flags, and arguments to determine what to do: - -* `build` - a general purpose command for compiling code. Alone, `build` will - bootstrap the entire compiler, and otherwise, arguments passed indicate what to - build. For example: - - ``` - # build the whole compiler - ./x.py build --stage 2 - - # build the stage1 compiler - ./x.py build - - # build stage0 libstd - ./x.py build --stage 0 library/std - - # build a particular crate in stage0 - ./x.py build --stage 0 library/test - ``` - - If files that would normally be rebuilt from stage 0 are dirty, the rebuild can be - overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps - that belong to stage n or earlier: - - ``` - # build stage 1, keeping old build products for stage 0 - ./x.py build --keep-stage 0 - ``` - -* `test` - a command for executing unit tests. 
Like the `build` command, this - will execute the entire test suite by default, and otherwise, it can be used to - select which test suite is run: - - ``` - # run all unit tests - ./x.py test - - # execute tool tests - ./x.py test tidy - - # execute the UI test suite - ./x.py test tests/ui - - # execute only some tests in the UI test suite - ./x.py test tests/ui --test-args substring-of-test-name - - # execute tests in the standard library in stage0 - ./x.py test --stage 0 library/std - - # execute tests in the core and standard library in stage0, - # without running doc tests (thus avoid depending on building the compiler) - ./x.py test --stage 0 --no-doc library/core library/std +## Introduction - # execute all doc tests - ./x.py test src/doc - ``` +The build system defers most of the complicated logic managing invocations +of rustc and rustdoc to Cargo itself. However, moving through various stages +and copying artifacts is still necessary for it to do. Each time rustbuild +is invoked, it will iterate through the list of predefined steps and execute +each serially in turn if it matches the paths passed or is a default rule. +For each step rustbuild relies on the step internally being incremental and +parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded +to appropriate test harnesses and such. -* `doc` - a command for building documentation. Like above, can take arguments - for what to document. - -## Configuring rustbuild - -rustbuild offers a TOML-based configuration system with a `config.toml` -file. An example of this configuration can be found at `config.toml.example`, -and the configuration file can also be passed as `--config path/to/config.toml` -if the build system is being invoked manually (via the python script). - -You can generate a config.toml using `./configure` options if you want to automate creating the file without having to edit it. - -Finally, rustbuild makes use of the [cc-rs crate] which has [its own -method][env-vars] of configuring C compilers and C flags via environment -variables. - -[cc-rs crate]: https://github.com/alexcrichton/cc-rs -[env-vars]: https://github.com/alexcrichton/cc-rs#external-configuration-via-environment-variables - -## Build stages +## Build phases The rustbuild build system goes through a few phases to actually build the compiler. What actually happens when you invoke rustbuild is: -1. The entry point script, `x.py` is run. This script is - responsible for downloading the stage0 compiler/Cargo binaries, and it then - compiles the build system itself (this folder). Finally, it then invokes the - actual `bootstrap` binary build system. +1. The entry point script(`x` for unix like systems, `x.ps1` for windows systems, + `x.py` cross-platform) is run. This script is responsible for downloading the stage0 + compiler/Cargo binaries, and it then compiles the build system itself (this folder). + Finally, it then invokes the actual `bootstrap` binary build system. 2. In Rust, `bootstrap` will slurp up all configuration, perform a number of sanity checks (whether compilers exist, for example), and then start building the stage0 artifacts. @@ -115,24 +41,6 @@ compiler. What actually happens when you invoke rustbuild is: The goal of each stage is to (a) leverage Cargo as much as possible and failing that (b) leverage Rust as much as possible! 
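(Editorial aside, not part of the patch.) The new "Introduction" above describes rustbuild as iterating over a list of predefined steps and running each one that matches the requested paths. For readers unfamiliar with that machinery, here is a minimal sketch of what such a step roughly looks like, modeled on the `Step` implementations visible elsewhere in this diff (`type Output`, `should_run`, `run`, `builder.info`, `builder.ensure`); the `MyTool` struct, the `run.path(...)` registration, and the `make_run` body are assumptions for illustration, not code from this change:

```rust
use crate::builder::{Builder, RunConfig, ShouldRun, Step};

// Hypothetical step, used only to illustrate the shape of the trait.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
struct MyTool;

impl Step for MyTool {
    // What the step produces; `()` means it only has side effects.
    type Output = ();

    // Which command-line paths select this step (assumed path).
    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
        run.path("src/tools/my-tool")
    }

    // How a matched path is turned into an instance of the step.
    fn make_run(run: RunConfig<'_>) {
        run.builder.ensure(MyTool);
    }

    // The actual work; steps may `ensure` other steps they depend on.
    fn run(self, builder: &Builder<'_>) {
        builder.info("Building my-tool (illustrative only)");
    }
}
```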
-## Incremental builds - -You can configure rustbuild to use incremental compilation with the -`--incremental` flag: - -```sh -$ ./x.py build --incremental -``` - -The `--incremental` flag will store incremental compilation artifacts -in `build/<host>/stage0-incremental`. Note that we only use incremental -compilation for the stage0 -> stage1 compilation -- this is because -the stage1 compiler is changing, and we don't try to cache and reuse -incremental artifacts across different versions of the compiler. - -You can always drop the `--incremental` to build as normal (but you -will still be using the local nightly as your bootstrap). - ## Directory Layout This build system houses all output under the `build` directory, which looks @@ -236,63 +144,31 @@ build/ # system will link (using hard links) output from stageN-{std,rustc} into # each of these directories. # - # In theory, there is no extra build output in these directories. + # In theory these are working rustc sysroot directories, meaning there is + # no extra build output in these directories. stage1/ stage2/ stage3/ ``` -## Cargo projects - -The current build is unfortunately not quite as simple as `cargo build` in a -directory, but rather the compiler is split into three different Cargo projects: - -* `library/std` - the standard library -* `library/test` - testing support, depends on libstd -* `compiler/rustc` - the actual compiler itself - -Each "project" has a corresponding Cargo.lock file with all dependencies, and -this means that building the compiler involves running Cargo three times. The -structure here serves two goals: - -1. Facilitating dependencies coming from crates.io. These dependencies don't - depend on `std`, so libstd is a separate project compiled ahead of time - before the actual compiler builds. -2. Splitting "host artifacts" from "target artifacts". That is, when building - code for an arbitrary target, you don't need the entire compiler, but you'll - end up needing libraries like libtest that depend on std but also want to use - crates.io dependencies. Hence, libtest is split out as its own project that - is sequenced after `std` but before `rustc`. This project is built for all - targets. - -There is some loss in build parallelism here because libtest can be compiled in -parallel with a number of rustc artifacts, but in theory, the loss isn't too bad! - -## Build tools - -We've actually got quite a few tools that we use in the compiler's build system -and for testing. To organize these, each tool is a project in `src/tools` with a -corresponding `Cargo.toml`. All tools are compiled with Cargo (currently having -independent `Cargo.lock` files) and do not currently explicitly depend on the -compiler or standard library. Compiling each tool is sequenced after the -appropriate libstd/libtest/librustc compile above. - ## Extending rustbuild -So, you'd like to add a feature to the rustbuild build system or just fix a bug. -Great! One of the major motivational factors for moving away from `make` is that -Rust is in theory much easier to read, modify, and write. If you find anything -excessively confusing, please open an issue on this, and we'll try to get it -documented or simplified, pronto. +When you use the bootstrap system, you'll call it through the entry point script +(`x`, `x.ps1`, or `x.py`). However, most of the code lives in `src/bootstrap`. +`bootstrap` has a difficult problem: it is written in Rust, but yet it is run +before the Rust compiler is built! 
To work around this, there are two components +of bootstrap: the main one written in rust, and `bootstrap.py`. `bootstrap.py` +is what gets run by entry point script. It takes care of downloading the `stage0` +compiler, which will then build the bootstrap binary written in Rust. -First up, you'll probably want to read over the documentation above, as that'll -give you a high level overview of what rustbuild is doing. You also probably -want to play around a bit yourself by just getting it up and running before you -dive too much into the actual build system itself. +Because there are two separate codebases behind `x.py`, they need to +be kept in sync. In particular, both `bootstrap.py` and the bootstrap binary +parse `config.toml` and read the same command line arguments. `bootstrap.py` +keeps these in sync by setting various environment variables, and the +programs sometimes have to add arguments that are explicitly ignored, to be +read by the other. -After that, each module in rustbuild should have enough documentation to keep -you up and running. Some general areas that you may be interested in modifying -are: +Some general areas that you may be interested in modifying are: * Adding a new build tool? Take a look at `bootstrap/tool.rs` for examples of other tools. @@ -320,8 +196,9 @@ A 'major change' includes Changes that do not affect contributors to the compiler or users building rustc from source don't need an update to `VERSION`. -If you have any questions, feel free to reach out on the `#t-infra` channel in -the [Rust Zulip server][rust-zulip] or ask on internals.rust-lang.org. When -you encounter bugs, please file issues on the rust-lang/rust issue tracker. +If you have any questions, feel free to reach out on the `#t-infra/bootstrap` channel +at [Rust Bootstrap Zulip server][rust-bootstrap-zulip]. When you encounter bugs, +please file issues on the [Rust issue tracker][rust-issue-tracker]. -[rust-zulip]: https://rust-lang.zulipchat.com/#narrow/stream/242791-t-infra +[rust-bootstrap-zulip]: https://rust-lang.zulipchat.com/#narrow/stream/t-infra.2Fbootstrap +[rust-issue-tracker]: https://github.com/rust-lang/rust/issues diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs index be69f819c..3856bb64f 100644 --- a/src/bootstrap/bin/main.rs +++ b/src/bootstrap/bin/main.rs @@ -16,12 +16,17 @@ fn main() { let mut build_lock; let _build_lock_guard; if cfg!(any(unix, windows)) { - build_lock = fd_lock::RwLock::new(t!(std::fs::File::create(config.out.join("lock")))); + let path = config.out.join("lock"); + build_lock = fd_lock::RwLock::new(t!(std::fs::File::create(&path))); _build_lock_guard = match build_lock.try_write() { Ok(lock) => lock, err => { - println!("warning: build directory locked, waiting for lock"); drop(err); + if let Some(pid) = get_lock_owner(&path) { + println!("warning: build directory locked by process {pid}, waiting for lock"); + } else { + println!("warning: build directory locked, waiting for lock"); + } t!(build_lock.write()) } }; @@ -98,3 +103,30 @@ fn check_version(config: &Config) -> Option<String> { Some(msg) } + +/// Get the PID of the process which took the write lock by +/// parsing `/proc/locks`. 
+#[cfg(target_os = "linux")] +fn get_lock_owner(f: &std::path::Path) -> Option<u64> { + use std::fs::File; + use std::io::{BufRead, BufReader}; + use std::os::unix::fs::MetadataExt; + + let lock_inode = std::fs::metadata(f).ok()?.ino(); + let lockfile = File::open("/proc/locks").ok()?; + BufReader::new(lockfile).lines().find_map(|line| { + // pid--vvvvvv vvvvvvv--- inode + // 21: FLOCK ADVISORY WRITE 359238 08:02:3719774 0 EOF + let line = line.ok()?; + let parts = line.split_whitespace().collect::<Vec<_>>(); + let (pid, inode) = (parts[4].parse::<u64>().ok()?, &parts[5]); + let inode = inode.rsplit_once(':')?.1.parse::<u64>().ok()?; + if inode == lock_inode { Some(pid) } else { None } + }) +} + +#[cfg(not(target_os = "linux"))] +fn get_lock_owner(_: &std::path::Path) -> Option<u64> { + // FIXME: Implement on other OS's + None +} diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 9cf43fc7a..013d1ab52 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -2,7 +2,6 @@ from __future__ import absolute_import, division, print_function import argparse import contextlib import datetime -import distutils.version import hashlib import json import os @@ -13,17 +12,17 @@ import sys import tarfile import tempfile -from time import time, sleep +from time import time -def support_xz(): - try: - with tempfile.NamedTemporaryFile(delete=False) as temp_file: - temp_path = temp_file.name - with tarfile.open(temp_path, "w:xz"): - pass - return True - except tarfile.CompressionError: - return False +try: + import lzma +except ImportError: + lzma = None + +if sys.platform == 'win32': + EXE_SUFFIX = ".exe" +else: + EXE_SUFFIX = "" def get(base, url, path, checksums, verbose=False): with tempfile.NamedTemporaryFile(delete=False) as temp_file: @@ -61,7 +60,7 @@ def get(base, url, path, checksums, verbose=False): def download(path, url, probably_big, verbose): - for _ in range(0, 4): + for _ in range(4): try: _download(path, url, probably_big, verbose, True) return @@ -88,14 +87,16 @@ def _download(path, url, probably_big, verbose, exception): # If curl is not present on Win32, we should not sys.exit # but raise `CalledProcessError` or `OSError` instead require(["curl", "--version"], exception=platform_is_win32) - run(["curl", option, - "-L", # Follow redirect. - "-y", "30", "-Y", "10", # timeout if speed is < 10 bytes/sec for > 30 seconds - "--connect-timeout", "30", # timeout if cannot connect within 30 seconds - "--retry", "3", "-Sf", "-o", path, url], - verbose=verbose, - exception=True, # Will raise RuntimeError on failure - ) + with open(path, "wb") as outfile: + run(["curl", option, + "-L", # Follow redirect. 
+ "-y", "30", "-Y", "10", # timeout if speed is < 10 bytes/sec for > 30 seconds + "--connect-timeout", "30", # timeout if cannot connect within 30 seconds + "--retry", "3", "-Sf", url], + stdout=outfile, #Implements cli redirect operator '>' + verbose=verbose, + exception=True, # Will raise RuntimeError on failure + ) except (subprocess.CalledProcessError, OSError, RuntimeError): # see http://serverfault.com/questions/301128/how-to-download if platform_is_win32: @@ -395,17 +396,18 @@ class RustBuild(object): def __init__(self): self.checksums_sha256 = {} self.stage0_compiler = None - self._download_url = '' + self.download_url = '' self.build = '' self.build_dir = '' self.clean = False self.config_toml = '' self.rust_root = '' - self.use_locked_deps = '' - self.use_vendored_sources = '' + self.use_locked_deps = False + self.use_vendored_sources = False self.verbose = False self.git_version = None self.nix_deps_dir = None + self._should_fix_bins_and_dylibs = None def download_toolchain(self): """Fetch the build system for Rust, written in Rust @@ -426,7 +428,7 @@ class RustBuild(object): self.program_out_of_date(self.rustc_stamp(), key)): if os.path.exists(bin_root): shutil.rmtree(bin_root) - tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz' + tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz' filename = "rust-std-{}-{}{}".format( rustc_channel, self.build, tarball_suffix) pattern = "rust-std-{}".format(self.build) @@ -437,15 +439,17 @@ class RustBuild(object): filename = "cargo-{}-{}{}".format(rustc_channel, self.build, tarball_suffix) self._download_component_helper(filename, "cargo", tarball_suffix) - self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root)) - - self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root)) - self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root)) - self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root)) - lib_dir = "{}/lib".format(bin_root) - for lib in os.listdir(lib_dir): - if lib.endswith(".so"): - self.fix_bin_or_dylib(os.path.join(lib_dir, lib)) + if self.should_fix_bins_and_dylibs(): + self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root)) + + self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root)) + self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root)) + self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root)) + lib_dir = "{}/lib".format(bin_root) + for lib in os.listdir(lib_dir): + if lib.endswith(".so"): + self.fix_bin_or_dylib(os.path.join(lib_dir, lib)) + with output(self.rustc_stamp()) as rust_stamp: rust_stamp.write(key) @@ -458,60 +462,73 @@ class RustBuild(object): if not os.path.exists(rustc_cache): os.makedirs(rustc_cache) - base = self._download_url - url = "dist/{}".format(key) tarball = os.path.join(rustc_cache, filename) if not os.path.exists(tarball): get( - base, - "{}/{}".format(url, filename), + self.download_url, + "dist/{}/{}".format(key, filename), tarball, self.checksums_sha256, verbose=self.verbose, ) unpack(tarball, tarball_suffix, self.bin_root(), match=pattern, verbose=self.verbose) - def fix_bin_or_dylib(self, fname): - """Modifies the interpreter section of 'fname' to fix the dynamic linker, - or the RPATH section, to fix the dynamic library search path - - This method is only required on NixOS and uses the PatchELF utility to - change the interpreter/RPATH of ELF executables. 
- - Please see https://nixos.org/patchelf.html for more information + def should_fix_bins_and_dylibs(self): + """Whether or not `fix_bin_or_dylib` needs to be run; can only be True + on NixOS. """ - default_encoding = sys.getdefaultencoding() - try: - ostype = subprocess.check_output( - ['uname', '-s']).strip().decode(default_encoding) - except subprocess.CalledProcessError: - return - except OSError as reason: - if getattr(reason, 'winerror', None) is not None: - return - raise reason + if self._should_fix_bins_and_dylibs is not None: + return self._should_fix_bins_and_dylibs - if ostype != "Linux": - return + def get_answer(): + default_encoding = sys.getdefaultencoding() + try: + ostype = subprocess.check_output( + ['uname', '-s']).strip().decode(default_encoding) + except subprocess.CalledProcessError: + return False + except OSError as reason: + if getattr(reason, 'winerror', None) is not None: + return False + raise reason + + if ostype != "Linux": + return False + + # If the user has asked binaries to be patched for Nix, then + # don't check for NixOS or `/lib`. + if self.get_toml("patch-binaries-for-nix", "build") == "true": + return True - # If the user has asked binaries to be patched for Nix, then - # don't check for NixOS or `/lib`, just continue to the patching. - if self.get_toml('patch-binaries-for-nix', 'build') != 'true': # Use `/etc/os-release` instead of `/etc/NIXOS`. # The latter one does not exist on NixOS when using tmpfs as root. try: with open("/etc/os-release", "r") as f: - if not any(l.strip() in ["ID=nixos", "ID='nixos'", 'ID="nixos"'] for l in f): - return + if not any(l.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"') for l in f): + return False except FileNotFoundError: - return + return False if os.path.exists("/lib"): - return + return False + + return True + + answer = self._should_fix_bins_and_dylibs = get_answer() + if answer: + print("info: You seem to be using Nix.") + return answer - # At this point we're pretty sure the user is running NixOS or - # using Nix - nix_os_msg = "info: you seem to be using Nix. Attempting to patch" - print(nix_os_msg, fname) + def fix_bin_or_dylib(self, fname): + """Modifies the interpreter section of 'fname' to fix the dynamic linker, + or the RPATH section, to fix the dynamic library search path + + This method is only required on NixOS and uses the PatchELF utility to + change the interpreter/RPATH of ELF executables. + + Please see https://nixos.org/patchelf.html for more information + """ + assert self._should_fix_bins_and_dylibs is True + print("attempting to patch", fname) # Only build `.nix-deps` once. 
nix_deps_dir = self.nix_deps_dir @@ -666,8 +683,7 @@ class RustBuild(object): config = self.get_toml(program) if config: return os.path.expanduser(config) - return os.path.join(self.bin_root(), "bin", "{}{}".format( - program, self.exe_suffix())) + return os.path.join(self.bin_root(), "bin", "{}{}".format(program, EXE_SUFFIX)) @staticmethod def get_string(line): @@ -692,13 +708,6 @@ class RustBuild(object): return line[start + 1:end] return None - @staticmethod - def exe_suffix(): - """Return a suffix for executables""" - if sys.platform == 'win32': - return '.exe' - return '' - def bootstrap_binary(self): """Return the path of the bootstrap binary @@ -710,9 +719,9 @@ class RustBuild(object): """ return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap") - def build_bootstrap(self, color): + def build_bootstrap(self, color, verbose_count): """Build bootstrap""" - print("Building rustbuild") + print("Building bootstrap") build_dir = os.path.join(self.build_dir, "bootstrap") if self.clean and os.path.exists(build_dir): shutil.rmtree(build_dir) @@ -757,7 +766,6 @@ class RustBuild(object): if target_linker is not None: env["RUSTFLAGS"] += " -C linker=" + target_linker env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes" - env["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros" if self.get_toml("deny-warnings", "rust") != "false": env["RUSTFLAGS"] += " -Dwarnings" @@ -768,8 +776,7 @@ class RustBuild(object): self.cargo())) args = [self.cargo(), "build", "--manifest-path", os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")] - for _ in range(0, self.verbose): - args.append("--verbose") + args.extend("--verbose" for _ in range(verbose_count)) if self.use_locked_deps: args.append("--locked") if self.use_vendored_sources: @@ -777,6 +784,8 @@ class RustBuild(object): if self.get_toml("metrics", "build"): args.append("--features") args.append("build-metrics") + if self.json_output: + args.append("--message-format=json") if color == "always": args.append("--color=always") elif color == "never": @@ -792,16 +801,7 @@ class RustBuild(object): so use `self.build` where possible. """ config = self.get_toml('build') - if config: - return config - return default_build_triple(self.verbose) - - def set_dist_environment(self, url): - """Set download URL for normal environment""" - if 'RUSTUP_DIST_SERVER' in os.environ: - self._download_url = os.environ['RUSTUP_DIST_SERVER'] - else: - self._download_url = url + return config or default_build_triple(self.verbose) def check_vendored_status(self): """Check that vendoring is configured properly""" @@ -834,32 +834,28 @@ class RustBuild(object): if os.path.exists(cargo_dir): shutil.rmtree(cargo_dir) -def bootstrap(help_triggered): - """Configure, fetch, build and run the initial bootstrap""" - - # If the user is asking for help, let them know that the whole download-and-build - # process has to happen before anything is printed out. - if help_triggered: - print("info: Downloading and building bootstrap before processing --help") - print(" command. 
See src/bootstrap/README.md for help with common") - print(" commands.") - - parser = argparse.ArgumentParser(description='Build rust') +def parse_args(): + """Parse the command line arguments that the python script needs.""" + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument('-h', '--help', action='store_true') parser.add_argument('--config') parser.add_argument('--build-dir') parser.add_argument('--build') parser.add_argument('--color', choices=['always', 'never', 'auto']) parser.add_argument('--clean', action='store_true') + parser.add_argument('--json-output', action='store_true') parser.add_argument('-v', '--verbose', action='count', default=0) - args = [a for a in sys.argv if a != '-h' and a != '--help'] - args, _ = parser.parse_known_args(args) + return parser.parse_known_args(sys.argv)[0] +def bootstrap(args): + """Configure, fetch, build and run the initial bootstrap""" # Configure initial bootstrap build = RustBuild() build.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) - build.verbose = args.verbose + build.verbose = args.verbose != 0 build.clean = args.clean + build.json_output = args.json_output # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, # then `config.toml` in the root directory. @@ -886,12 +882,12 @@ def bootstrap(help_triggered): with open(include_path) as included_toml: build.config_toml += os.linesep + included_toml.read() - config_verbose = build.get_toml('verbose', 'build') - if config_verbose is not None: - build.verbose = max(build.verbose, int(config_verbose)) + verbose_count = args.verbose + config_verbose_count = build.get_toml('verbose', 'build') + if config_verbose_count is not None: + verbose_count = max(args.verbose, int(config_verbose_count)) build.use_vendored_sources = build.get_toml('vendor', 'build') == 'true' - build.use_locked_deps = build.get_toml('locked-deps', 'build') == 'true' build.check_vendored_status() @@ -903,8 +899,7 @@ def bootstrap(help_triggered): data = json.load(f) build.checksums_sha256 = data["checksums_sha256"] build.stage0_compiler = Stage0Toolchain(data["compiler"]) - - build.set_dist_environment(data["config"]["dist_server"]) + build.download_url = os.getenv("RUSTUP_DIST_SERVER") or data["config"]["dist_server"] build.build = args.build or build.build_triple() @@ -914,7 +909,7 @@ def bootstrap(help_triggered): # Fetch/build the bootstrap build.download_toolchain() sys.stdout.flush() - build.build_bootstrap(args.color) + build.build_bootstrap(args.color, verbose_count) sys.stdout.flush() # Run the bootstrap @@ -932,25 +927,34 @@ def main(): # x.py help <cmd> ... if len(sys.argv) > 1 and sys.argv[1] == 'help': - sys.argv = [sys.argv[0], '-h'] + sys.argv[2:] + sys.argv[1] = '-h' + + args = parse_args() + help_triggered = args.help or len(sys.argv) == 1 - help_triggered = ( - '-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1) + # If the user is asking for help, let them know that the whole download-and-build + # process has to happen before anything is printed out. + if help_triggered: + print( + "info: Downloading and building bootstrap before processing --help command.\n" + " See src/bootstrap/README.md for help with common commands." 
+ ) + + exit_code = 0 + success_word = "successfully" try: - bootstrap(help_triggered) - if not help_triggered: - print("Build completed successfully in {}".format( - format_build_time(time() - start_time))) + bootstrap(args) except (SystemExit, KeyboardInterrupt) as error: if hasattr(error, 'code') and isinstance(error.code, int): exit_code = error.code else: exit_code = 1 print(error) - if not help_triggered: - print("Build completed unsuccessfully in {}".format( - format_build_time(time() - start_time))) - sys.exit(exit_code) + success_word = "unsuccessfully" + + if not help_triggered: + print("Build completed", success_word, "in", format_build_time(time() - start_time)) + sys.exit(exit_code) if __name__ == '__main__': diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index b4fc1d4f2..b33fc02f4 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -780,7 +780,6 @@ impl<'a> Builder<'a> { install::Clippy, install::Miri, install::LlvmTools, - install::Analysis, install::Src, install::Rustc ), @@ -793,7 +792,7 @@ impl<'a> Builder<'a> { run::CollectLicenseMetadata, run::GenerateCopyright, ), - Kind::Setup => describe!(setup::Profile), + Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode), Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std), // special-cased in Build::build() Kind::Format => vec![], @@ -1802,16 +1801,6 @@ impl<'a> Builder<'a> { } } - if mode == Mode::Std && self.config.extended && compiler.is_final_stage(self) { - rustflags.arg("-Zsave-analysis"); - cargo.env( - "RUST_SAVE_ANALYSIS_CONFIG", - "{\"output_file\": null,\"full_docs\": false,\ - \"pub_only\": true,\"reachable_only\": false,\ - \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}", - ); - } - // If Control Flow Guard is enabled, pass the `control-flow-guard` flag to rustc // when compiling the standard library, since this might be linked into the final outputs // produced by rustc. Since this mitigation is only available on Windows, only enable it @@ -1926,6 +1915,13 @@ impl<'a> Builder<'a> { } } + if matches!(mode, Mode::Std) { + if let Some(mir_opt_level) = self.config.rust_validate_mir_opts { + rustflags.arg("-Zvalidate-mir"); + rustflags.arg(&format!("-Zmir-opt-level={}", mir_opt_level)); + } + } + Cargo { command: cargo, rustflags, rustdocflags, allow_features } } diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs index d5fcd1075..3574f1118 100644 --- a/src/bootstrap/builder/tests.rs +++ b/src/bootstrap/builder/tests.rs @@ -557,6 +557,7 @@ mod dist { rustfix_coverage: false, pass: None, run: None, + only_modified: false, }; let build = Build::new(config); @@ -627,6 +628,7 @@ mod dist { rustfix_coverage: false, pass: None, run: None, + only_modified: false, }; // Make sure rustfmt binary not being found isn't an error. 
config.channel = "beta".to_string(); diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 4b8a58e87..cd1966713 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -58,9 +58,10 @@ fn args(builder: &Builder<'_>) -> Vec<String> { clippy_lint_warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v))); clippy_lint_forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v))); args.extend(clippy_lint_levels); + args.extend(builder.config.free_args.clone()); args } else { - vec![] + builder.config.free_args.clone() } } @@ -99,11 +100,20 @@ impl Step for Std { cargo_subcommand(builder.kind), ); std_cargo(builder, target, compiler.stage, &mut cargo); + if matches!(builder.config.cmd, Subcommand::Fix { .. }) { + // By default, cargo tries to fix all targets. Tell it not to fix tests until we've added `test` to the sysroot. + cargo.arg("--lib"); + } - builder.info(&format!( - "Checking stage{} library artifacts ({} -> {})", - builder.top_stage, &compiler.host, target - )); + let msg = if compiler.host == target { + format!("Checking stage{} library artifacts ({target})", builder.top_stage) + } else { + format!( + "Checking stage{} library artifacts ({} -> {})", + builder.top_stage, &compiler.host, target + ) + }; + builder.info(&msg); run_cargo( builder, cargo, @@ -157,10 +167,18 @@ impl Step for Std { cargo.arg("-p").arg(krate.name); } - builder.info(&format!( - "Checking stage{} library test/bench/example targets ({} -> {})", - builder.top_stage, &compiler.host, target - )); + let msg = if compiler.host == target { + format!( + "Checking stage{} library test/bench/example targets ({target})", + builder.top_stage + ) + } else { + format!( + "Checking stage{} library test/bench/example targets ({} -> {})", + builder.top_stage, &compiler.host, target + ) + }; + builder.info(&msg); run_cargo( builder, cargo, @@ -234,10 +252,15 @@ impl Step for Rustc { cargo.arg("-p").arg(krate.name); } - builder.info(&format!( - "Checking stage{} compiler artifacts ({} -> {})", - builder.top_stage, &compiler.host, target - )); + let msg = if compiler.host == target { + format!("Checking stage{} compiler artifacts ({target})", builder.top_stage) + } else { + format!( + "Checking stage{} compiler artifacts ({} -> {})", + builder.top_stage, &compiler.host, target + ) + }; + builder.info(&msg); run_cargo( builder, cargo, @@ -294,10 +317,15 @@ impl Step for CodegenBackend { .arg(builder.src.join(format!("compiler/rustc_codegen_{}/Cargo.toml", backend))); rustc_cargo_env(builder, &mut cargo, target); - builder.info(&format!( - "Checking stage{} {} artifacts ({} -> {})", - builder.top_stage, backend, &compiler.host.triple, target.triple - )); + let msg = if compiler.host == target { + format!("Checking stage{} {} artifacts ({target})", builder.top_stage, backend) + } else { + format!( + "Checking stage{} {} library ({} -> {})", + builder.top_stage, backend, &compiler.host.triple, target.triple + ) + }; + builder.info(&msg); run_cargo( builder, @@ -357,10 +385,15 @@ impl Step for RustAnalyzer { cargo.arg("--benches"); } - builder.info(&format!( - "Checking stage{} {} artifacts ({} -> {})", - compiler.stage, "rust-analyzer", &compiler.host.triple, target.triple - )); + let msg = if compiler.host == target { + format!("Checking stage{} {} artifacts ({target})", compiler.stage, "rust-analyzer") + } else { + format!( + "Checking stage{} {} artifacts ({} -> {})", + compiler.stage, "rust-analyzer", &compiler.host.triple, target.triple + ) + }; + builder.info(&msg); run_cargo( 
builder, cargo, @@ -427,14 +460,18 @@ macro_rules! tool_check_step { // NOTE: this doesn't enable lints for any other tools unless they explicitly add `#![warn(rustc::internal)]` // See https://github.com/rust-lang/rust/pull/80573#issuecomment-754010776 cargo.rustflag("-Zunstable-options"); - - builder.info(&format!( - "Checking stage{} {} artifacts ({} -> {})", - builder.top_stage, - stringify!($name).to_lowercase(), - &compiler.host.triple, - target.triple - )); + let msg = if compiler.host == target { + format!("Checking stage{} {} artifacts ({target})", builder.top_stage, stringify!($name).to_lowercase()) + } else { + format!( + "Checking stage{} {} artifacts ({} -> {})", + builder.top_stage, + stringify!($name).to_lowercase(), + &compiler.host.triple, + target.triple + ) + }; + builder.info(&msg); run_cargo( builder, cargo, diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 468efc111..7ebd0a8f2 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -62,6 +62,7 @@ macro_rules! clean_crate_tree { let target = compiler.host; let mut cargo = builder.bare_cargo(compiler, $mode, target, "clean"); for krate in &*self.crates { + cargo.arg("-p"); cargo.arg(krate); } diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 68d1db016..8b80dfc0f 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -16,7 +16,7 @@ use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str; -use serde::Deserialize; +use serde_derive::Deserialize; use crate::builder::crate_description; use crate::builder::Cargo; @@ -111,10 +111,18 @@ impl Step for Std { let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); if compiler_to_use != compiler { builder.ensure(Std::new(compiler_to_use, target)); - builder.info(&format!( - "Uplifting stage1 library ({} -> {})", - compiler_to_use.host, target - )); + let msg = if compiler_to_use.host == target { + format!( + "Uplifting library (stage{} -> stage{})", + compiler_to_use.stage, compiler.stage + ) + } else { + format!( + "Uplifting library (stage{}:{} -> stage{}:{})", + compiler_to_use.stage, compiler_to_use.host, compiler.stage, target + ) + }; + builder.info(&msg); // Even if we're not building std this stage, the new sysroot must // still contain the third party objects needed by various targets. @@ -134,13 +142,23 @@ impl Step for Std { cargo.arg("-p").arg(krate); } - builder.info(&format!( - "Building{} stage{} library artifacts ({} -> {})", - crate_description(&self.crates), - compiler.stage, - &compiler.host, - target, - )); + let msg = if compiler.host == target { + format!( + "Building{} stage{} library artifacts ({}) ", + crate_description(&self.crates), + compiler.stage, + compiler.host + ) + } else { + format!( + "Building{} stage{} library artifacts ({} -> {})", + crate_description(&self.crates), + compiler.stage, + compiler.host, + target, + ) + }; + builder.info(&msg); run_cargo( builder, cargo, @@ -379,6 +397,9 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car if stage >= 1 { cargo.rustflag("-Cembed-bitcode=yes"); } + if builder.config.rust_lto == RustcLto::Off { + cargo.rustflag("-Clto=off"); + } // By default, rustc does not include unwind tables unless they are required // for a particular target. 
They are not required by RISC-V targets, but @@ -435,10 +456,6 @@ impl Step for StdLink { let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - builder.info(&format!( - "Copying stage{} library from stage{} ({} -> {} / {})", - target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target - )); let libdir = builder.sysroot_libdir(target_compiler, target); let hostdir = builder.sysroot_libdir(target_compiler, compiler.host); add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); @@ -464,7 +481,12 @@ fn copy_sanitizers( let dst = libdir.join(&runtime.name); builder.copy(&runtime.path, &dst); - if target == "x86_64-apple-darwin" || target == "aarch64-apple-darwin" { + if target == "x86_64-apple-darwin" + || target == "aarch64-apple-darwin" + || target == "aarch64-apple-ios" + || target == "aarch64-apple-ios-sim" + || target == "x86_64-apple-ios" + { // Update the library’s install name to reflect that it has been renamed. apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name)); // Upon renaming the install name, the code signature of the file will invalidate, @@ -641,8 +663,22 @@ impl Step for Rustc { let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); if compiler_to_use != compiler { builder.ensure(Rustc::new(compiler_to_use, target)); - builder - .info(&format!("Uplifting stage1 rustc ({} -> {})", builder.config.build, target)); + let msg = if compiler_to_use.host == target { + format!( + "Uplifting rustc (stage{} -> stage{})", + compiler_to_use.stage, + compiler.stage + 1 + ) + } else { + format!( + "Uplifting rustc (stage{}:{} -> stage{}:{})", + compiler_to_use.stage, + compiler_to_use.host, + compiler.stage + 1, + target + ) + }; + builder.info(&msg); builder.ensure(RustcLink::from_rustc(self, compiler_to_use)); return; } @@ -722,6 +758,13 @@ impl Step for Rustc { cargo.rustflag("-Cembed-bitcode=yes"); } RustcLto::ThinLocal => { /* Do nothing, this is the default */ } + RustcLto::Off => { + cargo.rustflag("-Clto=off"); + } + } + } else { + if builder.config.rust_lto == RustcLto::Off { + cargo.rustflag("-Clto=off"); } } @@ -729,13 +772,24 @@ impl Step for Rustc { cargo.arg("-p").arg(krate); } - builder.info(&format!( - "Building{} stage{} compiler artifacts ({} -> {})", - crate_description(&self.crates), - compiler.stage, - &compiler.host, - target, - )); + let msg = if compiler.host == target { + format!( + "Building{} compiler artifacts (stage{} -> stage{})", + crate_description(&self.crates), + compiler.stage, + compiler.stage + 1 + ) + } else { + format!( + "Building{} compiler artifacts (stage{}:{} -> stage{}:{})", + crate_description(&self.crates), + compiler.stage, + compiler.host, + compiler.stage + 1, + target, + ) + }; + builder.info(&msg); run_cargo( builder, cargo, @@ -919,10 +973,6 @@ impl Step for RustcLink { let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - builder.info(&format!( - "Copying stage{} rustc from stage{} ({} -> {} / {})", - target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target - )); add_to_sysroot( builder, &builder.sysroot_libdir(target_compiler, target), @@ -996,10 +1046,15 @@ impl Step for CodegenBackend { let tmp_stamp = out_dir.join(".tmp.stamp"); - builder.info(&format!( - "Building stage{} codegen backend {} ({} -> {})", - compiler.stage, backend, &compiler.host, target - )); + let msg = if compiler.host == target { + 
format!("Building stage{} codegen backend {}", compiler.stage, backend) + } else { + format!( + "Building stage{} codegen backend {} ({} -> {})", + compiler.stage, backend, compiler.host, target + ) + }; + builder.info(&msg); let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false); if builder.config.dry_run() { return; @@ -1305,7 +1360,12 @@ impl Step for Assemble { let stage = target_compiler.stage; let host = target_compiler.host; - builder.info(&format!("Assembling stage{} compiler ({})", stage, host)); + let msg = if build_compiler.host == host { + format!("Assembling stage{} compiler", stage) + } else { + format!("Assembling stage{} compiler ({})", stage, host) + }; + builder.info(&msg); // Link in all dylibs to the libdir let stamp = librustc_stamp(builder, build_compiler, target_compiler.host); diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index b41d60d51..05e742549 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -25,6 +25,7 @@ use crate::flags::{Color, Flags}; use crate::util::{exe, output, t}; use once_cell::sync::OnceCell; use serde::{Deserialize, Deserializer}; +use serde_derive::Deserialize; macro_rules! check_ci_llvm { ($name:expr) => { @@ -65,6 +66,7 @@ pub struct Config { pub verbose: usize, pub submodules: Option<bool>, pub compiler_docs: bool, + pub library_docs_private_items: bool, pub docs_minification: bool, pub docs: bool, pub locked_deps: bool, @@ -96,6 +98,10 @@ pub struct Config { pub cmd: Subcommand, pub incremental: bool, pub dry_run: DryRun, + /// Arguments appearing after `--` to be forwarded to tools, + /// e.g. `--fix-broken` or test arguments. + pub free_args: Vec<String>, + /// `None` if we shouldn't download CI compiler artifacts, or the commit to download if we should. #[cfg(not(test))] download_rustc_commit: Option<String>, @@ -168,6 +174,7 @@ pub struct Config { pub rust_profile_use: Option<String>, pub rust_profile_generate: Option<String>, pub rust_lto: RustcLto, + pub rust_validate_mir_opts: Option<u32>, pub llvm_profile_use: Option<String>, pub llvm_profile_generate: bool, pub llvm_libunwind_default: Option<LlvmLibunwind>, @@ -332,8 +339,9 @@ impl SplitDebuginfo { } /// LTO mode used for compiling rustc itself. -#[derive(Default, Clone)] +#[derive(Default, Clone, PartialEq)] pub enum RustcLto { + Off, #[default] ThinLocal, Thin, @@ -348,6 +356,7 @@ impl std::str::FromStr for RustcLto { "thin-local" => Ok(RustcLto::ThinLocal), "thin" => Ok(RustcLto::Thin), "fat" => Ok(RustcLto::Fat), + "off" => Ok(RustcLto::Off), _ => Err(format!("Invalid value for rustc LTO: {}", s)), } } @@ -606,6 +615,7 @@ define_config! { rustfmt: Option<PathBuf> = "rustfmt", docs: Option<bool> = "docs", compiler_docs: Option<bool> = "compiler-docs", + library_docs_private_items: Option<bool> = "library-docs-private-items", docs_minification: Option<bool> = "docs-minification", submodules: Option<bool> = "submodules", gdb: Option<String> = "gdb", @@ -762,6 +772,7 @@ define_config! 
{ // ignored; this is set from an env var set by bootstrap.py download_rustc: Option<StringOrBool> = "download-rustc", lto: Option<String> = "lto", + validate_mir_opts: Option<u32> = "validate-mir-opts", } } @@ -862,6 +873,7 @@ impl Config { config.keep_stage = flags.keep_stage; config.keep_stage_std = flags.keep_stage_std; config.color = flags.color; + config.free_args = flags.free_args.clone().unwrap_or_default(); if let Some(value) = flags.deny_warnings { config.deny_warnings = value; } @@ -965,6 +977,9 @@ impl Config { config.changelog_seen = toml.changelog_seen; let build = toml.build.unwrap_or_default(); + if let Some(file_build) = build.build { + config.build = TargetSelection::from_user(&file_build); + }; set(&mut config.out, flags.build_dir.or_else(|| build.build_dir.map(PathBuf::from))); // NOTE: Bootstrap spawns various commands with different working directories. @@ -1015,6 +1030,7 @@ impl Config { config.submodules = build.submodules; set(&mut config.low_priority, build.low_priority); set(&mut config.compiler_docs, build.compiler_docs); + set(&mut config.library_docs_private_items, build.library_docs_private_items); set(&mut config.docs_minification, build.docs_minification); set(&mut config.docs, build.docs); set(&mut config.locked_deps, build.locked_deps); @@ -1136,6 +1152,7 @@ impl Config { .as_deref() .map(|value| RustcLto::from_str(value).unwrap()) .unwrap_or_default(); + config.rust_validate_mir_opts = rust.validate_mir_opts; } else { config.rust_profile_use = flags.rust_profile_use; config.rust_profile_generate = flags.rust_profile_generate; @@ -1302,15 +1319,6 @@ impl Config { } else { RustfmtState::Unavailable }; - } else { - // If using a system toolchain for bootstrapping, see if that has rustfmt available. - let host = config.build; - let rustfmt_path = config.initial_rustc.with_file_name(exe("rustfmt", host)); - let bin_root = config.out.join(host.triple).join("stage0"); - if !rustfmt_path.starts_with(&bin_root) { - // Using a system-provided toolchain; we shouldn't download rustfmt. 
- *config.initial_rustfmt.borrow_mut() = RustfmtState::SystemToolchain(rustfmt_path); - } } // Now that we've reached the end of our configuration, infer the diff --git a/src/bootstrap/config/tests.rs b/src/bootstrap/config/tests.rs index c30c91317..5a105007f 100644 --- a/src/bootstrap/config/tests.rs +++ b/src/bootstrap/config/tests.rs @@ -11,6 +11,11 @@ fn parse(config: &str) -> Config { #[test] fn download_ci_llvm() { + if crate::native::is_ci_llvm_modified(&parse("")) { + eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); + return; + } + let parse_llvm = |s| parse(s).llvm_from_ci; let if_available = parse_llvm("llvm.download-ci-llvm = \"if-available\""); @@ -19,6 +24,13 @@ fn download_ci_llvm() { assert_eq!(parse_llvm(""), if_available); assert_eq!(parse_llvm("rust.channel = \"dev\""), if_available); assert!(!parse_llvm("rust.channel = \"stable\"")); + assert!(parse_llvm("build.build = \"x86_64-unknown-linux-gnu\"")); + assert!(parse_llvm( + "llvm.assertions = true \r\n build.build = \"x86_64-unknown-linux-gnu\" \r\n llvm.download-ci-llvm = \"if-available\"" + )); + assert!(!parse_llvm( + "llvm.assertions = true \r\n build.build = \"aarch64-apple-darwin\" \r\n llvm.download-ci-llvm = \"if-available\"" + )); } // FIXME: add test for detecting `src` and `out` diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 0af329e70..ab3d08292 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -379,8 +379,14 @@ cur_section = None sections[None] = [] section_order = [None] targets = {} +top_level_keys = [] for line in open(rust_dir + '/config.toml.example').read().split("\n"): + if cur_section == None: + if line.count('=') == 1: + top_level_key = line.split('=')[0] + top_level_key = top_level_key.strip(' #') + top_level_keys.append(top_level_key) if line.startswith('['): cur_section = line[1:-1] if cur_section.startswith('target'): @@ -436,6 +442,8 @@ def to_toml(value): return value else: return "'" + value + "'" + elif isinstance(value, dict): + return "{" + ", ".join(map(lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])), value.items())) + "}" else: raise RuntimeError('no toml') @@ -459,12 +467,22 @@ def configure_section(lines, config): raise RuntimeError("failed to find config line for {}".format(key)) -for section_key in config: - section_config = config[section_key] - if section_key not in sections: - raise RuntimeError("config key {} not in sections".format(section_key)) +def configure_top_level_key(lines, top_level_key, value): + for i, line in enumerate(lines): + if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '): + lines[i] = "{} = {}".format(top_level_key, value) + return - if section_key == 'target': + raise RuntimeError("failed to find config line for {}".format(top_level_key)) + + +for section_key, section_config in config.items(): + if section_key not in sections and section_key not in top_level_keys: + raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key)) + if section_key in top_level_keys: + configure_top_level_key(sections[None], section_key, section_config) + + elif section_key == 'target': for target in section_config: configure_section(targets[target], section_config[target]) else: diff --git a/src/bootstrap/defaults/config.compiler.toml b/src/bootstrap/defaults/config.compiler.toml index 2f4ccb825..b98b13119 100644 --- a/src/bootstrap/defaults/config.compiler.toml +++ 
b/src/bootstrap/defaults/config.compiler.toml @@ -12,6 +12,8 @@ debug-logging = true incremental = true # Print backtrace on internal compiler errors during bootstrap backtrace-on-ice = true +# Make the compiler and standard library faster to build, at the expense of a ~20% runtime slowdown. +lto = "off" [llvm] # Will download LLVM from CI if available on your platform. diff --git a/src/bootstrap/defaults/config.library.toml b/src/bootstrap/defaults/config.library.toml index 7bc054d3a..f362c4111 100644 --- a/src/bootstrap/defaults/config.library.toml +++ b/src/bootstrap/defaults/config.library.toml @@ -8,6 +8,8 @@ bench-stage = 0 [rust] # This greatly increases the speed of rebuilds, especially when there are only minor changes. However, it makes the initial build slightly slower. incremental = true +# Make the compiler and standard library faster to build, at the expense of a ~20% runtime slowdown. +lto = "off" [llvm] # Will download LLVM from CI if available on your platform. diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 6594b23c5..9b2b54961 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -12,6 +12,7 @@ use std::collections::HashSet; use std::env; use std::ffi::OsStr; use std::fs; +use std::io::Write; use std::path::{Path, PathBuf}; use std::process::Command; @@ -392,19 +393,29 @@ impl Step for Rustc { t!(fs::create_dir_all(image.join("bin"))); builder.cp_r(&src.join("bin"), &image.join("bin")); - builder.install(&builder.rustdoc(compiler), &image.join("bin"), 0o755); + if builder + .config + .tools + .as_ref() + .map_or(true, |tools| tools.iter().any(|tool| tool == "rustdoc")) + { + let rustdoc = builder.rustdoc(compiler); + builder.install(&rustdoc, &image.join("bin"), 0o755); + } - let ra_proc_macro_srv = builder - .ensure(tool::RustAnalyzerProcMacroSrv { + if let Some(ra_proc_macro_srv) = builder.ensure_if_default( + tool::RustAnalyzerProcMacroSrv { compiler: builder.compiler_for( compiler.stage, builder.config.build, compiler.host, ), target: compiler.host, - }) - .expect("rust-analyzer-proc-macro-server always builds"); - builder.install(&ra_proc_macro_srv, &image.join("libexec"), 0o755); + }, + builder.kind, + ) { + builder.install(&ra_proc_macro_srv, &image.join("libexec"), 0o755); + } let libdir_relative = builder.libdir_relative(compiler); @@ -743,7 +754,7 @@ impl Step for Analysis { }); } - /// Creates a tarball of save-analysis metadata, if available. + /// Creates a tarball of (degenerate) save-analysis metadata, if available. fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { let compiler = self.compiler; let target = self.target; @@ -751,7 +762,6 @@ impl Step for Analysis { return None; } - builder.ensure(compile::Std::new(compiler, target)); let src = builder .stage_out(compiler, Mode::Std) .join(target.triple) @@ -759,6 +769,13 @@ impl Step for Analysis { .join("deps") .join("save-analysis"); + // Write a file indicating that this component has been removed. + t!(std::fs::create_dir_all(&src)); + let mut removed = src.clone(); + removed.push("removed.json"); + let mut f = t!(std::fs::File::create(removed)); + t!(write!(f, r#"{{ "warning": "The `rust-analysis` component has been removed." 
}}"#)); + let mut tarball = Tarball::new(builder, "rust-analysis", &target.triple); tarball.include_target_in_component_name(true); tarball.add_dir(src, format!("lib/rustlib/{}/analysis", target.triple)); diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 9bad9046e..cc80763ef 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -62,6 +62,7 @@ macro_rules! book { target: self.target, name: INTERNER.intern_str($book_name), src: INTERNER.intern_path(builder.src.join($path)), + parent: Some(self), }) } } @@ -119,18 +120,20 @@ impl Step for UnstableBook { target: self.target, name: INTERNER.intern_str("unstable-book"), src: INTERNER.intern_path(builder.md_doc_out(self.target).join("unstable-book")), + parent: Some(self), }) } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -struct RustbookSrc { +struct RustbookSrc<P: Step> { target: TargetSelection, name: Interned<String>, src: Interned<PathBuf>, + parent: Option<P>, } -impl Step for RustbookSrc { +impl<P: Step> Step for RustbookSrc<P> { type Output = (); fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -152,13 +155,18 @@ impl Step for RustbookSrc { let index = out.join("index.html"); let rustbook = builder.tool_exe(Tool::Rustbook); let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); - if builder.config.dry_run() || up_to_date(&src, &index) && up_to_date(&rustbook, &index) { - return; + + if !builder.config.dry_run() && !(up_to_date(&src, &index) || up_to_date(&rustbook, &index)) + { + builder.info(&format!("Rustbook ({}) - {}", target, name)); + let _ = fs::remove_dir_all(&out); + + builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); } - builder.info(&format!("Rustbook ({}) - {}", target, name)); - let _ = fs::remove_dir_all(&out); - builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); + if self.parent.is_some() { + builder.maybe_open_in_browser::<P>(index) + } } } @@ -205,6 +213,7 @@ impl Step for TheBook { target, name: INTERNER.intern_str("book"), src: INTERNER.intern_path(builder.src.join(&relative_path)), + parent: Some(self), }); // building older edition redirects @@ -213,6 +222,9 @@ impl Step for TheBook { target, name: INTERNER.intern_string(format!("book/{}", edition)), src: INTERNER.intern_path(builder.src.join(&relative_path).join(edition)), + // There should only be one book that is marked as the parent for each target, so + // treat the other editions as not having a parent. 
+ parent: Option::<Self>::None, }); } @@ -228,10 +240,6 @@ impl Step for TheBook { invoke_rustdoc(builder, compiler, &shared_assets, target, path); } - - let out = builder.doc_out(target); - let index = out.join("book").join("index.html"); - builder.maybe_open_in_browser::<Self>(index); } } @@ -597,6 +605,9 @@ fn doc_std( .arg("--resource-suffix") .arg(&builder.version) .args(extra_args); + if builder.config.library_docs_private_items { + cargo.arg("--document-private-items").arg("--document-hidden-items"); + } builder.run(&mut cargo.into()); }; @@ -1029,10 +1040,7 @@ impl Step for RustcBook { target: self.target, name: INTERNER.intern_str("rustc"), src: INTERNER.intern_path(out_base), + parent: Some(self), }); - - let out = builder.doc_out(self.target); - let index = out.join("rustc").join("index.html"); - builder.maybe_open_in_browser::<Self>(index); } } diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index 6ae283f32..d1e2149d3 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -18,6 +18,8 @@ use crate::{ Config, }; +static SHOULD_FIX_BINS_AND_DYLIBS: OnceCell<bool> = OnceCell::new(); + /// Generic helpers that are useful anywhere in bootstrap. impl Config { pub fn is_verbose(&self) -> bool { @@ -70,53 +72,61 @@ impl Config { check_run(cmd, self.is_verbose()) } - /// Modifies the interpreter section of 'fname' to fix the dynamic linker, - /// or the RPATH section, to fix the dynamic library search path - /// - /// This is only required on NixOS and uses the PatchELF utility to - /// change the interpreter/RPATH of ELF executables. - /// - /// Please see https://nixos.org/patchelf.html for more information - fn fix_bin_or_dylib(&self, fname: &Path) { - // FIXME: cache NixOS detection? - match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() { - Err(_) => return, - Ok(output) if !output.status.success() => return, - Ok(output) => { - let mut s = output.stdout; - if s.last() == Some(&b'\n') { - s.pop(); - } - if s != b"Linux" { - return; + /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true + /// on NixOS + fn should_fix_bins_and_dylibs(&self) -> bool { + let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { + match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() { + Err(_) => return false, + Ok(output) if !output.status.success() => return false, + Ok(output) => { + let mut os_name = output.stdout; + if os_name.last() == Some(&b'\n') { + os_name.pop(); + } + if os_name != b"Linux" { + return false; + } } } - } - // If the user has asked binaries to be patched for Nix, then - // don't check for NixOS or `/lib`, just continue to the patching. - // NOTE: this intentionally comes after the Linux check: - // - patchelf only works with ELF files, so no need to run it on Mac or Windows - // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. - if !self.patch_binaries_for_nix { + // If the user has asked binaries to be patched for Nix, then + // don't check for NixOS or `/lib`. + // NOTE: this intentionally comes after the Linux check: + // - patchelf only works with ELF files, so no need to run it on Mac or Windows + // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. + if self.patch_binaries_for_nix { + return true; + } + // Use `/etc/os-release` instead of `/etc/NIXOS`. // The latter one does not exist on NixOS when using tmpfs as root. 
- const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""]; - let os_release = match File::open("/etc/os-release") { - Err(e) if e.kind() == ErrorKind::NotFound => return, + let is_nixos = match File::open("/etc/os-release") { + Err(e) if e.kind() == ErrorKind::NotFound => false, Err(e) => panic!("failed to access /etc/os-release: {}", e), - Ok(f) => f, + Ok(os_release) => BufReader::new(os_release).lines().any(|l| { + let l = l.expect("reading /etc/os-release"); + matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") + }), }; - if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) { - return; - } - if Path::new("/lib").exists() { - return; - } + is_nixos && !Path::new("/lib").exists() + }); + if val { + println!("info: You seem to be using Nix."); } + val + } - // At this point we're pretty sure the user is running NixOS or using Nix - println!("info: you seem to be using Nix. Attempting to patch {}", fname.display()); + /// Modifies the interpreter section of 'fname' to fix the dynamic linker, + /// or the RPATH section, to fix the dynamic library search path + /// + /// This is only required on NixOS and uses the PatchELF utility to + /// change the interpreter/RPATH of ELF executables. + /// + /// Please see https://nixos.org/patchelf.html for more information + fn fix_bin_or_dylib(&self, fname: &Path) { + assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); + println!("attempting to patch {}", fname.display()); // Only build `.nix-deps` once. static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new(); @@ -163,8 +173,7 @@ impl Config { // appear to have this (even when `../lib` is redundant). // NOTE: there are only two paths here, delimited by a `:` let mut entries = OsString::from("$ORIGIN/../lib:"); - entries.push(t!(fs::canonicalize(nix_deps_dir))); - entries.push("/lib"); + entries.push(t!(fs::canonicalize(nix_deps_dir)).join("lib")); entries }; patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]); @@ -211,10 +220,10 @@ impl Config { "--retry", "3", "-Sf", - "-o", ]); - curl.arg(tempfile); curl.arg(url); + let f = File::create(tempfile).unwrap(); + curl.stdout(Stdio::from(f)); if !self.check_run(&mut curl) { if self.build.contains("windows-msvc") { println!("Fallback to PowerShell"); @@ -318,18 +327,39 @@ impl Config { let channel = format!("{version}-{date}"); let host = self.build; - let rustfmt_path = self.initial_rustc.with_file_name(exe("rustfmt", host)); - let bin_root = self.out.join(host.triple).join("stage0"); + let bin_root = self.out.join(host.triple).join("rustfmt"); + let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { return Some(rustfmt_path); } - let filename = format!("rustfmt-{version}-{build}.tar.xz", build = host.triple); - self.download_component(DownloadSource::Dist, filename, "rustfmt-preview", &date, "stage0"); - - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); + self.download_component( + DownloadSource::Dist, + format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), + "rustfmt-preview", + &date, + "rustfmt", + ); + self.download_component( + DownloadSource::Dist, + format!("rustc-{version}-{build}.tar.xz", build = host.triple), + "rustc", + &date, + "rustfmt", + ); + + if self.should_fix_bins_and_dylibs() { + 
self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); + self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if lib.path().extension() == Some(OsStr::new("so")) { + self.fix_bin_or_dylib(&lib.path()); + } + } + } self.create(&rustfmt_stamp, &channel); Some(rustfmt_path) @@ -358,16 +388,21 @@ impl Config { let filename = format!("rust-src-{version}.tar.xz"); self.download_ci_component(filename, "rust-src", commit); - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc")); - self.fix_bin_or_dylib(&bin_root.join("libexec").join("rust-analyzer-proc-macro-srv")); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if lib.path().extension() == Some(OsStr::new("so")) { - self.fix_bin_or_dylib(&lib.path()); + if self.should_fix_bins_and_dylibs() { + self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc")); + self.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc")); + self.fix_bin_or_dylib( + &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), + ); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if lib.path().extension() == Some(OsStr::new("so")) { + self.fix_bin_or_dylib(&lib.path()); + } } } + t!(fs::write(rustc_stamp, commit)); } } @@ -459,8 +494,10 @@ impl Config { let key = format!("{}{}", llvm_sha, self.llvm_assertions); if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() { self.download_ci_llvm(&llvm_sha); - for entry in t!(fs::read_dir(llvm_root.join("bin"))) { - self.fix_bin_or_dylib(&t!(entry).path()); + if self.should_fix_bins_and_dylibs() { + for entry in t!(fs::read_dir(llvm_root.join("bin"))) { + self.fix_bin_or_dylib(&t!(entry).path()); + } } // Update the timestamp of llvm-config to force rustc_llvm to be @@ -475,13 +512,16 @@ impl Config { let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build)); t!(filetime::set_file_times(&llvm_config, now, now)); - let llvm_lib = llvm_root.join("lib"); - for entry in t!(fs::read_dir(&llvm_lib)) { - let lib = t!(entry).path(); - if lib.extension().map_or(false, |ext| ext == "so") { - self.fix_bin_or_dylib(&lib); + if self.should_fix_bins_and_dylibs() { + let llvm_lib = llvm_root.join("lib"); + for entry in t!(fs::read_dir(&llvm_lib)) { + let lib = t!(entry).path(); + if lib.extension().map_or(false, |ext| ext == "so") { + self.fix_bin_or_dylib(&lib); + } } } + t!(fs::write(llvm_stamp, key)); } } diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 52c3dc0bf..9d1504c34 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -80,6 +80,10 @@ pub struct Flags { pub llvm_profile_generate: bool, pub llvm_bolt_profile_generate: bool, pub llvm_bolt_profile_use: Option<String>, + + /// Arguments appearing after `--` to be forwarded to tools, + /// e.g. `--fix-broken` or test arguments. 
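The new `free_args` field holds everything after a bare `--` on the command line. Below is a rough standalone sketch of that split, with hypothetical names; the actual parsing happens in `Flags::parse` further down in this file:

```rust
/// Split a raw argument list at the first bare `--`: the front half is parsed
/// as x.py options, the back half is forwarded verbatim to tools and tests.
fn split_free_args(args: &[String]) -> (&[String], Option<Vec<String>>) {
    match args.iter().position(|arg| arg == "--") {
        Some(pos) => (&args[..pos], Some(args[pos + 1..].to_vec())),
        None => (args, None),
    }
}

fn main() {
    let raw: Vec<String> =
        ["test", "src/tools/miri", "--", "--quiet"].iter().map(|s| s.to_string()).collect();
    let (to_parse, free) = split_free_args(&raw);
    assert_eq!(to_parse, ["test", "src/tools/miri"]);
    assert_eq!(free, Some(vec!["--quiet".to_string()]));
    println!("forwarded: {free:?}");
}
```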
+ pub free_args: Option<Vec<String>>, } #[derive(Debug)] @@ -124,6 +128,7 @@ pub enum Subcommand { fail_fast: bool, doc_tests: DocTests, rustfix_coverage: bool, + only_modified: bool, }, Bench { paths: Vec<PathBuf>, @@ -156,6 +161,12 @@ impl Default for Subcommand { impl Flags { pub fn parse(args: &[String]) -> Flags { + let (args, free_args) = if let Some(pos) = args.iter().position(|s| s == "--") { + let (args, free) = args.split_at(pos); + (args, Some(free[1..].to_vec())) + } else { + (args, None) + }; let mut subcommand_help = String::from( "\ Usage: x.py <subcommand> [options] [<paths>...] @@ -301,6 +312,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`", opts.optflag("", "doc", "only run doc tests"); opts.optflag("", "bless", "update all stderr/stdout files of failing ui tests"); opts.optflag("", "force-rerun", "rerun tests even if the inputs are unchanged"); + opts.optflag("", "only-modified", "only run tests that result has been changed"); opts.optopt( "", "compare-mode", @@ -542,7 +554,8 @@ Arguments: Kind::Setup => { subcommand_help.push_str(&format!( "\n -x.py setup creates a `config.toml` which changes the defaults for x.py itself. +x.py setup creates a `config.toml` which changes the defaults for x.py itself, +as well as setting up a git pre-push hook, VS code config and toolchain link. Arguments: This subcommand accepts a 'profile' to use for builds. For example: @@ -552,7 +565,13 @@ Arguments: The profile is optional and you will be prompted interactively if it is not given. The following profiles are available: -{}", +{} + + To only set up the git hook, VS code or toolchain link, you may use + ./x.py setup hook + ./x.py setup vscode + ./x.py setup link +", Profile::all_for_help(" ").trim_end() )); } @@ -598,6 +617,7 @@ Arguments: rustc_args: matches.opt_strs("rustc-args"), fail_fast: !matches.opt_present("no-fail-fast"), rustfix_coverage: matches.opt_present("rustfix-coverage"), + only_modified: matches.opt_present("only-modified"), doc_tests: if matches.opt_present("doc") { DocTests::Only } else if matches.opt_present("no-doc") { @@ -625,7 +645,7 @@ Arguments: } Kind::Setup => { let profile = if paths.len() > 1 { - eprintln!("\nerror: At most one profile can be passed to setup\n"); + eprintln!("\nerror: At most one option can be passed to setup\n"); usage(1, &opts, verbose, &subcommand_help) } else if let Some(path) = paths.pop() { let profile_string = t!(path.into_os_string().into_string().map_err( @@ -706,6 +726,7 @@ Arguments: llvm_profile_generate: matches.opt_present("llvm-profile-generate"), llvm_bolt_profile_generate: matches.opt_present("llvm-bolt-profile-generate"), llvm_bolt_profile_use: matches.opt_str("llvm-bolt-profile-use"), + free_args, } } } @@ -777,6 +798,13 @@ impl Subcommand { } } + pub fn only_modified(&self) -> bool { + match *self { + Subcommand::Test { only_modified, .. } => only_modified, + _ => false, + } + } + pub fn force_rerun(&self) -> bool { match *self { Subcommand::Test { force_rerun, .. } => force_rerun, diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs index bfc57a85c..6d5753e8a 100644 --- a/src/bootstrap/format.rs +++ b/src/bootstrap/format.rs @@ -1,8 +1,8 @@ //! Runs rustfmt on the repository. 
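The hunk below swaps the hand-rolled `merge-base`/`diff-index` queries for `build_helper::git::get_git_modified_files`. The following is a rough sketch of the underlying idea only; the function name and signature here are assumptions for illustration, not the helper's real API:

```rust
use std::path::Path;
use std::process::Command;

/// List files changed relative to the merge base with an upstream ref,
/// keeping only those whose extension is in `exts`. Returns None if git fails.
fn modified_files_with_ext(repo: &Path, upstream: &str, exts: &[&str]) -> Option<Vec<String>> {
    let run = |args: &[&str]| -> Option<String> {
        let out = Command::new("git").current_dir(repo).args(args).output().ok()?;
        out.status.success().then(|| String::from_utf8_lossy(&out.stdout).into_owned())
    };
    let base = run(&["merge-base", upstream, "HEAD"])?;
    let diff = run(&["diff-index", "--name-only", base.trim()])?;
    Some(
        diff.lines()
            .map(str::trim)
            .filter(|f| {
                Path::new(f)
                    .extension()
                    .and_then(|e| e.to_str())
                    .map_or(false, |e| exts.contains(&e))
            })
            .map(|f| f.to_string())
            .collect(),
    )
}

fn main() {
    let files = modified_files_with_ext(Path::new("."), "origin/master", &["rs"]);
    println!("{files:?}");
}
```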
use crate::builder::Builder; -use crate::util::{output, output_result, program_out_of_date, t}; -use build_helper::git::updated_master_branch; +use crate::util::{output, program_out_of_date, t}; +use build_helper::git::get_git_modified_files; use ignore::WalkBuilder; use std::collections::VecDeque; use std::path::{Path, PathBuf}; @@ -80,26 +80,14 @@ fn update_rustfmt_version(build: &Builder<'_>) { /// /// Returns `None` if all files should be formatted. fn get_modified_rs_files(build: &Builder<'_>) -> Result<Option<Vec<String>>, String> { - let Ok(updated_master) = updated_master_branch(Some(&build.config.src)) else { return Ok(None); }; - if !verify_rustfmt_version(build) { return Ok(None); } - let merge_base = - output_result(build.config.git().arg("merge-base").arg(&updated_master).arg("HEAD"))?; - Ok(Some( - output_result( - build.config.git().arg("diff-index").arg("--name-only").arg(merge_base.trim()), - )? - .lines() - .map(|s| s.trim().to_owned()) - .filter(|f| Path::new(f).extension().map_or(false, |ext| ext == "rs")) - .collect(), - )) + get_git_modified_files(Some(&build.config.src), &vec!["rs"]) } -#[derive(serde::Deserialize)] +#[derive(serde_derive::Deserialize)] struct RustfmtConfig { ignore: Vec<String>, } @@ -205,10 +193,46 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { let (tx, rx): (SyncSender<PathBuf>, _) = std::sync::mpsc::sync_channel(128); let walker = match paths.get(0) { Some(first) => { - let mut walker = WalkBuilder::new(first); + let find_shortcut_candidates = |p: &PathBuf| { + let mut candidates = Vec::new(); + for candidate in WalkBuilder::new(src.clone()).max_depth(Some(3)).build() { + if let Ok(entry) = candidate { + if let Some(dir_name) = p.file_name() { + if entry.path().is_dir() && entry.file_name() == dir_name { + candidates.push(entry.into_path()); + } + } + } + } + candidates + }; + + // Only try to look for shortcut candidates for single component paths like + // `std` and not for e.g. relative paths like `../library/std`. + let should_look_for_shortcut_dir = |p: &PathBuf| p.components().count() == 1; + + let mut walker = if should_look_for_shortcut_dir(first) { + if let [single_candidate] = &find_shortcut_candidates(first)[..] { + WalkBuilder::new(single_candidate) + } else { + WalkBuilder::new(first) + } + } else { + WalkBuilder::new(src.join(first)) + }; + for path in &paths[1..] { - walker.add(path); + if should_look_for_shortcut_dir(path) { + if let [single_candidate] = &find_shortcut_candidates(path)[..] { + walker.add(single_candidate); + } else { + walker.add(path); + } + } else { + walker.add(src.join(path)); + } } + walker } None => WalkBuilder::new(src.clone()), diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index 1815a0973..ac3843c33 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -243,18 +243,6 @@ install!((self, builder, _config), ); } }; - Analysis, alias = "analysis", Self::should_build(_config), only_hosts: false, { - // `expect` should be safe, only None with host != build, but this - // only uses the `build` compiler - let tarball = builder.ensure(dist::Analysis { - // Find the actual compiler (handling the full bootstrap option) which - // produced the save-analysis data because that data isn't copied - // through the sysroot uplifting. 
- compiler: builder.compiler_for(builder.top_stage, builder.config.build, self.target), - target: self.target - }).expect("missing analysis"); - install_sh(builder, "analysis", self.compiler.stage, Some(self.target), &tarball); - }; Rustc, path = "compiler/rustc", true, only_hosts: true, { let tarball = builder.ensure(dist::Rustc { compiler: builder.compiler(builder.top_stage, self.target), diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 0474ab344..950f3b151 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -11,93 +11,6 @@ //! crates.io and Cargo. //! * A standard interface to build across all platforms, including MSVC //! -//! ## Architecture -//! -//! The build system defers most of the complicated logic managing invocations -//! of rustc and rustdoc to Cargo itself. However, moving through various stages -//! and copying artifacts is still necessary for it to do. Each time rustbuild -//! is invoked, it will iterate through the list of predefined steps and execute -//! each serially in turn if it matches the paths passed or is a default rule. -//! For each step rustbuild relies on the step internally being incremental and -//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded -//! to appropriate test harnesses and such. -//! -//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed -//! have its own parallelism and incremental management. Later steps, like -//! tests, aren't incremental and simply run the entire suite currently. -//! However, compiletest itself tries to avoid running tests when the artifacts -//! that are involved (mainly the compiler) haven't changed. -//! -//! When you execute `x.py build`, the steps executed are: -//! -//! * First, the python script is run. This will automatically download the -//! stage0 rustc and cargo according to `src/stage0.json`, or use the cached -//! versions if they're available. These are then used to compile rustbuild -//! itself (using Cargo). Finally, control is then transferred to rustbuild. -//! -//! * Rustbuild takes over, performs sanity checks, probes the environment, -//! reads configuration, and starts executing steps as it reads the command -//! line arguments (paths) or going through the default rules. -//! -//! The build output will be something like the following: -//! -//! Building stage0 std artifacts -//! Copying stage0 std -//! Building stage0 test artifacts -//! Copying stage0 test -//! Building stage0 compiler artifacts -//! Copying stage0 rustc -//! Assembling stage1 compiler -//! Building stage1 std artifacts -//! Copying stage1 std -//! Building stage1 test artifacts -//! Copying stage1 test -//! Building stage1 compiler artifacts -//! Copying stage1 rustc -//! Assembling stage2 compiler -//! Uplifting stage1 std -//! Uplifting stage1 test -//! Uplifting stage1 rustc -//! -//! Let's disect that a little: -//! -//! ## Building stage0 {std,test,compiler} artifacts -//! -//! These steps use the provided (downloaded, usually) compiler to compile the -//! local Rust source into libraries we can use. -//! -//! ## Copying stage0 {std,test,rustc} -//! -//! This copies the build output from Cargo into -//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's -//! documentation should be expanded -- the information already here may be -//! incorrect. -//! -//! ## Assembling stage1 compiler -//! -//! This copies the libraries we built in "building stage0 ... artifacts" into -//! the stage1 compiler's lib directory. 
These are the host libraries that the -//! compiler itself uses to run. These aren't actually used by artifacts the new -//! compiler generates. This step also copies the rustc and rustdoc binaries we -//! generated into build/$HOST/stage/bin. -//! -//! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have -//! any libraries to link built binaries or libraries to. The next 3 steps will -//! provide those libraries for it; they are mostly equivalent to constructing -//! the stage1/bin compiler so we don't go through them individually. -//! -//! ## Uplifting stage1 {std,test,rustc} -//! -//! This step copies the libraries from the stage1 compiler sysroot into the -//! stage2 compiler. This is done to avoid rebuilding the compiler; libraries -//! we'd build in this step should be identical (in function, if not necessarily -//! identical on disk) so there's no need to recompile the compiler again. Note -//! that if you want to, you can enable the full-bootstrap option to change this -//! behavior. -//! -//! Each step is driven by a separate Cargo project and rustbuild orchestrates -//! copying files between steps and otherwise preparing for Cargo to run. -//! //! ## Further information //! //! More documentation can be found in each respective module below, and you can @@ -110,7 +23,7 @@ use std::fs::{self, File}; use std::io; use std::io::ErrorKind; use std::path::{Path, PathBuf}; -use std::process::Command; +use std::process::{Command, Stdio}; use std::str; use build_helper::ci::CiEnv; @@ -203,7 +116,6 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] (None, "bootstrap", None), (Some(Mode::Rustc), "parallel_compiler", None), (Some(Mode::ToolRustc), "parallel_compiler", None), - (Some(Mode::ToolRustc), "emulate_second_only_system", None), (Some(Mode::Codegen), "parallel_compiler", None), (Some(Mode::Std), "stdarch_intel_sde", None), (Some(Mode::Std), "no_fp_fmt_parse", None), @@ -214,18 +126,9 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] (Some(Mode::Std), "backtrace_in_libstd", None), /* Extra values not defined in the built-in targets yet, but used in std */ (Some(Mode::Std), "target_env", Some(&["libnx"])), - (Some(Mode::Std), "target_os", Some(&["watchos"])), - ( - Some(Mode::Std), - "target_arch", - Some(&["asmjs", "spirv", "nvptx", "nvptx64", "le32", "xtensa"]), - ), + // (Some(Mode::Std), "target_os", Some(&[])), + (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])), /* Extra names used by dependencies */ - // FIXME: Used by rustfmt is their test but is invalid (neither cargo nor bootstrap ever set - // this config) should probably by removed or use a allow attribute. - (Some(Mode::ToolRustc), "release", None), - // FIXME: Used by stdarch in their test, should use a allow attribute instead. - (Some(Mode::Std), "dont_compile_me", None), // FIXME: Used by serde_json, but we should not be triggering on external dependencies. (Some(Mode::Rustc), "no_btreemap_remove_entry", None), (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None), @@ -235,8 +138,12 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies. (Some(Mode::Rustc), "span_locations", None), (Some(Mode::ToolRustc), "span_locations", None), - // Can be passed in RUSTFLAGS to prevent direct syscalls in rustix. 
- (None, "rustix_use_libc", None), + // FIXME: Used by rustix, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "rustix_use_libc", None), + (Some(Mode::ToolRustc), "rustix_use_libc", None), + // FIXME: Used by filetime, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "emulate_second_only_system", None), + (Some(Mode::ToolRustc), "emulate_second_only_system", None), ]; /// A structure representing a Rust compiler. @@ -663,12 +570,32 @@ impl Build { // Try passing `--progress` to start, then run git again without if that fails. let update = |progress: bool| { - let mut git = Command::new("git"); + // Git is buggy and will try to fetch submodules from the tracking branch for *this* repository, + // even though that has no relation to the upstream for the submodule. + let current_branch = { + let output = self + .config + .git() + .args(["symbolic-ref", "--short", "HEAD"]) + .stderr(Stdio::inherit()) + .output(); + let output = t!(output); + if output.status.success() { + Some(String::from_utf8(output.stdout).unwrap().trim().to_owned()) + } else { + None + } + }; + + let mut git = self.config.git(); + if let Some(branch) = current_branch { + git.arg("-c").arg(format!("branch.{branch}.remote=origin")); + } git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]); if progress { git.arg("--progress"); } - git.arg(relative_path).current_dir(&self.config.src); + git.arg(relative_path); git }; // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails. @@ -1432,6 +1359,14 @@ impl Build { return Vec::new(); } + if !stamp.exists() { + eprintln!( + "Error: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?", + stamp.display() + ); + crate::detail_exit(1); + } + let mut paths = Vec::new(); let contents = t!(fs::read(stamp), &stamp); // This is the method we use for extracting paths from the stamp file passed to us. 
See diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index e193e70a0..bba4d65e8 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use std::process::Command; -use serde::Deserialize; +use serde_derive::Deserialize; use crate::cache::INTERNER; use crate::util::output; diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index c823dc796..2e62c9507 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -7,7 +7,7 @@ use crate::builder::Step; use crate::util::t; use crate::Build; -use serde::{Deserialize, Serialize}; +use serde_derive::{Deserialize, Serialize}; use std::cell::RefCell; use std::fs::File; use std::io::BufWriter; diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index e0d1504c9..5987b641b 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -180,60 +180,60 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool { // https://doc.rust-lang.org/rustc/platform-support.html#tier-1 let supported_platforms = [ // tier 1 - "aarch64-unknown-linux-gnu", - "i686-pc-windows-gnu", - "i686-pc-windows-msvc", - "i686-unknown-linux-gnu", - "x86_64-unknown-linux-gnu", - "x86_64-apple-darwin", - "x86_64-pc-windows-gnu", - "x86_64-pc-windows-msvc", + ("aarch64-unknown-linux-gnu", false), + ("i686-pc-windows-gnu", false), + ("i686-pc-windows-msvc", false), + ("i686-unknown-linux-gnu", false), + ("x86_64-unknown-linux-gnu", true), + ("x86_64-apple-darwin", true), + ("x86_64-pc-windows-gnu", true), + ("x86_64-pc-windows-msvc", true), // tier 2 with host tools - "aarch64-apple-darwin", - "aarch64-pc-windows-msvc", - "aarch64-unknown-linux-musl", - "arm-unknown-linux-gnueabi", - "arm-unknown-linux-gnueabihf", - "armv7-unknown-linux-gnueabihf", - "mips-unknown-linux-gnu", - "mips64-unknown-linux-gnuabi64", - "mips64el-unknown-linux-gnuabi64", - "mipsel-unknown-linux-gnu", - "powerpc-unknown-linux-gnu", - "powerpc64-unknown-linux-gnu", - "powerpc64le-unknown-linux-gnu", - "riscv64gc-unknown-linux-gnu", - "s390x-unknown-linux-gnu", - "x86_64-unknown-freebsd", - "x86_64-unknown-illumos", - "x86_64-unknown-linux-musl", - "x86_64-unknown-netbsd", + ("aarch64-apple-darwin", false), + ("aarch64-pc-windows-msvc", false), + ("aarch64-unknown-linux-musl", false), + ("arm-unknown-linux-gnueabi", false), + ("arm-unknown-linux-gnueabihf", false), + ("armv7-unknown-linux-gnueabihf", false), + ("mips-unknown-linux-gnu", false), + ("mips64-unknown-linux-gnuabi64", false), + ("mips64el-unknown-linux-gnuabi64", false), + ("mipsel-unknown-linux-gnu", false), + ("powerpc-unknown-linux-gnu", false), + ("powerpc64-unknown-linux-gnu", false), + ("powerpc64le-unknown-linux-gnu", false), + ("riscv64gc-unknown-linux-gnu", false), + ("s390x-unknown-linux-gnu", false), + ("x86_64-unknown-freebsd", false), + ("x86_64-unknown-illumos", false), + ("x86_64-unknown-linux-musl", false), + ("x86_64-unknown-netbsd", false), ]; - if !supported_platforms.contains(&&*config.build.triple) { - return false; + + if !supported_platforms.contains(&(&*config.build.triple, asserts)) { + if asserts == true || !supported_platforms.contains(&(&*config.build.triple, true)) { + return false; + } } - let triple = &*config.build.triple; - if (triple == "aarch64-unknown-linux-gnu" || triple.contains("i686")) && asserts { - // No alt builder for aarch64-unknown-linux-gnu today. 
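In the new `(triple, bool)` form above, the bool records whether an "alt" CI build with LLVM assertions is also published for that triple. A distilled sketch of the resulting lookup, using a hypothetical helper name:

```rust
/// A plain CI LLVM build is available for every listed triple; an
/// assertions ("alt") build only where the second field is true.
fn ci_llvm_available(platforms: &[(&str, bool)], triple: &str, asserts: bool) -> bool {
    platforms.iter().any(|&(t, has_alt)| t == triple && (has_alt || !asserts))
}

fn main() {
    let platforms = [("x86_64-unknown-linux-gnu", true), ("aarch64-unknown-linux-gnu", false)];
    assert!(ci_llvm_available(&platforms, "x86_64-unknown-linux-gnu", true));
    assert!(ci_llvm_available(&platforms, "aarch64-unknown-linux-gnu", false));
    assert!(!ci_llvm_available(&platforms, "aarch64-unknown-linux-gnu", true));
    assert!(!ci_llvm_available(&platforms, "mips-unknown-haiku", false));
    println!("all checks passed");
}
```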
+ if is_ci_llvm_modified(config) { + eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); return false; } - if CiEnv::is_ci() { + true +} + +/// Returns true if we're running in CI with modified LLVM (and thus can't download it) +pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool { + CiEnv::is_ci() && { // We assume we have access to git, so it's okay to unconditionally pass // `true` here. let llvm_sha = detect_llvm_sha(config, true); let head_sha = output(config.git().arg("rev-parse").arg("HEAD")); let head_sha = head_sha.trim(); - if llvm_sha == head_sha { - eprintln!( - "Detected LLVM as non-available: running in CI and modified LLVM in this change" - ); - return false; - } + llvm_sha == head_sha } - - true } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] @@ -486,7 +486,7 @@ impl Step for Llvm { cfg.define("LLVM_VERSION_SUFFIX", suffix); } - configure_cmake(builder, target, &mut cfg, true, ldflags); + configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); configure_llvm(builder, target, &mut cfg); for (key, val) in &builder.config.llvm_build_config { @@ -564,11 +564,11 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) { let version = output(cmd.arg("--version")); let mut parts = version.split('.').take(2).filter_map(|s| s.parse::<u32>().ok()); if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { - if major >= 13 { + if major >= 14 { return; } } - panic!("\n\nbad LLVM version: {}, need >=13.0\n\n", version) + panic!("\n\nbad LLVM version: {}, need >=14.0\n\n", version) } fn configure_cmake( @@ -577,6 +577,7 @@ fn configure_cmake( cfg: &mut cmake::Config, use_compiler_launcher: bool, mut ldflags: LdFlags, + extra_compiler_flags: &[&str], ) { // Do not print installation messages for up-to-date files. // LLVM and LLD builds can produce a lot of those and hit CI limits on log size. @@ -717,6 +718,9 @@ fn configure_cmake( if builder.config.llvm_clang_cl.is_some() { cflags.push(&format!(" --target={}", target)); } + for flag in extra_compiler_flags { + cflags.push(&format!(" {}", flag)); + } cfg.define("CMAKE_C_FLAGS", cflags); let mut cxxflags: OsString = builder.cflags(target, GitRepo::Llvm, CLang::Cxx).join(" ").into(); if let Some(ref s) = builder.config.llvm_cxxflags { @@ -726,6 +730,9 @@ fn configure_cmake( if builder.config.llvm_clang_cl.is_some() { cxxflags.push(&format!(" --target={}", target)); } + for flag in extra_compiler_flags { + cxxflags.push(&format!(" {}", flag)); + } cfg.define("CMAKE_CXX_FLAGS", cxxflags); if let Some(ar) = builder.ar(target) { if ar.is_absolute() { @@ -867,7 +874,7 @@ impl Step for Lld { } } - configure_cmake(builder, target, &mut cfg, true, ldflags); + configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); configure_llvm(builder, target, &mut cfg); // Re-use the same flags as llvm to control the level of debug information @@ -1031,7 +1038,16 @@ impl Step for Sanitizers { // Unfortunately sccache currently lacks support to build them successfully. // Disable compiler launcher on Darwin targets to avoid potential issues. 
let use_compiler_launcher = !self.target.contains("apple-darwin"); - configure_cmake(builder, self.target, &mut cfg, use_compiler_launcher, LdFlags::default()); + let extra_compiler_flags: &[&str] = + if self.target.contains("apple") { &["-fembed-bitcode=off"] } else { &[] }; + configure_cmake( + builder, + self.target, + &mut cfg, + use_compiler_launcher, + LdFlags::default(), + extra_compiler_flags, + ); t!(fs::create_dir_all(&out_dir)); cfg.out_dir(out_dir); @@ -1087,12 +1103,15 @@ fn supported_sanitizers( match &*target.triple { "aarch64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]), + "aarch64-apple-ios" => darwin_libs("ios", &["asan", "tsan"]), + "aarch64-apple-ios-sim" => darwin_libs("iossim", &["asan", "tsan"]), "aarch64-unknown-fuchsia" => common_libs("fuchsia", "aarch64", &["asan"]), "aarch64-unknown-linux-gnu" => { common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"]) } "x86_64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]), "x86_64-unknown-fuchsia" => common_libs("fuchsia", "x86_64", &["asan"]), + "x86_64-apple-ios" => darwin_libs("iossim", &["asan", "tsan"]), "x86_64-unknown-freebsd" => common_libs("freebsd", "x86_64", &["asan", "msan", "tsan"]), "x86_64-unknown-netbsd" => { common_libs("netbsd", "x86_64", &["asan", "lsan", "msan", "tsan"]) @@ -1105,6 +1124,12 @@ fn supported_sanitizers( "x86_64-unknown-linux-musl" => { common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"]) } + "s390x-unknown-linux-gnu" => { + common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"]) + } + "s390x-unknown-linux-musl" => { + common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"]) + } _ => Vec::new(), } } diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs index e02808545..e14440f57 100644 --- a/src/bootstrap/run.rs +++ b/src/bootstrap/run.rs @@ -183,6 +183,7 @@ impl Step for Miri { // Forward arguments. miri.arg("--").arg("--target").arg(target.rustc_target_arg()); miri.args(builder.config.cmd.args()); + miri.args(&builder.config.free_args); // miri tests need to know about the stage sysroot miri.env("MIRI_SYSROOT", &miri_sysroot); diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs index 004601cb6..4480bce99 100644 --- a/src/bootstrap/setup.rs +++ b/src/bootstrap/setup.rs @@ -1,6 +1,7 @@ use crate::builder::{Builder, RunConfig, ShouldRun, Step}; use crate::Config; use crate::{t, VERSION}; +use sha2::Digest; use std::env::consts::EXE_SUFFIX; use std::fmt::Write as _; use std::fs::File; @@ -10,6 +11,9 @@ use std::process::Command; use std::str::FromStr; use std::{fmt, fs, io}; +#[cfg(test)] +mod tests; + #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] pub enum Profile { Compiler, @@ -17,8 +21,19 @@ pub enum Profile { Library, Tools, User, + None, } +/// A list of historical hashes of `src/etc/vscode_settings.json`. +/// New entries should be appended whenever this is updated so we can detect +/// outdated vs. user-modified settings files. 
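Here is a compact sketch of the classification these hashes enable, assuming the `sha2` and `hex` crates that bootstrap already depends on; the enum and function names are illustrative, and the real logic lives in `create_vscode_settings_maybe` below:

```rust
use sha2::Digest;

#[derive(Debug)]
enum SettingsState {
    Current,      // matches the newest known hash: nothing to do
    Outdated,     // matches an older known hash: safe to overwrite
    UserModified, // matches no known hash: back it up before replacing
}

/// Hash the on-disk settings file and compare it against every hash of the
/// versions x.py has ever shipped (oldest first, newest last).
fn classify(contents: &str, known_hashes: &[&str]) -> SettingsState {
    let (current, historical) = known_hashes.split_last().expect("at least one known hash");
    let hash = hex::encode(sha2::Sha256::digest(contents.as_bytes()).as_slice());
    if hash == *current {
        SettingsState::Current
    } else if historical.contains(&hash.as_str()) {
        SettingsState::Outdated
    } else {
        SettingsState::UserModified
    }
}

fn main() {
    // Stand-in digests, not the real entries from SETTINGS_HASHES.
    let known = ["aaaa", "bbbb"];
    println!("{:?}", classify("{ \"some.setting\": true }", &known));
}
```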
+static SETTINGS_HASHES: &[&str] = &[ + "ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8", + "56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922", + "af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0", +]; +static VSCODE_SETTINGS: &str = include_str!("../etc/vscode_settings.json"); + impl Profile { fn include_path(&self, src_path: &Path) -> PathBuf { PathBuf::from(format!("{}/src/bootstrap/defaults/config.{}.toml", src_path.display(), self)) @@ -27,7 +42,7 @@ impl Profile { pub fn all() -> impl Iterator<Item = Self> { use Profile::*; // N.B. these are ordered by how they are displayed, not alphabetically - [Library, Compiler, Codegen, Tools, User].iter().copied() + [Library, Compiler, Codegen, Tools, User, None].iter().copied() } pub fn purpose(&self) -> String { @@ -38,6 +53,7 @@ impl Profile { Codegen => "Contribute to the compiler, and also modify LLVM or codegen", Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. rustdoc, clippy, miri)", User => "Install Rust from source", + None => "Do not modify `config.toml`" } .to_string() } @@ -57,6 +73,7 @@ impl Profile { Profile::Library => "library", Profile::Tools => "tools", Profile::User => "user", + Profile::None => "none", } } } @@ -73,6 +90,7 @@ impl FromStr for Profile { "tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => { Ok(Profile::Tools) } + "none" => Ok(Profile::None), _ => Err(format!("unknown profile: '{}'", s)), } } @@ -130,17 +148,8 @@ impl Step for Profile { } pub fn setup(config: &Config, profile: Profile) { - let stage_path = - ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string()); - - if !rustup_installed() && profile != Profile::User { - eprintln!("`rustup` is not installed; cannot link `stage1` toolchain"); - } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() { - attempt_toolchain_link(&stage_path[..]); - } - - let suggestions = match profile { - Profile::Codegen | Profile::Compiler => &["check", "build", "test"][..], + let suggestions: &[&str] = match profile { + Profile::Codegen | Profile::Compiler | Profile::None => &["check", "build", "test"], Profile::Tools => &[ "check", "build", @@ -153,10 +162,6 @@ pub fn setup(config: &Config, profile: Profile) { Profile::User => &["dist", "build"], }; - if !config.dry_run() { - t!(install_git_hook_maybe(&config)); - } - println!(); println!("To get started, try one of the following commands:"); @@ -175,6 +180,9 @@ pub fn setup(config: &Config, profile: Profile) { } fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { + if profile == Profile::None { + return; + } if path.exists() { eprintln!(); eprintln!( @@ -202,6 +210,41 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { println!("`x.py` will now use the configuration at {}", include_path.display()); } +/// Creates a toolchain link for stage1 using `rustup` +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Link; +impl Step for Link { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("link") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] 
{ + if cmd.assert_single_path().path.as_path().as_os_str() == "link" { + run.builder.ensure(Link); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + let stage_path = + ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string()); + + if !rustup_installed() { + eprintln!("`rustup` is not installed; cannot link `stage1` toolchain"); + } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() { + attempt_toolchain_link(&stage_path[..]); + } + } +} + fn rustup_installed() -> bool { Command::new("rustup") .arg("--version") @@ -351,6 +394,63 @@ pub fn interactive_path() -> io::Result<Profile> { Ok(template) } +#[derive(PartialEq)] +enum PromptResult { + Yes, // y/Y/yes + No, // n/N/no + Print, // p/P/print +} + +/// Prompt a user for a answer, looping until they enter an accepted input or nothing +fn prompt_user(prompt: &str) -> io::Result<Option<PromptResult>> { + let mut input = String::new(); + loop { + print!("{prompt} "); + io::stdout().flush()?; + input.clear(); + io::stdin().read_line(&mut input)?; + match input.trim().to_lowercase().as_str() { + "y" | "yes" => return Ok(Some(PromptResult::Yes)), + "n" | "no" => return Ok(Some(PromptResult::No)), + "p" | "print" => return Ok(Some(PromptResult::Print)), + "" => return Ok(None), + _ => { + eprintln!("error: unrecognized option '{}'", input.trim()); + eprintln!("note: press Ctrl+C to exit"); + } + }; + } +} + +/// Installs `src/etc/pre-push.sh` as a Git hook +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Hook; + +impl Step for Hook { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("hook") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] { + if cmd.assert_single_path().path.as_path().as_os_str() == "hook" { + run.builder.ensure(Hook); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + t!(install_git_hook_maybe(&config)); + } +} + // install a git hook to automatically run tidy, if they want fn install_git_hook_maybe(config: &Config) -> io::Result<()> { let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| { @@ -363,43 +463,127 @@ fn install_git_hook_maybe(config: &Config) -> io::Result<()> { return Ok(()); } - let mut input = String::new(); - println!(); println!( - "Rust's CI will automatically fail if it doesn't pass `tidy`, the internal tool for ensuring code quality. + "\nRust's CI will automatically fail if it doesn't pass `tidy`, the internal tool for ensuring code quality. If you'd like, x.py can install a git hook for you that will automatically run `test tidy` before pushing your code to ensure your code is up to par. If you decide later that this behavior is undesirable, simply delete the `pre-push` file from .git/hooks." 
); - let should_install = loop { - print!("Would you like to install the git hook?: [y/N] "); - io::stdout().flush()?; - input.clear(); - io::stdin().read_line(&mut input)?; - break match input.trim().to_lowercase().as_str() { - "y" | "yes" => true, - "n" | "no" | "" => false, - _ => { - eprintln!("error: unrecognized option '{}'", input.trim()); - eprintln!("note: press Ctrl+C to exit"); - continue; - } - }; - }; - - if should_install { - let src = config.src.join("src").join("etc").join("pre-push.sh"); - match fs::hard_link(src, &dst) { - Err(e) => eprintln!( + if prompt_user("Would you like to install the git hook?: [y/N]")? != Some(PromptResult::Yes) { + println!("Ok, skipping installation!"); + return Ok(()); + } + let src = config.src.join("src").join("etc").join("pre-push.sh"); + match fs::hard_link(src, &dst) { + Err(e) => { + eprintln!( "error: could not create hook {}: do you already have the git hook installed?\n{}", dst.display(), e - ), - Ok(_) => println!("Linked `src/etc/pre-push.sh` to `.git/hooks/pre-push`"), + ); + return Err(e); + } + Ok(_) => println!("Linked `src/etc/pre-push.sh` to `.git/hooks/pre-push`"), + }; + Ok(()) +} + +/// Sets up or displays `src/etc/vscode_settings.json` +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Vscode; + +impl Step for Vscode { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("vscode") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] { + if cmd.assert_single_path().path.as_path().as_os_str() == "vscode" { + run.builder.ensure(Vscode); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + t!(create_vscode_settings_maybe(&config)); + } +} + +/// Create a `.vscode/settings.json` file for rustc development, or just print it +fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> { + let (current_hash, historical_hashes) = SETTINGS_HASHES.split_last().unwrap(); + let vscode_settings = config.src.join(".vscode").join("settings.json"); + // If None, no settings.json exists + // If Some(true), is a previous version of settings.json + // If Some(false), is not a previous version (i.e. user modified) + // If it's up to date we can just skip this + let mut mismatched_settings = None; + if let Ok(current) = fs::read_to_string(&vscode_settings) { + let mut hasher = sha2::Sha256::new(); + hasher.update(¤t); + let hash = hex::encode(hasher.finalize().as_slice()); + if hash == *current_hash { + return Ok(()); + } else if historical_hashes.contains(&hash.as_str()) { + mismatched_settings = Some(true); + } else { + mismatched_settings = Some(false); + } + } + println!( + "\nx.py can automatically install the recommended `.vscode/settings.json` file for rustc development" + ); + match mismatched_settings { + Some(true) => eprintln!( + "warning: existing `.vscode/settings.json` is out of date, x.py will update it" + ), + Some(false) => eprintln!( + "warning: existing `.vscode/settings.json` has been modified by user, x.py will back it up and replace it" + ), + _ => (), + } + let should_create = match prompt_user( + "Would you like to create/update `settings.json`, or only print suggested settings?: [y/p/N]", + )? 
{ + Some(PromptResult::Yes) => true, + Some(PromptResult::Print) => false, + _ => { + println!("Ok, skipping settings!"); + return Ok(()); + } + }; + if should_create { + let path = config.src.join(".vscode"); + if !path.exists() { + fs::create_dir(&path)?; + } + let verb = match mismatched_settings { + // exists but outdated, we can replace this + Some(true) => "Updated", + // exists but user modified, back it up + Some(false) => { + // exists and is not current version or outdated, so back it up + let mut backup = vscode_settings.clone(); + backup.set_extension("bak"); + eprintln!("warning: copying `settings.json` to `settings.json.bak`"); + fs::copy(&vscode_settings, &backup)?; + "Updated" + } + _ => "Created", }; + fs::write(&vscode_settings, &VSCODE_SETTINGS)?; + println!("{verb} `.vscode/settings.json`"); } else { - println!("Ok, skipping installation!"); + println!("\n{VSCODE_SETTINGS}"); } Ok(()) } diff --git a/src/bootstrap/setup/tests.rs b/src/bootstrap/setup/tests.rs new file mode 100644 index 000000000..dcf9d18e6 --- /dev/null +++ b/src/bootstrap/setup/tests.rs @@ -0,0 +1,14 @@ +use super::{SETTINGS_HASHES, VSCODE_SETTINGS}; +use sha2::Digest; + +#[test] +fn check_matching_settings_hash() { + let mut hasher = sha2::Sha256::new(); + hasher.update(&VSCODE_SETTINGS); + let hash = hex::encode(hasher.finalize().as_slice()); + assert_eq!( + &hash, + SETTINGS_HASHES.last().unwrap(), + "Update `SETTINGS_HASHES` with the new hash of `src/etc/vscode_settings.json`" + ); +} diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 6078e39ac..b4f1506dc 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -435,6 +435,10 @@ impl Step for Rustfmt { &[], ); + if !builder.fail_fast { + cargo.arg("--no-fail-fast"); + } + let dir = testdir(builder, compiler.host); t!(fs::create_dir_all(&dir)); cargo.env("RUSTFMT_TEST_DIR", dir); @@ -615,6 +619,10 @@ impl Step for Miri { ); cargo.add_rustc_lib_path(builder, compiler); + if !builder.fail_fast { + cargo.arg("--no-fail-fast"); + } + // miri tests need to know about the stage sysroot cargo.env("MIRI_SYSROOT", &miri_sysroot); cargo.env("MIRI_HOST_SYSROOT", sysroot); @@ -746,6 +754,10 @@ impl Step for Clippy { &[], ); + if !builder.fail_fast { + cargo.arg("--no-fail-fast"); + } + cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir()); @@ -1114,9 +1126,6 @@ impl Step for Tidy { cmd.arg("--bless"); } - builder.info("tidy check"); - try_run(builder, &mut cmd); - if builder.config.channel == "dev" || builder.config.channel == "nightly" { builder.info("fmt check"); if builder.initial_rustfmt().is_none() { @@ -1134,6 +1143,11 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy` } crate::format::format(&builder, !builder.config.cmd.bless(), &[]); } + + builder.info("tidy check"); + try_run(builder, &mut cmd); + + builder.ensure(ExpandYamlAnchors {}); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -1508,6 +1522,10 @@ note: if you're sure you want to do this, please open an issue as to why. 
In the if builder.config.rust_optimize_tests { cmd.arg("--optimize-tests"); } + if builder.config.cmd.only_modified() { + cmd.arg("--only-modified"); + } + let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); @@ -1582,6 +1600,7 @@ note: if you're sure you want to do this, please open an issue as to why. In the .collect(); test_args.append(&mut builder.config.cmd.test_args()); + test_args.extend(builder.config.free_args.iter().map(|s| s.as_str())); // On Windows, replace forward slashes in test-args by backslashes // so the correct filters are passed to libtest diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index 9a2100c2f..3c9a154da 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -33,6 +33,44 @@ struct ToolBuild { allow_features: &'static str, } +fn tooling_output( + mode: Mode, + tool: &str, + build_stage: u32, + host: &TargetSelection, + target: &TargetSelection, +) -> String { + match mode { + // depends on compiler stage, different to host compiler + Mode::ToolRustc => { + if host == target { + format!("Building tool {} (stage{} -> stage{})", tool, build_stage, build_stage + 1) + } else { + format!( + "Building tool {} (stage{}:{} -> stage{}:{})", + tool, + build_stage, + host, + build_stage + 1, + target + ) + } + } + // doesn't depend on compiler, same as host compiler + Mode::ToolStd => { + if host == target { + format!("Building tool {} (stage{})", tool, build_stage) + } else { + format!( + "Building tool {} (stage{}:{} -> stage{}:{})", + tool, build_stage, host, build_stage, target + ) + } + } + _ => format!("Building tool {} (stage{})", tool, build_stage), + } +} + impl Step for ToolBuild { type Output = Option<PathBuf>; @@ -74,8 +112,14 @@ impl Step for ToolBuild { if !self.allow_features.is_empty() { cargo.allow_features(self.allow_features); } - - builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target)); + let msg = tooling_output( + self.mode, + self.tool, + self.compiler.stage, + &self.compiler.host, + &self.target, + ); + builder.info(&msg); let mut duplicates = Vec::new(); let is_expected = compile::stream_cargo(builder, cargo, vec![], &mut |msg| { // Only care about big things like the RLS/Cargo for now @@ -551,7 +595,7 @@ impl Step for Rustdoc { features.push("jemalloc".to_string()); } - let cargo = prepare_tool_cargo( + let mut cargo = prepare_tool_cargo( builder, build_compiler, Mode::ToolRustc, @@ -562,10 +606,18 @@ impl Step for Rustdoc { features.as_slice(), ); - builder.info(&format!( - "Building rustdoc for stage{} ({})", - target_compiler.stage, target_compiler.host - )); + if builder.config.rustc_parallel { + cargo.rustflag("--cfg=parallel_compiler"); + } + + let msg = tooling_output( + Mode::ToolRustc, + "rustdoc", + build_compiler.stage, + &self.compiler.host, + &target, + ); + builder.info(&msg); builder.run(&mut cargo.into()); // Cargo adds a number of paths to the dylib search path on windows, which results in @@ -765,9 +817,15 @@ impl Step for RustAnalyzerProcMacroSrv { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; // Allow building `rust-analyzer-proc-macro-srv` both as part of the `rust-analyzer` and as a stand-alone tool. 
run.path("src/tools/rust-analyzer") .path("src/tools/rust-analyzer/crates/proc-macro-srv-cli") + .default_condition(builder.config.tools.as_ref().map_or(true, |tools| { + tools + .iter() + .any(|tool| tool == "rust-analyzer" || tool == "rust-analyzer-proc-macro-srv") + })) } fn make_run(run: RunConfig<'_>) { diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs index 1969e0b6f..7aab88a1a 100644 --- a/src/bootstrap/toolstate.rs +++ b/src/bootstrap/toolstate.rs @@ -1,6 +1,6 @@ use crate::builder::{Builder, RunConfig, ShouldRun, Step}; use crate::util::t; -use serde::{Deserialize, Serialize}; +use serde_derive::{Deserialize, Serialize}; use std::collections::HashMap; use std::env; use std::fmt; |
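Throughout these hunks the derive imports switch from `serde::{Deserialize, Serialize}` to `serde_derive::{Deserialize, Serialize}`, while the trait implementations themselves still come from `serde`. A tiny illustration with a hypothetical struct (not one of bootstrap's real types), assuming `serde`, `serde_derive`, and `serde_json` are available as dependencies:

```rust
use serde_derive::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct ToolStatus {
    tool: String,
    passing: bool,
}

fn main() {
    let status = ToolStatus { tool: "rustfmt".to_string(), passing: true };
    // serde_json keeps working unchanged: it only needs the serde traits,
    // which the serde_derive macros implement for this struct.
    let json = serde_json::to_string(&status).unwrap();
    let back: ToolStatus = serde_json::from_str(&json).unwrap();
    println!("{json} -> {back:?}");
}
```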