author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:11:38 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:13:23 +0000
commit | 20431706a863f92cb37dc512fef6e48d192aaf2c (patch)
tree | 2867f13f5fd5437ba628c67d7f87309ccadcd286 /src/tools/rust-analyzer/crates/project-model
parent | Releasing progress-linux version 1.65.0+dfsg1-2~progress7.99u1. (diff)
download | rustc-20431706a863f92cb37dc512fef6e48d192aaf2c.tar.xz, rustc-20431706a863f92cb37dc512fef6e48d192aaf2c.zip
Merging upstream version 1.66.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat: 9 files changed, 643 insertions, 304 deletions
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index bc75d6faa..cf9868740 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -13,11 +13,10 @@ doctest = false tracing = "0.1.35" rustc-hash = "1.1.0" cargo_metadata = "0.15.0" -semver = "1.0.10" +semver = "1.0.14" serde = { version = "1.0.137", features = ["derive"] } -serde_json = "1.0.81" -anyhow = "1.0.57" -expect-test = "1.4.0" +serde_json = "1.0.86" +anyhow = "1.0.62" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } cfg = { path = "../cfg", version = "0.0.0" } @@ -26,3 +25,6 @@ toolchain = { path = "../toolchain", version = "0.0.0" } paths = { path = "../paths", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } + +[dev-dependencies] +expect-test = "1.4.0" diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs index 84e772d16..a26a7c57a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs @@ -6,7 +6,12 @@ //! This module implements this second part. We use "build script" terminology //! here, but it covers procedural macros as well. -use std::{cell::RefCell, io, path::PathBuf, process::Command}; +use std::{ + cell::RefCell, + io, mem, + path::{self, PathBuf}, + process::Command, +}; use cargo_metadata::{camino::Utf8Path, Message}; use la_arena::ArenaMap; @@ -15,11 +20,14 @@ use rustc_hash::FxHashMap; use semver::Version; use serde::Deserialize; -use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package}; +use crate::{ + cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, + InvocationStrategy, Package, +}; #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct WorkspaceBuildScripts { - outputs: ArenaMap<Package, Option<BuildScriptOutput>>, + outputs: ArenaMap<Package, BuildScriptOutput>, error: Option<String>, } @@ -38,43 +46,71 @@ pub(crate) struct BuildScriptOutput { pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, } +impl BuildScriptOutput { + fn is_unchanged(&self) -> bool { + self.cfgs.is_empty() + && self.envs.is_empty() + && self.out_dir.is_none() + && self.proc_macro_dylib_path.is_none() + } +} + impl WorkspaceBuildScripts { - fn build_command(config: &CargoConfig) -> Command { - if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() { - let mut cmd = Command::new(program); - cmd.args(args); - return cmd; - } + fn build_command(config: &CargoConfig) -> io::Result<Command> { + let mut cmd = match config.run_build_script_command.as_deref() { + Some([program, args @ ..]) => { + let mut cmd = Command::new(program); + cmd.args(args); + cmd + } + _ => { + let mut cmd = Command::new(toolchain::cargo()); - let mut cmd = Command::new(toolchain::cargo()); + cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); - cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); + // --all-targets includes tests, benches and examples in addition to the + // default lib and bins. This is an independent concept from the --targets + // flag below. + cmd.arg("--all-targets"); - // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. 
This is an independent concept from the --targets - // flag below. - cmd.arg("--all-targets"); + if let Some(target) = &config.target { + cmd.args(&["--target", target]); + } - if let Some(target) = &config.target { - cmd.args(&["--target", target]); - } + match &config.features { + CargoFeatures::All => { + cmd.arg("--all-features"); + } + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + cmd.arg("--no-default-features"); + } + if !features.is_empty() { + cmd.arg("--features"); + cmd.arg(features.join(" ")); + } + } + } - if config.all_features { - cmd.arg("--all-features"); - } else { - if config.no_default_features { - cmd.arg("--no-default-features"); - } - if !config.features.is_empty() { - cmd.arg("--features"); - cmd.arg(config.features.join(" ")); + cmd } + }; + + cmd.envs(&config.extra_env); + if config.wrap_rustc_in_build_scripts { + // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use + // that to compile only proc macros and build scripts during the initial + // `cargo check`. + let myself = std::env::current_exe()?; + cmd.env("RUSTC_WRAPPER", myself); + cmd.env("RA_RUSTC_WRAPPER", "1"); } - cmd + Ok(cmd) } - pub(crate) fn run( + /// Runs the build scripts for the given workspace + pub(crate) fn run_for_workspace( config: &CargoConfig, workspace: &CargoWorkspace, progress: &dyn Fn(String), @@ -82,15 +118,23 @@ impl WorkspaceBuildScripts { ) -> io::Result<WorkspaceBuildScripts> { const RUST_1_62: Version = Version::new(1, 62, 0); - match Self::run_(Self::build_command(config), config, workspace, progress) { + let current_dir = match &config.invocation_location { + InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { + root.as_path() + } + _ => &workspace.workspace_root(), + } + .as_ref(); + + match Self::run_per_ws(Self::build_command(config)?, workspace, current_dir, progress) { Ok(WorkspaceBuildScripts { error: Some(error), .. }) if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data - let mut cmd = Self::build_command(config); + let mut cmd = Self::build_command(config)?; cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); - let mut res = Self::run_(cmd, config, workspace, progress)?; + let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) } @@ -98,23 +142,90 @@ impl WorkspaceBuildScripts { } } - fn run_( - mut cmd: Command, + /// Runs the build scripts by invoking the configured command *once*. + /// This populates the outputs for all passed in workspaces. + pub(crate) fn run_once( config: &CargoConfig, - workspace: &CargoWorkspace, + workspaces: &[&CargoWorkspace], progress: &dyn Fn(String), - ) -> io::Result<WorkspaceBuildScripts> { - if config.wrap_rustc_in_build_scripts { - // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use - // that to compile only proc macros and build scripts during the initial - // `cargo check`. 
- let myself = std::env::current_exe()?; - cmd.env("RUSTC_WRAPPER", myself); - cmd.env("RA_RUSTC_WRAPPER", "1"); + ) -> io::Result<Vec<WorkspaceBuildScripts>> { + assert_eq!(config.invocation_strategy, InvocationStrategy::Once); + + let current_dir = match &config.invocation_location { + InvocationLocation::Root(root) => root, + InvocationLocation::Workspace => { + return Err(io::Error::new( + io::ErrorKind::Other, + "Cannot run build scripts from workspace with invocation strategy `once`", + )) + } + }; + let cmd = Self::build_command(config)?; + // NB: Cargo.toml could have been modified between `cargo metadata` and + // `cargo check`. We shouldn't assume that package ids we see here are + // exactly those from `config`. + let mut by_id = FxHashMap::default(); + // some workspaces might depend on the same crates, so we need to duplicate the outputs + // to those collisions + let mut collisions = Vec::new(); + let mut res: Vec<_> = workspaces + .iter() + .enumerate() + .map(|(idx, workspace)| { + let mut res = WorkspaceBuildScripts::default(); + for package in workspace.packages() { + res.outputs.insert(package, BuildScriptOutput::default()); + if by_id.contains_key(&workspace[package].id) { + collisions.push((&workspace[package].id, idx, package)); + } else { + by_id.insert(workspace[package].id.clone(), (package, idx)); + } + } + res + }) + .collect(); + + let errors = Self::run_command( + cmd, + current_dir.as_path().as_ref(), + |package, cb| { + if let Some(&(package, workspace)) = by_id.get(package) { + cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); + } + }, + progress, + )?; + res.iter_mut().for_each(|it| it.error = errors.clone()); + collisions.into_iter().for_each(|(id, workspace, package)| { + if let Some(&(p, w)) = by_id.get(id) { + res[workspace].outputs[package] = res[w].outputs[p].clone(); + } + }); + + if tracing::enabled!(tracing::Level::INFO) { + for (idx, workspace) in workspaces.iter().enumerate() { + for package in workspace.packages() { + let package_build_data = &mut res[idx].outputs[package]; + if !package_build_data.is_unchanged() { + tracing::info!( + "{}: {:?}", + workspace[package].manifest.parent().display(), + package_build_data, + ); + } + } + } } - cmd.current_dir(workspace.workspace_root()); + Ok(res) + } + fn run_per_ws( + cmd: Command, + workspace: &CargoWorkspace, + current_dir: &path::Path, + progress: &dyn Fn(String), + ) -> io::Result<WorkspaceBuildScripts> { let mut res = WorkspaceBuildScripts::default(); let outputs = &mut res.outputs; // NB: Cargo.toml could have been modified between `cargo metadata` and @@ -122,10 +233,46 @@ impl WorkspaceBuildScripts { // exactly those from `config`. 
let mut by_id: FxHashMap<String, Package> = FxHashMap::default(); for package in workspace.packages() { - outputs.insert(package, None); + outputs.insert(package, BuildScriptOutput::default()); by_id.insert(workspace[package].id.clone(), package); } + res.error = Self::run_command( + cmd, + current_dir, + |package, cb| { + if let Some(&package) = by_id.get(package) { + cb(&workspace[package].name, &mut outputs[package]); + } + }, + progress, + )?; + + if tracing::enabled!(tracing::Level::INFO) { + for package in workspace.packages() { + let package_build_data = &mut outputs[package]; + if !package_build_data.is_unchanged() { + tracing::info!( + "{}: {:?}", + workspace[package].manifest.parent().display(), + package_build_data, + ); + } + } + } + + Ok(res) + } + + fn run_command( + mut cmd: Command, + current_dir: &path::Path, + // ideally this would be something like: + // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)), + // but owned trait objects aren't a thing + mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)), + progress: &dyn Fn(String), + ) -> io::Result<Option<String>> { let errors = RefCell::new(String::new()); let push_err = |err: &str| { let mut e = errors.borrow_mut(); @@ -133,7 +280,8 @@ impl WorkspaceBuildScripts { e.push('\n'); }; - tracing::info!("Running build scripts: {:?}", cmd); + tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd); + cmd.current_dir(current_dir); let output = stdx::process::spawn_with_streaming_output( cmd, &mut |line| { @@ -145,59 +293,58 @@ impl WorkspaceBuildScripts { .unwrap_or_else(|_| Message::TextLine(line.to_string())); match message { - Message::BuildScriptExecuted(message) => { - let package = match by_id.get(&message.package_id.repr) { - Some(&it) => it, - None => return, - }; - let cfgs = { - let mut acc = Vec::new(); - for cfg in message.cfgs { - match cfg.parse::<CfgFlag>() { - Ok(it) => acc.push(it), - Err(err) => { - push_err(&format!( - "invalid cfg from cargo-metadata: {}", - err - )); - return; - } - }; + Message::BuildScriptExecuted(mut message) => { + with_output_for(&message.package_id.repr, &mut |name, data| { + progress(format!("running build-script: {}", name)); + let cfgs = { + let mut acc = Vec::new(); + for cfg in &message.cfgs { + match cfg.parse::<CfgFlag>() { + Ok(it) => acc.push(it), + Err(err) => { + push_err(&format!( + "invalid cfg from cargo-metadata: {}", + err + )); + return; + } + }; + } + acc + }; + if !message.env.is_empty() { + data.envs = mem::take(&mut message.env); } - acc - }; - // cargo_metadata crate returns default (empty) path for - // older cargos, which is not absolute, so work around that. - let out_dir = message.out_dir.into_os_string(); - if !out_dir.is_empty() { - let data = outputs[package].get_or_insert_with(Default::default); - data.out_dir = Some(AbsPathBuf::assert(PathBuf::from(out_dir))); - data.cfgs = cfgs; - } - if !message.env.is_empty() { - outputs[package].get_or_insert_with(Default::default).envs = - message.env; - } + // cargo_metadata crate returns default (empty) path for + // older cargos, which is not absolute, so work around that. + let out_dir = mem::take(&mut message.out_dir).into_os_string(); + if !out_dir.is_empty() { + let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir)); + // inject_cargo_env(package, package_build_data); + // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! 
and option_env!() + if let Some(out_dir) = + out_dir.as_os_str().to_str().map(|s| s.to_owned()) + { + data.envs.push(("OUT_DIR".to_string(), out_dir)); + } + data.out_dir = Some(out_dir); + data.cfgs = cfgs; + } + }); } Message::CompilerArtifact(message) => { - let package = match by_id.get(&message.package_id.repr) { - Some(it) => *it, - None => return, - }; - - progress(format!("metadata {}", message.target.name)); - - if message.target.kind.iter().any(|k| k == "proc-macro") { - // Skip rmeta file - if let Some(filename) = - message.filenames.iter().find(|name| is_dylib(name)) - { - let filename = AbsPathBuf::assert(PathBuf::from(&filename)); - outputs[package] - .get_or_insert_with(Default::default) - .proc_macro_dylib_path = Some(filename); + with_output_for(&message.package_id.repr, &mut |name, data| { + progress(format!("building proc-macros: {}", name)); + if message.target.kind.iter().any(|k| k == "proc-macro") { + // Skip rmeta file + if let Some(filename) = + message.filenames.iter().find(|name| is_dylib(name)) + { + let filename = AbsPathBuf::assert(PathBuf::from(&filename)); + data.proc_macro_dylib_path = Some(filename); + } } - } + }); } Message::CompilerMessage(message) => { progress(message.target.name); @@ -216,32 +363,13 @@ impl WorkspaceBuildScripts { }, )?; - for package in workspace.packages() { - if let Some(package_build_data) = &mut outputs[package] { - tracing::info!( - "{}: {:?}", - workspace[package].manifest.parent().display(), - package_build_data, - ); - // inject_cargo_env(package, package_build_data); - if let Some(out_dir) = &package_build_data.out_dir { - // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() - if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) { - package_build_data.envs.push(("OUT_DIR".to_string(), out_dir)); - } - } - } - } - - let mut errors = errors.into_inner(); - if !output.status.success() { - if errors.is_empty() { - errors = "cargo check failed".to_string(); - } - res.error = Some(errors); - } - - Ok(res) + let errors = if !output.status.success() { + let errors = errors.into_inner(); + Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors }) + } else { + None + }; + Ok(errors) } pub fn error(&self) -> Option<&str> { @@ -249,11 +377,11 @@ impl WorkspaceBuildScripts { } pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { - self.outputs.get(idx)?.as_ref() + self.outputs.get(idx) } } -// FIXME: File a better way to know if it is a dylib. +// FIXME: Find a better way to know if it is a dylib. fn is_dylib(path: &Utf8Path) -> bool { match path.extension().map(|e| e.to_string().to_lowercase()) { None => false, diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index eed955b42..b4c2ba436 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -2,6 +2,7 @@ use std::iter; use std::path::PathBuf; +use std::str::from_utf8; use std::{ops, process::Command}; use anyhow::{Context, Result}; @@ -13,8 +14,8 @@ use rustc_hash::FxHashMap; use serde::Deserialize; use serde_json::from_value; -use crate::CfgOverrides; -use crate::{utf8_stdout, ManifestPath}; +use crate::{utf8_stdout, InvocationLocation, ManifestPath}; +use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// workspace. 
It pretty closely mirrors `cargo metadata` output. @@ -70,34 +71,43 @@ impl Default for UnsetTestCrates { } } -#[derive(Default, Clone, Debug, PartialEq, Eq)] -pub struct CargoConfig { - /// Do not activate the `default` feature. - pub no_default_features: bool, +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum CargoFeatures { + All, + Selected { + /// List of features to activate. + features: Vec<String>, + /// Do not activate the `default` feature. + no_default_features: bool, + }, +} - /// Activate all available features - pub all_features: bool, +impl Default for CargoFeatures { + fn default() -> Self { + CargoFeatures::Selected { features: vec![], no_default_features: false } + } +} +#[derive(Default, Clone, Debug, PartialEq, Eq)] +pub struct CargoConfig { /// List of features to activate. - /// This will be ignored if `cargo_all_features` is true. - pub features: Vec<String>, - + pub features: CargoFeatures, /// rustc target pub target: Option<String>, - - /// Don't load sysroot crates (`std`, `core` & friends). Might be useful - /// when debugging isolated issues. - pub no_sysroot: bool, - + /// Sysroot loading behavior + pub sysroot: Option<RustcSource>, /// rustc private crate source pub rustc_source: Option<RustcSource>, - /// crates to disable `#[cfg(test)]` on pub unset_test_crates: UnsetTestCrates, - + /// Invoke `cargo check` through the RUSTC_WRAPPER. pub wrap_rustc_in_build_scripts: bool, - + /// The command to run instead of `cargo check` for building build scripts. pub run_build_script_command: Option<Vec<String>>, + /// Extra env vars to set when invoking the cargo command + pub extra_env: FxHashMap<String, String>, + pub invocation_strategy: InvocationStrategy, + pub invocation_location: InvocationLocation, } impl CargoConfig { @@ -140,7 +150,7 @@ pub struct PackageData { pub targets: Vec<Target>, /// Does this package come from the local filesystem (and is editable)? 
pub is_local: bool, - // Whether this package is a member of the workspace + /// Whether this package is a member of the workspace pub is_member: bool, /// List of packages this package depends on pub dependencies: Vec<PackageDependency>, @@ -246,8 +256,8 @@ impl TargetKind { } } +// Deserialize helper for the cargo metadata #[derive(Deserialize, Default)] -// Deserialise helper for the cargo metadata struct PackageMetadata { #[serde(rename = "rust-analyzer")] rust_analyzer: Option<RustAnalyzerPackageMetaData>, @@ -263,22 +273,23 @@ impl CargoWorkspace { let target = config .target .clone() - .or_else(|| cargo_config_build_target(cargo_toml)) - .or_else(|| rustc_discover_host_triple(cargo_toml)); + .or_else(|| cargo_config_build_target(cargo_toml, &config.extra_env)) + .or_else(|| rustc_discover_host_triple(cargo_toml, &config.extra_env)); let mut meta = MetadataCommand::new(); meta.cargo_path(toolchain::cargo()); meta.manifest_path(cargo_toml.to_path_buf()); - if config.all_features { - meta.features(CargoOpt::AllFeatures); - } else { - if config.no_default_features { - // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` - // https://github.com/oli-obk/cargo_metadata/issues/79 - meta.features(CargoOpt::NoDefaultFeatures); + match &config.features { + CargoFeatures::All => { + meta.features(CargoOpt::AllFeatures); } - if !config.features.is_empty() { - meta.features(CargoOpt::SomeFeatures(config.features.clone())); + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + meta.features(CargoOpt::NoDefaultFeatures); + } + if !features.is_empty() { + meta.features(CargoOpt::SomeFeatures(features.clone())); + } } } meta.current_dir(current_dir.as_os_str()); @@ -292,10 +303,22 @@ impl CargoWorkspace { // unclear whether cargo itself supports it. progress("metadata".to_string()); - let meta = - meta.exec().with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?; - - Ok(meta) + (|| -> Result<cargo_metadata::Metadata, cargo_metadata::Error> { + let mut command = meta.cargo_command(); + command.envs(&config.extra_env); + let output = command.output()?; + if !output.status.success() { + return Err(cargo_metadata::Error::CargoMetadata { + stderr: String::from_utf8(output.stderr)?, + }); + } + let stdout = from_utf8(&output.stdout)? + .lines() + .find(|line| line.starts_with('{')) + .ok_or(cargo_metadata::Error::NoJson)?; + cargo_metadata::MetadataCommand::parse(stdout) + })() + .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command())) } pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace { @@ -306,18 +329,21 @@ impl CargoWorkspace { let ws_members = &meta.workspace_members; meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); - for meta_pkg in &meta.packages { + for meta_pkg in meta.packages { let cargo_metadata::Package { - id, - edition, name, - manifest_path, version, - metadata, + id, + source, + targets: meta_targets, + features, + manifest_path, repository, + edition, + metadata, .. } = meta_pkg; - let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default(); + let meta = from_value::<PackageMetadata>(metadata).unwrap_or_default(); let edition = match edition { cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2018 => Edition::Edition2018, @@ -329,67 +355,50 @@ impl CargoWorkspace { }; // We treat packages without source as "local" packages. That includes all members of // the current workspace, as well as any path dependency outside the workspace. 
- let is_local = meta_pkg.source.is_none(); - let is_member = ws_members.contains(id); + let is_local = source.is_none(); + let is_member = ws_members.contains(&id); let pkg = packages.alloc(PackageData { id: id.repr.clone(), - name: name.clone(), - version: version.clone(), - manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(), + name, + version, + manifest: AbsPathBuf::assert(manifest_path.into()).try_into().unwrap(), targets: Vec::new(), is_local, is_member, edition, - repository: repository.clone(), + repository, dependencies: Vec::new(), - features: meta_pkg.features.clone().into_iter().collect(), + features: features.into_iter().collect(), active_features: Vec::new(), metadata: meta.rust_analyzer.unwrap_or_default(), }); let pkg_data = &mut packages[pkg]; pkg_by_id.insert(id, pkg); - for meta_tgt in &meta_pkg.targets { - let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; + for meta_tgt in meta_targets { + let cargo_metadata::Target { name, kind, required_features, src_path, .. } = + meta_tgt; let tgt = targets.alloc(TargetData { package: pkg, - name: meta_tgt.name.clone(), - root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)), - kind: TargetKind::new(meta_tgt.kind.as_slice()), - is_proc_macro, - required_features: meta_tgt.required_features.clone(), + name, + root: AbsPathBuf::assert(src_path.into()), + kind: TargetKind::new(&kind), + is_proc_macro: &*kind == ["proc-macro"], + required_features, }); pkg_data.targets.push(tgt); } } let resolve = meta.resolve.expect("metadata executed with deps"); for mut node in resolve.nodes { - let source = match pkg_by_id.get(&node.id) { - Some(&src) => src, - // FIXME: replace this and a similar branch below with `.unwrap`, once - // https://github.com/rust-lang/cargo/issues/7841 - // is fixed and hits stable (around 1.43-is probably?). 
- None => { - tracing::error!("Node id do not match in cargo metadata, ignoring {}", node.id); - continue; - } - }; + let &source = pkg_by_id.get(&node.id).unwrap(); node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); - for (dep_node, kind) in node + let dependencies = node .deps .iter() - .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind))) - { - let pkg = match pkg_by_id.get(&dep_node.pkg) { - Some(&pkg) => pkg, - None => { - tracing::error!( - "Dep node id do not match in cargo metadata, ignoring {}", - dep_node.pkg - ); - continue; - } - }; + .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind))); + for (dep_node, kind) in dependencies { + let &pkg = pkg_by_id.get(&dep_node.pkg).unwrap(); let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind }; packages[source].dependencies.push(dep); } @@ -434,10 +443,7 @@ impl CargoWorkspace { found = true } self[pkg].dependencies.iter().find_map(|dep| { - if &self[dep.pkg].manifest == manifest_path { - return Some(self[pkg].manifest.clone()); - } - None + (&self[dep.pkg].manifest == manifest_path).then(|| self[pkg].manifest.clone()) }) }) .collect::<Vec<ManifestPath>>(); @@ -463,8 +469,12 @@ impl CargoWorkspace { } } -fn rustc_discover_host_triple(cargo_toml: &ManifestPath) -> Option<String> { +fn rustc_discover_host_triple( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap<String, String>, +) -> Option<String> { let mut rustc = Command::new(toolchain::rustc()); + rustc.envs(extra_env); rustc.current_dir(cargo_toml.parent()).arg("-vV"); tracing::debug!("Discovering host platform by {:?}", rustc); match utf8_stdout(rustc) { @@ -486,8 +496,12 @@ fn rustc_discover_host_triple(cargo_toml: &ManifestPath) -> Option<String> { } } -fn cargo_config_build_target(cargo_toml: &ManifestPath) -> Option<String> { +fn cargo_config_build_target( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap<String, String>, +) -> Option<String> { let mut cargo_config = Command::new(toolchain::cargo()); + cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) .args(&["-Z", "unstable-options", "config", "get", "build.target"]) diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index b81b7432f..575581fa5 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -42,8 +42,8 @@ use rustc_hash::FxHashSet; pub use crate::{ build_scripts::WorkspaceBuildScripts, cargo_workspace::{ - CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target, - TargetData, TargetKind, UnsetTestCrates, + CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency, + RustcSource, Target, TargetData, TargetKind, UnsetTestCrates, }, manifest_path::ManifestPath, project_json::{ProjectJson, ProjectJsonData}, @@ -67,7 +67,7 @@ impl ProjectManifest { if path.file_name().unwrap_or_default() == "Cargo.toml" { return Ok(ProjectManifest::CargoToml(path)); } - bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) + bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()); } pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> { @@ -78,7 +78,7 @@ impl ProjectManifest { }; if !candidates.is_empty() { - bail!("more than one project") + bail!("more than one project"); } Ok(res) } @@ -157,3 +157,17 @@ fn utf8_stdout(mut cmd: Command) -> Result<String> { let stdout 
= String::from_utf8(output.stdout)?; Ok(stdout.trim().to_string()) } + +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationStrategy { + Once, + #[default] + PerWorkspace, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationLocation { + Root(AbsPathBuf), + #[default] + Workspace, +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 63d1d0ace..5133a14d5 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -110,14 +110,17 @@ impl ProjectJson { .collect::<Vec<_>>(), } } + /// Returns the number of crates in the project. pub fn n_crates(&self) -> usize { self.crates.len() } + /// Returns an iterator over the crates in the project. pub fn crates(&self) -> impl Iterator<Item = (CrateId, &Crate)> + '_ { self.crates.iter().enumerate().map(|(idx, krate)| (CrateId(idx as u32), krate)) } + /// Returns the path to the project's root folder. pub fn path(&self) -> &AbsPath { &self.project_root diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index 17e244d06..323136183 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -3,10 +3,15 @@ use std::process::Command; use anyhow::Result; +use rustc_hash::FxHashMap; use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath}; -pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Vec<CfgFlag> { +pub(crate) fn get( + cargo_toml: Option<&ManifestPath>, + target: Option<&str>, + extra_env: &FxHashMap<String, String>, +) -> Vec<CfgFlag> { let _p = profile::span("rustc_cfg::get"); let mut res = Vec::with_capacity(6 * 2 + 1); @@ -18,7 +23,7 @@ pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Ve } } - match get_rust_cfgs(cargo_toml, target) { + match get_rust_cfgs(cargo_toml, target, extra_env) { Ok(rustc_cfgs) => { tracing::debug!( "rustc cfgs found: {:?}", @@ -35,9 +40,14 @@ pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Ve res } -fn get_rust_cfgs(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Result<String> { +fn get_rust_cfgs( + cargo_toml: Option<&ManifestPath>, + target: Option<&str>, + extra_env: &FxHashMap<String, String>, +) -> Result<String> { if let Some(cargo_toml) = cargo_toml { let mut cargo_config = Command::new(toolchain::cargo()); + cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"]) @@ -52,6 +62,7 @@ fn get_rust_cfgs(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Res } // using unstable cargo features failed, fall back to using plain rustc let mut cmd = Command::new(toolchain::rustc()); + cmd.envs(extra_env); cmd.args(&["--print", "cfg", "-O"]); if let Some(target) = target { cmd.args(&["--target", target]); diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 362bb0f5e..fa8d76f3f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -9,6 +9,7 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command}; use anyhow::{format_err, Result}; use la_arena::{Arena, 
Idx}; use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; use crate::{utf8_stdout, ManifestPath}; @@ -63,22 +64,38 @@ impl Sysroot { self.by_name("proc_macro") } - pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a { + pub fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ { self.crates.iter().map(|(id, _data)| id) } +} - pub fn discover(dir: &AbsPath) -> Result<Sysroot> { - tracing::debug!("Discovering sysroot for {}", dir.display()); - let sysroot_dir = discover_sysroot_dir(dir)?; - let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir)?; +impl Sysroot { + /// Attempts to discover the toolchain's sysroot from the given `dir`. + pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> { + tracing::debug!("discovering sysroot for {}", dir.display()); + let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; + let sysroot_src_dir = + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?; Ok(res) } - pub fn discover_rustc(cargo_toml: &ManifestPath) -> Option<ManifestPath> { - tracing::debug!("Discovering rustc source for {}", cargo_toml.display()); + pub fn discover_rustc( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap<String, String>, + ) -> Option<ManifestPath> { + tracing::debug!("discovering rustc source for {}", cargo_toml.display()); let current_dir = cargo_toml.parent(); - discover_sysroot_dir(current_dir).ok().and_then(|sysroot_dir| get_rustc_src(&sysroot_dir)) + let sysroot_dir = discover_sysroot_dir(current_dir, extra_env).ok()?; + get_rustc_src(&sysroot_dir) + } + + pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> { + let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| { + format_err!("can't load standard library from sysroot {}", sysroot_dir.display()) + })?; + let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?; + Ok(res) } pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> { @@ -144,33 +161,46 @@ impl Sysroot { } } -fn discover_sysroot_dir(current_dir: &AbsPath) -> Result<AbsPathBuf> { +fn discover_sysroot_dir( + current_dir: &AbsPath, + extra_env: &FxHashMap<String, String>, +) -> Result<AbsPathBuf> { let mut rustc = Command::new(toolchain::rustc()); + rustc.envs(extra_env); rustc.current_dir(current_dir).args(&["--print", "sysroot"]); tracing::debug!("Discovering sysroot by {:?}", rustc); let stdout = utf8_stdout(rustc)?; Ok(AbsPathBuf::assert(PathBuf::from(stdout))) } -fn discover_sysroot_src_dir( - sysroot_path: &AbsPathBuf, - current_dir: &AbsPath, -) -> Result<AbsPathBuf> { +fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> { if let Ok(path) = env::var("RUST_SRC_PATH") { - let path = AbsPathBuf::try_from(path.as_str()) - .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?; - let core = path.join("core"); - if fs::metadata(&core).is_ok() { - tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display()); - return Ok(path); + if let Ok(path) = AbsPathBuf::try_from(path.as_str()) { + let core = path.join("core"); + if fs::metadata(&core).is_ok() { + tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display()); + return Some(path); + } + tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core); + } else { + tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring"); } - 
tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core); } get_rust_src(sysroot_path) +} + +fn discover_sysroot_src_dir_or_add_component( + sysroot_path: &AbsPathBuf, + current_dir: &AbsPath, + extra_env: &FxHashMap<String, String>, +) -> Result<AbsPathBuf> { + discover_sysroot_src_dir(sysroot_path) .or_else(|| { let mut rustup = Command::new(toolchain::rustup()); + rustup.envs(extra_env); rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); + tracing::info!("adding rust-src component by {:?}", rustup); utf8_stdout(rustup).ok()?; get_rust_src(sysroot_path) }) @@ -189,7 +219,7 @@ try installing the Rust source the same way you installed rustc", fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = ManifestPath::try_from(rustc_src).ok()?; - tracing::debug!("Checking for rustc source code: {}", rustc_src.display()); + tracing::debug!("checking for rustc source code: {}", rustc_src.display()); if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { @@ -199,7 +229,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> { let rust_src = sysroot_path.join("lib/rustlib/src/rust/library"); - tracing::debug!("Checking sysroot: {}", rust_src.display()); + tracing::debug!("checking sysroot library: {}", rust_src.display()); if fs::metadata(&rust_src).is_ok() { Some(rust_src) } else { diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 9ccb6e910..e2444e249 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -92,13 +92,17 @@ fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { } fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph { - project_workspace.to_crate_graph(&mut |_, _| Ok(Vec::new()), &mut { - let mut counter = 0; - move |_path| { - counter += 1; - Some(FileId(counter)) - } - }) + project_workspace.to_crate_graph( + &mut |_, _| Ok(Vec::new()), + &mut { + let mut counter = 0; + move |_path| { + counter += 1; + Some(FileId(counter)) + } + }, + &Default::default(), + ) } fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) { @@ -181,6 +185,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -256,6 +263,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -331,6 +341,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -406,6 +419,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -473,6 +489,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { repo: Some( "https://github.com/rust-lang/libc", ), + name: Some( + "libc", + ), }, is_proc_macro: false, }, @@ -563,6 +582,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -640,6 +662,9 @@ fn 
cargo_hello_world_project_model_with_selective_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -717,6 +742,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -794,6 +822,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -861,6 +892,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { repo: Some( "https://github.com/rust-lang/libc", ), + name: Some( + "libc", + ), }, is_proc_macro: false, }, @@ -942,6 +976,9 @@ fn cargo_hello_world_project_model() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -1019,6 +1056,9 @@ fn cargo_hello_world_project_model() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -1096,6 +1136,9 @@ fn cargo_hello_world_project_model() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -1173,6 +1216,9 @@ fn cargo_hello_world_project_model() { ), origin: CratesIo { repo: None, + name: Some( + "hello-world", + ), }, is_proc_macro: false, }, @@ -1240,6 +1286,9 @@ fn cargo_hello_world_project_model() { repo: Some( "https://github.com/rust-lang/libc", ), + name: Some( + "libc", + ), }, is_proc_macro: false, }, @@ -1800,6 +1849,9 @@ fn rust_project_hello_world_project_model() { ), origin: CratesIo { repo: None, + name: Some( + "hello_world", + ), }, is_proc_macro: false, }, diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 818bbed6a..2780c62ed 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -2,7 +2,7 @@ //! metadata` or `rust-project.json`) into representation stored in the salsa //! database -- `CrateGraph`. -use std::{collections::VecDeque, fmt, fs, process::Command}; +use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; use anyhow::{format_err, Context, Result}; use base_db::{ @@ -21,8 +21,8 @@ use crate::{ cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, - utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot, - TargetKind, WorkspaceBuildScripts, + utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, + ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. @@ -156,11 +156,16 @@ impl ProjectWorkspace { })?; let project_location = project_json.parent().to_path_buf(); let project_json = ProjectJson::new(&project_location, data); - ProjectWorkspace::load_inline(project_json, config.target.as_deref())? + ProjectWorkspace::load_inline( + project_json, + config.target.as_deref(), + &config.extra_env, + )? 
} ProjectManifest::CargoToml(cargo_toml) => { let cargo_version = utf8_stdout({ let mut cmd = Command::new(toolchain::cargo()); + cmd.envs(&config.extra_env); cmd.arg("--version"); cmd })?; @@ -183,22 +188,41 @@ impl ProjectWorkspace { })?; let cargo = CargoWorkspace::new(meta); - let sysroot = if config.no_sysroot { - None - } else { - Some(Sysroot::discover(cargo_toml.parent()).with_context(|| { - format!( + let sysroot = match &config.sysroot { + Some(RustcSource::Path(path)) => { + Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| { + format!( + "Failed to find sysroot for Cargo.toml file {}.", + cargo_toml.display() + ) + })?) + } + Some(RustcSource::Discover) => Some( + Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context( + || { + format!( "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", cargo_toml.display() ) - })?) + }, + )?, + ), + None => None, }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_dir = match &config.rustc_source { Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(), - Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml), + Some(RustcSource::Discover) => { + Sysroot::discover_rustc(&cargo_toml, &config.extra_env) + } None => None, }; + if let Some(rustc_dir) = &rustc_dir { + tracing::info!(rustc_dir = %rustc_dir.display(), "Using rustc source"); + } let rustc = match rustc_dir { Some(rustc_dir) => Some({ @@ -216,7 +240,8 @@ impl ProjectWorkspace { None => None, }; - let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref()); + let rustc_cfg = + rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env); let cfg_overrides = config.cfg_overrides(); ProjectWorkspace::Cargo { @@ -237,6 +262,7 @@ impl ProjectWorkspace { pub fn load_inline( project_json: ProjectJson, target: Option<&str>, + extra_env: &FxHashMap<String, String>, ) -> Result<ProjectWorkspace> { let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?), @@ -257,8 +283,11 @@ impl ProjectWorkspace { } (None, None) => None, }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } - let rustc_cfg = rustc_cfg::get(None, target); + let rustc_cfg = rustc_cfg::get(None, target, extra_env); Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) } @@ -268,11 +297,13 @@ impl ProjectWorkspace { .first() .and_then(|it| it.parent()) .ok_or_else(|| format_err!("No detached files to load"))?, + &Default::default(), )?; - let rustc_cfg = rustc_cfg::get(None, None); + let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } + /// Runs the build scripts for this [`ProjectWorkspace`]. pub fn run_build_scripts( &self, config: &CargoConfig, @@ -280,9 +311,13 @@ impl ProjectWorkspace { ) -> Result<WorkspaceBuildScripts> { match self { ProjectWorkspace::Cargo { cargo, toolchain, .. 
} => { - WorkspaceBuildScripts::run(config, cargo, progress, toolchain).with_context(|| { - format!("Failed to run build scripts for {}", &cargo.workspace_root().display()) - }) + WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain) + .with_context(|| { + format!( + "Failed to run build scripts for {}", + &cargo.workspace_root().display() + ) + }) } ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { Ok(WorkspaceBuildScripts::default()) @@ -290,6 +325,49 @@ impl ProjectWorkspace { } } + /// Runs the build scripts for the given [`ProjectWorkspace`]s. Depending on the invocation + /// strategy this may run a single build process for all project workspaces. + pub fn run_all_build_scripts( + workspaces: &[ProjectWorkspace], + config: &CargoConfig, + progress: &dyn Fn(String), + ) -> Vec<Result<WorkspaceBuildScripts>> { + if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace) + || config.run_build_script_command.is_none() + { + return workspaces.iter().map(|it| it.run_build_scripts(config, progress)).collect(); + } + + let cargo_ws: Vec<_> = workspaces + .iter() + .filter_map(|it| match it { + ProjectWorkspace::Cargo { cargo, .. } => Some(cargo), + _ => None, + }) + .collect(); + let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + Ok(it) => Ok(it.into_iter()), + // io::Error is not Clone? + Err(e) => Err(Arc::new(e)), + }; + + workspaces + .iter() + .map(|it| match it { + ProjectWorkspace::Cargo { cargo, .. } => match outputs { + Ok(outputs) => Ok(outputs.next().unwrap()), + Err(e) => Err(e.clone()).with_context(|| { + format!( + "Failed to run build scripts for {}", + &cargo.workspace_root().display() + ) + }), + }, + _ => Ok(WorkspaceBuildScripts::default()), + }) + .collect() + } + pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) { match self { ProjectWorkspace::Cargo { build_scripts, .. 
} => *build_scripts = bs, @@ -303,6 +381,13 @@ impl ProjectWorkspace { /// The return type contains the path and whether or not /// the root is a member of the current workspace pub fn to_roots(&self) -> Vec<PackageRoot> { + let mk_sysroot = |sysroot: Option<&Sysroot>| { + sysroot.map(|sysroot| PackageRoot { + is_local: false, + include: vec![sysroot.src_root().to_path_buf()], + exclude: Vec::new(), + }) + }; match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project .crates() @@ -313,13 +398,7 @@ impl ProjectWorkspace { }) .collect::<FxHashSet<_>>() .into_iter() - .chain(sysroot.as_ref().into_iter().flat_map(|sysroot| { - sysroot.crates().map(move |krate| PackageRoot { - is_local: false, - include: vec![sysroot[krate].root.parent().to_path_buf()], - exclude: Vec::new(), - }) - })) + .chain(mk_sysroot(sysroot.as_ref())) .collect::<Vec<_>>(), ProjectWorkspace::Cargo { cargo, @@ -368,11 +447,7 @@ impl ProjectWorkspace { } PackageRoot { is_local, include, exclude } }) - .chain(sysroot.iter().map(|sysroot| PackageRoot { - is_local: false, - include: vec![sysroot.src_root().to_path_buf()], - exclude: Vec::new(), - })) + .chain(mk_sysroot(sysroot.as_ref())) .chain(rustc.iter().flat_map(|rustc| { rustc.packages().map(move |krate| PackageRoot { is_local: false, @@ -389,11 +464,7 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(sysroot.crates().map(|krate| PackageRoot { - is_local: false, - include: vec![sysroot[krate].root.parent().to_path_buf()], - exclude: Vec::new(), - })) + .chain(mk_sysroot(Some(sysroot))) .collect(), } } @@ -416,6 +487,7 @@ impl ProjectWorkspace { &self, load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, + extra_env: &FxHashMap<String, String>, ) -> CrateGraph { let _p = profile::span("ProjectWorkspace::to_crate_graph"); @@ -426,6 +498,7 @@ impl ProjectWorkspace { load, project, sysroot, + extra_env, ), ProjectWorkspace::Cargo { cargo, @@ -464,6 +537,7 @@ fn project_json_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, project: &ProjectJson, sysroot: &Option<Sysroot>, + extra_env: &FxHashMap<String, String>, ) -> CrateGraph { let mut crate_graph = CrateGraph::default(); let sysroot_deps = sysroot @@ -489,9 +563,9 @@ fn project_json_to_crate_graph( }; let target_cfgs = match krate.target.as_deref() { - Some(target) => { - cfg_cache.entry(target).or_insert_with(|| rustc_cfg::get(None, Some(target))) - } + Some(target) => cfg_cache + .entry(target) + .or_insert_with(|| rustc_cfg::get(None, Some(target), extra_env)), None => &rustc_cfg, }; @@ -510,9 +584,15 @@ fn project_json_to_crate_graph( proc_macro, krate.is_proc_macro, if krate.display_name.is_some() { - CrateOrigin::CratesIo { repo: krate.repository.clone() } + CrateOrigin::CratesIo { + repo: krate.repository.clone(), + name: krate + .display_name + .clone() + .map(|n| n.canonical_name().to_string()), + } } else { - CrateOrigin::CratesIo { repo: None } + CrateOrigin::CratesIo { repo: None, name: None } }, ), ) @@ -624,6 +704,8 @@ fn cargo_to_crate_graph( lib_tgt = Some((crate_id, cargo[tgt].name.clone())); pkg_to_lib_crate.insert(pkg, crate_id); } + // Even crates that don't set proc-macro = true are allowed to depend on proc_macro + // (just none of the APIs work when called outside of a proc macro). 
if let Some(proc_macro) = libproc_macro { add_dep_with_prelude( &mut crate_graph, @@ -639,19 +721,19 @@ fn cargo_to_crate_graph( } // Set deps to the core, std and to the lib target of the current package - for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() { + for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() { // Add sysroot deps first so that a lib target named `core` etc. can overwrite them. - public_deps.add(*from, &mut crate_graph); + public_deps.add(from, &mut crate_graph); if let Some((to, name)) = lib_tgt.clone() { - if to != *from && *kind != TargetKind::BuildScript { + if to != from && kind != TargetKind::BuildScript { // (build script can not depend on its library target) // For root projects with dashes in their name, // cargo metadata does not do any normalization, // so we do it ourselves currently let name = CrateName::normalize_dashes(&name); - add_dep(&mut crate_graph, *from, name, to); + add_dep(&mut crate_graph, from, name, to); } } } @@ -663,17 +745,17 @@ fn cargo_to_crate_graph( for dep in cargo[pkg].dependencies.iter() { let name = CrateName::new(&dep.name).unwrap(); if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { - for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() { - if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript { + for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() { + if dep.kind == DepKind::Build && kind != TargetKind::BuildScript { // Only build scripts may depend on build dependencies. continue; } - if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript { + if dep.kind != DepKind::Build && kind == TargetKind::BuildScript { // Build scripts may only depend on build dependencies. continue; } - add_dep(&mut crate_graph, *from, name.clone(), to) + add_dep(&mut crate_graph, from, name.clone(), to) } } } @@ -684,9 +766,9 @@ fn cargo_to_crate_graph( // and create dependencies on them for the crates which opt-in to that if let Some(rustc_workspace) = rustc { handle_rustc_crates( + &mut crate_graph, rustc_workspace, load, - &mut crate_graph, &cfg_options, override_cfg, load_proc_macro, @@ -730,14 +812,17 @@ fn detached_files_to_crate_graph( let detached_file_crate = crate_graph.add_crate_root( file_id, Edition::CURRENT, - display_name, + display_name.clone(), None, cfg_options.clone(), cfg_options.clone(), Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { + repo: None, + name: display_name.map(|n| n.canonical_name().to_string()), + }, ); public_deps.add(detached_file_crate, &mut crate_graph); @@ -746,16 +831,16 @@ fn detached_files_to_crate_graph( } fn handle_rustc_crates( + crate_graph: &mut CrateGraph, rustc_workspace: &CargoWorkspace, load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, - crate_graph: &mut CrateGraph, cfg_options: &CfgOptions, override_cfg: &CfgOverrides, load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, - pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>, + pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>, public_deps: &SysrootPublicDeps, cargo: &CargoWorkspace, - pkg_crates: &FxHashMap<la_arena::Idx<crate::PackageData>, Vec<(CrateId, TargetKind)>>, + pkg_crates: &FxHashMap<Package, Vec<(CrateId, TargetKind)>>, build_scripts: &WorkspaceBuildScripts, ) { let mut rustc_pkg_crates = FxHashMap::default(); @@ -769,8 +854,8 @@ fn handle_rustc_crates( let mut queue = VecDeque::new(); queue.push_back(root_pkg); while let Some(pkg) = queue.pop_front() { - // Don't 
duplicate packages if they are dependended on a diamond pattern - // N.B. if this line is omitted, we try to analyse over 4_800_000 crates + // Don't duplicate packages if they are dependent on a diamond pattern + // N.B. if this line is omitted, we try to analyze over 4_800_000 crates // which is not ideal if rustc_pkg_crates.contains_key(&pkg) { continue; @@ -913,7 +998,7 @@ fn add_target_crate_root( env, proc_macro, is_proc_macro, - CrateOrigin::CratesIo { repo: pkg.repository.clone() }, + CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) }, ) } |
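
For readers skimming the raw diff above: the project-model changes replace the loose `all_features`/`no_default_features`/`features` fields on `CargoConfig` with a single `CargoFeatures` enum, and add `InvocationStrategy`, `InvocationLocation`, and `extra_env` knobs plus a single-invocation build-script path (`run_once`). Below is a minimal, standalone sketch of how the new feature selection maps onto `cargo check` arguments; the type and variant names are taken from the diff, while the helper `feature_args` and the `main` body are invented purely for illustration.

```rust
// Editor's illustration, not part of the commit: the shape of the new
// cargo-configuration types introduced by this merge, and how the feature
// selection is turned into `cargo check` flags (mirroring the match in
// `WorkspaceBuildScripts::build_command`).
#[allow(dead_code)]
#[derive(Debug)]
enum CargoFeatures {
    /// `--all-features`
    All,
    /// Explicit feature selection; replaces the old
    /// `features`/`no_default_features`/`all_features` fields on `CargoConfig`.
    Selected { features: Vec<String>, no_default_features: bool },
}

/// Whether build scripts are run once for all workspaces or once per workspace.
#[allow(dead_code)]
#[derive(Debug)]
enum InvocationStrategy {
    Once,
    PerWorkspace, // the default
}

fn feature_args(features: &CargoFeatures) -> Vec<String> {
    let mut args = Vec::new();
    match features {
        CargoFeatures::All => args.push("--all-features".to_owned()),
        CargoFeatures::Selected { features, no_default_features } => {
            if *no_default_features {
                args.push("--no-default-features".to_owned());
            }
            if !features.is_empty() {
                args.push("--features".to_owned());
                args.push(features.join(" "));
            }
        }
    }
    args
}

fn main() {
    let selected = CargoFeatures::Selected {
        features: vec!["serde".to_owned()],
        no_default_features: true,
    };
    // Prints: ["--no-default-features", "--features", "serde"] (PerWorkspace)
    println!("{:?} ({:?})", feature_args(&selected), InvocationStrategy::PerWorkspace);
}
```

With `InvocationStrategy::PerWorkspace` (the default) the behaviour matches the old per-workspace `cargo check`; `Once` routes through the new `run_once` path, which requires `InvocationLocation::Root(..)` and fans one command's output back out to each workspace, as the diff's collision-handling logic shows.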