Diffstat:
-rw-r--r--  compiler/rustc_interface/src/callbacks.rs        |   59
-rw-r--r--  compiler/rustc_interface/src/interface.rs        |  362
-rw-r--r--  compiler/rustc_interface/src/lib.rs              |   22
-rw-r--r--  compiler/rustc_interface/src/passes.rs           | 1046
-rw-r--r--  compiler/rustc_interface/src/proc_macro_decls.rs |   27
-rw-r--r--  compiler/rustc_interface/src/queries.rs          |  402
-rw-r--r--  compiler/rustc_interface/src/tests.rs            |  830
-rw-r--r--  compiler/rustc_interface/src/util.rs             |  672
8 files changed, 3420 insertions(+), 0 deletions(-)
diff --git a/compiler/rustc_interface/src/callbacks.rs b/compiler/rustc_interface/src/callbacks.rs
new file mode 100644
index 000000000..76442de69
--- /dev/null
+++ b/compiler/rustc_interface/src/callbacks.rs
@@ -0,0 +1,59 @@
+//! Throughout the compiler tree, there are several places which want to have
+//! access to state or queries while being inside crates that are dependencies
+//! of `rustc_middle`. To facilitate this, we have the
+//! `rustc_data_structures::AtomicRef` type, which allows us to set up a global
+//! static which can then be set in this file at program startup.
+//!
+//! See `SPAN_TRACK` for an example of how to set things up.
+//!
+//! The functions in this file should fall back to the default set in their
+//! origin crate when the `TyCtxt` is not present in TLS.
+
+use rustc_errors::{Diagnostic, TRACK_DIAGNOSTICS};
+use rustc_middle::ty::tls;
+use std::fmt;
+
+fn track_span_parent(def_id: rustc_span::def_id::LocalDefId) {
+ tls::with_opt(|tcx| {
+ if let Some(tcx) = tcx {
+ let _span = tcx.source_span(def_id);
+ // Sanity check: relative span's parent must be an absolute span.
+ debug_assert_eq!(_span.data_untracked().parent, None);
+ }
+ })
+}
+
+/// This is a callback from `rustc_errors` as it cannot access the implicit state
+/// in `rustc_middle` otherwise. It is used when diagnostic messages are
+/// emitted and stores them in the current query, if there is one.
+fn track_diagnostic(diagnostic: &Diagnostic) {
+ tls::with_context_opt(|icx| {
+ if let Some(icx) = icx {
+ if let Some(diagnostics) = icx.diagnostics {
+ let mut diagnostics = diagnostics.lock();
+ diagnostics.extend(Some(diagnostic.clone()));
+ }
+ }
+ })
+}
+
+/// This is a callback from `rustc_hir` as it cannot access the implicit state
+/// in `rustc_middle` otherwise.
+fn def_id_debug(def_id: rustc_hir::def_id::DefId, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "DefId({}:{}", def_id.krate, def_id.index.index())?;
+ tls::with_opt(|opt_tcx| {
+ if let Some(tcx) = opt_tcx {
+ write!(f, " ~ {}", tcx.def_path_debug_str(def_id))?;
+ }
+ Ok(())
+ })?;
+ write!(f, ")")
+}
+
+/// Sets up the callbacks in earlier crates that we want to be able to refer to
+/// the `TyCtxt` in.
+pub fn setup_callbacks() {
+ rustc_span::SPAN_TRACK.swap(&(track_span_parent as fn(_)));
+ rustc_hir::def_id::DEF_ID_DEBUG.swap(&(def_id_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
+ TRACK_DIAGNOSTICS.swap(&(track_diagnostic as fn(&_)));
+}
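The module comment above describes the hook pattern this file implements: an upstream crate exposes a swappable function pointer with a conservative default, and `setup_callbacks` replaces it at program startup. The self-contained sketch below illustrates the same control flow using only standard-library types; rustc's actual `AtomicRef` is lock-free, and the `RwLock` stand-in (plus the illustrative names `TRACK`, `emit`, `fancy_track`) is used purely so the example compiles on its own.

```rust
// A toy version of the callback-swapping pattern described above.
use std::sync::RwLock;

// The "origin" crate declares a hook with a conservative default...
fn default_track(event: &str) {
    let _ = event; // default: do nothing
}
static TRACK: RwLock<fn(&str)> = RwLock::new(default_track);

fn emit(event: &str) {
    // ...and always calls through the hook.
    let hook = *TRACK.read().unwrap();
    hook(event);
}

// A downstream crate (the analogue of `rustc_interface`) installs a richer
// implementation at startup, e.g. one that can reach `TyCtxt` through TLS.
fn fancy_track(event: &str) {
    println!("tracked: {event}");
}

fn main() {
    emit("before setup"); // routed to the default
    *TRACK.write().unwrap() = fancy_track; // the `setup_callbacks()` analogue
    emit("after setup"); // routed to the override
}
```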
diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs
new file mode 100644
index 000000000..94f81b660
--- /dev/null
+++ b/compiler/rustc_interface/src/interface.rs
@@ -0,0 +1,362 @@
+pub use crate::passes::BoxedResolver;
+use crate::util;
+
+use rustc_ast::token;
+use rustc_ast::{self as ast, LitKind, MetaItemKind};
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::OnDrop;
+use rustc_errors::registry::Registry;
+use rustc_errors::{ErrorGuaranteed, Handler};
+use rustc_lint::LintStore;
+use rustc_middle::ty;
+use rustc_parse::maybe_new_parser_from_source_str;
+use rustc_query_impl::QueryCtxt;
+use rustc_session::config::{self, CheckCfg, ErrorOutputType, Input, OutputFilenames};
+use rustc_session::early_error;
+use rustc_session::lint;
+use rustc_session::parse::{CrateConfig, ParseSess};
+use rustc_session::{DiagnosticOutput, Session};
+use rustc_span::source_map::{FileLoader, FileName};
+use rustc_span::symbol::sym;
+use std::path::PathBuf;
+use std::result;
+
+pub type Result<T> = result::Result<T, ErrorGuaranteed>;
+
+/// Represents a compiler session.
+///
+/// Can be used to run `rustc_interface` queries.
+/// Created by passing [`Config`] to [`run_compiler`].
+pub struct Compiler {
+ pub(crate) sess: Lrc<Session>,
+ codegen_backend: Lrc<Box<dyn CodegenBackend>>,
+ pub(crate) input: Input,
+ pub(crate) input_path: Option<PathBuf>,
+ pub(crate) output_dir: Option<PathBuf>,
+ pub(crate) output_file: Option<PathBuf>,
+ pub(crate) temps_dir: Option<PathBuf>,
+ pub(crate) register_lints: Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>>,
+ pub(crate) override_queries:
+ Option<fn(&Session, &mut ty::query::Providers, &mut ty::query::ExternProviders)>,
+}
+
+impl Compiler {
+ pub fn session(&self) -> &Lrc<Session> {
+ &self.sess
+ }
+ pub fn codegen_backend(&self) -> &Lrc<Box<dyn CodegenBackend>> {
+ &self.codegen_backend
+ }
+ pub fn input(&self) -> &Input {
+ &self.input
+ }
+ pub fn output_dir(&self) -> &Option<PathBuf> {
+ &self.output_dir
+ }
+ pub fn output_file(&self) -> &Option<PathBuf> {
+ &self.output_file
+ }
+ pub fn temps_dir(&self) -> &Option<PathBuf> {
+ &self.temps_dir
+ }
+ pub fn register_lints(&self) -> &Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>> {
+ &self.register_lints
+ }
+ pub fn build_output_filenames(
+ &self,
+ sess: &Session,
+ attrs: &[ast::Attribute],
+ ) -> OutputFilenames {
+ util::build_output_filenames(
+ &self.input,
+ &self.output_dir,
+ &self.output_file,
+ &self.temps_dir,
+ attrs,
+ sess,
+ )
+ }
+}
+
+/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
+pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
+ rustc_span::create_default_session_if_not_set_then(move |_| {
+ let cfg = cfgspecs
+ .into_iter()
+ .map(|s| {
+ let sess = ParseSess::with_silent_emitter(Some(format!(
+ "this error occurred on the command line: `--cfg={}`",
+ s
+ )));
+ let filename = FileName::cfg_spec_source_code(&s);
+
+ macro_rules! error {
+ ($reason: expr) => {
+ early_error(
+ ErrorOutputType::default(),
+ &format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s),
+ );
+ };
+ }
+
+ match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) {
+ Ok(mut parser) => match parser.parse_meta_item() {
+ Ok(meta_item) if parser.token == token::Eof => {
+ if meta_item.path.segments.len() != 1 {
+ error!("argument key must be an identifier");
+ }
+ match &meta_item.kind {
+ MetaItemKind::List(..) => {}
+ MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
+ error!("argument value must be a string");
+ }
+ MetaItemKind::NameValue(..) | MetaItemKind::Word => {
+ let ident = meta_item.ident().expect("multi-segment cfg key");
+ return (ident.name, meta_item.value_str());
+ }
+ }
+ }
+ Ok(..) => {}
+ Err(err) => err.cancel(),
+ },
+ Err(errs) => drop(errs),
+ }
+
+ // If the user tried to use a key="value" flag, but is missing the quotes, provide
+ // a hint about how to resolve this.
+ if s.contains('=') && !s.contains("=\"") && !s.ends_with('"') {
+ error!(concat!(
+ r#"expected `key` or `key="value"`, ensure escaping is appropriate"#,
+ r#" for your shell, try 'key="value"' or key=\"value\""#
+ ));
+ } else {
+ error!(r#"expected `key` or `key="value"`"#);
+ }
+ })
+ .collect::<CrateConfig>();
+ cfg.into_iter().map(|(a, b)| (a.to_string(), b.map(|b| b.to_string()))).collect()
+ })
+}
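As a usage sketch, `parse_cfgspecs` turns raw `--cfg` strings into the `(name, value)` pairs that later become `Config::crate_cfg`. This relies on the rustc-internal crates above and is not buildable as a standalone program.

```rust
// Sketch only: assumes rustc_interface and rustc_data_structures are in scope.
let cfg = rustc_interface::interface::parse_cfgspecs(vec![
    "unix".to_string(),              // plain `key`
    "feature=\"serde\"".to_string(), // `key="value"`
]);
// The result is an FxHashSet of (name, optional value) pairs, e.g.
// {("unix", None), ("feature", Some("serde"))}, ready for `Config::crate_cfg`.
assert!(cfg.contains(&("unix".to_string(), None::<String>)));
```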
+
+/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`.
+pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
+ rustc_span::create_default_session_if_not_set_then(move |_| {
+ let mut cfg = CheckCfg::default();
+
+ 'specs: for s in specs {
+ let sess = ParseSess::with_silent_emitter(Some(format!(
+ "this error occurred on the command line: `--check-cfg={}`",
+ s
+ )));
+ let filename = FileName::cfg_spec_source_code(&s);
+
+ macro_rules! error {
+ ($reason: expr) => {
+ early_error(
+ ErrorOutputType::default(),
+ &format!(
+ concat!("invalid `--check-cfg` argument: `{}` (", $reason, ")"),
+ s
+ ),
+ );
+ };
+ }
+
+ match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) {
+ Ok(mut parser) => match parser.parse_meta_item() {
+ Ok(meta_item) if parser.token == token::Eof => {
+ if let Some(args) = meta_item.meta_item_list() {
+ if meta_item.has_name(sym::names) {
+ let names_valid =
+ cfg.names_valid.get_or_insert_with(|| FxHashSet::default());
+ for arg in args {
+ if arg.is_word() && arg.ident().is_some() {
+ let ident = arg.ident().expect("multi-segment cfg key");
+ names_valid.insert(ident.name.to_string());
+ } else {
+                                    error!("`names()` arguments must be simple identifiers");
+ }
+ }
+ continue 'specs;
+ } else if meta_item.has_name(sym::values) {
+ if let Some((name, values)) = args.split_first() {
+ if name.is_word() && name.ident().is_some() {
+ let ident = name.ident().expect("multi-segment cfg key");
+ let ident_values = cfg
+ .values_valid
+ .entry(ident.name.to_string())
+ .or_insert_with(|| FxHashSet::default());
+
+ for val in values {
+ if let Some(LitKind::Str(s, _)) =
+ val.literal().map(|lit| &lit.kind)
+ {
+ ident_values.insert(s.to_string());
+ } else {
+ error!(
+ "`values()` arguments must be string literals"
+ );
+ }
+ }
+
+ continue 'specs;
+ } else {
+ error!(
+                                        "`values()` first argument must be a simple identifier"
+ );
+ }
+ } else if args.is_empty() {
+ cfg.well_known_values = true;
+ continue 'specs;
+ }
+ }
+ }
+ }
+ Ok(..) => {}
+ Err(err) => err.cancel(),
+ },
+ Err(errs) => drop(errs),
+ }
+
+ error!(
+ "expected `names(name1, name2, ... nameN)` or \
+ `values(name, \"value1\", \"value2\", ... \"valueN\")`"
+ );
+ }
+
+ if let Some(names_valid) = &mut cfg.names_valid {
+ names_valid.extend(cfg.values_valid.keys().cloned());
+ }
+ cfg
+ })
+}
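For reference, the parser above accepts three spec forms: `names(...)`, `values(name, "v1", ...)`, and a bare `values()` that only enables well-known values. A usage sketch, again assuming the rustc-internal API and not buildable standalone:

```rust
// Sketch only: assumes rustc_interface is in scope.
let check_cfg = rustc_interface::interface::parse_check_cfg(vec![
    "names(feature, docsrs)".to_string(),              // declare valid names
    r#"values(feature, "std", "alloc")"#.to_string(),  // declare valid values for a name
    "values()".to_string(),                            // enable well-known values
]);
// `names_valid` also picks up every key of `values_valid`, so `feature` would
// count as a known name even without listing it in `names(...)`.
assert!(check_cfg.well_known_values);
```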
+
+/// The compiler configuration
+pub struct Config {
+ /// Command line options
+ pub opts: config::Options,
+
+ /// cfg! configuration in addition to the default ones
+ pub crate_cfg: FxHashSet<(String, Option<String>)>,
+ pub crate_check_cfg: CheckCfg,
+
+ pub input: Input,
+ pub input_path: Option<PathBuf>,
+ pub output_dir: Option<PathBuf>,
+ pub output_file: Option<PathBuf>,
+ pub file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
+ pub diagnostic_output: DiagnosticOutput,
+
+ pub lint_caps: FxHashMap<lint::LintId, lint::Level>,
+
+ /// This is a callback from the driver that is called when [`ParseSess`] is created.
+ pub parse_sess_created: Option<Box<dyn FnOnce(&mut ParseSess) + Send>>,
+
+ /// This is a callback from the driver that is called when we're registering lints;
+ /// it is called during plugin registration when we have the LintStore in a non-shared state.
+ ///
+ /// Note that if you find a Some here you probably want to call that function in the new
+ /// function being registered.
+ pub register_lints: Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>>,
+
+ /// This is a callback from the driver that is called just after we have populated
+ /// the list of queries.
+ ///
+ /// The second parameter is local providers and the third parameter is external providers.
+ pub override_queries:
+ Option<fn(&Session, &mut ty::query::Providers, &mut ty::query::ExternProviders)>,
+
+ /// This is a callback from the driver that is called to create a codegen backend.
+ pub make_codegen_backend:
+ Option<Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>>,
+
+ /// Registry of diagnostics codes.
+ pub registry: Registry,
+}
+
+pub fn create_compiler_and_run<R>(config: Config, f: impl FnOnce(&Compiler) -> R) -> R {
+ crate::callbacks::setup_callbacks();
+
+ let registry = &config.registry;
+ let (mut sess, codegen_backend) = util::create_session(
+ config.opts,
+ config.crate_cfg,
+ config.crate_check_cfg,
+ config.diagnostic_output,
+ config.file_loader,
+ config.input_path.clone(),
+ config.lint_caps,
+ config.make_codegen_backend,
+ registry.clone(),
+ );
+
+ if let Some(parse_sess_created) = config.parse_sess_created {
+ parse_sess_created(
+ &mut Lrc::get_mut(&mut sess)
+ .expect("create_session() should never share the returned session")
+ .parse_sess,
+ );
+ }
+
+ let temps_dir = sess.opts.unstable_opts.temps_dir.as_ref().map(|o| PathBuf::from(&o));
+
+ let compiler = Compiler {
+ sess,
+ codegen_backend,
+ input: config.input,
+ input_path: config.input_path,
+ output_dir: config.output_dir,
+ output_file: config.output_file,
+ temps_dir,
+ register_lints: config.register_lints,
+ override_queries: config.override_queries,
+ };
+
+ rustc_span::with_source_map(compiler.sess.parse_sess.clone_source_map(), move || {
+ let r = {
+ let _sess_abort_error = OnDrop(|| {
+ compiler.sess.finish_diagnostics(registry);
+ });
+
+ f(&compiler)
+ };
+
+ let prof = compiler.sess.prof.clone();
+ prof.generic_activity("drop_compiler").run(move || drop(compiler));
+ r
+ })
+}
+
+// JUSTIFICATION: before session exists, only config
+#[cfg_attr(not(bootstrap), allow(rustc::bad_opt_access))]
+pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Send) -> R {
+ tracing::trace!("run_compiler");
+ util::run_in_thread_pool_with_globals(
+ config.opts.edition,
+ config.opts.unstable_opts.threads,
+ || create_compiler_and_run(config, f),
+ )
+}
+
+pub fn try_print_query_stack(handler: &Handler, num_frames: Option<usize>) {
+ eprintln!("query stack during panic:");
+
+ // Be careful relying on global state here: this code is called from
+ // a panic hook, which means that the global `Handler` may be in a weird
+ // state if it was responsible for triggering the panic.
+ let i = ty::tls::with_context_opt(|icx| {
+ if let Some(icx) = icx {
+ QueryCtxt::from_tcx(icx.tcx).try_print_query_stack(icx.query, handler, num_frames)
+ } else {
+ 0
+ }
+ });
+
+ if num_frames == None || num_frames >= Some(i) {
+ eprintln!("end of query stack");
+ } else {
+ eprintln!("we're just showing a limited slice of the query stack");
+ }
+}
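A minimal driver sketch tying `Config` and `run_compiler` together. It assumes `Options::default()` and an in-memory input purely for illustration (the function name `run_check` and the source string are made up); a real driver would build `Options` from command-line arguments and do far more inside the closure.

```rust
// Minimal sketch using the `Config` / `run_compiler` API defined above.
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::registry::Registry;
use rustc_session::config::{CheckCfg, Input, Options};
use rustc_session::DiagnosticOutput;
use rustc_span::source_map::FileName;

fn run_check(source: &str) {
    let config = rustc_interface::Config {
        opts: Options::default(),
        crate_cfg: FxHashSet::default(),
        crate_check_cfg: CheckCfg::default(),
        input: Input::Str {
            name: FileName::Custom("example".to_string()),
            input: source.to_string(),
        },
        input_path: None,
        output_dir: None,
        output_file: None,
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default,
        lint_caps: FxHashMap::default(),
        parse_sess_created: None,
        register_lints: None,
        override_queries: None,
        make_codegen_backend: None,
        registry: Registry::new(&[]),
    };
    rustc_interface::run_compiler(config, |compiler| {
        compiler.enter(|queries| {
            // Drive only the front end here; see `Queries` in queries.rs.
            let _krate = queries.parse().expect("parse failed").take();
        });
    });
}
```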
diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs
new file mode 100644
index 000000000..d443057eb
--- /dev/null
+++ b/compiler/rustc_interface/src/lib.rs
@@ -0,0 +1,22 @@
+#![feature(box_patterns)]
+#![feature(let_else)]
+#![feature(internal_output_capture)]
+#![feature(thread_spawn_unchecked)]
+#![feature(once_cell)]
+#![recursion_limit = "256"]
+#![allow(rustc::potential_query_instability)]
+
+mod callbacks;
+pub mod interface;
+mod passes;
+mod proc_macro_decls;
+mod queries;
+pub mod util;
+
+pub use callbacks::setup_callbacks;
+pub use interface::{run_compiler, Config};
+pub use passes::{DEFAULT_EXTERN_QUERY_PROVIDERS, DEFAULT_QUERY_PROVIDERS};
+pub use queries::Queries;
+
+#[cfg(test)]
+mod tests;
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
new file mode 100644
index 000000000..8f0835917
--- /dev/null
+++ b/compiler/rustc_interface/src/passes.rs
@@ -0,0 +1,1046 @@
+use crate::interface::{Compiler, Result};
+use crate::proc_macro_decls;
+use crate::util;
+
+use ast::CRATE_NODE_ID;
+use rustc_ast::{self as ast, visit};
+use rustc_borrowck as mir_borrowck;
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::parallel;
+use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
+use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan, PResult};
+use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
+use rustc_hir::def_id::StableCrateId;
+use rustc_hir::definitions::Definitions;
+use rustc_lint::{BufferedEarlyLint, EarlyCheckNode, LintStore};
+use rustc_metadata::creader::CStore;
+use rustc_middle::arena::Arena;
+use rustc_middle::dep_graph::DepGraph;
+use rustc_middle::ty::query::{ExternProviders, Providers};
+use rustc_middle::ty::{self, GlobalCtxt, RegisteredTools, TyCtxt};
+use rustc_mir_build as mir_build;
+use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_attr};
+use rustc_passes::{self, hir_stats, layout_test};
+use rustc_plugin_impl as plugin;
+use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
+use rustc_resolve::{Resolver, ResolverArenas};
+use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
+use rustc_session::cstore::{CrateStoreDyn, MetadataLoader, MetadataLoaderDyn};
+use rustc_session::output::filename_for_input;
+use rustc_session::search_paths::PathKind;
+use rustc_session::{Limit, Session};
+use rustc_span::symbol::{sym, Symbol};
+use rustc_span::FileName;
+use rustc_trait_selection::traits;
+use rustc_typeck as typeck;
+use tracing::{info, warn};
+
+use std::any::Any;
+use std::cell::RefCell;
+use std::ffi::OsString;
+use std::io::{self, BufWriter, Write};
+use std::marker::PhantomPinned;
+use std::path::{Path, PathBuf};
+use std::pin::Pin;
+use std::rc::Rc;
+use std::sync::LazyLock;
+use std::{env, fs, iter};
+
+pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
+ let krate = sess.time("parse_crate", || match input {
+ Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
+ Input::Str { input, name } => {
+ parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess)
+ }
+ })?;
+
+ if sess.opts.unstable_opts.input_stats {
+ eprintln!("Lines of code: {}", sess.source_map().count_lines());
+ eprintln!("Pre-expansion node count: {}", count_nodes(&krate));
+ }
+
+ if let Some(ref s) = sess.opts.unstable_opts.show_span {
+ rustc_ast_passes::show_span::run(sess.diagnostic(), s, &krate);
+ }
+
+ if sess.opts.unstable_opts.hir_stats {
+ hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS");
+ }
+
+ Ok(krate)
+}
+
+fn count_nodes(krate: &ast::Crate) -> usize {
+ let mut counter = rustc_ast_passes::node_count::NodeCounter::new();
+ visit::walk_crate(&mut counter, krate);
+ counter.count
+}
+
+pub use boxed_resolver::BoxedResolver;
+mod boxed_resolver {
+ use super::*;
+
+ pub struct BoxedResolver(Pin<Box<BoxedResolverInner>>);
+
+ struct BoxedResolverInner {
+ session: Lrc<Session>,
+ resolver_arenas: Option<ResolverArenas<'static>>,
+ resolver: Option<Resolver<'static>>,
+ _pin: PhantomPinned,
+ }
+
+ // Note: Drop order is important to prevent dangling references. Resolver must be dropped first,
+ // then resolver_arenas and session.
+ impl Drop for BoxedResolverInner {
+ fn drop(&mut self) {
+ self.resolver.take();
+ self.resolver_arenas.take();
+ }
+ }
+
+ impl BoxedResolver {
+ pub(super) fn new(
+ session: Lrc<Session>,
+ make_resolver: impl for<'a> FnOnce(&'a Session, &'a ResolverArenas<'a>) -> Resolver<'a>,
+ ) -> BoxedResolver {
+ let mut boxed_resolver = Box::new(BoxedResolverInner {
+ session,
+ resolver_arenas: Some(Resolver::arenas()),
+ resolver: None,
+ _pin: PhantomPinned,
+ });
+ // SAFETY: `make_resolver` takes a resolver arena with an arbitrary lifetime and
+ // returns a resolver with the same lifetime as the arena. We ensure that the arena
+ // outlives the resolver in the drop impl and elsewhere so these transmutes are sound.
+ unsafe {
+ let resolver = make_resolver(
+ std::mem::transmute::<&Session, &Session>(&boxed_resolver.session),
+ std::mem::transmute::<&ResolverArenas<'_>, &ResolverArenas<'_>>(
+ boxed_resolver.resolver_arenas.as_ref().unwrap(),
+ ),
+ );
+ boxed_resolver.resolver = Some(resolver);
+ BoxedResolver(Pin::new_unchecked(boxed_resolver))
+ }
+ }
+
+ pub fn access<F: for<'a> FnOnce(&mut Resolver<'a>) -> R, R>(&mut self, f: F) -> R {
+ // SAFETY: The resolver doesn't need to be pinned.
+ let mut resolver = unsafe {
+ self.0.as_mut().map_unchecked_mut(|boxed_resolver| &mut boxed_resolver.resolver)
+ };
+ f((&mut *resolver).as_mut().unwrap())
+ }
+
+ pub fn to_resolver_outputs(
+ resolver: Rc<RefCell<BoxedResolver>>,
+ ) -> (Definitions, Box<CrateStoreDyn>, ty::ResolverOutputs, ty::ResolverAstLowering)
+ {
+ match Rc::try_unwrap(resolver) {
+ Ok(resolver) => {
+ let mut resolver = resolver.into_inner();
+ // SAFETY: The resolver doesn't need to be pinned.
+ let mut resolver = unsafe {
+ resolver
+ .0
+ .as_mut()
+ .map_unchecked_mut(|boxed_resolver| &mut boxed_resolver.resolver)
+ };
+ resolver.take().unwrap().into_outputs()
+ }
+ Err(resolver) => resolver.borrow_mut().access(|resolver| resolver.clone_outputs()),
+ }
+ }
+ }
+}
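`BoxedResolver` is a self-referential struct: the resolver borrows the session and arenas stored next to it, the lifetimes are erased with `transmute`, and the whole allocation is pinned (with `PhantomPinned`) so it can never move. The toy example below shows the same shape with a raw pointer instead of transmuted lifetimes; the type `SelfRef` is illustrative only, and the point is why pinning makes the internal reference sound.

```rust
// A self-contained sketch of the self-referential pattern used above: owner
// and borrower share one heap allocation, and the allocation is pinned.
use std::marker::PhantomPinned;
use std::pin::Pin;

struct SelfRef {
    buffer: String,
    // Points into `buffer`; only valid because the struct never moves.
    head: *const u8,
    _pin: PhantomPinned,
}

impl SelfRef {
    fn new(text: &str) -> Pin<Box<SelfRef>> {
        let mut boxed = Box::new(SelfRef {
            buffer: text.to_string(),
            head: std::ptr::null(),
            _pin: PhantomPinned,
        });
        boxed.head = boxed.buffer.as_ptr();
        // SAFETY: we never hand out a way to move the value out of the box.
        unsafe { Pin::new_unchecked(boxed) }
    }

    fn first_byte(self: Pin<&Self>) -> Option<u8> {
        // SAFETY: `head` points into `buffer`, which is owned by `self` and
        // cannot have moved because `self` is pinned.
        (!self.head.is_null() && !self.buffer.is_empty())
            .then(|| unsafe { *self.head })
    }
}

fn main() {
    let s = SelfRef::new("resolver");
    assert_eq!(s.as_ref().first_byte(), Some(b'r'));
}
```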
+
+pub fn create_resolver(
+ sess: Lrc<Session>,
+ metadata_loader: Box<MetadataLoaderDyn>,
+ krate: &ast::Crate,
+ crate_name: &str,
+) -> BoxedResolver {
+ tracing::trace!("create_resolver");
+ BoxedResolver::new(sess, move |sess, resolver_arenas| {
+ Resolver::new(sess, krate, crate_name, metadata_loader, resolver_arenas)
+ })
+}
+
+pub fn register_plugins<'a>(
+ sess: &'a Session,
+ metadata_loader: &'a dyn MetadataLoader,
+ register_lints: impl Fn(&Session, &mut LintStore),
+ mut krate: ast::Crate,
+ crate_name: &str,
+) -> Result<(ast::Crate, LintStore)> {
+ krate = sess.time("attributes_injection", || {
+ rustc_builtin_macros::cmdline_attrs::inject(
+ krate,
+ &sess.parse_sess,
+ &sess.opts.unstable_opts.crate_attr,
+ )
+ });
+
+ let (krate, features) = rustc_expand::config::features(sess, krate, CRATE_NODE_ID);
+ // these need to be set "early" so that expansion sees `quote` if enabled.
+ sess.init_features(features);
+
+ let crate_types = util::collect_crate_types(sess, &krate.attrs);
+ sess.init_crate_types(crate_types);
+
+ let stable_crate_id = StableCrateId::new(
+ crate_name,
+ sess.crate_types().contains(&CrateType::Executable),
+ sess.opts.cg.metadata.clone(),
+ );
+ sess.stable_crate_id.set(stable_crate_id).expect("not yet initialized");
+ rustc_incremental::prepare_session_directory(sess, crate_name, stable_crate_id)?;
+
+ if sess.opts.incremental.is_some() {
+ sess.time("incr_comp_garbage_collect_session_directories", || {
+ if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
+ warn!(
+ "Error while trying to garbage collect incremental \
+ compilation cache directory: {}",
+ e
+ );
+ }
+ });
+ }
+
+ let mut lint_store = rustc_lint::new_lint_store(
+ sess.opts.unstable_opts.no_interleave_lints,
+ sess.enable_internal_lints(),
+ );
+ register_lints(sess, &mut lint_store);
+
+ let registrars =
+ sess.time("plugin_loading", || plugin::load::load_plugins(sess, metadata_loader, &krate));
+ sess.time("plugin_registration", || {
+ let mut registry = plugin::Registry { lint_store: &mut lint_store };
+ for registrar in registrars {
+ registrar(&mut registry);
+ }
+ });
+
+ Ok((krate, lint_store))
+}
+
+fn pre_expansion_lint<'a>(
+ sess: &Session,
+ lint_store: &LintStore,
+ registered_tools: &RegisteredTools,
+ check_node: impl EarlyCheckNode<'a>,
+ node_name: &str,
+) {
+ sess.prof.generic_activity_with_arg("pre_AST_expansion_lint_checks", node_name).run(|| {
+ rustc_lint::check_ast_node(
+ sess,
+ true,
+ lint_store,
+ registered_tools,
+ None,
+ rustc_lint::BuiltinCombinedPreExpansionLintPass::new(),
+ check_node,
+ );
+ });
+}
+
+// Cannot implement directly for `LintStore` due to trait coherence.
+struct LintStoreExpandImpl<'a>(&'a LintStore);
+
+impl LintStoreExpand for LintStoreExpandImpl<'_> {
+ fn pre_expansion_lint(
+ &self,
+ sess: &Session,
+ registered_tools: &RegisteredTools,
+ node_id: ast::NodeId,
+ attrs: &[ast::Attribute],
+ items: &[rustc_ast::ptr::P<ast::Item>],
+ name: &str,
+ ) {
+ pre_expansion_lint(sess, self.0, registered_tools, (node_id, attrs, items), name);
+ }
+}
+
+/// Runs the "early phases" of the compiler: initial `cfg` processing, loading compiler plugins,
+/// syntax expansion, secondary `cfg` expansion, synthesis of a test
+/// harness if one is to be provided, injection of a dependency on the
+/// standard library and prelude, and name resolution.
+pub fn configure_and_expand(
+ sess: &Session,
+ lint_store: &LintStore,
+ mut krate: ast::Crate,
+ crate_name: &str,
+ resolver: &mut Resolver<'_>,
+) -> Result<ast::Crate> {
+ tracing::trace!("configure_and_expand");
+ pre_expansion_lint(sess, lint_store, resolver.registered_tools(), &krate, crate_name);
+ rustc_builtin_macros::register_builtin_macros(resolver);
+
+ krate = sess.time("crate_injection", || {
+ rustc_builtin_macros::standard_library_imports::inject(krate, resolver, sess)
+ });
+
+ util::check_attr_crate_type(sess, &krate.attrs, &mut resolver.lint_buffer());
+
+ // Expand all macros
+ krate = sess.time("macro_expand_crate", || {
+ // Windows dlls do not have rpaths, so they don't know how to find their
+ // dependencies. It's up to us to tell the system where to find all the
+ // dependent dlls. Note that this uses cfg!(windows) as opposed to
+ // targ_cfg because syntax extensions are always loaded for the host
+ // compiler, not for the target.
+ //
+ // This is somewhat of an inherently racy operation, however, as
+ // multiple threads calling this function could possibly continue
+ // extending PATH far beyond what it should. To solve this for now we
+ // just don't add any new elements to PATH which are already there
+ // within PATH. This is basically a targeted fix at #17360 for rustdoc
+ // which runs rustc in parallel but has been seen (#33844) to cause
+ // problems with PATH becoming too long.
+ let mut old_path = OsString::new();
+ if cfg!(windows) {
+ old_path = env::var_os("PATH").unwrap_or(old_path);
+ let mut new_path = sess.host_filesearch(PathKind::All).search_path_dirs();
+ for path in env::split_paths(&old_path) {
+ if !new_path.contains(&path) {
+ new_path.push(path);
+ }
+ }
+ env::set_var(
+ "PATH",
+ &env::join_paths(
+ new_path.iter().filter(|p| env::join_paths(iter::once(p)).is_ok()),
+ )
+ .unwrap(),
+ );
+ }
+
+ // Create the config for macro expansion
+ let features = sess.features_untracked();
+ let recursion_limit = get_recursion_limit(&krate.attrs, sess);
+ let cfg = rustc_expand::expand::ExpansionConfig {
+ features: Some(features),
+ recursion_limit,
+ trace_mac: sess.opts.unstable_opts.trace_macros,
+ should_test: sess.opts.test,
+ span_debug: sess.opts.unstable_opts.span_debug,
+ proc_macro_backtrace: sess.opts.unstable_opts.proc_macro_backtrace,
+ ..rustc_expand::expand::ExpansionConfig::default(crate_name.to_string())
+ };
+
+ let lint_store = LintStoreExpandImpl(lint_store);
+ let mut ecx = ExtCtxt::new(sess, cfg, resolver, Some(&lint_store));
+ // Expand macros now!
+ let krate = sess.time("expand_crate", || ecx.monotonic_expander().expand_crate(krate));
+
+ // The rest is error reporting
+
+ sess.parse_sess.buffered_lints.with_lock(|buffered_lints: &mut Vec<BufferedEarlyLint>| {
+ buffered_lints.append(&mut ecx.buffered_early_lint);
+ });
+
+ sess.time("check_unused_macros", || {
+ ecx.check_unused_macros();
+ });
+
+ let recursion_limit_hit = ecx.reduced_recursion_limit.is_some();
+
+ if cfg!(windows) {
+ env::set_var("PATH", &old_path);
+ }
+
+ if recursion_limit_hit {
+ // If we hit a recursion limit, exit early to avoid later passes getting overwhelmed
+ // with a large AST
+ Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ } else {
+ Ok(krate)
+ }
+ })?;
+
+ sess.time("maybe_building_test_harness", || {
+ rustc_builtin_macros::test_harness::inject(sess, resolver, &mut krate)
+ });
+
+ let has_proc_macro_decls = sess.time("AST_validation", || {
+ rustc_ast_passes::ast_validation::check_crate(sess, &krate, resolver.lint_buffer())
+ });
+
+ let crate_types = sess.crate_types();
+ let is_executable_crate = crate_types.contains(&CrateType::Executable);
+ let is_proc_macro_crate = crate_types.contains(&CrateType::ProcMacro);
+
+ if crate_types.len() > 1 {
+ if is_executable_crate {
+ sess.err("cannot mix `bin` crate type with others");
+ }
+ if is_proc_macro_crate {
+ sess.err("cannot mix `proc-macro` crate type with others");
+ }
+ }
+
+ // For backwards compatibility, we don't try to run proc macro injection
+ // if rustdoc is run on a proc macro crate without '--crate-type proc-macro' being
+ // specified. This should only affect users who manually invoke 'rustdoc', as
+ // 'cargo doc' will automatically pass the proper '--crate-type' flags.
+ // However, we do emit a warning, to let such users know that they should
+ // start passing '--crate-type proc-macro'
+ if has_proc_macro_decls && sess.opts.actually_rustdoc && !is_proc_macro_crate {
+ let mut msg = sess.diagnostic().struct_warn(
+ "Trying to document proc macro crate \
+            without passing '--crate-type proc-macro' to rustdoc",
+ );
+
+ msg.warn("The generated documentation may be incorrect");
+ msg.emit();
+ } else {
+ krate = sess.time("maybe_create_a_macro_crate", || {
+ let is_test_crate = sess.opts.test;
+ rustc_builtin_macros::proc_macro_harness::inject(
+ sess,
+ resolver,
+ krate,
+ is_proc_macro_crate,
+ has_proc_macro_decls,
+ is_test_crate,
+ sess.diagnostic(),
+ )
+ });
+ }
+
+ // Done with macro expansion!
+
+ if sess.opts.unstable_opts.input_stats {
+ eprintln!("Post-expansion node count: {}", count_nodes(&krate));
+ }
+
+ if sess.opts.unstable_opts.hir_stats {
+ hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS");
+ }
+
+ resolver.resolve_crate(&krate);
+
+ // Needs to go *after* expansion to be able to check the results of macro expansion.
+ sess.time("complete_gated_feature_checking", || {
+ rustc_ast_passes::feature_gate::check_crate(&krate, sess);
+ });
+
+ // Add all buffered lints from the `ParseSess` to the `Session`.
+ sess.parse_sess.buffered_lints.with_lock(|buffered_lints| {
+ info!("{} parse sess buffered_lints", buffered_lints.len());
+ for early_lint in buffered_lints.drain(..) {
+ resolver.lint_buffer().add_early_lint(early_lint);
+ }
+ });
+
+ // Gate identifiers containing invalid Unicode codepoints that were recovered during lexing.
+ sess.parse_sess.bad_unicode_identifiers.with_lock(|identifiers| {
+ let mut identifiers: Vec<_> = identifiers.drain().collect();
+ identifiers.sort_by_key(|&(key, _)| key);
+ for (ident, mut spans) in identifiers.into_iter() {
+ spans.sort();
+ if ident == sym::ferris {
+ let first_span = spans[0];
+ sess.diagnostic()
+ .struct_span_err(
+ MultiSpan::from(spans),
+ "Ferris cannot be used as an identifier",
+ )
+ .span_suggestion(
+ first_span,
+ "try using their name instead",
+ "ferris",
+ Applicability::MaybeIncorrect,
+ )
+ .emit();
+ } else {
+ sess.diagnostic().span_err(
+ MultiSpan::from(spans),
+ &format!("identifiers cannot contain emoji: `{}`", ident),
+ );
+ }
+ }
+ });
+
+ sess.time("early_lint_checks", || {
+ let lint_buffer = Some(std::mem::take(resolver.lint_buffer()));
+ rustc_lint::check_ast_node(
+ sess,
+ false,
+ lint_store,
+ resolver.registered_tools(),
+ lint_buffer,
+ rustc_lint::BuiltinCombinedEarlyLintPass::new(),
+ &krate,
+ )
+ });
+
+ Ok(krate)
+}
+
+// Returns all the paths that correspond to generated files.
+fn generated_output_paths(
+ sess: &Session,
+ outputs: &OutputFilenames,
+ exact_name: bool,
+ crate_name: &str,
+) -> Vec<PathBuf> {
+ let mut out_filenames = Vec::new();
+ for output_type in sess.opts.output_types.keys() {
+ let file = outputs.path(*output_type);
+ match *output_type {
+ // If the filename has been overridden using `-o`, it will not be modified
+ // by appending `.rlib`, `.exe`, etc., so we can skip this transformation.
+ OutputType::Exe if !exact_name => {
+ for crate_type in sess.crate_types().iter() {
+ let p = filename_for_input(sess, *crate_type, crate_name, outputs);
+ out_filenames.push(p);
+ }
+ }
+ OutputType::DepInfo if sess.opts.unstable_opts.dep_info_omit_d_target => {
+ // Don't add the dep-info output when omitting it from dep-info targets
+ }
+ _ => {
+ out_filenames.push(file);
+ }
+ }
+ }
+ out_filenames
+}
+
+// Runs `f` on every output file path and returns the first non-None result, or None if `f`
+// returns None for every file path.
+fn check_output<F, T>(output_paths: &[PathBuf], f: F) -> Option<T>
+where
+ F: Fn(&PathBuf) -> Option<T>,
+{
+ for output_path in output_paths {
+ if let Some(result) = f(output_path) {
+ return Some(result);
+ }
+ }
+ None
+}
+
+fn output_contains_path(output_paths: &[PathBuf], input_path: &Path) -> bool {
+ let input_path = input_path.canonicalize().ok();
+ if input_path.is_none() {
+ return false;
+ }
+ let check = |output_path: &PathBuf| {
+ if output_path.canonicalize().ok() == input_path { Some(()) } else { None }
+ };
+ check_output(output_paths, check).is_some()
+}
+
+fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option<PathBuf> {
+ let check = |output_path: &PathBuf| output_path.is_dir().then(|| output_path.clone());
+ check_output(output_paths, check)
+}
+
+fn escape_dep_filename(filename: &str) -> String {
+ // Apparently clang and gcc *only* escape spaces:
+ // https://llvm.org/klaus/clang/commit/9d50634cfc268ecc9a7250226dd5ca0e945240d4
+ filename.replace(' ', "\\ ")
+}
+
+// In Makefile comments, only newlines and `\` need to be escaped.
+// The result can be unescaped by anything that can unescape `escape_default` and friends.
+fn escape_dep_env(symbol: Symbol) -> String {
+ let s = symbol.as_str();
+ let mut escaped = String::with_capacity(s.len());
+ for c in s.chars() {
+ match c {
+ '\n' => escaped.push_str(r"\n"),
+ '\r' => escaped.push_str(r"\r"),
+ '\\' => escaped.push_str(r"\\"),
+ _ => escaped.push(c),
+ }
+ }
+ escaped
+}
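The two escaping schemes above are small enough to restate on plain strings (the real `escape_dep_env` takes a `rustc_span::Symbol`); the asserts in this self-contained sketch show what each rule does.

```rust
// Restatement of the two escaping rules above, on plain &str values.
fn escape_dep_filename(filename: &str) -> String {
    filename.replace(' ', "\\ ")
}

fn escape_dep_env(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\n' => escaped.push_str(r"\n"),
            '\r' => escaped.push_str(r"\r"),
            '\\' => escaped.push_str(r"\\"),
            _ => escaped.push(c),
        }
    }
    escaped
}

fn main() {
    // Only spaces are escaped in file names, matching clang/gcc behaviour.
    assert_eq!(escape_dep_filename("my crate/lib.rs"), "my\\ crate/lib.rs");
    // Env values escape newlines, carriage returns and backslashes so they
    // survive inside a `# env-dep:` Makefile comment.
    assert_eq!(escape_dep_env("a\nb\\c"), r"a\nb\\c");
}
```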
+
+fn write_out_deps(
+ sess: &Session,
+ boxed_resolver: &RefCell<BoxedResolver>,
+ outputs: &OutputFilenames,
+ out_filenames: &[PathBuf],
+) {
+ // Write out dependency rules to the dep-info file if requested
+ if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
+ return;
+ }
+ let deps_filename = outputs.path(OutputType::DepInfo);
+
+ let result = (|| -> io::Result<()> {
+ // Build a list of files used to compile the output and
+ // write Makefile-compatible dependency rules
+ let mut files: Vec<String> = sess
+ .source_map()
+ .files()
+ .iter()
+ .filter(|fmap| fmap.is_real_file())
+ .filter(|fmap| !fmap.is_imported())
+ .map(|fmap| escape_dep_filename(&fmap.name.prefer_local().to_string()))
+ .collect();
+
+ // Account for explicitly marked-to-track files
+ // (e.g. accessed in proc macros).
+ let file_depinfo = sess.parse_sess.file_depinfo.borrow();
+ let extra_tracked_files = file_depinfo.iter().map(|path_sym| {
+ let path = PathBuf::from(path_sym.as_str());
+ let file = FileName::from(path);
+ escape_dep_filename(&file.prefer_local().to_string())
+ });
+ files.extend(extra_tracked_files);
+
+ if sess.binary_dep_depinfo() {
+ if let Some(ref backend) = sess.opts.unstable_opts.codegen_backend {
+ if backend.contains('.') {
+                // If the backend name contains a `.`, it is the path to an external dynamic
+ // library. If not, it is not a path.
+ files.push(backend.to_string());
+ }
+ }
+
+ boxed_resolver.borrow_mut().access(|resolver| {
+ for cnum in resolver.cstore().crates_untracked() {
+ let source = resolver.cstore().crate_source_untracked(cnum);
+ if let Some((path, _)) = &source.dylib {
+ files.push(escape_dep_filename(&path.display().to_string()));
+ }
+ if let Some((path, _)) = &source.rlib {
+ files.push(escape_dep_filename(&path.display().to_string()));
+ }
+ if let Some((path, _)) = &source.rmeta {
+ files.push(escape_dep_filename(&path.display().to_string()));
+ }
+ }
+ });
+ }
+
+ let mut file = BufWriter::new(fs::File::create(&deps_filename)?);
+ for path in out_filenames {
+ writeln!(file, "{}: {}\n", path.display(), files.join(" "))?;
+ }
+
+ // Emit a fake target for each input file to the compilation. This
+ // prevents `make` from spitting out an error if a file is later
+ // deleted. For more info see #28735
+ for path in files {
+ writeln!(file, "{}:", path)?;
+ }
+
+ // Emit special comments with information about accessed environment variables.
+ let env_depinfo = sess.parse_sess.env_depinfo.borrow();
+ if !env_depinfo.is_empty() {
+ let mut envs: Vec<_> = env_depinfo
+ .iter()
+ .map(|(k, v)| (escape_dep_env(*k), v.map(escape_dep_env)))
+ .collect();
+ envs.sort_unstable();
+ writeln!(file)?;
+ for (k, v) in envs {
+ write!(file, "# env-dep:{}", k)?;
+ if let Some(v) = v {
+ write!(file, "={}", v)?;
+ }
+ writeln!(file)?;
+ }
+ }
+
+ Ok(())
+ })();
+
+ match result {
+ Ok(_) => {
+ if sess.opts.json_artifact_notifications {
+ sess.parse_sess
+ .span_diagnostic
+ .emit_artifact_notification(&deps_filename, "dep-info");
+ }
+ }
+ Err(e) => sess.fatal(&format!(
+ "error writing dependencies to `{}`: {}",
+ deps_filename.display(),
+ e
+ )),
+ }
+}
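Putting the three write loops above together, the emitted dep-info file has one rule per output, a phony target per input, and trailing `# env-dep:` comments. The constant below shows that shape for a hypothetical crate; all paths and the variable name are made up.

```rust
// Shape of the dep-info file produced above, for a hypothetical crate with
// two sources and one tracked environment variable.
const EXAMPLE_DEP_INFO: &str = "\
target/debug/example: src/main.rs src/util.rs

src/main.rs:
src/util.rs:

# env-dep:EXAMPLE_FLAG=1
";

fn main() {
    // One rule per output, one phony target per input (so `make` does not
    // error when a source file is deleted), then env-dep comments.
    assert!(EXAMPLE_DEP_INFO.lines().any(|l| l.starts_with("# env-dep:")));
}
```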
+
+pub fn prepare_outputs(
+ sess: &Session,
+ compiler: &Compiler,
+ krate: &ast::Crate,
+ boxed_resolver: &RefCell<BoxedResolver>,
+ crate_name: &str,
+) -> Result<OutputFilenames> {
+ let _timer = sess.timer("prepare_outputs");
+
+ // FIXME: rustdoc passes &[] instead of &krate.attrs here
+ let outputs = util::build_output_filenames(
+ &compiler.input,
+ &compiler.output_dir,
+ &compiler.output_file,
+ &compiler.temps_dir,
+ &krate.attrs,
+ sess,
+ );
+
+ let output_paths =
+ generated_output_paths(sess, &outputs, compiler.output_file.is_some(), crate_name);
+
+ // Ensure the source file isn't accidentally overwritten during compilation.
+ if let Some(ref input_path) = compiler.input_path {
+ if sess.opts.will_create_output_file() {
+ if output_contains_path(&output_paths, input_path) {
+ let reported = sess.err(&format!(
+ "the input file \"{}\" would be overwritten by the generated \
+ executable",
+ input_path.display()
+ ));
+ return Err(reported);
+ }
+ if let Some(dir_path) = output_conflicts_with_dir(&output_paths) {
+ let reported = sess.err(&format!(
+ "the generated executable for the input file \"{}\" conflicts with the \
+ existing directory \"{}\"",
+ input_path.display(),
+ dir_path.display()
+ ));
+ return Err(reported);
+ }
+ }
+ }
+
+ if let Some(ref dir) = compiler.temps_dir {
+ if fs::create_dir_all(dir).is_err() {
+ let reported =
+ sess.err("failed to find or create the directory specified by `--temps-dir`");
+ return Err(reported);
+ }
+ }
+
+ write_out_deps(sess, boxed_resolver, &outputs, &output_paths);
+
+ let only_dep_info = sess.opts.output_types.contains_key(&OutputType::DepInfo)
+ && sess.opts.output_types.len() == 1;
+
+ if !only_dep_info {
+ if let Some(ref dir) = compiler.output_dir {
+ if fs::create_dir_all(dir).is_err() {
+ let reported =
+ sess.err("failed to find or create the directory specified by `--out-dir`");
+ return Err(reported);
+ }
+ }
+ }
+
+ Ok(outputs)
+}
+
+pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
+ let providers = &mut Providers::default();
+ providers.analysis = analysis;
+ providers.hir_crate = rustc_ast_lowering::lower_to_hir;
+ proc_macro_decls::provide(providers);
+ rustc_const_eval::provide(providers);
+ rustc_middle::hir::provide(providers);
+ mir_borrowck::provide(providers);
+ mir_build::provide(providers);
+ rustc_mir_transform::provide(providers);
+ rustc_monomorphize::provide(providers);
+ rustc_privacy::provide(providers);
+ typeck::provide(providers);
+ ty::provide(providers);
+ traits::provide(providers);
+ rustc_passes::provide(providers);
+ rustc_resolve::provide(providers);
+ rustc_traits::provide(providers);
+ rustc_ty_utils::provide(providers);
+ rustc_metadata::provide(providers);
+ rustc_lint::provide(providers);
+ rustc_symbol_mangling::provide(providers);
+ rustc_codegen_ssa::provide(providers);
+ *providers
+});
+
+pub static DEFAULT_EXTERN_QUERY_PROVIDERS: LazyLock<ExternProviders> = LazyLock::new(|| {
+ let mut extern_providers = ExternProviders::default();
+ rustc_metadata::provide_extern(&mut extern_providers);
+ rustc_codegen_ssa::provide_extern(&mut extern_providers);
+ extern_providers
+});
+
+pub struct QueryContext<'tcx> {
+ gcx: &'tcx GlobalCtxt<'tcx>,
+}
+
+impl<'tcx> QueryContext<'tcx> {
+ pub fn enter<F, R>(&mut self, f: F) -> R
+ where
+ F: FnOnce(TyCtxt<'tcx>) -> R,
+ {
+ let icx = ty::tls::ImplicitCtxt::new(self.gcx);
+ ty::tls::enter_context(&icx, |_| f(icx.tcx))
+ }
+}
+
+pub fn create_global_ctxt<'tcx>(
+ compiler: &'tcx Compiler,
+ lint_store: Lrc<LintStore>,
+ krate: Lrc<ast::Crate>,
+ dep_graph: DepGraph,
+ resolver: Rc<RefCell<BoxedResolver>>,
+ outputs: OutputFilenames,
+ crate_name: &str,
+ queries: &'tcx OnceCell<TcxQueries<'tcx>>,
+ global_ctxt: &'tcx OnceCell<GlobalCtxt<'tcx>>,
+ arena: &'tcx WorkerLocal<Arena<'tcx>>,
+ hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
+) -> QueryContext<'tcx> {
+ // We're constructing the HIR here; we don't care what we will
+ // read, since we haven't even constructed the *input* to
+ // incr. comp. yet.
+ dep_graph.assert_ignored();
+
+ let (definitions, cstore, resolver_outputs, resolver_for_lowering) =
+ BoxedResolver::to_resolver_outputs(resolver);
+
+ let sess = &compiler.session();
+ let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);
+
+ let codegen_backend = compiler.codegen_backend();
+ let mut local_providers = *DEFAULT_QUERY_PROVIDERS;
+ codegen_backend.provide(&mut local_providers);
+
+ let mut extern_providers = *DEFAULT_EXTERN_QUERY_PROVIDERS;
+ codegen_backend.provide_extern(&mut extern_providers);
+
+ if let Some(callback) = compiler.override_queries {
+ callback(sess, &mut local_providers, &mut extern_providers);
+ }
+
+ let queries = queries.get_or_init(|| {
+ TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
+ });
+
+ let gcx = sess.time("setup_global_ctxt", || {
+ global_ctxt.get_or_init(move || {
+ TyCtxt::create_global_ctxt(
+ sess,
+ lint_store,
+ arena,
+ hir_arena,
+ definitions,
+ cstore,
+ resolver_outputs,
+ resolver_for_lowering,
+ krate,
+ dep_graph,
+ queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
+ queries.as_dyn(),
+ rustc_query_impl::query_callbacks(arena),
+ crate_name,
+ outputs,
+ )
+ })
+ });
+
+ QueryContext { gcx }
+}
+
+/// Runs the resolution, type-checking, region checking and other
+/// miscellaneous analysis passes on the crate.
+fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
+ rustc_passes::hir_id_validator::check_crate(tcx);
+
+ let sess = tcx.sess;
+ let mut entry_point = None;
+
+ sess.time("misc_checking_1", || {
+ parallel!(
+ {
+ entry_point = sess.time("looking_for_entry_point", || tcx.entry_fn(()));
+
+ sess.time("looking_for_derive_registrar", || {
+ tcx.ensure().proc_macro_decls_static(())
+ });
+
+ CStore::from_tcx(tcx).report_unused_deps(tcx);
+ },
+ {
+ tcx.hir().par_for_each_module(|module| {
+ tcx.ensure().check_mod_loops(module);
+ tcx.ensure().check_mod_attrs(module);
+ tcx.ensure().check_mod_naked_functions(module);
+ tcx.ensure().check_mod_unstable_api_usage(module);
+ tcx.ensure().check_mod_const_bodies(module);
+ });
+ },
+ {
+ sess.time("unused_lib_feature_checking", || {
+ rustc_passes::stability::check_unused_or_stable_features(tcx)
+ });
+ },
+ {
+ // We force these queries to run,
+ // since they might not otherwise get called.
+ // This marks the corresponding crate-level attributes
+ // as used, and ensures that their values are valid.
+ tcx.ensure().limits(());
+ tcx.ensure().stability_index(());
+ }
+ );
+ });
+
+ // passes are timed inside typeck
+ typeck::check_crate(tcx)?;
+
+ sess.time("misc_checking_2", || {
+ parallel!(
+ {
+ sess.time("match_checking", || {
+ tcx.hir().par_body_owners(|def_id| tcx.ensure().check_match(def_id.to_def_id()))
+ });
+ },
+ {
+ sess.time("liveness_and_intrinsic_checking", || {
+ tcx.hir().par_for_each_module(|module| {
+ // this must run before MIR dump, because
+ // "not all control paths return a value" is reported here.
+ //
+ // maybe move the check to a MIR pass?
+ tcx.ensure().check_mod_liveness(module);
+ });
+ });
+ }
+ );
+ });
+
+ sess.time("MIR_borrow_checking", || {
+ tcx.hir().par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id));
+ });
+
+ sess.time("MIR_effect_checking", || {
+ for def_id in tcx.hir().body_owners() {
+ tcx.ensure().thir_check_unsafety(def_id);
+ if !tcx.sess.opts.unstable_opts.thir_unsafeck {
+ rustc_mir_transform::check_unsafety::check_unsafety(tcx, def_id);
+ }
+ tcx.ensure().has_ffi_unwind_calls(def_id);
+
+ if tcx.hir().body_const_context(def_id).is_some() {
+ tcx.ensure()
+ .mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(def_id));
+ }
+ }
+ });
+
+ sess.time("layout_testing", || layout_test::test_layout(tcx));
+
+    // Avoid overwhelming the user with errors if borrow checking failed.
+ // I'm not sure how helpful this is, to be honest, but it avoids a
+ // lot of annoying errors in the ui tests (basically,
+ // lint warnings and so on -- kindck used to do this abort, but
+ // kindck is gone now). -nmatsakis
+ if let Some(reported) = sess.has_errors() {
+ return Err(reported);
+ }
+
+ sess.time("misc_checking_3", || {
+ parallel!(
+ {
+ tcx.ensure().privacy_access_levels(());
+
+ parallel!(
+ {
+ tcx.ensure().check_private_in_public(());
+ },
+ {
+ tcx.hir()
+ .par_for_each_module(|module| tcx.ensure().check_mod_deathness(module));
+ },
+ {
+ sess.time("lint_checking", || {
+ rustc_lint::check_crate(tcx, || {
+ rustc_lint::BuiltinCombinedLateLintPass::new()
+ });
+ });
+ }
+ );
+ },
+ {
+ sess.time("privacy_checking_modules", || {
+ tcx.hir().par_for_each_module(|module| {
+ tcx.ensure().check_mod_privacy(module);
+ });
+ });
+ }
+ );
+
+ // This check has to be run after all lints are done processing. We don't
+ // define a lint filter, as all lint checks should have finished at this point.
+ sess.time("check_lint_expectations", || tcx.check_expectations(None));
+ });
+
+ Ok(())
+}
+
+/// Runs the codegen backend, after which the AST and analysis can
+/// be discarded.
+pub fn start_codegen<'tcx>(
+ codegen_backend: &dyn CodegenBackend,
+ tcx: TyCtxt<'tcx>,
+ outputs: &OutputFilenames,
+) -> Box<dyn Any> {
+ info!("Pre-codegen\n{:?}", tcx.debug_stats());
+
+ let (metadata, need_metadata_module) =
+ rustc_metadata::fs::encode_and_write_metadata(tcx, outputs);
+
+ let codegen = tcx.sess.time("codegen_crate", move || {
+ codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
+ });
+
+ // Don't run these test assertions when not doing codegen. Compiletest tries to build
+ // build-fail tests in check mode first and expects it to not give an error in that case.
+ if tcx.sess.opts.output_types.should_codegen() {
+ rustc_incremental::assert_module_sources::assert_module_sources(tcx);
+ rustc_symbol_mangling::test::report_symbol_names(tcx);
+ }
+
+ info!("Post-codegen\n{:?}", tcx.debug_stats());
+
+ if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
+ if let Err(e) = rustc_mir_transform::dump_mir::emit_mir(tcx, outputs) {
+ tcx.sess.err(&format!("could not emit MIR: {}", e));
+ tcx.sess.abort_if_errors();
+ }
+ }
+
+ codegen
+}
+
+fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit {
+ if let Some(attr) = krate_attrs
+ .iter()
+ .find(|attr| attr.has_name(sym::recursion_limit) && attr.value_str().is_none())
+ {
+ // This is here mainly to check for using a macro, such as
+ // #![recursion_limit = foo!()]. That is not supported since that
+ // would require expanding this while in the middle of expansion,
+ // which needs to know the limit before expanding. Otherwise,
+ // validation would normally be caught in AstValidator (via
+ // `check_builtin_attribute`), but by the time that runs the macro
+ // is expanded, and it doesn't give an error.
+ validate_attr::emit_fatal_malformed_builtin_attribute(
+ &sess.parse_sess,
+ attr,
+ sym::recursion_limit,
+ );
+ }
+ rustc_middle::middle::limits::get_recursion_limit(krate_attrs, sess)
+}
diff --git a/compiler/rustc_interface/src/proc_macro_decls.rs b/compiler/rustc_interface/src/proc_macro_decls.rs
new file mode 100644
index 000000000..5371c513d
--- /dev/null
+++ b/compiler/rustc_interface/src/proc_macro_decls.rs
@@ -0,0 +1,27 @@
+use rustc_hir as hir;
+use rustc_hir::def_id::LocalDefId;
+use rustc_middle::ty::query::Providers;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::symbol::sym;
+
+fn proc_macro_decls_static(tcx: TyCtxt<'_>, (): ()) -> Option<LocalDefId> {
+ let mut finder = Finder { tcx, decls: None };
+
+ for id in tcx.hir().items() {
+ let attrs = finder.tcx.hir().attrs(id.hir_id());
+ if finder.tcx.sess.contains_name(attrs, sym::rustc_proc_macro_decls) {
+ finder.decls = Some(id.def_id);
+ }
+ }
+
+ finder.decls
+}
+
+struct Finder<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ decls: Option<hir::def_id::LocalDefId>,
+}
+
+pub(crate) fn provide(providers: &mut Providers) {
+ *providers = Providers { proc_macro_decls_static, ..*providers };
+}
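The `provide` function above shows the in-tree way to install a query provider. External drivers can hook into the same table through `Config::override_queries` (see interface.rs). The sketch below assumes the rustc-internal types are in scope and uses a deliberately trivial stand-in provider, so it is illustration only and not buildable standalone.

```rust
// Sketch: overriding a query provider from a driver via `override_queries`.
use rustc_middle::ty::{self, TyCtxt};
use rustc_session::Session;

fn override_queries(
    _sess: &Session,
    providers: &mut ty::query::Providers,
    _extern_providers: &mut ty::query::ExternProviders,
) {
    // Swap in our own provider, analogous to what `provide` above does.
    providers.proc_macro_decls_static = proc_macro_decls_static;
}

fn proc_macro_decls_static(
    _tcx: TyCtxt<'_>,
    (): (),
) -> Option<rustc_hir::def_id::LocalDefId> {
    // A deliberately trivial stand-in provider, for illustration only.
    None
}

// Wired up when building the compiler configuration:
// config.override_queries = Some(override_queries);
```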
diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs
new file mode 100644
index 000000000..73402ae08
--- /dev/null
+++ b/compiler/rustc_interface/src/queries.rs
@@ -0,0 +1,402 @@
+use crate::interface::{Compiler, Result};
+use crate::passes::{self, BoxedResolver, QueryContext};
+
+use rustc_ast as ast;
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_codegen_ssa::CodegenResults;
+use rustc_data_structures::svh::Svh;
+use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_incremental::DepGraphFuture;
+use rustc_lint::LintStore;
+use rustc_middle::arena::Arena;
+use rustc_middle::dep_graph::DepGraph;
+use rustc_middle::ty::{GlobalCtxt, TyCtxt};
+use rustc_query_impl::Queries as TcxQueries;
+use rustc_session::config::{self, OutputFilenames, OutputType};
+use rustc_session::{output::find_crate_name, Session};
+use rustc_span::symbol::sym;
+use std::any::Any;
+use std::cell::{Ref, RefCell, RefMut};
+use std::rc::Rc;
+
+/// Represents the result of a query.
+///
+/// This result can be stolen with the [`take`] method and generated with the [`compute`] method.
+///
+/// [`take`]: Self::take
+/// [`compute`]: Self::compute
+pub struct Query<T> {
+ result: RefCell<Option<Result<T>>>,
+}
+
+impl<T> Query<T> {
+ fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<&Query<T>> {
+ let mut result = self.result.borrow_mut();
+ if result.is_none() {
+ *result = Some(f());
+ }
+ result.as_ref().unwrap().as_ref().map(|_| self).map_err(|err| *err)
+ }
+
+ /// Takes ownership of the query result. Further attempts to take or peek the query
+ /// result will panic unless it is generated by calling the `compute` method.
+ pub fn take(&self) -> T {
+ self.result.borrow_mut().take().expect("missing query result").unwrap()
+ }
+
+ /// Borrows the query result using the RefCell. Panics if the result is stolen.
+ pub fn peek(&self) -> Ref<'_, T> {
+ Ref::map(self.result.borrow(), |r| {
+ r.as_ref().unwrap().as_ref().expect("missing query result")
+ })
+ }
+
+ /// Mutably borrows the query result using the RefCell. Panics if the result is stolen.
+ pub fn peek_mut(&self) -> RefMut<'_, T> {
+ RefMut::map(self.result.borrow_mut(), |r| {
+ r.as_mut().unwrap().as_mut().expect("missing query result")
+ })
+ }
+}
+
+impl<T> Default for Query<T> {
+ fn default() -> Self {
+ Query { result: RefCell::new(None) }
+ }
+}
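The semantics of `Query<T>` boil down to a memoizing `RefCell<Option<T>>`: `compute` fills it once, `peek` borrows it, and `take` steals it so any later `peek` panics. A self-contained model of just that behaviour (the name `Memo` is illustrative, and error handling is omitted):

```rust
// A toy model of the `Query<T>` cell above.
use std::cell::{Ref, RefCell};

struct Memo<T> {
    result: RefCell<Option<T>>,
}

impl<T> Memo<T> {
    fn new() -> Self {
        Memo { result: RefCell::new(None) }
    }
    fn compute(&self, f: impl FnOnce() -> T) -> &Self {
        let mut slot = self.result.borrow_mut();
        if slot.is_none() {
            *slot = Some(f()); // only the first compute runs the closure
        }
        self
    }
    fn peek(&self) -> Ref<'_, T> {
        Ref::map(self.result.borrow(), |r| r.as_ref().expect("missing result"))
    }
    fn take(&self) -> T {
        self.result.borrow_mut().take().expect("missing result")
    }
}

fn main() {
    let q = Memo::new();
    q.compute(|| {
        println!("computed once");
        41 + 1
    });
    assert_eq!(*q.peek(), 42); // borrow without consuming
    q.compute(|| unreachable!("already cached")); // second compute is a no-op
    assert_eq!(q.take(), 42); // steal the value; peeking now would panic
}
```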
+
+pub struct Queries<'tcx> {
+ compiler: &'tcx Compiler,
+ gcx: OnceCell<GlobalCtxt<'tcx>>,
+ queries: OnceCell<TcxQueries<'tcx>>,
+
+ arena: WorkerLocal<Arena<'tcx>>,
+ hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,
+
+ dep_graph_future: Query<Option<DepGraphFuture>>,
+ parse: Query<ast::Crate>,
+ crate_name: Query<String>,
+ register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
+ expansion: Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>,
+ dep_graph: Query<DepGraph>,
+ prepare_outputs: Query<OutputFilenames>,
+ global_ctxt: Query<QueryContext<'tcx>>,
+ ongoing_codegen: Query<Box<dyn Any>>,
+}
+
+impl<'tcx> Queries<'tcx> {
+ pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> {
+ Queries {
+ compiler,
+ gcx: OnceCell::new(),
+ queries: OnceCell::new(),
+ arena: WorkerLocal::new(|_| Arena::default()),
+ hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
+ dep_graph_future: Default::default(),
+ parse: Default::default(),
+ crate_name: Default::default(),
+ register_plugins: Default::default(),
+ expansion: Default::default(),
+ dep_graph: Default::default(),
+ prepare_outputs: Default::default(),
+ global_ctxt: Default::default(),
+ ongoing_codegen: Default::default(),
+ }
+ }
+
+ fn session(&self) -> &Lrc<Session> {
+ &self.compiler.sess
+ }
+ fn codegen_backend(&self) -> &Lrc<Box<dyn CodegenBackend>> {
+ self.compiler.codegen_backend()
+ }
+
+ fn dep_graph_future(&self) -> Result<&Query<Option<DepGraphFuture>>> {
+ self.dep_graph_future.compute(|| {
+ let sess = self.session();
+ Ok(sess.opts.build_dep_graph().then(|| rustc_incremental::load_dep_graph(sess)))
+ })
+ }
+
+ pub fn parse(&self) -> Result<&Query<ast::Crate>> {
+ self.parse.compute(|| {
+ passes::parse(self.session(), &self.compiler.input)
+ .map_err(|mut parse_error| parse_error.emit())
+ })
+ }
+
+ pub fn register_plugins(&self) -> Result<&Query<(ast::Crate, Lrc<LintStore>)>> {
+ self.register_plugins.compute(|| {
+ let crate_name = self.crate_name()?.peek().clone();
+ let krate = self.parse()?.take();
+
+ let empty: &(dyn Fn(&Session, &mut LintStore) + Sync + Send) = &|_, _| {};
+ let (krate, lint_store) = passes::register_plugins(
+ self.session(),
+ &*self.codegen_backend().metadata_loader(),
+ self.compiler.register_lints.as_deref().unwrap_or_else(|| empty),
+ krate,
+ &crate_name,
+ )?;
+
+ // Compute the dependency graph (in the background). We want to do
+ // this as early as possible, to give the DepGraph maximum time to
+ // load before dep_graph() is called, but it also can't happen
+ // until after rustc_incremental::prepare_session_directory() is
+ // called, which happens within passes::register_plugins().
+ self.dep_graph_future().ok();
+
+ Ok((krate, Lrc::new(lint_store)))
+ })
+ }
+
+ pub fn crate_name(&self) -> Result<&Query<String>> {
+ self.crate_name.compute(|| {
+ Ok({
+ let parse_result = self.parse()?;
+ let krate = parse_result.peek();
+ // parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
+ find_crate_name(self.session(), &krate.attrs, &self.compiler.input)
+ })
+ })
+ }
+
+ pub fn expansion(
+ &self,
+ ) -> Result<&Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>> {
+ tracing::trace!("expansion");
+ self.expansion.compute(|| {
+ let crate_name = self.crate_name()?.peek().clone();
+ let (krate, lint_store) = self.register_plugins()?.take();
+ let _timer = self.session().timer("configure_and_expand");
+ let sess = self.session();
+ let mut resolver = passes::create_resolver(
+ sess.clone(),
+ self.codegen_backend().metadata_loader(),
+ &krate,
+ &crate_name,
+ );
+ let krate = resolver.access(|resolver| {
+ passes::configure_and_expand(sess, &lint_store, krate, &crate_name, resolver)
+ })?;
+ Ok((Lrc::new(krate), Rc::new(RefCell::new(resolver)), lint_store))
+ })
+ }
+
+ fn dep_graph(&self) -> Result<&Query<DepGraph>> {
+ self.dep_graph.compute(|| {
+ let sess = self.session();
+ let future_opt = self.dep_graph_future()?.take();
+ let dep_graph = future_opt
+ .and_then(|future| {
+ let (prev_graph, prev_work_products) =
+ sess.time("blocked_on_dep_graph_loading", || future.open().open(sess));
+
+ rustc_incremental::build_dep_graph(sess, prev_graph, prev_work_products)
+ })
+ .unwrap_or_else(DepGraph::new_disabled);
+ Ok(dep_graph)
+ })
+ }
+
+ pub fn prepare_outputs(&self) -> Result<&Query<OutputFilenames>> {
+ self.prepare_outputs.compute(|| {
+ let (krate, boxed_resolver, _) = &*self.expansion()?.peek();
+ let crate_name = self.crate_name()?.peek();
+ passes::prepare_outputs(
+ self.session(),
+ self.compiler,
+ krate,
+ &*boxed_resolver,
+ &crate_name,
+ )
+ })
+ }
+
+ pub fn global_ctxt(&'tcx self) -> Result<&Query<QueryContext<'tcx>>> {
+ self.global_ctxt.compute(|| {
+ let crate_name = self.crate_name()?.peek().clone();
+ let outputs = self.prepare_outputs()?.peek().clone();
+ let dep_graph = self.dep_graph()?.peek().clone();
+ let (krate, resolver, lint_store) = self.expansion()?.take();
+ Ok(passes::create_global_ctxt(
+ self.compiler,
+ lint_store,
+ krate,
+ dep_graph,
+ resolver,
+ outputs,
+ &crate_name,
+ &self.queries,
+ &self.gcx,
+ &self.arena,
+ &self.hir_arena,
+ ))
+ })
+ }
+
+ pub fn ongoing_codegen(&'tcx self) -> Result<&Query<Box<dyn Any>>> {
+ self.ongoing_codegen.compute(|| {
+ let outputs = self.prepare_outputs()?;
+ self.global_ctxt()?.peek_mut().enter(|tcx| {
+ tcx.analysis(()).ok();
+
+ // Don't do code generation if there were any errors
+ self.session().compile_status()?;
+
+ // Hook for UI tests.
+ Self::check_for_rustc_errors_attr(tcx);
+
+ Ok(passes::start_codegen(&***self.codegen_backend(), tcx, &*outputs.peek()))
+ })
+ })
+ }
+
+ /// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used
+ /// to write UI tests that actually test that compilation succeeds without reporting
+ /// an error.
+ fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
+ let Some((def_id, _)) = tcx.entry_fn(()) else { return };
+ for attr in tcx.get_attrs(def_id, sym::rustc_error) {
+ match attr.meta_item_list() {
+ // Check if there is a `#[rustc_error(delay_span_bug_from_inside_query)]`.
+ Some(list)
+ if list.iter().any(|list_item| {
+ matches!(
+ list_item.ident().map(|i| i.name),
+ Some(sym::delay_span_bug_from_inside_query)
+ )
+ }) =>
+ {
+ tcx.ensure().trigger_delay_span_bug(def_id);
+ }
+
+ // Bare `#[rustc_error]`.
+ None => {
+ tcx.sess.span_fatal(
+ tcx.def_span(def_id),
+ "fatal error triggered by #[rustc_error]",
+ );
+ }
+
+ // Some other attribute.
+ Some(_) => {
+ tcx.sess.span_warn(
+ tcx.def_span(def_id),
+ "unexpected annotation used with `#[rustc_error(...)]!",
+ );
+ }
+ }
+ }
+ }
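+
+ // A UI test exercising the bare form of this hook might look like the
+ // following sketch (hypothetical test file; the attribute is gated behind
+ // `#![feature(rustc_attrs)]`):
+ //
+ //     #![feature(rustc_attrs)]
+ //     #[rustc_error]
+ //     fn main() {} //~ ERROR fatal error triggered by #[rustc_error]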
+
+ pub fn linker(&'tcx self) -> Result<Linker> {
+ let sess = self.session().clone();
+ let codegen_backend = self.codegen_backend().clone();
+
+ let dep_graph = self.dep_graph()?.peek().clone();
+ let prepare_outputs = self.prepare_outputs()?.take();
+ let crate_hash = self.global_ctxt()?.peek_mut().enter(|tcx| tcx.crate_hash(LOCAL_CRATE));
+ let ongoing_codegen = self.ongoing_codegen()?.take();
+
+ Ok(Linker {
+ sess,
+ codegen_backend,
+
+ dep_graph,
+ prepare_outputs,
+ crate_hash,
+ ongoing_codegen,
+ })
+ }
+}
+
+pub struct Linker {
+ // compilation inputs
+ sess: Lrc<Session>,
+ codegen_backend: Lrc<Box<dyn CodegenBackend>>,
+
+ // compilation outputs
+ dep_graph: DepGraph,
+ prepare_outputs: OutputFilenames,
+ crate_hash: Svh,
+ ongoing_codegen: Box<dyn Any>,
+}
+
+impl Linker {
+ pub fn link(self) -> Result<()> {
+ let (codegen_results, work_products) = self.codegen_backend.join_codegen(
+ self.ongoing_codegen,
+ &self.sess,
+ &self.prepare_outputs,
+ )?;
+
+ self.sess.compile_status()?;
+
+ let sess = &self.sess;
+ let dep_graph = self.dep_graph;
+ sess.time("serialize_work_products", || {
+ rustc_incremental::save_work_product_index(sess, &dep_graph, work_products)
+ });
+
+ let prof = self.sess.prof.clone();
+ prof.generic_activity("drop_dep_graph").run(move || drop(dep_graph));
+
+ // Now that we won't touch anything in the incremental compilation directory
+ // any more, we can finalize it (which involves renaming it)
+ rustc_incremental::finalize_session_directory(&self.sess, self.crate_hash);
+
+ if !self
+ .sess
+ .opts
+ .output_types
+ .keys()
+ .any(|&i| i == OutputType::Exe || i == OutputType::Metadata)
+ {
+ return Ok(());
+ }
+
+ if sess.opts.unstable_opts.no_link {
+ let encoded = CodegenResults::serialize_rlink(&codegen_results);
+ let rlink_file = self.prepare_outputs.with_extension(config::RLINK_EXT);
+ std::fs::write(&rlink_file, encoded).map_err(|err| {
+ sess.fatal(&format!("failed to write file {}: {}", rlink_file.display(), err));
+ })?;
+ return Ok(());
+ }
+
+ let _timer = sess.prof.verbose_generic_activity("link_crate");
+ self.codegen_backend.link(&self.sess, codegen_results, &self.prepare_outputs)
+ }
+}
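+
+// The `no_link` branch above supports splitting compilation and linking into
+// two rustc invocations (illustrative command lines; the actual file names
+// depend on the configured outputs):
+//
+//     rustc -Z no-link foo.rs          # serializes codegen results to foo.rlink
+//     rustc -Z link-only foo.rlink     # later, performs only the link step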
+
+impl Compiler {
+ pub fn enter<F, T>(&self, f: F) -> T
+ where
+ F: for<'tcx> FnOnce(&'tcx Queries<'tcx>) -> T,
+ {
+ let mut _timer = None;
+ let queries = Queries::new(self);
+ let ret = f(&queries);
+
+ // NOTE: intentionally does not compute the global context if it hasn't been built yet,
+ // since that likely means there was a parse error.
+ if let Some(Ok(gcx)) = &mut *queries.global_ctxt.result.borrow_mut() {
+ // We assume that no queries are run past here. If there are new queries
+ // after this point, they'll show up as "<unknown>" in self-profiling data.
+ {
+ let _prof_timer =
+ queries.session().prof.generic_activity("self_profile_alloc_query_strings");
+ gcx.enter(rustc_query_impl::alloc_self_profile_query_strings);
+ }
+
+ self.session()
+ .time("serialize_dep_graph", || gcx.enter(rustc_incremental::save_dep_graph));
+ }
+
+ _timer = Some(self.session().timer("free_global_ctxt"));
+
+ ret
+ }
+}
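+
+// A minimal driver built on top of this API looks roughly like the following
+// sketch (assuming the `Config` / `run_compiler` entry points from
+// `interface.rs`; error handling elided):
+//
+//     interface::run_compiler(config, |compiler| {
+//         let linker = compiler.enter(|queries| {
+//             queries.global_ctxt()?.peek_mut().enter(|tcx| tcx.analysis(()))?;
+//             queries.ongoing_codegen()?;
+//             queries.linker()
+//         })?;
+//         linker.link()
+//     })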
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
new file mode 100644
index 000000000..a9fdfa241
--- /dev/null
+++ b/compiler/rustc_interface/src/tests.rs
@@ -0,0 +1,830 @@
+#![cfg_attr(not(bootstrap), allow(rustc::bad_opt_access))]
+use crate::interface::parse_cfgspecs;
+
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig};
+use rustc_session::config::InstrumentCoverage;
+use rustc_session::config::Strip;
+use rustc_session::config::{build_configuration, build_session_options, to_crate_config};
+use rustc_session::config::{
+ rustc_optgroups, ErrorOutputType, ExternLocation, LocationDetail, Options, Passes,
+};
+use rustc_session::config::{
+ BranchProtection, Externs, OomStrategy, OutputType, OutputTypes, PAuthKey, PacRet,
+ ProcMacroExecutionStrategy, SymbolManglingVersion, WasiExecModel,
+};
+use rustc_session::config::{CFGuard, ExternEntry, LinkerPluginLto, LtoCli, SwitchWithOptPath};
+use rustc_session::lint::Level;
+use rustc_session::search_paths::SearchPath;
+use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
+use rustc_session::{build_session, getopts, DiagnosticOutput, Session};
+use rustc_span::edition::{Edition, DEFAULT_EDITION};
+use rustc_span::symbol::sym;
+use rustc_span::SourceFileHashAlgorithm;
+use rustc_target::spec::{CodeModel, LinkerFlavor, MergeFunctions, PanicStrategy};
+use rustc_target::spec::{
+ RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo, StackProtector, TlsModel,
+};
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::iter::FromIterator;
+use std::num::NonZeroUsize;
+use std::path::{Path, PathBuf};
+
+type CfgSpecs = FxHashSet<(String, Option<String>)>;
+
+fn build_session_options_and_crate_config(matches: getopts::Matches) -> (Options, CfgSpecs) {
+ let sessopts = build_session_options(&matches);
+ let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
+ (sessopts, cfg)
+}
+
+fn mk_session(matches: getopts::Matches) -> (Session, CfgSpecs) {
+ let registry = registry::Registry::new(&[]);
+ let (sessopts, cfg) = build_session_options_and_crate_config(matches);
+ let sess = build_session(
+ sessopts,
+ None,
+ None,
+ registry,
+ DiagnosticOutput::Default,
+ Default::default(),
+ None,
+ None,
+ );
+ (sess, cfg)
+}
+
+fn new_public_extern_entry<S, I>(locations: I) -> ExternEntry
+where
+ S: Into<String>,
+ I: IntoIterator<Item = S>,
+{
+ let locations: BTreeSet<CanonicalizedPath> =
+ locations.into_iter().map(|s| CanonicalizedPath::new(Path::new(&s.into()))).collect();
+
+ ExternEntry {
+ location: ExternLocation::ExactPaths(locations),
+ is_private_dep: false,
+ add_prelude: true,
+ nounused_dep: false,
+ }
+}
+
+fn optgroups() -> getopts::Options {
+ let mut opts = getopts::Options::new();
+ for group in rustc_optgroups() {
+ (group.apply)(&mut opts);
+ }
+ opts
+}
+
+fn mk_map<K: Ord, V>(entries: Vec<(K, V)>) -> BTreeMap<K, V> {
+ BTreeMap::from_iter(entries.into_iter())
+}
+
+fn assert_same_clone(x: &Options) {
+ assert_eq!(x.dep_tracking_hash(true), x.clone().dep_tracking_hash(true));
+ assert_eq!(x.dep_tracking_hash(false), x.clone().dep_tracking_hash(false));
+}
+
+fn assert_same_hash(x: &Options, y: &Options) {
+ assert_eq!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+ assert_eq!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+ // Check clone
+ assert_same_clone(x);
+ assert_same_clone(y);
+}
+
+fn assert_different_hash(x: &Options, y: &Options) {
+ assert_ne!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+ assert_ne!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+ // Check clone
+ assert_same_clone(x);
+ assert_same_clone(y);
+}
+
+fn assert_non_crate_hash_different(x: &Options, y: &Options) {
+ assert_eq!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+ assert_ne!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+ // Check clone
+ assert_same_clone(x);
+ assert_same_clone(y);
+}
+
+// When the user supplies --test we should implicitly supply --cfg test
+#[test]
+fn test_switch_implies_cfg_test() {
+ rustc_span::create_default_session_globals_then(|| {
+ let matches = optgroups().parse(&["--test".to_string()]).unwrap();
+ let (sess, cfg) = mk_session(matches);
+ let cfg = build_configuration(&sess, to_crate_config(cfg));
+ assert!(cfg.contains(&(sym::test, None)));
+ });
+}
+
+// When the user supplies --test and --cfg test, don't implicitly add another --cfg test
+#[test]
+fn test_switch_implies_cfg_test_unless_cfg_test() {
+ rustc_span::create_default_session_globals_then(|| {
+ let matches = optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]).unwrap();
+ let (sess, cfg) = mk_session(matches);
+ let cfg = build_configuration(&sess, to_crate_config(cfg));
+ let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
+ assert!(test_items.next().is_some());
+ assert!(test_items.next().is_none());
+ });
+}
+
+#[test]
+fn test_can_print_warnings() {
+ rustc_span::create_default_session_globals_then(|| {
+ let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
+ let (sess, _) = mk_session(matches);
+ assert!(!sess.diagnostic().can_emit_warnings());
+ });
+
+ rustc_span::create_default_session_globals_then(|| {
+ let matches =
+ optgroups().parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()]).unwrap();
+ let (sess, _) = mk_session(matches);
+ assert!(sess.diagnostic().can_emit_warnings());
+ });
+
+ rustc_span::create_default_session_globals_then(|| {
+ let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
+ let (sess, _) = mk_session(matches);
+ assert!(sess.diagnostic().can_emit_warnings());
+ });
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_paths() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.output_types = OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("./some/thing")))]);
+ v2.output_types = OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("/some/thing")))]);
+ v3.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
+
+ assert_non_crate_hash_different(&v1, &v2);
+ assert_non_crate_hash_different(&v1, &v3);
+ assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+
+ v1.output_types = OutputTypes::new(&[
+ (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
+ (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
+ ]);
+
+ v2.output_types = OutputTypes::new(&[
+ (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
+ (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
+ ]);
+
+ assert_same_hash(&v1, &v2);
+}
+
+#[test]
+fn test_externs_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.externs = Externs::new(mk_map(vec![
+ (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+ (String::from("d"), new_public_extern_entry(vec!["e", "f"])),
+ ]));
+
+ v2.externs = Externs::new(mk_map(vec![
+ (String::from("d"), new_public_extern_entry(vec!["e", "f"])),
+ (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+ ]));
+
+ v3.externs = Externs::new(mk_map(vec![
+ (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+ (String::from("d"), new_public_extern_entry(vec!["f", "e"])),
+ ]));
+
+ assert_same_hash(&v1, &v2);
+ assert_same_hash(&v1, &v3);
+ assert_same_hash(&v2, &v3);
+}
+
+#[test]
+fn test_lints_tracking_hash_different_values() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.lint_opts = vec![
+ (String::from("a"), Level::Allow),
+ (String::from("b"), Level::Warn),
+ (String::from("c"), Level::Deny),
+ (String::from("d"), Level::Forbid),
+ ];
+
+ v2.lint_opts = vec![
+ (String::from("a"), Level::Allow),
+ (String::from("b"), Level::Warn),
+ (String::from("X"), Level::Deny),
+ (String::from("d"), Level::Forbid),
+ ];
+
+ v3.lint_opts = vec![
+ (String::from("a"), Level::Allow),
+ (String::from("b"), Level::Warn),
+ (String::from("c"), Level::Forbid),
+ (String::from("d"), Level::Deny),
+ ];
+
+ assert_non_crate_hash_different(&v1, &v2);
+ assert_non_crate_hash_different(&v1, &v3);
+ assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_lints_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+
+ v1.lint_opts = vec![
+ (String::from("a"), Level::Allow),
+ (String::from("b"), Level::Warn),
+ (String::from("c"), Level::Deny),
+ (String::from("d"), Level::Forbid),
+ ];
+
+ v2.lint_opts = vec![
+ (String::from("a"), Level::Allow),
+ (String::from("c"), Level::Deny),
+ (String::from("b"), Level::Warn),
+ (String::from("d"), Level::Forbid),
+ ];
+
+ // The hash should be order-dependent
+ assert_non_crate_hash_different(&v1, &v2);
+}
+
+#[test]
+fn test_lint_cap_hash_different() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let v3 = Options::default();
+
+ v1.lint_cap = Some(Level::Forbid);
+ v2.lint_cap = Some(Level::Allow);
+
+ assert_non_crate_hash_different(&v1, &v2);
+ assert_non_crate_hash_different(&v1, &v3);
+ assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_search_paths_tracking_hash_different_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+ let mut v4 = Options::default();
+
+ const JSON: ErrorOutputType = ErrorOutputType::Json {
+ pretty: false,
+ json_rendered: HumanReadableErrorType::Default(ColorConfig::Never),
+ };
+
+ // Reference
+ v1.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
+ v1.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
+ v1.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v1.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v1.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v2.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
+ v2.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v2.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
+ v2.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v2.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v3.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
+ v3.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v3.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
+ v3.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v3.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v4.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
+ v4.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
+ v4.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
+ v4.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v4.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
+
+ assert_same_hash(&v1, &v2);
+ assert_same_hash(&v1, &v3);
+ assert_same_hash(&v1, &v4);
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_values() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+ let mut v4 = Options::default();
+ let mut v5 = Options::default();
+
+ // Reference
+ v1.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ // Change label
+ v2.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("X"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ // Change kind
+ v3.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ // Change new-name
+ v4.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: Some(String::from("X")),
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ // Change verbatim
+ v5.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: Some(true),
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ assert_different_hash(&v1, &v2);
+ assert_different_hash(&v1, &v3);
+ assert_different_hash(&v1, &v4);
+ assert_different_hash(&v1, &v5);
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ // Reference
+ v1.libs = vec![
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ v2.libs = vec![
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ ];
+
+ v3.libs = vec![
+ NativeLib {
+ name: String::from("c"),
+ new_name: None,
+ kind: NativeLibKind::Unspecified,
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("a"),
+ new_name: None,
+ kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+ verbatim: None,
+ },
+ NativeLib {
+ name: String::from("b"),
+ new_name: None,
+ kind: NativeLibKind::Framework { as_needed: None },
+ verbatim: None,
+ },
+ ];
+
+ // The hash should be order-dependent
+ assert_different_hash(&v1, &v2);
+ assert_different_hash(&v1, &v3);
+ assert_different_hash(&v2, &v3);
+}
+
+#[test]
+fn test_codegen_options_tracking_hash() {
+ let reference = Options::default();
+ let mut opts = Options::default();
+
+ macro_rules! untracked {
+ ($name: ident, $non_default_value: expr) => {
+ assert_ne!(opts.cg.$name, $non_default_value);
+ opts.cg.$name = $non_default_value;
+ assert_same_hash(&reference, &opts);
+ };
+ }
+
+ // Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
+ // This list is in alphabetical order.
+ untracked!(ar, String::from("abc"));
+ untracked!(codegen_units, Some(42));
+ untracked!(default_linker_libraries, true);
+ untracked!(extra_filename, String::from("extra-filename"));
+ untracked!(incremental, Some(String::from("abc")));
+ // `link_arg` is omitted because it just forwards to `link_args`.
+ untracked!(link_args, vec![String::from("abc"), String::from("def")]);
+ untracked!(link_self_contained, Some(true));
+ untracked!(linker, Some(PathBuf::from("linker")));
+ untracked!(linker_flavor, Some(LinkerFlavor::Gcc));
+ untracked!(no_stack_check, true);
+ untracked!(remark, Passes::Some(vec![String::from("pass1"), String::from("pass2")]));
+ untracked!(rpath, true);
+ untracked!(save_temps, true);
+ untracked!(strip, Strip::Debuginfo);
+
+ macro_rules! tracked {
+ ($name: ident, $non_default_value: expr) => {
+ opts = reference.clone();
+ assert_ne!(opts.cg.$name, $non_default_value);
+ opts.cg.$name = $non_default_value;
+ assert_different_hash(&reference, &opts);
+ };
+ }
+
+ // Make sure that changing a [TRACKED] option changes the hash.
+ // This list is in alphabetical order.
+ tracked!(code_model, Some(CodeModel::Large));
+ tracked!(control_flow_guard, CFGuard::Checks);
+ tracked!(debug_assertions, Some(true));
+ tracked!(debuginfo, 0xdeadbeef);
+ tracked!(embed_bitcode, false);
+ tracked!(force_frame_pointers, Some(false));
+ tracked!(force_unwind_tables, Some(true));
+ tracked!(inline_threshold, Some(0xf007ba11));
+ tracked!(instrument_coverage, Some(InstrumentCoverage::All));
+ tracked!(linker_plugin_lto, LinkerPluginLto::LinkerPluginAuto);
+ tracked!(link_dead_code, Some(true));
+ tracked!(llvm_args, vec![String::from("1"), String::from("2")]);
+ tracked!(lto, LtoCli::Fat);
+ tracked!(metadata, vec![String::from("A"), String::from("B")]);
+ tracked!(no_prepopulate_passes, true);
+ tracked!(no_redzone, Some(true));
+ tracked!(no_vectorize_loops, true);
+ tracked!(no_vectorize_slp, true);
+ tracked!(opt_level, "3".to_string());
+ tracked!(overflow_checks, Some(true));
+ tracked!(panic, Some(PanicStrategy::Abort));
+ tracked!(passes, vec![String::from("1"), String::from("2")]);
+ tracked!(prefer_dynamic, true);
+ tracked!(profile_generate, SwitchWithOptPath::Enabled(None));
+ tracked!(profile_use, Some(PathBuf::from("abc")));
+ tracked!(relocation_model, Some(RelocModel::Pic));
+ tracked!(soft_float, true);
+ tracked!(split_debuginfo, Some(SplitDebuginfo::Packed));
+ tracked!(symbol_mangling_version, Some(SymbolManglingVersion::V0));
+ tracked!(target_cpu, Some(String::from("abc")));
+ tracked!(target_feature, String::from("all the features, all of them"));
+}
+
+#[test]
+fn test_top_level_options_tracked_no_crate() {
+ let reference = Options::default();
+ let mut opts;
+
+ macro_rules! tracked {
+ ($name: ident, $non_default_value: expr) => {
+ opts = reference.clone();
+ assert_ne!(opts.$name, $non_default_value);
+ opts.$name = $non_default_value;
+ // The crate hash should be the same
+ assert_eq!(reference.dep_tracking_hash(true), opts.dep_tracking_hash(true));
+ // The incremental hash should be different
+ assert_ne!(reference.dep_tracking_hash(false), opts.dep_tracking_hash(false));
+ };
+ }
+
+ // Make sure that changing a [TRACKED_NO_CRATE_HASH] option leaves the crate hash unchanged but changes the incremental hash.
+ // This list is in alphabetical order.
+ tracked!(remap_path_prefix, vec![("/home/bors/rust".into(), "src".into())]);
+ tracked!(
+ real_rust_source_base_dir,
+ Some("/home/bors/rust/.rustup/toolchains/nightly/lib/rustlib/src/rust".into())
+ );
+}
+
+#[test]
+fn test_unstable_options_tracking_hash() {
+ let reference = Options::default();
+ let mut opts = Options::default();
+
+ macro_rules! untracked {
+ ($name: ident, $non_default_value: expr) => {
+ assert_ne!(opts.unstable_opts.$name, $non_default_value);
+ opts.unstable_opts.$name = $non_default_value;
+ assert_same_hash(&reference, &opts);
+ };
+ }
+
+ // Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
+ // This list is in alphabetical order.
+ untracked!(assert_incr_state, Some(String::from("loaded")));
+ untracked!(deduplicate_diagnostics, false);
+ untracked!(dep_tasks, true);
+ untracked!(dlltool, Some(PathBuf::from("custom_dlltool.exe")));
+ untracked!(dont_buffer_diagnostics, true);
+ untracked!(dump_dep_graph, true);
+ untracked!(dump_drop_tracking_cfg, Some("cfg.dot".to_string()));
+ untracked!(dump_mir, Some(String::from("abc")));
+ untracked!(dump_mir_dataflow, true);
+ untracked!(dump_mir_dir, String::from("abc"));
+ untracked!(dump_mir_exclude_pass_number, true);
+ untracked!(dump_mir_graphviz, true);
+ untracked!(emit_stack_sizes, true);
+ untracked!(future_incompat_test, true);
+ untracked!(hir_stats, true);
+ untracked!(identify_regions, true);
+ untracked!(incremental_ignore_spans, true);
+ untracked!(incremental_info, true);
+ untracked!(incremental_verify_ich, true);
+ untracked!(input_stats, true);
+ untracked!(keep_hygiene_data, true);
+ untracked!(link_native_libraries, false);
+ untracked!(llvm_time_trace, true);
+ untracked!(ls, true);
+ untracked!(macro_backtrace, true);
+ untracked!(meta_stats, true);
+ untracked!(mir_pretty_relative_line_numbers, true);
+ untracked!(nll_facts, true);
+ untracked!(no_analysis, true);
+ untracked!(no_interleave_lints, true);
+ untracked!(no_leak_check, true);
+ untracked!(no_parallel_llvm, true);
+ untracked!(parse_only, true);
+ untracked!(perf_stats, true);
+ // `pre_link_arg` is omitted because it just forwards to `pre_link_args`.
+ untracked!(pre_link_args, vec![String::from("abc"), String::from("def")]);
+ untracked!(profile_closures, true);
+ untracked!(print_llvm_passes, true);
+ untracked!(print_mono_items, Some(String::from("abc")));
+ untracked!(print_type_sizes, true);
+ untracked!(proc_macro_backtrace, true);
+ untracked!(proc_macro_execution_strategy, ProcMacroExecutionStrategy::CrossThread);
+ untracked!(query_dep_graph, true);
+ untracked!(save_analysis, true);
+ untracked!(self_profile, SwitchWithOptPath::Enabled(None));
+ untracked!(self_profile_events, Some(vec![String::new()]));
+ untracked!(span_debug, true);
+ untracked!(span_free_formats, true);
+ untracked!(temps_dir, Some(String::from("abc")));
+ untracked!(threads, 99);
+ untracked!(time, true);
+ untracked!(time_llvm_passes, true);
+ untracked!(time_passes, true);
+ untracked!(trace_macros, true);
+ untracked!(trim_diagnostic_paths, false);
+ untracked!(ui_testing, true);
+ untracked!(unpretty, Some("expanded".to_string()));
+ untracked!(unstable_options, true);
+ untracked!(validate_mir, true);
+ untracked!(verbose, true);
+
+ macro_rules! tracked {
+ ($name: ident, $non_default_value: expr) => {
+ opts = reference.clone();
+ assert_ne!(opts.unstable_opts.$name, $non_default_value);
+ opts.unstable_opts.$name = $non_default_value;
+ assert_different_hash(&reference, &opts);
+ };
+ }
+
+ // Make sure that changing a [TRACKED] option changes the hash.
+ // This list is in alphabetical order.
+ tracked!(allow_features, Some(vec![String::from("lang_items")]));
+ tracked!(always_encode_mir, true);
+ tracked!(asm_comments, true);
+ tracked!(assume_incomplete_release, true);
+ tracked!(binary_dep_depinfo, true);
+ tracked!(box_noalias, Some(false));
+ tracked!(
+ branch_protection,
+ Some(BranchProtection {
+ bti: true,
+ pac_ret: Some(PacRet { leaf: true, key: PAuthKey::B })
+ })
+ );
+ tracked!(chalk, true);
+ tracked!(codegen_backend, Some("abc".to_string()));
+ tracked!(crate_attr, vec!["abc".to_string()]);
+ tracked!(debug_info_for_profiling, true);
+ tracked!(debug_macros, true);
+ tracked!(dep_info_omit_d_target, true);
+ tracked!(drop_tracking, true);
+ tracked!(export_executable_symbols, true);
+ tracked!(dual_proc_macros, true);
+ tracked!(dwarf_version, Some(5));
+ tracked!(emit_thin_lto, false);
+ tracked!(fewer_names, Some(true));
+ tracked!(force_unstable_if_unmarked, true);
+ tracked!(fuel, Some(("abc".to_string(), 99)));
+ tracked!(function_sections, Some(false));
+ tracked!(human_readable_cgu_names, true);
+ tracked!(inline_in_all_cgus, Some(true));
+ tracked!(inline_mir, Some(true));
+ tracked!(inline_mir_hint_threshold, Some(123));
+ tracked!(inline_mir_threshold, Some(123));
+ tracked!(instrument_coverage, Some(InstrumentCoverage::All));
+ tracked!(instrument_mcount, true);
+ tracked!(link_only, true);
+ tracked!(llvm_plugins, vec![String::from("plugin_name")]);
+ tracked!(location_detail, LocationDetail { file: true, line: false, column: false });
+ tracked!(merge_functions, Some(MergeFunctions::Disabled));
+ tracked!(mir_emit_retag, true);
+ tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]);
+ tracked!(mir_opt_level, Some(4));
+ tracked!(move_size_limit, Some(4096));
+ tracked!(mutable_noalias, Some(true));
+ tracked!(new_llvm_pass_manager, Some(true));
+ tracked!(no_generate_arange_section, true);
+ tracked!(no_link, true);
+ tracked!(no_unique_section_names, true);
+ tracked!(no_profiler_runtime, true);
+ tracked!(oom, OomStrategy::Panic);
+ tracked!(osx_rpath_install_name, true);
+ tracked!(panic_abort_tests, true);
+ tracked!(panic_in_drop, PanicStrategy::Abort);
+ tracked!(pick_stable_methods_before_any_unstable, false);
+ tracked!(plt, Some(true));
+ tracked!(polonius, true);
+ tracked!(precise_enum_drop_elaboration, false);
+ tracked!(print_fuel, Some("abc".to_string()));
+ tracked!(profile, true);
+ tracked!(profile_emit, Some(PathBuf::from("abc")));
+ tracked!(profiler_runtime, "abc".to_string());
+ tracked!(profile_sample_use, Some(PathBuf::from("abc")));
+ tracked!(relax_elf_relocations, Some(true));
+ tracked!(relro_level, Some(RelroLevel::Full));
+ tracked!(remap_cwd_prefix, Some(PathBuf::from("abc")));
+ tracked!(report_delayed_bugs, true);
+ tracked!(sanitizer, SanitizerSet::ADDRESS);
+ tracked!(sanitizer_memory_track_origins, 2);
+ tracked!(sanitizer_recover, SanitizerSet::ADDRESS);
+ tracked!(saturating_float_casts, Some(true));
+ tracked!(share_generics, Some(true));
+ tracked!(show_span, Some(String::from("abc")));
+ tracked!(simulate_remapped_rust_src_base, Some(PathBuf::from("/rustc/abc")));
+ tracked!(src_hash_algorithm, Some(SourceFileHashAlgorithm::Sha1));
+ tracked!(stack_protector, StackProtector::All);
+ tracked!(symbol_mangling_version, Some(SymbolManglingVersion::V0));
+ tracked!(teach, true);
+ tracked!(thinlto, Some(true));
+ tracked!(thir_unsafeck, true);
+ tracked!(tls_model, Some(TlsModel::GeneralDynamic));
+ tracked!(translate_remapped_path_to_local_path, false);
+ tracked!(trap_unreachable, Some(false));
+ tracked!(treat_err_as_bug, NonZeroUsize::new(1));
+ tracked!(tune_cpu, Some(String::from("abc")));
+ tracked!(uninit_const_chunk_threshold, 123);
+ tracked!(unleash_the_miri_inside_of_you, true);
+ tracked!(use_ctors_section, Some(true));
+ tracked!(verify_llvm_ir, true);
+ tracked!(virtual_function_elimination, true);
+ tracked!(wasi_exec_model, Some(WasiExecModel::Reactor));
+
+ macro_rules! tracked_no_crate_hash {
+ ($name: ident, $non_default_value: expr) => {
+ opts = reference.clone();
+ assert_ne!(opts.unstable_opts.$name, $non_default_value);
+ opts.unstable_opts.$name = $non_default_value;
+ assert_non_crate_hash_different(&reference, &opts);
+ };
+ }
+ tracked_no_crate_hash!(no_codegen, true);
+}
+
+#[test]
+fn test_edition_parsing() {
+ // test default edition
+ let options = Options::default();
+ assert!(options.edition == DEFAULT_EDITION);
+
+ let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap();
+ let (sessopts, _) = build_session_options_and_crate_config(matches);
+ assert!(sessopts.edition == Edition::Edition2018)
+}
diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs
new file mode 100644
index 000000000..5e5596f13
--- /dev/null
+++ b/compiler/rustc_interface/src/util.rs
@@ -0,0 +1,672 @@
+use libloading::Library;
+use rustc_ast as ast;
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+#[cfg(parallel_compiler)]
+use rustc_data_structures::jobserver;
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::registry::Registry;
+#[cfg(parallel_compiler)]
+use rustc_middle::ty::tls;
+use rustc_parse::validate_attr;
+#[cfg(parallel_compiler)]
+use rustc_query_impl::{QueryContext, QueryCtxt};
+use rustc_session as session;
+use rustc_session::config::CheckCfg;
+use rustc_session::config::{self, CrateType};
+use rustc_session::config::{ErrorOutputType, Input, OutputFilenames};
+use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
+use rustc_session::parse::CrateConfig;
+use rustc_session::{early_error, filesearch, output, DiagnosticOutput, Session};
+use rustc_span::edition::Edition;
+use rustc_span::lev_distance::find_best_match_for_name;
+use rustc_span::source_map::FileLoader;
+use rustc_span::symbol::{sym, Symbol};
+use std::env;
+use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
+use std::mem;
+#[cfg(not(parallel_compiler))]
+use std::panic;
+use std::path::{Path, PathBuf};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::OnceLock;
+use std::thread;
+use tracing::info;
+
+/// Function pointer type that constructs a new CodegenBackend.
+pub type MakeBackendFn = fn() -> Box<dyn CodegenBackend>;
+
+/// Adds `target_feature = "..."` cfgs for a variety of platform
+/// specific features (SSE, NEON etc.).
+///
+/// This is performed by checking whether a set of permitted features
+/// is available on the target machine, by querying LLVM.
+pub fn add_configuration(
+ cfg: &mut CrateConfig,
+ sess: &mut Session,
+ codegen_backend: &dyn CodegenBackend,
+) {
+ let tf = sym::target_feature;
+
+ let unstable_target_features = codegen_backend.target_features(sess, true);
+ sess.unstable_target_features.extend(unstable_target_features.iter().cloned());
+
+ let target_features = codegen_backend.target_features(sess, false);
+ sess.target_features.extend(target_features.iter().cloned());
+
+ cfg.extend(target_features.into_iter().map(|feat| (tf, Some(feat))));
+
+ if sess.crt_static(None) {
+ cfg.insert((tf, Some(sym::crt_dash_static)));
+ }
+}
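+
+// Downstream crate code can then condition on the injected cfgs, e.g.
+// (illustrative):
+//
+//     #[cfg(target_feature = "sse2")]
+//     fn simd_path() { /* ... */ }
+//
+//     if cfg!(target_feature = "crt-static") { /* CRT is statically linked */ }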
+
+pub fn create_session(
+ sopts: config::Options,
+ cfg: FxHashSet<(String, Option<String>)>,
+ check_cfg: CheckCfg,
+ diagnostic_output: DiagnosticOutput,
+ file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
+ input_path: Option<PathBuf>,
+ lint_caps: FxHashMap<lint::LintId, lint::Level>,
+ make_codegen_backend: Option<
+ Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
+ >,
+ descriptions: Registry,
+) -> (Lrc<Session>, Lrc<Box<dyn CodegenBackend>>) {
+ let codegen_backend = if let Some(make_codegen_backend) = make_codegen_backend {
+ make_codegen_backend(&sopts)
+ } else {
+ get_codegen_backend(
+ &sopts.maybe_sysroot,
+ sopts.unstable_opts.codegen_backend.as_ref().map(|name| &name[..]),
+ )
+ };
+
+ // target_override is documented to be called before init(), so this is okay
+ let target_override = codegen_backend.target_override(&sopts);
+
+ let bundle = match rustc_errors::fluent_bundle(
+ sopts.maybe_sysroot.clone(),
+ sysroot_candidates(),
+ sopts.unstable_opts.translate_lang.clone(),
+ sopts.unstable_opts.translate_additional_ftl.as_deref(),
+ sopts.unstable_opts.translate_directionality_markers,
+ ) {
+ Ok(bundle) => bundle,
+ Err(e) => {
+ early_error(sopts.error_format, &format!("failed to load fluent bundle: {e}"));
+ }
+ };
+
+ let mut sess = session::build_session(
+ sopts,
+ input_path,
+ bundle,
+ descriptions,
+ diagnostic_output,
+ lint_caps,
+ file_loader,
+ target_override,
+ );
+
+ codegen_backend.init(&sess);
+
+ let mut cfg = config::build_configuration(&sess, config::to_crate_config(cfg));
+ add_configuration(&mut cfg, &mut sess, &*codegen_backend);
+
+ let mut check_cfg = config::to_crate_check_config(check_cfg);
+ check_cfg.fill_well_known();
+
+ sess.parse_sess.config = cfg;
+ sess.parse_sess.check_config = check_cfg;
+
+ (Lrc::new(sess), Lrc::new(codegen_backend))
+}
+
+const STACK_SIZE: usize = 8 * 1024 * 1024;
+
+fn get_stack_size() -> Option<usize> {
+ // FIXME: Hacks on hacks. If the env is trying to override the stack size
+ // then *don't* set it explicitly.
+ env::var_os("RUST_MIN_STACK").is_none().then_some(STACK_SIZE)
+}
+
+/// Like a `thread::Builder::spawn` followed by a `join()`, but avoids the need
+/// for `'static` bounds.
+#[cfg(not(parallel_compiler))]
+fn scoped_thread<F: FnOnce() -> R + Send, R: Send>(cfg: thread::Builder, f: F) -> R {
+ // SAFETY: join() is called immediately, so any closure captures are still
+ // alive.
+ match unsafe { cfg.spawn_unchecked(f) }.unwrap().join() {
+ Ok(v) => v,
+ Err(e) => panic::resume_unwind(e),
+ }
+}
+
+#[cfg(not(parallel_compiler))]
+pub fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
+ edition: Edition,
+ _threads: usize,
+ f: F,
+) -> R {
+ let mut cfg = thread::Builder::new().name("rustc".to_string());
+
+ if let Some(size) = get_stack_size() {
+ cfg = cfg.stack_size(size);
+ }
+
+ let main_handler = move || rustc_span::create_session_globals_then(edition, f);
+
+ scoped_thread(cfg, main_handler)
+}
+
+/// Creates a new thread and forwards information in thread locals to it.
+/// The new thread runs the deadlock handler.
+/// Must only be called when a deadlock is about to happen.
+#[cfg(parallel_compiler)]
+unsafe fn handle_deadlock() {
+ let registry = rustc_rayon_core::Registry::current();
+
+ let query_map = tls::with(|tcx| {
+ QueryCtxt::from_tcx(tcx)
+ .try_collect_active_jobs()
+ .expect("active jobs shouldn't be locked in deadlock handler")
+ });
+ thread::spawn(move || rustc_query_impl::deadlock(query_map, &registry));
+}
+
+#[cfg(parallel_compiler)]
+pub fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
+ edition: Edition,
+ threads: usize,
+ f: F,
+) -> R {
+ let mut config = rayon::ThreadPoolBuilder::new()
+ .thread_name(|_| "rustc".to_string())
+ .acquire_thread_handler(jobserver::acquire_thread)
+ .release_thread_handler(jobserver::release_thread)
+ .num_threads(threads)
+ .deadlock_handler(|| unsafe { handle_deadlock() });
+
+ if let Some(size) = get_stack_size() {
+ config = config.stack_size(size);
+ }
+
+ let with_pool = move |pool: &rayon::ThreadPool| pool.install(f);
+
+ rustc_span::create_session_globals_then(edition, || {
+ rustc_span::with_session_globals(|session_globals| {
+ // The main handler runs for each Rayon worker thread and sets up
+ // the thread local rustc uses. `session_globals` is captured and set
+ // on the new threads.
+ let main_handler = move |thread: rayon::ThreadBuilder| {
+ rustc_span::set_session_globals_then(session_globals, || thread.run())
+ };
+
+ config.build_scoped(main_handler, with_pool).unwrap()
+ })
+ })
+}
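+
+// Both variants are invoked the same way by `interface::run_compiler`,
+// roughly (sketch):
+//
+//     run_in_thread_pool_with_globals(config.opts.edition, config.opts.unstable_opts.threads, || {
+//         // ... build the Session and run the supplied closure ...
+//     })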
+
+fn load_backend_from_dylib(path: &Path) -> MakeBackendFn {
+ let lib = unsafe { Library::new(path) }.unwrap_or_else(|err| {
+ let err = format!("couldn't load codegen backend {:?}: {}", path, err);
+ early_error(ErrorOutputType::default(), &err);
+ });
+
+ let backend_sym = unsafe { lib.get::<MakeBackendFn>(b"__rustc_codegen_backend") }
+ .unwrap_or_else(|e| {
+ let err = format!("couldn't load codegen backend: {}", e);
+ early_error(ErrorOutputType::default(), &err);
+ });
+
+ // Intentionally leak the dynamic library. We can't ever unload it
+ // since the library can make things that will live arbitrarily long.
+ let backend_sym = unsafe { backend_sym.into_raw() };
+ mem::forget(lib);
+
+ *backend_sym
+}
+
+/// Get the codegen backend based on the name and specified sysroot.
+///
+/// A name of `None` indicates that the default backend should be used.
+pub fn get_codegen_backend(
+ maybe_sysroot: &Option<PathBuf>,
+ backend_name: Option<&str>,
+) -> Box<dyn CodegenBackend> {
+ static LOAD: OnceLock<unsafe fn() -> Box<dyn CodegenBackend>> = OnceLock::new();
+
+ let load = LOAD.get_or_init(|| {
+ let default_codegen_backend = option_env!("CFG_DEFAULT_CODEGEN_BACKEND").unwrap_or("llvm");
+
+ match backend_name.unwrap_or(default_codegen_backend) {
+ filename if filename.contains('.') => load_backend_from_dylib(filename.as_ref()),
+ #[cfg(feature = "llvm")]
+ "llvm" => rustc_codegen_llvm::LlvmCodegenBackend::new,
+ backend_name => get_codegen_sysroot(maybe_sysroot, backend_name),
+ }
+ });
+
+ // SAFETY: In case of a builtin codegen backend this is safe. In case of an external codegen
+ // backend we hope that the backend links against the same rustc_driver version. If this is not
+ // the case, we get UB.
+ unsafe { load() }
+}
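+
+// The backend name usually comes from `-Z codegen-backend`; both a builtin
+// name and a path to a dylib (anything containing a '.') are accepted, e.g.
+// (illustrative):
+//
+//     rustc -Z codegen-backend=cranelift foo.rs
+//     rustc -Z codegen-backend=/path/to/librustc_codegen_cranelift.so foo.rs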
+
+// This is used for rustdoc, but it uses similar machinery to codegen backend
+// loading, so we leave the code here. It is potentially useful for other tools
+// that want to invoke the rustc binary while linking to rustc as well.
+pub fn rustc_path<'a>() -> Option<&'a Path> {
+ static RUSTC_PATH: OnceLock<Option<PathBuf>> = OnceLock::new();
+
+ const BIN_PATH: &str = env!("RUSTC_INSTALL_BINDIR");
+
+ RUSTC_PATH.get_or_init(|| get_rustc_path_inner(BIN_PATH)).as_ref().map(|v| &**v)
+}
+
+fn get_rustc_path_inner(bin_path: &str) -> Option<PathBuf> {
+ sysroot_candidates().iter().find_map(|sysroot| {
+ let candidate = sysroot.join(bin_path).join(if cfg!(target_os = "windows") {
+ "rustc.exe"
+ } else {
+ "rustc"
+ });
+ candidate.exists().then_some(candidate)
+ })
+}
+
+fn sysroot_candidates() -> Vec<PathBuf> {
+ let target = session::config::host_triple();
+ let mut sysroot_candidates = vec![filesearch::get_or_default_sysroot()];
+ let path = current_dll_path().and_then(|s| s.canonicalize().ok());
+ if let Some(dll) = path {
+ // use `parent` twice to chop off the file name and then also the
+ // directory containing the dll which should be either `lib` or `bin`.
+ if let Some(path) = dll.parent().and_then(|p| p.parent()) {
+ // The original `path` pointed at the `rustc_driver` crate's dll.
+ // Now that dll should only be in one of two locations. The first is
+ // in the compiler's libdir, for example `$sysroot/lib/*.dll`. The
+ // other is the target's libdir, for example
+ // `$sysroot/lib/rustlib/$target/lib/*.dll`.
+ //
+ // We don't know which, so let's assume that if our `path` above
+ // ends in `$target` we *could* be in the target libdir, and always
+ // assume that we may be in the main libdir.
+ sysroot_candidates.push(path.to_owned());
+
+ if path.ends_with(target) {
+ sysroot_candidates.extend(
+ path.parent() // chop off `$target`
+ .and_then(|p| p.parent()) // chop off `rustlib`
+ .and_then(|p| p.parent()) // chop off `lib`
+ .map(|s| s.to_owned()),
+ );
+ }
+ }
+ }
+
+ return sysroot_candidates;
+
+ #[cfg(unix)]
+ fn current_dll_path() -> Option<PathBuf> {
+ use std::ffi::{CStr, OsStr};
+ use std::os::unix::prelude::*;
+
+ unsafe {
+ let addr = current_dll_path as usize as *mut _;
+ let mut info = mem::zeroed();
+ if libc::dladdr(addr, &mut info) == 0 {
+ info!("dladdr failed");
+ return None;
+ }
+ if info.dli_fname.is_null() {
+ info!("dladdr returned null pointer");
+ return None;
+ }
+ let bytes = CStr::from_ptr(info.dli_fname).to_bytes();
+ let os = OsStr::from_bytes(bytes);
+ Some(PathBuf::from(os))
+ }
+ }
+
+ #[cfg(windows)]
+ fn current_dll_path() -> Option<PathBuf> {
+ use std::ffi::OsString;
+ use std::io;
+ use std::os::windows::prelude::*;
+ use std::ptr;
+
+ use winapi::um::libloaderapi::{
+ GetModuleFileNameW, GetModuleHandleExW, GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
+ };
+
+ unsafe {
+ let mut module = ptr::null_mut();
+ let r = GetModuleHandleExW(
+ GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
+ current_dll_path as usize as *mut _,
+ &mut module,
+ );
+ if r == 0 {
+ info!("GetModuleHandleExW failed: {}", io::Error::last_os_error());
+ return None;
+ }
+ let mut space = Vec::with_capacity(1024);
+ let r = GetModuleFileNameW(module, space.as_mut_ptr(), space.capacity() as u32);
+ if r == 0 {
+ info!("GetModuleFileNameW failed: {}", io::Error::last_os_error());
+ return None;
+ }
+ let r = r as usize;
+ if r >= space.capacity() {
+ info!("our buffer was too small? {}", io::Error::last_os_error());
+ return None;
+ }
+ space.set_len(r);
+ let os = OsString::from_wide(&space);
+ Some(PathBuf::from(os))
+ }
+ }
+}
+
+fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> MakeBackendFn {
+ // For now we only allow this function to be called once as it'll dlopen a
+ // few things, which seems to work best if we only do that once. In
+ // general this assertion never trips due to the once guard in `get_codegen_backend`,
+ // but there are a few manual calls to this function in this file that we
+ // protect against.
+ static LOADED: AtomicBool = AtomicBool::new(false);
+ assert!(
+ !LOADED.fetch_or(true, Ordering::SeqCst),
+ "cannot load the default codegen backend twice"
+ );
+
+ let target = session::config::host_triple();
+ let sysroot_candidates = sysroot_candidates();
+
+ let sysroot = maybe_sysroot
+ .iter()
+ .chain(sysroot_candidates.iter())
+ .map(|sysroot| {
+ filesearch::make_target_lib_path(sysroot, target).with_file_name("codegen-backends")
+ })
+ .find(|f| {
+ info!("codegen backend candidate: {}", f.display());
+ f.exists()
+ });
+ let sysroot = sysroot.unwrap_or_else(|| {
+ let candidates = sysroot_candidates
+ .iter()
+ .map(|p| p.display().to_string())
+ .collect::<Vec<_>>()
+ .join("\n* ");
+ let err = format!(
+ "failed to find a `codegen-backends` folder \
+ in the sysroot candidates:\n* {}",
+ candidates
+ );
+ early_error(ErrorOutputType::default(), &err);
+ });
+ info!("probing {} for a codegen backend", sysroot.display());
+
+ let d = sysroot.read_dir().unwrap_or_else(|e| {
+ let err = format!(
+ "failed to load default codegen backend, couldn't \
+ read `{}`: {}",
+ sysroot.display(),
+ e
+ );
+ early_error(ErrorOutputType::default(), &err);
+ });
+
+ let mut file: Option<PathBuf> = None;
+
+ let expected_names = &[
+ format!("rustc_codegen_{}-{}", backend_name, release_str().expect("CFG_RELEASE")),
+ format!("rustc_codegen_{}", backend_name),
+ ];
+ for entry in d.filter_map(|e| e.ok()) {
+ let path = entry.path();
+ let Some(filename) = path.file_name().and_then(|s| s.to_str()) else { continue };
+ if !(filename.starts_with(DLL_PREFIX) && filename.ends_with(DLL_SUFFIX)) {
+ continue;
+ }
+ let name = &filename[DLL_PREFIX.len()..filename.len() - DLL_SUFFIX.len()];
+ if !expected_names.iter().any(|expected| expected == name) {
+ continue;
+ }
+ if let Some(ref prev) = file {
+ let err = format!(
+ "duplicate codegen backends found\n\
+ first: {}\n\
+ second: {}\n\
+ ",
+ prev.display(),
+ path.display()
+ );
+ early_error(ErrorOutputType::default(), &err);
+ }
+ file = Some(path.clone());
+ }
+
+ match file {
+ Some(ref s) => load_backend_from_dylib(s),
+ None => {
+ let err = format!("unsupported builtin codegen backend `{}`", backend_name);
+ early_error(ErrorOutputType::default(), &err);
+ }
+ }
+}
+
+pub(crate) fn check_attr_crate_type(
+ sess: &Session,
+ attrs: &[ast::Attribute],
+ lint_buffer: &mut LintBuffer,
+) {
+ // Validate any `#![crate_type]` attributes, buffering a lint for unknown values.
+ for a in attrs.iter() {
+ if a.has_name(sym::crate_type) {
+ if let Some(n) = a.value_str() {
+ if categorize_crate_type(n).is_some() {
+ return;
+ }
+
+ if let ast::MetaItemKind::NameValue(spanned) = a.meta_kind().unwrap() {
+ let span = spanned.span;
+ let lev_candidate = find_best_match_for_name(
+ &CRATE_TYPES.iter().map(|(k, _)| *k).collect::<Vec<_>>(),
+ n,
+ None,
+ );
+ if let Some(candidate) = lev_candidate {
+ lint_buffer.buffer_lint_with_diagnostic(
+ lint::builtin::UNKNOWN_CRATE_TYPES,
+ ast::CRATE_NODE_ID,
+ span,
+ "invalid `crate_type` value",
+ BuiltinLintDiagnostics::UnknownCrateTypes(
+ span,
+ "did you mean".to_string(),
+ format!("\"{}\"", candidate),
+ ),
+ );
+ } else {
+ lint_buffer.buffer_lint(
+ lint::builtin::UNKNOWN_CRATE_TYPES,
+ ast::CRATE_NODE_ID,
+ span,
+ "invalid `crate_type` value",
+ );
+ }
+ }
+ } else {
+ // This is here mainly to check for using a macro, such as
+ // #![crate_type = foo!()]. That is not supported since the
+ // crate type needs to be known very early in compilation, long
+ // before expansion. Validation would normally be caught by
+ // AstValidator (via `check_builtin_attribute`), but by the time
+ // that runs the macro has already been expanded, so no error is
+ // reported.
+ validate_attr::emit_fatal_malformed_builtin_attribute(
+ &sess.parse_sess,
+ a,
+ sym::crate_type,
+ );
+ }
+ }
+ }
+}
+
+const CRATE_TYPES: &[(Symbol, CrateType)] = &[
+ (sym::rlib, CrateType::Rlib),
+ (sym::dylib, CrateType::Dylib),
+ (sym::cdylib, CrateType::Cdylib),
+ (sym::lib, config::default_lib_output()),
+ (sym::staticlib, CrateType::Staticlib),
+ (sym::proc_dash_macro, CrateType::ProcMacro),
+ (sym::bin, CrateType::Executable),
+];
+
+fn categorize_crate_type(s: Symbol) -> Option<CrateType> {
+ Some(CRATE_TYPES.iter().find(|(key, _)| *key == s)?.1)
+}
+
+pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<CrateType> {
+ // Unconditionally collect crate types from attributes to make them used
+ let attr_types: Vec<CrateType> = attrs
+ .iter()
+ .filter_map(|a| {
+ if a.has_name(sym::crate_type) {
+ match a.value_str() {
+ Some(s) => categorize_crate_type(s),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ // If we're generating a test executable, ignore all other output styles,
+ // whether they come from attributes or the command line.
+ if session.opts.test {
+ return vec![CrateType::Executable];
+ }
+
+ // Command line flags take precedence over attributes. If no crate types are
+ // specified on the command line, reuse the empty `base` Vec to hold the
+ // types found in crate attributes.
+ // JUSTIFICATION: before wrapper fn is available
+ #[cfg_attr(not(bootstrap), allow(rustc::bad_opt_access))]
+ let mut base = session.opts.crate_types.clone();
+ if base.is_empty() {
+ base.extend(attr_types);
+ if base.is_empty() {
+ base.push(output::default_output_for_target(session));
+ } else {
+ base.sort();
+ base.dedup();
+ }
+ }
+
+ base.retain(|crate_type| {
+ let res = !output::invalid_output_for_target(session, *crate_type);
+
+ if !res {
+ session.warn(&format!(
+ "dropping unsupported crate type `{}` for target `{}`",
+ *crate_type, session.opts.target_triple
+ ));
+ }
+
+ res
+ });
+
+ base
+}
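+
+// For example, a crate with the attributes
+//
+//     #![crate_type = "lib"]
+//     #![crate_type = "cdylib"]
+//
+// yields `Rlib` and `Cdylib` here (since `lib` maps to the default lib output),
+// unless `--crate-type` was given on the command line, which takes precedence.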
+
+pub fn build_output_filenames(
+ input: &Input,
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>,
+ temps_dir: &Option<PathBuf>,
+ attrs: &[ast::Attribute],
+ sess: &Session,
+) -> OutputFilenames {
+ match *ofile {
+ None => {
+ // "-" as input file will cause the parser to read from stdin so we
+ // have to make up a name
+ // We want to toss everything after the final '.'
+ let dirpath = (*odir).as_ref().cloned().unwrap_or_default();
+
+ // If a crate name is present, we use it as the link name
+ let stem = sess
+ .opts
+ .crate_name
+ .clone()
+ .or_else(|| rustc_attr::find_crate_name(sess, attrs).map(|n| n.to_string()))
+ .unwrap_or_else(|| input.filestem().to_owned());
+
+ OutputFilenames::new(
+ dirpath,
+ stem,
+ None,
+ temps_dir.clone(),
+ sess.opts.cg.extra_filename.clone(),
+ sess.opts.output_types.clone(),
+ )
+ }
+
+ Some(ref out_file) => {
+ let unnamed_output_types =
+ sess.opts.output_types.values().filter(|a| a.is_none()).count();
+ let ofile = if unnamed_output_types > 1 {
+ sess.warn(
+ "due to multiple output types requested, the explicitly specified \
+ output file name will be adapted for each output type",
+ );
+ None
+ } else {
+ if !sess.opts.cg.extra_filename.is_empty() {
+ sess.warn("ignoring -C extra-filename flag due to -o flag");
+ }
+ Some(out_file.clone())
+ };
+ if odir.is_some() {
+ sess.warn("ignoring --out-dir flag due to -o flag");
+ }
+
+ OutputFilenames::new(
+ out_file.parent().unwrap_or_else(|| Path::new("")).to_path_buf(),
+ out_file.file_stem().unwrap_or_default().to_str().unwrap().to_string(),
+ ofile,
+ temps_dir.clone(),
+ sess.opts.cg.extra_filename.clone(),
+ sess.opts.output_types.clone(),
+ )
+ }
+ }
+}
+
+/// Returns a version string such as "1.46.0 (04488afe3 2020-08-24)"
+pub fn version_str() -> Option<&'static str> {
+ option_env!("CFG_VERSION")
+}
+
+/// Returns a version string such as "0.12.0-dev".
+pub fn release_str() -> Option<&'static str> {
+ option_env!("CFG_RELEASE")
+}
+
+/// Returns the full SHA1 hash of HEAD of the Git repo from which rustc was built.
+pub fn commit_hash_str() -> Option<&'static str> {
+ option_env!("CFG_VER_HASH")
+}
+
+/// Returns the "commit date" of HEAD of the Git repo from which rustc was built as a static string.
+pub fn commit_date_str() -> Option<&'static str> {
+ option_env!("CFG_VER_DATE")
+}