author      Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:06:31 +0000
committer   Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:06:31 +0000
commit      2ff14448863ac1a1dd9533461708e29aae170c2d (patch)
tree        85b9fea2bbfe3f06473cfa381eed11f273b57c5c /compiler/rustc_macros/src/query.rs
parent      Adding debian version 1.64.0+dfsg1-1. (diff)
download    rustc-2ff14448863ac1a1dd9533461708e29aae170c2d.tar.xz
            rustc-2ff14448863ac1a1dd9533461708e29aae170c2d.zip
Adding debian version 1.65.0+dfsg1-2. (tag: debian/1.65.0+dfsg1-2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_macros/src/query.rs')
-rw-r--r--  compiler/rustc_macros/src/query.rs | 502
1 file changed, 154 insertions(+), 348 deletions(-)
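For orientation, the input this proc macro consumes looks roughly as follows after the patch. The query name, key type, and modifier bodies below are illustrative; only the modifier keywords (desc, cache_on_disk_if, arena_cache, eval_always, depth_limit, separate_provide_extern, ...) correspond to what the rewritten parser accepts.

    rustc_queries! {
        query type_of(key: DefId) -> Ty<'tcx> {
            desc { |tcx| "computing type of `{}`", tcx.def_path_str(key) }
            cache_on_disk_if { key.is_local() }
            separate_provide_extern
        }
    }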
diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs
index a69126533..d49926c90 100644
--- a/compiler/rustc_macros/src/query.rs
+++ b/compiler/rustc_macros/src/query.rs
@@ -1,139 +1,17 @@
use proc_macro::TokenStream;
-use proc_macro2::{Delimiter, TokenTree};
use quote::{quote, quote_spanned};
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use syn::{
- braced, parenthesized, parse_macro_input, parse_quote, AttrStyle, Attribute, Block, Error,
- Expr, Ident, ReturnType, Token, Type,
+ braced, parenthesized, parse_macro_input, parse_quote, token, AttrStyle, Attribute, Block,
+ Error, Expr, Ident, Pat, ReturnType, Token, Type,
};
mod kw {
syn::custom_keyword!(query);
}
-/// Ident or a wildcard `_`.
-struct IdentOrWild(Ident);
-
-impl Parse for IdentOrWild {
- fn parse(input: ParseStream<'_>) -> Result<Self> {
- Ok(if input.peek(Token![_]) {
- let underscore = input.parse::<Token![_]>()?;
- IdentOrWild(Ident::new("_", underscore.span()))
- } else {
- IdentOrWild(input.parse()?)
- })
- }
-}
-
-/// A modifier for a query
-enum QueryModifier {
- /// The description of the query.
- Desc(Option<Ident>, Punctuated<Expr, Token![,]>),
-
- /// Use this type for the in-memory cache.
- Storage(Type),
-
- /// Cache the query to disk if the `Expr` returns true.
- Cache(Option<IdentOrWild>, Block),
-
- /// Custom code to load the query from disk.
- LoadCached(Ident, Ident, Block),
-
- /// A cycle error for this query aborting the compilation with a fatal error.
- FatalCycle(Ident),
-
- /// A cycle error results in a delay_bug call
- CycleDelayBug(Ident),
-
- /// Don't hash the result, instead just mark a query red if it runs
- NoHash(Ident),
-
- /// Generate a dep node based on the dependencies of the query
- Anon(Ident),
-
- /// Always evaluate the query, ignoring its dependencies
- EvalAlways(Ident),
-
- /// Use a separate query provider for local and extern crates
- SeparateProvideExtern(Ident),
-
- /// Always remap the ParamEnv's constness before hashing and passing to the query provider
- RemapEnvConstness(Ident),
-}
-
-impl Parse for QueryModifier {
- fn parse(input: ParseStream<'_>) -> Result<Self> {
- let modifier: Ident = input.parse()?;
- if modifier == "desc" {
- // Parse a description modifier like:
- // `desc { |tcx| "foo {}", tcx.item_path(key) }`
- let attr_content;
- braced!(attr_content in input);
- let tcx = if attr_content.peek(Token![|]) {
- attr_content.parse::<Token![|]>()?;
- let tcx = attr_content.parse()?;
- attr_content.parse::<Token![|]>()?;
- Some(tcx)
- } else {
- None
- };
- let desc = attr_content.parse_terminated(Expr::parse)?;
- Ok(QueryModifier::Desc(tcx, desc))
- } else if modifier == "cache_on_disk_if" {
- // Parse a cache modifier like:
- // `cache(tcx, value) { |tcx| key.is_local() }`
- let has_args = if let TokenTree::Group(group) = input.fork().parse()? {
- group.delimiter() == Delimiter::Parenthesis
- } else {
- false
- };
- let args = if has_args {
- let args;
- parenthesized!(args in input);
- let tcx = args.parse()?;
- Some(tcx)
- } else {
- None
- };
- let block = input.parse()?;
- Ok(QueryModifier::Cache(args, block))
- } else if modifier == "load_cached" {
- // Parse a load_cached modifier like:
- // `load_cached(tcx, id) { tcx.on_disk_cache.try_load_query_result(tcx, id) }`
- let args;
- parenthesized!(args in input);
- let tcx = args.parse()?;
- args.parse::<Token![,]>()?;
- let id = args.parse()?;
- let block = input.parse()?;
- Ok(QueryModifier::LoadCached(tcx, id, block))
- } else if modifier == "storage" {
- let args;
- parenthesized!(args in input);
- let ty = args.parse()?;
- Ok(QueryModifier::Storage(ty))
- } else if modifier == "fatal_cycle" {
- Ok(QueryModifier::FatalCycle(modifier))
- } else if modifier == "cycle_delay_bug" {
- Ok(QueryModifier::CycleDelayBug(modifier))
- } else if modifier == "no_hash" {
- Ok(QueryModifier::NoHash(modifier))
- } else if modifier == "anon" {
- Ok(QueryModifier::Anon(modifier))
- } else if modifier == "eval_always" {
- Ok(QueryModifier::EvalAlways(modifier))
- } else if modifier == "separate_provide_extern" {
- Ok(QueryModifier::SeparateProvideExtern(modifier))
- } else if modifier == "remap_env_constness" {
- Ok(QueryModifier::RemapEnvConstness(modifier))
- } else {
- Err(Error::new(modifier.span(), "unknown query modifier"))
- }
- }
-}
-
/// Ensures only doc comment attributes are used
fn check_attributes(attrs: Vec<Attribute>) -> Result<Vec<Attribute>> {
let inner = |attr: Attribute| {
@@ -154,16 +32,16 @@ fn check_attributes(attrs: Vec<Attribute>) -> Result<Vec<Attribute>> {
/// A compiler query. `query ... { ... }`
struct Query {
doc_comments: Vec<Attribute>,
- modifiers: List<QueryModifier>,
+ modifiers: QueryModifiers,
name: Ident,
- key: IdentOrWild,
+ key: Pat,
arg: Type,
result: ReturnType,
}
impl Parse for Query {
fn parse(input: ParseStream<'_>) -> Result<Self> {
- let doc_comments = check_attributes(input.call(Attribute::parse_outer)?)?;
+ let mut doc_comments = check_attributes(input.call(Attribute::parse_outer)?)?;
// Parse the query declaration. Like `query type_of(key: DefId) -> Ty<'tcx>`
input.parse::<kw::query>()?;
@@ -178,7 +56,13 @@ impl Parse for Query {
// Parse the query modifiers
let content;
braced!(content in input);
- let modifiers = content.parse()?;
+ let modifiers = parse_query_modifiers(&content)?;
+
+ // If there are no doc-comments, give at least some idea of what
+ // it does by showing the query description.
+ if doc_comments.is_empty() {
+ doc_comments.push(doc_comment_from_desc(&modifiers.desc.1)?);
+ }
Ok(Query { doc_comments, modifiers, name, key, arg, result })
}
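To illustrate the doc-comment fallback introduced above: for a query declared without any doc comment, assuming a desc like the hypothetical one below, parse() now synthesizes a #[doc] attribute from the description (the exact whitespace in the generated string may differ).

    // Hypothetical query with no doc comment:
    query type_of(key: DefId) -> Ty<'tcx> {
        desc { |tcx| "computing type of `{}`", tcx.def_path_str(key) }
    }

    // Approximate attribute produced by doc_comment_from_desc:
    #[doc = "[query description - consider adding a doc-comment!] computing type of `tcx.def_path_str(key)`"]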
@@ -202,13 +86,10 @@ struct QueryModifiers {
desc: (Option<Ident>, Punctuated<Expr, Token![,]>),
/// Use this type for the in-memory cache.
- storage: Option<Type>,
+ arena_cache: Option<Ident>,
/// Cache the query to disk if the `Block` returns true.
- cache: Option<(Option<IdentOrWild>, Block)>,
-
- /// Custom code to load the query from disk.
- load_cached: Option<(Ident, Ident, Block)>,
+ cache: Option<(Option<Pat>, Block)>,
/// A cycle error for this query aborting the compilation with a fatal error.
fatal_cycle: Option<Ident>,
@@ -222,9 +103,12 @@ struct QueryModifiers {
/// Generate a dep node based on the dependencies of the query
anon: Option<Ident>,
- // Always evaluate the query, ignoring its dependencies
+ /// Always evaluate the query, ignoring its dependencies
eval_always: Option<Ident>,
+ /// Whether the query has a call depth limit
+ depth_limit: Option<Ident>,
+
/// Use a separate query provider for local and extern crates
separate_provide_extern: Option<Ident>,
@@ -232,10 +116,8 @@ struct QueryModifiers {
remap_env_constness: Option<Ident>,
}
-/// Process query modifiers into a struct, erroring on duplicates
-fn process_modifiers(query: &mut Query) -> QueryModifiers {
- let mut load_cached = None;
- let mut storage = None;
+fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
+ let mut arena_cache = None;
let mut cache = None;
let mut desc = None;
let mut fatal_cycle = None;
@@ -243,121 +125,77 @@ fn process_modifiers(query: &mut Query) -> QueryModifiers {
let mut no_hash = None;
let mut anon = None;
let mut eval_always = None;
+ let mut depth_limit = None;
let mut separate_provide_extern = None;
let mut remap_env_constness = None;
- for modifier in query.modifiers.0.drain(..) {
- match modifier {
- QueryModifier::LoadCached(tcx, id, block) => {
- if load_cached.is_some() {
- panic!("duplicate modifier `load_cached` for query `{}`", query.name);
- }
- load_cached = Some((tcx, id, block));
- }
- QueryModifier::Storage(ty) => {
- if storage.is_some() {
- panic!("duplicate modifier `storage` for query `{}`", query.name);
- }
- storage = Some(ty);
- }
- QueryModifier::Cache(args, expr) => {
- if cache.is_some() {
- panic!("duplicate modifier `cache` for query `{}`", query.name);
- }
- cache = Some((args, expr));
- }
- QueryModifier::Desc(tcx, list) => {
- if desc.is_some() {
- panic!("duplicate modifier `desc` for query `{}`", query.name);
- }
- // If there are no doc-comments, give at least some idea of what
- // it does by showing the query description.
- if query.doc_comments.is_empty() {
- use ::syn::*;
- let mut list = list.iter();
- let format_str: String = match list.next() {
- Some(&Expr::Lit(ExprLit { lit: Lit::Str(ref lit_str), .. })) => {
- lit_str.value().replace("`{}`", "{}") // We add them later anyways for consistency
- }
- _ => panic!("Expected a string literal"),
- };
- let mut fmt_fragments = format_str.split("{}");
- let mut doc_string = fmt_fragments.next().unwrap().to_string();
- list.map(::quote::ToTokens::to_token_stream).zip(fmt_fragments).for_each(
- |(tts, next_fmt_fragment)| {
- use ::core::fmt::Write;
- write!(
- &mut doc_string,
- " `{}` {}",
- tts.to_string().replace(" . ", "."),
- next_fmt_fragment,
- )
- .unwrap();
- },
- );
- let doc_string = format!(
- "[query description - consider adding a doc-comment!] {}",
- doc_string
- );
- let comment = parse_quote! {
- #[doc = #doc_string]
- };
- query.doc_comments.push(comment);
- }
- desc = Some((tcx, list));
- }
- QueryModifier::FatalCycle(ident) => {
- if fatal_cycle.is_some() {
- panic!("duplicate modifier `fatal_cycle` for query `{}`", query.name);
- }
- fatal_cycle = Some(ident);
- }
- QueryModifier::CycleDelayBug(ident) => {
- if cycle_delay_bug.is_some() {
- panic!("duplicate modifier `cycle_delay_bug` for query `{}`", query.name);
- }
- cycle_delay_bug = Some(ident);
- }
- QueryModifier::NoHash(ident) => {
- if no_hash.is_some() {
- panic!("duplicate modifier `no_hash` for query `{}`", query.name);
- }
- no_hash = Some(ident);
- }
- QueryModifier::Anon(ident) => {
- if anon.is_some() {
- panic!("duplicate modifier `anon` for query `{}`", query.name);
- }
- anon = Some(ident);
- }
- QueryModifier::EvalAlways(ident) => {
- if eval_always.is_some() {
- panic!("duplicate modifier `eval_always` for query `{}`", query.name);
- }
- eval_always = Some(ident);
- }
- QueryModifier::SeparateProvideExtern(ident) => {
- if separate_provide_extern.is_some() {
- panic!(
- "duplicate modifier `separate_provide_extern` for query `{}`",
- query.name
- );
- }
- separate_provide_extern = Some(ident);
- }
- QueryModifier::RemapEnvConstness(ident) => {
- if remap_env_constness.is_some() {
- panic!("duplicate modifier `remap_env_constness` for query `{}`", query.name);
+
+ while !input.is_empty() {
+ let modifier: Ident = input.parse()?;
+
+ macro_rules! try_insert {
+ ($name:ident = $expr:expr) => {
+ if $name.is_some() {
+ return Err(Error::new(modifier.span(), "duplicate modifier"));
}
- remap_env_constness = Some(ident)
- }
+ $name = Some($expr);
+ };
+ }
+
+ if modifier == "desc" {
+ // Parse a description modifier like:
+ // `desc { |tcx| "foo {}", tcx.item_path(key) }`
+ let attr_content;
+ braced!(attr_content in input);
+ let tcx = if attr_content.peek(Token![|]) {
+ attr_content.parse::<Token![|]>()?;
+ let tcx = attr_content.parse()?;
+ attr_content.parse::<Token![|]>()?;
+ Some(tcx)
+ } else {
+ None
+ };
+ let list = attr_content.parse_terminated(Expr::parse)?;
+ try_insert!(desc = (tcx, list));
+ } else if modifier == "cache_on_disk_if" {
+ // Parse a cache modifier like:
+ // `cache(tcx) { |tcx| key.is_local() }`
+ let args = if input.peek(token::Paren) {
+ let args;
+ parenthesized!(args in input);
+ let tcx = args.parse()?;
+ Some(tcx)
+ } else {
+ None
+ };
+ let block = input.parse()?;
+ try_insert!(cache = (args, block));
+ } else if modifier == "arena_cache" {
+ try_insert!(arena_cache = modifier);
+ } else if modifier == "fatal_cycle" {
+ try_insert!(fatal_cycle = modifier);
+ } else if modifier == "cycle_delay_bug" {
+ try_insert!(cycle_delay_bug = modifier);
+ } else if modifier == "no_hash" {
+ try_insert!(no_hash = modifier);
+ } else if modifier == "anon" {
+ try_insert!(anon = modifier);
+ } else if modifier == "eval_always" {
+ try_insert!(eval_always = modifier);
+ } else if modifier == "depth_limit" {
+ try_insert!(depth_limit = modifier);
+ } else if modifier == "separate_provide_extern" {
+ try_insert!(separate_provide_extern = modifier);
+ } else if modifier == "remap_env_constness" {
+ try_insert!(remap_env_constness = modifier);
+ } else {
+ return Err(Error::new(modifier.span(), "unknown query modifier"));
}
}
- let desc = desc.unwrap_or_else(|| {
- panic!("no description provided for query `{}`", query.name);
- });
- QueryModifiers {
- load_cached,
- storage,
+ let Some(desc) = desc else {
+ return Err(input.error("no description provided"));
+ };
+ Ok(QueryModifiers {
+ arena_cache,
cache,
desc,
fatal_cycle,
@@ -365,43 +203,48 @@ fn process_modifiers(query: &mut Query) -> QueryModifiers {
no_hash,
anon,
eval_always,
+ depth_limit,
separate_provide_extern,
remap_env_constness,
- }
+ })
+}
+
+fn doc_comment_from_desc(list: &Punctuated<Expr, token::Comma>) -> Result<Attribute> {
+ use ::syn::*;
+ let mut iter = list.iter();
+ let format_str: String = match iter.next() {
+ Some(&Expr::Lit(ExprLit { lit: Lit::Str(ref lit_str), .. })) => {
+ lit_str.value().replace("`{}`", "{}") // We add them later anyways for consistency
+ }
+ _ => return Err(Error::new(list.span(), "Expected a string literal")),
+ };
+ let mut fmt_fragments = format_str.split("{}");
+ let mut doc_string = fmt_fragments.next().unwrap().to_string();
+ iter.map(::quote::ToTokens::to_token_stream).zip(fmt_fragments).for_each(
+ |(tts, next_fmt_fragment)| {
+ use ::core::fmt::Write;
+ write!(
+ &mut doc_string,
+ " `{}` {}",
+ tts.to_string().replace(" . ", "."),
+ next_fmt_fragment,
+ )
+ .unwrap();
+ },
+ );
+ let doc_string = format!("[query description - consider adding a doc-comment!] {}", doc_string);
+ Ok(parse_quote! { #[doc = #doc_string] })
}
/// Add the impl of QueryDescription for the query to `impls` if one is requested
-fn add_query_description_impl(
- query: &Query,
- modifiers: QueryModifiers,
- impls: &mut proc_macro2::TokenStream,
-) {
+fn add_query_description_impl(query: &Query, impls: &mut proc_macro2::TokenStream) {
let name = &query.name;
- let key = &query.key.0;
+ let key = &query.key;
+ let modifiers = &query.modifiers;
// Find out if we should cache the query on disk
let cache = if let Some((args, expr)) = modifiers.cache.as_ref() {
- let try_load_from_disk = if let Some((tcx, id, block)) = modifiers.load_cached.as_ref() {
- // Use custom code to load the query from disk
- quote! {
- const TRY_LOAD_FROM_DISK: Option<fn(QueryCtxt<$tcx>, SerializedDepNodeIndex) -> Option<Self::Value>>
- = Some(|#tcx, #id| { #block });
- }
- } else {
- // Use the default code to load the query from disk
- quote! {
- const TRY_LOAD_FROM_DISK: Option<fn(QueryCtxt<$tcx>, SerializedDepNodeIndex) -> Option<Self::Value>>
- = Some(|tcx, id| tcx.on_disk_cache().as_ref()?.try_load_query_result(*tcx, id));
- }
- };
-
- let tcx = args
- .as_ref()
- .map(|t| {
- let t = &t.0;
- quote! { #t }
- })
- .unwrap_or_else(|| quote! { _ });
+ let tcx = args.as_ref().map(|t| quote! { #t }).unwrap_or_else(|| quote! { _ });
// expr is a `Block`, meaning that `{ #expr }` gets expanded
// to `{ { stmts... } }`, which triggers the `unused_braces` lint.
quote! {
@@ -410,29 +253,22 @@ fn add_query_description_impl(
fn cache_on_disk(#tcx: TyCtxt<'tcx>, #key: &Self::Key) -> bool {
#expr
}
-
- #try_load_from_disk
}
} else {
- if modifiers.load_cached.is_some() {
- panic!("load_cached modifier on query `{}` without a cache modifier", name);
- }
quote! {
#[inline]
fn cache_on_disk(_: TyCtxt<'tcx>, _: &Self::Key) -> bool {
false
}
-
- const TRY_LOAD_FROM_DISK: Option<fn(QueryCtxt<$tcx>, SerializedDepNodeIndex) -> Option<Self::Value>> = None;
}
};
- let (tcx, desc) = modifiers.desc;
+ let (tcx, desc) = &modifiers.desc;
let tcx = tcx.as_ref().map_or_else(|| quote! { _ }, |t| quote! { #t });
let desc = quote! {
#[allow(unused_variables)]
- fn describe(tcx: QueryCtxt<$tcx>, key: Self::Key) -> String {
+ fn describe(tcx: QueryCtxt<'tcx>, key: Self::Key) -> String {
let (#tcx, #key) = (*tcx, key);
::rustc_middle::ty::print::with_no_trimmed_paths!(
format!(#desc)
@@ -441,7 +277,7 @@ fn add_query_description_impl(
};
impls.extend(quote! {
- (#name<$tcx:tt>) => {
+ (#name) => {
#desc
#cache
};
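Putting the pieces together, each arm that add_query_description_impl appends to rustc_query_description! now expands to roughly the following for a hypothetical query carrying a cache_on_disk_if modifier; the query name and description body are illustrative, not the exact generated output.

    (type_of) => {
        #[allow(unused_variables)]
        fn describe(tcx: QueryCtxt<'tcx>, key: Self::Key) -> String {
            let (tcx, key) = (*tcx, key);
            ::rustc_middle::ty::print::with_no_trimmed_paths!(
                format!("computing type of `{}`", tcx.def_path_str(key))
            )
        }

        #[inline]
        fn cache_on_disk(tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
            { key.is_local() }
        }
    };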
@@ -453,13 +289,10 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
let mut query_stream = quote! {};
let mut query_description_stream = quote! {};
- let mut dep_node_def_stream = quote! {};
let mut cached_queries = quote! {};
- for mut query in queries.0 {
- let modifiers = process_modifiers(&mut query);
- let name = &query.name;
- let arg = &query.arg;
+ for query in queries.0 {
+ let Query { name, arg, modifiers, .. } = &query;
let result_full = &query.result;
let result = match query.result {
ReturnType::Default => quote! { -> () },
@@ -474,38 +307,32 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
let mut attributes = Vec::new();
- // Pass on the fatal_cycle modifier
- if let Some(fatal_cycle) = &modifiers.fatal_cycle {
- attributes.push(quote! { (#fatal_cycle) });
- };
- // Pass on the storage modifier
- if let Some(ref ty) = modifiers.storage {
- let span = ty.span();
- attributes.push(quote_spanned! {span=> (storage #ty) });
- };
- // Pass on the cycle_delay_bug modifier
- if let Some(cycle_delay_bug) = &modifiers.cycle_delay_bug {
- attributes.push(quote! { (#cycle_delay_bug) });
- };
- // Pass on the no_hash modifier
- if let Some(no_hash) = &modifiers.no_hash {
- attributes.push(quote! { (#no_hash) });
- };
- // Pass on the anon modifier
- if let Some(anon) = &modifiers.anon {
- attributes.push(quote! { (#anon) });
- };
- // Pass on the eval_always modifier
- if let Some(eval_always) = &modifiers.eval_always {
- attributes.push(quote! { (#eval_always) });
- };
- // Pass on the separate_provide_extern modifier
- if let Some(separate_provide_extern) = &modifiers.separate_provide_extern {
- attributes.push(quote! { (#separate_provide_extern) });
+ macro_rules! passthrough {
+ ( $( $modifier:ident ),+ $(,)? ) => {
+ $( if let Some($modifier) = &modifiers.$modifier {
+ attributes.push(quote! { (#$modifier) });
+ }; )+
+ }
}
- // Pass on the remap_env_constness modifier
- if let Some(remap_env_constness) = &modifiers.remap_env_constness {
- attributes.push(quote! { (#remap_env_constness) });
+
+ passthrough!(
+ fatal_cycle,
+ arena_cache,
+ cycle_delay_bug,
+ no_hash,
+ anon,
+ eval_always,
+ depth_limit,
+ separate_provide_extern,
+ remap_env_constness,
+ );
+
+ if modifiers.cache.is_some() {
+ attributes.push(quote! { (cache) });
+ }
// This uses the span of the query definition for the commas,
@@ -516,48 +343,27 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
// be very useful.
let span = name.span();
let attribute_stream = quote_spanned! {span=> #(#attributes),*};
- let doc_comments = query.doc_comments.iter();
+ let doc_comments = &query.doc_comments;
// Add the query to the group
query_stream.extend(quote! {
#(#doc_comments)*
[#attribute_stream] fn #name(#arg) #result,
});
- // Create a dep node for the query
- dep_node_def_stream.extend(quote! {
- [#attribute_stream] #name(#arg),
- });
-
- add_query_description_impl(&query, modifiers, &mut query_description_stream);
+ add_query_description_impl(&query, &mut query_description_stream);
}
TokenStream::from(quote! {
#[macro_export]
macro_rules! rustc_query_append {
- ([$($macro:tt)*][$($other:tt)*]) => {
- $($macro)* {
- $($other)*
-
+ ($macro:ident! $( [$($other:tt)*] )?) => {
+ $macro! {
+ $( $($other)* )?
#query_stream
-
}
}
}
- macro_rules! rustc_dep_node_append {
- ([$($macro:tt)*][$($other:tt)*]) => {
- $($macro)*(
- $($other)*
- #dep_node_def_stream
- );
- }
- }
- #[macro_export]
- macro_rules! rustc_cached_queries {
- ($($macro:tt)*) => {
- $($macro)*(#cached_queries);
- }
- }
#[macro_export]
macro_rules! rustc_query_description {
#query_description_stream