From 698f8c2f01ea549d77d7dc3338a12e04c11057b9 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:02:58 +0200
Subject: Adding upstream version 1.64.0+dfsg1.

Signed-off-by: Daniel Baumann
---
 compiler/rustc_ast_lowering/src/item.rs | 1513 +++++++++++++++++++++++++++++++
 1 file changed, 1513 insertions(+)
 create mode 100644 compiler/rustc_ast_lowering/src/item.rs

diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs
new file mode 100644
index 000000000..ee4c0036f
--- /dev/null
+++ b/compiler/rustc_ast_lowering/src/item.rs
@@ -0,0 +1,1513 @@
+use super::ResolverAstLoweringExt;
+use super::{AstOwner, ImplTraitContext, ImplTraitPosition};
+use super::{FnDeclKind, LoweringContext, ParamMode};
+
+use rustc_ast::ptr::P;
+use rustc_ast::visit::AssocCtxt;
+use rustc_ast::*;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sorted_map::SortedMap;
+use rustc_errors::struct_span_err;
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
+use rustc_hir::PredicateOrigin;
+use rustc_index::vec::{Idx, IndexVec};
+use rustc_middle::ty::{DefIdTree, ResolverAstLowering, TyCtxt};
+use rustc_span::source_map::DesugaringKind;
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::Span;
+use rustc_target::spec::abi;
+use smallvec::{smallvec, SmallVec};
+
+use std::iter;
+
+pub(super) struct ItemLowerer<'a, 'hir> {
+    pub(super) tcx: TyCtxt<'hir>,
+    pub(super) resolver: &'a mut ResolverAstLowering,
+    pub(super) ast_index: &'a IndexVec<LocalDefId, AstOwner<'a>>,
+    pub(super) owners: &'a mut IndexVec<LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>>,
+}
+
+/// When we have a ty alias we *may* have two where clauses. To give the best diagnostics, we set
+/// the span to the preferred where clause, if it exists. Otherwise, we set the span to the other
+/// where clause, if that one exists.
+fn add_ty_alias_where_clause(
+    generics: &mut ast::Generics,
+    mut where_clauses: (TyAliasWhereClause, TyAliasWhereClause),
+    prefer_first: bool,
+) {
+    if !prefer_first {
+        where_clauses = (where_clauses.1, where_clauses.0);
+    }
+    if where_clauses.0.0 || !where_clauses.1.0 {
+        generics.where_clause.has_where_token = where_clauses.0.0;
+        generics.where_clause.span = where_clauses.0.1;
+    } else {
+        generics.where_clause.has_where_token = where_clauses.1.0;
+        generics.where_clause.span = where_clauses.1.1;
+    }
+}
+
+impl<'a, 'hir> ItemLowerer<'a, 'hir> {
+    fn with_lctx(
+        &mut self,
+        owner: NodeId,
+        f: impl FnOnce(&mut LoweringContext<'_, 'hir>) -> hir::OwnerNode<'hir>,
+    ) {
+        let mut lctx = LoweringContext {
+            // Pseudo-globals.
+            tcx: self.tcx,
+            resolver: self.resolver,
+            arena: self.tcx.hir_arena,
+
+            // HirId handling.
+            bodies: Vec::new(),
+            attrs: SortedMap::default(),
+            children: FxHashMap::default(),
+            current_hir_id_owner: CRATE_DEF_ID,
+            item_local_id_counter: hir::ItemLocalId::new(0),
+            node_id_to_local_id: Default::default(),
+            local_id_to_def_id: SortedMap::new(),
+            trait_map: Default::default(),
+
+            // Lowering state.
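+            // (Illustrative sketch, not part of the upstream file: the fields below
+            // hold per-owner lowering state, and `with_lctx` builds a fresh context
+            // like this one for every AST owner. Assuming a hypothetical crate
+            // containing
+            //
+            // ```
+            // struct S;
+            // impl S { fn f(&self) {} }
+            // ```
+            //
+            // the struct, the `impl` block and the associated `fn f` are each
+            // lowered with their own `LoweringContext`, and the per-owner results
+            // are collected into `self.owners` at the end of `with_lctx`.)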
+ catch_scope: None, + loop_scope: None, + is_in_loop_condition: false, + is_in_trait_impl: false, + is_in_dyn_type: false, + generator_kind: None, + task_context: None, + current_item: None, + impl_trait_defs: Vec::new(), + impl_trait_bounds: Vec::new(), + allow_try_trait: Some([sym::try_trait_v2, sym::yeet_desugar_details][..].into()), + allow_gen_future: Some([sym::gen_future][..].into()), + allow_into_future: Some([sym::into_future][..].into()), + }; + lctx.with_hir_id_owner(owner, |lctx| f(lctx)); + + for (def_id, info) in lctx.children { + self.owners.ensure_contains_elem(def_id, || hir::MaybeOwner::Phantom); + debug_assert!(matches!(self.owners[def_id], hir::MaybeOwner::Phantom)); + self.owners[def_id] = info; + } + } + + pub(super) fn lower_node( + &mut self, + def_id: LocalDefId, + ) -> hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>> { + self.owners.ensure_contains_elem(def_id, || hir::MaybeOwner::Phantom); + if let hir::MaybeOwner::Phantom = self.owners[def_id] { + let node = self.ast_index[def_id]; + match node { + AstOwner::NonOwner => {} + AstOwner::Crate(c) => self.lower_crate(c), + AstOwner::Item(item) => self.lower_item(item), + AstOwner::AssocItem(item, ctxt) => self.lower_assoc_item(item, ctxt), + AstOwner::ForeignItem(item) => self.lower_foreign_item(item), + } + } + + self.owners[def_id] + } + + #[instrument(level = "debug", skip(self, c))] + fn lower_crate(&mut self, c: &Crate) { + debug_assert_eq!(self.resolver.node_id_to_def_id[&CRATE_NODE_ID], CRATE_DEF_ID); + self.with_lctx(CRATE_NODE_ID, |lctx| { + let module = lctx.lower_mod(&c.items, &c.spans); + lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs); + hir::OwnerNode::Crate(lctx.arena.alloc(module)) + }) + } + + #[instrument(level = "debug", skip(self))] + fn lower_item(&mut self, item: &Item) { + self.with_lctx(item.id, |lctx| hir::OwnerNode::Item(lctx.lower_item(item))) + } + + fn lower_assoc_item(&mut self, item: &AssocItem, ctxt: AssocCtxt) { + let def_id = self.resolver.node_id_to_def_id[&item.id]; + + let parent_id = self.tcx.local_parent(def_id); + let parent_hir = self.lower_node(parent_id).unwrap(); + self.with_lctx(item.id, |lctx| { + // Evaluate with the lifetimes in `params` in-scope. + // This is used to track which lifetimes have already been defined, + // and which need to be replicated when lowering an async fn. + match parent_hir.node().expect_item().kind { + hir::ItemKind::Impl(hir::Impl { ref of_trait, .. 
}) => { + lctx.is_in_trait_impl = of_trait.is_some(); + } + _ => {} + }; + + match ctxt { + AssocCtxt::Trait => hir::OwnerNode::TraitItem(lctx.lower_trait_item(item)), + AssocCtxt::Impl => hir::OwnerNode::ImplItem(lctx.lower_impl_item(item)), + } + }) + } + + fn lower_foreign_item(&mut self, item: &ForeignItem) { + self.with_lctx(item.id, |lctx| hir::OwnerNode::ForeignItem(lctx.lower_foreign_item(item))) + } +} + +impl<'hir> LoweringContext<'_, 'hir> { + pub(super) fn lower_mod(&mut self, items: &[P], spans: &ModSpans) -> hir::Mod<'hir> { + hir::Mod { + spans: hir::ModSpans { + inner_span: self.lower_span(spans.inner_span), + inject_use_span: self.lower_span(spans.inject_use_span), + }, + item_ids: self.arena.alloc_from_iter(items.iter().flat_map(|x| self.lower_item_ref(x))), + } + } + + pub(super) fn lower_item_ref(&mut self, i: &Item) -> SmallVec<[hir::ItemId; 1]> { + let mut node_ids = smallvec![hir::ItemId { def_id: self.local_def_id(i.id) }]; + if let ItemKind::Use(ref use_tree) = &i.kind { + self.lower_item_id_use_tree(use_tree, i.id, &mut node_ids); + } + node_ids + } + + fn lower_item_id_use_tree( + &mut self, + tree: &UseTree, + base_id: NodeId, + vec: &mut SmallVec<[hir::ItemId; 1]>, + ) { + match tree.kind { + UseTreeKind::Nested(ref nested_vec) => { + for &(ref nested, id) in nested_vec { + vec.push(hir::ItemId { def_id: self.local_def_id(id) }); + self.lower_item_id_use_tree(nested, id, vec); + } + } + UseTreeKind::Glob => {} + UseTreeKind::Simple(_, id1, id2) => { + for (_, &id) in + iter::zip(self.expect_full_res_from_use(base_id).skip(1), &[id1, id2]) + { + vec.push(hir::ItemId { def_id: self.local_def_id(id) }); + } + } + } + } + + fn lower_item(&mut self, i: &Item) -> &'hir hir::Item<'hir> { + let mut ident = i.ident; + let vis_span = self.lower_span(i.vis.span); + let hir_id = self.lower_node_id(i.id); + let attrs = self.lower_attrs(hir_id, &i.attrs); + let kind = self.lower_item_kind(i.span, i.id, hir_id, &mut ident, attrs, vis_span, &i.kind); + let item = hir::Item { + def_id: hir_id.expect_owner(), + ident: self.lower_ident(ident), + kind, + vis_span, + span: self.lower_span(i.span), + }; + self.arena.alloc(item) + } + + fn lower_item_kind( + &mut self, + span: Span, + id: NodeId, + hir_id: hir::HirId, + ident: &mut Ident, + attrs: Option<&'hir [Attribute]>, + vis_span: Span, + i: &ItemKind, + ) -> hir::ItemKind<'hir> { + match *i { + ItemKind::ExternCrate(orig_name) => hir::ItemKind::ExternCrate(orig_name), + ItemKind::Use(ref use_tree) => { + // Start with an empty prefix. + let prefix = Path { segments: vec![], span: use_tree.span, tokens: None }; + + self.lower_use_tree(use_tree, &prefix, id, vis_span, ident, attrs) + } + ItemKind::Static(ref t, m, ref e) => { + let (ty, body_id) = self.lower_const_item(t, span, e.as_deref()); + hir::ItemKind::Static(ty, m, body_id) + } + ItemKind::Const(_, ref t, ref e) => { + let (ty, body_id) = self.lower_const_item(t, span, e.as_deref()); + hir::ItemKind::Const(ty, body_id) + } + ItemKind::Fn(box Fn { + sig: FnSig { ref decl, header, span: fn_sig_span }, + ref generics, + ref body, + .. + }) => { + self.with_new_scopes(|this| { + this.current_item = Some(ident.span); + + // Note: we don't need to change the return type from `T` to + // `impl Future` here because lower_body + // only cares about the input argument patterns in the function + // declaration (decl), not the return types. 
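+                    // (Illustrative sketch, not part of the upstream file; item and
+                    // parameter names are hypothetical. The overall shape of the
+                    // `async fn` lowering referred to above is, roughly,
+                    //
+                    // ```
+                    // async fn fetch(x: u32) -> String { /* body */ }
+                    // // ... is lowered as if it were ...
+                    // fn fetch(x: u32) -> impl Future<Output = String> {
+                    //     async move { let x = x; /* body */ }
+                    // }
+                    // ```
+                    //
+                    // The return type is rewritten by `lower_fn_decl`, driven by
+                    // `asyncness.opt_return_id()` below, while the body is rebuilt
+                    // by `lower_maybe_async_body`.)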
+ let asyncness = header.asyncness; + let body_id = + this.lower_maybe_async_body(span, &decl, asyncness, body.as_deref()); + + let itctx = ImplTraitContext::Universal; + let (generics, decl) = this.lower_generics(generics, id, itctx, |this| { + let ret_id = asyncness.opt_return_id(); + this.lower_fn_decl(&decl, Some(id), FnDeclKind::Fn, ret_id) + }); + let sig = hir::FnSig { + decl, + header: this.lower_fn_header(header), + span: this.lower_span(fn_sig_span), + }; + hir::ItemKind::Fn(sig, generics, body_id) + }) + } + ItemKind::Mod(_, ref mod_kind) => match mod_kind { + ModKind::Loaded(items, _, spans) => { + hir::ItemKind::Mod(self.lower_mod(items, spans)) + } + ModKind::Unloaded => panic!("`mod` items should have been loaded by now"), + }, + ItemKind::ForeignMod(ref fm) => hir::ItemKind::ForeignMod { + abi: fm.abi.map_or(abi::Abi::FALLBACK, |abi| self.lower_abi(abi)), + items: self + .arena + .alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))), + }, + ItemKind::GlobalAsm(ref asm) => { + hir::ItemKind::GlobalAsm(self.lower_inline_asm(span, asm)) + } + ItemKind::TyAlias(box TyAlias { + ref generics, + where_clauses, + ty: Some(ref ty), + .. + }) => { + // We lower + // + // type Foo = impl Trait + // + // to + // + // type Foo = Foo1 + // opaque type Foo1: Trait + let mut generics = generics.clone(); + add_ty_alias_where_clause(&mut generics, where_clauses, true); + let (generics, ty) = self.lower_generics( + &generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| this.lower_ty(ty, ImplTraitContext::TypeAliasesOpaqueTy), + ); + hir::ItemKind::TyAlias(ty, generics) + } + ItemKind::TyAlias(box TyAlias { + ref generics, ref where_clauses, ty: None, .. + }) => { + let mut generics = generics.clone(); + add_ty_alias_where_clause(&mut generics, *where_clauses, true); + let (generics, ty) = self.lower_generics( + &generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| this.arena.alloc(this.ty(span, hir::TyKind::Err)), + ); + hir::ItemKind::TyAlias(ty, generics) + } + ItemKind::Enum(ref enum_definition, ref generics) => { + let (generics, variants) = self.lower_generics( + generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| { + this.arena.alloc_from_iter( + enum_definition.variants.iter().map(|x| this.lower_variant(x)), + ) + }, + ); + hir::ItemKind::Enum(hir::EnumDef { variants }, generics) + } + ItemKind::Struct(ref struct_def, ref generics) => { + let (generics, struct_def) = self.lower_generics( + generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| this.lower_variant_data(hir_id, struct_def), + ); + hir::ItemKind::Struct(struct_def, generics) + } + ItemKind::Union(ref vdata, ref generics) => { + let (generics, vdata) = self.lower_generics( + generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| this.lower_variant_data(hir_id, vdata), + ); + hir::ItemKind::Union(vdata, generics) + } + ItemKind::Impl(box Impl { + unsafety, + polarity, + defaultness, + constness, + generics: ref ast_generics, + of_trait: ref trait_ref, + self_ty: ref ty, + items: ref impl_items, + }) => { + // Lower the "impl header" first. This ordering is important + // for in-band lifetimes! Consider `'a` here: + // + // impl Foo<'a> for u32 { + // fn method(&'a self) { .. } + // } + // + // Because we start by lowering the `Foo<'a> for u32` + // part, we will add `'a` to the list of generics on + // the impl. 
When we then encounter it later in the + // method, it will not be considered an in-band + // lifetime to be added, but rather a reference to a + // parent lifetime. + let itctx = ImplTraitContext::Universal; + let (generics, (trait_ref, lowered_ty)) = + self.lower_generics(ast_generics, id, itctx, |this| { + let trait_ref = trait_ref.as_ref().map(|trait_ref| { + this.lower_trait_ref( + trait_ref, + ImplTraitContext::Disallowed(ImplTraitPosition::Trait), + ) + }); + + let lowered_ty = this + .lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)); + + (trait_ref, lowered_ty) + }); + + let new_impl_items = self + .arena + .alloc_from_iter(impl_items.iter().map(|item| self.lower_impl_item_ref(item))); + + // `defaultness.has_value()` is never called for an `impl`, always `true` in order + // to not cause an assertion failure inside the `lower_defaultness` function. + let has_val = true; + let (defaultness, defaultness_span) = self.lower_defaultness(defaultness, has_val); + let polarity = match polarity { + ImplPolarity::Positive => ImplPolarity::Positive, + ImplPolarity::Negative(s) => ImplPolarity::Negative(self.lower_span(s)), + }; + hir::ItemKind::Impl(self.arena.alloc(hir::Impl { + unsafety: self.lower_unsafety(unsafety), + polarity, + defaultness, + defaultness_span, + constness: self.lower_constness(constness), + generics, + of_trait: trait_ref, + self_ty: lowered_ty, + items: new_impl_items, + })) + } + ItemKind::Trait(box Trait { + is_auto, + unsafety, + ref generics, + ref bounds, + ref items, + }) => { + let (generics, (unsafety, items, bounds)) = self.lower_generics( + generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| { + let bounds = this.lower_param_bounds( + bounds, + ImplTraitContext::Disallowed(ImplTraitPosition::Bound), + ); + let items = this.arena.alloc_from_iter( + items.iter().map(|item| this.lower_trait_item_ref(item)), + ); + let unsafety = this.lower_unsafety(unsafety); + (unsafety, items, bounds) + }, + ); + hir::ItemKind::Trait(is_auto, unsafety, generics, bounds, items) + } + ItemKind::TraitAlias(ref generics, ref bounds) => { + let (generics, bounds) = self.lower_generics( + generics, + id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| { + this.lower_param_bounds( + bounds, + ImplTraitContext::Disallowed(ImplTraitPosition::Bound), + ) + }, + ); + hir::ItemKind::TraitAlias(generics, bounds) + } + ItemKind::MacroDef(MacroDef { ref body, macro_rules }) => { + let body = P(self.lower_mac_args(body)); + let macro_kind = self.resolver.decl_macro_kind(self.local_def_id(id)); + hir::ItemKind::Macro(ast::MacroDef { body, macro_rules }, macro_kind) + } + ItemKind::MacCall(..) => { + panic!("`TyMac` should have been expanded by now") + } + } + } + + fn lower_const_item( + &mut self, + ty: &Ty, + span: Span, + body: Option<&Expr>, + ) -> (&'hir hir::Ty<'hir>, hir::BodyId) { + let ty = self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)); + (ty, self.lower_const_body(span, body)) + } + + #[instrument(level = "debug", skip(self))] + fn lower_use_tree( + &mut self, + tree: &UseTree, + prefix: &Path, + id: NodeId, + vis_span: Span, + ident: &mut Ident, + attrs: Option<&'hir [Attribute]>, + ) -> hir::ItemKind<'hir> { + let path = &tree.prefix; + let segments = prefix.segments.iter().chain(path.segments.iter()).cloned().collect(); + + match tree.kind { + UseTreeKind::Simple(rename, id1, id2) => { + *ident = tree.ident(); + + // First, apply the prefix to the path. 
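+                // (Illustrative sketch, not part of the upstream file: when this arm
+                // is reached from the `UseTreeKind::Nested` case below, `prefix`
+                // already carries the outer path, so for an import such as
+                //
+                // ```
+                // use std::collections::{HashMap, hash_map::Entry};
+                // ```
+                //
+                // each leaf is lowered with `prefix` = `std::collections`, and the
+                // concatenation into `segments` above yields the full
+                // `std::collections::HashMap` and `std::collections::hash_map::Entry`
+                // paths.)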
+ let mut path = Path { segments, span: path.span, tokens: None }; + + // Correctly resolve `self` imports. + if path.segments.len() > 1 + && path.segments.last().unwrap().ident.name == kw::SelfLower + { + let _ = path.segments.pop(); + if rename.is_none() { + *ident = path.segments.last().unwrap().ident; + } + } + + let mut resolutions = self.expect_full_res_from_use(id).fuse(); + // We want to return *something* from this function, so hold onto the first item + // for later. + let ret_res = self.lower_res(resolutions.next().unwrap_or(Res::Err)); + + // Here, we are looping over namespaces, if they exist for the definition + // being imported. We only handle type and value namespaces because we + // won't be dealing with macros in the rest of the compiler. + // Essentially a single `use` which imports two names is desugared into + // two imports. + for new_node_id in [id1, id2] { + let new_id = self.local_def_id(new_node_id); + let Some(res) = resolutions.next() else { + // Associate an HirId to both ids even if there is no resolution. + let _old = self.children.insert( + new_id, + hir::MaybeOwner::NonOwner(hir::HirId::make_owner(new_id)), + ); + debug_assert!(_old.is_none()); + continue; + }; + let ident = *ident; + let mut path = path.clone(); + for seg in &mut path.segments { + seg.id = self.next_node_id(); + } + let span = path.span; + + self.with_hir_id_owner(new_node_id, |this| { + let res = this.lower_res(res); + let path = this.lower_path_extra(res, &path, ParamMode::Explicit); + let kind = hir::ItemKind::Use(path, hir::UseKind::Single); + if let Some(attrs) = attrs { + this.attrs.insert(hir::ItemLocalId::new(0), attrs); + } + + let item = hir::Item { + def_id: new_id, + ident: this.lower_ident(ident), + kind, + vis_span, + span: this.lower_span(span), + }; + hir::OwnerNode::Item(this.arena.alloc(item)) + }); + } + + let path = self.lower_path_extra(ret_res, &path, ParamMode::Explicit); + hir::ItemKind::Use(path, hir::UseKind::Single) + } + UseTreeKind::Glob => { + let path = self.lower_path( + id, + &Path { segments, span: path.span, tokens: None }, + ParamMode::Explicit, + ); + hir::ItemKind::Use(path, hir::UseKind::Glob) + } + UseTreeKind::Nested(ref trees) => { + // Nested imports are desugared into simple imports. + // So, if we start with + // + // ``` + // pub(x) use foo::{a, b}; + // ``` + // + // we will create three items: + // + // ``` + // pub(x) use foo::a; + // pub(x) use foo::b; + // pub(x) use foo::{}; // <-- this is called the `ListStem` + // ``` + // + // The first two are produced by recursively invoking + // `lower_use_tree` (and indeed there may be things + // like `use foo::{a::{b, c}}` and so forth). They + // wind up being directly added to + // `self.items`. However, the structure of this + // function also requires us to return one item, and + // for that we return the `{}` import (called the + // `ListStem`). + + let prefix = Path { segments, span: prefix.span.to(path.span), tokens: None }; + + // Add all the nested `PathListItem`s to the HIR. + for &(ref use_tree, id) in trees { + let new_hir_id = self.local_def_id(id); + + let mut prefix = prefix.clone(); + + // Give the segments new node-ids since they are being cloned. + for seg in &mut prefix.segments { + seg.id = self.next_node_id(); + } + + // Each `use` import is an item and thus are owners of the + // names in the path. 
Up to this point the nested import is + // the current owner, since we want each desugared import to + // own its own names, we have to adjust the owner before + // lowering the rest of the import. + self.with_hir_id_owner(id, |this| { + let mut ident = *ident; + + let kind = + this.lower_use_tree(use_tree, &prefix, id, vis_span, &mut ident, attrs); + if let Some(attrs) = attrs { + this.attrs.insert(hir::ItemLocalId::new(0), attrs); + } + + let item = hir::Item { + def_id: new_hir_id, + ident: this.lower_ident(ident), + kind, + vis_span, + span: this.lower_span(use_tree.span), + }; + hir::OwnerNode::Item(this.arena.alloc(item)) + }); + } + + let res = self.expect_full_res_from_use(id).next().unwrap_or(Res::Err); + let res = self.lower_res(res); + let path = self.lower_path_extra(res, &prefix, ParamMode::Explicit); + hir::ItemKind::Use(path, hir::UseKind::ListStem) + } + } + } + + fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> { + let hir_id = self.lower_node_id(i.id); + let def_id = hir_id.expect_owner(); + self.lower_attrs(hir_id, &i.attrs); + let item = hir::ForeignItem { + def_id, + ident: self.lower_ident(i.ident), + kind: match i.kind { + ForeignItemKind::Fn(box Fn { ref sig, ref generics, .. }) => { + let fdec = &sig.decl; + let itctx = ImplTraitContext::Universal; + let (generics, (fn_dec, fn_args)) = + self.lower_generics(generics, i.id, itctx, |this| { + ( + // Disallow `impl Trait` in foreign items. + this.lower_fn_decl(fdec, None, FnDeclKind::ExternFn, None), + this.lower_fn_params_to_names(fdec), + ) + }); + + hir::ForeignItemKind::Fn(fn_dec, fn_args, generics) + } + ForeignItemKind::Static(ref t, m, _) => { + let ty = + self.lower_ty(t, ImplTraitContext::Disallowed(ImplTraitPosition::Type)); + hir::ForeignItemKind::Static(ty, m) + } + ForeignItemKind::TyAlias(..) 
=> hir::ForeignItemKind::Type, + ForeignItemKind::MacCall(_) => panic!("macro shouldn't exist here"), + }, + vis_span: self.lower_span(i.vis.span), + span: self.lower_span(i.span), + }; + self.arena.alloc(item) + } + + fn lower_foreign_item_ref(&mut self, i: &ForeignItem) -> hir::ForeignItemRef { + hir::ForeignItemRef { + id: hir::ForeignItemId { def_id: self.local_def_id(i.id) }, + ident: self.lower_ident(i.ident), + span: self.lower_span(i.span), + } + } + + fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> { + let id = self.lower_node_id(v.id); + self.lower_attrs(id, &v.attrs); + hir::Variant { + id, + data: self.lower_variant_data(id, &v.data), + disr_expr: v.disr_expr.as_ref().map(|e| self.lower_anon_const(e)), + ident: self.lower_ident(v.ident), + span: self.lower_span(v.span), + } + } + + fn lower_variant_data( + &mut self, + parent_id: hir::HirId, + vdata: &VariantData, + ) -> hir::VariantData<'hir> { + match *vdata { + VariantData::Struct(ref fields, recovered) => hir::VariantData::Struct( + self.arena + .alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_field_def(f))), + recovered, + ), + VariantData::Tuple(ref fields, id) => { + let ctor_id = self.lower_node_id(id); + self.alias_attrs(ctor_id, parent_id); + hir::VariantData::Tuple( + self.arena.alloc_from_iter( + fields.iter().enumerate().map(|f| self.lower_field_def(f)), + ), + ctor_id, + ) + } + VariantData::Unit(id) => { + let ctor_id = self.lower_node_id(id); + self.alias_attrs(ctor_id, parent_id); + hir::VariantData::Unit(ctor_id) + } + } + } + + fn lower_field_def(&mut self, (index, f): (usize, &FieldDef)) -> hir::FieldDef<'hir> { + let ty = if let TyKind::Path(ref qself, ref path) = f.ty.kind { + let t = self.lower_path_ty( + &f.ty, + qself, + path, + ParamMode::ExplicitNamed, // no `'_` in declarations (Issue #61124) + ImplTraitContext::Disallowed(ImplTraitPosition::Path), + ); + self.arena.alloc(t) + } else { + self.lower_ty(&f.ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)) + }; + let hir_id = self.lower_node_id(f.id); + self.lower_attrs(hir_id, &f.attrs); + hir::FieldDef { + span: self.lower_span(f.span), + hir_id, + ident: match f.ident { + Some(ident) => self.lower_ident(ident), + // FIXME(jseyfried): positional field hygiene. + None => Ident::new(sym::integer(index), self.lower_span(f.span)), + }, + vis_span: self.lower_span(f.vis.span), + ty, + } + } + + fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> { + let hir_id = self.lower_node_id(i.id); + let trait_item_def_id = hir_id.expect_owner(); + + let (generics, kind, has_default) = match i.kind { + AssocItemKind::Const(_, ref ty, ref default) => { + let ty = self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)); + let body = default.as_ref().map(|x| self.lower_const_body(i.span, Some(x))); + (hir::Generics::empty(), hir::TraitItemKind::Const(ty, body), body.is_some()) + } + AssocItemKind::Fn(box Fn { ref sig, ref generics, body: None, .. }) => { + let names = self.lower_fn_params_to_names(&sig.decl); + let (generics, sig) = + self.lower_method_sig(generics, sig, i.id, FnDeclKind::Trait, None); + (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false) + } + AssocItemKind::Fn(box Fn { ref sig, ref generics, body: Some(ref body), .. 
}) => { + let asyncness = sig.header.asyncness; + let body_id = + self.lower_maybe_async_body(i.span, &sig.decl, asyncness, Some(&body)); + let (generics, sig) = self.lower_method_sig( + generics, + sig, + i.id, + FnDeclKind::Trait, + asyncness.opt_return_id(), + ); + (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true) + } + AssocItemKind::TyAlias(box TyAlias { + ref generics, + where_clauses, + ref bounds, + ref ty, + .. + }) => { + let mut generics = generics.clone(); + add_ty_alias_where_clause(&mut generics, where_clauses, false); + let (generics, kind) = self.lower_generics( + &generics, + i.id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| { + let ty = ty.as_ref().map(|x| { + this.lower_ty(x, ImplTraitContext::Disallowed(ImplTraitPosition::Type)) + }); + hir::TraitItemKind::Type( + this.lower_param_bounds( + bounds, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + ), + ty, + ) + }, + ); + (generics, kind, ty.is_some()) + } + AssocItemKind::MacCall(..) => panic!("macro item shouldn't exist at this point"), + }; + + self.lower_attrs(hir_id, &i.attrs); + let item = hir::TraitItem { + def_id: trait_item_def_id, + ident: self.lower_ident(i.ident), + generics, + kind, + span: self.lower_span(i.span), + defaultness: hir::Defaultness::Default { has_value: has_default }, + }; + self.arena.alloc(item) + } + + fn lower_trait_item_ref(&mut self, i: &AssocItem) -> hir::TraitItemRef { + let kind = match &i.kind { + AssocItemKind::Const(..) => hir::AssocItemKind::Const, + AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type, + AssocItemKind::Fn(box Fn { sig, .. }) => { + hir::AssocItemKind::Fn { has_self: sig.decl.has_self() } + } + AssocItemKind::MacCall(..) => unimplemented!(), + }; + let id = hir::TraitItemId { def_id: self.local_def_id(i.id) }; + hir::TraitItemRef { + id, + ident: self.lower_ident(i.ident), + span: self.lower_span(i.span), + kind, + } + } + + /// Construct `ExprKind::Err` for the given `span`. + pub(crate) fn expr_err(&mut self, span: Span) -> hir::Expr<'hir> { + self.expr(span, hir::ExprKind::Err, AttrVec::new()) + } + + fn lower_impl_item(&mut self, i: &AssocItem) -> &'hir hir::ImplItem<'hir> { + // Since `default impl` is not yet implemented, this is always true in impls. + let has_value = true; + let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value); + + let (generics, kind) = match &i.kind { + AssocItemKind::Const(_, ty, expr) => { + let ty = self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)); + ( + hir::Generics::empty(), + hir::ImplItemKind::Const(ty, self.lower_const_body(i.span, expr.as_deref())), + ) + } + AssocItemKind::Fn(box Fn { sig, generics, body, .. }) => { + self.current_item = Some(i.span); + let asyncness = sig.header.asyncness; + let body_id = + self.lower_maybe_async_body(i.span, &sig.decl, asyncness, body.as_deref()); + let (generics, sig) = self.lower_method_sig( + generics, + sig, + i.id, + if self.is_in_trait_impl { FnDeclKind::Impl } else { FnDeclKind::Inherent }, + asyncness.opt_return_id(), + ); + + (generics, hir::ImplItemKind::Fn(sig, body_id)) + } + AssocItemKind::TyAlias(box TyAlias { generics, where_clauses, ty, .. 
}) => { + let mut generics = generics.clone(); + add_ty_alias_where_clause(&mut generics, *where_clauses, false); + self.lower_generics( + &generics, + i.id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| match ty { + None => { + let ty = this.arena.alloc(this.ty(i.span, hir::TyKind::Err)); + hir::ImplItemKind::TyAlias(ty) + } + Some(ty) => { + let ty = this.lower_ty(ty, ImplTraitContext::TypeAliasesOpaqueTy); + hir::ImplItemKind::TyAlias(ty) + } + }, + ) + } + AssocItemKind::MacCall(..) => panic!("`TyMac` should have been expanded by now"), + }; + + let hir_id = self.lower_node_id(i.id); + self.lower_attrs(hir_id, &i.attrs); + let item = hir::ImplItem { + def_id: hir_id.expect_owner(), + ident: self.lower_ident(i.ident), + generics, + kind, + vis_span: self.lower_span(i.vis.span), + span: self.lower_span(i.span), + defaultness, + }; + self.arena.alloc(item) + } + + fn lower_impl_item_ref(&mut self, i: &AssocItem) -> hir::ImplItemRef { + hir::ImplItemRef { + id: hir::ImplItemId { def_id: self.local_def_id(i.id) }, + ident: self.lower_ident(i.ident), + span: self.lower_span(i.span), + kind: match &i.kind { + AssocItemKind::Const(..) => hir::AssocItemKind::Const, + AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type, + AssocItemKind::Fn(box Fn { sig, .. }) => { + hir::AssocItemKind::Fn { has_self: sig.decl.has_self() } + } + AssocItemKind::MacCall(..) => unimplemented!(), + }, + trait_item_def_id: self.resolver.get_partial_res(i.id).map(|r| r.base_res().def_id()), + } + } + + fn lower_defaultness( + &self, + d: Defaultness, + has_value: bool, + ) -> (hir::Defaultness, Option) { + match d { + Defaultness::Default(sp) => { + (hir::Defaultness::Default { has_value }, Some(self.lower_span(sp))) + } + Defaultness::Final => { + assert!(has_value); + (hir::Defaultness::Final, None) + } + } + } + + fn record_body( + &mut self, + params: &'hir [hir::Param<'hir>], + value: hir::Expr<'hir>, + ) -> hir::BodyId { + let body = hir::Body { generator_kind: self.generator_kind, params, value }; + let id = body.id(); + debug_assert_eq!(id.hir_id.owner, self.current_hir_id_owner); + self.bodies.push((id.hir_id.local_id, self.arena.alloc(body))); + id + } + + pub(super) fn lower_body( + &mut self, + f: impl FnOnce(&mut Self) -> (&'hir [hir::Param<'hir>], hir::Expr<'hir>), + ) -> hir::BodyId { + let prev_gen_kind = self.generator_kind.take(); + let task_context = self.task_context.take(); + let (parameters, result) = f(self); + let body_id = self.record_body(parameters, result); + self.task_context = task_context; + self.generator_kind = prev_gen_kind; + body_id + } + + fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> { + let hir_id = self.lower_node_id(param.id); + self.lower_attrs(hir_id, ¶m.attrs); + hir::Param { + hir_id, + pat: self.lower_pat(¶m.pat), + ty_span: self.lower_span(param.ty.span), + span: self.lower_span(param.span), + } + } + + pub(super) fn lower_fn_body( + &mut self, + decl: &FnDecl, + body: impl FnOnce(&mut Self) -> hir::Expr<'hir>, + ) -> hir::BodyId { + self.lower_body(|this| { + ( + this.arena.alloc_from_iter(decl.inputs.iter().map(|x| this.lower_param(x))), + body(this), + ) + }) + } + + fn lower_fn_body_block( + &mut self, + span: Span, + decl: &FnDecl, + body: Option<&Block>, + ) -> hir::BodyId { + self.lower_fn_body(decl, |this| this.lower_block_expr_opt(span, body)) + } + + fn lower_block_expr_opt(&mut self, span: Span, block: Option<&Block>) -> hir::Expr<'hir> { + match block { + Some(block) => self.lower_block_expr(block), + None => 
self.expr_err(span), + } + } + + pub(super) fn lower_const_body(&mut self, span: Span, expr: Option<&Expr>) -> hir::BodyId { + self.lower_body(|this| { + ( + &[], + match expr { + Some(expr) => this.lower_expr_mut(expr), + None => this.expr_err(span), + }, + ) + }) + } + + fn lower_maybe_async_body( + &mut self, + span: Span, + decl: &FnDecl, + asyncness: Async, + body: Option<&Block>, + ) -> hir::BodyId { + let closure_id = match asyncness { + Async::Yes { closure_id, .. } => closure_id, + Async::No => return self.lower_fn_body_block(span, decl, body), + }; + + self.lower_body(|this| { + let mut parameters: Vec> = Vec::new(); + let mut statements: Vec> = Vec::new(); + + // Async function parameters are lowered into the closure body so that they are + // captured and so that the drop order matches the equivalent non-async functions. + // + // from: + // + // async fn foo(: , : , : ) { + // + // } + // + // into: + // + // fn foo(__arg0: , __arg1: , __arg2: ) { + // async move { + // let __arg2 = __arg2; + // let = __arg2; + // let __arg1 = __arg1; + // let = __arg1; + // let __arg0 = __arg0; + // let = __arg0; + // drop-temps { } // see comments later in fn for details + // } + // } + // + // If `` is a simple ident, then it is lowered to a single + // `let = ;` statement as an optimization. + // + // Note that the body is embedded in `drop-temps`; an + // equivalent desugaring would be `return { + // };`. The key point is that we wish to drop all the + // let-bound variables and temporaries created in the body + // (and its tail expression!) before we drop the + // parameters (c.f. rust-lang/rust#64512). + for (index, parameter) in decl.inputs.iter().enumerate() { + let parameter = this.lower_param(parameter); + let span = parameter.pat.span; + + // Check if this is a binding pattern, if so, we can optimize and avoid adding a + // `let = __argN;` statement. In this case, we do not rename the parameter. + let (ident, is_simple_parameter) = match parameter.pat.kind { + hir::PatKind::Binding( + hir::BindingAnnotation::Unannotated | hir::BindingAnnotation::Mutable, + _, + ident, + _, + ) => (ident, true), + // For `ref mut` or wildcard arguments, we can't reuse the binding, but + // we can keep the same name for the parameter. + // This lets rustdoc render it correctly in documentation. + hir::PatKind::Binding(_, _, ident, _) => (ident, false), + hir::PatKind::Wild => { + (Ident::with_dummy_span(rustc_span::symbol::kw::Underscore), false) + } + _ => { + // Replace the ident for bindings that aren't simple. + let name = format!("__arg{}", index); + let ident = Ident::from_str(&name); + + (ident, false) + } + }; + + let desugared_span = this.mark_span_with_reason(DesugaringKind::Async, span, None); + + // Construct a parameter representing `__argN: ` to replace the parameter of the + // async function. + // + // If this is the simple case, this parameter will end up being the same as the + // original parameter, but with a different pattern id. + let stmt_attrs = this.attrs.get(¶meter.hir_id.local_id).copied(); + let (new_parameter_pat, new_parameter_id) = this.pat_ident(desugared_span, ident); + let new_parameter = hir::Param { + hir_id: parameter.hir_id, + pat: new_parameter_pat, + ty_span: this.lower_span(parameter.ty_span), + span: this.lower_span(parameter.span), + }; + + if is_simple_parameter { + // If this is the simple case, then we only insert one statement that is + // `let = ;`. We re-use the original argument's pattern so that + // `HirId`s are densely assigned. 
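+                    // (Illustrative sketch, not part of the upstream file: assuming a
+                    // parameter written as `(a, b): (u8, u8)`, it is not a plain
+                    // binding, so the `else` branch below emits roughly
+                    //
+                    // ```
+                    // let mut __argN = __argN;
+                    // let (a, b) = __argN;
+                    // ```
+                    //
+                    // while a plain `x: u32` parameter only needs the single
+                    // `let x = x;` statement built here.)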
+ let expr = this.expr_ident(desugared_span, ident, new_parameter_id); + let stmt = this.stmt_let_pat( + stmt_attrs, + desugared_span, + Some(expr), + parameter.pat, + hir::LocalSource::AsyncFn, + ); + statements.push(stmt); + } else { + // If this is not the simple case, then we construct two statements: + // + // ``` + // let __argN = __argN; + // let = __argN; + // ``` + // + // The first statement moves the parameter into the closure and thus ensures + // that the drop order is correct. + // + // The second statement creates the bindings that the user wrote. + + // Construct the `let mut __argN = __argN;` statement. It must be a mut binding + // because the user may have specified a `ref mut` binding in the next + // statement. + let (move_pat, move_id) = this.pat_ident_binding_mode( + desugared_span, + ident, + hir::BindingAnnotation::Mutable, + ); + let move_expr = this.expr_ident(desugared_span, ident, new_parameter_id); + let move_stmt = this.stmt_let_pat( + None, + desugared_span, + Some(move_expr), + move_pat, + hir::LocalSource::AsyncFn, + ); + + // Construct the `let = __argN;` statement. We re-use the original + // parameter's pattern so that `HirId`s are densely assigned. + let pattern_expr = this.expr_ident(desugared_span, ident, move_id); + let pattern_stmt = this.stmt_let_pat( + stmt_attrs, + desugared_span, + Some(pattern_expr), + parameter.pat, + hir::LocalSource::AsyncFn, + ); + + statements.push(move_stmt); + statements.push(pattern_stmt); + }; + + parameters.push(new_parameter); + } + + let body_span = body.map_or(span, |b| b.span); + let async_expr = this.make_async_expr( + CaptureBy::Value, + closure_id, + None, + body_span, + hir::AsyncGeneratorKind::Fn, + |this| { + // Create a block from the user's function body: + let user_body = this.lower_block_expr_opt(body_span, body); + + // Transform into `drop-temps { }`, an expression: + let desugared_span = + this.mark_span_with_reason(DesugaringKind::Async, user_body.span, None); + let user_body = this.expr_drop_temps( + desugared_span, + this.arena.alloc(user_body), + AttrVec::new(), + ); + + // As noted above, create the final block like + // + // ``` + // { + // let $param_pattern = $raw_param; + // ... 
+ // drop-temps { } + // } + // ``` + let body = this.block_all( + desugared_span, + this.arena.alloc_from_iter(statements), + Some(user_body), + ); + + this.expr_block(body, AttrVec::new()) + }, + ); + + ( + this.arena.alloc_from_iter(parameters), + this.expr(body_span, async_expr, AttrVec::new()), + ) + }) + } + + fn lower_method_sig( + &mut self, + generics: &Generics, + sig: &FnSig, + id: NodeId, + kind: FnDeclKind, + is_async: Option, + ) -> (&'hir hir::Generics<'hir>, hir::FnSig<'hir>) { + let header = self.lower_fn_header(sig.header); + let itctx = ImplTraitContext::Universal; + let (generics, decl) = self.lower_generics(generics, id, itctx, |this| { + this.lower_fn_decl(&sig.decl, Some(id), kind, is_async) + }); + (generics, hir::FnSig { header, decl, span: self.lower_span(sig.span) }) + } + + fn lower_fn_header(&mut self, h: FnHeader) -> hir::FnHeader { + hir::FnHeader { + unsafety: self.lower_unsafety(h.unsafety), + asyncness: self.lower_asyncness(h.asyncness), + constness: self.lower_constness(h.constness), + abi: self.lower_extern(h.ext), + } + } + + pub(super) fn lower_abi(&mut self, abi: StrLit) -> abi::Abi { + abi::lookup(abi.symbol_unescaped.as_str()).unwrap_or_else(|| { + self.error_on_invalid_abi(abi); + abi::Abi::Rust + }) + } + + pub(super) fn lower_extern(&mut self, ext: Extern) -> abi::Abi { + match ext { + Extern::None => abi::Abi::Rust, + Extern::Implicit(_) => abi::Abi::FALLBACK, + Extern::Explicit(abi, _) => self.lower_abi(abi), + } + } + + fn error_on_invalid_abi(&self, abi: StrLit) { + struct_span_err!(self.tcx.sess, abi.span, E0703, "invalid ABI: found `{}`", abi.symbol) + .span_label(abi.span, "invalid ABI") + .help(&format!("valid ABIs: {}", abi::all_names().join(", "))) + .emit(); + } + + fn lower_asyncness(&mut self, a: Async) -> hir::IsAsync { + match a { + Async::Yes { .. } => hir::IsAsync::Async, + Async::No => hir::IsAsync::NotAsync, + } + } + + fn lower_constness(&mut self, c: Const) -> hir::Constness { + match c { + Const::Yes(_) => hir::Constness::Const, + Const::No => hir::Constness::NotConst, + } + } + + pub(super) fn lower_unsafety(&mut self, u: Unsafe) -> hir::Unsafety { + match u { + Unsafe::Yes(_) => hir::Unsafety::Unsafe, + Unsafe::No => hir::Unsafety::Normal, + } + } + + /// Return the pair of the lowered `generics` as `hir::Generics` and the evaluation of `f` with + /// the carried impl trait definitions and bounds. + #[instrument(level = "debug", skip(self, f))] + fn lower_generics( + &mut self, + generics: &Generics, + parent_node_id: NodeId, + itctx: ImplTraitContext, + f: impl FnOnce(&mut Self) -> T, + ) -> (&'hir hir::Generics<'hir>, T) { + debug_assert!(self.impl_trait_defs.is_empty()); + debug_assert!(self.impl_trait_bounds.is_empty()); + + // Error if `?Trait` bounds in where clauses don't refer directly to type parameters. + // Note: we used to clone these bounds directly onto the type parameter (and avoid lowering + // these into hir when we lower thee where clauses), but this makes it quite difficult to + // keep track of the Span info. Now, `add_implicitly_sized` in `AstConv` checks both param bounds and + // where clauses for `?Sized`. + for pred in &generics.where_clause.predicates { + let WherePredicate::BoundPredicate(ref bound_pred) = *pred else { + continue; + }; + let compute_is_param = || { + // Check if the where clause type is a plain type parameter. 
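+                // (Illustrative sketch, not part of the upstream file: the
+                // resolution check below accepts, for example,
+                //
+                // ```
+                // fn f<T>(value: &T) where T: ?Sized {}
+                // ```
+                //
+                // because `T` resolves to a type parameter declared in this item's
+                // generics, whereas `where Vec<T>: ?Sized`, or a `?Sized` bound on a
+                // parameter of an enclosing item, makes `compute_is_param` return
+                // `false` and triggers the "`?Trait` bounds are only permitted at the
+                // point where a type parameter is declared" error further down.)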
+ match self + .resolver + .get_partial_res(bound_pred.bounded_ty.id) + .map(|d| (d.base_res(), d.unresolved_segments())) + { + Some((Res::Def(DefKind::TyParam, def_id), 0)) + if bound_pred.bound_generic_params.is_empty() => + { + generics + .params + .iter() + .any(|p| def_id == self.local_def_id(p.id).to_def_id()) + } + // Either the `bounded_ty` is not a plain type parameter, or + // it's not found in the generic type parameters list. + _ => false, + } + }; + // We only need to compute this once per `WherePredicate`, but don't + // need to compute this at all unless there is a Maybe bound. + let mut is_param: Option = None; + for bound in &bound_pred.bounds { + if !matches!(*bound, GenericBound::Trait(_, TraitBoundModifier::Maybe)) { + continue; + } + let is_param = *is_param.get_or_insert_with(compute_is_param); + if !is_param { + self.diagnostic().span_err( + bound.span(), + "`?Trait` bounds are only permitted at the \ + point where a type parameter is declared", + ); + } + } + } + + let mut predicates: SmallVec<[hir::WherePredicate<'hir>; 4]> = SmallVec::new(); + predicates.extend(generics.params.iter().filter_map(|param| { + self.lower_generic_bound_predicate( + param.ident, + param.id, + ¶m.kind, + ¶m.bounds, + itctx, + PredicateOrigin::GenericParam, + ) + })); + predicates.extend( + generics + .where_clause + .predicates + .iter() + .map(|predicate| self.lower_where_predicate(predicate)), + ); + + let mut params: SmallVec<[hir::GenericParam<'hir>; 4]> = + self.lower_generic_params_mut(&generics.params).collect(); + + // Introduce extra lifetimes if late resolution tells us to. + let extra_lifetimes = self.resolver.take_extra_lifetime_params(parent_node_id); + params.extend(extra_lifetimes.into_iter().filter_map(|(ident, node_id, res)| { + self.lifetime_res_to_generic_param(ident, node_id, res) + })); + + let has_where_clause_predicates = !generics.where_clause.predicates.is_empty(); + let where_clause_span = self.lower_span(generics.where_clause.span); + let span = self.lower_span(generics.span); + let res = f(self); + + let impl_trait_defs = std::mem::take(&mut self.impl_trait_defs); + params.extend(impl_trait_defs.into_iter()); + + let impl_trait_bounds = std::mem::take(&mut self.impl_trait_bounds); + predicates.extend(impl_trait_bounds.into_iter()); + + let lowered_generics = self.arena.alloc(hir::Generics { + params: self.arena.alloc_from_iter(params), + predicates: self.arena.alloc_from_iter(predicates), + has_where_clause_predicates, + where_clause_span, + span, + }); + + (lowered_generics, res) + } + + pub(super) fn lower_generic_bound_predicate( + &mut self, + ident: Ident, + id: NodeId, + kind: &GenericParamKind, + bounds: &[GenericBound], + itctx: ImplTraitContext, + origin: PredicateOrigin, + ) -> Option> { + // Do not create a clause if we do not have anything inside it. + if bounds.is_empty() { + return None; + } + + let bounds = self.lower_param_bounds(bounds, itctx); + + let ident = self.lower_ident(ident); + let param_span = ident.span; + let span = bounds + .iter() + .fold(Some(param_span.shrink_to_hi()), |span: Option, bound| { + let bound_span = bound.span(); + // We include bounds that come from a `#[derive(_)]` but point at the user's code, + // as we use this method to get a span appropriate for suggestions. + if !bound_span.can_be_used_for_suggestions() { + None + } else if let Some(span) = span { + Some(span.to(bound_span)) + } else { + Some(bound_span) + } + }) + .unwrap_or(param_span.shrink_to_hi()); + match kind { + GenericParamKind::Const { .. 
} => None, + GenericParamKind::Type { .. } => { + let def_id = self.local_def_id(id).to_def_id(); + let ty_path = self.arena.alloc(hir::Path { + span: param_span, + res: Res::Def(DefKind::TyParam, def_id), + segments: self.arena.alloc_from_iter([hir::PathSegment::from_ident(ident)]), + }); + let ty_id = self.next_id(); + let bounded_ty = + self.ty_path(ty_id, param_span, hir::QPath::Resolved(None, ty_path)); + Some(hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { + bounded_ty: self.arena.alloc(bounded_ty), + bounds, + span, + bound_generic_params: &[], + origin, + })) + } + GenericParamKind::Lifetime => { + let ident_span = self.lower_span(ident.span); + let ident = self.lower_ident(ident); + let lt_id = self.next_node_id(); + let lifetime = self.new_named_lifetime(id, lt_id, ident_span, ident); + Some(hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { + lifetime, + span, + bounds, + in_where_clause: false, + })) + } + } + } + + fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate<'hir> { + match *pred { + WherePredicate::BoundPredicate(WhereBoundPredicate { + ref bound_generic_params, + ref bounded_ty, + ref bounds, + span, + }) => hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { + bound_generic_params: self.lower_generic_params(bound_generic_params), + bounded_ty: self + .lower_ty(bounded_ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)), + bounds: self.arena.alloc_from_iter(bounds.iter().map(|bound| { + self.lower_param_bound( + bound, + ImplTraitContext::Disallowed(ImplTraitPosition::Bound), + ) + })), + span: self.lower_span(span), + origin: PredicateOrigin::WhereClause, + }), + WherePredicate::RegionPredicate(WhereRegionPredicate { + ref lifetime, + ref bounds, + span, + }) => hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { + span: self.lower_span(span), + lifetime: self.lower_lifetime(lifetime), + bounds: self.lower_param_bounds( + bounds, + ImplTraitContext::Disallowed(ImplTraitPosition::Bound), + ), + in_where_clause: true, + }), + WherePredicate::EqPredicate(WhereEqPredicate { id, ref lhs_ty, ref rhs_ty, span }) => { + hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { + hir_id: self.lower_node_id(id), + lhs_ty: self + .lower_ty(lhs_ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)), + rhs_ty: self + .lower_ty(rhs_ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type)), + span: self.lower_span(span), + }) + } + } + } +} -- cgit v1.2.3