Diffstat (limited to 'src/librustdoc/clean')
-rw-r--r--  src/librustdoc/clean/auto_trait.rs               6
-rw-r--r--  src/librustdoc/clean/blanket_impl.rs             2
-rw-r--r--  src/librustdoc/clean/cfg.rs                      8
-rw-r--r--  src/librustdoc/clean/cfg/tests.rs                5
-rw-r--r--  src/librustdoc/clean/inline.rs                  64
-rw-r--r--  src/librustdoc/clean/mod.rs                    476
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs    3
-rw-r--r--  src/librustdoc/clean/types.rs                  248
-rw-r--r--  src/librustdoc/clean/types/tests.rs              1
-rw-r--r--  src/librustdoc/clean/utils.rs                   23
10 files changed, 389 insertions, 447 deletions
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index a302750aa..9479b3ee0 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -137,7 +137,7 @@ where
pub(crate) fn get_auto_trait_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
let tcx = self.cx.tcx;
let param_env = tcx.param_env(item_def_id);
- let ty = tcx.type_of(item_def_id);
+ let ty = tcx.type_of(item_def_id).subst_identity();
let f = auto_trait::AutoTraitFinder::new(tcx);
debug!("get_auto_trait_impls({:?})", ty);
@@ -734,8 +734,8 @@ struct RegionReplacer<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for RegionReplacer<'a, 'tcx> {
+ fn interner(&self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index e6b2b2349..bcdbbcacc 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -15,7 +15,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
pub(crate) fn get_blanket_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
let cx = &mut self.cx;
let param_env = cx.tcx.param_env(item_def_id);
- let ty = cx.tcx.bound_type_of(item_def_id);
+ let ty = cx.tcx.type_of(item_def_id);
trace!("get_blanket_impls({:?})", ty);
let mut impls = Vec::new();
diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs
index f1853f369..dd58a5b51 100644
--- a/src/librustdoc/clean/cfg.rs
+++ b/src/librustdoc/clean/cfg.rs
@@ -164,10 +164,10 @@ impl Cfg {
/// Renders the configuration for human display, as a short HTML description.
pub(crate) fn render_short_html(&self) -> String {
let mut msg = Display(self, Format::ShortHtml).to_string();
- if self.should_capitalize_first_letter() {
- if let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) {
- msg[i..i + 1].make_ascii_uppercase();
- }
+ if self.should_capitalize_first_letter() &&
+ let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric())
+ {
+ msg[i..i + 1].make_ascii_uppercase();
}
msg
}
diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs
index 81f676724..bb62660e1 100644
--- a/src/librustdoc/clean/cfg/tests.rs
+++ b/src/librustdoc/clean/cfg/tests.rs
@@ -4,6 +4,7 @@ use rustc_ast::{LitKind, MetaItemLit, Path, StrStyle};
use rustc_span::create_default_session_globals_then;
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::DUMMY_SP;
+use thin_vec::thin_vec;
fn word_cfg(s: &str) -> Cfg {
Cfg::Cfg(Symbol::intern(s), None)
@@ -34,7 +35,7 @@ macro_rules! dummy_meta_item_list {
($name:ident, [$($list:ident),* $(,)?]) => {
MetaItem {
path: Path::from_ident(Ident::from_str(stringify!($name))),
- kind: MetaItemKind::List(vec![
+ kind: MetaItemKind::List(thin_vec![
$(
NestedMetaItem::MetaItem(
dummy_meta_item_word(stringify!($list)),
@@ -48,7 +49,7 @@ macro_rules! dummy_meta_item_list {
($name:ident, [$($list:expr),* $(,)?]) => {
MetaItem {
path: Path::from_ident(Ident::from_str(stringify!($name))),
- kind: MetaItemKind::List(vec![
+ kind: MetaItemKind::List(thin_vec![
$(
NestedMetaItem::MetaItem($list),
)*
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index b3b093312..148243683 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -9,7 +9,7 @@ use rustc_ast as ast;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::def_id::{DefId, DefIdSet, LocalDefId};
use rustc_hir::Mutability;
use rustc_metadata::creader::{CStore, LoadedMacro};
use rustc_middle::ty::{self, TyCtxt};
@@ -45,7 +45,7 @@ pub(crate) fn try_inline(
res: Res,
name: Symbol,
attrs: Option<&[ast::Attribute]>,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
) -> Option<Vec<clean::Item>> {
let did = res.opt_def_id()?;
if did.is_local() {
@@ -163,7 +163,7 @@ pub(crate) fn try_inline_glob(
cx: &mut DocContext<'_>,
res: Res,
current_mod: LocalDefId,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
) -> Option<Vec<clean::Item>> {
let did = res.opt_def_id()?;
@@ -251,7 +251,7 @@ pub(crate) fn build_external_trait(cx: &mut DocContext<'_>, did: DefId) -> clean
}
fn build_external_function<'tcx>(cx: &mut DocContext<'tcx>, did: DefId) -> Box<clean::Function> {
- let sig = cx.tcx.fn_sig(did);
+ let sig = cx.tcx.fn_sig(did).subst_identity();
let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var {
ty::BoundVariableKind::Region(ty::BrNamed(_, name)) if name != kw::UnderscoreLifetime => {
@@ -303,7 +303,8 @@ fn build_union(cx: &mut DocContext<'_>, did: DefId) -> clean::Union {
fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box<clean::Typedef> {
let predicates = cx.tcx.explicit_predicates_of(did);
- let type_ = clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did));
+ let type_ =
+ clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()), cx, Some(did));
Box::new(clean::Typedef {
type_,
@@ -390,18 +391,17 @@ pub(crate) fn build_impl(
// Only inline impl if the implemented trait is
// reachable in rustdoc generated documentation
- if !did.is_local() {
- if let Some(traitref) = associated_trait {
- let did = traitref.def_id;
- if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
- return;
- }
+ if !did.is_local() && let Some(traitref) = associated_trait {
+ let did = traitref.def_id;
+ if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
+ return;
+ }
- if let Some(stab) = tcx.lookup_stability(did) {
- if stab.is_unstable() && stab.feature == sym::rustc_private {
- return;
- }
- }
+ if let Some(stab) = tcx.lookup_stability(did) &&
+ stab.is_unstable() &&
+ stab.feature == sym::rustc_private
+ {
+ return;
}
}
@@ -415,7 +415,9 @@ pub(crate) fn build_impl(
let for_ = match &impl_item {
Some(impl_) => clean_ty(impl_.self_ty, cx),
- None => clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did)),
+ None => {
+ clean_middle_ty(ty::Binder::dummy(tcx.type_of(did).subst_identity()), cx, Some(did))
+ }
};
// Only inline impl if the implementing type is
@@ -525,10 +527,8 @@ pub(crate) fn build_impl(
}
while let Some(ty) = stack.pop() {
- if let Some(did) = ty.def_id(&cx.cache) {
- if tcx.is_doc_hidden(did) {
- return;
- }
+ if let Some(did) = ty.def_id(&cx.cache) && tcx.is_doc_hidden(did) {
+ return;
}
if let Some(generics) = ty.generics() {
stack.extend(generics);
@@ -568,11 +568,7 @@ pub(crate) fn build_impl(
));
}
-fn build_module(
- cx: &mut DocContext<'_>,
- did: DefId,
- visited: &mut FxHashSet<DefId>,
-) -> clean::Module {
+fn build_module(cx: &mut DocContext<'_>, did: DefId, visited: &mut DefIdSet) -> clean::Module {
let items = build_module_items(cx, did, visited, &mut FxHashSet::default(), None);
let span = clean::Span::new(cx.tcx.def_span(did));
@@ -582,9 +578,9 @@ fn build_module(
fn build_module_items(
cx: &mut DocContext<'_>,
did: DefId,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
- allowed_def_ids: Option<&FxHashSet<DefId>>,
+ allowed_def_ids: Option<&DefIdSet>,
) -> Vec<clean::Item> {
let mut items = Vec::new();
@@ -659,14 +655,22 @@ pub(crate) fn print_inlined_const(tcx: TyCtxt<'_>, did: DefId) -> String {
fn build_const(cx: &mut DocContext<'_>, def_id: DefId) -> clean::Constant {
clean::Constant {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()),
+ cx,
+ Some(def_id),
+ ),
kind: clean::ConstantKind::Extern { def_id },
}
}
fn build_static(cx: &mut DocContext<'_>, did: DefId, mutable: bool) -> clean::Static {
clean::Static {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()),
+ cx,
+ Some(did),
+ ),
mutability: if mutable { Mutability::Mut } else { Mutability::Not },
expr: None,
}
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 0f0e16265..3edc2cd2e 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -11,18 +11,20 @@ pub(crate) mod types;
pub(crate) mod utils;
use rustc_ast as ast;
+use rustc_ast::token::{Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, DefKind, Res};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId, LOCAL_CRATE};
use rustc_hir::PredicateOrigin;
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData};
-use rustc_middle::middle::resolve_lifetime as rl;
+use rustc_middle::middle::resolve_bound_vars as rbv;
use rustc_middle::ty::fold::TypeFolder;
use rustc_middle::ty::InternalSubsts;
-use rustc_middle::ty::TypeVisitable;
+use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{self, AdtKind, DefIdTree, EarlyBinder, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{AstPass, MacroKind};
@@ -37,6 +39,7 @@ use std::hash::Hash;
use std::mem;
use thin_vec::ThinVec;
+use crate::clean::inline::merge_attrs;
use crate::core::{self, DocContext, ImplTraitParam};
use crate::formats::item_type::ItemType;
use crate::visit_ast::Module as DocModule;
@@ -75,7 +78,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
// This covers the case where somebody does an import which should pull in an item,
// but there's already an item with the same namespace and same name. Rust gives
// priority to the not-imported one, so we should, too.
- items.extend(doc.items.iter().flat_map(|(item, renamed, import_id)| {
+ items.extend(doc.items.values().flat_map(|(item, renamed, import_id)| {
// First, lower everything other than imports.
if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
return Vec::new();
@@ -88,7 +91,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
}
v
}));
- items.extend(doc.items.iter().flat_map(|(item, renamed, _)| {
+ items.extend(doc.items.values().flat_map(|(item, renamed, _)| {
// Now we actually lower the imports, skipping everything else.
if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind {
let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id()));
@@ -116,7 +119,8 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
}
});
- Item::from_hir_id_and_parts(doc.id, Some(doc.name), ModuleItem(Module { items, span }), cx)
+ let kind = ModuleItem(Module { items, span });
+ Item::from_def_id_and_parts(doc.def_id.to_def_id(), Some(doc.name), kind, cx)
}
fn clean_generic_bound<'tcx>(
@@ -197,11 +201,11 @@ fn clean_poly_trait_ref_with_bindings<'tcx>(
}
fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime {
- let def = cx.tcx.named_region(lifetime.hir_id);
+ let def = cx.tcx.named_bound_var(lifetime.hir_id);
if let Some(
- rl::Region::EarlyBound(node_id)
- | rl::Region::LateBound(_, _, node_id)
- | rl::Region::Free(_, node_id),
+ rbv::ResolvedArg::EarlyBound(node_id)
+ | rbv::ResolvedArg::LateBound(_, _, node_id)
+ | rbv::ResolvedArg::Free(_, node_id),
) = def
{
if let Some(lt) = cx.substs.get(&node_id).and_then(|p| p.as_lt()).cloned() {
@@ -214,7 +218,11 @@ fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) ->
pub(crate) fn clean_const<'tcx>(constant: &hir::ConstArg, cx: &mut DocContext<'tcx>) -> Constant {
let def_id = cx.tcx.hir().body_owner_def_id(constant.value.body).to_def_id();
Constant {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()),
+ cx,
+ Some(def_id),
+ ),
kind: ConstantKind::Anonymous { body: constant.value.body },
}
}
@@ -241,6 +249,7 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Life
ty::ReLateBound(..)
| ty::ReFree(..)
| ty::ReVar(..)
+ | ty::ReError(_)
| ty::RePlaceholder(..)
| ty::ReErased => {
debug!("cannot clean region {:?}", region);
@@ -309,10 +318,13 @@ pub(crate) fn clean_predicate<'tcx>(
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
Some(clean_projection_predicate(bound_predicate.rebind(pred), cx))
}
+ // FIXME(generic_const_exprs): should this do something?
ty::PredicateKind::ConstEvaluatable(..) => None,
ty::PredicateKind::WellFormed(..) => None,
+ ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) => None,
ty::PredicateKind::Subtype(..)
+ | ty::PredicateKind::AliasEq(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
@@ -382,13 +394,10 @@ fn clean_middle_term<'tcx>(
fn clean_hir_term<'tcx>(term: &hir::Term<'tcx>, cx: &mut DocContext<'tcx>) -> Term {
match term {
hir::Term::Ty(ty) => Term::Type(clean_ty(ty, cx)),
- hir::Term::Const(c) => {
- let def_id = cx.tcx.hir().local_def_id(c.hir_id);
- Term::Constant(clean_middle_const(
- ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, def_id)),
- cx,
- ))
- }
+ hir::Term::Const(c) => Term::Constant(clean_middle_const(
+ ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, c.def_id)),
+ cx,
+ )),
}
}
@@ -479,7 +488,7 @@ fn clean_generic_param_def<'tcx>(
ty::GenericParamDefKind::Type { has_default, synthetic, .. } => {
let default = if has_default {
Some(clean_middle_ty(
- ty::Binder::dummy(cx.tcx.type_of(def.def_id)),
+ ty::Binder::dummy(cx.tcx.type_of(def.def_id).subst_identity()),
cx,
Some(def.def_id),
))
@@ -501,7 +510,12 @@ fn clean_generic_param_def<'tcx>(
GenericParamDefKind::Const {
did: def.def_id,
ty: Box::new(clean_middle_ty(
- ty::Binder::dummy(cx.tcx.type_of(def.def_id)),
+ ty::Binder::dummy(
+ cx.tcx
+ .type_of(def.def_id)
+ .no_bound_vars()
+ .expect("const parameter types cannot be generic"),
+ ),
cx,
Some(def.def_id),
)),
@@ -523,12 +537,11 @@ fn clean_generic_param<'tcx>(
generics: Option<&hir::Generics<'tcx>>,
param: &hir::GenericParam<'tcx>,
) -> GenericParamDef {
- let did = cx.tcx.hir().local_def_id(param.hir_id);
let (name, kind) = match param.kind {
hir::GenericParamKind::Lifetime { .. } => {
let outlives = if let Some(generics) = generics {
generics
- .outlives_for_param(did)
+ .outlives_for_param(param.def_id)
.filter(|bp| !bp.in_where_clause)
.flat_map(|bp| bp.bounds)
.map(|bound| match bound {
@@ -544,7 +557,7 @@ fn clean_generic_param<'tcx>(
hir::GenericParamKind::Type { ref default, synthetic } => {
let bounds = if let Some(generics) = generics {
generics
- .bounds_for_param(did)
+ .bounds_for_param(param.def_id)
.filter(|bp| bp.origin != PredicateOrigin::WhereClause)
.flat_map(|bp| bp.bounds)
.filter_map(|x| clean_generic_bound(x, cx))
@@ -555,7 +568,7 @@ fn clean_generic_param<'tcx>(
(
param.name.ident().name,
GenericParamDefKind::Type {
- did: did.to_def_id(),
+ did: param.def_id.to_def_id(),
bounds,
default: default.map(|t| clean_ty(t, cx)).map(Box::new),
synthetic,
@@ -565,12 +578,10 @@ fn clean_generic_param<'tcx>(
hir::GenericParamKind::Const { ty, default } => (
param.name.ident().name,
GenericParamDefKind::Const {
- did: did.to_def_id(),
+ did: param.def_id.to_def_id(),
ty: Box::new(clean_ty(ty, cx)),
- default: default.map(|ct| {
- let def_id = cx.tcx.hir().local_def_id(ct.hir_id);
- Box::new(ty::Const::from_anon_const(cx.tcx, def_id).to_string())
- }),
+ default: default
+ .map(|ct| Box::new(ty::Const::from_anon_const(cx.tcx, ct.def_id).to_string())),
},
),
};
@@ -789,43 +800,43 @@ fn clean_ty_generics<'tcx>(
None
})();
- if let Some(param_idx) = param_idx {
- if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
- let p: WherePredicate = clean_predicate(*p, cx)?;
+ if let Some(param_idx) = param_idx
+ && let Some(b) = impl_trait.get_mut(&param_idx.into())
+ {
+ let p: WherePredicate = clean_predicate(*p, cx)?;
+
+ b.extend(
+ p.get_bounds()
+ .into_iter()
+ .flatten()
+ .cloned()
+ .filter(|b| !b.is_sized_bound(cx)),
+ );
- b.extend(
- p.get_bounds()
+ let proj = projection.map(|p| {
+ (
+ clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
+ p.map_bound(|p| p.term),
+ )
+ });
+ if let Some(((_, trait_did, name), rhs)) = proj
+ .as_ref()
+ .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
+ {
+ // FIXME(...): Remove this unwrap()
+ impl_trait_proj.entry(param_idx).or_default().push((
+ trait_did,
+ name,
+ rhs.map_bound(|rhs| rhs.ty().unwrap()),
+ p.get_bound_params()
.into_iter()
.flatten()
- .cloned()
- .filter(|b| !b.is_sized_bound(cx)),
- );
-
- let proj = projection.map(|p| {
- (
- clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
- p.map_bound(|p| p.term),
- )
- });
- if let Some(((_, trait_did, name), rhs)) = proj
- .as_ref()
- .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
- {
- // FIXME(...): Remove this unwrap()
- impl_trait_proj.entry(param_idx).or_default().push((
- trait_did,
- name,
- rhs.map_bound(|rhs| rhs.ty().unwrap()),
- p.get_bound_params()
- .into_iter()
- .flatten()
- .map(|param| GenericParamDef::lifetime(param.0))
- .collect(),
- ));
- }
-
- return None;
+ .map(|param| GenericParamDef::lifetime(param.0))
+ .collect(),
+ ));
}
+
+ return None;
}
Some(p)
@@ -888,7 +899,7 @@ fn clean_ty_generics<'tcx>(
// `?Sized` bound for each one we didn't find to be `Sized`.
for tp in &stripped_params {
if let types::GenericParamDefKind::Type { .. } = tp.kind
- && !sized_params.contains(&tp.name)
+ && !sized_params.contains(&tp.name)
{
where_predicates.push(WherePredicate::BoundPredicate {
ty: Type::Generic(tp.name),
@@ -1214,7 +1225,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
let kind = match assoc_item.kind {
ty::AssocKind::Const => {
let ty = clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
);
@@ -1230,7 +1241,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
}
}
ty::AssocKind::Fn => {
- let sig = tcx.fn_sig(assoc_item.def_id);
+ let sig = tcx.fn_sig(assoc_item.def_id).subst_identity();
let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var {
ty::BoundVariableKind::Region(ty::BrNamed(_, name))
@@ -1253,7 +1264,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
if assoc_item.fn_has_self_parameter {
let self_ty = match assoc_item.container {
- ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)),
+ ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)).subst_identity(),
ty::TraitContainer => tcx.types.self_param,
};
let self_arg_ty = sig.input(0).skip_binder();
@@ -1400,7 +1411,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
AssocTypeItem(
Box::new(Typedef {
type_: clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
),
@@ -1418,7 +1429,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
AssocTypeItem(
Box::new(Typedef {
type_: clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
),
@@ -1463,10 +1474,10 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
// Try to normalize `<X as Y>::T` to a type
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
// `hir_to_ty` can return projection types with escaping vars for GATs, e.g. `<() as Trait>::Gat<'_>`
- if !ty.has_escaping_bound_vars() {
- if let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty)) {
- return clean_middle_ty(normalized_value, cx, None);
- }
+ if !ty.has_escaping_bound_vars()
+ && let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty))
+ {
+ return clean_middle_ty(normalized_value, cx, None);
}
let trait_segments = &p.segments[..p.segments.len() - 1];
@@ -1528,7 +1539,7 @@ fn maybe_expand_private_type_alias<'tcx>(
let hir::ItemKind::TyAlias(ty, generics) = alias else { return None };
let provided_params = &path.segments.last().expect("segments were empty");
- let mut substs = FxHashMap::default();
+ let mut substs = DefIdMap::default();
let generic_args = provided_params.args();
let mut indices: hir::GenericParamCount = Default::default();
@@ -1547,18 +1558,16 @@ fn maybe_expand_private_type_alias<'tcx>(
_ => None,
});
if let Some(lt) = lifetime {
- let lt_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let cleaned = if !lt.is_anonymous() {
clean_lifetime(lt, cx)
} else {
Lifetime::elided()
};
- substs.insert(lt_def_id.to_def_id(), SubstParam::Lifetime(cleaned));
+ substs.insert(param.def_id.to_def_id(), SubstParam::Lifetime(cleaned));
}
indices.lifetimes += 1;
}
hir::GenericParamKind::Type { ref default, .. } => {
- let ty_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let mut j = 0;
let type_ = generic_args.args.iter().find_map(|arg| match arg {
hir::GenericArg::Type(ty) => {
@@ -1571,17 +1580,14 @@ fn maybe_expand_private_type_alias<'tcx>(
_ => None,
});
if let Some(ty) = type_ {
- substs.insert(ty_param_def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx)));
+ substs.insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx)));
} else if let Some(default) = *default {
- substs.insert(
- ty_param_def_id.to_def_id(),
- SubstParam::Type(clean_ty(default, cx)),
- );
+ substs
+ .insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(default, cx)));
}
indices.types += 1;
}
hir::GenericParamKind::Const { .. } => {
- let const_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let mut j = 0;
let const_ = generic_args.args.iter().find_map(|arg| match arg {
hir::GenericArg::Const(ct) => {
@@ -1595,7 +1601,7 @@ fn maybe_expand_private_type_alias<'tcx>(
});
if let Some(ct) = const_ {
substs.insert(
- const_param_def_id.to_def_id(),
+ param.def_id.to_def_id(),
SubstParam::Constant(clean_const(ct, cx)),
);
}
@@ -1623,7 +1629,6 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
let length = match length {
hir::ArrayLen::Infer(_, _) => "_".to_string(),
hir::ArrayLen::Body(anon_const) => {
- let def_id = cx.tcx.hir().local_def_id(anon_const.hir_id);
// NOTE(min_const_generics): We can't use `const_eval_poly` for constants
// as we currently do not supply the parent generics to anonymous constants
// but do allow `ConstKind::Param`.
@@ -1631,8 +1636,8 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
// `const_eval_poly` tries to first substitute generic parameters which
// results in an ICE while manually constructing the constant and using `eval`
// does nothing for `ConstKind::Param`.
- let ct = ty::Const::from_anon_const(cx.tcx, def_id);
- let param_env = cx.tcx.param_env(def_id);
+ let ct = ty::Const::from_anon_const(cx.tcx, anon_const.def_id);
+ let param_env = cx.tcx.param_env(anon_const.def_id);
print_const(cx, ct.eval(cx.tcx, param_env))
}
};
@@ -1657,7 +1662,7 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
}
TyKind::BareFn(barefn) => BareFunction(Box::new(clean_bare_fn_ty(barefn, cx))),
// Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s.
- TyKind::Infer | TyKind::Err | TyKind::Typeof(..) => Infer,
+ TyKind::Infer | TyKind::Err(_) | TyKind::Typeof(..) => Infer,
}
}
@@ -1854,6 +1859,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
ty::Bound(..) => panic!("Bound"),
ty::Placeholder(..) => panic!("Placeholder"),
ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
+ ty::GeneratorWitnessMIR(..) => panic!("GeneratorWitnessMIR"),
ty::Infer(..) => panic!("Infer"),
ty::Error(_) => rustc_errors::FatalError.raise(),
}
@@ -1885,11 +1891,9 @@ fn clean_middle_opaque_bounds<'tcx>(
_ => return None,
};
- if let Some(sized) = cx.tcx.lang_items().sized_trait() {
- if trait_ref.def_id() == sized {
- has_sized = true;
- return None;
- }
+ if let Some(sized) = cx.tcx.lang_items().sized_trait() && trait_ref.def_id() == sized {
+ has_sized = true;
+ return None;
}
let bindings: ThinVec<_> = bounds
@@ -1928,15 +1932,18 @@ fn clean_middle_opaque_bounds<'tcx>(
}
pub(crate) fn clean_field<'tcx>(field: &hir::FieldDef<'tcx>, cx: &mut DocContext<'tcx>) -> Item {
- let def_id = cx.tcx.hir().local_def_id(field.hir_id).to_def_id();
- clean_field_with_def_id(def_id, field.ident.name, clean_ty(field.ty, cx), cx)
+ clean_field_with_def_id(field.def_id.to_def_id(), field.ident.name, clean_ty(field.ty, cx), cx)
}
pub(crate) fn clean_middle_field<'tcx>(field: &ty::FieldDef, cx: &mut DocContext<'tcx>) -> Item {
clean_field_with_def_id(
field.did,
field.name,
- clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(field.did)), cx, Some(field.did)),
+ clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(field.did).subst_identity()),
+ cx,
+ Some(field.did),
+ ),
cx,
)
}
@@ -1979,10 +1986,8 @@ fn clean_variant_data<'tcx>(
disr_expr: &Option<hir::AnonConst>,
cx: &mut DocContext<'tcx>,
) -> Variant {
- let discriminant = disr_expr.map(|disr| Discriminant {
- expr: Some(disr.body),
- value: cx.tcx.hir().local_def_id(disr.hir_id).to_def_id(),
- });
+ let discriminant = disr_expr
+ .map(|disr| Discriminant { expr: Some(disr.body), value: disr.def_id.to_def_id() });
let kind = match variant {
hir::VariantData::Struct(..) => VariantKind::Struct(VariantStruct {
@@ -2067,12 +2072,12 @@ struct OneLevelVisitor<'hir> {
map: rustc_middle::hir::map::Map<'hir>,
item: Option<&'hir hir::Item<'hir>>,
looking_for: Ident,
- target_hir_id: hir::HirId,
+ target_def_id: LocalDefId,
}
impl<'hir> OneLevelVisitor<'hir> {
- fn new(map: rustc_middle::hir::map::Map<'hir>, target_hir_id: hir::HirId) -> Self {
- Self { map, item: None, looking_for: Ident::empty(), target_hir_id }
+ fn new(map: rustc_middle::hir::map::Map<'hir>, target_def_id: LocalDefId) -> Self {
+ Self { map, item: None, looking_for: Ident::empty(), target_def_id }
}
fn reset(&mut self, looking_for: Ident) {
@@ -2091,8 +2096,8 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
if self.item.is_none()
&& item.ident == self.looking_for
- && matches!(item.kind, hir::ItemKind::Use(_, _))
- || item.hir_id() == self.target_hir_id
+ && (matches!(item.kind, hir::ItemKind::Use(_, _))
+ || item.owner_id.def_id == self.target_def_id)
{
self.item = Some(item);
}
@@ -2106,41 +2111,168 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
fn get_all_import_attributes<'hir>(
mut item: &hir::Item<'hir>,
tcx: TyCtxt<'hir>,
- target_hir_id: hir::HirId,
+ target_def_id: LocalDefId,
attributes: &mut Vec<ast::Attribute>,
+ is_inline: bool,
) {
+ let mut first = true;
let hir_map = tcx.hir();
- let mut visitor = OneLevelVisitor::new(hir_map, target_hir_id);
+ let mut visitor = OneLevelVisitor::new(hir_map, target_def_id);
let mut visited = FxHashSet::default();
+
// If the item is an import and has at least a path with two parts, we go into it.
- while let hir::ItemKind::Use(path, _) = item.kind &&
- path.segments.len() > 1 &&
- let hir::def::Res::Def(_, def_id) = path.segments[path.segments.len() - 2].res &&
- visited.insert(def_id)
- {
- if let Some(hir::Node::Item(parent_item)) = hir_map.get_if_local(def_id) {
- // We add the attributes from this import into the list.
+ while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) {
+ if first {
+ // This is the "original" reexport so we get all its attributes without filtering them.
attributes.extend_from_slice(hir_map.attrs(item.hir_id()));
- // We get the `Ident` we will be looking for into `item`.
- let looking_for = path.segments[path.segments.len() - 1].ident;
- visitor.reset(looking_for);
- hir::intravisit::walk_item(&mut visitor, parent_item);
- if let Some(i) = visitor.item {
- item = i;
- } else {
- break;
+ first = false;
+ } else {
+ add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()), is_inline);
+ }
+
+ let def_id = if let [.., parent_segment, _] = &path.segments {
+ match parent_segment.res {
+ hir::def::Res::Def(_, def_id) => def_id,
+ _ if parent_segment.ident.name == kw::Crate => {
+ // In case the "parent" is the crate, it'll give `Res::Err` so we need to
+ // circumvent it this way.
+ tcx.parent(item.owner_id.def_id.to_def_id())
+ }
+ _ => break,
}
} else {
+ // If the path doesn't have a parent, then the parent is the current module.
+ tcx.parent(item.owner_id.def_id.to_def_id())
+ };
+
+ let Some(parent) = hir_map.get_if_local(def_id) else { break };
+
+ // We get the `Ident` we will be looking for into `item`.
+ let looking_for = path.segments[path.segments.len() - 1].ident;
+ visitor.reset(looking_for);
+
+ match parent {
+ hir::Node::Item(parent_item) => {
+ hir::intravisit::walk_item(&mut visitor, parent_item);
+ }
+ hir::Node::Crate(m) => {
+ hir::intravisit::walk_mod(
+ &mut visitor,
+ m,
+ tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()),
+ );
+ }
+ _ => break,
+ }
+ if let Some(i) = visitor.item {
+ item = i;
+ } else {
break;
}
}
}
+fn filter_tokens_from_list(
+ args_tokens: TokenStream,
+ should_retain: impl Fn(&TokenTree) -> bool,
+) -> Vec<TokenTree> {
+ let mut tokens = Vec::with_capacity(args_tokens.len());
+ let mut skip_next_comma = false;
+ for token in args_tokens.into_trees() {
+ match token {
+ TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
+ skip_next_comma = false;
+ }
+ token if should_retain(&token) => {
+ skip_next_comma = false;
+ tokens.push(token);
+ }
+ _ => {
+ skip_next_comma = true;
+ }
+ }
+ }
+ tokens
+}
+
+/// When inlining items, we merge its attributes (and all the reexports attributes too) with the
+/// final reexport. For example:
+///
+/// ```ignore (just an example)
+/// #[doc(hidden, cfg(feature = "foo"))]
+/// pub struct Foo;
+///
+/// #[doc(cfg(feature = "bar"))]
+/// #[doc(hidden, no_inline)]
+/// pub use Foo as Foo1;
+///
+/// #[doc(inline)]
+/// pub use Foo2 as Bar;
+/// ```
+///
+/// So `Bar` at the end will have both `cfg(feature = "...")`. However, we don't want to merge all
+/// attributes so we filter out the following ones:
+/// * `doc(inline)`
+/// * `doc(no_inline)`
+/// * `doc(hidden)`
+fn add_without_unwanted_attributes(
+ attrs: &mut Vec<ast::Attribute>,
+ new_attrs: &[ast::Attribute],
+ is_inline: bool,
+) {
+ // If it's `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything.
+ if !is_inline {
+ attrs.extend_from_slice(new_attrs);
+ return;
+ }
+ for attr in new_attrs {
+ let mut attr = attr.clone();
+ match attr.kind {
+ ast::AttrKind::Normal(ref mut normal) => {
+ if let [ident] = &*normal.item.path.segments &&
+ let ident = ident.ident.name &&
+ ident == sym::doc
+ {
+ match normal.item.args {
+ ast::AttrArgs::Delimited(ref mut args) => {
+ let tokens =
+ filter_tokens_from_list(args.tokens.clone(), |token| {
+ !matches!(
+ token,
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(
+ sym::hidden | sym::inline | sym::no_inline,
+ _,
+ ),
+ ..
+ },
+ _,
+ ),
+ )
+ });
+ args.tokens = TokenStream::new(tokens);
+ attrs.push(attr);
+ }
+ ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
+ attrs.push(attr);
+ continue;
+ }
+ }
+ }
+ }
+ ast::AttrKind::DocComment(..) => {
+ attrs.push(attr);
+ }
+ }
+ }
+}
+
fn clean_maybe_renamed_item<'tcx>(
cx: &mut DocContext<'tcx>,
item: &hir::Item<'tcx>,
renamed: Option<Symbol>,
- import_id: Option<hir::HirId>,
+ import_id: Option<LocalDefId>,
) -> Vec<Item> {
use hir::ItemKind;
@@ -2185,7 +2317,7 @@ fn clean_maybe_renamed_item<'tcx>(
generics: clean_generics(generics, cx),
fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
}),
- ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx),
+ ItemKind::Impl(impl_) => return clean_impl(impl_, item.owner_id.def_id, cx),
// proc macros can have a name set by attributes
ItemKind::Fn(ref sig, generics, body_id) => {
clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
@@ -2218,40 +2350,38 @@ fn clean_maybe_renamed_item<'tcx>(
_ => unreachable!("not yet converted"),
};
- let mut extra_attrs = Vec::new();
- if let Some(hir::Node::Item(use_node)) =
- import_id.and_then(|hir_id| cx.tcx.hir().find(hir_id))
+ let mut import_attrs = Vec::new();
+ let mut target_attrs = Vec::new();
+ if let Some(import_id) = import_id &&
+ let Some(hir::Node::Item(use_node)) = cx.tcx.hir().find_by_def_id(import_id)
{
- // We get all the various imports' attributes.
- get_all_import_attributes(use_node, cx.tcx, item.hir_id(), &mut extra_attrs);
- }
-
- if !extra_attrs.is_empty() {
- extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
- let attrs = Attributes::from_ast(&extra_attrs);
- let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
-
- vec![Item::from_def_id_and_attrs_and_parts(
- def_id,
- Some(name),
- kind,
- Box::new(attrs),
- cfg,
- )]
+ let is_inline = inline::load_attrs(cx, import_id.to_def_id()).lists(sym::doc).get_word_attr(sym::inline).is_some();
+ // Then we get all the various imports' attributes.
+ get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut import_attrs, is_inline);
+ add_without_unwanted_attributes(&mut target_attrs, inline::load_attrs(cx, def_id), is_inline);
} else {
- vec![Item::from_def_id_and_parts(def_id, Some(name), kind, cx)]
+ // We only keep the item's attributes.
+ target_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
}
+
+ let import_parent = import_id.map(|import_id| cx.tcx.local_parent(import_id).to_def_id());
+ let (attrs, cfg) = merge_attrs(cx, import_parent, &target_attrs, Some(&import_attrs));
+
+ let mut item =
+ Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);
+ item.inline_stmt_id = import_id.map(|def_id| def_id.to_def_id());
+ vec![item]
})
}
fn clean_variant<'tcx>(variant: &hir::Variant<'tcx>, cx: &mut DocContext<'tcx>) -> Item {
let kind = VariantItem(clean_variant_data(&variant.data, &variant.disr_expr, cx));
- Item::from_hir_id_and_parts(variant.hir_id, Some(variant.ident.name), kind, cx)
+ Item::from_def_id_and_parts(variant.def_id.to_def_id(), Some(variant.ident.name), kind, cx)
}
fn clean_impl<'tcx>(
impl_: &hir::Impl<'tcx>,
- hir_id: hir::HirId,
+ def_id: LocalDefId,
cx: &mut DocContext<'tcx>,
) -> Vec<Item> {
let tcx = cx.tcx;
@@ -2262,7 +2392,6 @@ fn clean_impl<'tcx>(
.iter()
.map(|ii| clean_impl_item(tcx.hir().impl_item(ii.id), cx))
.collect::<Vec<_>>();
- let def_id = tcx.hir().local_def_id(hir_id);
// If this impl block is an implementation of the Deref trait, then we
// need to try inlining the target's inherent impl blocks as well.
@@ -2272,9 +2401,11 @@ fn clean_impl<'tcx>(
let for_ = clean_ty(impl_.self_ty, cx);
let type_alias = for_.def_id(&cx.cache).and_then(|did| match tcx.def_kind(did) {
- DefKind::TyAlias => {
- Some(clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did)))
- }
+ DefKind::TyAlias => Some(clean_middle_ty(
+ ty::Binder::dummy(tcx.type_of(did).subst_identity()),
+ cx,
+ Some(did),
+ )),
_ => None,
});
let mut make_item = |trait_: Option<Path>, for_: Type, items: Vec<Item>| {
@@ -2291,7 +2422,7 @@ fn clean_impl<'tcx>(
ImplKind::Normal
},
}));
- Item::from_hir_id_and_parts(hir_id, None, kind, cx)
+ Item::from_def_id_and_parts(def_id.to_def_id(), None, kind, cx)
};
if let Some(type_alias) = type_alias {
ret.push(make_item(trait_.clone(), type_alias, items.clone()));
@@ -2323,7 +2454,7 @@ fn clean_extern_crate<'tcx>(
let krate_owner_def_id = krate.owner_id.to_def_id();
if please_inline {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
let res = Res::Def(DefKind::Mod, crate_def_id);
@@ -2405,17 +2536,15 @@ fn clean_use_statement_inner<'tcx>(
let is_visible_from_parent_mod =
visibility.is_accessible_from(parent_mod, cx.tcx) && !current_mod.is_top_level_module();
- if pub_underscore {
- if let Some(ref inline) = inline_attr {
- rustc_errors::struct_span_err!(
- cx.tcx.sess,
- inline.span(),
- E0780,
- "anonymous imports cannot be inlined"
- )
- .span_label(import.span, "anonymous import")
- .emit();
- }
+ if pub_underscore && let Some(ref inline) = inline_attr {
+ rustc_errors::struct_span_err!(
+ cx.tcx.sess,
+ inline.span(),
+ E0780,
+ "anonymous imports cannot be inlined"
+ )
+ .span_label(import.span, "anonymous import")
+ .emit();
}
// We consider inlining the documentation of `pub use` statements, but we
@@ -2442,7 +2571,7 @@ fn clean_use_statement_inner<'tcx>(
let path = clean_path(path, cx);
let inner = if kind == hir::UseKind::Glob {
if !denied {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
if let Some(items) =
inline::try_inline_glob(cx, path.res, current_mod, &mut visited, inlined_names)
{
@@ -2451,17 +2580,16 @@ fn clean_use_statement_inner<'tcx>(
}
Import::new_glob(resolve_use_source(cx, path), true)
} else {
- if inline_attr.is_none() {
- if let Res::Def(DefKind::Mod, did) = path.res {
- if !did.is_local() && did.is_crate_root() {
- // if we're `pub use`ing an extern crate root, don't inline it unless we
- // were specifically asked for it
- denied = true;
- }
- }
+ if inline_attr.is_none()
+ && let Res::Def(DefKind::Mod, did) = path.res
+ && !did.is_local() && did.is_crate_root()
+ {
+ // if we're `pub use`ing an extern crate root, don't inline it unless we
+ // were specifically asked for it
+ denied = true;
}
if !denied {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
let import_def_id = import.owner_id.to_def_id();
if let Some(mut items) = inline::try_inline(
@@ -2512,8 +2640,8 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
hir::ForeignItemKind::Type => ForeignTypeItem,
};
- Item::from_hir_id_and_parts(
- item.hir_id(),
+ Item::from_def_id_and_parts(
+ item.owner_id.def_id.to_def_id(),
Some(renamed.unwrap_or(item.ident.name)),
kind,
cx,
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index ed7683e36..ef38ca3c1 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -63,7 +63,8 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String
let snippet = source_map.span_to_snippet(span).ok()?;
// Create a Parser.
- let sess = ParseSess::new(FilePathMapping::empty());
+ let sess =
+ ParseSess::new(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), FilePathMapping::empty());
let file_name = source_map.span_to_filename(span);
let mut parser =
match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 87de41fde..27d18aad7 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -5,13 +5,12 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::OnceLock as OnceCell;
-use std::{cmp, fmt, iter};
+use std::{fmt, iter};
use arrayvec::ArrayVec;
use thin_vec::ThinVec;
-use rustc_ast::util::comments::beautify_doc_string;
-use rustc_ast::{self as ast, AttrStyle};
+use rustc_ast as ast;
use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel};
use rustc_const_eval::const_eval::is_unstable_const_fn;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -24,6 +23,7 @@ use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety;
use rustc_index::vec::IndexVec;
use rustc_middle::ty::fast_reject::SimplifiedType;
use rustc_middle::ty::{self, DefIdTree, TyCtxt, Visibility};
+use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment};
use rustc_session::Session;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -182,10 +182,8 @@ impl ExternalCrate {
return Local;
}
- if extern_url_takes_precedence {
- if let Some(url) = extern_url {
- return to_remote(url);
- }
+ if extern_url_takes_precedence && let Some(url) = extern_url {
+ return to_remote(url);
}
// Failing that, see if there's an attribute specifying where to find this
@@ -405,7 +403,7 @@ impl Item {
pub(crate) fn inner_docs(&self, tcx: TyCtxt<'_>) -> bool {
self.item_id
.as_def_id()
- .map(|did| tcx.get_attrs_unchecked(did).inner_docs())
+ .map(|did| inner_docs(tcx.get_attrs_unchecked(did)))
.unwrap_or(false)
}
@@ -439,17 +437,6 @@ impl Item {
self.attrs.doc_value()
}
- /// Convenience wrapper around [`Self::from_def_id_and_parts`] which converts
- /// `hir_id` to a [`DefId`]
- pub(crate) fn from_hir_id_and_parts(
- hir_id: hir::HirId,
- name: Option<Symbol>,
- kind: ItemKind,
- cx: &mut DocContext<'_>,
- ) -> Item {
- Item::from_def_id_and_parts(cx.tcx.hir().local_def_id(hir_id).to_def_id(), name, kind, cx)
- }
-
pub(crate) fn from_def_id_and_parts(
def_id: DefId,
name: Option<Symbol>,
@@ -493,16 +480,16 @@ impl Item {
}
pub(crate) fn links(&self, cx: &Context<'_>) -> Vec<RenderedLink> {
- use crate::html::format::href;
+ use crate::html::format::{href, link_tooltip};
cx.cache()
.intra_doc_links
.get(&self.item_id)
.map_or(&[][..], |v| v.as_slice())
.iter()
- .filter_map(|ItemLink { link: s, link_text, page_id: did, ref fragment }| {
- debug!(?did);
- if let Ok((mut href, ..)) = href(*did, cx) {
+ .filter_map(|ItemLink { link: s, link_text, page_id: id, ref fragment }| {
+ debug!(?id);
+ if let Ok((mut href, ..)) = href(*id, cx) {
debug!(?href);
if let Some(ref fragment) = *fragment {
fragment.render(&mut href, cx.tcx())
@@ -510,6 +497,7 @@ impl Item {
Some(RenderedLink {
original_text: s.clone(),
new_text: link_text.clone(),
+ tooltip: link_tooltip(*id, fragment, cx),
href,
})
} else {
@@ -534,6 +522,7 @@ impl Item {
original_text: s.clone(),
new_text: link_text.clone(),
href: String::new(),
+ tooltip: String::new(),
})
.collect()
}
@@ -665,7 +654,7 @@ impl Item {
tcx: TyCtxt<'_>,
asyncness: hir::IsAsync,
) -> hir::FnHeader {
- let sig = tcx.fn_sig(def_id);
+ let sig = tcx.fn_sig(def_id).skip_binder();
let constness =
if tcx.is_const_fn(def_id) && is_unstable_const_fn(tcx, def_id).is_none() {
hir::Constness::Const
@@ -677,7 +666,7 @@ impl Item {
let header = match *self.kind {
ItemKind::ForeignFunctionItem(_) => {
let def_id = self.item_id.as_def_id().unwrap();
- let abi = tcx.fn_sig(def_id).abi();
+ let abi = tcx.fn_sig(def_id).skip_binder().abi();
hir::FnHeader {
unsafety: if abi == Abi::RustIntrinsic {
intrinsic_operation_unsafety(tcx, self.item_id.as_def_id().unwrap())
@@ -885,8 +874,6 @@ pub(crate) trait AttributesExt {
fn span(&self) -> Option<rustc_span::Span>;
- fn inner_docs(&self) -> bool;
-
fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>>;
}
@@ -905,14 +892,6 @@ impl AttributesExt for [ast::Attribute] {
self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span)
}
- /// Returns whether the first doc-comment is an inner attribute.
- ///
- //// If there are no doc-comments, return true.
- /// FIXME(#78591): Support both inner and outer attributes on the same item.
- fn inner_docs(&self) -> bool {
- self.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == AttrStyle::Inner)
- }
-
fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>> {
let sess = tcx.sess;
let doc_cfg_active = tcx.features().doc_cfg;
@@ -1021,58 +1000,6 @@ impl<I: Iterator<Item = ast::NestedMetaItem>> NestedAttributesExt for I {
}
}
-/// A portion of documentation, extracted from a `#[doc]` attribute.
-///
-/// Each variant contains the line number within the complete doc-comment where the fragment
-/// starts, as well as the Span where the corresponding doc comment or attribute is located.
-///
-/// Included files are kept separate from inline doc comments so that proper line-number
-/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are
-/// kept separate because of issue #42760.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub(crate) struct DocFragment {
- pub(crate) span: rustc_span::Span,
- /// The module this doc-comment came from.
- ///
- /// This allows distinguishing between the original documentation and a pub re-export.
- /// If it is `None`, the item was not re-exported.
- pub(crate) parent_module: Option<DefId>,
- pub(crate) doc: Symbol,
- pub(crate) kind: DocFragmentKind,
- pub(crate) indent: usize,
-}
-
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
-pub(crate) enum DocFragmentKind {
- /// A doc fragment created from a `///` or `//!` doc comment.
- SugaredDoc,
- /// A doc fragment created from a "raw" `#[doc=""]` attribute.
- RawDoc,
-}
-
-/// The goal of this function is to apply the `DocFragment` transformation that is required when
-/// transforming into the final Markdown, which is applying the computed indent to each line in
-/// each doc fragment (a `DocFragment` can contain multiple lines in case of `#[doc = ""]`).
-///
-/// Note: remove the trailing newline where appropriate
-fn add_doc_fragment(out: &mut String, frag: &DocFragment) {
- let s = frag.doc.as_str();
- let mut iter = s.lines();
- if s.is_empty() {
- out.push('\n');
- return;
- }
- while let Some(line) = iter.next() {
- if line.chars().any(|c| !c.is_whitespace()) {
- assert!(line.len() >= frag.indent);
- out.push_str(&line[frag.indent..]);
- } else {
- out.push_str(line);
- }
- out.push('\n');
- }
-}
-
/// Collapse a collection of [`DocFragment`]s into one string,
/// handling indentation and newlines as needed.
pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
@@ -1084,98 +1011,18 @@ pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
acc
}
-/// Removes excess indentation on comments in order for the Markdown
-/// to be parsed correctly. This is necessary because the convention for
-/// writing documentation is to provide a space between the /// or //! marker
-/// and the doc text, but Markdown is whitespace-sensitive. For example,
-/// a block of text with four-space indentation is parsed as a code block,
-/// so if we didn't unindent comments, these list items
-///
-/// /// A list:
-/// ///
-/// /// - Foo
-/// /// - Bar
-///
-/// would be parsed as if they were in a code block, which is likely not what the user intended.
-fn unindent_doc_fragments(docs: &mut Vec<DocFragment>) {
- // `add` is used in case the most common sugared doc syntax is used ("/// "). The other
- // fragments kind's lines are never starting with a whitespace unless they are using some
- // markdown formatting requiring it. Therefore, if the doc block have a mix between the two,
- // we need to take into account the fact that the minimum indent minus one (to take this
- // whitespace into account).
- //
- // For example:
- //
- // /// hello!
- // #[doc = "another"]
- //
- // In this case, you want "hello! another" and not "hello!  another".
- let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind)
- && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc)
- {
- // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to
- // "decide" how much the minimum indent will be.
- 1
- } else {
- 0
- };
-
- // `min_indent` is used to know how much whitespaces from the start of each lines must be
- // removed. Example:
- //
- // /// hello!
- // #[doc = "another"]
- //
- // In here, the `min_indent` is 1 (because non-sugared fragment are always counted with minimum
- // 1 whitespace), meaning that "hello!" will be considered a codeblock because it starts with 4
- // (5 - 1) whitespaces.
- let Some(min_indent) = docs
- .iter()
- .map(|fragment| {
- fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| {
- if line.chars().all(|c| c.is_whitespace()) {
- min_indent
- } else {
- // Compare against either space or tab, ignoring whether they are
- // mixed or not.
- let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
- cmp::min(min_indent, whitespace)
- + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add }
- }
- })
- })
- .min()
- else {
- return;
- };
-
- for fragment in docs {
- if fragment.doc == kw::Empty {
- continue;
- }
-
- let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 {
- min_indent - add
- } else {
- min_indent
- };
-
- fragment.indent = min_indent;
- }
-}
-
/// A link that has not yet been rendered.
///
/// This link will be turned into a rendered link by [`Item::links`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct ItemLink {
/// The original link written in the markdown
- pub(crate) link: String,
+ pub(crate) link: Box<str>,
/// The link text displayed in the HTML.
///
/// This may not be the same as `link` if there was a disambiguator
/// in an intra-doc link (e.g. \[`fn@f`\])
- pub(crate) link_text: String,
+ pub(crate) link_text: Box<str>,
/// The `DefId` of the Item whose **HTML Page** contains the item being
/// linked to. This will be different to `item_id` on item's that don't
/// have their own page, such as struct fields and enum variants.
@@ -1188,11 +1035,13 @@ pub struct RenderedLink {
/// The text the link was original written as.
///
/// This could potentially include disambiguators and backticks.
- pub(crate) original_text: String,
+ pub(crate) original_text: Box<str>,
/// The text to display in the HTML
- pub(crate) new_text: String,
+ pub(crate) new_text: Box<str>,
/// The URL to put in the `href`
pub(crate) href: String,
+ /// The tooltip.
+ pub(crate) tooltip: String,
}
/// The attributes on an [`Item`], including attributes like `#[derive(...)]` and `#[inline]`,
@@ -1242,26 +1091,7 @@ impl Attributes {
attrs: impl Iterator<Item = (&'a ast::Attribute, Option<DefId>)>,
doc_only: bool,
) -> Attributes {
- let mut doc_strings = Vec::new();
- let mut other_attrs = ast::AttrVec::new();
- for (attr, parent_module) in attrs {
- if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
- trace!("got doc_str={doc_str:?}");
- let doc = beautify_doc_string(doc_str, comment_kind);
- let kind = if attr.is_doc_comment() {
- DocFragmentKind::SugaredDoc
- } else {
- DocFragmentKind::RawDoc
- };
- let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 };
- doc_strings.push(fragment);
- } else if !doc_only {
- other_attrs.push(attr.clone());
- }
- }
-
- unindent_doc_fragments(&mut doc_strings);
-
+ let (doc_strings, other_attrs) = attrs_to_doc_fragments(attrs, doc_only);
Attributes { doc_strings, other_attrs }
}
@@ -1280,20 +1110,6 @@ impl Attributes {
if out.is_empty() { None } else { Some(out) }
}
- /// Return the doc-comments on this item, grouped by the module they came from.
- /// The module can be different if this is a re-export with added documentation.
- ///
- /// The last newline is not trimmed so the produced strings are reusable between
- /// early and late doc link resolution regardless of their position.
- pub(crate) fn prepare_to_doc_link_resolution(&self) -> FxHashMap<Option<DefId>, String> {
- let mut res = FxHashMap::default();
- for fragment in &self.doc_strings {
- let out_str = res.entry(fragment.parent_module).or_default();
- add_doc_fragment(out_str, fragment);
- }
- res
- }
-
/// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
/// with newlines.
pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
@@ -1358,10 +1174,10 @@ impl GenericBound {
pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
use rustc_hir::TraitBoundModifier as TBM;
- if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self {
- if Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() {
- return true;
- }
+ if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self &&
+ Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait()
+ {
+ return true;
}
false
}
@@ -2416,10 +2232,7 @@ impl ConstantKind {
pub(crate) fn is_literal(&self, tcx: TyCtxt<'_>) -> bool {
match *self {
- ConstantKind::TyConst { .. } => false,
- ConstantKind::Extern { def_id } => def_id.as_local().map_or(false, |def_id| {
- is_literal_expr(tcx, tcx.hir().local_def_id_to_hir_id(def_id))
- }),
+ ConstantKind::TyConst { .. } | ConstantKind::Extern { .. } => false,
ConstantKind::Local { body, .. } | ConstantKind::Anonymous { body } => {
is_literal_expr(tcx, body.hir_id)
}
@@ -2498,14 +2311,7 @@ impl Import {
}
pub(crate) fn imported_item_is_doc_hidden(&self, tcx: TyCtxt<'_>) -> bool {
- match self.source.did {
- Some(did) => tcx
- .get_attrs(did, sym::doc)
- .filter_map(ast::Attribute::meta_item_list)
- .flatten()
- .has_word(sym::hidden),
- None => false,
- }
+ self.source.did.map_or(false, |did| tcx.is_doc_hidden(did))
}
}
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
index 71eddf434..20627c2cf 100644
--- a/src/librustdoc/clean/types/tests.rs
+++ b/src/librustdoc/clean/types/tests.rs
@@ -2,6 +2,7 @@ use super::*;
use crate::clean::collapse_doc_fragments;
+use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind};
use rustc_span::create_default_session_globals_then;
use rustc_span::source_map::DUMMY_SP;
use rustc_span::symbol::Symbol;
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index a12f764fa..c9c1c2c45 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -29,11 +29,6 @@ mod tests;
pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
let module = crate::visit_ast::RustdocVisitor::new(cx).visit();
- for &cnum in cx.tcx.crates(()) {
- // Analyze doc-reachability for extern items
- crate::visit_lib::lib_embargo_visit_item(cx, cnum.as_def_id());
- }
-
// Clean the crate, translating the entire librustc_ast AST to one that is
// understood by rustdoc.
let mut module = clean_doc_module(&module, cx);
@@ -134,7 +129,7 @@ fn external_generic_args<'tcx>(
});
GenericArgs::Parenthesized { inputs, output }
} else {
- GenericArgs::AngleBracketed { args: args.into(), bindings: bindings.into() }
+ GenericArgs::AngleBracketed { args: args.into(), bindings }
}
}
@@ -271,7 +266,7 @@ pub(crate) fn print_evaluated_const(
underscores_and_type: bool,
) -> Option<String> {
tcx.const_eval_poly(def_id).ok().and_then(|val| {
- let ty = tcx.type_of(def_id);
+ let ty = tcx.type_of(def_id).subst_identity();
match (val, ty.kind()) {
(_, &ty::Ref(..)) => None,
(ConstValue::Scalar(_), &ty::Adt(_, _)) => None,
@@ -350,10 +345,10 @@ pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
return true;
}
- if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind {
- if let hir::ExprKind::Lit(_) = &expr.kind {
- return true;
- }
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind &&
+ let hir::ExprKind::Lit(_) = &expr.kind
+ {
+ return true;
}
}
@@ -475,6 +470,12 @@ pub(crate) fn get_auto_trait_and_blanket_impls(
cx: &mut DocContext<'_>,
item_def_id: DefId,
) -> impl Iterator<Item = Item> {
+ // FIXME: To be removed once `parallel_compiler` bugs are fixed!
+ // More information in <https://github.com/rust-lang/rust/pull/106930>.
+ if cfg!(parallel_compiler) {
+ return vec![].into_iter().chain(vec![].into_iter());
+ }
+
let auto_impls = cx
.sess()
.prof