Diffstat (limited to 'src/librustdoc')
-rw-r--r--  src/librustdoc/clean/auto_trait.rs | 22
-rw-r--r--  src/librustdoc/clean/blanket_impl.rs | 2
-rw-r--r--  src/librustdoc/clean/inline.rs | 16
-rw-r--r--  src/librustdoc/clean/mod.rs | 229
-rw-r--r--  src/librustdoc/clean/types.rs | 152
-rw-r--r--  src/librustdoc/clean/types/tests.rs | 5
-rw-r--r--  src/librustdoc/clean/utils.rs | 9
-rw-r--r--  src/librustdoc/config.rs | 25
-rw-r--r--  src/librustdoc/core.rs | 22
-rw-r--r--  src/librustdoc/doctest.rs | 11
-rw-r--r--  src/librustdoc/doctest/tests.rs | 4
-rw-r--r--  src/librustdoc/externalfiles.rs | 4
-rw-r--r--  src/librustdoc/formats/cache.rs | 22
-rw-r--r--  src/librustdoc/html/format.rs | 49
-rw-r--r--  src/librustdoc/html/highlight.rs | 14
-rw-r--r--  src/librustdoc/html/markdown.rs | 136
-rw-r--r--  src/librustdoc/html/render/context.rs | 19
-rw-r--r--  src/librustdoc/html/render/mod.rs | 77
-rw-r--r--  src/librustdoc/html/render/print_item.rs | 269
-rw-r--r--  src/librustdoc/html/render/search_index.rs | 40
-rw-r--r--  src/librustdoc/html/render/type_layout.rs | 86
-rw-r--r--  src/librustdoc/html/sources.rs | 2
-rw-r--r--  src/librustdoc/html/static/css/rustdoc.css | 4
-rw-r--r--  src/librustdoc/html/static/css/settings.css | 10
-rw-r--r--  src/librustdoc/html/static/js/externs.js | 5
-rw-r--r--  src/librustdoc/html/static/js/main.js | 15
-rw-r--r--  src/librustdoc/html/static/js/search.js | 424
-rw-r--r--  src/librustdoc/html/static/js/settings.js | 19
-rw-r--r--  src/librustdoc/html/static/js/source-script.js | 2
-rw-r--r--  src/librustdoc/html/templates/item_union.html | 8
-rw-r--r--  src/librustdoc/html/templates/type_layout.html | 58
-rw-r--r--  src/librustdoc/html/templates/type_layout_size.html | 12
-rw-r--r--  src/librustdoc/json/conversions.rs | 19
-rw-r--r--  src/librustdoc/json/mod.rs | 5
-rw-r--r--  src/librustdoc/lib.rs | 60
-rw-r--r--  src/librustdoc/lint.rs | 12
-rw-r--r--  src/librustdoc/passes/calculate_doc_coverage.rs | 8
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs | 6
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs | 289
-rw-r--r--  src/librustdoc/passes/collect_trait_impls.rs | 34
-rw-r--r--  src/librustdoc/passes/lint.rs | 2
-rw-r--r--  src/librustdoc/passes/lint/bare_urls.rs | 2
-rw-r--r--  src/librustdoc/passes/lint/check_code_block_syntax.rs | 8
-rw-r--r--  src/librustdoc/passes/lint/html_tags.rs | 2
-rw-r--r--  src/librustdoc/passes/lint/unescaped_backticks.rs | 416
-rw-r--r--  src/librustdoc/passes/strip_hidden.rs | 30
-rw-r--r--  src/librustdoc/passes/stripper.rs | 2
-rw-r--r--  src/librustdoc/scrape_examples.rs | 6
-rw-r--r--  src/librustdoc/theme.rs | 2
-rw-r--r--  src/librustdoc/theme/tests.rs | 4
-rw-r--r--  src/librustdoc/visit_ast.rs | 152
51 files changed, 1910 insertions, 921 deletions
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index 9479b3ee0..baf2b0a85 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -1,7 +1,7 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
-use rustc_middle::ty::{self, Region, RegionVid, TypeFoldable, TypeSuperFoldable};
+use rustc_middle::ty::{self, Region, RegionVid, TypeFoldable};
use rustc_trait_selection::traits::auto_trait::{self, AutoTraitResult};
use thin_vec::ThinVec;
@@ -44,7 +44,7 @@ where
discard_positive_impl: bool,
) -> Option<Item> {
let tcx = self.cx.tcx;
- let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(trait_def_id, [ty]));
+ let trait_ref = ty::Binder::dummy(ty::TraitRef::new(tcx, trait_def_id, [ty]));
if !self.cx.generated_synthetics.insert((ty, trait_def_id)) {
debug!("get_auto_trait_impl_for({:?}): already generated, aborting", trait_ref);
return None;
@@ -141,7 +141,7 @@ where
let f = auto_trait::AutoTraitFinder::new(tcx);
debug!("get_auto_trait_impls({:?})", ty);
- let auto_traits: Vec<_> = self.cx.auto_traits.iter().copied().collect();
+ let auto_traits: Vec<_> = self.cx.auto_traits.to_vec();
let mut auto_traits: Vec<Item> = auto_traits
.into_iter()
.filter_map(|trait_def_id| {
@@ -556,7 +556,10 @@ where
WherePredicate::EqPredicate { lhs, rhs, bound_params } => {
match *lhs {
Type::QPath(box QPathData {
- ref assoc, ref self_type, ref trait_, ..
+ ref assoc,
+ ref self_type,
+ trait_: Some(ref trait_),
+ ..
}) => {
let ty = &*self_type;
let mut new_trait = trait_.clone();
@@ -740,10 +743,11 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for RegionReplacer<'a, 'tcx> {
}
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- (match *r {
- ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(),
- _ => None,
- })
- .unwrap_or_else(|| r.super_fold_with(self))
+ match *r {
+ // These are the regions that can be seen in the AST.
+ ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned().unwrap_or(r),
+ ty::ReEarlyBound(_) | ty::ReStatic | ty::ReLateBound(..) | ty::ReError(_) => r,
+ r => bug!("unexpected region: {r:?}"),
+ }
}
}
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index 3a3bf6a7a..e4c05b573 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -20,7 +20,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
trace!("get_blanket_impls({:?})", ty);
let mut impls = Vec::new();
for trait_def_id in cx.tcx.all_traits() {
- if !cx.cache.effective_visibilities.is_directly_public(cx.tcx, trait_def_id)
+ if !cx.cache.effective_visibilities.is_reachable(cx.tcx, trait_def_id)
|| cx.generated_synthetics.get(&(ty.0, trait_def_id)).is_some()
{
continue;
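
The change above widens the visibility check from `is_directly_public` to `is_reachable`, so blanket impls are also collected for traits that are only exposed through a re-export. A minimal sketch of the kind of case this covers (module and trait names are illustrative):

    mod private {
        pub trait Marker {}
        impl<T> Marker for T {} // blanket impl to be documented
    }
    // `Marker` is not directly public where it is defined, but it is reachable
    // through this re-export, which is what `is_reachable` accepts.
    pub use private::Marker;
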
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index cc5d13808..7dc08b3b1 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -152,8 +152,9 @@ pub(crate) fn try_inline_glob(
// reexported by the glob, e.g. because they are shadowed by something else.
let reexports = cx
.tcx
- .module_children_reexports(current_mod)
+ .module_children_local(current_mod)
.iter()
+ .filter(|child| !child.reexport_chain.is_empty())
.filter_map(|child| child.res.opt_def_id())
.collect();
let mut items = build_module_items(cx, did, visited, inlined_names, Some(&reexports));
@@ -354,9 +355,9 @@ pub(crate) fn build_impl(
return;
}
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_impl");
-
let tcx = cx.tcx;
+ let _prof_timer = tcx.sess.prof.generic_activity("build_impl");
+
let associated_trait = tcx.impl_trait_ref(did).map(ty::EarlyBinder::skip_binder);
// Only inline impl if the implemented trait is
@@ -528,7 +529,7 @@ pub(crate) fn build_impl(
items: trait_items,
polarity,
kind: if utils::has_doc_flag(tcx, did, sym::fake_variadic) {
- ImplKind::FakeVaradic
+ ImplKind::FakeVariadic
} else {
ImplKind::Normal
},
@@ -705,7 +706,12 @@ fn filter_non_trait_generics(trait_did: DefId, mut g: clean::Generics) -> clean:
g.where_predicates.retain(|pred| match pred {
clean::WherePredicate::BoundPredicate {
- ty: clean::QPath(box clean::QPathData { self_type: clean::Generic(ref s), trait_, .. }),
+ ty:
+ clean::QPath(box clean::QPathData {
+ self_type: clean::Generic(ref s),
+ trait_: Some(trait_),
+ ..
+ }),
bounds,
..
} => !(bounds.is_empty() || *s == kw::SelfUpper && trait_.def_id() == trait_did),
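
With `module_children_local` plus the `reexport_chain` filter, `try_inline_glob` only records names that the glob actually re-exports, skipping children that are defined directly in the module or shadowed. A small sketch of the shadowing case being filtered out (names are illustrative):

    mod inner {
        pub struct Foo;
        pub struct Bar;
    }
    pub use inner::*; // glob re-export
    pub struct Foo;   // shadows `inner::Foo`, so only `Bar` is effectively
                      // re-exported by the glob above
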
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 5fa0c120f..03adc19e3 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -119,7 +119,39 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
});
let kind = ModuleItem(Module { items, span });
- Item::from_def_id_and_parts(doc.def_id.to_def_id(), Some(doc.name), kind, cx)
+ generate_item_with_correct_attrs(cx, kind, doc.def_id, doc.name, doc.import_id, doc.renamed)
+}
+
+fn generate_item_with_correct_attrs(
+ cx: &mut DocContext<'_>,
+ kind: ItemKind,
+ local_def_id: LocalDefId,
+ name: Symbol,
+ import_id: Option<LocalDefId>,
+ renamed: Option<Symbol>,
+) -> Item {
+ let def_id = local_def_id.to_def_id();
+ let target_attrs = inline::load_attrs(cx, def_id);
+ let attrs = if let Some(import_id) = import_id {
+ let is_inline = inline::load_attrs(cx, import_id.to_def_id())
+ .lists(sym::doc)
+ .get_word_attr(sym::inline)
+ .is_some();
+ let mut attrs = get_all_import_attributes(cx, import_id, local_def_id, is_inline);
+ add_without_unwanted_attributes(&mut attrs, target_attrs, is_inline, None);
+ attrs
+ } else {
+ // We only keep the item's attributes.
+ target_attrs.iter().map(|attr| (Cow::Borrowed(attr), None)).collect()
+ };
+
+ let cfg = attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
+ let attrs = Attributes::from_ast_iter(attrs.iter().map(|(attr, did)| (&**attr, *did)), false);
+
+ let name = renamed.or(Some(name));
+ let mut item = Item::from_def_id_and_attrs_and_parts(def_id, name, kind, Box::new(attrs), cfg);
+ item.inline_stmt_id = import_id.map(|local| local.to_def_id());
+ item
}
fn clean_generic_bound<'tcx>(
@@ -131,7 +163,7 @@ fn clean_generic_bound<'tcx>(
hir::GenericBound::LangItemTrait(lang_item, span, _, generic_args) => {
let def_id = cx.tcx.require_lang_item(lang_item, Some(span));
- let trait_ref = ty::TraitRef::identity(cx.tcx, def_id);
+ let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(cx.tcx, def_id));
let generic_args = clean_generic_args(generic_args, cx);
let GenericArgs::AngleBracketed { bindings, .. } = generic_args
@@ -304,7 +336,7 @@ pub(crate) fn clean_predicate<'tcx>(
clean_region_outlives_predicate(pred)
}
ty::PredicateKind::Clause(ty::Clause::TypeOutlives(pred)) => {
- clean_type_outlives_predicate(pred, cx)
+ clean_type_outlives_predicate(bound_predicate.rebind(pred), cx)
}
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
Some(clean_projection_predicate(bound_predicate.rebind(pred), cx))
@@ -345,7 +377,7 @@ fn clean_poly_trait_predicate<'tcx>(
}
fn clean_region_outlives_predicate<'tcx>(
- pred: ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>,
+ pred: ty::RegionOutlivesPredicate<'tcx>,
) -> Option<WherePredicate> {
let ty::OutlivesPredicate(a, b) = pred;
@@ -358,13 +390,13 @@ fn clean_region_outlives_predicate<'tcx>(
}
fn clean_type_outlives_predicate<'tcx>(
- pred: ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>,
+ pred: ty::Binder<'tcx, ty::TypeOutlivesPredicate<'tcx>>,
cx: &mut DocContext<'tcx>,
) -> Option<WherePredicate> {
- let ty::OutlivesPredicate(ty, lt) = pred;
+ let ty::OutlivesPredicate(ty, lt) = pred.skip_binder();
Some(WherePredicate::BoundPredicate {
- ty: clean_middle_ty(ty::Binder::dummy(ty), cx, None),
+ ty: clean_middle_ty(pred.rebind(ty), cx, None),
bounds: vec![GenericBound::Outlives(
clean_middle_region(lt).expect("failed to clean lifetimes"),
)],
@@ -422,8 +454,8 @@ fn clean_projection<'tcx>(
let bounds = cx
.tcx
.explicit_item_bounds(ty.skip_binder().def_id)
- .iter()
- .map(|(bound, _)| EarlyBinder(*bound).subst(cx.tcx, ty.skip_binder().substs))
+ .subst_iter_copied(cx.tcx, ty.skip_binder().substs)
+ .map(|(pred, _)| pred)
.collect::<Vec<_>>();
return clean_middle_opaque_bounds(cx, bounds);
}
@@ -441,7 +473,7 @@ fn clean_projection<'tcx>(
assoc: projection_to_path_segment(ty, cx),
should_show_cast,
self_type,
- trait_,
+ trait_: Some(trait_),
}))
}
@@ -1315,10 +1347,11 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
}
if let ty::TraitContainer = assoc_item.container {
- let bounds = tcx.explicit_item_bounds(assoc_item.def_id);
+ let bounds =
+ tcx.explicit_item_bounds(assoc_item.def_id).subst_identity_iter_copied();
let predicates = tcx.explicit_predicates_of(assoc_item.def_id).predicates;
let predicates =
- tcx.arena.alloc_from_iter(bounds.into_iter().chain(predicates).copied());
+ tcx.arena.alloc_from_iter(bounds.chain(predicates.iter().copied()));
let mut generics = clean_ty_generics(
cx,
tcx.generics_of(assoc_item.def_id),
@@ -1329,7 +1362,13 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
let mut bounds: Vec<GenericBound> = Vec::new();
generics.where_predicates.retain_mut(|pred| match *pred {
WherePredicate::BoundPredicate {
- ty: QPath(box QPathData { ref assoc, ref self_type, ref trait_, .. }),
+ ty:
+ QPath(box QPathData {
+ ref assoc,
+ ref self_type,
+ trait_: Some(ref trait_),
+ ..
+ }),
bounds: ref mut pred_bounds,
..
} => {
@@ -1491,25 +1530,30 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
assoc: clean_path_segment(p.segments.last().expect("segments were empty"), cx),
should_show_cast,
self_type,
- trait_,
+ trait_: Some(trait_),
}))
}
hir::QPath::TypeRelative(qself, segment) => {
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
- let res = match ty.kind() {
+ let self_type = clean_ty(qself, cx);
+
+ let (trait_, should_show_cast) = match ty.kind() {
ty::Alias(ty::Projection, proj) => {
- Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id)
+ let res = Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id);
+ let trait_ = clean_path(&hir::Path { span, res, segments: &[] }, cx);
+ register_res(cx, trait_.res);
+ let self_def_id = res.opt_def_id();
+ let should_show_cast =
+ compute_should_show_cast(self_def_id, &trait_, &self_type);
+
+ (Some(trait_), should_show_cast)
}
+ ty::Alias(ty::Inherent, _) => (None, false),
// Rustdoc handles `ty::Error`s by turning them into `Type::Infer`s.
ty::Error(_) => return Type::Infer,
- // Otherwise, this is an inherent associated type.
- _ => return clean_middle_ty(ty::Binder::dummy(ty), cx, None),
+ _ => bug!("clean: expected associated type, found `{ty:?}`"),
};
- let trait_ = clean_path(&hir::Path { span, res, segments: &[] }, cx);
- register_res(cx, trait_.res);
- let self_def_id = res.opt_def_id();
- let self_type = clean_ty(qself, cx);
- let should_show_cast = compute_should_show_cast(self_def_id, &trait_, &self_type);
+
Type::QPath(Box::new(QPathData {
assoc: clean_path_segment(segment, cx),
should_show_cast,
@@ -1528,7 +1572,9 @@ fn maybe_expand_private_type_alias<'tcx>(
let Res::Def(DefKind::TyAlias, def_id) = path.res else { return None };
// Substitute private type aliases
let def_id = def_id.as_local()?;
- let alias = if !cx.cache.effective_visibilities.is_exported(cx.tcx, def_id.to_def_id()) {
+ let alias = if !cx.cache.effective_visibilities.is_exported(cx.tcx, def_id.to_def_id())
+ && !cx.current_type_aliases.contains_key(&def_id.to_def_id())
+ {
&cx.tcx.hir().expect_item(def_id).kind
} else {
return None;
@@ -1608,7 +1654,7 @@ fn maybe_expand_private_type_alias<'tcx>(
}
}
- Some(cx.enter_alias(substs, |cx| clean_ty(ty, cx)))
+ Some(cx.enter_alias(substs, def_id.to_def_id(), |cx| clean_ty(ty, cx)))
}
pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type {
@@ -1699,7 +1745,7 @@ fn normalize<'tcx>(
pub(crate) fn clean_middle_ty<'tcx>(
bound_ty: ty::Binder<'tcx, Ty<'tcx>>,
cx: &mut DocContext<'tcx>,
- def_id: Option<DefId>,
+ parent_def_id: Option<DefId>,
) -> Type {
let bound_ty = normalize(cx, bound_ty).unwrap_or(bound_ty);
match *bound_ty.skip_binder().kind() {
@@ -1829,7 +1875,32 @@ pub(crate) fn clean_middle_ty<'tcx>(
Tuple(t.iter().map(|t| clean_middle_ty(bound_ty.rebind(t), cx, None)).collect())
}
- ty::Alias(ty::Projection, ref data) => clean_projection(bound_ty.rebind(*data), cx, def_id),
+ ty::Alias(ty::Projection, ref data) => {
+ clean_projection(bound_ty.rebind(*data), cx, parent_def_id)
+ }
+
+ ty::Alias(ty::Inherent, alias_ty) => {
+ let alias_ty = bound_ty.rebind(alias_ty);
+ let self_type = clean_middle_ty(alias_ty.map_bound(|ty| ty.self_ty()), cx, None);
+
+ Type::QPath(Box::new(QPathData {
+ assoc: PathSegment {
+ name: cx.tcx.associated_item(alias_ty.skip_binder().def_id).name,
+ args: GenericArgs::AngleBracketed {
+ args: substs_to_args(
+ cx,
+ alias_ty.map_bound(|ty| ty.substs.as_slice()),
+ true,
+ )
+ .into(),
+ bindings: Default::default(),
+ },
+ },
+ should_show_cast: false,
+ self_type,
+ trait_: None,
+ }))
+ }
ty::Param(ref p) => {
if let Some(bounds) = cx.impl_trait_bounds.remove(&p.index.into()) {
@@ -1840,15 +1911,30 @@ pub(crate) fn clean_middle_ty<'tcx>(
}
ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
- // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
- // by looking up the bounds associated with the def_id.
- let bounds = cx
- .tcx
- .explicit_item_bounds(def_id)
- .iter()
- .map(|(bound, _)| EarlyBinder(*bound).subst(cx.tcx, substs))
- .collect::<Vec<_>>();
- clean_middle_opaque_bounds(cx, bounds)
+ // If it's already in the same alias, don't get an infinite loop.
+ if cx.current_type_aliases.contains_key(&def_id) {
+ let path =
+ external_path(cx, def_id, false, ThinVec::new(), bound_ty.rebind(substs));
+ Type::Path { path }
+ } else {
+ *cx.current_type_aliases.entry(def_id).or_insert(0) += 1;
+ // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
+ // by looking up the bounds associated with the def_id.
+ let bounds = cx
+ .tcx
+ .explicit_item_bounds(def_id)
+ .subst_iter_copied(cx.tcx, substs)
+ .map(|(bound, _)| bound)
+ .collect::<Vec<_>>();
+ let ty = clean_middle_opaque_bounds(cx, bounds);
+ if let Some(count) = cx.current_type_aliases.get_mut(&def_id) {
+ *count -= 1;
+ if *count == 0 {
+ cx.current_type_aliases.remove(&def_id);
+ }
+ }
+ ty
+ }
}
ty::Closure(..) => panic!("Closure"),
@@ -2069,9 +2155,9 @@ pub(crate) fn reexport_chain<'tcx>(
import_def_id: LocalDefId,
target_def_id: LocalDefId,
) -> &'tcx [Reexport] {
- for child in tcx.module_children_reexports(tcx.local_parent(import_def_id)) {
+ for child in tcx.module_children_local(tcx.local_parent(import_def_id)) {
if child.res.opt_def_id() == Some(target_def_id.to_def_id())
- && child.reexport_chain[0].id() == Some(import_def_id.to_def_id())
+ && child.reexport_chain.first().and_then(|r| r.id()) == Some(import_def_id.to_def_id())
{
return &child.reexport_chain;
}
@@ -2228,13 +2314,17 @@ fn clean_maybe_renamed_item<'tcx>(
generics: clean_generics(ty.generics, cx),
}),
ItemKind::TyAlias(hir_ty, generics) => {
+ *cx.current_type_aliases.entry(def_id).or_insert(0) += 1;
let rustdoc_ty = clean_ty(hir_ty, cx);
let ty = clean_middle_ty(ty::Binder::dummy(hir_ty_to_ty(cx.tcx, hir_ty)), cx, None);
- TypedefItem(Box::new(Typedef {
- type_: rustdoc_ty,
- generics: clean_generics(generics, cx),
- item_type: Some(ty),
- }))
+ let generics = clean_generics(generics, cx);
+ if let Some(count) = cx.current_type_aliases.get_mut(&def_id) {
+ *count -= 1;
+ if *count == 0 {
+ cx.current_type_aliases.remove(&def_id);
+ }
+ }
+ TypedefItem(Box::new(Typedef { type_: rustdoc_ty, generics, item_type: Some(ty) }))
}
ItemKind::Enum(ref def, generics) => EnumItem(Enum {
variants: def.variants.iter().map(|v| clean_variant(v, cx)).collect(),
@@ -2287,29 +2377,14 @@ fn clean_maybe_renamed_item<'tcx>(
_ => unreachable!("not yet converted"),
};
- let target_attrs = inline::load_attrs(cx, def_id);
- let attrs = if let Some(import_id) = import_id {
- let is_inline = inline::load_attrs(cx, import_id.to_def_id())
- .lists(sym::doc)
- .get_word_attr(sym::inline)
- .is_some();
- let mut attrs =
- get_all_import_attributes(cx, import_id, item.owner_id.def_id, is_inline);
- add_without_unwanted_attributes(&mut attrs, target_attrs, is_inline, None);
- attrs
- } else {
- // We only keep the item's attributes.
- target_attrs.iter().map(|attr| (Cow::Borrowed(attr), None)).collect()
- };
-
- let cfg = attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
- let attrs =
- Attributes::from_ast_iter(attrs.iter().map(|(attr, did)| (&**attr, *did)), false);
-
- let mut item =
- Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);
- item.inline_stmt_id = import_id.map(|local| local.to_def_id());
- vec![item]
+ vec![generate_item_with_correct_attrs(
+ cx,
+ kind,
+ item.owner_id.def_id,
+ name,
+ import_id,
+ renamed,
+ )]
})
}
@@ -2339,14 +2414,15 @@ fn clean_impl<'tcx>(
}
let for_ = clean_ty(impl_.self_ty, cx);
- let type_alias = for_.def_id(&cx.cache).and_then(|did| match tcx.def_kind(did) {
- DefKind::TyAlias => Some(clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(did).subst_identity()),
- cx,
- Some(did),
- )),
- _ => None,
- });
+ let type_alias =
+ for_.def_id(&cx.cache).and_then(|alias_def_id: DefId| match tcx.def_kind(alias_def_id) {
+ DefKind::TyAlias => Some(clean_middle_ty(
+ ty::Binder::dummy(tcx.type_of(def_id).subst_identity()),
+ cx,
+ Some(def_id.to_def_id()),
+ )),
+ _ => None,
+ });
let mut make_item = |trait_: Option<Path>, for_: Type, items: Vec<Item>| {
let kind = ImplItem(Box::new(Impl {
unsafety: impl_.unsafety,
@@ -2356,7 +2432,7 @@ fn clean_impl<'tcx>(
items,
polarity: tcx.impl_polarity(def_id),
kind: if utils::has_doc_flag(tcx, def_id.to_def_id(), sym::fake_variadic) {
- ImplKind::FakeVaradic
+ ImplKind::FakeVariadic
} else {
ImplKind::Normal
},
@@ -2516,7 +2592,8 @@ fn clean_use_statement_inner<'tcx>(
} else {
if inline_attr.is_none()
&& let Res::Def(DefKind::Mod, did) = path.res
- && !did.is_local() && did.is_crate_root()
+ && !did.is_local()
+ && did.is_crate_root()
{
// if we're `pub use`ing an extern crate root, don't inline it unless we
// were specifically asked for it
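
A large part of the clean/mod.rs changes teaches rustdoc about inherent associated types: `QPathData::trait_` becomes optional and `ty::Alias(ty::Inherent, ..)` is cleaned into a `QPath` with `trait_: None`. A sketch of the nightly-only surface syntax this corresponds to (item names are made up):

    #![feature(inherent_associated_types)]
    #![allow(incomplete_features)]

    struct Carrier;

    impl Carrier {
        type Payload = u8;           // inherent: no trait is involved
    }

    type Alias = Carrier::Payload;   // cleaned as a QPath with `trait_: None`
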
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 6d2ce9e28..e9ccea2cf 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -11,6 +11,7 @@ use arrayvec::ArrayVec;
use thin_vec::ThinVec;
use rustc_ast as ast;
+use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel};
use rustc_const_eval::const_eval::is_unstable_const_fn;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -20,7 +21,7 @@ use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;
use rustc_hir::{BodyId, Mutability};
use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety;
-use rustc_index::vec::IndexVec;
+use rustc_index::IndexVec;
use rustc_middle::ty::fast_reject::SimplifiedType;
use rustc_middle::ty::{self, TyCtxt, Visibility};
use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment};
@@ -156,7 +157,7 @@ impl ExternalCrate {
}
/// Attempts to find where an external crate is located, given that we're
- /// rendering in to the specified source destination.
+ /// rendering into the specified source destination.
pub(crate) fn location(
&self,
extern_url: Option<&str>,
@@ -400,12 +401,18 @@ impl Item {
.unwrap_or_else(|| self.span(tcx).map_or(rustc_span::DUMMY_SP, |span| span.inner()))
}
- /// Finds the `doc` attribute as a NameValue and returns the corresponding
- /// value found.
- pub(crate) fn doc_value(&self) -> Option<String> {
+ /// Combine all doc strings into a single value handling indentation and newlines as needed.
+ pub(crate) fn doc_value(&self) -> String {
self.attrs.doc_value()
}
+ /// Combine all doc strings into a single value handling indentation and newlines as needed.
+ /// Returns `None` is there's no documentation at all, and `Some("")` if there is some
+ /// documentation but it is empty (e.g. `#[doc = ""]`).
+ pub(crate) fn opt_doc_value(&self) -> Option<String> {
+ self.attrs.opt_doc_value()
+ }
+
pub(crate) fn from_def_id_and_parts(
def_id: DefId,
name: Option<Symbol>,
@@ -442,12 +449,6 @@ impl Item {
}
}
- /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
- /// with newlines.
- pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
- self.attrs.collapsed_doc_value()
- }
-
pub(crate) fn links(&self, cx: &Context<'_>) -> Vec<RenderedLink> {
use crate::html::format::{href, link_tooltip};
@@ -711,6 +712,78 @@ impl Item {
};
Some(tcx.visibility(def_id))
}
+
+ pub(crate) fn attributes(&self, tcx: TyCtxt<'_>, keep_as_is: bool) -> Vec<String> {
+ const ALLOWED_ATTRIBUTES: &[Symbol] =
+ &[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive];
+
+ use rustc_abi::IntegerType;
+ use rustc_middle::ty::ReprFlags;
+
+ let mut attrs: Vec<String> = self
+ .attrs
+ .other_attrs
+ .iter()
+ .filter_map(|attr| {
+ if keep_as_is {
+ Some(pprust::attribute_to_string(attr))
+ } else if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
+ Some(
+ pprust::attribute_to_string(attr)
+ .replace("\\\n", "")
+ .replace('\n', "")
+ .replace(" ", " "),
+ )
+ } else {
+ None
+ }
+ })
+ .collect();
+ if let Some(def_id) = self.item_id.as_def_id() &&
+ !def_id.is_local() &&
+ // This check is needed because `adt_def` will panic if not a compatible type otherwise...
+ matches!(self.type_(), ItemType::Struct | ItemType::Enum | ItemType::Union)
+ {
+ let repr = tcx.adt_def(def_id).repr();
+ let mut out = Vec::new();
+ if repr.flags.contains(ReprFlags::IS_C) {
+ out.push("C");
+ }
+ if repr.flags.contains(ReprFlags::IS_TRANSPARENT) {
+ out.push("transparent");
+ }
+ if repr.flags.contains(ReprFlags::IS_SIMD) {
+ out.push("simd");
+ }
+ let pack_s;
+ if let Some(pack) = repr.pack {
+ pack_s = format!("packed({})", pack.bytes());
+ out.push(&pack_s);
+ }
+ let align_s;
+ if let Some(align) = repr.align {
+ align_s = format!("align({})", align.bytes());
+ out.push(&align_s);
+ }
+ let int_s;
+ if let Some(int) = repr.int {
+ int_s = match int {
+ IntegerType::Pointer(is_signed) => {
+ format!("{}size", if is_signed { 'i' } else { 'u' })
+ }
+ IntegerType::Fixed(size, is_signed) => {
+ format!("{}{}", if is_signed { 'i' } else { 'u' }, size.size().bytes() * 8)
+ }
+ };
+ out.push(&int_s);
+ }
+ if out.is_empty() {
+ return Vec::new();
+ }
+ attrs.push(format!("#[repr({})]", out.join(", ")));
+ }
+ attrs
+ }
}
#[derive(Clone, Debug)]
@@ -751,7 +824,7 @@ pub(crate) enum ItemKind {
PrimitiveItem(PrimitiveType),
/// A required associated constant in a trait declaration.
TyAssocConstItem(Type),
- /// An associated associated constant in a trait impl or a provided one in a trait declaration.
+ /// An associated constant in a trait impl or a provided one in a trait declaration.
AssocConstItem(Type, ConstantKind),
/// A required associated type in a trait declaration.
///
@@ -995,17 +1068,6 @@ impl<I: Iterator<Item = ast::NestedMetaItem>> NestedAttributesExt for I {
}
}
-/// Collapse a collection of [`DocFragment`]s into one string,
-/// handling indentation and newlines as needed.
-pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
- let mut acc = String::new();
- for frag in doc_strings {
- add_doc_fragment(&mut acc, frag);
- }
- acc.pop();
- acc
-}
-
/// A link that has not yet been rendered.
///
/// This link will be turned into a rendered link by [`Item::links`].
@@ -1090,29 +1152,23 @@ impl Attributes {
Attributes { doc_strings, other_attrs }
}
- /// Finds the `doc` attribute as a NameValue and returns the corresponding
- /// value found.
- pub(crate) fn doc_value(&self) -> Option<String> {
- let mut iter = self.doc_strings.iter();
-
- let ori = iter.next()?;
- let mut out = String::new();
- add_doc_fragment(&mut out, ori);
- for new_frag in iter {
- add_doc_fragment(&mut out, new_frag);
- }
- out.pop();
- if out.is_empty() { None } else { Some(out) }
+ /// Combine all doc strings into a single value handling indentation and newlines as needed.
+ pub(crate) fn doc_value(&self) -> String {
+ self.opt_doc_value().unwrap_or_default()
}
- /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
- /// with newlines.
- pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
- if self.doc_strings.is_empty() {
- None
- } else {
- Some(collapse_doc_fragments(&self.doc_strings))
- }
+ /// Combine all doc strings into a single value handling indentation and newlines as needed.
+ /// Returns `None` is there's no documentation at all, and `Some("")` if there is some
+ /// documentation but it is empty (e.g. `#[doc = ""]`).
+ pub(crate) fn opt_doc_value(&self) -> Option<String> {
+ (!self.doc_strings.is_empty()).then(|| {
+ let mut res = String::new();
+ for frag in &self.doc_strings {
+ add_doc_fragment(&mut res, frag);
+ }
+ res.pop();
+ res
+ })
}
pub(crate) fn get_doc_aliases(&self) -> Box<[Symbol]> {
@@ -1587,7 +1643,7 @@ impl Type {
pub(crate) fn projection(&self) -> Option<(&Type, DefId, PathSegment)> {
if let QPath(box QPathData { self_type, trait_, assoc, .. }) = self {
- Some((self_type, trait_.def_id(), assoc.clone()))
+ Some((self_type, trait_.as_ref()?.def_id(), assoc.clone()))
} else {
None
}
@@ -1631,7 +1687,7 @@ pub(crate) struct QPathData {
pub self_type: Type,
/// FIXME: compute this field on demand.
pub should_show_cast: bool,
- pub trait_: Path,
+ pub trait_: Option<Path>,
}
/// A primitive (aka, builtin) type.
@@ -2305,7 +2361,7 @@ impl Impl {
pub(crate) enum ImplKind {
Normal,
Auto,
- FakeVaradic,
+ FakeVariadic,
Blanket(Box<Type>),
}
@@ -2319,7 +2375,7 @@ impl ImplKind {
}
pub(crate) fn is_fake_variadic(&self) -> bool {
- matches!(self, ImplKind::FakeVaradic)
+ matches!(self, ImplKind::FakeVariadic)
}
pub(crate) fn as_blanket_ty(&self) -> Option<&Type> {
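
The split into `doc_value` and `opt_doc_value` distinguishes "no documentation at all" from "documentation that collapses to an empty string". A small illustration, using hypothetical items:

    #[doc = ""]
    struct Documented;   // opt_doc_value() == Some(String::new())

    struct Undocumented; // opt_doc_value() == None
                         // doc_value() returns "" for both items
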
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
index d8c91a968..394954208 100644
--- a/src/librustdoc/clean/types/tests.rs
+++ b/src/librustdoc/clean/types/tests.rs
@@ -1,7 +1,5 @@
use super::*;
-use crate::clean::collapse_doc_fragments;
-
use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind};
use rustc_span::create_default_session_globals_then;
use rustc_span::source_map::DUMMY_SP;
@@ -22,7 +20,8 @@ fn run_test(input: &str, expected: &str) {
create_default_session_globals_then(|| {
let mut s = create_doc_fragment(input);
unindent_doc_fragments(&mut s);
- assert_eq!(collapse_doc_fragments(&s), expected);
+ let attrs = Attributes { doc_strings: s, other_attrs: Default::default() };
+ assert_eq!(attrs.doc_value(), expected);
});
}
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index cca50df0d..366f93952 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -193,7 +193,7 @@ pub(crate) fn build_deref_target_impls(
};
if let Some(prim) = target.primitive_type() {
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
+ let _prof_timer = tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
for did in prim.impls(tcx).filter(|did| !did.is_local()) {
inline::build_impl(cx, did, None, ret);
}
@@ -243,9 +243,9 @@ pub(crate) fn print_const(cx: &DocContext<'_>, n: ty::Const<'_>) -> String {
match n.kind() {
ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs: _ }) => {
let s = if let Some(def) = def.as_local() {
- print_const_expr(cx.tcx, cx.tcx.hir().body_owned_by(def.did))
+ print_const_expr(cx.tcx, cx.tcx.hir().body_owned_by(def))
} else {
- inline::print_inlined_const(cx.tcx, def.did)
+ inline::print_inlined_const(cx.tcx, def)
};
s
@@ -594,9 +594,8 @@ pub(super) fn display_macro_source(
def_id: DefId,
vis: ty::Visibility<DefId>,
) -> String {
- let tts: Vec<_> = def.body.tokens.clone().into_trees().collect();
// Extract the spans of all matchers. They represent the "interface" of the macro.
- let matchers = tts.chunks(4).map(|arm| &arm[0]);
+ let matchers = def.body.tokens.chunks(4).map(|arm| &arm[0]);
if def.macro_rules {
format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index 1be4f364e..9f08609a6 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -314,7 +314,6 @@ impl Options {
matches: &getopts::Matches,
args: Vec<String>,
) -> Result<(Options, RenderOptions), i32> {
- let args = &args[1..];
// Check for unstable options.
nightly_options::check_nightly_options(matches, &opts());
@@ -382,7 +381,7 @@ impl Options {
match kind.parse() {
Ok(kind) => emit.push(kind),
Err(()) => {
- diag.err(&format!("unrecognized emission type: {}", kind));
+ diag.err(format!("unrecognized emission type: {}", kind));
return Err(1);
}
}
@@ -491,7 +490,7 @@ impl Options {
// https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/dataset
//
// The original key values we have are the same as the DOM storage API keys and the
- // command line options, so contain `-`. Our Javascript needs to be able to look
+ // command line options, so contain `-`. Our JavaScript needs to be able to look
// these values up both in `dataset` and in the storage API, so it needs to be able
// to convert the names back and forth. Despite doing this kebab-case to
// StudlyCaps transformation automatically, the JS DOM API does not provide a
@@ -559,28 +558,28 @@ impl Options {
matches.opt_strs("theme").iter().map(|s| (PathBuf::from(&s), s.to_owned()))
{
if !theme_file.is_file() {
- diag.struct_err(&format!("invalid argument: \"{}\"", theme_s))
+ diag.struct_err(format!("invalid argument: \"{}\"", theme_s))
.help("arguments to --theme must be files")
.emit();
return Err(1);
}
if theme_file.extension() != Some(OsStr::new("css")) {
- diag.struct_err(&format!("invalid argument: \"{}\"", theme_s))
+ diag.struct_err(format!("invalid argument: \"{}\"", theme_s))
.help("arguments to --theme must have a .css extension")
.emit();
return Err(1);
}
let (success, ret) = theme::test_theme_against(&theme_file, &paths, &diag);
if !success {
- diag.struct_err(&format!("error loading theme file: \"{}\"", theme_s)).emit();
+ diag.struct_err(format!("error loading theme file: \"{}\"", theme_s)).emit();
return Err(1);
} else if !ret.is_empty() {
- diag.struct_warn(&format!(
+ diag.struct_warn(format!(
"theme file \"{}\" is missing CSS rules from the default theme",
theme_s
))
.warn("the theme may appear incorrect when loaded")
- .help(&format!(
+ .help(format!(
"to see what rules are missing, call `rustdoc --check-theme \"{}\"`",
theme_s
))
@@ -611,7 +610,7 @@ impl Options {
match matches.opt_str("r").as_deref() {
Some("rust") | None => {}
Some(s) => {
- diag.struct_err(&format!("unknown input format: {}", s)).emit();
+ diag.struct_err(format!("unknown input format: {}", s)).emit();
return Err(1);
}
}
@@ -631,7 +630,7 @@ impl Options {
let crate_types = match parse_crate_types_from_list(matches.opt_strs("crate-type")) {
Ok(types) => types,
Err(e) => {
- diag.struct_err(&format!("unknown crate type: {}", e)).emit();
+ diag.struct_err(format!("unknown crate type: {}", e)).emit();
return Err(1);
}
};
@@ -649,7 +648,7 @@ impl Options {
out_fmt
}
Err(e) => {
- diag.struct_err(&e).emit();
+ diag.struct_err(e).emit();
return Err(1);
}
},
@@ -790,7 +789,7 @@ fn check_deprecated_options(matches: &getopts::Matches, diag: &rustc_errors::Han
for &flag in deprecated_flags.iter() {
if matches.opt_present(flag) {
- diag.struct_warn(&format!("the `{}` flag is deprecated", flag))
+ diag.struct_warn(format!("the `{}` flag is deprecated", flag))
.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
@@ -803,7 +802,7 @@ fn check_deprecated_options(matches: &getopts::Matches, diag: &rustc_errors::Han
for &flag in removed_flags.iter() {
if matches.opt_present(flag) {
- let mut err = diag.struct_warn(&format!("the `{}` flag no longer functions", flag));
+ let mut err = diag.struct_warn(format!("the `{}` flag no longer functions", flag));
err.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index b392ba058..e10a62977 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -46,6 +46,7 @@ pub(crate) struct DocContext<'tcx> {
// for expanding type aliases at the HIR level:
/// Table `DefId` of type, lifetime, or const parameter -> substituted type, lifetime, or const
pub(crate) substs: DefIdMap<clean::SubstParam>,
+ pub(crate) current_type_aliases: DefIdMap<usize>,
/// Table synthetic type parameter for `impl Trait` in argument position -> bounds
pub(crate) impl_trait_bounds: FxHashMap<ImplTraitParam, Vec<clean::GenericBound>>,
/// Auto-trait or blanket impls processed so far, as `(self_ty, trait_def_id)`.
@@ -82,13 +83,25 @@ impl<'tcx> DocContext<'tcx> {
/// Call the closure with the given parameters set as
/// the substitutions for a type alias' RHS.
- pub(crate) fn enter_alias<F, R>(&mut self, substs: DefIdMap<clean::SubstParam>, f: F) -> R
+ pub(crate) fn enter_alias<F, R>(
+ &mut self,
+ substs: DefIdMap<clean::SubstParam>,
+ def_id: DefId,
+ f: F,
+ ) -> R
where
F: FnOnce(&mut Self) -> R,
{
let old_substs = mem::replace(&mut self.substs, substs);
+ *self.current_type_aliases.entry(def_id).or_insert(0) += 1;
let r = f(self);
self.substs = old_substs;
+ if let Some(count) = self.current_type_aliases.get_mut(&def_id) {
+ *count -= 1;
+ if *count == 0 {
+ self.current_type_aliases.remove(&def_id);
+ }
+ }
r
}
@@ -258,8 +271,6 @@ pub(crate) fn create_config(
override_queries: Some(|_sess, providers, _external_providers| {
// Most lints will require typechecking, so just don't run them.
providers.lint_mod = |_, _| {};
- // Prevent `rustc_hir_analysis::check_crate` from calling `typeck` on all bodies.
- providers.typeck_item_bodies = |_, _| {};
// hack so that `used_trait_imports` won't try to call typeck
providers.used_trait_imports = |_, _| {
static EMPTY_SET: LazyLock<UnordSet<LocalDefId>> = LazyLock::new(UnordSet::default);
@@ -329,6 +340,7 @@ pub(crate) fn run_global_ctxt(
external_traits: Default::default(),
active_extern_traits: Default::default(),
substs: Default::default(),
+ current_type_aliases: Default::default(),
impl_trait_bounds: Default::default(),
generated_synthetics: Default::default(),
auto_traits,
@@ -355,7 +367,7 @@ pub(crate) fn run_global_ctxt(
let mut krate = tcx.sess.time("clean_crate", || clean::krate(&mut ctxt));
- if krate.module.doc_value().map(|d| d.is_empty()).unwrap_or(true) {
+ if krate.module.doc_value().is_empty() {
let help = format!(
"The following guide may be of use:\n\
{}/rustdoc/how-to-write-documentation.html",
@@ -371,7 +383,7 @@ pub(crate) fn run_global_ctxt(
fn report_deprecated_attr(name: &str, diag: &rustc_errors::Handler, sp: Span) {
let mut msg =
- diag.struct_span_warn(sp, &format!("the `#![doc({})]` attribute is deprecated", name));
+ diag.struct_span_warn(sp, format!("the `#![doc({})]` attribute is deprecated", name));
msg.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs
index daf10e5b8..f6631b66f 100644
--- a/src/librustdoc/doctest.rs
+++ b/src/librustdoc/doctest.rs
@@ -679,6 +679,10 @@ pub(crate) fn make_test(
// parse the source, but only has false positives, not false
// negatives.
if s.contains(crate_name) {
+ // rustdoc implicitly inserts an `extern crate` item for the own crate
+ // which may be unused, so we need to allow the lint.
+ prog.push_str(&format!("#[allow(unused_extern_crates)]\n"));
+
prog.push_str(&format!("extern crate r#{crate_name};\n"));
line_offset += 1;
}
@@ -1059,15 +1063,10 @@ impl Tester for Collector {
Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)),
},
ignore_message: None,
- #[cfg(not(bootstrap))]
source_file: "",
- #[cfg(not(bootstrap))]
start_line: 0,
- #[cfg(not(bootstrap))]
start_col: 0,
- #[cfg(not(bootstrap))]
end_line: 0,
- #[cfg(not(bootstrap))]
end_col: 0,
// compiler failures are test failures
should_panic: test::ShouldPanic::No,
@@ -1238,7 +1237,7 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
// The collapse-docs pass won't combine sugared/raw doc attributes, or included files with
// anything else, this will combine them for us.
let attrs = Attributes::from_ast(ast_attrs);
- if let Some(doc) = attrs.collapsed_doc_value() {
+ if let Some(doc) = attrs.opt_doc_value() {
// Use the outermost invocation, so that doctest names come from where the docs were written.
let span = ast_attrs
.iter()
diff --git a/src/librustdoc/doctest/tests.rs b/src/librustdoc/doctest/tests.rs
index 360d2259e..a30fe28f9 100644
--- a/src/librustdoc/doctest/tests.rs
+++ b/src/librustdoc/doctest/tests.rs
@@ -38,6 +38,7 @@ fn make_test_crate_name() {
let input = "use asdf::qwop;
assert_eq!(2+2, 4);";
let expected = "#![allow(unused)]
+#[allow(unused_extern_crates)]
extern crate r#asdf;
fn main() {
use asdf::qwop;
@@ -128,6 +129,7 @@ fn make_test_opts_attrs() {
let input = "use asdf::qwop;
assert_eq!(2+2, 4);";
let expected = "#![feature(sick_rad)]
+#[allow(unused_extern_crates)]
extern crate r#asdf;
fn main() {
use asdf::qwop;
@@ -141,6 +143,7 @@ assert_eq!(2+2, 4);
opts.attrs.push("feature(hella_dope)".to_string());
let expected = "#![feature(sick_rad)]
#![feature(hella_dope)]
+#[allow(unused_extern_crates)]
extern crate r#asdf;
fn main() {
use asdf::qwop;
@@ -236,6 +239,7 @@ assert_eq!(asdf::foo, 4);";
let expected = "#![allow(unused)]
extern crate hella_qwop;
+#[allow(unused_extern_crates)]
extern crate r#asdf;
fn main() {
assert_eq!(asdf::foo, 4);
diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs
index 37fd909c9..88049c4ca 100644
--- a/src/librustdoc/externalfiles.rs
+++ b/src/librustdoc/externalfiles.rs
@@ -83,14 +83,14 @@ pub(crate) fn load_string<P: AsRef<Path>>(
let contents = match fs::read(file_path) {
Ok(bytes) => bytes,
Err(e) => {
- diag.struct_err(&format!("error reading `{}`: {}", file_path.display(), e)).emit();
+ diag.struct_err(format!("error reading `{}`: {}", file_path.display(), e)).emit();
return Err(LoadStringError::ReadFail);
}
};
match str::from_utf8(&contents) {
Ok(s) => Ok(s.to_string()),
Err(_) => {
- diag.struct_err(&format!("error reading `{}`: not UTF-8", file_path.display())).emit();
+ diag.struct_err(format!("error reading `{}`: not UTF-8", file_path.display())).emit();
Err(LoadStringError::BadUtf8)
}
}
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index c03291820..8aaad8bce 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -147,7 +147,7 @@ impl Cache {
// Cache where all our extern crates are located
// FIXME: this part is specific to HTML so it'd be nice to remove it from the common code
- for &crate_num in cx.tcx.crates(()) {
+ for &crate_num in tcx.crates(()) {
let e = ExternalCrate { crate_num };
let name = e.name(tcx);
@@ -195,7 +195,13 @@ impl Cache {
impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
if item.item_id.is_local() {
- debug!("folding {} \"{:?}\", id {:?}", item.type_(), item.name, item.item_id);
+ let is_stripped = matches!(*item.kind, clean::ItemKind::StrippedItem(..));
+ debug!(
+ "folding {} (stripped: {is_stripped:?}) \"{:?}\", id {:?}",
+ item.type_(),
+ item.name,
+ item.item_id
+ );
}
// If this is a stripped module,
@@ -300,14 +306,13 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache),
ParentStackItem::Type(item_id) => item_id.as_def_id(),
};
- let path = match did.and_then(|did| self.cache.paths.get(&did)) {
+ let path = did
+ .and_then(|did| self.cache.paths.get(&did))
// The current stack not necessarily has correlation
// for where the type was defined. On the other
// hand, `paths` always has the right
// information if present.
- Some((fqp, _)) => Some(&fqp[..fqp.len() - 1]),
- None => None,
- };
+ .map(|(fqp, _)| &fqp[..fqp.len() - 1]);
((did, path), true)
}
}
@@ -322,9 +327,8 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
// which should not be indexed. The crate-item itself is
// inserted later on when serializing the search-index.
if item.item_id.as_def_id().map_or(false, |idx| !idx.is_crate_root()) {
- let desc = item.doc_value().map_or_else(String::new, |x| {
- short_markdown_summary(x.as_str(), &item.link_names(self.cache))
- });
+ let desc =
+ short_markdown_summary(&item.doc_value(), &item.link_names(self.cache));
let ty = item.type_();
if ty != ItemType::StructField
|| u16::from_str_radix(s.as_str(), 10).is_err()
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index 1b445b898..d963d6092 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -167,7 +167,7 @@ pub(crate) fn print_generic_bounds<'a, 'tcx: 'a>(
display_fn(move |f| {
let mut bounds_dup = FxHashSet::default();
- for (i, bound) in bounds.iter().filter(|b| bounds_dup.insert(b.clone())).enumerate() {
+ for (i, bound) in bounds.iter().filter(|b| bounds_dup.insert(*b)).enumerate() {
if i > 0 {
f.write_str(" + ")?;
}
@@ -439,6 +439,7 @@ impl clean::GenericBound {
let modifier_str = match modifier {
hir::TraitBoundModifier::None => "",
hir::TraitBoundModifier::Maybe => "?",
+ hir::TraitBoundModifier::Negative => "!",
// ~const is experimental; do not display those bounds in rustdoc
hir::TraitBoundModifier::MaybeConst => "",
};
@@ -1115,14 +1116,17 @@ fn fmt_type<'cx>(
ref trait_,
should_show_cast,
}) => {
+ // FIXME(inherent_associated_types): Once we support non-ADT self-types (#106719),
+ // we need to surround them with angle brackets in some cases (e.g. `<dyn …>::P`).
+
if f.alternate() {
- if should_show_cast {
+ if let Some(trait_) = trait_ && should_show_cast {
write!(f, "<{:#} as {:#}>::", self_type.print(cx), trait_.print(cx))?
} else {
write!(f, "{:#}::", self_type.print(cx))?
}
} else {
- if should_show_cast {
+ if let Some(trait_) = trait_ && should_show_cast {
write!(f, "&lt;{} as {}&gt;::", self_type.print(cx), trait_.print(cx))?
} else {
write!(f, "{}::", self_type.print(cx))?
@@ -1138,15 +1142,36 @@ fn fmt_type<'cx>(
// the ugliness comes from inlining across crates where
// everything comes in as a fully resolved QPath (hard to
// look at).
- if !f.alternate() && let Ok((url, _, path)) = href(trait_.def_id(), cx) {
- write!(
- f,
- "<a class=\"associatedtype\" href=\"{url}#{shortty}.{name}\" \
- title=\"type {path}::{name}\">{name}</a>",
- shortty = ItemType::AssocType,
- name = assoc.name,
- path = join_with_double_colon(&path),
- )
+ if !f.alternate() {
+ // FIXME(inherent_associated_types): We always link to the very first associated
+ // type (in respect to source order) that bears the given name (`assoc.name`) and that is
+ // affiliated with the computed `DefId`. This is obviously incorrect when we have
+ // multiple impl blocks. Ideally, we would thread the `DefId` of the assoc ty itself
+ // through here and map it to the corresponding HTML ID that was generated by
+ // `render::Context::derive_id` when the impl blocks were rendered.
+ // There is no such mapping unfortunately.
+ // As a hack, we could badly imitate `derive_id` here by keeping *count* when looking
+ // for the assoc ty `DefId` in `tcx.associated_items(self_ty_did).in_definition_order()`
+ // considering privacy, `doc(hidden)`, etc.
+ // I don't feel like that right now :cold_sweat:.
+
+ let parent_href = match trait_ {
+ Some(trait_) => href(trait_.def_id(), cx).ok(),
+ None => self_type.def_id(cx.cache()).and_then(|did| href(did, cx).ok()),
+ };
+
+ if let Some((url, _, path)) = parent_href {
+ write!(
+ f,
+ "<a class=\"associatedtype\" href=\"{url}#{shortty}.{name}\" \
+ title=\"type {path}::{name}\">{name}</a>",
+ shortty = ItemType::AssocType,
+ name = assoc.name,
+ path = join_with_double_colon(&path),
+ )
+ } else {
+ write!(f, "{}", assoc.name)
+ }
} else {
write!(f, "{}", assoc.name)
}?;
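
The QPath rendering above now also handles the `trait_: None` (inherent) case, and only emits the qualifying cast when a trait path is present and `should_show_cast` is set. Roughly, the cast form matters when the plain `SelfType::Assoc` spelling would be ambiguous, as in this illustrative setup:

    trait North { type Assoc; }
    trait South { type Assoc; }

    struct Pole;
    impl North for Pole { type Assoc = u8; }
    impl South for Pole { type Assoc = i8; }

    // The two projections are rendered with the qualifying cast so they can be
    // told apart: `<Pole as North>::Assoc` vs `<Pole as South>::Assoc`; an
    // inherent associated type has no trait to cast through and is rendered
    // without one.
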
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index b61dd5714..c94968b48 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -514,7 +514,7 @@ struct Classifier<'src> {
impl<'src> Classifier<'src> {
/// Takes as argument the source code to HTML-ify, the rust edition to use and the source code
- /// file span which will be used later on by the `span_correspondance_map`.
+ /// file span which will be used later on by the `span_correspondence_map`.
fn new(src: &str, file_span: Span, decoration_info: Option<DecorationInfo>) -> Classifier<'_> {
let tokens = PeekIter::new(TokenIter { src, cursor: Cursor::new(src) });
let decorations = decoration_info.map(Decorations::new);
@@ -649,7 +649,7 @@ impl<'src> Classifier<'src> {
///
/// `before` is the position of the given token in the `source` string and is used as "lo" byte
/// in case we want to try to generate a link for this token using the
- /// `span_correspondance_map`.
+ /// `span_correspondence_map`.
fn advance(
&mut self,
token: TokenKind,
@@ -811,7 +811,9 @@ impl<'src> Classifier<'src> {
| LiteralKind::Str { .. }
| LiteralKind::ByteStr { .. }
| LiteralKind::RawStr { .. }
- | LiteralKind::RawByteStr { .. } => Class::String,
+ | LiteralKind::RawByteStr { .. }
+ | LiteralKind::CStr { .. }
+ | LiteralKind::RawCStr { .. } => Class::String,
// Number literals.
LiteralKind::Float { .. } | LiteralKind::Int { .. } => Class::Number,
},
@@ -895,7 +897,7 @@ fn exit_span(out: &mut impl Write, closing_tag: &str) {
/// flexible.
///
/// Note that if `context` is not `None` and that the given `klass` contains a `Span`, the function
-/// will then try to find this `span` in the `span_correspondance_map`. If found, it'll then
+/// will then try to find this `span` in the `span_correspondence_map`. If found, it'll then
/// generate a link for this element (which corresponds to where its definition is located).
fn string<T: Display>(
out: &mut impl Write,
@@ -916,7 +918,7 @@ fn string<T: Display>(
/// * If `klass` is `Some` but `klass.get_span()` is `None`, it writes the text wrapped in a
/// `<span>` with the provided `klass`.
/// * If `klass` is `Some` and has a [`rustc_span::Span`], it then tries to generate a link (`<a>`
-/// element) by retrieving the link information from the `span_correspondance_map` that was filled
+/// element) by retrieving the link information from the `span_correspondence_map` that was filled
/// in `span_map.rs::collect_spans_and_sources`. If it cannot retrieve the information, then it's
/// the same as the second point (`klass` is `Some` but doesn't have a [`rustc_span::Span`]).
fn string_without_closing_tag<T: Display>(
@@ -963,7 +965,7 @@ fn string_without_closing_tag<T: Display>(
if let Some(href_context) = href_context {
if let Some(href) =
- href_context.context.shared.span_correspondance_map.get(&def_span).and_then(|href| {
+ href_context.context.shared.span_correspondence_map.get(&def_span).and_then(|href| {
let context = href_context.context;
// FIXME: later on, it'd be nice to provide two links (if possible) for all items:
// one to the documentation page and one to the source definition.
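
The two new `LiteralKind` arms classify C string literals the same way as other string literals. A short example of the tokens they cover, assuming a toolchain recent enough to accept the `c"…"` literal syntax (it was still feature-gated around the time of this change):

    let c_str = c"hello, world";       // LiteralKind::CStr    -> Class::String
    let raw_c_str = cr"no \n escapes"; // LiteralKind::RawCStr -> Class::String
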
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 00aadb8e8..9bb20022c 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -551,7 +551,15 @@ impl<'a, I: Iterator<Item = Event<'a>>> SummaryLine<'a, I> {
}
fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
- matches!(t, Tag::Paragraph | Tag::Emphasis | Tag::Strong | Tag::Link(..) | Tag::BlockQuote)
+ matches!(
+ t,
+ Tag::Paragraph
+ | Tag::Emphasis
+ | Tag::Strong
+ | Tag::Strikethrough
+ | Tag::Link(..)
+ | Tag::BlockQuote
+ )
}
fn is_forbidden_tag(t: &Tag<'_>) -> bool {
@@ -773,7 +781,7 @@ impl<'tcx> ExtraInfo<'tcx> {
ExtraInfo { def_id, sp, tcx }
}
- fn error_invalid_codeblock_attr(&self, msg: &str, help: &str) {
+ fn error_invalid_codeblock_attr(&self, msg: String, help: &str) {
if let Some(def_id) = self.def_id.as_local() {
self.tcx.struct_span_lint_hir(
crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
@@ -948,7 +956,7 @@ impl LangString {
} {
if let Some(extra) = extra {
extra.error_invalid_codeblock_attr(
- &format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
+ format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
help,
);
}
@@ -1229,7 +1237,27 @@ pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> Strin
pub(crate) struct MarkdownLink {
pub kind: LinkType,
pub link: String,
- pub range: Range<usize>,
+ pub range: MarkdownLinkRange,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum MarkdownLinkRange {
+ /// Normally, markdown link warnings point only at the destination.
+ Destination(Range<usize>),
+ /// In some cases, it's not possible to point at the destination.
+ /// Usually, this happens because backslashes `\\` are used.
+ /// When that happens, point at the whole link, and don't provide structured suggestions.
+ WholeLink(Range<usize>),
+}
+
+impl MarkdownLinkRange {
+ /// Extracts the inner range.
+ pub fn inner_range(&self) -> &Range<usize> {
+ match self {
+ MarkdownLinkRange::Destination(range) => range,
+ MarkdownLinkRange::WholeLink(range) => range,
+ }
+ }
}
pub(crate) fn markdown_links<R>(
@@ -1249,9 +1277,9 @@ pub(crate) fn markdown_links<R>(
if md_start <= s_start && s_end <= md_end {
let start = s_start.offset_from(md_start) as usize;
let end = s_end.offset_from(md_start) as usize;
- start..end
+ MarkdownLinkRange::Destination(start..end)
} else {
- fallback
+ MarkdownLinkRange::WholeLink(fallback)
}
};
@@ -1259,6 +1287,7 @@ pub(crate) fn markdown_links<R>(
// For diagnostics, we want to underline the link's definition but `span` will point at
// where the link is used. This is a problem for reference-style links, where the definition
// is separate from the usage.
+
match link {
// `Borrowed` variant means the string (the link's destination) may come directly from
// the markdown text and we can locate the original link destination.
@@ -1267,10 +1296,82 @@ pub(crate) fn markdown_links<R>(
CowStr::Borrowed(s) => locate(s, span),
// For anything else, we can only use the provided range.
- CowStr::Boxed(_) | CowStr::Inlined(_) => span,
+ CowStr::Boxed(_) | CowStr::Inlined(_) => MarkdownLinkRange::WholeLink(span),
}
};
+ let span_for_offset_backward = |span: Range<usize>, open: u8, close: u8| {
+ let mut open_brace = !0;
+ let mut close_brace = !0;
+ for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate().rev() {
+ let i = i + span.start;
+ if b == close {
+ close_brace = i;
+ break;
+ }
+ }
+ if close_brace < span.start || close_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ let mut nesting = 1;
+ for (i, b) in md.as_bytes()[span.start..close_brace].iter().copied().enumerate().rev() {
+ let i = i + span.start;
+ if b == close {
+ nesting += 1;
+ }
+ if b == open {
+ nesting -= 1;
+ }
+ if nesting == 0 {
+ open_brace = i;
+ break;
+ }
+ }
+ assert!(open_brace != close_brace);
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ // do not actually include braces in the span
+ let range = (open_brace + 1)..close_brace;
+ MarkdownLinkRange::Destination(range.clone())
+ };
+
+ let span_for_offset_forward = |span: Range<usize>, open: u8, close: u8| {
+ let mut open_brace = !0;
+ let mut close_brace = !0;
+ for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate() {
+ let i = i + span.start;
+ if b == open {
+ open_brace = i;
+ break;
+ }
+ }
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ let mut nesting = 0;
+ for (i, b) in md.as_bytes()[open_brace..span.end].iter().copied().enumerate() {
+ let i = i + open_brace;
+ if b == close {
+ nesting -= 1;
+ }
+ if b == open {
+ nesting += 1;
+ }
+ if nesting == 0 {
+ close_brace = i;
+ break;
+ }
+ }
+ assert!(open_brace != close_brace);
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ // do not actually include braces in the span
+ let range = (open_brace + 1)..close_brace;
+ MarkdownLinkRange::Destination(range.clone())
+ };
+
Parser::new_with_broken_link_callback(
md,
main_body_opts(),
@@ -1279,11 +1380,20 @@ pub(crate) fn markdown_links<R>(
.into_offset_iter()
.filter_map(|(event, span)| match event {
Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => {
- preprocess_link(MarkdownLink {
- kind: link_type,
- range: span_for_link(&dest, span),
- link: dest.into_string(),
- })
+ let range = match link_type {
+ // Link is pulled from the link itself.
+ LinkType::ReferenceUnknown | LinkType::ShortcutUnknown => {
+ span_for_offset_backward(span, b'[', b']')
+ }
+ LinkType::CollapsedUnknown => span_for_offset_forward(span, b'[', b']'),
+ LinkType::Inline => span_for_offset_backward(span, b'(', b')'),
+ // Link is pulled from elsewhere in the document.
+ LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => {
+ span_for_link(&dest, span)
+ }
+ LinkType::Autolink | LinkType::Email => unreachable!(),
+ };
+ preprocess_link(MarkdownLink { kind: link_type, range, link: dest.into_string() })
}
_ => None,
})
@@ -1392,7 +1502,7 @@ static DEFAULT_ID_MAP: Lazy<FxHashMap<Cow<'static, str>, usize>> = Lazy::new(||
fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
let mut map = FxHashMap::default();
- // This is the list of IDs used in Javascript.
+ // This is the list of IDs used in JavaScript.
map.insert("help".into(), 1);
map.insert("settings".into(), 1);
map.insert("not-displayed".into(), 1);
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
index ac5054ce1..56af257fd 100644
--- a/src/librustdoc/html/render/context.rs
+++ b/src/librustdoc/html/render/context.rs
@@ -122,9 +122,9 @@ pub(crate) struct SharedContext<'tcx> {
/// the crate.
redirections: Option<RefCell<FxHashMap<String, String>>>,
- /// Correspondance map used to link types used in the source code pages to allow to click on
+ /// Correspondence map used to link types used in the source code pages to allow clicking on
/// links to jump to the type's definition.
- pub(crate) span_correspondance_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
+ pub(crate) span_correspondence_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
/// The [`Cache`] used during rendering.
pub(crate) cache: Cache,
@@ -184,11 +184,8 @@ impl<'tcx> Context<'tcx> {
};
title.push_str(" - Rust");
let tyname = it.type_();
- let desc = it
- .doc_value()
- .as_ref()
- .map(|doc| plain_text_summary(doc, &it.link_names(&self.cache())));
- let desc = if let Some(desc) = desc {
+ let desc = plain_text_summary(&it.doc_value(), &it.link_names(&self.cache()));
+ let desc = if !desc.is_empty() {
desc
} else if it.is_crate() {
format!("API documentation for the Rust `{}` crate.", self.shared.layout.krate)
@@ -531,7 +528,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
errors: receiver,
redirections: if generate_redirect_map { Some(Default::default()) } else { None },
show_type_layout,
- span_correspondance_map: matches,
+ span_correspondence_map: matches,
cache,
call_locations,
};
@@ -647,7 +644,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
</div>\
<noscript>\
<section>\
- You need to enable Javascript be able to update your settings.\
+ You need to enable JavaScript to be able to update your settings.\
</section>\
</noscript>\
<link rel=\"stylesheet\" \
@@ -709,7 +706,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
</div>\
<noscript>\
<section>\
- <p>You need to enable Javascript to use keyboard commands or search.</p>\
+ <p>You need to enable JavaScript to use keyboard commands or search.</p>\
<p>For more information, browse the <a href=\"https://doc.rust-lang.org/rustdoc/\">rustdoc handbook</a>.</p>\
</section>\
</noscript>",
@@ -746,7 +743,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
// Flush pending errors.
Rc::get_mut(&mut self.shared).unwrap().fs.close();
let nb_errors =
- self.shared.errors.iter().map(|err| self.tcx().sess.struct_err(&err).emit()).count();
+ self.shared.errors.iter().map(|err| self.tcx().sess.struct_err(err).emit()).count();
if nb_errors > 0 {
Err(Error::new(io::Error::new(io::ErrorKind::Other, "I/O error"), ""))
} else {
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index 463184aca..9e3b5d10a 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -32,6 +32,7 @@ mod context;
mod print_item;
mod sidebar;
mod span_map;
+mod type_layout;
mod write_shared;
pub(crate) use self::context::*;
@@ -47,7 +48,6 @@ use std::str;
use std::string::ToString;
use askama::Template;
-use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -468,7 +468,8 @@ fn document_short<'a, 'cx: 'a>(
if !show_def_docs {
return Ok(());
}
- if let Some(s) = item.doc_value() {
+ let s = item.doc_value();
+ if !s.is_empty() {
let (mut summary_html, has_more_content) =
MarkdownSummaryLine(&s, &item.links(cx)).into_string_with_has_more_content();
@@ -511,7 +512,7 @@ fn document_full_inner<'a, 'cx: 'a>(
heading_offset: HeadingOffset,
) -> impl fmt::Display + 'a + Captures<'cx> {
display_fn(move |f| {
- if let Some(s) = item.collapsed_doc_value() {
+ if let Some(s) = item.opt_doc_value() {
debug!("Doc block: =====\n{}\n=====", s);
if is_collapsible {
write!(
@@ -848,10 +849,10 @@ fn assoc_method(
let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
header_len += 4;
let indent_str = " ";
- write!(w, "{}", render_attributes_in_pre(meth, indent_str));
+ write!(w, "{}", render_attributes_in_pre(meth, indent_str, tcx));
(4, indent_str, Ending::NoNewline)
} else {
- render_attributes_in_code(w, meth);
+ render_attributes_in_code(w, meth, tcx);
(0, "", Ending::Newline)
};
w.reserve(header_len + "<a href=\"\" class=\"fn\">{".len() + "</a>".len());
@@ -1020,36 +1021,15 @@ fn render_assoc_item(
}
}
-const ALLOWED_ATTRIBUTES: &[Symbol] =
- &[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive];
-
-fn attributes(it: &clean::Item) -> Vec<String> {
- it.attrs
- .other_attrs
- .iter()
- .filter_map(|attr| {
- if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
- Some(
- pprust::attribute_to_string(attr)
- .replace("\\\n", "")
- .replace('\n', "")
- .replace(" ", " "),
- )
- } else {
- None
- }
- })
- .collect()
-}
-
// When an attribute is rendered inside a `<pre>` tag, it is formatted using
// a whitespace prefix and newline.
-fn render_attributes_in_pre<'a>(
+fn render_attributes_in_pre<'a, 'b: 'a>(
it: &'a clean::Item,
prefix: &'a str,
-) -> impl fmt::Display + Captures<'a> {
+ tcx: TyCtxt<'b>,
+) -> impl fmt::Display + Captures<'a> + Captures<'b> {
crate::html::format::display_fn(move |f| {
- for a in attributes(it) {
+ for a in it.attributes(tcx, false) {
writeln!(f, "{}{}", prefix, a)?;
}
Ok(())
@@ -1058,8 +1038,8 @@ fn render_attributes_in_pre<'a>(
// When an attribute is rendered inside a <code> tag, it is formatted using
// a div to produce a newline after it.
-fn render_attributes_in_code(w: &mut Buffer, it: &clean::Item) {
- for a in attributes(it) {
+fn render_attributes_in_code(w: &mut Buffer, it: &clean::Item, tcx: TyCtxt<'_>) {
+ for a in it.attributes(tcx, false) {
write!(w, "<div class=\"code-attribute\">{}</div>", a);
}
}
@@ -1154,10 +1134,10 @@ fn render_assoc_items_inner(
let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none());
if !non_trait.is_empty() {
let mut tmp_buf = Buffer::html();
- let (render_mode, id) = match what {
+ let (render_mode, id, class_html) = match what {
AssocItemRender::All => {
write_impl_section_heading(&mut tmp_buf, "Implementations", "implementations");
- (RenderMode::Normal, "implementations-list".to_owned())
+ (RenderMode::Normal, "implementations-list".to_owned(), "")
}
AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
let id =
@@ -1174,7 +1154,11 @@ fn render_assoc_items_inner(
),
&id,
);
- (RenderMode::ForDeref { mut_: deref_mut_ }, cx.derive_id(id))
+ (
+ RenderMode::ForDeref { mut_: deref_mut_ },
+ cx.derive_id(id),
+ r#" class="impl-items""#,
+ )
}
};
let mut impls_buf = Buffer::html();
@@ -1198,7 +1182,7 @@ fn render_assoc_items_inner(
}
if !impls_buf.is_empty() {
write!(w, "{}", tmp_buf.into_inner()).unwrap();
- write!(w, "<div id=\"{}\">", id).unwrap();
+ write!(w, "<div id=\"{id}\"{class_html}>").unwrap();
write!(w, "{}", impls_buf.into_inner()).unwrap();
w.write_str("</div>").unwrap();
}
@@ -1493,7 +1477,7 @@ fn render_impl(
if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
// We need the stability of the item from the trait
// because impls can't have a stability.
- if item.doc_value().is_some() {
+ if !item.doc_value().is_empty() {
document_item_info(cx, it, Some(parent))
.render_into(&mut info_buffer)
.unwrap();
@@ -1764,11 +1748,11 @@ fn render_impl(
write!(w, "</summary>")
}
- if let Some(ref dox) = i.impl_item.collapsed_doc_value() {
+ if let Some(ref dox) = i.impl_item.opt_doc_value() {
if trait_.is_none() && i.inner_impl().items.is_empty() {
w.write_str(
"<div class=\"item-info\">\
- <div class=\"stab empty-impl\">This impl block contains no items.</div>
+ <div class=\"stab empty-impl\">This impl block contains no items.</div>\
</div>",
);
}
@@ -1787,12 +1771,14 @@ fn render_impl(
.into_string()
);
}
+ if !default_impl_items.is_empty() || !impl_items.is_empty() {
+ w.write_str("<div class=\"impl-items\">");
+ close_tags.insert_str(0, "</div>");
+ }
}
if !default_impl_items.is_empty() || !impl_items.is_empty() {
- w.write_str("<div class=\"impl-items\">");
w.push_buffer(default_impl_items);
w.push_buffer(impl_items);
- close_tags.insert_str(0, "</div>");
}
w.write_str(&close_tags);
}
@@ -1947,8 +1933,6 @@ pub(crate) fn small_url_encode(s: String) -> String {
// While the same is not true for hashes, rustdoc only needs to be
// consistent with itself when encoding them.
st += "+";
- } else if b == b'%' {
- st += "%%";
} else {
write!(st, "%{:02X}", b).unwrap();
}
@@ -2217,7 +2201,9 @@ fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
}
clean::Type::QPath(box clean::QPathData { self_type, trait_, .. }) => {
work.push_back(self_type);
- process_path(trait_.def_id());
+ if let Some(trait_) = trait_ {
+ process_path(trait_.def_id());
+ }
}
_ => {}
}
@@ -2271,8 +2257,7 @@ fn render_call_locations<W: fmt::Write>(mut w: W, cx: &mut Context<'_>, item: &c
Ok(contents) => contents,
Err(err) => {
let span = item.span(tcx).map_or(rustc_span::DUMMY_SP, |span| span.inner());
- tcx.sess
- .span_err(span, &format!("failed to read file {}: {}", path.display(), err));
+ tcx.sess.span_err(span, format!("failed to read file {}: {}", path.display(), err));
return false;
}
};
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index 9a968e48b..62027a3fa 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -6,16 +6,16 @@ use rustc_hir as hir;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::stability;
-use rustc_middle::span_bug;
-use rustc_middle::ty::layout::LayoutError;
-use rustc_middle::ty::{self, Adt, TyCtxt};
+use rustc_middle::ty::{self, TyCtxt};
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Symbol};
-use rustc_target::abi::{LayoutS, Primitive, TagEncoding, Variants};
+use std::borrow::Borrow;
+use std::cell::{RefCell, RefMut};
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
+use super::type_layout::document_type_layout;
use super::{
collect_paths_for_type, document, ensure_trailing_slash, get_filtered_impls_for_reference,
item_ty_to_section, notable_traits_button, notable_traits_json, render_all_impls,
@@ -218,6 +218,53 @@ fn toggle_close(mut w: impl fmt::Write) {
w.write_str("</details>").unwrap();
}
+trait ItemTemplate<'a, 'cx: 'a>: askama::Template + fmt::Display {
+ fn item_and_mut_cx(&self) -> (&'a clean::Item, RefMut<'_, &'a mut Context<'cx>>);
+}
+
+fn item_template_document<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, mut cx) = templ.item_and_mut_cx();
+ let v = document(*cx, item, None, HeadingOffset::H2);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_document_type_layout<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, cx) = templ.item_and_mut_cx();
+ let def_id = item.item_id.expect_def_id();
+ let v = document_type_layout(*cx, def_id);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_render_attributes_in_pre<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, cx) = templ.item_and_mut_cx();
+ let tcx = cx.tcx();
+ let v = render_attributes_in_pre(item, "", tcx);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_render_assoc_items<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, mut cx) = templ.item_and_mut_cx();
+ let def_id = item.item_id.expect_def_id();
+ let v = render_assoc_items(*cx, item, def_id, AssocItemRender::All);
+ write!(f, "{v}")
+ })
+}
+
fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: &[clean::Item]) {
write!(w, "{}", document(cx, item, None, HeadingOffset::H2));
@@ -358,18 +405,18 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
clean::ImportItem(ref import) => {
let stab_tags = if let Some(import_def_id) = import.source.did {
- let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id);
+ let ast_attrs = tcx.get_attrs_unchecked(import_def_id);
let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));
// Just need an item with the correct def_id and attrs
let import_item = clean::Item {
item_id: import_def_id.into(),
attrs: import_attrs,
- cfg: ast_attrs.cfg(cx.tcx(), &cx.cache().hidden_cfg),
+ cfg: ast_attrs.cfg(tcx, &cx.cache().hidden_cfg),
..myitem.clone()
};
- let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()).to_string());
+ let stab_tags = Some(extra_info_tags(&import_item, item, tcx).to_string());
stab_tags
} else {
None
@@ -407,8 +454,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
let unsafety_flag = match *myitem.kind {
clean::FunctionItem(_) | clean::ForeignFunctionItem(_)
- if myitem.fn_header(cx.tcx()).unwrap().unsafety
- == hir::Unsafety::Unsafe =>
+ if myitem.fn_header(tcx).unwrap().unsafety == hir::Unsafety::Unsafe =>
{
"<sup title=\"unsafe function\">⚠</sup>"
}
@@ -422,9 +468,9 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
_ => "",
};
- let doc_value = myitem.doc_value().unwrap_or_default();
w.write_str(ITEM_TABLE_ROW_OPEN);
- let docs = MarkdownSummaryLine(&doc_value, &myitem.links(cx)).into_string();
+ let docs =
+ MarkdownSummaryLine(&myitem.doc_value(), &myitem.links(cx)).into_string();
let (docs_before, docs_after) = if docs.is_empty() {
("", "")
} else {
@@ -441,7 +487,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
{docs_before}{docs}{docs_after}",
name = myitem.name.unwrap(),
visibility_emoji = visibility_emoji,
- stab_tags = extra_info_tags(myitem, item, cx.tcx()),
+ stab_tags = extra_info_tags(myitem, item, tcx),
class = myitem.type_(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
@@ -550,7 +596,7 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle
w,
"{attrs}{vis}{constness}{asyncness}{unsafety}{abi}fn \
{name}{generics}{decl}{notable_traits}{where_clause}",
- attrs = render_attributes_in_pre(it, ""),
+ attrs = render_attributes_in_pre(it, "", tcx),
vis = visibility,
constness = constness,
asyncness = asyncness,
@@ -591,7 +637,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
it.name.unwrap(),
t.generics.print(cx),
bounds,
- attrs = render_attributes_in_pre(it, ""),
+ attrs = render_attributes_in_pre(it, "", tcx),
);
if !t.generics.where_predicates.is_empty() {
@@ -888,7 +934,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", "");
for implementor in foreign {
- let provided_methods = implementor.inner_impl().provided_trait_methods(cx.tcx());
+ let provided_methods = implementor.inner_impl().provided_trait_methods(tcx);
let assoc_link =
AssocItemLink::GotoSource(implementor.impl_item.item_id, &provided_methods);
render_impl(
@@ -921,7 +967,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
}
w.write_str("</div>");
- if t.is_auto(cx.tcx()) {
+ if t.is_auto(tcx) {
write_small_section_header(
w,
"synthetic-implementors",
@@ -950,7 +996,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
"<div id=\"implementors-list\"></div>",
);
- if t.is_auto(cx.tcx()) {
+ if t.is_auto(tcx) {
write_small_section_header(
w,
"synthetic-implementors",
@@ -1065,7 +1111,7 @@ fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &
t.generics.print(cx),
print_where_clause(&t.generics, cx, 0, Ending::Newline),
bounds(&t.bounds, true, cx),
- attrs = render_attributes_in_pre(it, ""),
+ attrs = render_attributes_in_pre(it, "", cx.tcx()),
);
});
@@ -1087,7 +1133,7 @@ fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &cl
t.generics.print(cx),
where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
bounds = bounds(&t.bounds, false, cx),
- attrs = render_attributes_in_pre(it, ""),
+ attrs = render_attributes_in_pre(it, "", cx.tcx()),
);
});
@@ -1111,7 +1157,7 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea
t.generics.print(cx),
where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
type_ = t.type_.print(cx),
- attrs = render_attributes_in_pre(it, ""),
+ attrs = render_attributes_in_pre(it, "", cx.tcx()),
);
});
}
@@ -1133,32 +1179,18 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
#[derive(Template)]
#[template(path = "item_union.html")]
struct ItemUnion<'a, 'cx> {
- cx: std::cell::RefCell<&'a mut Context<'cx>>,
+ cx: RefCell<&'a mut Context<'cx>>,
it: &'a clean::Item,
s: &'a clean::Union,
}
- impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
- fn render_assoc_items<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let def_id = self.it.item_id.expect_def_id();
- let mut cx = self.cx.borrow_mut();
- let v = render_assoc_items(*cx, self.it, def_id, AssocItemRender::All);
- write!(f, "{v}")
- })
- }
- fn document_type_layout<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let def_id = self.it.item_id.expect_def_id();
- let cx = self.cx.borrow_mut();
- let v = document_type_layout(*cx, def_id);
- write!(f, "{v}")
- })
+ impl<'a, 'cx: 'a> ItemTemplate<'a, 'cx> for ItemUnion<'a, 'cx> {
+ fn item_and_mut_cx(&self) -> (&'a clean::Item, RefMut<'_, &'a mut Context<'cx>>) {
+ (self.it, self.cx.borrow_mut())
}
+ }
+
+ impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
fn render_union<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
display_fn(move |f| {
let cx = self.cx.borrow_mut();
@@ -1166,21 +1198,6 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
write!(f, "{v}")
})
}
- fn render_attributes_in_pre<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let v = render_attributes_in_pre(self.it, "");
- write!(f, "{v}")
- })
- }
- fn document<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let mut cx = self.cx.borrow_mut();
- let v = document(*cx, self.it, None, HeadingOffset::H2);
- write!(f, "{v}")
- })
- }
fn document_field<'b>(
&'b self,
field: &'a clean::Item,
@@ -1220,7 +1237,7 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
}
}
- ItemUnion { cx: std::cell::RefCell::new(cx), it, s }.render_into(w).unwrap();
+ ItemUnion { cx: RefCell::new(cx), it, s }.render_into(w).unwrap();
}
fn print_tuple_struct_fields<'a, 'cx: 'a>(
@@ -1246,13 +1263,13 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
let tcx = cx.tcx();
let count_variants = e.variants().count();
wrap_item(w, |mut w| {
+ render_attributes_in_code(w, it, tcx);
write!(
w,
- "{attrs}{}enum {}{}",
+ "{}enum {}{}",
visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
it.name.unwrap(),
e.generics.print(cx),
- attrs = render_attributes_in_pre(it, ""),
);
if !print_where_clause_and_check(w, &e.generics, cx) {
// If there wasn't a `where` clause, we add a whitespace.
@@ -1339,7 +1356,7 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
clean::VariantKind::Tuple(fields) => {
// Documentation on tuple variant fields is rare, so to reduce noise we only emit
// the section if at least one field is documented.
- if fields.iter().any(|f| f.doc_value().is_some()) {
+ if fields.iter().any(|f| !f.doc_value().is_empty()) {
Some(("Tuple Fields", fields))
} else {
None
@@ -1447,7 +1464,7 @@ fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) {
wrap_item(w, |w| {
let tcx = cx.tcx();
- render_attributes_in_code(w, it);
+ render_attributes_in_code(w, it, tcx);
write!(
w,
@@ -1494,7 +1511,7 @@ fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &cle
fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) {
wrap_item(w, |w| {
- render_attributes_in_code(w, it);
+ render_attributes_in_code(w, it, cx.tcx());
render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx);
});
@@ -1542,11 +1559,12 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
write!(w, "{}", document_type_layout(cx, def_id));
}
-fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
- wrap_item(w, |w| {
- render_attributes_in_code(w, it);
+fn item_static(w: &mut impl fmt::Write, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
+ let mut buffer = Buffer::new();
+ wrap_item(&mut buffer, |buffer| {
+ render_attributes_in_code(buffer, it, cx.tcx());
write!(
- w,
+ buffer,
"{vis}static {mutability}{name}: {typ}",
vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
mutability = s.mutability.print_with_space(),
@@ -1554,13 +1572,16 @@ fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
typ = s.type_.print(cx)
);
});
- write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
+
+ write!(w, "{}", buffer.into_inner()).unwrap();
+
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2)).unwrap();
}
fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
wrap_item(w, |w| {
w.write_str("extern {\n");
- render_attributes_in_code(w, it);
+ render_attributes_in_code(w, it, cx.tcx());
write!(
w,
" {}type {};\n}}",
@@ -1933,118 +1954,6 @@ fn document_non_exhaustive<'a>(item: &'a clean::Item) -> impl fmt::Display + 'a
})
}
-fn document_type_layout<'a, 'cx: 'a>(
- cx: &'a Context<'cx>,
- ty_def_id: DefId,
-) -> impl fmt::Display + 'a + Captures<'cx> {
- fn write_size_of_layout(mut w: impl fmt::Write, layout: &LayoutS, tag_size: u64) {
- if layout.abi.is_unsized() {
- write!(w, "(unsized)").unwrap();
- } else {
- let size = layout.size.bytes() - tag_size;
- write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" }).unwrap();
- if layout.abi.is_uninhabited() {
- write!(
- w,
- " (<a href=\"https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited\">uninhabited</a>)"
- ).unwrap();
- }
- }
- }
-
- display_fn(move |mut f| {
- if !cx.shared.show_type_layout {
- return Ok(());
- }
-
- writeln!(
- f,
- "<h2 id=\"layout\" class=\"small-section-header\"> \
- Layout<a href=\"#layout\" class=\"anchor\">§</a></h2>"
- )?;
- writeln!(f, "<div class=\"docblock\">")?;
-
- let tcx = cx.tcx();
- let param_env = tcx.param_env(ty_def_id);
- let ty = tcx.type_of(ty_def_id).subst_identity();
- match tcx.layout_of(param_env.and(ty)) {
- Ok(ty_layout) => {
- writeln!(
- f,
- "<div class=\"warning\"><p><strong>Note:</strong> Most layout information is \
- <strong>completely unstable</strong> and may even differ between compilations. \
- The only exception is types with certain <code>repr(...)</code> attributes. \
- Please see the Rust Reference’s \
- <a href=\"https://doc.rust-lang.org/reference/type-layout.html\">“Type Layout”</a> \
- chapter for details on type layout guarantees.</p></div>"
- )?;
- f.write_str("<p><strong>Size:</strong> ")?;
- write_size_of_layout(&mut f, &ty_layout.layout.0, 0);
- writeln!(f, "</p>")?;
- if let Variants::Multiple { variants, tag, tag_encoding, .. } =
- &ty_layout.layout.variants()
- {
- if !variants.is_empty() {
- f.write_str(
- "<p><strong>Size for each variant:</strong></p>\
- <ul>",
- )?;
-
- let Adt(adt, _) = ty_layout.ty.kind() else {
- span_bug!(tcx.def_span(ty_def_id), "not an adt")
- };
-
- let tag_size = if let TagEncoding::Niche { .. } = tag_encoding {
- 0
- } else if let Primitive::Int(i, _) = tag.primitive() {
- i.size().bytes()
- } else {
- span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
- };
-
- for (index, layout) in variants.iter_enumerated() {
- let name = adt.variant(index).name;
- write!(&mut f, "<li><code>{name}</code>: ")?;
- write_size_of_layout(&mut f, layout, tag_size);
- writeln!(&mut f, "</li>")?;
- }
- f.write_str("</ul>")?;
- }
- }
- }
- // This kind of layout error can occur with valid code, e.g. if you try to
- // get the layout of a generic type such as `Vec<T>`.
- Err(LayoutError::Unknown(_)) => {
- writeln!(
- f,
- "<p><strong>Note:</strong> Unable to compute type layout, \
- possibly due to this type having generic parameters. \
- Layout can only be computed for concrete, fully-instantiated types.</p>"
- )?;
- }
- // This kind of error probably can't happen with valid code, but we don't
- // want to panic and prevent the docs from building, so we just let the
- // user know that we couldn't compute the layout.
- Err(LayoutError::SizeOverflow(_)) => {
- writeln!(
- f,
- "<p><strong>Note:</strong> Encountered an error during type layout; \
- the type was too big.</p>"
- )?;
- }
- Err(LayoutError::NormalizationFailure(_, _)) => {
- writeln!(
- f,
- "<p><strong>Note:</strong> Encountered an error during type layout; \
- the type failed to be normalized.</p>"
- )?;
- }
- }
-
- writeln!(f, "</div>")
- })
-}
-
fn pluralize(count: usize) -> &'static str {
if count > 1 { "s" } else { "" }
}
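
The new `ItemTemplate` trait above lets askama templates share free helper functions (`item_template_document`, `item_template_render_assoc_items`, and friends) instead of each template re-implementing them: every helper asks the template for the item plus a fresh `RefMut` over the rendering context. A minimal sketch of the underlying pattern with illustrative stand-in types (`Context`, `Item`, and `Template` below are not rustdoc's own):

    use std::cell::RefCell;
    use std::fmt;

    struct Context {
        rendered: usize,
    }
    struct Item {
        name: &'static str,
    }

    struct Template<'a> {
        cx: RefCell<&'a mut Context>,
        item: &'a Item,
    }

    impl<'a> Template<'a> {
        // Each helper takes a short-lived mutable borrow of the shared context,
        // so several helpers can run one after another from the same template.
        fn document(&self) -> String {
            let mut cx = self.cx.borrow_mut();
            cx.rendered += 1;
            format!("docs for {}", self.item.name)
        }
    }

    impl fmt::Display for Template<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.document())
        }
    }

    fn main() {
        let mut cx = Context { rendered: 0 };
        let item = Item { name: "Union" };
        {
            let templ = Template { cx: RefCell::new(&mut cx), item: &item };
            println!("{}", templ);
        }
        println!("helpers called: {}", cx.rendered);
    }

The `RefCell` is what allows the template, which askama only hands out by shared reference, to still reach a mutable context while rendering.
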
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
index f5b4a3f5a..846299f02 100644
--- a/src/librustdoc/html/render/search_index.rs
+++ b/src/librustdoc/html/render/search_index.rs
@@ -28,9 +28,7 @@ pub(crate) fn build_index<'tcx>(
// has since been learned.
for &OrphanImplItem { parent, ref item, ref impl_generics } in &cache.orphan_impl_items {
if let Some((fqp, _)) = cache.paths.get(&parent) {
- let desc = item
- .doc_value()
- .map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(cache)));
+ let desc = short_markdown_summary(&item.doc_value(), &item.link_names(cache));
cache.search_index.push(IndexItem {
ty: item.type_(),
name: item.name.unwrap(),
@@ -45,10 +43,8 @@ pub(crate) fn build_index<'tcx>(
}
}
- let crate_doc = krate
- .module
- .doc_value()
- .map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(cache)));
+ let crate_doc =
+ short_markdown_summary(&krate.module.doc_value(), &krate.module.link_names(cache));
// Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias,
// we need the alias element to have an array of items.
@@ -391,12 +387,14 @@ fn get_index_type_id(clean_type: &clean::Type) -> Option<RenderTypeId> {
clean::BorrowedRef { ref type_, .. } | clean::RawPointer(_, ref type_) => {
get_index_type_id(type_)
}
+ // The type parameters are converted to generics in `add_generics_and_bounds_as_types`
+ clean::Slice(_) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Slice)),
+ clean::Array(_, _) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Array)),
+ // Not supported yet
clean::BareFunction(_)
| clean::Generic(_)
| clean::ImplTrait(_)
| clean::Tuple(_)
- | clean::Slice(_)
- | clean::Array(_, _)
| clean::QPath { .. }
| clean::Infer => None,
}
@@ -563,6 +561,30 @@ fn add_generics_and_bounds_as_types<'tcx, 'a>(
}
}
insert_ty(res, arg.clone(), ty_generics);
+ } else if let Type::Slice(ref ty) = *arg {
+ let mut ty_generics = Vec::new();
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
+ insert_ty(res, arg.clone(), ty_generics);
+ } else if let Type::Array(ref ty, _) = *arg {
+ let mut ty_generics = Vec::new();
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
+ insert_ty(res, arg.clone(), ty_generics);
} else {
// This is not a type parameter. So for example if we have `T, U: Option<T>`, and we're
// looking at `Option`, we enter this "else" condition, otherwise if it's `T`, we don't.
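
The new `Slice` and `Array` arms above recurse into the element type, so slice and array parameters now contribute their element types to the function search index rather than being ignored. As a hedged illustration (the function below is hypothetical, not taken from rustdoc or the standard library), a signature like this becomes discoverable by a type-based search that mentions `u8` in the argument or return position:

    pub fn first_byte(bytes: &[u8]) -> Option<u8> {
        bytes.first().copied()
    }

    fn main() {
        assert_eq!(first_byte(&[1, 2, 3]), Some(1));
    }

Together with the `get_index_type_id` change earlier in this file's diff, the slice itself is also indexed as the slice primitive, so both the container and its element participate in matching.
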
diff --git a/src/librustdoc/html/render/type_layout.rs b/src/librustdoc/html/render/type_layout.rs
new file mode 100644
index 000000000..c9b95b1e6
--- /dev/null
+++ b/src/librustdoc/html/render/type_layout.rs
@@ -0,0 +1,86 @@
+use askama::Template;
+
+use rustc_data_structures::captures::Captures;
+use rustc_hir::def_id::DefId;
+use rustc_middle::span_bug;
+use rustc_middle::ty::layout::LayoutError;
+use rustc_middle::ty::Adt;
+use rustc_span::symbol::Symbol;
+use rustc_target::abi::{Primitive, TagEncoding, Variants};
+
+use std::fmt;
+
+use crate::html::format::display_fn;
+use crate::html::render::Context;
+
+#[derive(Template)]
+#[template(path = "type_layout.html")]
+struct TypeLayout<'cx> {
+ variants: Vec<(Symbol, TypeLayoutSize)>,
+ type_layout_size: Result<TypeLayoutSize, LayoutError<'cx>>,
+}
+
+#[derive(Template)]
+#[template(path = "type_layout_size.html")]
+struct TypeLayoutSize {
+ is_unsized: bool,
+ is_uninhabited: bool,
+ size: u64,
+}
+
+pub(crate) fn document_type_layout<'a, 'cx: 'a>(
+ cx: &'a Context<'cx>,
+ ty_def_id: DefId,
+) -> impl fmt::Display + 'a + Captures<'cx> {
+ display_fn(move |f| {
+ if !cx.shared.show_type_layout {
+ return Ok(());
+ }
+
+ let tcx = cx.tcx();
+ let param_env = tcx.param_env(ty_def_id);
+ let ty = tcx.type_of(ty_def_id).subst_identity();
+ let type_layout = tcx.layout_of(param_env.and(ty));
+
+ let variants =
+ if let Ok(type_layout) = type_layout &&
+ let Variants::Multiple { variants, tag, tag_encoding, .. } =
+ type_layout.layout.variants() &&
+ !variants.is_empty()
+ {
+ let tag_size =
+ if let TagEncoding::Niche { .. } = tag_encoding {
+ 0
+ } else if let Primitive::Int(i, _) = tag.primitive() {
+ i.size().bytes()
+ } else {
+ span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
+ };
+ variants
+ .iter_enumerated()
+ .map(|(variant_idx, variant_layout)| {
+ let Adt(adt, _) = type_layout.ty.kind() else {
+ span_bug!(tcx.def_span(ty_def_id), "not an adt")
+ };
+ let name = adt.variant(variant_idx).name;
+ let is_unsized = variant_layout.abi.is_unsized();
+ let is_uninhabited = variant_layout.abi.is_uninhabited();
+ let size = variant_layout.size.bytes() - tag_size;
+ let type_layout_size = TypeLayoutSize { is_unsized, is_uninhabited, size };
+ (name, type_layout_size)
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
+
+ let type_layout_size = tcx.layout_of(param_env.and(ty)).map(|layout| {
+ let is_unsized = layout.abi.is_unsized();
+ let is_uninhabited = layout.abi.is_uninhabited();
+ let size = layout.size.bytes();
+ TypeLayoutSize { is_unsized, is_uninhabited, size }
+ });
+
+ Ok(TypeLayout { variants, type_layout_size }.render_into(f).unwrap())
+ })
+}
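
The variant sizes collected above subtract the tag size for explicitly tagged enums, so the figure the new template reports per variant tracks the payload rather than the whole enum. A rough, hedged illustration using only `std::mem::size_of` (the numbers rustdoc prints additionally depend on padding, niches, and the chosen discriminant size, so this is intuition rather than a guarantee):

    use std::mem::size_of;

    #[allow(dead_code)]
    enum Example {
        Small(u8),
        Large(u64),
    }

    fn main() {
        // The whole enum includes the discriminant plus any padding.
        println!("whole enum   : {} bytes", size_of::<Example>());
        // The per-variant figures in the Layout section are closer to these payload sizes.
        println!("Small payload: {} bytes", size_of::<u8>());
        println!("Large payload: {} bytes", size_of::<u64>());
    }
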
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index c8397967c..a26fa3749 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -145,7 +145,7 @@ impl DocVisitor for SourceCollector<'_, '_> {
Err(e) => {
self.cx.shared.tcx.sess.span_err(
span,
- &format!(
+ format!(
"failed to render source code for `{}`: {}",
filename.prefer_local(),
e,
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
index 6fbb45086..a7d5f4977 100644
--- a/src/librustdoc/html/static/css/rustdoc.css
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -1259,6 +1259,10 @@ a.tooltip:hover::after {
background-color: var(--search-error-code-background-color);
}
+.search-corrections {
+ font-weight: normal;
+}
+
#src-sidebar-toggle {
position: sticky;
top: 0;
diff --git a/src/librustdoc/html/static/css/settings.css b/src/librustdoc/html/static/css/settings.css
index d13c783d2..c1324c076 100644
--- a/src/librustdoc/html/static/css/settings.css
+++ b/src/librustdoc/html/static/css/settings.css
@@ -1,13 +1,11 @@
.setting-line {
margin: 1.2em 0.6em;
- position: relative;
}
.setting-radio input, .setting-check input {
margin-right: 0.3em;
height: 1.2rem;
width: 1.2rem;
- color: inherit;
border: 2px solid var(--settings-input-border-color);
outline: none;
-webkit-appearance: none;
@@ -16,11 +14,6 @@
.setting-radio input {
border-radius: 50%;
}
-.setting-check input:checked {
- content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 40 40">\
- <path d="M7,25L17,32L33,12" fill="none" stroke="black" stroke-width="5"/>\
- <path d="M7,23L17,30L33,10" fill="none" stroke="white" stroke-width="5"/></svg>');
-}
.setting-radio span, .setting-check span {
padding-bottom: 1px;
@@ -53,6 +46,9 @@
.setting-check input:checked {
background-color: var(--settings-input-color);
border-width: 1px;
+ content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 40 40">\
+ <path d="M7,25L17,32L33,12" fill="none" stroke="black" stroke-width="5"/>\
+ <path d="M7,23L17,30L33,10" fill="none" stroke="white" stroke-width="5"/></svg>');
}
.setting-radio input:focus, .setting-check input:focus {
box-shadow: 0 0 1px 1px var(--settings-input-color);
diff --git a/src/librustdoc/html/static/js/externs.js b/src/librustdoc/html/static/js/externs.js
index 4c81a0979..8b931f74e 100644
--- a/src/librustdoc/html/static/js/externs.js
+++ b/src/librustdoc/html/static/js/externs.js
@@ -9,6 +9,7 @@ function initSearch(searchIndex){}
/**
* @typedef {{
* name: string,
+ * id: integer,
* fullPath: Array<string>,
* pathWithoutLast: Array<string>,
* pathLast: string,
@@ -36,6 +37,8 @@ let ParserState;
* args: Array<QueryElement>,
* returned: Array<QueryElement>,
* foundElems: number,
+ * literalSearch: boolean,
+ * corrections: Array<{from: string, to: integer}>,
* }}
*/
let ParsedQuery;
@@ -139,7 +142,7 @@ let FunctionSearchType;
/**
* @typedef {{
- * name: (null|string),
+ * id: (null|number),
* ty: (null|number),
* generics: Array<FunctionType>,
* }}
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
index 6f5987e68..bccf675c1 100644
--- a/src/librustdoc/html/static/js/main.js
+++ b/src/librustdoc/html/static/js/main.js
@@ -275,8 +275,7 @@ function preLoadCss(cssUrl) {
document.title = searchState.titleBeforeSearch;
// We also remove the query parameter from the URL.
if (browserSupportsHistoryApi()) {
- history.replaceState(null, window.currentCrate + " - Rust",
- getNakedUrl() + window.location.hash);
+ history.replaceState(null, "", getNakedUrl() + window.location.hash);
}
},
getQueryStringParams: () => {
@@ -376,11 +375,7 @@ function preLoadCss(cssUrl) {
function handleEscape(ev) {
searchState.clearInputTimeout();
- switchDisplayedElement(null);
- if (browserSupportsHistoryApi()) {
- history.replaceState(null, window.currentCrate + " - Rust",
- getNakedUrl() + window.location.hash);
- }
+ searchState.hideResults();
ev.preventDefault();
searchState.defocus();
window.hideAllModals(true); // true = reset focus for tooltips
@@ -535,9 +530,11 @@ function preLoadCss(cssUrl) {
// ignored are included in the attribute `data-ignore-extern-crates`.
const script = document
.querySelector("script[data-ignore-extern-crates]");
- const ignoreExternCrates = script ? script.getAttribute("data-ignore-extern-crates") : "";
+ const ignoreExternCrates = new Set(
+ (script ? script.getAttribute("data-ignore-extern-crates") : "").split(",")
+ );
for (const lib of libs) {
- if (lib === window.currentCrate || ignoreExternCrates.indexOf(lib) !== -1) {
+ if (lib === window.currentCrate || ignoreExternCrates.has(lib)) {
continue;
}
const structs = imp[lib];
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
index 929dae81c..62afe40bb 100644
--- a/src/librustdoc/html/static/js/search.js
+++ b/src/librustdoc/html/static/js/search.js
@@ -58,6 +58,7 @@ function printTab(nb) {
}
iter += 1;
});
+ const isTypeSearch = (nb > 0 || iter === 1);
iter = 0;
onEachLazy(document.getElementById("results").childNodes, elem => {
if (nb === iter) {
@@ -70,6 +71,13 @@ function printTab(nb) {
});
if (foundCurrentTab && foundCurrentResultSet) {
searchState.currentTab = nb;
+ // Corrections only kick in on type-based searches.
+ const correctionsElem = document.getElementsByClassName("search-corrections");
+ if (isTypeSearch) {
+ removeClass(correctionsElem[0], "hidden");
+ } else {
+ addClass(correctionsElem[0], "hidden");
+ }
} else if (nb !== 0) {
printTab(0);
}
@@ -191,6 +199,13 @@ function initSearch(rawSearchIndex) {
*/
let searchIndex;
let currentResults;
+ /**
+ * Map from normalized type names to integers. Used to make type search
+ * more efficient.
+ *
+ * @type {Map<string, integer>}
+ */
+ let typeNameIdMap;
const ALIASES = new Map();
function isWhitespace(c) {
@@ -358,6 +373,7 @@ function initSearch(rawSearchIndex) {
parserState.typeFilter = null;
return {
name: name,
+ id: -1,
fullPath: pathSegments,
pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
pathLast: pathSegments[pathSegments.length - 1],
@@ -718,6 +734,7 @@ function initSearch(rawSearchIndex) {
foundElems: 0,
literalSearch: false,
error: null,
+ correction: null,
};
}
@@ -873,7 +890,7 @@ function initSearch(rawSearchIndex) {
*
* @param {Array<Result>} results_in_args
* @param {Array<Result>} results_returned
- * @param {Array<Result>} results_in_args
+ * @param {Array<Result>} results_others
* @param {ParsedQuery} parsedQuery
*
* @return {ResultsTable}
@@ -1091,48 +1108,50 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row - The object to check.
* @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} defaultDistance - This is the value to return in case there are no
- * generics.
*
- * @return {integer} - Returns the best match (if any) or `maxEditDistance + 1`.
+ * @return {boolean} - Returns true if a match, false otherwise.
*/
- function checkGenerics(row, elem, defaultDistance, maxEditDistance) {
- if (row.generics.length === 0) {
- return elem.generics.length === 0 ? defaultDistance : maxEditDistance + 1;
- } else if (row.generics.length > 0 && row.generics[0].name === null) {
- return checkGenerics(row.generics[0], elem, defaultDistance, maxEditDistance);
- }
- // The names match, but we need to be sure that all generics kinda
- // match as well.
+ function checkGenerics(row, elem) {
+ if (row.generics.length === 0 || elem.generics.length === 0) {
+ return false;
+ }
+ // This function is called if the names match, but we need to make
+ // sure that all generics match as well.
+ //
+ // This search engine implements order-agnostic unification. There
+ // should be no missing duplicates (generics have "bag semantics"),
+ // and the row is allowed to have extras.
if (elem.generics.length > 0 && row.generics.length >= elem.generics.length) {
const elems = new Map();
- for (const entry of row.generics) {
- if (entry.name === "") {
+ const addEntryToElems = function addEntryToElems(entry) {
+ if (entry.id === -1) {
// Pure generic, needs to check into it.
- if (checkGenerics(entry, elem, maxEditDistance + 1, maxEditDistance)
- !== 0) {
- return maxEditDistance + 1;
+ for (const inner_entry of entry.generics) {
+ addEntryToElems(inner_entry);
}
- continue;
+ return;
}
let currentEntryElems;
- if (elems.has(entry.name)) {
- currentEntryElems = elems.get(entry.name);
+ if (elems.has(entry.id)) {
+ currentEntryElems = elems.get(entry.id);
} else {
currentEntryElems = [];
- elems.set(entry.name, currentEntryElems);
+ elems.set(entry.id, currentEntryElems);
}
currentEntryElems.push(entry);
+ };
+ for (const entry of row.generics) {
+ addEntryToElems(entry);
}
// We need to find the type that matches the most to remove it in order
// to move forward.
const handleGeneric = generic => {
- if (!elems.has(generic.name)) {
+ if (!elems.has(generic.id)) {
return false;
}
- const matchElems = elems.get(generic.name);
+ const matchElems = elems.get(generic.id);
const matchIdx = matchElems.findIndex(tmp_elem => {
- if (checkGenerics(tmp_elem, generic, 0, maxEditDistance) !== 0) {
+ if (generic.generics.length > 0 && !checkGenerics(tmp_elem, generic)) {
return false;
}
return typePassesFilter(generic.typeFilter, tmp_elem.ty);
@@ -1142,7 +1161,7 @@ function initSearch(rawSearchIndex) {
}
matchElems.splice(matchIdx, 1);
if (matchElems.length === 0) {
- elems.delete(generic.name);
+ elems.delete(generic.id);
}
return true;
};
@@ -1152,17 +1171,17 @@ function initSearch(rawSearchIndex) {
// own type.
for (const generic of elem.generics) {
if (generic.typeFilter !== -1 && !handleGeneric(generic)) {
- return maxEditDistance + 1;
+ return false;
}
}
for (const generic of elem.generics) {
if (generic.typeFilter === -1 && !handleGeneric(generic)) {
- return maxEditDistance + 1;
+ return false;
}
}
- return 0;
+ return true;
}
- return maxEditDistance + 1;
+ return false;
}
/**
@@ -1172,17 +1191,15 @@ function initSearch(rawSearchIndex) {
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
*
- * @return {integer} - Returns an edit distance to the best match.
+ * @return {boolean} - Returns true if found, false otherwise.
*/
- function checkIfInGenerics(row, elem, maxEditDistance) {
- let dist = maxEditDistance + 1;
+ function checkIfInGenerics(row, elem) {
for (const entry of row.generics) {
- dist = Math.min(checkType(entry, elem, true, maxEditDistance), dist);
- if (dist === 0) {
- break;
+ if (checkType(entry, elem)) {
+ return true;
}
}
- return dist;
+ return false;
}
/**
@@ -1191,75 +1208,26 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
- * @param {boolean} literalSearch
*
- * @return {integer} - Returns an edit distance to the best match. If there is
- * no match, returns `maxEditDistance + 1`.
+ * @return {boolean} - Returns true if the type matches, false otherwise.
*/
- function checkType(row, elem, literalSearch, maxEditDistance) {
- if (row.name === null) {
+ function checkType(row, elem) {
+ if (row.id === -1) {
// This is a pure "generic" search, no need to run other checks.
- if (row.generics.length > 0) {
- return checkIfInGenerics(row, elem, maxEditDistance);
- }
- return maxEditDistance + 1;
+ return row.generics.length > 0 ? checkIfInGenerics(row, elem) : false;
}
- let dist;
- if (typePassesFilter(elem.typeFilter, row.ty)) {
- dist = editDistance(row.name, elem.name, maxEditDistance);
- } else {
- dist = maxEditDistance + 1;
- }
- if (literalSearch) {
- if (dist !== 0) {
- // The name didn't match, let's try to check if the generics do.
- if (elem.generics.length === 0) {
- const checkGeneric = row.generics.length > 0;
- if (checkGeneric && row.generics
- .findIndex(tmp_elem => tmp_elem.name === elem.name &&
- typePassesFilter(elem.typeFilter, tmp_elem.ty)) !== -1) {
- return 0;
- }
- }
- return maxEditDistance + 1;
- } else if (elem.generics.length > 0) {
- return checkGenerics(row, elem, maxEditDistance + 1, maxEditDistance);
- }
- return 0;
- } else if (row.generics.length > 0) {
- if (elem.generics.length === 0) {
- if (dist === 0) {
- return 0;
- }
- // The name didn't match so we now check if the type we're looking for is inside
- // the generics!
- dist = Math.min(dist, checkIfInGenerics(row, elem, maxEditDistance));
- return dist;
- } else if (dist > maxEditDistance) {
- // So our item's name doesn't match at all and has generics.
- //
- // Maybe it's present in a sub generic? For example "f<A<B<C>>>()", if we're
- // looking for "B<C>", we'll need to go down.
- return checkIfInGenerics(row, elem, maxEditDistance);
- } else {
- // At this point, the name kinda match and we have generics to check, so
- // let's go!
- const tmp_dist = checkGenerics(row, elem, dist, maxEditDistance);
- if (tmp_dist > maxEditDistance) {
- return maxEditDistance + 1;
- }
- // We compute the median value of both checks and return it.
- return (tmp_dist + dist) / 2;
+ if (row.id === elem.id && typePassesFilter(elem.typeFilter, row.ty)) {
+ if (elem.generics.length > 0) {
+ return checkGenerics(row, elem);
}
- } else if (elem.generics.length > 0) {
- // In this case, we were expecting generics but there isn't so we simply reject this
- // one.
- return maxEditDistance + 1;
+ return true;
}
- // No generics on our query or on the target type so we can return without doing
- // anything else.
- return dist;
+
+ // If the current item does not match, try [unboxing] the generic.
+ // [unboxing]:
+ // https://ndmitchell.com/downloads/slides-hoogle_fast_type_searching-09_aug_2008.pdf
+ return checkIfInGenerics(row, elem);
}
/**
@@ -1267,17 +1235,11 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
- * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
- * If there is no match, returns
- * `maxEditDistance + 1` and position: -1.
+ * @return {integer} - Returns the position of the match, or -1 if none.
*/
- function findArg(row, elem, maxEditDistance, skipPositions) {
- let dist = maxEditDistance + 1;
- let position = -1;
-
+ function findArg(row, elem, skipPositions) {
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
let i = 0;
for (const input of row.type.inputs) {
@@ -1285,24 +1247,13 @@ function initSearch(rawSearchIndex) {
i += 1;
continue;
}
- const typeDist = checkType(
- input,
- elem,
- parsedQuery.literalSearch,
- maxEditDistance
- );
- if (typeDist === 0) {
- return {dist: 0, position: i};
- }
- if (typeDist < dist) {
- dist = typeDist;
- position = i;
+ if (checkType(input, elem)) {
+ return i;
}
i += 1;
}
}
- dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
- return {dist, position};
+ return -1;
}
/**
@@ -1310,43 +1261,25 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
- * @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
- * @return {dist: integer, position: integer} - Returns an edit distance to the best match.
- * If there is no match, returns
- * `maxEditDistance + 1` and position: -1.
+ * @return {integer} - Returns the position of the matching item, or -1 if none.
*/
- function checkReturned(row, elem, maxEditDistance, skipPositions) {
- let dist = maxEditDistance + 1;
- let position = -1;
-
+ function checkReturned(row, elem, skipPositions) {
if (row && row.type && row.type.output.length > 0) {
- const ret = row.type.output;
let i = 0;
- for (const ret_ty of ret) {
+ for (const ret_ty of row.type.output) {
if (skipPositions.indexOf(i) !== -1) {
i += 1;
continue;
}
- const typeDist = checkType(
- ret_ty,
- elem,
- parsedQuery.literalSearch,
- maxEditDistance
- );
- if (typeDist === 0) {
- return {dist: 0, position: i};
- }
- if (typeDist < dist) {
- dist = typeDist;
- position = i;
+ if (checkType(ret_ty, elem)) {
+ return i;
}
i += 1;
}
}
- dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
- return {dist, position};
+ return -1;
}
function checkPath(contains, ty, maxEditDistance) {
@@ -1543,17 +1476,20 @@ function initSearch(rawSearchIndex) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
- let dist, index = -1, path_dist = 0;
+ let index = -1, path_dist = 0;
const fullId = row.id;
const searchWord = searchWords[pos];
- const in_args = findArg(row, elem, maxEditDistance, []);
- const returned = checkReturned(row, elem, maxEditDistance, []);
-
- // path_dist is 0 because no parent path information is currently stored
- // in the search index
- addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance);
- addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance);
+ const in_args = findArg(row, elem, []);
+ if (in_args !== -1) {
+ // path_dist is 0 because no parent path information is currently stored
+ // in the search index
+ addIntoResults(results_in_args, fullId, pos, -1, 0, 0, maxEditDistance);
+ }
+ const returned = checkReturned(row, elem, []);
+ if (returned !== -1) {
+ addIntoResults(results_returned, fullId, pos, -1, 0, 0, maxEditDistance);
+ }
if (!typePassesFilter(elem.typeFilter, row.ty)) {
return;
@@ -1574,16 +1510,6 @@ function initSearch(rawSearchIndex) {
index = row_index;
}
- // No need to check anything else if it's a "pure" generics search.
- if (elem.name.length === 0) {
- if (row.type !== null) {
- dist = checkGenerics(row.type, elem, maxEditDistance + 1, maxEditDistance);
- // path_dist is 0 because we know it's empty
- addIntoResults(results_others, fullId, pos, index, dist, 0, maxEditDistance);
- }
- return;
- }
-
if (elem.fullPath.length > 1) {
path_dist = checkPath(elem.pathWithoutLast, row, maxEditDistance);
if (path_dist > maxEditDistance) {
@@ -1598,7 +1524,7 @@ function initSearch(rawSearchIndex) {
return;
}
- dist = editDistance(searchWord, elem.pathLast, maxEditDistance);
+ const dist = editDistance(searchWord, elem.pathLast, maxEditDistance);
if (index === -1 && dist + path_dist > maxEditDistance) {
return;
@@ -1616,28 +1542,22 @@ function initSearch(rawSearchIndex) {
* @param {integer} pos - Position in the `searchIndex`.
* @param {Object} results
*/
- function handleArgs(row, pos, results, maxEditDistance) {
+ function handleArgs(row, pos, results) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
- let totalDist = 0;
- let nbDist = 0;
-
// If the result is too "bad", we return false and it ends this search.
function checkArgs(elems, callback) {
const skipPositions = [];
for (const elem of elems) {
// There is more than one parameter to the query so all checks should be "exact"
- const { dist, position } = callback(
+ const position = callback(
row,
elem,
- maxEditDistance,
skipPositions
);
- if (dist <= 1) {
- nbDist += 1;
- totalDist += dist;
+ if (position !== -1) {
skipPositions.push(position);
} else {
return false;
@@ -1652,11 +1572,7 @@ function initSearch(rawSearchIndex) {
return;
}
- if (nbDist === 0) {
- return;
- }
- const dist = Math.round(totalDist / nbDist);
- addIntoResults(results, row.id, pos, 0, dist, 0, maxEditDistance);
+ addIntoResults(results, row.id, pos, 0, 0, 0, Number.MAX_VALUE);
}
function innerRunQuery() {
@@ -1671,6 +1587,53 @@ function initSearch(rawSearchIndex) {
}
const maxEditDistance = Math.floor(queryLen / 3);
+ /**
+ * Convert names to ids in parsed query elements.
+ * This is not used for the "In Names" tab, but is used for the
+ * "In Params", "In Returns", and "In Function Signature" tabs.
+ *
+ * If there is no matching item, but a close-enough match, this
+ * function also records that correction.
+ *
+ * See `buildTypeMapIndex` for more information.
+ *
+ * @param {QueryElement} elem
+ */
+ function convertNameToId(elem) {
+ if (typeNameIdMap.has(elem.name)) {
+ elem.id = typeNameIdMap.get(elem.name);
+ } else if (!parsedQuery.literalSearch) {
+ let match = -1;
+ let matchDist = maxEditDistance + 1;
+ let matchName = "";
+ for (const [name, id] of typeNameIdMap) {
+ const dist = editDistance(name, elem.name, maxEditDistance);
+ if (dist <= matchDist && dist <= maxEditDistance) {
+ if (dist === matchDist && matchName > name) {
+ continue;
+ }
+ match = id;
+ matchDist = dist;
+ matchName = name;
+ }
+ }
+ if (match !== -1) {
+ parsedQuery.correction = matchName;
+ }
+ elem.id = match;
+ }
+ for (const elem2 of elem.generics) {
+ convertNameToId(elem2);
+ }
+ }
+
+ for (const elem of parsedQuery.elems) {
+ convertNameToId(elem);
+ }
+ for (const elem of parsedQuery.returned) {
+ convertNameToId(elem);
+ }
+
if (parsedQuery.foundElems === 1) {
if (parsedQuery.elems.length === 1) {
elem = parsedQuery.elems[0];
@@ -1695,22 +1658,23 @@ function initSearch(rawSearchIndex) {
in_returned = checkReturned(
row,
elem,
- maxEditDistance,
[]
);
- addIntoResults(
- results_others,
- row.id,
- i,
- -1,
- in_returned.dist,
- maxEditDistance
- );
+ if (in_returned !== -1) {
+ addIntoResults(
+ results_others,
+ row.id,
+ i,
+ -1,
+ 0,
+ Number.MAX_VALUE
+ );
+ }
}
}
} else if (parsedQuery.foundElems > 0) {
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
- handleArgs(searchIndex[i], i, results_others, maxEditDistance);
+ handleArgs(searchIndex[i], i, results_others);
}
}
}
@@ -2030,6 +1994,16 @@ function initSearch(rawSearchIndex) {
currentTab = 0;
}
+ if (results.query.correction !== null) {
+ const orig = results.query.returned.length > 0
+ ? results.query.returned[0].name
+ : results.query.elems[0].name;
+ output += "<h3 class=\"search-corrections\">" +
+ `Type "${orig}" not found. ` +
+ "Showing results for closest type name " +
+ `"${results.query.correction}" instead.</h3>`;
+ }
+
const resultsElem = document.createElement("div");
resultsElem.id = "results";
resultsElem.appendChild(ret_others[0]);
@@ -2109,6 +2083,34 @@ function initSearch(rawSearchIndex) {
}
/**
+ * Add an item to the type Name->ID map, or, if one already exists, use it.
+ * Returns the ID. If name is "" or null, returns -1 (pure generic).
+ *
+ * This is effectively string interning, so that function matching can be
+ * done more quickly. Two types with the same name but different item kinds
+ * get the same ID.
+ *
+ * @param {Map<string, integer>} typeNameIdMap
+ * @param {string} name
+ *
+ * @returns {integer}
+ */
+ function buildTypeMapIndex(typeNameIdMap, name) {
+
+ if (name === "" || name === null) {
+ return -1;
+ }
+
+ if (typeNameIdMap.has(name)) {
+ return typeNameIdMap.get(name);
+ } else {
+ const id = typeNameIdMap.size;
+ typeNameIdMap.set(name, id);
+ return id;
+ }
+ }
+
+ /**
* Convert a list of RawFunctionType / ID to object-based FunctionType.
*
* Crates often have lots of functions in them, and it's common to have a large number of
@@ -2126,7 +2128,7 @@ function initSearch(rawSearchIndex) {
*
* @return {Array<FunctionSearchType>}
*/
- function buildItemSearchTypeAll(types, lowercasePaths) {
+ function buildItemSearchTypeAll(types, lowercasePaths, typeNameIdMap) {
const PATH_INDEX_DATA = 0;
const GENERICS_DATA = 1;
return types.map(type => {
@@ -2136,11 +2138,17 @@ function initSearch(rawSearchIndex) {
generics = [];
} else {
pathIndex = type[PATH_INDEX_DATA];
- generics = buildItemSearchTypeAll(type[GENERICS_DATA], lowercasePaths);
+ generics = buildItemSearchTypeAll(
+ type[GENERICS_DATA],
+ lowercasePaths,
+ typeNameIdMap
+ );
}
return {
// `0` is used as a sentinel because it's fewer bytes than `null`
- name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ id: pathIndex === 0
+ ? -1
+ : buildTypeMapIndex(typeNameIdMap, lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
generics: generics,
};
@@ -2159,10 +2167,11 @@ function initSearch(rawSearchIndex) {
*
* @param {RawFunctionSearchType} functionSearchType
* @param {Array<{name: string, ty: number}>} lowercasePaths
+ * @param {Map<string, integer>} typeNameIdMap
*
* @return {null|FunctionSearchType}
*/
- function buildFunctionSearchType(functionSearchType, lowercasePaths) {
+ function buildFunctionSearchType(functionSearchType, lowercasePaths, typeNameIdMap) {
const INPUTS_DATA = 0;
const OUTPUT_DATA = 1;
// `0` is used as a sentinel because it's fewer bytes than `null`
@@ -2173,23 +2182,35 @@ function initSearch(rawSearchIndex) {
if (typeof functionSearchType[INPUTS_DATA] === "number") {
const pathIndex = functionSearchType[INPUTS_DATA];
inputs = [{
- name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ id: pathIndex === 0
+ ? -1
+ : buildTypeMapIndex(typeNameIdMap, lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
generics: [],
}];
} else {
- inputs = buildItemSearchTypeAll(functionSearchType[INPUTS_DATA], lowercasePaths);
+ inputs = buildItemSearchTypeAll(
+ functionSearchType[INPUTS_DATA],
+ lowercasePaths,
+ typeNameIdMap
+ );
}
if (functionSearchType.length > 1) {
if (typeof functionSearchType[OUTPUT_DATA] === "number") {
const pathIndex = functionSearchType[OUTPUT_DATA];
output = [{
- name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ id: pathIndex === 0
+ ? -1
+ : buildTypeMapIndex(typeNameIdMap, lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
generics: [],
}];
} else {
- output = buildItemSearchTypeAll(functionSearchType[OUTPUT_DATA], lowercasePaths);
+ output = buildItemSearchTypeAll(
+ functionSearchType[OUTPUT_DATA],
+ lowercasePaths,
+ typeNameIdMap
+ );
}
} else {
output = [];
@@ -2202,9 +2223,12 @@ function initSearch(rawSearchIndex) {
function buildIndex(rawSearchIndex) {
searchIndex = [];
/**
+ * List of normalized search words (ASCII lowercased, and underscores removed).
+ *
* @type {Array<string>}
*/
const searchWords = [];
+ typeNameIdMap = new Map();
const charA = "A".charCodeAt(0);
let currentIndex = 0;
let id = 0;
@@ -2337,7 +2361,11 @@ function initSearch(rawSearchIndex) {
path: itemPaths.has(i) ? itemPaths.get(i) : lastPath,
desc: itemDescs[i],
parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
- type: buildFunctionSearchType(itemFunctionSearchTypes[i], lowercasePaths),
+ type: buildFunctionSearchType(
+ itemFunctionSearchTypes[i],
+ lowercasePaths,
+ typeNameIdMap
+ ),
id: id,
normalizedName: word.indexOf("_") === -1 ? word : word.replace(/_/g, ""),
deprecated: deprecatedItems.has(i),
@@ -2412,10 +2440,6 @@ function initSearch(rawSearchIndex) {
const searchAfter500ms = () => {
searchState.clearInputTimeout();
if (searchState.input.value.length === 0) {
- if (browserSupportsHistoryApi()) {
- history.replaceState(null, window.currentCrate + " - Rust",
- getNakedUrl() + window.location.hash);
- }
searchState.hideResults();
} else {
searchState.timeout = setTimeout(search, 500);
diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js
index ebbe6c1ca..2cba32c1b 100644
--- a/src/librustdoc/html/static/js/settings.js
+++ b/src/librustdoc/html/static/js/settings.js
@@ -1,5 +1,5 @@
// Local js definitions:
-/* global getSettingValue, getVirtualKey, updateLocalStorage, updateTheme */
+/* global getSettingValue, updateLocalStorage, updateTheme */
/* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */
/* global MAIN_ID, getVar, getSettingsButton */
@@ -32,21 +32,6 @@
}
}
- function handleKey(ev) {
- // Don't interfere with browser shortcuts
- if (ev.ctrlKey || ev.altKey || ev.metaKey) {
- return;
- }
- switch (getVirtualKey(ev)) {
- case "Enter":
- case "Return":
- case "Space":
- ev.target.checked = !ev.target.checked;
- ev.preventDefault();
- break;
- }
- }
-
function showLightAndDark() {
removeClass(document.getElementById("preferred-light-theme"), "hidden");
removeClass(document.getElementById("preferred-dark-theme"), "hidden");
@@ -77,8 +62,6 @@
toggle.onchange = function() {
changeSetting(this.id, this.checked);
};
- toggle.onkeyup = handleKey;
- toggle.onkeyrelease = handleKey;
});
onEachLazy(settingsElement.querySelectorAll("input[type=\"radio\"]"), elem => {
const settingId = elem.name;
diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js
index 9aa755173..d999f3b36 100644
--- a/src/librustdoc/html/static/js/source-script.js
+++ b/src/librustdoc/html/static/js/source-script.js
@@ -52,12 +52,12 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
const files = document.createElement("div");
files.className = "files";
if (elem[FILES_OFFSET]) {
+ const w = window.location.href.split("#")[0];
for (const file_text of elem[FILES_OFFSET]) {
const file = document.createElement("a");
file.innerText = file_text;
file.href = rootPath + "src/" + fullPath + file_text + ".html";
file.addEventListener("click", closeSidebarIfMobile);
- const w = window.location.href.split("#")[0];
if (!hasFoundFile && w === file.href) {
file.className = "selected";
dirEntry.open = true;
diff --git a/src/librustdoc/html/templates/item_union.html b/src/librustdoc/html/templates/item_union.html
index a01457971..c21967005 100644
--- a/src/librustdoc/html/templates/item_union.html
+++ b/src/librustdoc/html/templates/item_union.html
@@ -1,8 +1,8 @@
<pre class="rust item-decl"><code>
- {{ self.render_attributes_in_pre() | safe }}
+ {{ self::item_template_render_attributes_in_pre(self.borrow()) | safe }}
{{ self.render_union() | safe }}
</code></pre>
-{{ self.document() | safe }}
+{{ self::item_template_document(self.borrow()) | safe }}
{% if self.fields_iter().peek().is_some() %}
<h2 id="fields" class="fields small-section-header">
Fields<a href="#fields" class="anchor">§</a>
@@ -19,5 +19,5 @@
{{ self.document_field(field) | safe }}
{% endfor %}
{% endif %}
-{{ self.render_assoc_items() | safe }}
-{{ self.document_type_layout() | safe }}
+{{ self::item_template_render_assoc_items(self.borrow()) | safe }}
+{{ self::item_template_document_type_layout(self.borrow()) | safe }}
diff --git a/src/librustdoc/html/templates/type_layout.html b/src/librustdoc/html/templates/type_layout.html
new file mode 100644
index 000000000..20e09a548
--- /dev/null
+++ b/src/librustdoc/html/templates/type_layout.html
@@ -0,0 +1,58 @@
+<h2 id="layout" class="small-section-header"> {# #}
+ Layout<a href="#layout" class="anchor">§</a> {# #}
+</h2> {# #}
+<div class="docblock"> {# #}
+ {% match type_layout_size %}
+ {% when Ok(type_layout_size) %}
+ <div class="warning"> {# #}
+ <p> {# #}
+ <strong>Note:</strong> Most layout information is <strong>completely {#+ #}
+ unstable</strong> and may even differ between compilations. {#+ #}
+ The only exception is types with certain <code>repr(...)</code> {#+ #}
+ attributes. Please see the Rust Reference’s {#+ #}
+ <a href="https://doc.rust-lang.org/reference/type-layout.html">“Type Layout”</a> {#+ #}
+ chapter for details on type layout guarantees. {# #}
+ </p> {# #}
+ </div> {# #}
+ <p><strong>Size:</strong> {{ type_layout_size|safe }}</p> {# #}
+ {% if !variants.is_empty() %}
+ <p> {# #}
+ <strong>Size for each variant:</strong> {# #}
+ </p> {# #}
+ <ul> {# #}
+ {% for (name, layout_size) in variants %}
+ <li> {# #}
+ <code>{{ name }}</code>: {#+ #}
+ {{ layout_size|safe }}
+ </li> {# #}
+ {% endfor %}
+ </ul> {# #}
+ {% endif %}
+ {# This kind of layout error can occur with valid code, e.g. if you try to
+ get the layout of a generic type such as `Vec<T>`. #}
+ {% when Err(LayoutError::Unknown(_)) %}
+ <p> {# #}
+ <strong>Note:</strong> Unable to compute type layout, {#+ #}
+ possibly due to this type having generic parameters. {#+ #}
+ Layout can only be computed for concrete, fully-instantiated types. {# #}
+ </p> {# #}
+ {# This kind of error probably can't happen with valid code, but we don't
+ want to panic and prevent the docs from building, so we just let the
+ user know that we couldn't compute the layout. #}
+ {% when Err(LayoutError::SizeOverflow(_)) %}
+ <p> {# #}
+ <strong>Note:</strong> Encountered an error during type layout; {#+ #}
+ the type was too big. {# #}
+ </p> {# #}
+ {% when Err(LayoutError::NormalizationFailure(_, _)) %}
+ <p> {# #}
+ <strong>Note:</strong> Encountered an error during type layout; {#+ #}
+ the type failed to be normalized. {# #}
+ </p> {# #}
+ {% when Err(LayoutError::Cycle) %}
+ <p> {# #}
+ <strong>Note:</strong> Encountered an error during type layout; {#+ #}
+            the type's layout depended on itself. {# #}
+ </p> {# #}
+ {% endmatch %}
+</div> {# #}
diff --git a/src/librustdoc/html/templates/type_layout_size.html b/src/librustdoc/html/templates/type_layout_size.html
new file mode 100644
index 000000000..9c2b39edc
--- /dev/null
+++ b/src/librustdoc/html/templates/type_layout_size.html
@@ -0,0 +1,12 @@
+{% if is_unsized %}
+ (unsized)
+{% else %}
+ {% if size == 1 %}
+ 1 byte
+ {% else %}
+ {{ size +}} bytes
+ {% endif %}
+ {% if is_uninhabited %}
+ {# +#} (<a href="https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited">uninhabited</a>)
+ {% endif %}
+{% endif %}
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
index cd6509607..935bb721f 100644
--- a/src/librustdoc/json/conversions.rs
+++ b/src/librustdoc/json/conversions.rs
@@ -40,13 +40,8 @@ impl JsonRenderer<'_> {
(String::from(&**link), id_from_item_default(id.into(), self.tcx))
})
.collect();
- let docs = item.attrs.collapsed_doc_value();
- let attrs = item
- .attrs
- .other_attrs
- .iter()
- .map(rustc_ast_pretty::pprust::attribute_to_string)
- .collect();
+ let docs = item.opt_doc_value();
+ let attrs = item.attributes(self.tcx, true);
let span = item.span(self.tcx);
let visibility = item.visibility(self.tcx);
let clean::Item { name, item_id, .. } = item;
@@ -538,6 +533,10 @@ pub(crate) fn from_trait_bound_modifier(
None => TraitBoundModifier::None,
Maybe => TraitBoundModifier::Maybe,
MaybeConst => TraitBoundModifier::MaybeConst,
+ // FIXME(negative-bounds): This bound should be rendered negative, but
+ // since that's experimental, maybe let's not add it to the rustdoc json
+ // API just now...
+ Negative => TraitBoundModifier::None,
}
}
@@ -575,7 +574,7 @@ impl FromWithTcx<clean::Type> for Type {
name: assoc.name.to_string(),
args: Box::new(assoc.args.into_tcx(tcx)),
self_type: Box::new(self_type.into_tcx(tcx)),
- trait_: trait_.into_tcx(tcx),
+ trait_: trait_.map(|trait_| trait_.into_tcx(tcx)),
},
}
}
@@ -665,7 +664,7 @@ impl FromWithTcx<clean::Impl> for Impl {
let clean::Impl { unsafety, generics, trait_, for_, items, polarity, kind } = impl_;
// FIXME: use something like ImplKind in JSON?
let (synthetic, blanket_impl) = match kind {
- clean::ImplKind::Normal | clean::ImplKind::FakeVaradic => (false, None),
+ clean::ImplKind::Normal | clean::ImplKind::FakeVariadic => (false, None),
clean::ImplKind::Auto => (true, None),
clean::ImplKind::Blanket(ty) => (false, Some(*ty)),
};
@@ -740,7 +739,7 @@ impl FromWithTcx<clean::Variant> for Variant {
impl FromWithTcx<clean::Discriminant> for Discriminant {
fn from_tcx(disr: clean::Discriminant, tcx: TyCtxt<'_>) -> Self {
Discriminant {
- // expr is only none if going through the inlineing path, which gets
+ // expr is only none if going through the inlining path, which gets
// `rustc_middle` types, not `rustc_hir`, but because JSON never inlines
// the expr is always some.
expr: disr.expr(tcx).unwrap(),
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index d6da6e099..9392dd4d0 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -279,7 +279,10 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
p.push(output.index.get(&output.root).unwrap().name.clone().unwrap());
p.set_extension("json");
let mut file = BufWriter::new(try_err!(File::create(&p), p));
- serde_json::ser::to_writer(&mut file, &output).unwrap();
+ self.tcx
+ .sess
+ .time("rustdoc_json_serialization", || serde_json::ser::to_writer(&mut file, &output))
+ .unwrap();
try_err!(file.flush(), p);
Ok(())
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 4a88dc525..12c622e02 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -7,14 +7,15 @@
#![feature(assert_matches)]
#![feature(box_patterns)]
#![feature(drain_filter)]
+#![feature(impl_trait_in_assoc_type)]
+#![feature(iter_intersperse)]
+#![feature(lazy_cell)]
#![feature(let_chains)]
-#![feature(test)]
#![feature(never_type)]
-#![feature(lazy_cell)]
-#![feature(type_ascription)]
-#![feature(iter_intersperse)]
+#![feature(round_char_boundary)]
+#![feature(test)]
#![feature(type_alias_impl_trait)]
-#![cfg_attr(not(bootstrap), feature(impl_trait_in_assoc_type))]
+#![feature(type_ascription)]
#![recursion_limit = "256"]
#![warn(rustc::internal)]
#![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
@@ -33,6 +34,7 @@ extern crate tracing;
// Dependencies listed in Cargo.toml do not need `extern crate`.
extern crate pulldown_cmark;
+extern crate rustc_abi;
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_attr;
@@ -154,15 +156,19 @@ pub fn main() {
}
}
- rustc_driver::install_ice_hook();
+ rustc_driver::install_ice_hook(
+ "https://github.com/rust-lang/rust/issues/new\
+ ?labels=C-bug%2C+I-ICE%2C+T-rustdoc&template=ice.md",
+ |_| (),
+ );
- // When using CI artifacts (with `download_stage1 = true`), tracing is unconditionally built
+ // When using CI artifacts with `download-rustc`, tracing is unconditionally built
// with `--features=static_max_level_info`, which disables almost all rustdoc logging. To avoid
// this, compile our own version of `tracing` that logs all levels.
// NOTE: this compiles both versions of tracing unconditionally, because
// - The compile time hit is not that bad, especially compared to rustdoc's incremental times, and
- // - Otherwise, there's no warning that logging is being ignored when `download_stage1 = true`.
- // NOTE: The reason this doesn't show double logging when `download_stage1 = false` and
+ // - Otherwise, there's no warning that logging is being ignored when `download-rustc` is enabled.
+ // NOTE: The reason this doesn't show double logging when `download-rustc = false` and
// `debug_logging = true` is because all rustc logging goes to its version of tracing (the one
// in the sysroot), and all of rustdoc's logging goes to its version (the one in Cargo.toml).
init_logging();
@@ -170,7 +176,11 @@ pub fn main() {
let exit_code = rustc_driver::catch_with_exit_code(|| match get_args() {
Some(args) => main_args(&args),
- _ => Err(ErrorGuaranteed::unchecked_claim_error_was_emitted()),
+ _ =>
+ {
+ #[allow(deprecated)]
+ Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ }
});
process::exit(exit_code);
}
@@ -182,11 +192,11 @@ fn init_logging() {
Ok("auto") | Err(VarError::NotPresent) => io::stdout().is_terminal(),
Ok(value) => early_error(
ErrorOutputType::default(),
- &format!("invalid log color value '{}': expected one of always, never, or auto", value),
+ format!("invalid log color value '{}': expected one of always, never, or auto", value),
),
Err(VarError::NotUnicode(value)) => early_error(
ErrorOutputType::default(),
- &format!(
+ format!(
"invalid log color value '{}': expected one of always, never, or auto",
value.to_string_lossy()
),
@@ -218,7 +228,7 @@ fn get_args() -> Option<Vec<String>> {
.map_err(|arg| {
early_warn(
ErrorOutputType::default(),
- &format!("Argument {} is not valid Unicode: {:?}", i, arg),
+ format!("Argument {} is not valid Unicode: {:?}", i, arg),
);
})
.ok()
@@ -675,7 +685,7 @@ fn wrap_return(diag: &rustc_errors::Handler, res: Result<(), String>) -> MainRes
match res {
Ok(()) => diag.has_errors().map_or(Ok(()), Err),
Err(err) => {
- let reported = diag.struct_err(&err).emit();
+ let reported = diag.struct_err(err).emit();
Err(reported)
}
}
@@ -691,10 +701,10 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
Ok(_) => tcx.sess.has_errors().map_or(Ok(()), Err),
Err(e) => {
let mut msg =
- tcx.sess.struct_err(&format!("couldn't generate documentation: {}", e.error));
+ tcx.sess.struct_err(format!("couldn't generate documentation: {}", e.error));
let file = e.file.display().to_string();
if !file.is_empty() {
- msg.note(&format!("failed to create or modify \"{}\"", file));
+ msg.note(format!("failed to create or modify \"{}\"", file));
}
Err(msg.emit())
}
@@ -702,16 +712,26 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
}
fn main_args(at_args: &[String]) -> MainResult {
+ // Throw away the first argument, the name of the binary.
+ // In case at_args is empty, as may happen when an empty argument array is
+ // passed to execve on some platforms, just use an empty slice.
+ //
+ // This situation used to be possible because arg_expand_all was called before
+ // the argument was removed, which allowed crashing the compiler by passing
+ // @empty_file as argv[0] with no further arguments.
+ let at_args = at_args.get(1..).unwrap_or_default();
+
let args = rustc_driver::args::arg_expand_all(at_args);
let mut options = getopts::Options::new();
for option in opts() {
(option.apply)(&mut options);
}
- let matches = match options.parse(&args[1..]) {
+ let matches = match options.parse(&args) {
Ok(m) => m,
Err(err) => {
- early_error(ErrorOutputType::default(), &err.to_string());
+ early_error(ErrorOutputType::default(), err.to_string());
}
};
@@ -723,11 +743,15 @@ fn main_args(at_args: &[String]) -> MainResult {
return if code == 0 {
Ok(())
} else {
+ #[allow(deprecated)]
Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
};
}
};
+ // Set parallel mode before error handler creation, which will create `Lock`s.
+ interface::set_thread_safe_mode(&options.unstable_opts);
+
let diag = core::new_handler(
options.error_format,
None,
diff --git a/src/librustdoc/lint.rs b/src/librustdoc/lint.rs
index 6d289eb99..749c1ff51 100644
--- a/src/librustdoc/lint.rs
+++ b/src/librustdoc/lint.rs
@@ -174,6 +174,17 @@ declare_rustdoc_lint! {
"codeblock could not be parsed as valid Rust or is empty"
}
+declare_rustdoc_lint! {
+ /// The `unescaped_backticks` lint detects unescaped backticks (\`), which usually
+ /// mean broken inline code. This is a `rustdoc` only lint, see the documentation
+ /// in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#unescaped_backticks
+ UNESCAPED_BACKTICKS,
+ Allow,
+ "detects unescaped backticks in doc comments"
+}
+
pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
vec![
BROKEN_INTRA_DOC_LINKS,
@@ -185,6 +196,7 @@ pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
INVALID_HTML_TAGS,
BARE_URLS,
MISSING_CRATE_LEVEL_DOCS,
+ UNESCAPED_BACKTICKS,
]
});
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index be5286b24..6ead0cd96 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -206,13 +206,7 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
let has_docs = !i.attrs.doc_strings.is_empty();
let mut tests = Tests { found_tests: 0 };
- find_testable_code(
- &i.attrs.collapsed_doc_value().unwrap_or_default(),
- &mut tests,
- ErrorCodes::No,
- false,
- None,
- );
+ find_testable_code(&i.doc_value(), &mut tests, ErrorCodes::No, false, None);
let has_doc_example = tests.found_tests != 0;
let hir_id = DocContext::as_local_hir_id(self.ctx.tcx, i.item_id).unwrap();
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 6b13e6c95..b6cd897d3 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -34,9 +34,7 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -
impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> {
fn visit_item(&mut self, item: &Item) {
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
-
- look_for_tests(self.cx, &dox, item);
+ look_for_tests(self.cx, &item.doc_value(), item);
self.visit_item_recur(item)
}
@@ -95,7 +93,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
}
if cx.tcx.is_doc_hidden(def_id.to_def_id())
- || inherits_doc_hidden(cx.tcx, def_id)
+ || inherits_doc_hidden(cx.tcx, def_id, None)
|| cx.tcx.def_span(def_id.to_def_id()).in_derive_expansion()
{
return false;
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 2cd9c8a87..061a572c4 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -8,12 +8,12 @@ use rustc_data_structures::{
fx::{FxHashMap, FxHashSet},
intern::Interned,
};
-use rustc_errors::{Applicability, Diagnostic};
+use rustc_errors::{Applicability, Diagnostic, DiagnosticMessage};
use rustc_hir::def::Namespace::*;
use rustc_hir::def::{DefKind, Namespace, PerNS};
use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
use rustc_hir::Mutability;
-use rustc_middle::ty::{fast_reject::TreatProjections, Ty, TyCtxt};
+use rustc_middle::ty::{Ty, TyCtxt};
use rustc_middle::{bug, ty};
use rustc_resolve::rustdoc::{has_primitive_or_keyword_docs, prepare_to_doc_link_resolution};
use rustc_resolve::rustdoc::{strip_generics_from_path, MalformedGenerics};
@@ -24,13 +24,14 @@ use rustc_span::BytePos;
use smallvec::{smallvec, SmallVec};
use std::borrow::Cow;
+use std::fmt::Display;
use std::mem;
use std::ops::Range;
use crate::clean::{self, utils::find_nearest_parent_module};
use crate::clean::{Crate, Item, ItemLink, PrimitiveType};
use crate::core::DocContext;
-use crate::html::markdown::{markdown_links, MarkdownLink};
+use crate::html::markdown::{markdown_links, MarkdownLink, MarkdownLinkRange};
use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
use crate::passes::Pass;
use crate::visit::DocVisitor;
@@ -148,7 +149,7 @@ impl TryFrom<ResolveRes> for Res {
Def(kind, id) => Ok(Res::Def(kind, id)),
PrimTy(prim) => Ok(Res::Primitive(PrimitiveType::from_hir(prim))),
// e.g. `#[derive]`
- NonMacroAttr(..) | Err => Result::Err(()),
+ ToolMod | NonMacroAttr(..) | Err => Result::Err(()),
other => bug!("unrecognized res {:?}", other),
}
}
@@ -247,7 +248,7 @@ struct DiagnosticInfo<'a> {
item: &'a Item,
dox: &'a str,
ori_link: &'a str,
- link_range: Range<usize>,
+ link_range: MarkdownLinkRange,
}
struct LinkCollector<'a, 'tcx> {
@@ -722,7 +723,7 @@ fn resolve_associated_trait_item<'a>(
.iter()
.flat_map(|&(impl_, trait_)| {
filter_assoc_items_by_name_and_namespace(
- cx.tcx,
+ tcx,
trait_,
Ident::with_dummy_span(item_name),
ns,
@@ -772,11 +773,10 @@ fn trait_impls_for<'a>(
module: DefId,
) -> FxHashSet<(DefId, DefId)> {
let tcx = cx.tcx;
- let iter = tcx.doc_link_traits_in_scope(module).iter().flat_map(|&trait_| {
- trace!("considering explicit impl for trait {:?}", trait_);
+ let mut impls = FxHashSet::default();
- // Look at each trait implementation to see if it's an impl for `did`
- tcx.find_map_relevant_impl(trait_, ty, TreatProjections::ForLookup, |impl_| {
+ for &trait_ in tcx.doc_link_traits_in_scope(module) {
+ tcx.for_each_relevant_impl(trait_, ty, |impl_| {
let trait_ref = tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
// Check if these are the same type.
let impl_type = trait_ref.skip_binder().self_ty();
@@ -800,10 +800,13 @@ fn trait_impls_for<'a>(
_ => false,
};
- if saw_impl { Some((impl_, trait_)) } else { None }
- })
- });
- iter.collect()
+ if saw_impl {
+ impls.insert((impl_, trait_));
+ }
+ });
+ }
+
+ impls
}
/// Check for resolve collisions between a trait and its derive.
@@ -830,7 +833,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
enum PreprocessingError {
/// User error: `[std#x#y]` is not valid
MultipleAnchors,
- Disambiguator(Range<usize>, String),
+ Disambiguator(MarkdownLinkRange, String),
MalformedGenerics(MalformedGenerics, String),
}
@@ -839,7 +842,7 @@ impl PreprocessingError {
match self {
PreprocessingError::MultipleAnchors => report_multiple_anchors(cx, diag_info),
PreprocessingError::Disambiguator(range, msg) => {
- disambiguator_error(cx, diag_info, range.clone(), msg)
+ disambiguator_error(cx, diag_info, range.clone(), msg.as_str())
}
PreprocessingError::MalformedGenerics(err, path_str) => {
report_malformed_generics(cx, diag_info, *err, path_str)
@@ -870,6 +873,7 @@ pub(crate) struct PreprocessedMarkdownLink(
/// `link_buffer` is needed for lifetime reasons; it will always be overwritten and the contents ignored.
fn preprocess_link(
ori_link: &MarkdownLink,
+ dox: &str,
) -> Option<Result<PreprocessingInfo, PreprocessingError>> {
// [] is most likely not supposed to be a link
if ori_link.link.is_empty() {
@@ -903,9 +907,15 @@ fn preprocess_link(
Err((err_msg, relative_range)) => {
// Only report error if we would not have ignored this link. See issue #83859.
if !should_ignore_link_with_disambiguators(link) {
- let no_backticks_range = range_between_backticks(ori_link);
- let disambiguator_range = (no_backticks_range.start + relative_range.start)
- ..(no_backticks_range.start + relative_range.end);
+ let disambiguator_range = match range_between_backticks(&ori_link.range, dox) {
+ MarkdownLinkRange::Destination(no_backticks_range) => {
+ MarkdownLinkRange::Destination(
+ (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end),
+ )
+ }
+ mdlr @ MarkdownLinkRange::WholeLink(_) => mdlr,
+ };
return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
} else {
return None;
@@ -944,7 +954,7 @@ fn preprocess_link(
fn preprocessed_markdown_links(s: &str) -> Vec<PreprocessedMarkdownLink> {
markdown_links(s, |link| {
- preprocess_link(&link).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
+ preprocess_link(&link, s).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
})
}
@@ -1057,22 +1067,12 @@ impl LinkCollector<'_, '_> {
// valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677
// for discussion on the matter.
let kind = self.cx.tcx.def_kind(id);
- self.verify_disambiguator(
- path_str,
- ori_link,
- kind,
- id,
- disambiguator,
- item,
- &diag_info,
- )?;
+ self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?;
} else {
match disambiguator {
Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
Some(other) => {
- self.report_disambiguator_mismatch(
- path_str, ori_link, other, res, &diag_info,
- );
+ self.report_disambiguator_mismatch(path_str, other, res, &diag_info);
return None;
}
}
@@ -1093,7 +1093,6 @@ impl LinkCollector<'_, '_> {
};
self.verify_disambiguator(
path_str,
- ori_link,
kind_for_dis,
id_for_dis,
disambiguator,
@@ -1115,7 +1114,6 @@ impl LinkCollector<'_, '_> {
fn verify_disambiguator(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
kind: DefKind,
id: DefId,
disambiguator: Option<Disambiguator>,
@@ -1139,7 +1137,7 @@ impl LinkCollector<'_, '_> {
=> {}
(actual, Some(Disambiguator::Kind(expected))) if actual == expected => {}
(_, Some(specified @ Disambiguator::Kind(_) | specified @ Disambiguator::Primitive)) => {
- self.report_disambiguator_mismatch(path_str,ori_link,specified, Res::Def(kind, id),diag_info);
+ self.report_disambiguator_mismatch(path_str, specified, Res::Def(kind, id), diag_info);
return None;
}
}
@@ -1161,14 +1159,13 @@ impl LinkCollector<'_, '_> {
fn report_disambiguator_mismatch(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
specified: Disambiguator,
resolved: Res,
diag_info: &DiagnosticInfo<'_>,
) {
// The resolved item did not match the disambiguator; give a better error than 'not found'
let msg = format!("incompatible link kind for `{}`", path_str);
- let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>| {
+ let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>, link_range| {
let note = format!(
"this link resolved to {} {}, which is not {} {}",
resolved.article(),
@@ -1177,18 +1174,28 @@ impl LinkCollector<'_, '_> {
specified.descr(),
);
if let Some(sp) = sp {
- diag.span_label(sp, &note);
+ diag.span_label(sp, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
- suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
+ suggest_disambiguator(resolved, diag, path_str, link_range, sp, diag_info);
};
- report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
+ report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, callback);
}
- fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &Range<usize>, item: &Item) {
- let span = super::source_span_for_markdown_range(self.cx.tcx, dox, ori_link, &item.attrs)
- .unwrap_or_else(|| item.attr_span(self.cx.tcx));
+ fn report_rawptr_assoc_feature_gate(
+ &self,
+ dox: &str,
+ ori_link: &MarkdownLinkRange,
+ item: &Item,
+ ) {
+ let span = super::source_span_for_markdown_range(
+ self.cx.tcx,
+ dox,
+ ori_link.inner_range(),
+ &item.attrs,
+ )
+ .unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
&self.cx.tcx.sess.parse_sess,
sym::intra_doc_pointers,
@@ -1293,7 +1300,8 @@ impl LinkCollector<'_, '_> {
}
}
}
- resolution_failure(self, diag, path_str, disambiguator, smallvec![err])
+ resolution_failure(self, diag, path_str, disambiguator, smallvec![err]);
+ return vec![];
}
}
}
@@ -1329,13 +1337,14 @@ impl LinkCollector<'_, '_> {
.fold(0, |acc, res| if let Ok(res) = res { acc + res.len() } else { acc });
if len == 0 {
- return resolution_failure(
+ resolution_failure(
self,
diag,
path_str,
disambiguator,
candidates.into_iter().filter_map(|res| res.err()).collect(),
);
+ return vec![];
} else if len == 1 {
candidates.into_iter().filter_map(|res| res.ok()).flatten().collect::<Vec<_>>()
} else {
@@ -1349,7 +1358,7 @@ impl LinkCollector<'_, '_> {
if has_derive_trait_collision {
candidates.macro_ns = None;
}
- candidates.into_iter().filter_map(|res| res).flatten().collect::<Vec<_>>()
+ candidates.into_iter().flatten().flatten().collect::<Vec<_>>()
}
}
}
@@ -1366,16 +1375,23 @@ impl LinkCollector<'_, '_> {
/// [`Foo`]
/// ^^^
/// ```
-fn range_between_backticks(ori_link: &MarkdownLink) -> Range<usize> {
- let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0);
- let before_second_backtick_group = ori_link
- .link
+///
+/// This function does nothing if `ori_link.range` is a `MarkdownLinkRange::WholeLink`.
+fn range_between_backticks(ori_link_range: &MarkdownLinkRange, dox: &str) -> MarkdownLinkRange {
+ let range = match ori_link_range {
+ mdlr @ MarkdownLinkRange::WholeLink(_) => return mdlr.clone(),
+ MarkdownLinkRange::Destination(inner) => inner.clone(),
+ };
+ let ori_link_text = &dox[range.clone()];
+ let after_first_backtick_group = ori_link_text.bytes().position(|b| b != b'`').unwrap_or(0);
+ let before_second_backtick_group = ori_link_text
.bytes()
.skip(after_first_backtick_group)
.position(|b| b == b'`')
- .unwrap_or(ori_link.link.len());
- (ori_link.range.start + after_first_backtick_group)
- ..(ori_link.range.start + before_second_backtick_group)
+ .unwrap_or(ori_link_text.len());
+ MarkdownLinkRange::Destination(
+ (range.start + after_first_backtick_group)..(range.start + before_second_backtick_group),
+ )
}
/// Returns true if we should ignore `link` due to it being unlikely
@@ -1419,6 +1435,7 @@ impl Disambiguator {
if let Some(idx) = link.find('@') {
let (prefix, rest) = link.split_at(idx);
let d = match prefix {
+ // If you update this list, please also update the relevant rustdoc book section!
"struct" => Kind(DefKind::Struct),
"enum" => Kind(DefKind::Enum),
"trait" => Kind(DefKind::Trait),
@@ -1437,6 +1454,7 @@ impl Disambiguator {
Ok(Some((d, &rest[1..], &rest[1..])))
} else {
let suffixes = [
+ // If you update this list, please also update the relevant rustdoc book section!
("!()", DefKind::Macro(MacroKind::Bang)),
("!{}", DefKind::Macro(MacroKind::Bang)),
("![]", DefKind::Macro(MacroKind::Bang)),
@@ -1523,14 +1541,23 @@ impl Suggestion {
sp: rustc_span::Span,
) -> Vec<(rustc_span::Span, String)> {
let inner_sp = match ori_link.find('(') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => sp.with_hi(sp.lo() + BytePos(index as _)),
None => sp,
};
let inner_sp = match ori_link.find('!') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_hi(inner_sp.lo() + BytePos(index as _)),
None => inner_sp,
};
let inner_sp = match ori_link.find('@') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_lo(inner_sp.lo() + BytePos(index as u32 + 1)),
None => inner_sp,
};
@@ -1575,9 +1602,9 @@ impl Suggestion {
fn report_diagnostic(
tcx: TyCtxt<'_>,
lint: &'static Lint,
- msg: &str,
+ msg: impl Into<DiagnosticMessage> + Display,
DiagnosticInfo { item, ori_link: _, dox, link_range }: &DiagnosticInfo<'_>,
- decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>),
+ decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>, MarkdownLinkRange),
) {
let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
else {
@@ -1589,16 +1616,32 @@ fn report_diagnostic(
let sp = item.attr_span(tcx);
tcx.struct_span_lint_hir(lint, hir_id, sp, msg, |lint| {
- let span =
- super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
- if dox.as_bytes().get(link_range.start) == Some(&b'`')
- && dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
- {
- sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
- } else {
- sp
- }
- });
+ let (span, link_range) = match link_range {
+ MarkdownLinkRange::Destination(md_range) => {
+ let mut md_range = md_range.clone();
+ let sp = super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs)
+ .map(|mut sp| {
+ while dox.as_bytes().get(md_range.start) == Some(&b' ')
+ || dox.as_bytes().get(md_range.start) == Some(&b'`')
+ {
+ md_range.start += 1;
+ sp = sp.with_lo(sp.lo() + BytePos(1));
+ }
+ while dox.as_bytes().get(md_range.end - 1) == Some(&b' ')
+ || dox.as_bytes().get(md_range.end - 1) == Some(&b'`')
+ {
+ md_range.end -= 1;
+ sp = sp.with_hi(sp.hi() - BytePos(1));
+ }
+ sp
+ });
+ (sp, MarkdownLinkRange::Destination(md_range))
+ }
+ MarkdownLinkRange::WholeLink(md_range) => (
+ super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs),
+ link_range.clone(),
+ ),
+ };
if let Some(sp) = span {
lint.set_span(sp);
@@ -1607,21 +1650,22 @@ fn report_diagnostic(
// ^ ~~~~
// | link_range
// last_new_line_offset
- let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
+ let md_range = link_range.inner_range().clone();
+ let last_new_line_offset = dox[..md_range.start].rfind('\n').map_or(0, |n| n + 1);
let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
- // Print the line containing the `link_range` and manually mark it with '^'s.
- lint.note(&format!(
+ // Print the line containing the `md_range` and manually mark it with '^'s.
+ lint.note(format!(
"the link appears in this line:\n\n{line}\n\
{indicator: <before$}{indicator:^<found$}",
line = line,
indicator = "",
- before = link_range.start - last_new_line_offset,
- found = link_range.len(),
+ before = md_range.start - last_new_line_offset,
+ found = md_range.len(),
));
}
- decorate(lint, span);
+ decorate(lint, span, link_range);
lint
});
@@ -1638,15 +1682,14 @@ fn resolution_failure(
path_str: &str,
disambiguator: Option<Disambiguator>,
kinds: SmallVec<[ResolutionFailure<'_>; 3]>,
-) -> Vec<(Res, Option<DefId>)> {
+) {
let tcx = collector.cx.tcx;
- let mut recovered_res = None;
report_diagnostic(
tcx,
BROKEN_INTRA_DOC_LINKS,
- &format!("unresolved link to `{}`", path_str),
+ format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, link_range| {
let item = |res: Res| format!("the {} `{}`", res.descr(), res.name(tcx),);
let assoc_item_not_allowed = |res: Res| {
let name = res.name(tcx);
@@ -1700,7 +1743,7 @@ fn resolution_failure(
if let Ok(v_res) = collector.resolve(start, ns, item_id, module_id) {
debug!("found partial_res={:?}", v_res);
if !v_res.is_empty() {
- *partial_res = Some(full_res(collector.cx.tcx, v_res[0]));
+ *partial_res = Some(full_res(tcx, v_res[0]));
*unresolved = end.into();
break 'outer;
}
@@ -1725,26 +1768,32 @@ fn resolution_failure(
format!("no item named `{}` in scope", unresolved)
};
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
if !path_str.contains("::") {
if disambiguator.map_or(true, |d| d.ns() == MacroNS)
- && let Some(&res) = collector.cx.tcx.resolutions(()).all_macro_rules
- .get(&Symbol::intern(path_str))
+ && collector
+ .cx
+ .tcx
+ .resolutions(())
+ .all_macro_rules
+ .get(&Symbol::intern(path_str))
+ .is_some()
{
diag.note(format!(
"`macro_rules` named `{path_str}` exists in this crate, \
but it is not in scope at this link's location"
));
- recovered_res = res.try_into().ok().map(|res| (res, None));
} else {
// If the link has `::` in it, assume it was meant to be an
// intra-doc link. Otherwise, the `[]` might be unrelated.
- diag.help("to escape `[` and `]` characters, \
- add '\\' before them like `\\[` or `\\]`");
+ diag.help(
+ "to escape `[` and `]` characters, \
+ add '\\' before them like `\\[` or `\\]`",
+ );
}
}
@@ -1776,9 +1825,9 @@ fn resolution_failure(
let variant = res.name(tcx);
let note = format!("variant `{variant}` has no such field");
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
return;
}
@@ -1801,9 +1850,9 @@ fn resolution_failure(
| InlineConst => {
let note = assoc_item_not_allowed(res);
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
return;
}
@@ -1823,9 +1872,9 @@ fn resolution_failure(
unresolved,
);
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
continue;
@@ -1833,7 +1882,14 @@ fn resolution_failure(
let note = match failure {
ResolutionFailure::NotResolved { .. } => unreachable!("handled above"),
ResolutionFailure::WrongNamespace { res, expected_ns } => {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(
+ res,
+ diag,
+ path_str,
+ link_range.clone(),
+ sp,
+ &diag_info,
+ );
format!(
"this link resolves to {}, which is not in the {} namespace",
@@ -1843,39 +1899,34 @@ fn resolution_failure(
}
};
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
}
},
);
-
- match recovered_res {
- Some(r) => vec![r],
- None => Vec::new(),
- }
}
fn report_multiple_anchors(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>) {
let msg = format!("`{}` contains multiple anchors", diag_info.ori_link);
- anchor_failure(cx, diag_info, &msg, 1)
+ anchor_failure(cx, diag_info, msg, 1)
}
fn report_anchor_conflict(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>, def_id: DefId) {
let (link, kind) = (diag_info.ori_link, Res::from_def_id(cx.tcx, def_id).descr());
let msg = format!("`{link}` contains an anchor, but links to {kind}s are already anchored");
- anchor_failure(cx, diag_info, &msg, 0)
+ anchor_failure(cx, diag_info, msg, 0)
}
/// Report an anchor failure.
fn anchor_failure(
cx: &DocContext<'_>,
diag_info: DiagnosticInfo<'_>,
- msg: &str,
+ msg: String,
anchor_idx: usize,
) {
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp, _link_range| {
if let Some(mut sp) = sp {
if let Some((fragment_offset, _)) =
diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)
@@ -1891,16 +1942,16 @@ fn anchor_failure(
fn disambiguator_error(
cx: &DocContext<'_>,
mut diag_info: DiagnosticInfo<'_>,
- disambiguator_range: Range<usize>,
- msg: &str,
+ disambiguator_range: MarkdownLinkRange,
+ msg: impl Into<DiagnosticMessage> + Display,
) {
diag_info.link_range = disambiguator_range;
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp, _link_range| {
let msg = format!(
"see {}/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
crate::DOC_RUST_LANG_ORG_CHANNEL
);
- diag.note(&msg);
+ diag.note(msg);
});
}
@@ -1913,9 +1964,9 @@ fn report_malformed_generics(
report_diagnostic(
cx.tcx,
BROKEN_INTRA_DOC_LINKS,
- &format!("unresolved link to `{}`", path_str),
+ format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, _link_range| {
let note = match err {
MalformedGenerics::UnbalancedAngleBrackets => "unbalanced angle brackets",
MalformedGenerics::MissingType => "missing type for generic parameters",
@@ -1988,7 +2039,7 @@ fn ambiguity_error(
}
}
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "ambiguous link");
} else {
@@ -1996,7 +2047,7 @@ fn ambiguity_error(
}
for res in kinds {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(res, diag, path_str, link_range.clone(), sp, diag_info);
}
});
true
@@ -2008,22 +2059,28 @@ fn suggest_disambiguator(
res: Res,
diag: &mut Diagnostic,
path_str: &str,
- ori_link: &str,
+ link_range: MarkdownLinkRange,
sp: Option<rustc_span::Span>,
+ diag_info: &DiagnosticInfo<'_>,
) {
let suggestion = res.disambiguator_suggestion();
let help = format!("to link to the {}, {}", res.descr(), suggestion.descr());
- if let Some(sp) = sp {
+ let ori_link = match link_range {
+ MarkdownLinkRange::Destination(range) => Some(&diag_info.dox[range]),
+ MarkdownLinkRange::WholeLink(_) => None,
+ };
+
+ if let (Some(sp), Some(ori_link)) = (sp, ori_link) {
let mut spans = suggestion.as_help_span(path_str, ori_link, sp);
if spans.len() > 1 {
- diag.multipart_suggestion(&help, spans, Applicability::MaybeIncorrect);
+ diag.multipart_suggestion(help, spans, Applicability::MaybeIncorrect);
} else {
let (sp, suggestion_text) = spans.pop().unwrap();
- diag.span_suggestion_verbose(sp, &help, suggestion_text, Applicability::MaybeIncorrect);
+ diag.span_suggestion_verbose(sp, help, suggestion_text, Applicability::MaybeIncorrect);
}
} else {
- diag.help(&format!("{}: {}", help, suggestion.as_help(path_str)));
+ diag.help(format!("{}: {}", help, suggestion.as_help(path_str)));
}
}
@@ -2040,7 +2097,7 @@ fn privacy_error(cx: &DocContext<'_>, diag_info: &DiagnosticInfo<'_>, path_str:
let msg =
format!("public documentation for `{}` links to private item `{}`", item_name, path_str);
- report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, _link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "this item is private");
}
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 8d204ddb7..fbf827cce 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -19,9 +19,10 @@ pub(crate) const COLLECT_TRAIT_IMPLS: Pass = Pass {
};
pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let tcx = cx.tcx;
// We need to check if there are errors before running this pass because it would crash when
// we try to get auto and blanket implementations.
- if cx.tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
return krate;
}
@@ -32,8 +33,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
});
let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
- let prims: FxHashSet<PrimitiveType> =
- local_crate.primitives(cx.tcx).iter().map(|p| p.1).collect();
+ let prims: FxHashSet<PrimitiveType> = local_crate.primitives(tcx).iter().map(|p| p.1).collect();
let crate_items = {
let mut coll = ItemCollector::new();
@@ -46,9 +46,9 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// External trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
- for &cnum in cx.tcx.crates(()) {
- for &impl_def_id in cx.tcx.trait_impls_in_crate(cnum) {
+ let _prof_timer = tcx.sess.prof.generic_activity("build_extern_trait_impls");
+ for &cnum in tcx.crates(()) {
+ for &impl_def_id in tcx.trait_impls_in_crate(cnum) {
inline::build_impl(cx, impl_def_id, None, &mut new_items_external);
}
}
@@ -56,14 +56,13 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// Local trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_local_trait_impls");
+ let _prof_timer = tcx.sess.prof.generic_activity("build_local_trait_impls");
let mut attr_buf = Vec::new();
- for &impl_def_id in cx.tcx.trait_impls_in_crate(LOCAL_CRATE) {
- let mut parent = Some(cx.tcx.parent(impl_def_id));
+ for &impl_def_id in tcx.trait_impls_in_crate(LOCAL_CRATE) {
+ let mut parent = Some(tcx.parent(impl_def_id));
while let Some(did) = parent {
attr_buf.extend(
- cx.tcx
- .get_attrs(did, sym::doc)
+ tcx.get_attrs(did, sym::doc)
.filter(|attr| {
if let Some([attr]) = attr.meta_item_list().as_deref() {
attr.has_name(sym::cfg)
@@ -73,25 +72,24 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
})
.cloned(),
);
- parent = cx.tcx.opt_parent(did);
+ parent = tcx.opt_parent(did);
}
inline::build_impl(cx, impl_def_id, Some((&attr_buf, None)), &mut new_items_local);
attr_buf.clear();
}
}
- cx.tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
- for def_id in PrimitiveType::all_impls(cx.tcx) {
+ tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
+ for def_id in PrimitiveType::all_impls(tcx) {
// Try to inline primitive impls from other crates.
if !def_id.is_local() {
inline::build_impl(cx, def_id, None, &mut new_items_external);
}
}
- for (prim, did) in PrimitiveType::primitive_locations(cx.tcx) {
+ for (prim, did) in PrimitiveType::primitive_locations(tcx) {
// Do not calculate blanket impl list for docs that are not going to be rendered.
// While the `impl` blocks themselves are only in `libcore`, the module with `doc`
// attached is directly included in `libstd` as well.
- let tcx = cx.tcx;
if did.is_local() {
for def_id in prim.impls(tcx).filter(|def_id| {
// Avoid including impl blocks with filled-in generics.
@@ -157,7 +155,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// scan through included items ahead of time to splice in Deref targets to the "valid" sets
for it in new_items_external.iter().chain(new_items_local.iter()) {
if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait() &&
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() &&
cleaner.keep_impl(for_, true)
{
let target = items
@@ -199,7 +197,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
if let ImplItem(box Impl { ref for_, ref trait_, ref kind, .. }) = *it.kind {
cleaner.keep_impl(
for_,
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait(),
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait(),
) || trait_.as_ref().map_or(false, |t| cleaner.keep_impl_with_def_id(t.def_id().into()))
|| kind.is_blanket()
} else {
diff --git a/src/librustdoc/passes/lint.rs b/src/librustdoc/passes/lint.rs
index 97031c4f0..e653207b9 100644
--- a/src/librustdoc/passes/lint.rs
+++ b/src/librustdoc/passes/lint.rs
@@ -4,6 +4,7 @@
mod bare_urls;
mod check_code_block_syntax;
mod html_tags;
+mod unescaped_backticks;
use super::Pass;
use crate::clean::*;
@@ -27,6 +28,7 @@ impl<'a, 'tcx> DocVisitor for Linter<'a, 'tcx> {
bare_urls::visit_item(self.cx, item);
check_code_block_syntax::visit_item(self.cx, item);
html_tags::visit_item(self.cx, item);
+ unescaped_backticks::visit_item(self.cx, item);
self.visit_item_recur(item)
}
diff --git a/src/librustdoc/passes/lint/bare_urls.rs b/src/librustdoc/passes/lint/bare_urls.rs
index 423230cfe..a10d5fdb4 100644
--- a/src/librustdoc/passes/lint/bare_urls.rs
+++ b/src/librustdoc/passes/lint/bare_urls.rs
@@ -18,7 +18,7 @@ pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item) {
// If non-local, no need to check anything.
return;
};
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ let dox = item.doc_value();
if !dox.is_empty() {
let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
let sp = source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
index 26fbb03a4..f489f5081 100644
--- a/src/librustdoc/passes/lint/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -17,7 +17,7 @@ use crate::html::markdown::{self, RustCodeBlock};
use crate::passes::source_span_for_markdown_range;
pub(crate) fn visit_item(cx: &DocContext<'_>, item: &clean::Item) {
- if let Some(dox) = &item.attrs.collapsed_doc_value() {
+ if let Some(dox) = &item.opt_doc_value() {
let sp = item.attr_span(cx.tcx);
let extra = crate::html::markdown::ExtraInfo::new(cx.tcx, item.item_id.expect_def_id(), sp);
for code_block in markdown::rust_code_blocks(dox, &extra) {
@@ -108,7 +108,7 @@ fn check_rust_syntax(
// just give a `help` instead.
lint.span_help(
sp.from_inner(InnerSpan::new(0, 3)),
- &format!("{}: ```text", explanation),
+ format!("{}: ```text", explanation),
);
} else if empty_block {
lint.span_suggestion(
@@ -119,12 +119,12 @@ fn check_rust_syntax(
);
}
} else if empty_block || is_ignore {
- lint.help(&format!("{}: ```text", explanation));
+ lint.help(format!("{}: ```text", explanation));
}
// FIXME(#67563): Provide more context for these errors by displaying the spans inline.
for message in buffer.messages.iter() {
- lint.note(message);
+ lint.note(message.clone());
}
lint
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 4f72df5a5..f0403647a 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -15,7 +15,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
// If non-local, no need to check anything.
else { return };
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ let dox = item.doc_value();
if !dox.is_empty() {
let report_diag = |msg: &str, range: &Range<usize>, is_open_tag: bool| {
let sp = match source_span_for_markdown_range(tcx, &dox, range, &item.attrs) {
diff --git a/src/librustdoc/passes/lint/unescaped_backticks.rs b/src/librustdoc/passes/lint/unescaped_backticks.rs
new file mode 100644
index 000000000..865212205
--- /dev/null
+++ b/src/librustdoc/passes/lint/unescaped_backticks.rs
@@ -0,0 +1,416 @@
+//! Detects unescaped backticks (\`) in doc comments.
+
+use crate::clean::Item;
+use crate::core::DocContext;
+use crate::html::markdown::main_body_opts;
+use crate::passes::source_span_for_markdown_range;
+use pulldown_cmark::{BrokenLink, Event, Parser};
+use rustc_errors::DiagnosticBuilder;
+use rustc_lint_defs::Applicability;
+use std::ops::Range;
+
+pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
+ let tcx = cx.tcx;
+ let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id) else {
+ // If non-local, no need to check anything.
+ return;
+ };
+
+ let dox = item.doc_value();
+ if dox.is_empty() {
+ return;
+ }
+
+ let link_names = item.link_names(&cx.cache);
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ link_names
+ .iter()
+ .find(|link| *link.original_text == *broken_link.reference)
+ .map(|link| ((*link.href).into(), (*link.new_text).into()))
+ };
+ let parser = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ .into_offset_iter();
+
+ let mut element_stack = Vec::new();
+
+ let mut prev_text_end = 0;
+ for (event, event_range) in parser {
+ match event {
+ Event::Start(_) => {
+ element_stack.push(Element::new(event_range));
+ }
+ Event::End(_) => {
+ let element = element_stack.pop().unwrap();
+
+ let Some(backtick_index) = element.backtick_index else {
+ continue;
+ };
+
+ // If we can't get a span of the backtick, because it is in a `#[doc = ""]` attribute,
+ // use the span of the entire attribute as a fallback.
+ let span = source_span_for_markdown_range(
+ tcx,
+ &dox,
+ &(backtick_index..backtick_index + 1),
+ &item.attrs,
+ )
+ .unwrap_or_else(|| item.attr_span(tcx));
+
+ tcx.struct_span_lint_hir(crate::lint::UNESCAPED_BACKTICKS, hir_id, span, "unescaped backtick", |lint| {
+ let mut help_emitted = false;
+
+ match element.prev_code_guess {
+ PrevCodeGuess::None => {}
+ PrevCodeGuess::Start { guess, .. } => {
+ // "foo` `bar`" -> "`foo` `bar`"
+ if let Some(suggest_index) = clamp_start(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the opening backtick of a previous inline code may be missing");
+ help_emitted = true;
+ }
+ }
+ PrevCodeGuess::End { guess, .. } => {
+ // "`foo `bar`" -> "`foo` `bar`"
+ // Don't `clamp_end` here, because the suggestion is guaranteed to be inside
+ // an inline code node and we intentionally "break" the inline code here.
+ let suggest_index = guess;
+ if can_suggest_backtick(&dox, suggest_index) {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "a previous inline code might be longer than expected");
+ help_emitted = true;
+ }
+ }
+ }
+
+ if !element.prev_code_guess.is_confident() {
+ // "`foo` bar`" -> "`foo` `bar`"
+ if let Some(guess) = guess_start_of_code(&dox, element.element_range.start..backtick_index)
+ && let Some(suggest_index) = clamp_start(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the opening backtick of an inline code may be missing");
+ help_emitted = true;
+ }
+
+ // "`foo` `bar" -> "`foo` `bar`"
+ // Don't suggest a closing backtick after a single trailing char
+ // if we already suggested an opening backtick. For example:
+ // "foo`." -> "`foo`." or "foo`s" -> "`foo`s".
+ if let Some(guess) = guess_end_of_code(&dox, backtick_index + 1..element.element_range.end)
+ && let Some(suggest_index) = clamp_end(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ && (!help_emitted || suggest_index - backtick_index > 2)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the closing backtick of an inline code may be missing");
+ help_emitted = true;
+ }
+ }
+
+ if !help_emitted {
+ lint.help("the opening or closing backtick of an inline code may be missing");
+ }
+
+ suggest_insertion(cx, item, &dox, lint, backtick_index, '\\', "if you meant to use a literal backtick, escape it");
+
+ lint
+ });
+ }
+ Event::Code(_) => {
+ let element = element_stack
+ .last_mut()
+ .expect("expected inline code node to be inside of an element");
+ assert!(
+ event_range.start >= element.element_range.start
+ && event_range.end <= element.element_range.end
+ );
+
+ // This inline code might be longer than it's supposed to be.
+ // Only check single backtick inline code for now.
+ if !element.prev_code_guess.is_confident()
+ && dox.as_bytes().get(event_range.start) == Some(&b'`')
+ && dox.as_bytes().get(event_range.start + 1) != Some(&b'`')
+ {
+ let range_inside = event_range.start + 1..event_range.end - 1;
+ let text_inside = &dox[range_inside.clone()];
+
+ let is_confident = text_inside.starts_with(char::is_whitespace)
+ || text_inside.ends_with(char::is_whitespace);
+
+ if let Some(guess) = guess_end_of_code(&dox, range_inside) {
+ // Find earlier end of code.
+ element.prev_code_guess = PrevCodeGuess::End { guess, is_confident };
+ } else {
+ // Find alternate start of code.
+ let range_before = element.element_range.start..event_range.start;
+ if let Some(guess) = guess_start_of_code(&dox, range_before) {
+ element.prev_code_guess = PrevCodeGuess::Start { guess, is_confident };
+ }
+ }
+ }
+ }
+ Event::Text(text) => {
+ let element = element_stack
+ .last_mut()
+ .expect("expected inline text node to be inside of an element");
+ assert!(
+ event_range.start >= element.element_range.start
+ && event_range.end <= element.element_range.end
+ );
+
+ // The first char is escaped if the prev char is \ and not part of a text node.
+ let is_escaped = prev_text_end < event_range.start
+ && dox.as_bytes()[event_range.start - 1] == b'\\';
+
+ // Don't lint backslash-escaped (\`) or html-escaped (&#96;) backticks.
+ if *text == *"`" && !is_escaped && *text == dox[event_range.clone()] {
+ // We found a stray backtick.
+ assert!(
+ element.backtick_index.is_none(),
+ "expected at most one unescaped backtick per element",
+ );
+ element.backtick_index = Some(event_range.start);
+ }
+
+ prev_text_end = event_range.end;
+
+ if is_escaped {
+ // Ensure that we suggest "`\x" and not "\`x".
+ element.suggestible_ranges.push(event_range.start - 1..event_range.end);
+ } else {
+ element.suggestible_ranges.push(event_range);
+ }
+ }
+ _ => {}
+ }
+ }
+}
+
+/// A previous inline code node that looks wrong.
+///
+/// `guess` is the position where we want to suggest a \`, and the guess `is_confident` if the
+/// inline code starts or ends with whitespace.
+#[derive(Debug)]
+enum PrevCodeGuess {
+ None,
+
+ /// Missing \` at start.
+ ///
+ /// ```markdown
+ /// foo` `bar`
+ /// ```
+ Start {
+ guess: usize,
+ is_confident: bool,
+ },
+
+ /// Missing \` at end.
+ ///
+ /// ```markdown
+ /// `foo `bar`
+ /// ```
+ End {
+ guess: usize,
+ is_confident: bool,
+ },
+}
+
+impl PrevCodeGuess {
+ fn is_confident(&self) -> bool {
+ match *self {
+ PrevCodeGuess::None => false,
+ PrevCodeGuess::Start { is_confident, .. } | PrevCodeGuess::End { is_confident, .. } => {
+ is_confident
+ }
+ }
+ }
+}
+
+/// A markdown [tagged element], which may or may not contain an unescaped backtick.
+///
+/// [tagged element]: https://docs.rs/pulldown-cmark/0.9/pulldown_cmark/enum.Tag.html
+#[derive(Debug)]
+struct Element {
+ /// The full range (span) of the element in the doc string.
+ element_range: Range<usize>,
+
+ /// The ranges where we're allowed to put backticks.
+ /// This is used to prevent breaking markdown elements like links or lists.
+ suggestible_ranges: Vec<Range<usize>>,
+
+ /// The unescaped backtick.
+ backtick_index: Option<usize>,
+
+ /// Suggest a different start or end of an inline code.
+ prev_code_guess: PrevCodeGuess,
+}
+
+impl Element {
+ const fn new(element_range: Range<usize>) -> Self {
+ Self {
+ element_range,
+ suggestible_ranges: Vec::new(),
+ backtick_index: None,
+ prev_code_guess: PrevCodeGuess::None,
+ }
+ }
+}
+
+/// Given a potentially unclosed inline code, attempt to find the start.
+fn guess_start_of_code(dox: &str, range: Range<usize>) -> Option<usize> {
+ assert!(dox.as_bytes()[range.end] == b'`');
+
+ let mut braces = 0;
+ let mut guess = 0;
+ for (idx, ch) in dox[range.clone()].char_indices().rev() {
+ match ch {
+ ')' | ']' | '}' => braces += 1,
+ '(' | '[' | '{' => {
+ if braces == 0 {
+ guess = idx + 1;
+ break;
+ }
+ braces -= 1;
+ }
+ ch if ch.is_whitespace() && braces == 0 => {
+ guess = idx + 1;
+ break;
+ }
+ _ => (),
+ }
+ }
+
+ guess += range.start;
+
+ // Don't suggest empty inline code or duplicate backticks.
+ can_suggest_backtick(dox, guess).then_some(guess)
+}
+
+/// Given a potentially unclosed inline code, attempt to find the end.
+fn guess_end_of_code(dox: &str, range: Range<usize>) -> Option<usize> {
+ // Punctuation that should be outside of the inline code.
+ const TRAILING_PUNCTUATION: &[u8] = b".,";
+
+ assert!(dox.as_bytes()[range.start - 1] == b'`');
+
+ let text = dox[range.clone()].trim_end();
+ let mut braces = 0;
+ let mut guess = text.len();
+ for (idx, ch) in text.char_indices() {
+ match ch {
+ '(' | '[' | '{' => braces += 1,
+ ')' | ']' | '}' => {
+ if braces == 0 {
+ guess = idx;
+ break;
+ }
+ braces -= 1;
+ }
+ ch if ch.is_whitespace() && braces == 0 => {
+ guess = idx;
+ break;
+ }
+ _ => (),
+ }
+ }
+
+ // Strip a single trailing punctuation.
+ if guess >= 1
+ && TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 1])
+ && (guess < 2 || !TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 2]))
+ {
+ guess -= 1;
+ }
+
+ guess += range.start;
+
+ // Don't suggest empty inline code or duplicate backticks.
+ can_suggest_backtick(dox, guess).then_some(guess)
+}
+
+/// Returns whether inserting a backtick at `dox[index]` will not produce double backticks.
+fn can_suggest_backtick(dox: &str, index: usize) -> bool {
+ (index == 0 || dox.as_bytes()[index - 1] != b'`')
+ && (index == dox.len() || dox.as_bytes()[index] != b'`')
+}
+
+/// Increase the index until it is inside or one past the end of one of the ranges.
+///
+/// The ranges must be sorted for this to work correctly.
+fn clamp_start(index: usize, ranges: &[Range<usize>]) -> Option<usize> {
+ for range in ranges {
+ if range.start >= index {
+ return Some(range.start);
+ }
+ if index <= range.end {
+ return Some(index);
+ }
+ }
+ None
+}
+
+/// Decrease the index until it is inside or one past the end of one of the ranges.
+///
+/// The ranges must be sorted for this to work correctly.
+fn clamp_end(index: usize, ranges: &[Range<usize>]) -> Option<usize> {
+ for range in ranges.iter().rev() {
+ if range.end <= index {
+ return Some(range.end);
+ }
+ if index >= range.start {
+ return Some(index);
+ }
+ }
+ None
+}
+
+/// Try to emit a span suggestion and fall back to help messages if we can't find a suitable span.
+///
+/// This helps find backticks in huge macro-generated docs.
+fn suggest_insertion(
+ cx: &DocContext<'_>,
+ item: &Item,
+ dox: &str,
+ lint: &mut DiagnosticBuilder<'_, ()>,
+ insert_index: usize,
+ suggestion: char,
+ message: &str,
+) {
+ /// Maximum bytes of context to show around the insertion.
+ const CONTEXT_MAX_LEN: usize = 80;
+
+ if let Some(span) =
+ source_span_for_markdown_range(cx.tcx, &dox, &(insert_index..insert_index), &item.attrs)
+ {
+ lint.span_suggestion(span, message, suggestion, Applicability::MaybeIncorrect);
+ } else {
+ let line_start = dox[..insert_index].rfind('\n').map_or(0, |idx| idx + 1);
+ let line_end = dox[insert_index..].find('\n').map_or(dox.len(), |idx| idx + insert_index);
+
+ let context_before_max_len = if insert_index - line_start < CONTEXT_MAX_LEN / 2 {
+ insert_index - line_start
+ } else if line_end - insert_index < CONTEXT_MAX_LEN / 2 {
+ CONTEXT_MAX_LEN - (line_end - insert_index)
+ } else {
+ CONTEXT_MAX_LEN / 2
+ };
+ let context_after_max_len = CONTEXT_MAX_LEN - context_before_max_len;
+
+ let (prefix, context_start) = if insert_index - line_start <= context_before_max_len {
+ ("", line_start)
+ } else {
+ ("...", dox.ceil_char_boundary(insert_index - context_before_max_len))
+ };
+ let (suffix, context_end) = if line_end - insert_index <= context_after_max_len {
+ ("", line_end)
+ } else {
+ ("...", dox.floor_char_boundary(insert_index + context_after_max_len))
+ };
+
+ let context_full = &dox[context_start..context_end].trim_end();
+ let context_before = &dox[context_start..insert_index];
+ let context_after = &dox[insert_index..context_end].trim_end();
+ lint.help(format!(
+ "{message}\n change: {prefix}{context_full}{suffix}\nto this: {prefix}{context_before}{suggestion}{context_after}{suffix}"
+ ));
+ }
+}
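
To make the end-guessing heuristic above easier to follow, here is a minimal standalone sketch: it copies the `guess_end_of_code` logic from the hunk (simplified to skip the final `can_suggest_backtick` check) and runs it on a made-up doc string. The sample input and `main` are illustrative only, not part of the patch.

```rust
use std::ops::Range;

// Copied from the hunk above: punctuation that should stay outside the inline code.
const TRAILING_PUNCTUATION: &[u8] = b".,";

/// Simplified copy of `guess_end_of_code`: starting right after an opening
/// backtick, walk forward until an unbalanced closing brace or a whitespace
/// character outside braces, and propose that position for the missing
/// closing backtick, dropping a single trailing `.` or `,`.
fn guess_end_of_code(dox: &str, range: Range<usize>) -> Option<usize> {
    assert!(dox.as_bytes()[range.start - 1] == b'`');

    let text = dox[range.clone()].trim_end();
    let mut braces = 0;
    let mut guess = text.len();
    for (idx, ch) in text.char_indices() {
        match ch {
            '(' | '[' | '{' => braces += 1,
            ')' | ']' | '}' => {
                if braces == 0 {
                    guess = idx;
                    break;
                }
                braces -= 1;
            }
            ch if ch.is_whitespace() && braces == 0 => {
                guess = idx;
                break;
            }
            _ => (),
        }
    }

    // Strip a single trailing punctuation character.
    if guess >= 1
        && TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 1])
        && (guess < 2 || !TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 2]))
    {
        guess -= 1;
    }

    // The real lint additionally calls `can_suggest_backtick` here to avoid
    // suggesting empty inline code or doubled backticks.
    Some(guess + range.start)
}

fn main() {
    // The backtick before `foo()` is never closed.
    let dox = "Calls `foo() and returns.";
    let open = dox.find('`').unwrap();
    // Guess where the closing backtick belongs, scanning everything after the opening one.
    let guess = guess_end_of_code(dox, open + 1..dox.len()).unwrap();
    assert_eq!(&dox[..guess], "Calls `foo()");
    // Prints the text with the suggested backtick inserted: "Calls `foo()` and returns."
    println!("{}`{}", &dox[..guess], &dox[guess..]);
}
```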
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index a688aa148..972b0c5ec 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -1,5 +1,6 @@
//! Strip all doc(hidden) items from the output.
+use rustc_hir::def_id::LocalDefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
use std::mem;
@@ -29,6 +30,7 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea
update_retained: true,
tcx: cx.tcx,
is_in_hidden_item: false,
+ last_reexport: None,
};
stripper.fold_crate(krate)
};
@@ -49,13 +51,24 @@ struct Stripper<'a, 'tcx> {
update_retained: bool,
tcx: TyCtxt<'tcx>,
is_in_hidden_item: bool,
+ last_reexport: Option<LocalDefId>,
}
impl<'a, 'tcx> Stripper<'a, 'tcx> {
+ fn set_last_reexport_then_fold_item(&mut self, i: Item) -> Item {
+ let prev_from_reexport = self.last_reexport;
+ if i.inline_stmt_id.is_some() {
+ self.last_reexport = i.item_id.as_def_id().and_then(|def_id| def_id.as_local());
+ }
+ let ret = self.fold_item_recur(i);
+ self.last_reexport = prev_from_reexport;
+ ret
+ }
+
fn set_is_in_hidden_item_and_fold(&mut self, is_in_hidden_item: bool, i: Item) -> Item {
let prev = self.is_in_hidden_item;
self.is_in_hidden_item |= is_in_hidden_item;
- let ret = self.fold_item_recur(i);
+ let ret = self.set_last_reexport_then_fold_item(i);
self.is_in_hidden_item = prev;
ret
}
@@ -64,7 +77,7 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
/// of `is_in_hidden_item` to `true` because the impl children inherit its visibility.
fn recurse_in_impl_or_exported_macro(&mut self, i: Item) -> Item {
let prev = mem::replace(&mut self.is_in_hidden_item, false);
- let ret = self.fold_item_recur(i);
+ let ret = self.set_last_reexport_then_fold_item(i);
self.is_in_hidden_item = prev;
ret
}
@@ -86,13 +99,20 @@ impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
if !is_impl_or_exported_macro {
is_hidden = self.is_in_hidden_item || has_doc_hidden;
if !is_hidden && i.inline_stmt_id.is_none() {
- // We don't need to check if it's coming from a reexport since the reexport itself was
- // already checked.
+ // `i.inline_stmt_id` is `Some` if the item is directly reexported. If it is, we
+ // don't need to check it, because the reexport itself was already checked.
+ //
+ // If this item is the child of a reexported module, `self.last_reexport` will be
+ // `Some` even though `i.inline_stmt_id` is `None`. Hiddenness inheritance needs to
+ // account for the possibility that an item's true parent module is hidden, but it's
+                // inlined into a visible module. This code shouldn't be reachable if the
+ // module's reexport is itself hidden, for the same reason it doesn't need to be
+ // checked if `i.inline_stmt_id` is Some: hidden reexports are never inlined.
is_hidden = i
.item_id
.as_def_id()
.and_then(|def_id| def_id.as_local())
- .map(|def_id| inherits_doc_hidden(self.tcx, def_id))
+ .map(|def_id| inherits_doc_hidden(self.tcx, def_id, self.last_reexport))
.unwrap_or(false);
}
}
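
Roughly the situation the new `last_reexport` tracking is aimed at, as an illustrative crate layout (names are made up, not taken from the patch): the item's real parent module is `doc(hidden)`, but it is inlined into the visible docs through a reexport, so the stripper must not hide its children.

```rust
#![crate_type = "lib"]

#[doc(hidden)]
pub mod hidden {
    /// The true parent module (`hidden`) is `doc(hidden)`, but this item is
    /// still reachable through the inlined reexport below, so it should stay
    /// in the generated documentation.
    pub struct Inside;
}

// While folding the inlined module's children, `last_reexport` points at the
// reexported module, and `inherits_doc_hidden` stops walking parents there
// (`stop_at`), so `Inside` is no longer stripped just for living inside
// `hidden`.
#[doc(inline)]
pub use crate::hidden as visible;
```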
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index cba55e5fe..73fc26a6b 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -194,7 +194,7 @@ impl<'a> DocFolder for ImplStripper<'a, '_> {
})
{
return None;
- } else if imp.items.is_empty() && i.doc_value().is_none() {
+ } else if imp.items.is_empty() && i.doc_value().is_empty() {
return None;
}
}
diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs
index f28c164d6..d2fa7769b 100644
--- a/src/librustdoc/scrape_examples.rs
+++ b/src/librustdoc/scrape_examples.rs
@@ -286,7 +286,7 @@ pub(crate) fn run(
let (cx, _) = Context::init(krate, renderopts, cache, tcx).map_err(|e| e.to_string())?;
// Collect CrateIds corresponding to provided target crates
- // If two different versions of the crate in the dependency tree, then examples will be collcted from both.
+    // If two different versions of the crate are in the dependency tree, then examples will be collected from both.
let all_crates = tcx
.crates(())
.iter()
@@ -331,7 +331,7 @@ pub(crate) fn run(
};
if let Err(e) = inner() {
- tcx.sess.fatal(&e);
+ tcx.sess.fatal(e);
}
Ok(())
@@ -358,7 +358,7 @@ pub(crate) fn load_call_locations(
};
inner().map_err(|e: String| {
- diag.err(&format!("failed to load examples: {}", e));
+ diag.err(format!("failed to load examples: {}", e));
1
})
}
diff --git a/src/librustdoc/theme.rs b/src/librustdoc/theme.rs
index e7a26cb34..722e01cd1 100644
--- a/src/librustdoc/theme.rs
+++ b/src/librustdoc/theme.rs
@@ -241,7 +241,7 @@ pub(crate) fn test_theme_against<P: AsRef<Path>>(
{
Ok(c) => c,
Err(e) => {
- diag.struct_err(&e).emit();
+ diag.struct_err(e).emit();
return (false, vec![]);
}
};
diff --git a/src/librustdoc/theme/tests.rs b/src/librustdoc/theme/tests.rs
index 08a174d27..2a28c19c3 100644
--- a/src/librustdoc/theme/tests.rs
+++ b/src/librustdoc/theme/tests.rs
@@ -13,11 +13,11 @@ rule d
// another line comment
e {}
-rule f/* a multine
+rule f/* a multiline
comment*/{}
-rule g/* another multine
+rule g/* another multiline
comment*/h
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index f54b70b41..6b7ad4cf2 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -5,11 +5,12 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId, LocalDefIdSet};
-use rustc_hir::intravisit::{walk_item, Visitor};
+use rustc_hir::intravisit::{walk_body, walk_item, Visitor};
use rustc_hir::{Node, CRATE_HIR_ID};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
+use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
@@ -26,6 +27,8 @@ pub(crate) struct Module<'hir> {
pub(crate) where_inner: Span,
pub(crate) mods: Vec<Module<'hir>>,
pub(crate) def_id: LocalDefId,
+ pub(crate) renamed: Option<Symbol>,
+ pub(crate) import_id: Option<LocalDefId>,
/// The key is the item `ItemId` and the value is: (item, renamed, import_id).
/// We use `FxIndexMap` to keep the insert order.
pub(crate) items: FxIndexMap<
@@ -36,11 +39,19 @@ pub(crate) struct Module<'hir> {
}
impl Module<'_> {
- pub(crate) fn new(name: Symbol, def_id: LocalDefId, where_inner: Span) -> Self {
+ pub(crate) fn new(
+ name: Symbol,
+ def_id: LocalDefId,
+ where_inner: Span,
+ renamed: Option<Symbol>,
+ import_id: Option<LocalDefId>,
+ ) -> Self {
Module {
name,
def_id,
where_inner,
+ renamed,
+ import_id,
mods: Vec::new(),
items: FxIndexMap::default(),
foreigns: Vec::new(),
@@ -59,9 +70,16 @@ fn def_id_to_path(tcx: TyCtxt<'_>, did: DefId) -> Vec<Symbol> {
std::iter::once(crate_name).chain(relative).collect()
}
-pub(crate) fn inherits_doc_hidden(tcx: TyCtxt<'_>, mut def_id: LocalDefId) -> bool {
+pub(crate) fn inherits_doc_hidden(
+ tcx: TyCtxt<'_>,
+ mut def_id: LocalDefId,
+ stop_at: Option<LocalDefId>,
+) -> bool {
let hir = tcx.hir();
while let Some(id) = tcx.opt_local_parent(def_id) {
+ if let Some(stop_at) = stop_at && id == stop_at {
+ return false;
+ }
def_id = id;
if tcx.is_doc_hidden(def_id.to_def_id()) {
return true;
@@ -87,6 +105,8 @@ pub(crate) struct RustdocVisitor<'a, 'tcx> {
inside_public_path: bool,
exact_paths: DefIdMap<Vec<Symbol>>,
modules: Vec<Module<'tcx>>,
+ is_importable_from_parent: bool,
+ inside_body: bool,
}
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
@@ -98,6 +118,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
cx.tcx.crate_name(LOCAL_CRATE),
CRATE_DEF_ID,
cx.tcx.hir().root_module().spans.inner_span,
+ None,
+ None,
);
RustdocVisitor {
@@ -107,6 +129,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
inside_public_path: true,
exact_paths: Default::default(),
modules: vec![om],
+ is_importable_from_parent: true,
+ inside_body: false,
}
}
@@ -133,14 +157,15 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// is declared but also a reexport of itself producing two exports of the same
// macro in the same module.
let mut inserted = FxHashSet::default();
- for export in self.cx.tcx.module_children_reexports(CRATE_DEF_ID) {
- if let Res::Def(DefKind::Macro(_), def_id) = export.res &&
+ for child in self.cx.tcx.module_children_local(CRATE_DEF_ID) {
+ if !child.reexport_chain.is_empty() &&
+ let Res::Def(DefKind::Macro(_), def_id) = child.res &&
let Some(local_def_id) = def_id.as_local() &&
self.cx.tcx.has_attr(def_id, sym::macro_export) &&
inserted.insert(def_id)
{
- let item = self.cx.tcx.hir().expect_item(local_def_id);
- top_level_module.items.insert((local_def_id, Some(item.ident.name)), (item, None, None));
+ let item = self.cx.tcx.hir().expect_item(local_def_id);
+ top_level_module.items.insert((local_def_id, Some(item.ident.name)), (item, None, None));
}
}
@@ -255,9 +280,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
return false;
};
- let is_private =
- !self.cx.cache.effective_visibilities.is_directly_public(self.cx.tcx, ori_res_did);
- let is_hidden = inherits_doc_hidden(self.cx.tcx, res_did);
+ let is_private = !self.cx.cache.effective_visibilities.is_directly_public(tcx, ori_res_did);
+ let is_hidden = inherits_doc_hidden(tcx, res_did, None);
// Only inline if requested or if the item would otherwise be stripped.
if (!please_inline && !is_private && !is_hidden) || is_no_inline {
@@ -265,7 +289,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}
if !please_inline &&
- let Some(item_def_id) = reexport_chain(self.cx.tcx, def_id, res_did).iter()
+ let Some(item_def_id) = reexport_chain(tcx, def_id, res_did).iter()
.flat_map(|reexport| reexport.id()).map(|id| id.expect_local())
.chain(iter::once(res_did)).nth(1) &&
item_def_id != def_id &&
@@ -273,22 +297,38 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
.cx
.cache
.effective_visibilities
- .is_directly_public(self.cx.tcx, item_def_id.to_def_id()) &&
- !inherits_doc_hidden(self.cx.tcx, item_def_id)
+ .is_directly_public(tcx, item_def_id.to_def_id()) &&
+ !inherits_doc_hidden(tcx, item_def_id, None)
{
// The imported item is public and not `doc(hidden)` so no need to inline it.
return false;
}
- if !self.view_item_stack.insert(res_did) {
+ let is_bang_macro = matches!(
+ tcx.hir().get_by_def_id(res_did),
+ Node::Item(&hir::Item { kind: hir::ItemKind::Macro(_, MacroKind::Bang), .. })
+ );
+
+ if !self.view_item_stack.insert(res_did) && !is_bang_macro {
return false;
}
let ret = match tcx.hir().get_by_def_id(res_did) {
+            // Bang macros are handled a bit on their own because of how they are handled by the
+ // compiler. If they have `#[doc(hidden)]` and the re-export doesn't have
+ // `#[doc(inline)]`, then we don't inline it.
+ Node::Item(_)
+ if is_bang_macro
+ && !please_inline
+ && renamed.is_some()
+ && self.cx.tcx.is_doc_hidden(ori_res_did) =>
+ {
+ return false;
+ }
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for &i in m.item_ids {
- let i = self.cx.tcx.hir().item(i);
+ let i = tcx.hir().item(i);
self.visit_item_inner(i, None, Some(def_id));
}
self.inlining = prev;
@@ -319,11 +359,23 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
renamed: Option<Symbol>,
parent_id: Option<LocalDefId>,
) {
- self.modules
- .last_mut()
- .unwrap()
- .items
- .insert((item.owner_id.def_id, renamed), (item, renamed, parent_id));
+ if self.is_importable_from_parent
+ // If we're inside an item, only impl blocks and `macro_rules!` with the `macro_export`
+ // attribute can still be visible.
+ || match item.kind {
+ hir::ItemKind::Impl(..) => true,
+ hir::ItemKind::Macro(_, MacroKind::Bang) => {
+ self.cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export)
+ }
+ _ => false,
+ }
+ {
+ self.modules
+ .last_mut()
+ .unwrap()
+ .items
+ .insert((item.owner_id.def_id, renamed), (item, renamed, parent_id));
+ }
}
fn visit_item_inner(
@@ -331,8 +383,28 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
item: &'tcx hir::Item<'_>,
renamed: Option<Symbol>,
import_id: Option<LocalDefId>,
- ) -> bool {
+ ) {
debug!("visiting item {:?}", item);
+ if self.inside_body {
+ // Only impls can be "seen" outside a body. For example:
+ //
+ // ```
+ // struct Bar;
+ //
+ // fn foo() {
+ // impl Bar { fn bar() {} }
+ // }
+ // Bar::bar();
+ // ```
+ if let hir::ItemKind::Impl(impl_) = item.kind &&
+                // Don't duplicate impls when inlining or if it implements a trait; we'll pick
+ // them up regardless of where they're located.
+ impl_.of_trait.is_none()
+ {
+ self.add_to_current_mod(item, None, None);
+ }
+ return;
+ }
let name = renamed.unwrap_or(item.ident.name);
let tcx = self.cx.tcx;
@@ -411,7 +483,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}
}
hir::ItemKind::Mod(ref m) => {
- self.enter_mod(item.owner_id.def_id, m, name);
+ self.enter_mod(item.owner_id.def_id, m, name, renamed, import_id);
}
hir::ItemKind::Fn(..)
| hir::ItemKind::ExternCrate(..)
@@ -420,7 +492,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
| hir::ItemKind::Union(..)
| hir::ItemKind::TyAlias(..)
| hir::ItemKind::OpaqueTy(hir::OpaqueTy {
- origin: hir::OpaqueTyOrigin::TyAlias, ..
+ origin: hir::OpaqueTyOrigin::TyAlias { .. },
+ ..
})
| hir::ItemKind::Static(..)
| hir::ItemKind::Trait(..)
@@ -448,7 +521,6 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}
}
}
- true
}
fn visit_foreign_item_inner(
@@ -465,8 +537,15 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
/// This method will create a new module and push it onto the "modules stack" then call
/// `visit_mod_contents`. Once done, it'll remove it from the "modules stack" and instead
/// add into the list of modules of the current module.
- fn enter_mod(&mut self, id: LocalDefId, m: &'tcx hir::Mod<'tcx>, name: Symbol) {
- self.modules.push(Module::new(name, id, m.spans.inner_span));
+ fn enter_mod(
+ &mut self,
+ id: LocalDefId,
+ m: &'tcx hir::Mod<'tcx>,
+ name: Symbol,
+ renamed: Option<Symbol>,
+ import_id: Option<LocalDefId>,
+ ) {
+ self.modules.push(Module::new(name, id, m.spans.inner_span, renamed, import_id));
self.visit_mod_contents(id, m);
@@ -485,9 +564,18 @@ impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
}
fn visit_item(&mut self, i: &'tcx hir::Item<'tcx>) {
- if self.visit_item_inner(i, None, None) {
- walk_item(self, i);
- }
+ self.visit_item_inner(i, None, None);
+ let new_value = self.is_importable_from_parent
+ && matches!(
+ i.kind,
+ hir::ItemKind::Mod(..)
+ | hir::ItemKind::ForeignMod { .. }
+ | hir::ItemKind::Impl(..)
+ | hir::ItemKind::Trait(..)
+ );
+ let prev = mem::replace(&mut self.is_importable_from_parent, new_value);
+ walk_item(self, i);
+ self.is_importable_from_parent = prev;
}
fn visit_mod(&mut self, _: &hir::Mod<'tcx>, _: Span, _: hir::HirId) {
@@ -513,4 +601,10 @@ impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
fn visit_lifetime(&mut self, _: &hir::Lifetime) {
// Unneeded.
}
+
+ fn visit_body(&mut self, b: &'tcx hir::Body<'tcx>) {
+ let prev = mem::replace(&mut self.inside_body, true);
+ walk_body(self, b);
+ self.inside_body = prev;
+ }
}
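
For the visit_ast changes, the bang-macro special case called out in the comments above can be pictured with a small hypothetical crate (not taken from the patch): the macro is `#[doc(hidden)]` and the reexport has no `#[doc(inline)]`, so with the new check the macro is not inlined through the `pub use`.

```rust
#![crate_type = "lib"]

#[doc(hidden)]
#[macro_export]
macro_rules! private_helper {
    () => {};
}

// The reexport renames the macro and does not ask for `#[doc(inline)]`;
// per the new bang-macro check in the hunk above, the hidden macro is not
// inlined here.
pub use crate::private_helper as helper;
```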