path: root/src/librustdoc
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:43 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:43 +0000
commit     3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245 (patch)
tree       daf049b282ab10e8c3d03e409b3cd84ff3f7690c /src/librustdoc
parent     Adding debian version 1.68.2+dfsg1-1. (diff)
download   rustc-3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245.tar.xz
           rustc-3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245.zip
Merging upstream version 1.69.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/librustdoc')
-rw-r--r--  src/librustdoc/Cargo.toml                                        |    2
-rw-r--r--  src/librustdoc/clean/auto_trait.rs                               |    6
-rw-r--r--  src/librustdoc/clean/blanket_impl.rs                             |    2
-rw-r--r--  src/librustdoc/clean/cfg.rs                                      |    8
-rw-r--r--  src/librustdoc/clean/cfg/tests.rs                                |    5
-rw-r--r--  src/librustdoc/clean/inline.rs                                   |   64
-rw-r--r--  src/librustdoc/clean/mod.rs                                      |  476
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs                    |    3
-rw-r--r--  src/librustdoc/clean/types.rs                                    |  248
-rw-r--r--  src/librustdoc/clean/types/tests.rs                              |    1
-rw-r--r--  src/librustdoc/clean/utils.rs                                    |   23
-rw-r--r--  src/librustdoc/config.rs                                         |    2
-rw-r--r--  src/librustdoc/core.rs                                           |   80
-rw-r--r--  src/librustdoc/doctest.rs                                        |  117
-rw-r--r--  src/librustdoc/formats/cache.rs                                  |   41
-rw-r--r--  src/librustdoc/formats/item_type.rs                              |    3
-rw-r--r--  src/librustdoc/html/format.rs                                    |   98
-rw-r--r--  src/librustdoc/html/highlight.rs                                 |   30
-rw-r--r--  src/librustdoc/html/layout.rs                                    |    1
-rw-r--r--  src/librustdoc/html/length_limit.rs                              |    4
-rw-r--r--  src/librustdoc/html/length_limit/tests.rs                        |    2
-rw-r--r--  src/librustdoc/html/markdown.rs                                  |  209
-rw-r--r--  src/librustdoc/html/markdown/tests.rs                            |    2
-rw-r--r--  src/librustdoc/html/render/context.rs                            |   43
-rw-r--r--  src/librustdoc/html/render/mod.rs                                |  166
-rw-r--r--  src/librustdoc/html/render/print_item.rs                         |  672
-rw-r--r--  src/librustdoc/html/render/search_index.rs                       |   11
-rw-r--r--  src/librustdoc/html/render/write_shared.rs                       |    5
-rw-r--r--  src/librustdoc/html/sources.rs                                   |    3
-rw-r--r--  src/librustdoc/html/static/.eslintrc.js                          |    1
-rw-r--r--  src/librustdoc/html/static/css/rustdoc.css                       |  148
-rw-r--r--  src/librustdoc/html/static/css/settings.css                      |   30
-rw-r--r--  src/librustdoc/html/static/css/themes/ayu.css                    |   77
-rw-r--r--  src/librustdoc/html/static/css/themes/dark.css                   |   26
-rw-r--r--  src/librustdoc/html/static/css/themes/light.css                  |   26
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2     |  bin 81320 -> 81540 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2       |  bin 59860 -> 59716 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md         |    2
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2  |  bin 76180 -> 76260 bytes
-rw-r--r--  src/librustdoc/html/static/images/down-arrow.svg                 |    1
-rw-r--r--  src/librustdoc/html/static/images/toggle-minus.svg               |    1
-rw-r--r--  src/librustdoc/html/static/images/toggle-plus.svg                |    1
-rw-r--r--  src/librustdoc/html/static/images/wheel.svg                      |    2
-rw-r--r--  src/librustdoc/html/static/js/main.js                            |  125
-rw-r--r--  src/librustdoc/html/static/js/search.js                          |  355
-rw-r--r--  src/librustdoc/html/static/js/settings.js                        |   55
-rw-r--r--  src/librustdoc/html/static/js/source-script.js                   |    3
-rw-r--r--  src/librustdoc/html/static/js/storage.js                         |  113
-rw-r--r--  src/librustdoc/html/static_files.rs                              |    3
-rw-r--r--  src/librustdoc/html/templates/page.html                          |    3
-rw-r--r--  src/librustdoc/html/templates/print_item.html                    |    2
-rw-r--r--  src/librustdoc/json/conversions.rs                               |   99
-rw-r--r--  src/librustdoc/json/import_finder.rs                             |   10
-rw-r--r--  src/librustdoc/json/mod.rs                                       |   34
-rw-r--r--  src/librustdoc/lib.rs                                            |   40
-rw-r--r--  src/librustdoc/lint.rs                                           |    6
-rw-r--r--  src/librustdoc/markdown.rs                                       |    9
-rw-r--r--  src/librustdoc/passes/calculate_doc_coverage.rs                  |    8
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs               |   38
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs                 |  313
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links/early.rs           |  407
-rw-r--r--  src/librustdoc/passes/collect_trait_impls.rs                     |   91
-rw-r--r--  src/librustdoc/passes/lint/check_code_block_syntax.rs            |   11
-rw-r--r--  src/librustdoc/passes/lint/html_tags.rs                          |   10
-rw-r--r--  src/librustdoc/passes/mod.rs                                     |    3
-rw-r--r--  src/librustdoc/passes/propagate_doc_cfg.rs                       |   17
-rw-r--r--  src/librustdoc/passes/strip_hidden.rs                            |  117
-rw-r--r--  src/librustdoc/passes/stripper.rs                                |   32
-rw-r--r--  src/librustdoc/scrape_examples.rs                                |    2
-rw-r--r--  src/librustdoc/visit_ast.rs                                      |  275
-rw-r--r--  src/librustdoc/visit_lib.rs                                      |    9
71 files changed, 2156 insertions(+), 2676 deletions(-)
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
index 0271c27b4..c48f7998c 100644
--- a/src/librustdoc/Cargo.toml
+++ b/src/librustdoc/Cargo.toml
@@ -12,14 +12,12 @@ askama = { version = "0.11", default-features = false, features = ["config"] }
itertools = "0.10.1"
minifier = "0.2.2"
once_cell = "1.10.0"
-pulldown-cmark = { version = "0.9.2", default-features = false }
regex = "1"
rustdoc-json-types = { path = "../rustdoc-json-types" }
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
smallvec = "1.8.1"
tempfile = "3"
-thin-vec = "0.2.9"
tracing = "0.1"
tracing-tree = "0.2.0"
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index a302750aa..9479b3ee0 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -137,7 +137,7 @@ where
pub(crate) fn get_auto_trait_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
let tcx = self.cx.tcx;
let param_env = tcx.param_env(item_def_id);
- let ty = tcx.type_of(item_def_id);
+ let ty = tcx.type_of(item_def_id).subst_identity();
let f = auto_trait::AutoTraitFinder::new(tcx);
debug!("get_auto_trait_impls({:?})", ty);
@@ -734,8 +734,8 @@ struct RegionReplacer<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for RegionReplacer<'a, 'tcx> {
+ fn interner(&self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index e6b2b2349..bcdbbcacc 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -15,7 +15,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
pub(crate) fn get_blanket_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
let cx = &mut self.cx;
let param_env = cx.tcx.param_env(item_def_id);
- let ty = cx.tcx.bound_type_of(item_def_id);
+ let ty = cx.tcx.type_of(item_def_id);
trace!("get_blanket_impls({:?})", ty);
let mut impls = Vec::new();
diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs
index f1853f369..dd58a5b51 100644
--- a/src/librustdoc/clean/cfg.rs
+++ b/src/librustdoc/clean/cfg.rs
@@ -164,10 +164,10 @@ impl Cfg {
/// Renders the configuration for human display, as a short HTML description.
pub(crate) fn render_short_html(&self) -> String {
let mut msg = Display(self, Format::ShortHtml).to_string();
- if self.should_capitalize_first_letter() {
- if let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) {
- msg[i..i + 1].make_ascii_uppercase();
- }
+ if self.should_capitalize_first_letter() &&
+ let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric())
+ {
+ msg[i..i + 1].make_ascii_uppercase();
}
msg
}
diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs
index 81f676724..bb62660e1 100644
--- a/src/librustdoc/clean/cfg/tests.rs
+++ b/src/librustdoc/clean/cfg/tests.rs
@@ -4,6 +4,7 @@ use rustc_ast::{LitKind, MetaItemLit, Path, StrStyle};
use rustc_span::create_default_session_globals_then;
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::DUMMY_SP;
+use thin_vec::thin_vec;
fn word_cfg(s: &str) -> Cfg {
Cfg::Cfg(Symbol::intern(s), None)
@@ -34,7 +35,7 @@ macro_rules! dummy_meta_item_list {
($name:ident, [$($list:ident),* $(,)?]) => {
MetaItem {
path: Path::from_ident(Ident::from_str(stringify!($name))),
- kind: MetaItemKind::List(vec![
+ kind: MetaItemKind::List(thin_vec![
$(
NestedMetaItem::MetaItem(
dummy_meta_item_word(stringify!($list)),
@@ -48,7 +49,7 @@ macro_rules! dummy_meta_item_list {
($name:ident, [$($list:expr),* $(,)?]) => {
MetaItem {
path: Path::from_ident(Ident::from_str(stringify!($name))),
- kind: MetaItemKind::List(vec![
+ kind: MetaItemKind::List(thin_vec![
$(
NestedMetaItem::MetaItem($list),
)*
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index b3b093312..148243683 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -9,7 +9,7 @@ use rustc_ast as ast;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::def_id::{DefId, DefIdSet, LocalDefId};
use rustc_hir::Mutability;
use rustc_metadata::creader::{CStore, LoadedMacro};
use rustc_middle::ty::{self, TyCtxt};
@@ -45,7 +45,7 @@ pub(crate) fn try_inline(
res: Res,
name: Symbol,
attrs: Option<&[ast::Attribute]>,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
) -> Option<Vec<clean::Item>> {
let did = res.opt_def_id()?;
if did.is_local() {
@@ -163,7 +163,7 @@ pub(crate) fn try_inline_glob(
cx: &mut DocContext<'_>,
res: Res,
current_mod: LocalDefId,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
) -> Option<Vec<clean::Item>> {
let did = res.opt_def_id()?;
@@ -251,7 +251,7 @@ pub(crate) fn build_external_trait(cx: &mut DocContext<'_>, did: DefId) -> clean
}
fn build_external_function<'tcx>(cx: &mut DocContext<'tcx>, did: DefId) -> Box<clean::Function> {
- let sig = cx.tcx.fn_sig(did);
+ let sig = cx.tcx.fn_sig(did).subst_identity();
let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var {
ty::BoundVariableKind::Region(ty::BrNamed(_, name)) if name != kw::UnderscoreLifetime => {
@@ -303,7 +303,8 @@ fn build_union(cx: &mut DocContext<'_>, did: DefId) -> clean::Union {
fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box<clean::Typedef> {
let predicates = cx.tcx.explicit_predicates_of(did);
- let type_ = clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did));
+ let type_ =
+ clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()), cx, Some(did));
Box::new(clean::Typedef {
type_,
@@ -390,18 +391,17 @@ pub(crate) fn build_impl(
// Only inline impl if the implemented trait is
// reachable in rustdoc generated documentation
- if !did.is_local() {
- if let Some(traitref) = associated_trait {
- let did = traitref.def_id;
- if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
- return;
- }
+ if !did.is_local() && let Some(traitref) = associated_trait {
+ let did = traitref.def_id;
+ if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
+ return;
+ }
- if let Some(stab) = tcx.lookup_stability(did) {
- if stab.is_unstable() && stab.feature == sym::rustc_private {
- return;
- }
- }
+ if let Some(stab) = tcx.lookup_stability(did) &&
+ stab.is_unstable() &&
+ stab.feature == sym::rustc_private
+ {
+ return;
}
}
@@ -415,7 +415,9 @@ pub(crate) fn build_impl(
let for_ = match &impl_item {
Some(impl_) => clean_ty(impl_.self_ty, cx),
- None => clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did)),
+ None => {
+ clean_middle_ty(ty::Binder::dummy(tcx.type_of(did).subst_identity()), cx, Some(did))
+ }
};
// Only inline impl if the implementing type is
@@ -525,10 +527,8 @@ pub(crate) fn build_impl(
}
while let Some(ty) = stack.pop() {
- if let Some(did) = ty.def_id(&cx.cache) {
- if tcx.is_doc_hidden(did) {
- return;
- }
+ if let Some(did) = ty.def_id(&cx.cache) && tcx.is_doc_hidden(did) {
+ return;
}
if let Some(generics) = ty.generics() {
stack.extend(generics);
@@ -568,11 +568,7 @@ pub(crate) fn build_impl(
));
}
-fn build_module(
- cx: &mut DocContext<'_>,
- did: DefId,
- visited: &mut FxHashSet<DefId>,
-) -> clean::Module {
+fn build_module(cx: &mut DocContext<'_>, did: DefId, visited: &mut DefIdSet) -> clean::Module {
let items = build_module_items(cx, did, visited, &mut FxHashSet::default(), None);
let span = clean::Span::new(cx.tcx.def_span(did));
@@ -582,9 +578,9 @@ fn build_module(
fn build_module_items(
cx: &mut DocContext<'_>,
did: DefId,
- visited: &mut FxHashSet<DefId>,
+ visited: &mut DefIdSet,
inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
- allowed_def_ids: Option<&FxHashSet<DefId>>,
+ allowed_def_ids: Option<&DefIdSet>,
) -> Vec<clean::Item> {
let mut items = Vec::new();
@@ -659,14 +655,22 @@ pub(crate) fn print_inlined_const(tcx: TyCtxt<'_>, did: DefId) -> String {
fn build_const(cx: &mut DocContext<'_>, def_id: DefId) -> clean::Constant {
clean::Constant {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()),
+ cx,
+ Some(def_id),
+ ),
kind: clean::ConstantKind::Extern { def_id },
}
}
fn build_static(cx: &mut DocContext<'_>, did: DefId, mutable: bool) -> clean::Static {
clean::Static {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()),
+ cx,
+ Some(did),
+ ),
mutability: if mutable { Mutability::Mut } else { Mutability::Not },
expr: None,
}
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 0f0e16265..3edc2cd2e 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -11,18 +11,20 @@ pub(crate) mod types;
pub(crate) mod utils;
use rustc_ast as ast;
+use rustc_ast::token::{Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, DefKind, Res};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId, LOCAL_CRATE};
use rustc_hir::PredicateOrigin;
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData};
-use rustc_middle::middle::resolve_lifetime as rl;
+use rustc_middle::middle::resolve_bound_vars as rbv;
use rustc_middle::ty::fold::TypeFolder;
use rustc_middle::ty::InternalSubsts;
-use rustc_middle::ty::TypeVisitable;
+use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{self, AdtKind, DefIdTree, EarlyBinder, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{AstPass, MacroKind};
@@ -37,6 +39,7 @@ use std::hash::Hash;
use std::mem;
use thin_vec::ThinVec;
+use crate::clean::inline::merge_attrs;
use crate::core::{self, DocContext, ImplTraitParam};
use crate::formats::item_type::ItemType;
use crate::visit_ast::Module as DocModule;
@@ -75,7 +78,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
// This covers the case where somebody does an import which should pull in an item,
// but there's already an item with the same namespace and same name. Rust gives
// priority to the not-imported one, so we should, too.
- items.extend(doc.items.iter().flat_map(|(item, renamed, import_id)| {
+ items.extend(doc.items.values().flat_map(|(item, renamed, import_id)| {
// First, lower everything other than imports.
if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
return Vec::new();
@@ -88,7 +91,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
}
v
}));
- items.extend(doc.items.iter().flat_map(|(item, renamed, _)| {
+ items.extend(doc.items.values().flat_map(|(item, renamed, _)| {
// Now we actually lower the imports, skipping everything else.
if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind {
let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id()));
@@ -116,7 +119,8 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
}
});
- Item::from_hir_id_and_parts(doc.id, Some(doc.name), ModuleItem(Module { items, span }), cx)
+ let kind = ModuleItem(Module { items, span });
+ Item::from_def_id_and_parts(doc.def_id.to_def_id(), Some(doc.name), kind, cx)
}
fn clean_generic_bound<'tcx>(
@@ -197,11 +201,11 @@ fn clean_poly_trait_ref_with_bindings<'tcx>(
}
fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime {
- let def = cx.tcx.named_region(lifetime.hir_id);
+ let def = cx.tcx.named_bound_var(lifetime.hir_id);
if let Some(
- rl::Region::EarlyBound(node_id)
- | rl::Region::LateBound(_, _, node_id)
- | rl::Region::Free(_, node_id),
+ rbv::ResolvedArg::EarlyBound(node_id)
+ | rbv::ResolvedArg::LateBound(_, _, node_id)
+ | rbv::ResolvedArg::Free(_, node_id),
) = def
{
if let Some(lt) = cx.substs.get(&node_id).and_then(|p| p.as_lt()).cloned() {
@@ -214,7 +218,11 @@ fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) ->
pub(crate) fn clean_const<'tcx>(constant: &hir::ConstArg, cx: &mut DocContext<'tcx>) -> Constant {
let def_id = cx.tcx.hir().body_owner_def_id(constant.value.body).to_def_id();
Constant {
- type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)),
+ type_: clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()),
+ cx,
+ Some(def_id),
+ ),
kind: ConstantKind::Anonymous { body: constant.value.body },
}
}
@@ -241,6 +249,7 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Life
ty::ReLateBound(..)
| ty::ReFree(..)
| ty::ReVar(..)
+ | ty::ReError(_)
| ty::RePlaceholder(..)
| ty::ReErased => {
debug!("cannot clean region {:?}", region);
@@ -309,10 +318,13 @@ pub(crate) fn clean_predicate<'tcx>(
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
Some(clean_projection_predicate(bound_predicate.rebind(pred), cx))
}
+ // FIXME(generic_const_exprs): should this do something?
ty::PredicateKind::ConstEvaluatable(..) => None,
ty::PredicateKind::WellFormed(..) => None,
+ ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) => None,
ty::PredicateKind::Subtype(..)
+ | ty::PredicateKind::AliasEq(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
@@ -382,13 +394,10 @@ fn clean_middle_term<'tcx>(
fn clean_hir_term<'tcx>(term: &hir::Term<'tcx>, cx: &mut DocContext<'tcx>) -> Term {
match term {
hir::Term::Ty(ty) => Term::Type(clean_ty(ty, cx)),
- hir::Term::Const(c) => {
- let def_id = cx.tcx.hir().local_def_id(c.hir_id);
- Term::Constant(clean_middle_const(
- ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, def_id)),
- cx,
- ))
- }
+ hir::Term::Const(c) => Term::Constant(clean_middle_const(
+ ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, c.def_id)),
+ cx,
+ )),
}
}
@@ -479,7 +488,7 @@ fn clean_generic_param_def<'tcx>(
ty::GenericParamDefKind::Type { has_default, synthetic, .. } => {
let default = if has_default {
Some(clean_middle_ty(
- ty::Binder::dummy(cx.tcx.type_of(def.def_id)),
+ ty::Binder::dummy(cx.tcx.type_of(def.def_id).subst_identity()),
cx,
Some(def.def_id),
))
@@ -501,7 +510,12 @@ fn clean_generic_param_def<'tcx>(
GenericParamDefKind::Const {
did: def.def_id,
ty: Box::new(clean_middle_ty(
- ty::Binder::dummy(cx.tcx.type_of(def.def_id)),
+ ty::Binder::dummy(
+ cx.tcx
+ .type_of(def.def_id)
+ .no_bound_vars()
+ .expect("const parameter types cannot be generic"),
+ ),
cx,
Some(def.def_id),
)),
@@ -523,12 +537,11 @@ fn clean_generic_param<'tcx>(
generics: Option<&hir::Generics<'tcx>>,
param: &hir::GenericParam<'tcx>,
) -> GenericParamDef {
- let did = cx.tcx.hir().local_def_id(param.hir_id);
let (name, kind) = match param.kind {
hir::GenericParamKind::Lifetime { .. } => {
let outlives = if let Some(generics) = generics {
generics
- .outlives_for_param(did)
+ .outlives_for_param(param.def_id)
.filter(|bp| !bp.in_where_clause)
.flat_map(|bp| bp.bounds)
.map(|bound| match bound {
@@ -544,7 +557,7 @@ fn clean_generic_param<'tcx>(
hir::GenericParamKind::Type { ref default, synthetic } => {
let bounds = if let Some(generics) = generics {
generics
- .bounds_for_param(did)
+ .bounds_for_param(param.def_id)
.filter(|bp| bp.origin != PredicateOrigin::WhereClause)
.flat_map(|bp| bp.bounds)
.filter_map(|x| clean_generic_bound(x, cx))
@@ -555,7 +568,7 @@ fn clean_generic_param<'tcx>(
(
param.name.ident().name,
GenericParamDefKind::Type {
- did: did.to_def_id(),
+ did: param.def_id.to_def_id(),
bounds,
default: default.map(|t| clean_ty(t, cx)).map(Box::new),
synthetic,
@@ -565,12 +578,10 @@ fn clean_generic_param<'tcx>(
hir::GenericParamKind::Const { ty, default } => (
param.name.ident().name,
GenericParamDefKind::Const {
- did: did.to_def_id(),
+ did: param.def_id.to_def_id(),
ty: Box::new(clean_ty(ty, cx)),
- default: default.map(|ct| {
- let def_id = cx.tcx.hir().local_def_id(ct.hir_id);
- Box::new(ty::Const::from_anon_const(cx.tcx, def_id).to_string())
- }),
+ default: default
+ .map(|ct| Box::new(ty::Const::from_anon_const(cx.tcx, ct.def_id).to_string())),
},
),
};
@@ -789,43 +800,43 @@ fn clean_ty_generics<'tcx>(
None
})();
- if let Some(param_idx) = param_idx {
- if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
- let p: WherePredicate = clean_predicate(*p, cx)?;
+ if let Some(param_idx) = param_idx
+ && let Some(b) = impl_trait.get_mut(&param_idx.into())
+ {
+ let p: WherePredicate = clean_predicate(*p, cx)?;
+
+ b.extend(
+ p.get_bounds()
+ .into_iter()
+ .flatten()
+ .cloned()
+ .filter(|b| !b.is_sized_bound(cx)),
+ );
- b.extend(
- p.get_bounds()
+ let proj = projection.map(|p| {
+ (
+ clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
+ p.map_bound(|p| p.term),
+ )
+ });
+ if let Some(((_, trait_did, name), rhs)) = proj
+ .as_ref()
+ .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
+ {
+ // FIXME(...): Remove this unwrap()
+ impl_trait_proj.entry(param_idx).or_default().push((
+ trait_did,
+ name,
+ rhs.map_bound(|rhs| rhs.ty().unwrap()),
+ p.get_bound_params()
.into_iter()
.flatten()
- .cloned()
- .filter(|b| !b.is_sized_bound(cx)),
- );
-
- let proj = projection.map(|p| {
- (
- clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
- p.map_bound(|p| p.term),
- )
- });
- if let Some(((_, trait_did, name), rhs)) = proj
- .as_ref()
- .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
- {
- // FIXME(...): Remove this unwrap()
- impl_trait_proj.entry(param_idx).or_default().push((
- trait_did,
- name,
- rhs.map_bound(|rhs| rhs.ty().unwrap()),
- p.get_bound_params()
- .into_iter()
- .flatten()
- .map(|param| GenericParamDef::lifetime(param.0))
- .collect(),
- ));
- }
-
- return None;
+ .map(|param| GenericParamDef::lifetime(param.0))
+ .collect(),
+ ));
}
+
+ return None;
}
Some(p)
@@ -888,7 +899,7 @@ fn clean_ty_generics<'tcx>(
// `?Sized` bound for each one we didn't find to be `Sized`.
for tp in &stripped_params {
if let types::GenericParamDefKind::Type { .. } = tp.kind
- && !sized_params.contains(&tp.name)
+ && !sized_params.contains(&tp.name)
{
where_predicates.push(WherePredicate::BoundPredicate {
ty: Type::Generic(tp.name),
@@ -1214,7 +1225,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
let kind = match assoc_item.kind {
ty::AssocKind::Const => {
let ty = clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
);
@@ -1230,7 +1241,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
}
}
ty::AssocKind::Fn => {
- let sig = tcx.fn_sig(assoc_item.def_id);
+ let sig = tcx.fn_sig(assoc_item.def_id).subst_identity();
let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var {
ty::BoundVariableKind::Region(ty::BrNamed(_, name))
@@ -1253,7 +1264,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
if assoc_item.fn_has_self_parameter {
let self_ty = match assoc_item.container {
- ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)),
+ ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)).subst_identity(),
ty::TraitContainer => tcx.types.self_param,
};
let self_arg_ty = sig.input(0).skip_binder();
@@ -1400,7 +1411,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
AssocTypeItem(
Box::new(Typedef {
type_: clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
),
@@ -1418,7 +1429,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>(
AssocTypeItem(
Box::new(Typedef {
type_: clean_middle_ty(
- ty::Binder::dummy(tcx.type_of(assoc_item.def_id)),
+ ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()),
cx,
Some(assoc_item.def_id),
),
@@ -1463,10 +1474,10 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
// Try to normalize `<X as Y>::T` to a type
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
// `hir_to_ty` can return projection types with escaping vars for GATs, e.g. `<() as Trait>::Gat<'_>`
- if !ty.has_escaping_bound_vars() {
- if let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty)) {
- return clean_middle_ty(normalized_value, cx, None);
- }
+ if !ty.has_escaping_bound_vars()
+ && let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty))
+ {
+ return clean_middle_ty(normalized_value, cx, None);
}
let trait_segments = &p.segments[..p.segments.len() - 1];
@@ -1528,7 +1539,7 @@ fn maybe_expand_private_type_alias<'tcx>(
let hir::ItemKind::TyAlias(ty, generics) = alias else { return None };
let provided_params = &path.segments.last().expect("segments were empty");
- let mut substs = FxHashMap::default();
+ let mut substs = DefIdMap::default();
let generic_args = provided_params.args();
let mut indices: hir::GenericParamCount = Default::default();
@@ -1547,18 +1558,16 @@ fn maybe_expand_private_type_alias<'tcx>(
_ => None,
});
if let Some(lt) = lifetime {
- let lt_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let cleaned = if !lt.is_anonymous() {
clean_lifetime(lt, cx)
} else {
Lifetime::elided()
};
- substs.insert(lt_def_id.to_def_id(), SubstParam::Lifetime(cleaned));
+ substs.insert(param.def_id.to_def_id(), SubstParam::Lifetime(cleaned));
}
indices.lifetimes += 1;
}
hir::GenericParamKind::Type { ref default, .. } => {
- let ty_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let mut j = 0;
let type_ = generic_args.args.iter().find_map(|arg| match arg {
hir::GenericArg::Type(ty) => {
@@ -1571,17 +1580,14 @@ fn maybe_expand_private_type_alias<'tcx>(
_ => None,
});
if let Some(ty) = type_ {
- substs.insert(ty_param_def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx)));
+ substs.insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx)));
} else if let Some(default) = *default {
- substs.insert(
- ty_param_def_id.to_def_id(),
- SubstParam::Type(clean_ty(default, cx)),
- );
+ substs
+ .insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(default, cx)));
}
indices.types += 1;
}
hir::GenericParamKind::Const { .. } => {
- let const_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
let mut j = 0;
let const_ = generic_args.args.iter().find_map(|arg| match arg {
hir::GenericArg::Const(ct) => {
@@ -1595,7 +1601,7 @@ fn maybe_expand_private_type_alias<'tcx>(
});
if let Some(ct) = const_ {
substs.insert(
- const_param_def_id.to_def_id(),
+ param.def_id.to_def_id(),
SubstParam::Constant(clean_const(ct, cx)),
);
}
@@ -1623,7 +1629,6 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
let length = match length {
hir::ArrayLen::Infer(_, _) => "_".to_string(),
hir::ArrayLen::Body(anon_const) => {
- let def_id = cx.tcx.hir().local_def_id(anon_const.hir_id);
// NOTE(min_const_generics): We can't use `const_eval_poly` for constants
// as we currently do not supply the parent generics to anonymous constants
// but do allow `ConstKind::Param`.
@@ -1631,8 +1636,8 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
// `const_eval_poly` tries to first substitute generic parameters which
// results in an ICE while manually constructing the constant and using `eval`
// does nothing for `ConstKind::Param`.
- let ct = ty::Const::from_anon_const(cx.tcx, def_id);
- let param_env = cx.tcx.param_env(def_id);
+ let ct = ty::Const::from_anon_const(cx.tcx, anon_const.def_id);
+ let param_env = cx.tcx.param_env(anon_const.def_id);
print_const(cx, ct.eval(cx.tcx, param_env))
}
};
@@ -1657,7 +1662,7 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
}
TyKind::BareFn(barefn) => BareFunction(Box::new(clean_bare_fn_ty(barefn, cx))),
// Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s.
- TyKind::Infer | TyKind::Err | TyKind::Typeof(..) => Infer,
+ TyKind::Infer | TyKind::Err(_) | TyKind::Typeof(..) => Infer,
}
}
@@ -1854,6 +1859,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
ty::Bound(..) => panic!("Bound"),
ty::Placeholder(..) => panic!("Placeholder"),
ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
+ ty::GeneratorWitnessMIR(..) => panic!("GeneratorWitnessMIR"),
ty::Infer(..) => panic!("Infer"),
ty::Error(_) => rustc_errors::FatalError.raise(),
}
@@ -1885,11 +1891,9 @@ fn clean_middle_opaque_bounds<'tcx>(
_ => return None,
};
- if let Some(sized) = cx.tcx.lang_items().sized_trait() {
- if trait_ref.def_id() == sized {
- has_sized = true;
- return None;
- }
+ if let Some(sized) = cx.tcx.lang_items().sized_trait() && trait_ref.def_id() == sized {
+ has_sized = true;
+ return None;
}
let bindings: ThinVec<_> = bounds
@@ -1928,15 +1932,18 @@ fn clean_middle_opaque_bounds<'tcx>(
}
pub(crate) fn clean_field<'tcx>(field: &hir::FieldDef<'tcx>, cx: &mut DocContext<'tcx>) -> Item {
- let def_id = cx.tcx.hir().local_def_id(field.hir_id).to_def_id();
- clean_field_with_def_id(def_id, field.ident.name, clean_ty(field.ty, cx), cx)
+ clean_field_with_def_id(field.def_id.to_def_id(), field.ident.name, clean_ty(field.ty, cx), cx)
}
pub(crate) fn clean_middle_field<'tcx>(field: &ty::FieldDef, cx: &mut DocContext<'tcx>) -> Item {
clean_field_with_def_id(
field.did,
field.name,
- clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(field.did)), cx, Some(field.did)),
+ clean_middle_ty(
+ ty::Binder::dummy(cx.tcx.type_of(field.did).subst_identity()),
+ cx,
+ Some(field.did),
+ ),
cx,
)
}
@@ -1979,10 +1986,8 @@ fn clean_variant_data<'tcx>(
disr_expr: &Option<hir::AnonConst>,
cx: &mut DocContext<'tcx>,
) -> Variant {
- let discriminant = disr_expr.map(|disr| Discriminant {
- expr: Some(disr.body),
- value: cx.tcx.hir().local_def_id(disr.hir_id).to_def_id(),
- });
+ let discriminant = disr_expr
+ .map(|disr| Discriminant { expr: Some(disr.body), value: disr.def_id.to_def_id() });
let kind = match variant {
hir::VariantData::Struct(..) => VariantKind::Struct(VariantStruct {
@@ -2067,12 +2072,12 @@ struct OneLevelVisitor<'hir> {
map: rustc_middle::hir::map::Map<'hir>,
item: Option<&'hir hir::Item<'hir>>,
looking_for: Ident,
- target_hir_id: hir::HirId,
+ target_def_id: LocalDefId,
}
impl<'hir> OneLevelVisitor<'hir> {
- fn new(map: rustc_middle::hir::map::Map<'hir>, target_hir_id: hir::HirId) -> Self {
- Self { map, item: None, looking_for: Ident::empty(), target_hir_id }
+ fn new(map: rustc_middle::hir::map::Map<'hir>, target_def_id: LocalDefId) -> Self {
+ Self { map, item: None, looking_for: Ident::empty(), target_def_id }
}
fn reset(&mut self, looking_for: Ident) {
@@ -2091,8 +2096,8 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
if self.item.is_none()
&& item.ident == self.looking_for
- && matches!(item.kind, hir::ItemKind::Use(_, _))
- || item.hir_id() == self.target_hir_id
+ && (matches!(item.kind, hir::ItemKind::Use(_, _))
+ || item.owner_id.def_id == self.target_def_id)
{
self.item = Some(item);
}
@@ -2106,41 +2111,168 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
fn get_all_import_attributes<'hir>(
mut item: &hir::Item<'hir>,
tcx: TyCtxt<'hir>,
- target_hir_id: hir::HirId,
+ target_def_id: LocalDefId,
attributes: &mut Vec<ast::Attribute>,
+ is_inline: bool,
) {
+ let mut first = true;
let hir_map = tcx.hir();
- let mut visitor = OneLevelVisitor::new(hir_map, target_hir_id);
+ let mut visitor = OneLevelVisitor::new(hir_map, target_def_id);
let mut visited = FxHashSet::default();
+
// If the item is an import and has at least a path with two parts, we go into it.
- while let hir::ItemKind::Use(path, _) = item.kind &&
- path.segments.len() > 1 &&
- let hir::def::Res::Def(_, def_id) = path.segments[path.segments.len() - 2].res &&
- visited.insert(def_id)
- {
- if let Some(hir::Node::Item(parent_item)) = hir_map.get_if_local(def_id) {
- // We add the attributes from this import into the list.
+ while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) {
+ if first {
+ // This is the "original" reexport so we get all its attributes without filtering them.
attributes.extend_from_slice(hir_map.attrs(item.hir_id()));
- // We get the `Ident` we will be looking for into `item`.
- let looking_for = path.segments[path.segments.len() - 1].ident;
- visitor.reset(looking_for);
- hir::intravisit::walk_item(&mut visitor, parent_item);
- if let Some(i) = visitor.item {
- item = i;
- } else {
- break;
+ first = false;
+ } else {
+ add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()), is_inline);
+ }
+
+ let def_id = if let [.., parent_segment, _] = &path.segments {
+ match parent_segment.res {
+ hir::def::Res::Def(_, def_id) => def_id,
+ _ if parent_segment.ident.name == kw::Crate => {
+ // In case the "parent" is the crate, it'll give `Res::Err` so we need to
+ // circumvent it this way.
+ tcx.parent(item.owner_id.def_id.to_def_id())
+ }
+ _ => break,
}
} else {
+ // If the path doesn't have a parent, then the parent is the current module.
+ tcx.parent(item.owner_id.def_id.to_def_id())
+ };
+
+ let Some(parent) = hir_map.get_if_local(def_id) else { break };
+
+ // We get the `Ident` we will be looking for into `item`.
+ let looking_for = path.segments[path.segments.len() - 1].ident;
+ visitor.reset(looking_for);
+
+ match parent {
+ hir::Node::Item(parent_item) => {
+ hir::intravisit::walk_item(&mut visitor, parent_item);
+ }
+ hir::Node::Crate(m) => {
+ hir::intravisit::walk_mod(
+ &mut visitor,
+ m,
+ tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()),
+ );
+ }
+ _ => break,
+ }
+ if let Some(i) = visitor.item {
+ item = i;
+ } else {
break;
}
}
}
+fn filter_tokens_from_list(
+ args_tokens: TokenStream,
+ should_retain: impl Fn(&TokenTree) -> bool,
+) -> Vec<TokenTree> {
+ let mut tokens = Vec::with_capacity(args_tokens.len());
+ let mut skip_next_comma = false;
+ for token in args_tokens.into_trees() {
+ match token {
+ TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
+ skip_next_comma = false;
+ }
+ token if should_retain(&token) => {
+ skip_next_comma = false;
+ tokens.push(token);
+ }
+ _ => {
+ skip_next_comma = true;
+ }
+ }
+ }
+ tokens
+}
+
+/// When inlining items, we merge its attributes (and all the reexports attributes too) with the
+/// final reexport. For example:
+///
+/// ```ignore (just an example)
+/// #[doc(hidden, cfg(feature = "foo"))]
+/// pub struct Foo;
+///
+/// #[doc(cfg(feature = "bar"))]
+/// #[doc(hidden, no_inline)]
+/// pub use Foo as Foo1;
+///
+/// #[doc(inline)]
+/// pub use Foo2 as Bar;
+/// ```
+///
+/// So `Bar` at the end will have both `cfg(feature = "...")`. However, we don't want to merge all
+/// attributes so we filter out the following ones:
+/// * `doc(inline)`
+/// * `doc(no_inline)`
+/// * `doc(hidden)`
+fn add_without_unwanted_attributes(
+ attrs: &mut Vec<ast::Attribute>,
+ new_attrs: &[ast::Attribute],
+ is_inline: bool,
+) {
+ // If it's `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything.
+ if !is_inline {
+ attrs.extend_from_slice(new_attrs);
+ return;
+ }
+ for attr in new_attrs {
+ let mut attr = attr.clone();
+ match attr.kind {
+ ast::AttrKind::Normal(ref mut normal) => {
+ if let [ident] = &*normal.item.path.segments &&
+ let ident = ident.ident.name &&
+ ident == sym::doc
+ {
+ match normal.item.args {
+ ast::AttrArgs::Delimited(ref mut args) => {
+ let tokens =
+ filter_tokens_from_list(args.tokens.clone(), |token| {
+ !matches!(
+ token,
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(
+ sym::hidden | sym::inline | sym::no_inline,
+ _,
+ ),
+ ..
+ },
+ _,
+ ),
+ )
+ });
+ args.tokens = TokenStream::new(tokens);
+ attrs.push(attr);
+ }
+ ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
+ attrs.push(attr);
+ continue;
+ }
+ }
+ }
+ }
+ ast::AttrKind::DocComment(..) => {
+ attrs.push(attr);
+ }
+ }
+ }
+}
+
fn clean_maybe_renamed_item<'tcx>(
cx: &mut DocContext<'tcx>,
item: &hir::Item<'tcx>,
renamed: Option<Symbol>,
- import_id: Option<hir::HirId>,
+ import_id: Option<LocalDefId>,
) -> Vec<Item> {
use hir::ItemKind;
@@ -2185,7 +2317,7 @@ fn clean_maybe_renamed_item<'tcx>(
generics: clean_generics(generics, cx),
fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
}),
- ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx),
+ ItemKind::Impl(impl_) => return clean_impl(impl_, item.owner_id.def_id, cx),
// proc macros can have a name set by attributes
ItemKind::Fn(ref sig, generics, body_id) => {
clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
@@ -2218,40 +2350,38 @@ fn clean_maybe_renamed_item<'tcx>(
_ => unreachable!("not yet converted"),
};
- let mut extra_attrs = Vec::new();
- if let Some(hir::Node::Item(use_node)) =
- import_id.and_then(|hir_id| cx.tcx.hir().find(hir_id))
+ let mut import_attrs = Vec::new();
+ let mut target_attrs = Vec::new();
+ if let Some(import_id) = import_id &&
+ let Some(hir::Node::Item(use_node)) = cx.tcx.hir().find_by_def_id(import_id)
{
- // We get all the various imports' attributes.
- get_all_import_attributes(use_node, cx.tcx, item.hir_id(), &mut extra_attrs);
- }
-
- if !extra_attrs.is_empty() {
- extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
- let attrs = Attributes::from_ast(&extra_attrs);
- let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
-
- vec![Item::from_def_id_and_attrs_and_parts(
- def_id,
- Some(name),
- kind,
- Box::new(attrs),
- cfg,
- )]
+ let is_inline = inline::load_attrs(cx, import_id.to_def_id()).lists(sym::doc).get_word_attr(sym::inline).is_some();
+ // Then we get all the various imports' attributes.
+ get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut import_attrs, is_inline);
+ add_without_unwanted_attributes(&mut target_attrs, inline::load_attrs(cx, def_id), is_inline);
} else {
- vec![Item::from_def_id_and_parts(def_id, Some(name), kind, cx)]
+ // We only keep the item's attributes.
+ target_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
}
+
+ let import_parent = import_id.map(|import_id| cx.tcx.local_parent(import_id).to_def_id());
+ let (attrs, cfg) = merge_attrs(cx, import_parent, &target_attrs, Some(&import_attrs));
+
+ let mut item =
+ Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);
+ item.inline_stmt_id = import_id.map(|def_id| def_id.to_def_id());
+ vec![item]
})
}
fn clean_variant<'tcx>(variant: &hir::Variant<'tcx>, cx: &mut DocContext<'tcx>) -> Item {
let kind = VariantItem(clean_variant_data(&variant.data, &variant.disr_expr, cx));
- Item::from_hir_id_and_parts(variant.hir_id, Some(variant.ident.name), kind, cx)
+ Item::from_def_id_and_parts(variant.def_id.to_def_id(), Some(variant.ident.name), kind, cx)
}
fn clean_impl<'tcx>(
impl_: &hir::Impl<'tcx>,
- hir_id: hir::HirId,
+ def_id: LocalDefId,
cx: &mut DocContext<'tcx>,
) -> Vec<Item> {
let tcx = cx.tcx;
@@ -2262,7 +2392,6 @@ fn clean_impl<'tcx>(
.iter()
.map(|ii| clean_impl_item(tcx.hir().impl_item(ii.id), cx))
.collect::<Vec<_>>();
- let def_id = tcx.hir().local_def_id(hir_id);
// If this impl block is an implementation of the Deref trait, then we
// need to try inlining the target's inherent impl blocks as well.
@@ -2272,9 +2401,11 @@ fn clean_impl<'tcx>(
let for_ = clean_ty(impl_.self_ty, cx);
let type_alias = for_.def_id(&cx.cache).and_then(|did| match tcx.def_kind(did) {
- DefKind::TyAlias => {
- Some(clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did)))
- }
+ DefKind::TyAlias => Some(clean_middle_ty(
+ ty::Binder::dummy(tcx.type_of(did).subst_identity()),
+ cx,
+ Some(did),
+ )),
_ => None,
});
let mut make_item = |trait_: Option<Path>, for_: Type, items: Vec<Item>| {
@@ -2291,7 +2422,7 @@ fn clean_impl<'tcx>(
ImplKind::Normal
},
}));
- Item::from_hir_id_and_parts(hir_id, None, kind, cx)
+ Item::from_def_id_and_parts(def_id.to_def_id(), None, kind, cx)
};
if let Some(type_alias) = type_alias {
ret.push(make_item(trait_.clone(), type_alias, items.clone()));
@@ -2323,7 +2454,7 @@ fn clean_extern_crate<'tcx>(
let krate_owner_def_id = krate.owner_id.to_def_id();
if please_inline {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
let res = Res::Def(DefKind::Mod, crate_def_id);
@@ -2405,17 +2536,15 @@ fn clean_use_statement_inner<'tcx>(
let is_visible_from_parent_mod =
visibility.is_accessible_from(parent_mod, cx.tcx) && !current_mod.is_top_level_module();
- if pub_underscore {
- if let Some(ref inline) = inline_attr {
- rustc_errors::struct_span_err!(
- cx.tcx.sess,
- inline.span(),
- E0780,
- "anonymous imports cannot be inlined"
- )
- .span_label(import.span, "anonymous import")
- .emit();
- }
+ if pub_underscore && let Some(ref inline) = inline_attr {
+ rustc_errors::struct_span_err!(
+ cx.tcx.sess,
+ inline.span(),
+ E0780,
+ "anonymous imports cannot be inlined"
+ )
+ .span_label(import.span, "anonymous import")
+ .emit();
}
// We consider inlining the documentation of `pub use` statements, but we
@@ -2442,7 +2571,7 @@ fn clean_use_statement_inner<'tcx>(
let path = clean_path(path, cx);
let inner = if kind == hir::UseKind::Glob {
if !denied {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
if let Some(items) =
inline::try_inline_glob(cx, path.res, current_mod, &mut visited, inlined_names)
{
@@ -2451,17 +2580,16 @@ fn clean_use_statement_inner<'tcx>(
}
Import::new_glob(resolve_use_source(cx, path), true)
} else {
- if inline_attr.is_none() {
- if let Res::Def(DefKind::Mod, did) = path.res {
- if !did.is_local() && did.is_crate_root() {
- // if we're `pub use`ing an extern crate root, don't inline it unless we
- // were specifically asked for it
- denied = true;
- }
- }
+ if inline_attr.is_none()
+ && let Res::Def(DefKind::Mod, did) = path.res
+ && !did.is_local() && did.is_crate_root()
+ {
+ // if we're `pub use`ing an extern crate root, don't inline it unless we
+ // were specifically asked for it
+ denied = true;
}
if !denied {
- let mut visited = FxHashSet::default();
+ let mut visited = DefIdSet::default();
let import_def_id = import.owner_id.to_def_id();
if let Some(mut items) = inline::try_inline(
@@ -2512,8 +2640,8 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
hir::ForeignItemKind::Type => ForeignTypeItem,
};
- Item::from_hir_id_and_parts(
- item.hir_id(),
+ Item::from_def_id_and_parts(
+ item.owner_id.def_id.to_def_id(),
Some(renamed.unwrap_or(item.ident.name)),
kind,
cx,
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index ed7683e36..ef38ca3c1 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -63,7 +63,8 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String
let snippet = source_map.span_to_snippet(span).ok()?;
// Create a Parser.
- let sess = ParseSess::new(FilePathMapping::empty());
+ let sess =
+ ParseSess::new(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), FilePathMapping::empty());
let file_name = source_map.span_to_filename(span);
let mut parser =
match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 87de41fde..27d18aad7 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -5,13 +5,12 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::OnceLock as OnceCell;
-use std::{cmp, fmt, iter};
+use std::{fmt, iter};
use arrayvec::ArrayVec;
use thin_vec::ThinVec;
-use rustc_ast::util::comments::beautify_doc_string;
-use rustc_ast::{self as ast, AttrStyle};
+use rustc_ast as ast;
use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel};
use rustc_const_eval::const_eval::is_unstable_const_fn;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -24,6 +23,7 @@ use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety;
use rustc_index::vec::IndexVec;
use rustc_middle::ty::fast_reject::SimplifiedType;
use rustc_middle::ty::{self, DefIdTree, TyCtxt, Visibility};
+use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment};
use rustc_session::Session;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -182,10 +182,8 @@ impl ExternalCrate {
return Local;
}
- if extern_url_takes_precedence {
- if let Some(url) = extern_url {
- return to_remote(url);
- }
+ if extern_url_takes_precedence && let Some(url) = extern_url {
+ return to_remote(url);
}
// Failing that, see if there's an attribute specifying where to find this
@@ -405,7 +403,7 @@ impl Item {
pub(crate) fn inner_docs(&self, tcx: TyCtxt<'_>) -> bool {
self.item_id
.as_def_id()
- .map(|did| tcx.get_attrs_unchecked(did).inner_docs())
+ .map(|did| inner_docs(tcx.get_attrs_unchecked(did)))
.unwrap_or(false)
}
@@ -439,17 +437,6 @@ impl Item {
self.attrs.doc_value()
}
- /// Convenience wrapper around [`Self::from_def_id_and_parts`] which converts
- /// `hir_id` to a [`DefId`]
- pub(crate) fn from_hir_id_and_parts(
- hir_id: hir::HirId,
- name: Option<Symbol>,
- kind: ItemKind,
- cx: &mut DocContext<'_>,
- ) -> Item {
- Item::from_def_id_and_parts(cx.tcx.hir().local_def_id(hir_id).to_def_id(), name, kind, cx)
- }
-
pub(crate) fn from_def_id_and_parts(
def_id: DefId,
name: Option<Symbol>,
@@ -493,16 +480,16 @@ impl Item {
}
pub(crate) fn links(&self, cx: &Context<'_>) -> Vec<RenderedLink> {
- use crate::html::format::href;
+ use crate::html::format::{href, link_tooltip};
cx.cache()
.intra_doc_links
.get(&self.item_id)
.map_or(&[][..], |v| v.as_slice())
.iter()
- .filter_map(|ItemLink { link: s, link_text, page_id: did, ref fragment }| {
- debug!(?did);
- if let Ok((mut href, ..)) = href(*did, cx) {
+ .filter_map(|ItemLink { link: s, link_text, page_id: id, ref fragment }| {
+ debug!(?id);
+ if let Ok((mut href, ..)) = href(*id, cx) {
debug!(?href);
if let Some(ref fragment) = *fragment {
fragment.render(&mut href, cx.tcx())
@@ -510,6 +497,7 @@ impl Item {
Some(RenderedLink {
original_text: s.clone(),
new_text: link_text.clone(),
+ tooltip: link_tooltip(*id, fragment, cx),
href,
})
} else {
@@ -534,6 +522,7 @@ impl Item {
original_text: s.clone(),
new_text: link_text.clone(),
href: String::new(),
+ tooltip: String::new(),
})
.collect()
}
@@ -665,7 +654,7 @@ impl Item {
tcx: TyCtxt<'_>,
asyncness: hir::IsAsync,
) -> hir::FnHeader {
- let sig = tcx.fn_sig(def_id);
+ let sig = tcx.fn_sig(def_id).skip_binder();
let constness =
if tcx.is_const_fn(def_id) && is_unstable_const_fn(tcx, def_id).is_none() {
hir::Constness::Const
@@ -677,7 +666,7 @@ impl Item {
let header = match *self.kind {
ItemKind::ForeignFunctionItem(_) => {
let def_id = self.item_id.as_def_id().unwrap();
- let abi = tcx.fn_sig(def_id).abi();
+ let abi = tcx.fn_sig(def_id).skip_binder().abi();
hir::FnHeader {
unsafety: if abi == Abi::RustIntrinsic {
intrinsic_operation_unsafety(tcx, self.item_id.as_def_id().unwrap())
@@ -885,8 +874,6 @@ pub(crate) trait AttributesExt {
fn span(&self) -> Option<rustc_span::Span>;
- fn inner_docs(&self) -> bool;
-
fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>>;
}
@@ -905,14 +892,6 @@ impl AttributesExt for [ast::Attribute] {
self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span)
}
- /// Returns whether the first doc-comment is an inner attribute.
- ///
- //// If there are no doc-comments, return true.
- /// FIXME(#78591): Support both inner and outer attributes on the same item.
- fn inner_docs(&self) -> bool {
- self.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == AttrStyle::Inner)
- }
-
fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>> {
let sess = tcx.sess;
let doc_cfg_active = tcx.features().doc_cfg;
@@ -1021,58 +1000,6 @@ impl<I: Iterator<Item = ast::NestedMetaItem>> NestedAttributesExt for I {
}
}
-/// A portion of documentation, extracted from a `#[doc]` attribute.
-///
-/// Each variant contains the line number within the complete doc-comment where the fragment
-/// starts, as well as the Span where the corresponding doc comment or attribute is located.
-///
-/// Included files are kept separate from inline doc comments so that proper line-number
-/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are
-/// kept separate because of issue #42760.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub(crate) struct DocFragment {
- pub(crate) span: rustc_span::Span,
- /// The module this doc-comment came from.
- ///
- /// This allows distinguishing between the original documentation and a pub re-export.
- /// If it is `None`, the item was not re-exported.
- pub(crate) parent_module: Option<DefId>,
- pub(crate) doc: Symbol,
- pub(crate) kind: DocFragmentKind,
- pub(crate) indent: usize,
-}
-
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
-pub(crate) enum DocFragmentKind {
- /// A doc fragment created from a `///` or `//!` doc comment.
- SugaredDoc,
- /// A doc fragment created from a "raw" `#[doc=""]` attribute.
- RawDoc,
-}
-
-/// The goal of this function is to apply the `DocFragment` transformation that is required when
-/// transforming into the final Markdown, which is applying the computed indent to each line in
-/// each doc fragment (a `DocFragment` can contain multiple lines in case of `#[doc = ""]`).
-///
-/// Note: remove the trailing newline where appropriate
-fn add_doc_fragment(out: &mut String, frag: &DocFragment) {
- let s = frag.doc.as_str();
- let mut iter = s.lines();
- if s.is_empty() {
- out.push('\n');
- return;
- }
- while let Some(line) = iter.next() {
- if line.chars().any(|c| !c.is_whitespace()) {
- assert!(line.len() >= frag.indent);
- out.push_str(&line[frag.indent..]);
- } else {
- out.push_str(line);
- }
- out.push('\n');
- }
-}
-
/// Collapse a collection of [`DocFragment`]s into one string,
/// handling indentation and newlines as needed.
pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
@@ -1084,98 +1011,18 @@ pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
acc
}
-/// Removes excess indentation on comments in order for the Markdown
-/// to be parsed correctly. This is necessary because the convention for
-/// writing documentation is to provide a space between the /// or //! marker
-/// and the doc text, but Markdown is whitespace-sensitive. For example,
-/// a block of text with four-space indentation is parsed as a code block,
-/// so if we didn't unindent comments, these list items
-///
-/// /// A list:
-/// ///
-/// /// - Foo
-/// /// - Bar
-///
-/// would be parsed as if they were in a code block, which is likely not what the user intended.
-fn unindent_doc_fragments(docs: &mut Vec<DocFragment>) {
- // `add` is used in case the most common sugared doc syntax is used ("/// "). The other
- // fragments kind's lines are never starting with a whitespace unless they are using some
- // markdown formatting requiring it. Therefore, if the doc block have a mix between the two,
- // we need to take into account the fact that the minimum indent minus one (to take this
- // whitespace into account).
- //
- // For example:
- //
- // /// hello!
- // #[doc = "another"]
- //
- // In this case, you want "hello! another" and not "hello! another".
- let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind)
- && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc)
- {
- // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to
- // "decide" how much the minimum indent will be.
- 1
- } else {
- 0
- };
-
- // `min_indent` is used to know how much whitespaces from the start of each lines must be
- // removed. Example:
- //
- // /// hello!
- // #[doc = "another"]
- //
- // In here, the `min_indent` is 1 (because non-sugared fragment are always counted with minimum
- // 1 whitespace), meaning that "hello!" will be considered a codeblock because it starts with 4
- // (5 - 1) whitespaces.
- let Some(min_indent) = docs
- .iter()
- .map(|fragment| {
- fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| {
- if line.chars().all(|c| c.is_whitespace()) {
- min_indent
- } else {
- // Compare against either space or tab, ignoring whether they are
- // mixed or not.
- let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
- cmp::min(min_indent, whitespace)
- + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add }
- }
- })
- })
- .min()
- else {
- return;
- };
-
- for fragment in docs {
- if fragment.doc == kw::Empty {
- continue;
- }
-
- let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 {
- min_indent - add
- } else {
- min_indent
- };
-
- fragment.indent = min_indent;
- }
-}
-
/// A link that has not yet been rendered.
///
/// This link will be turned into a rendered link by [`Item::links`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct ItemLink {
/// The original link written in the markdown
- pub(crate) link: String,
+ pub(crate) link: Box<str>,
/// The link text displayed in the HTML.
///
/// This may not be the same as `link` if there was a disambiguator
/// in an intra-doc link (e.g. \[`fn@f`\])
- pub(crate) link_text: String,
+ pub(crate) link_text: Box<str>,
/// The `DefId` of the Item whose **HTML Page** contains the item being
/// linked to. This will be different to `item_id` on item's that don't
/// have their own page, such as struct fields and enum variants.
@@ -1188,11 +1035,13 @@ pub struct RenderedLink {
/// The text the link was original written as.
///
/// This could potentially include disambiguators and backticks.
- pub(crate) original_text: String,
+ pub(crate) original_text: Box<str>,
/// The text to display in the HTML
- pub(crate) new_text: String,
+ pub(crate) new_text: Box<str>,
/// The URL to put in the `href`
pub(crate) href: String,
+ /// The tooltip.
+ pub(crate) tooltip: String,
}
/// The attributes on an [`Item`], including attributes like `#[derive(...)]` and `#[inline]`,
@@ -1242,26 +1091,7 @@ impl Attributes {
attrs: impl Iterator<Item = (&'a ast::Attribute, Option<DefId>)>,
doc_only: bool,
) -> Attributes {
- let mut doc_strings = Vec::new();
- let mut other_attrs = ast::AttrVec::new();
- for (attr, parent_module) in attrs {
- if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
- trace!("got doc_str={doc_str:?}");
- let doc = beautify_doc_string(doc_str, comment_kind);
- let kind = if attr.is_doc_comment() {
- DocFragmentKind::SugaredDoc
- } else {
- DocFragmentKind::RawDoc
- };
- let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 };
- doc_strings.push(fragment);
- } else if !doc_only {
- other_attrs.push(attr.clone());
- }
- }
-
- unindent_doc_fragments(&mut doc_strings);
-
+ let (doc_strings, other_attrs) = attrs_to_doc_fragments(attrs, doc_only);
Attributes { doc_strings, other_attrs }
}
@@ -1280,20 +1110,6 @@ impl Attributes {
if out.is_empty() { None } else { Some(out) }
}
- /// Return the doc-comments on this item, grouped by the module they came from.
- /// The module can be different if this is a re-export with added documentation.
- ///
- /// The last newline is not trimmed so the produced strings are reusable between
- /// early and late doc link resolution regardless of their position.
- pub(crate) fn prepare_to_doc_link_resolution(&self) -> FxHashMap<Option<DefId>, String> {
- let mut res = FxHashMap::default();
- for fragment in &self.doc_strings {
- let out_str = res.entry(fragment.parent_module).or_default();
- add_doc_fragment(out_str, fragment);
- }
- res
- }
-
/// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
/// with newlines.
pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
@@ -1358,10 +1174,10 @@ impl GenericBound {
pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
use rustc_hir::TraitBoundModifier as TBM;
- if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self {
- if Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() {
- return true;
- }
+ if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self &&
+ Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait()
+ {
+ return true;
}
false
}
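
Several hunks in this patch, including `is_sized_bound` above, flatten nested `if let`/`if` pairs into a single chained condition. A standalone sketch of the same rewrite on a plain `Option`, assuming a nightly toolchain with the unstable `let_chains` feature that rustc's own crates enable:

```rust
// Illustrative only, on plain Options rather than rustc types.
#![feature(let_chains)]

fn first_even(values: &[Option<i32>]) -> Option<i32> {
    for v in values.iter().copied() {
        // Before: `if let Some(n) = v { if n % 2 == 0 { return Some(n); } }`
        // After: one condition chained with `&&`, one level of nesting removed.
        if let Some(n) = v && n % 2 == 0 {
            return Some(n);
        }
    }
    None
}

fn main() {
    assert_eq!(first_even(&[None, Some(3), Some(4)]), Some(4));
}
```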
@@ -2416,10 +2232,7 @@ impl ConstantKind {
pub(crate) fn is_literal(&self, tcx: TyCtxt<'_>) -> bool {
match *self {
- ConstantKind::TyConst { .. } => false,
- ConstantKind::Extern { def_id } => def_id.as_local().map_or(false, |def_id| {
- is_literal_expr(tcx, tcx.hir().local_def_id_to_hir_id(def_id))
- }),
+ ConstantKind::TyConst { .. } | ConstantKind::Extern { .. } => false,
ConstantKind::Local { body, .. } | ConstantKind::Anonymous { body } => {
is_literal_expr(tcx, body.hir_id)
}
@@ -2498,14 +2311,7 @@ impl Import {
}
pub(crate) fn imported_item_is_doc_hidden(&self, tcx: TyCtxt<'_>) -> bool {
- match self.source.did {
- Some(did) => tcx
- .get_attrs(did, sym::doc)
- .filter_map(ast::Attribute::meta_item_list)
- .flatten()
- .has_word(sym::hidden),
- None => false,
- }
+ self.source.did.map_or(false, |did| tcx.is_doc_hidden(did))
}
}
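
`imported_item_is_doc_hidden` now folds the `match` on `Option` into `map_or` and defers the attribute check to `tcx.is_doc_hidden`. A sketch of the same `map_or` shape with plain types rather than `TyCtxt`/`DefId`:

```rust
// Sketch only: `Option::map_or` collapses the Some/None match into a single
// expression with an explicit default for the `None` case.
fn is_hidden(did: Option<u32>, hidden_ids: &[u32]) -> bool {
    did.map_or(false, |id| hidden_ids.contains(&id))
}

fn main() {
    assert!(is_hidden(Some(3), &[1, 3]));
    assert!(!is_hidden(None, &[1, 3]));
    assert!(!is_hidden(Some(2), &[1, 3]));
}
```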
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
index 71eddf434..20627c2cf 100644
--- a/src/librustdoc/clean/types/tests.rs
+++ b/src/librustdoc/clean/types/tests.rs
@@ -2,6 +2,7 @@ use super::*;
use crate::clean::collapse_doc_fragments;
+use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind};
use rustc_span::create_default_session_globals_then;
use rustc_span::source_map::DUMMY_SP;
use rustc_span::symbol::Symbol;
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index a12f764fa..c9c1c2c45 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -29,11 +29,6 @@ mod tests;
pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
let module = crate::visit_ast::RustdocVisitor::new(cx).visit();
- for &cnum in cx.tcx.crates(()) {
- // Analyze doc-reachability for extern items
- crate::visit_lib::lib_embargo_visit_item(cx, cnum.as_def_id());
- }
-
// Clean the crate, translating the entire librustc_ast AST to one that is
// understood by rustdoc.
let mut module = clean_doc_module(&module, cx);
@@ -134,7 +129,7 @@ fn external_generic_args<'tcx>(
});
GenericArgs::Parenthesized { inputs, output }
} else {
- GenericArgs::AngleBracketed { args: args.into(), bindings: bindings.into() }
+ GenericArgs::AngleBracketed { args: args.into(), bindings }
}
}
@@ -271,7 +266,7 @@ pub(crate) fn print_evaluated_const(
underscores_and_type: bool,
) -> Option<String> {
tcx.const_eval_poly(def_id).ok().and_then(|val| {
- let ty = tcx.type_of(def_id);
+ let ty = tcx.type_of(def_id).subst_identity();
match (val, ty.kind()) {
(_, &ty::Ref(..)) => None,
(ConstValue::Scalar(_), &ty::Adt(_, _)) => None,
@@ -350,10 +345,10 @@ pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
return true;
}
- if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind {
- if let hir::ExprKind::Lit(_) = &expr.kind {
- return true;
- }
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind &&
+ let hir::ExprKind::Lit(_) = &expr.kind
+ {
+ return true;
}
}
@@ -475,6 +470,12 @@ pub(crate) fn get_auto_trait_and_blanket_impls(
cx: &mut DocContext<'_>,
item_def_id: DefId,
) -> impl Iterator<Item = Item> {
+ // FIXME: To be removed once `parallel_compiler` bugs are fixed!
+ // More information in <https://github.com/rust-lang/rust/pull/106930>.
+ if cfg!(parallel_compiler) {
+ return vec![].into_iter().chain(vec![].into_iter());
+ }
+
let auto_impls = cx
.sess()
.prof
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index 56b40d8c6..2c514a0c8 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -509,7 +509,7 @@ impl Options {
// these values up both in `dataset` and in the storage API, so it needs to be able
// to convert the names back and forth. Despite doing this kebab-case to
// StudlyCaps transformation automatically, the JS DOM API does not provide a
- // mechanism for doing the just transformation on a string. So we want to avoid
+ // mechanism for doing just the transformation on a string. So we want to avoid
// the StudlyCaps representation in the `dataset` property.
//
// We solve this by replacing all the `-`s with `_`s. We do that here, when we
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index 2153e7d8c..fbfc58a43 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -1,23 +1,22 @@
-use rustc_ast::NodeId;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::{self, Lrc};
use rustc_data_structures::unord::UnordSet;
use rustc_errors::emitter::{Emitter, EmitterWriter};
use rustc_errors::json::JsonEmitter;
+use rustc_errors::TerminalUrl;
use rustc_feature::UnstableFeatures;
-use rustc_hir::def::{Namespace, Res};
-use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
+use rustc_hir::def::Res;
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId};
use rustc_hir::intravisit::{self, Visitor};
-use rustc_hir::{HirId, Path, TraitCandidate};
+use rustc_hir::{HirId, Path};
use rustc_interface::interface;
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{ParamEnv, Ty, TyCtxt};
-use rustc_resolve as resolve;
-use rustc_session::config::{self, CrateType, ErrorOutputType};
+use rustc_session::config::{self, CrateType, ErrorOutputType, ResolveDocLinks};
use rustc_session::lint;
use rustc_session::Session;
use rustc_span::symbol::sym;
-use rustc_span::{source_map, Span, Symbol};
+use rustc_span::{source_map, Span};
use std::cell::RefCell;
use std::mem;
@@ -28,30 +27,12 @@ use crate::clean::inline::build_external_trait;
use crate::clean::{self, ItemId};
use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions};
use crate::formats::cache::Cache;
-use crate::passes::collect_intra_doc_links::PreprocessedMarkdownLink;
use crate::passes::{self, Condition::*};
pub(crate) use rustc_session::config::{Input, Options, UnstableOptions};
-pub(crate) struct ResolverCaches {
- pub(crate) markdown_links: Option<FxHashMap<String, Vec<PreprocessedMarkdownLink>>>,
- pub(crate) doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option<Res<NodeId>>>,
- /// Traits in scope for a given module.
- /// See `collect_intra_doc_links::traits_implemented_by` for more details.
- pub(crate) traits_in_scope: DefIdMap<Vec<TraitCandidate>>,
- pub(crate) all_trait_impls: Option<Vec<DefId>>,
- pub(crate) all_macro_rules: FxHashMap<Symbol, Res<NodeId>>,
-}
-
pub(crate) struct DocContext<'tcx> {
pub(crate) tcx: TyCtxt<'tcx>,
- /// Name resolver. Used for intra-doc links.
- ///
- /// The `Rc<RefCell<...>>` wrapping is needed because that is what's returned by
- /// [`rustc_interface::Queries::expansion()`].
- // FIXME: see if we can get rid of this RefCell somehow
- pub(crate) resolver: Rc<RefCell<interface::BoxedResolver>>,
- pub(crate) resolver_caches: ResolverCaches,
/// Used for normalization.
///
/// Most of this logic is copied from rustc_lint::late.
@@ -60,11 +41,11 @@ pub(crate) struct DocContext<'tcx> {
pub(crate) external_traits: Rc<RefCell<FxHashMap<DefId, clean::Trait>>>,
/// Used while populating `external_traits` to ensure we don't process the same trait twice at
/// the same time.
- pub(crate) active_extern_traits: FxHashSet<DefId>,
+ pub(crate) active_extern_traits: DefIdSet,
// The current set of parameter substitutions,
// for expanding type aliases at the HIR level:
/// Table `DefId` of type, lifetime, or const parameter -> substituted type, lifetime, or const
- pub(crate) substs: FxHashMap<DefId, clean::SubstParam>,
+ pub(crate) substs: DefIdMap<clean::SubstParam>,
/// Table synthetic type parameter for `impl Trait` in argument position -> bounds
pub(crate) impl_trait_bounds: FxHashMap<ImplTraitParam, Vec<clean::GenericBound>>,
/// Auto-trait or blanket impls processed so far, as `(self_ty, trait_def_id)`.
@@ -99,20 +80,9 @@ impl<'tcx> DocContext<'tcx> {
ret
}
- pub(crate) fn enter_resolver<F, R>(&self, f: F) -> R
- where
- F: FnOnce(&mut resolve::Resolver<'_>) -> R,
- {
- self.resolver.borrow_mut().access(f)
- }
-
/// Call the closure with the given parameters set as
/// the substitutions for a type alias' RHS.
- pub(crate) fn enter_alias<F, R>(
- &mut self,
- substs: FxHashMap<DefId, clean::SubstParam>,
- f: F,
- ) -> R
+ pub(crate) fn enter_alias<F, R>(&mut self, substs: DefIdMap<clean::SubstParam>, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
{
@@ -133,12 +103,6 @@ impl<'tcx> DocContext<'tcx> {
_ => None,
}
}
-
- pub(crate) fn with_all_trait_impls(&mut self, f: impl FnOnce(&mut Self, &[DefId])) {
- let all_trait_impls = self.resolver_caches.all_trait_impls.take();
- f(self, all_trait_impls.as_ref().expect("`all_trait_impls` are already borrowed"));
- self.resolver_caches.all_trait_impls = all_trait_impls;
- }
}
/// Creates a new diagnostic `Handler` that can be used to emit warnings and errors.
@@ -151,8 +115,10 @@ pub(crate) fn new_handler(
diagnostic_width: Option<usize>,
unstable_opts: &UnstableOptions,
) -> rustc_errors::Handler {
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+ rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
+ false,
+ );
let emitter: Box<dyn Emitter + sync::Send> = match error_format {
ErrorOutputType::HumanReadable(kind) => {
let (short, color_config) = kind.unzip();
@@ -167,6 +133,7 @@ pub(crate) fn new_handler(
diagnostic_width,
false,
unstable_opts.track_diagnostics,
+ TerminalUrl::No,
)
.ui_testing(unstable_opts.ui_testing),
)
@@ -186,6 +153,7 @@ pub(crate) fn new_handler(
diagnostic_width,
false,
unstable_opts.track_diagnostics,
+ TerminalUrl::No,
)
.ui_testing(unstable_opts.ui_testing),
)
@@ -221,6 +189,7 @@ pub(crate) fn create_config(
scrape_examples_options,
..
}: RustdocOptions,
+ RenderOptions { document_private, .. }: &RenderOptions,
) -> rustc_interface::Config {
// Add the doc cfg into the doc build.
cfgs.push("doc".to_string());
@@ -248,6 +217,13 @@ pub(crate) fn create_config(
let crate_types =
if proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] };
+ let resolve_doc_links = if *document_private {
+ ResolveDocLinks::All
+ } else {
+ // Should be `ResolveDocLinks::Exported` in theory, but for some reason rustdoc
+ // still tries to request resolutions for links on private items.
+ ResolveDocLinks::All
+ };
let test = scrape_examples_options.map(|opts| opts.scrape_tests).unwrap_or(false);
// plays with error output here!
let sessopts = config::Options {
@@ -261,6 +237,7 @@ pub(crate) fn create_config(
target_triple: target,
unstable_features: UnstableFeatures::from_environment(crate_name.as_deref()),
actually_rustdoc: true,
+ resolve_doc_links,
unstable_opts,
error_format,
diagnostic_width,
@@ -279,6 +256,7 @@ pub(crate) fn create_config(
output_file: None,
output_dir: None,
file_loader: None,
+ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES,
lint_caps,
parse_sess_created: None,
register_lints: Some(Box::new(crate::lint::register_lints)),
@@ -316,8 +294,6 @@ pub(crate) fn create_config(
pub(crate) fn run_global_ctxt(
tcx: TyCtxt<'_>,
- resolver: Rc<RefCell<interface::BoxedResolver>>,
- resolver_caches: ResolverCaches,
show_coverage: bool,
render_options: RenderOptions,
output_format: OutputFormat,
@@ -351,8 +327,6 @@ pub(crate) fn run_global_ctxt(
let mut ctxt = DocContext {
tcx,
- resolver,
- resolver_caches,
param_env: ParamEnv::empty(),
external_traits: Default::default(),
active_extern_traits: Default::default(),
@@ -367,6 +341,10 @@ pub(crate) fn run_global_ctxt(
show_coverage,
};
+ for cnum in tcx.crates(()) {
+ crate::visit_lib::lib_embargo_visit_item(&mut ctxt, cnum.as_def_id());
+ }
+
// Small hack to force the Sized trait to be present.
//
// Note that in case of `#![no_core]`, the trait is not available.
diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs
index c1a652c75..9cf84acc7 100644
--- a/src/librustdoc/doctest.rs
+++ b/src/librustdoc/doctest.rs
@@ -1,11 +1,9 @@
use rustc_ast as ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::Lrc;
-use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError};
-use rustc_hir as hir;
-use rustc_hir::def_id::LOCAL_CRATE;
-use rustc_hir::intravisit;
-use rustc_hir::{HirId, CRATE_HIR_ID};
+use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError, TerminalUrl};
+use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
+use rustc_hir::{self as hir, intravisit, CRATE_HIR_ID};
use rustc_interface::interface;
use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter;
@@ -98,6 +96,7 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
output_file: None,
output_dir: None,
file_loader: None,
+ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES,
lint_caps,
parse_sess_created: None,
register_lints: Some(Box::new(crate::lint::register_lints)),
@@ -140,7 +139,7 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
};
hir_collector.visit_testable(
"".to_string(),
- CRATE_HIR_ID,
+ CRATE_DEF_ID,
tcx.hir().span(CRATE_HIR_ID),
|this| tcx.hir().walk_toplevel_module(this),
);
@@ -231,11 +230,11 @@ fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions {
if attr.has_name(sym::no_crate_inject) {
opts.no_crate_inject = true;
}
- if attr.has_name(sym::attr) {
- if let Some(l) = attr.meta_item_list() {
- for item in l {
- opts.attrs.push(pprust::meta_list_item_to_string(item));
- }
+ if attr.has_name(sym::attr)
+ && let Some(l) = attr.meta_item_list()
+ {
+ for item in l {
+ opts.attrs.push(pprust::meta_list_item_to_string(item));
}
}
}
@@ -547,8 +546,10 @@ pub(crate) fn make_test(
// Any errors in parsing should also appear when the doctest is compiled for real, so just
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+ rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
+ false,
+ );
supports_color = EmitterWriter::stderr(
ColorConfig::Auto,
None,
@@ -559,6 +560,7 @@ pub(crate) fn make_test(
Some(80),
false,
false,
+ TerminalUrl::No,
)
.supports_color();
@@ -573,6 +575,7 @@ pub(crate) fn make_test(
None,
false,
false,
+ TerminalUrl::No,
);
// FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
@@ -594,31 +597,28 @@ pub(crate) fn make_test(
loop {
match parser.parse_item(ForceCollect::No) {
Ok(Some(item)) => {
- if !found_main {
- if let ast::ItemKind::Fn(..) = item.kind {
- if item.ident.name == sym::main {
- found_main = true;
- }
- }
+ if !found_main &&
+ let ast::ItemKind::Fn(..) = item.kind &&
+ item.ident.name == sym::main
+ {
+ found_main = true;
}
- if !found_extern_crate {
- if let ast::ItemKind::ExternCrate(original) = item.kind {
- // This code will never be reached if `crate_name` is none because
- // `found_extern_crate` is initialized to `true` if it is none.
- let crate_name = crate_name.unwrap();
+ if !found_extern_crate &&
+ let ast::ItemKind::ExternCrate(original) = item.kind
+ {
+ // This code will never be reached if `crate_name` is none because
+ // `found_extern_crate` is initialized to `true` if it is none.
+ let crate_name = crate_name.unwrap();
- match original {
- Some(name) => found_extern_crate = name.as_str() == crate_name,
- None => found_extern_crate = item.ident.as_str() == crate_name,
- }
+ match original {
+ Some(name) => found_extern_crate = name.as_str() == crate_name,
+ None => found_extern_crate = item.ident.as_str() == crate_name,
}
}
- if !found_macro {
- if let ast::ItemKind::MacCall(..) = item.kind {
- found_macro = true;
- }
+ if !found_macro && let ast::ItemKind::MacCall(..) = item.kind {
+ found_macro = true;
}
if found_main && found_extern_crate {
@@ -744,8 +744,10 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
// Any errors in parsing should also appear when the doctest is compiled for real, so just
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+ rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
+ false,
+ );
let emitter = EmitterWriter::new(
Box::new(io::sink()),
@@ -758,6 +760,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
None,
false,
false,
+ TerminalUrl::No,
);
let handler = Handler::with_emitter(false, None, Box::new(emitter));
@@ -766,8 +769,8 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) {
Ok(p) => p,
Err(_) => {
- debug!("Cannot build a parser to check mod attr so skipping...");
- return true;
+ // If there is an unclosed delimiter, an error will be returned by the tokentrees.
+ return false;
}
};
// If a parsing error happened, it's very likely that the attribute is incomplete.
@@ -775,15 +778,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
e.cancel();
return false;
}
- // We now check if there is an unclosed delimiter for the attribute. To do so, we look at
- // the `unclosed_delims` and see if the opening square bracket was closed.
- parser
- .unclosed_delims()
- .get(0)
- .map(|unclosed| {
- unclosed.unclosed_span.map(|s| s.lo()).unwrap_or(BytePos(0)) != BytePos(2)
- })
- .unwrap_or(true)
+ true
})
})
.unwrap_or(false)
@@ -971,14 +966,12 @@ impl Collector {
fn get_filename(&self) -> FileName {
if let Some(ref source_map) = self.source_map {
let filename = source_map.span_to_filename(self.position);
- if let FileName::Real(ref filename) = filename {
- if let Ok(cur_dir) = env::current_dir() {
- if let Some(local_path) = filename.local_path() {
- if let Ok(path) = local_path.strip_prefix(&cur_dir) {
- return path.to_owned().into();
- }
- }
- }
+ if let FileName::Real(ref filename) = filename &&
+ let Ok(cur_dir) = env::current_dir() &&
+ let Some(local_path) = filename.local_path() &&
+ let Ok(path) = local_path.strip_prefix(&cur_dir)
+ {
+ return path.to_owned().into();
}
filename
} else if let Some(ref filename) = self.filename {
@@ -1214,11 +1207,11 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
fn visit_testable<F: FnOnce(&mut Self)>(
&mut self,
name: String,
- hir_id: HirId,
+ def_id: LocalDefId,
sp: Span,
nested: F,
) {
- let ast_attrs = self.tcx.hir().attrs(hir_id);
+ let ast_attrs = self.tcx.hir().attrs(self.tcx.hir().local_def_id_to_hir_id(def_id));
if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) {
if !cfg.matches(&self.sess.parse_sess, Some(self.tcx.features())) {
return;
@@ -1247,7 +1240,7 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
self.collector.enable_per_target_ignores,
Some(&crate::html::markdown::ExtraInfo::new(
self.tcx,
- hir_id,
+ def_id.to_def_id(),
span_of_attrs(&attrs).unwrap_or(sp),
)),
);
@@ -1276,37 +1269,37 @@ impl<'a, 'hir, 'tcx> intravisit::Visitor<'hir> for HirCollector<'a, 'hir, 'tcx>
_ => item.ident.to_string(),
};
- self.visit_testable(name, item.hir_id(), item.span, |this| {
+ self.visit_testable(name, item.owner_id.def_id, item.span, |this| {
intravisit::walk_item(this, item);
});
}
fn visit_trait_item(&mut self, item: &'hir hir::TraitItem<'_>) {
- self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| {
intravisit::walk_trait_item(this, item);
});
}
fn visit_impl_item(&mut self, item: &'hir hir::ImplItem<'_>) {
- self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| {
intravisit::walk_impl_item(this, item);
});
}
fn visit_foreign_item(&mut self, item: &'hir hir::ForeignItem<'_>) {
- self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| {
intravisit::walk_foreign_item(this, item);
});
}
fn visit_variant(&mut self, v: &'hir hir::Variant<'_>) {
- self.visit_testable(v.ident.to_string(), v.hir_id, v.span, |this| {
+ self.visit_testable(v.ident.to_string(), v.def_id, v.span, |this| {
intravisit::walk_variant(this, v);
});
}
fn visit_field_def(&mut self, f: &'hir hir::FieldDef<'_>) {
- self.visit_testable(f.ident.to_string(), f.hir_id, f.span, |this| {
+ self.visit_testable(f.ident.to_string(), f.def_id, f.span, |this| {
intravisit::walk_field_def(this, f);
});
}
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index 1c78c5b8d..8dbfaf4bb 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -1,7 +1,7 @@
use std::mem;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{CrateNum, DefId};
+use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol;
@@ -33,7 +33,7 @@ pub(crate) struct Cache {
///
/// The values of the map are a list of implementations and documentation
/// found on that implementation.
- pub(crate) impls: FxHashMap<DefId, Vec<Impl>>,
+ pub(crate) impls: DefIdMap<Vec<Impl>>,
/// Maintains a mapping of local crate `DefId`s to the fully qualified name
/// and "short type description" of that node. This is used when generating
@@ -56,7 +56,7 @@ pub(crate) struct Cache {
/// to the path used if the corresponding type is inlined. By
/// doing this, we can detect duplicate impls on a trait page, and only display
/// the impl for the inlined type.
- pub(crate) exact_paths: FxHashMap<DefId, Vec<Symbol>>,
+ pub(crate) exact_paths: DefIdMap<Vec<Symbol>>,
/// This map contains information about all known traits of this crate.
/// Implementations of a crate should inherit the documentation of the
@@ -127,7 +127,7 @@ pub(crate) struct Cache {
struct CacheBuilder<'a, 'tcx> {
cache: &'a mut Cache,
/// This field is used to prevent duplicated impl blocks.
- impl_ids: FxHashMap<DefId, FxHashSet<DefId>>,
+ impl_ids: DefIdMap<DefIdSet>,
tcx: TyCtxt<'tcx>,
}
@@ -173,7 +173,7 @@ impl Cache {
let (krate, mut impl_ids) = {
let mut cache_builder =
- CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: FxHashMap::default() };
+ CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: Default::default() };
krate = cache_builder.fold_crate(krate);
(krate, cache_builder.impl_ids)
};
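
Throughout this diff, `FxHashMap<DefId, _>`/`FxHashSet<DefId>` fields switch to the `DefIdMap`/`DefIdSet` aliases, which also lets call sites shrink to `Default::default()`. An analogous alias pattern over std collections (not the rustc definitions):

```rust
// Sketch: once the key type is fixed by an alias, callers only name the value
// type and can initialize with `Default::default()`.
use std::collections::{HashMap, HashSet};

type FakeDefId = u32; // stand-in for the real interned `DefId`
type DefIdMap<V> = HashMap<FakeDefId, V>;
type DefIdSet = HashSet<FakeDefId>;

fn main() {
    let mut impls: DefIdMap<Vec<&str>> = Default::default();
    impls.entry(7).or_default().push("impl Display for Foo");

    let mut seen: DefIdSet = Default::default();
    assert!(seen.insert(7));
    assert!(!seen.insert(7));
    assert_eq!(impls[&7], ["impl Display for Foo"]);
}
```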
@@ -229,16 +229,15 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
}
// Collect all the implementors of traits.
- if let clean::ImplItem(ref i) = *item.kind {
- if let Some(trait_) = &i.trait_ {
- if !i.kind.is_blanket() {
- self.cache
- .implementors
- .entry(trait_.def_id())
- .or_default()
- .push(Impl { impl_item: item.clone() });
- }
- }
+ if let clean::ImplItem(ref i) = *item.kind &&
+ let Some(trait_) = &i.trait_ &&
+ !i.kind.is_blanket()
+ {
+ self.cache
+ .implementors
+ .entry(trait_.def_id())
+ .or_default()
+ .push(Impl { impl_item: item.clone() });
}
// Index this method for searching later on.
@@ -288,6 +287,16 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
} else {
let last = self.cache.parent_stack.last().expect("parent_stack is empty 2");
let did = match &*last {
+ ParentStackItem::Impl {
+ // impl Trait for &T { fn method(self); }
+ //
+ // When generating a function index with the above shape, we want it
+ // associated with `T`, not with the primitive reference type. It should
+ // show up as `T::method`, rather than `reference::method`, in the search
+ // results page.
+ for_: clean::Type::BorrowedRef { type_, .. },
+ ..
+ } => type_.def_id(&self.cache),
ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache),
ParentStackItem::Type(item_id) => item_id.as_def_id(),
};
@@ -454,7 +463,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
| clean::BorrowedRef { type_: box clean::Type::Path { ref path }, .. } => {
dids.insert(path.def_id());
if let Some(generics) = path.generics() &&
- let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).kind() &&
+ let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).subst_identity().kind() &&
adt.is_fundamental() {
for ty in generics {
if let Some(did) = ty.def_id(self.cache) {
diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs
index 2f1f4cbf3..452e14918 100644
--- a/src/librustdoc/formats/item_type.rs
+++ b/src/librustdoc/formats/item_type.rs
@@ -21,6 +21,7 @@ use crate::clean;
/// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an
/// ordering based on a helper function inside `item_module`, in the same file.
#[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)]
+#[repr(u8)]
pub(crate) enum ItemType {
Module = 0,
ExternCrate = 1,
@@ -139,7 +140,7 @@ impl From<DefKind> for ItemType {
| DefKind::Field
| DefKind::LifetimeParam
| DefKind::GlobalAsm
- | DefKind::Impl
+ | DefKind::Impl { .. }
| DefKind::Closure
| DefKind::Generator => Self::ForeignType,
}
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index d3dc4065d..0e4c5ed68 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -34,6 +34,7 @@ use crate::clean::{
use crate::formats::item_type::ItemType;
use crate::html::escape::Escape;
use crate::html::render::Context;
+use crate::passes::collect_intra_doc_links::UrlFragment;
use super::url_parts_builder::estimate_item_path_byte_length;
use super::url_parts_builder::UrlPartsBuilder;
@@ -208,7 +209,7 @@ impl clean::GenericParamDef {
if f.alternate() {
write!(f, ": {:#}", print_generic_bounds(bounds, cx))?;
} else {
- write!(f, ":&nbsp;{}", print_generic_bounds(bounds, cx))?;
+ write!(f, ": {}", print_generic_bounds(bounds, cx))?;
}
}
@@ -216,7 +217,7 @@ impl clean::GenericParamDef {
if f.alternate() {
write!(f, " = {:#}", ty.print(cx))?;
} else {
- write!(f, "&nbsp;=&nbsp;{}", ty.print(cx))?;
+ write!(f, " = {}", ty.print(cx))?;
}
}
@@ -226,14 +227,14 @@ impl clean::GenericParamDef {
if f.alternate() {
write!(f, "const {}: {:#}", self.name, ty.print(cx))?;
} else {
- write!(f, "const {}:&nbsp;{}", self.name, ty.print(cx))?;
+ write!(f, "const {}: {}", self.name, ty.print(cx))?;
}
if let Some(default) = default {
if f.alternate() {
write!(f, " = {:#}", default)?;
} else {
- write!(f, "&nbsp;=&nbsp;{}", default)?;
+ write!(f, " = {}", default)?;
}
}
@@ -289,7 +290,7 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
if f.alternate() {
f.write_str(" ")?;
} else {
- f.write_str("<br>")?;
+ f.write_str("\n")?;
}
match pred {
@@ -352,23 +353,31 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
}
} else {
let mut br_with_padding = String::with_capacity(6 * indent + 28);
- br_with_padding.push_str("<br>");
- for _ in 0..indent + 4 {
- br_with_padding.push_str("&nbsp;");
+ br_with_padding.push_str("\n");
+
+ let padding_amount =
+ if ending == Ending::Newline { indent + 4 } else { indent + "fn where ".len() };
+
+ for _ in 0..padding_amount {
+ br_with_padding.push_str(" ");
}
- let where_preds = where_preds.to_string().replace("<br>", &br_with_padding);
+ let where_preds = where_preds.to_string().replace('\n', &br_with_padding);
if ending == Ending::Newline {
- let mut clause = "&nbsp;".repeat(indent.saturating_sub(1));
+ let mut clause = " ".repeat(indent.saturating_sub(1));
write!(clause, "<span class=\"where fmt-newline\">where{where_preds},</span>")?;
clause
} else {
- // insert a <br> tag after a single space but before multiple spaces at the start
+ // insert a newline after a single space but before multiple spaces at the start
if indent == 0 {
- format!("<br><span class=\"where\">where{where_preds}</span>")
+ format!("\n<span class=\"where\">where{where_preds}</span>")
} else {
+ // put the first one on the same line as the 'where' keyword
+ let where_preds = where_preds.replacen(&br_with_padding, " ", 1);
+
let mut clause = br_with_padding;
- clause.truncate(clause.len() - 4 * "&nbsp;".len());
+ clause.truncate(clause.len() - "where ".len());
+
write!(clause, "<span class=\"where\">where{where_preds}</span>")?;
clause
}
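
With where-clause output now landing inside whitespace-preserving `<pre>` blocks, wrapping is done with newlines and spaces instead of `<br>`/`&nbsp;`. An illustrative sketch of the padding substitution used above:

```rust
// Illustrative only: every line break in the rendered predicates picks up the
// same newline-plus-padding string, re-indenting wrapped lines under the header.
fn indent_predicates(where_preds: &str, indent: usize) -> String {
    let mut br_with_padding = String::from("\n");
    br_with_padding.push_str(&" ".repeat(indent + 4));
    where_preds.replace('\n', &br_with_padding)
}

fn main() {
    let preds = "T: Clone,\nU: Default,";
    assert_eq!(indent_predicates(preds, 4), "T: Clone,\n        U: Default,");
}
```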
@@ -701,11 +710,9 @@ pub(crate) fn href_with_root_path(
}
}
};
- if !is_remote {
- if let Some(root_path) = root_path {
- let root = root_path.trim_end_matches('/');
- url_parts.push_front(root);
- }
+ if !is_remote && let Some(root_path) = root_path {
+ let root = root_path.trim_end_matches('/');
+ url_parts.push_front(root);
}
debug!(?url_parts);
match shortty {
@@ -760,6 +767,28 @@ pub(crate) fn href_relative_parts<'fqp>(
}
}
+pub(crate) fn link_tooltip(did: DefId, fragment: &Option<UrlFragment>, cx: &Context<'_>) -> String {
+ let cache = cx.cache();
+ let Some((fqp, shortty)) = cache.paths.get(&did)
+ .or_else(|| cache.external_paths.get(&did))
+ else { return String::new() };
+ let mut buf = Buffer::new();
+ if let &Some(UrlFragment::Item(id)) = fragment {
+ write!(buf, "{} ", cx.tcx().def_descr(id));
+ for component in fqp {
+ write!(buf, "{component}::");
+ }
+ write!(buf, "{}", cx.tcx().item_name(id));
+ } else if !fqp.is_empty() {
+ let mut fqp_it = fqp.into_iter();
+ write!(buf, "{shortty} {}", fqp_it.next().unwrap());
+ for component in fqp_it {
+ write!(buf, "::{component}");
+ }
+ }
+ buf.into_inner()
+}
+
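
The new `link_tooltip` helper builds a "kind path::to::item" string for intra-doc links. A sketch of the same shape with plain strings instead of rustdoc's `Buffer`, `Cache`, and `TyCtxt`:

```rust
// Sketch only: the tooltip is the item kind followed by its fully qualified path.
fn link_tooltip(shortty: &str, fqp: &[&str]) -> String {
    let mut parts = fqp.iter();
    let Some(first) = parts.next() else { return String::new() };
    let mut out = format!("{shortty} {first}");
    for component in parts {
        out.push_str("::");
        out.push_str(component);
    }
    out
}

fn main() {
    assert_eq!(
        link_tooltip("struct", &["alloc", "string", "String"]),
        "struct alloc::string::String"
    );
    assert_eq!(link_tooltip("struct", &[]), "");
}
```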
/// Used to render a [`clean::Path`].
fn resolved_path<'cx>(
w: &mut fmt::Formatter<'_>,
@@ -1064,14 +1093,8 @@ fn fmt_type<'cx>(
fmt_type(ty, f, use_absolute, cx)?;
write!(f, ")")
}
- clean::Generic(..) => {
- primitive_link(
- f,
- PrimitiveType::Reference,
- &format!("{}{}{}", amp, lt, m),
- cx,
- )?;
- fmt_type(ty, f, use_absolute, cx)
+ clean::Generic(name) => {
+ primitive_link(f, PrimitiveType::Reference, &format!("{amp}{lt}{m}{name}"), cx)
}
_ => {
write!(f, "{}{}{}", amp, lt, m)?;
@@ -1313,7 +1336,8 @@ impl clean::FnDecl {
/// * `header_len`: The length of the function header and name. In other words, the number of
/// characters in the function declaration up to but not including the parentheses.
- /// <br>Used to determine line-wrapping.
+ /// This is expected to go into a `<pre>`/`code-header` block, so indentation and newlines
+ /// are preserved.
/// * `indent`: The number of spaces to indent each successive line with, if line-wrapping is
/// necessary.
pub(crate) fn full_print<'a, 'tcx: 'a>(
@@ -1361,7 +1385,7 @@ impl clean::FnDecl {
}
} else {
if i > 0 {
- args.push_str("<br>");
+ args.push_str("\n");
}
if input.is_const {
args.push_str("const ");
@@ -1387,7 +1411,7 @@ impl clean::FnDecl {
let mut args = args.into_inner();
if self.c_variadic {
- args.push_str(",<br> ...");
+ args.push_str(",\n ...");
args_plain.push_str(", ...");
}
@@ -1397,24 +1421,20 @@ impl clean::FnDecl {
let declaration_len = header_len + args_plain.len() + arrow_plain.len();
let output = if declaration_len > 80 {
- let full_pad = format!("<br>{}", "&nbsp;".repeat(indent + 4));
- let close_pad = format!("<br>{}", "&nbsp;".repeat(indent));
+ let full_pad = format!("\n{}", " ".repeat(indent + 4));
+ let close_pad = format!("\n{}", " ".repeat(indent));
format!(
"({pad}{args}{close}){arrow}",
pad = if self.inputs.values.is_empty() { "" } else { &full_pad },
- args = args.replace("<br>", &full_pad),
+ args = args.replace('\n', &full_pad),
close = close_pad,
arrow = arrow
)
} else {
- format!("({args}){arrow}", args = args.replace("<br>", " "), arrow = arrow)
+ format!("({args}){arrow}", args = args.replace('\n', " "), arrow = arrow)
};
- if f.alternate() {
- write!(f, "{}", output.replace("<br>", "\n"))
- } else {
- write!(f, "{}", output)
- }
+ write!(f, "{}", output)
}
}
@@ -1617,7 +1637,7 @@ impl clean::TypeBinding {
if f.alternate() {
write!(f, ": {:#}", print_generic_bounds(bounds, cx))?;
} else {
- write!(f, ":&nbsp;{}", print_generic_bounds(bounds, cx))?;
+ write!(f, ": {}", print_generic_bounds(bounds, cx))?;
}
}
}
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 8a9e6caf6..2c9fc4e3c 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -58,11 +58,11 @@ pub(crate) fn render_example_with_highlighting(
write_footer(out, playground_button);
}
-/// Highlights `src` as a macro, returning the HTML output.
-pub(crate) fn render_macro_with_highlighting(src: &str, out: &mut Buffer) {
- write_header(out, "macro", None, Tooltip::None);
+/// Highlights `src` as an item-decl, returning the HTML output.
+pub(crate) fn render_item_decl_with_highlighting(src: &str, out: &mut Buffer) {
+ write!(out, "<pre class=\"rust item-decl\">");
write_code(out, src, None, None);
- write_footer(out, None);
+ write!(out, "</pre>");
}
/// Highlights `src` as a source code page, returning the HTML output.
@@ -96,13 +96,19 @@ fn write_header(out: &mut Buffer, class: &str, extra_content: Option<Buffer>, to
);
if tooltip != Tooltip::None {
+ let edition_code;
write!(
out,
- "<div class='tooltip'{}>ⓘ</div>",
- if let Tooltip::Edition(edition_info) = tooltip {
- format!(" data-edition=\"{}\"", edition_info)
- } else {
- String::new()
+ "<a href=\"#\" class=\"tooltip\" title=\"{}\">ⓘ</a>",
+ match tooltip {
+ Tooltip::Ignore => "This example is not tested",
+ Tooltip::CompileFail => "This example deliberately fails to compile",
+ Tooltip::ShouldPanic => "This example panics",
+ Tooltip::Edition(edition) => {
+ edition_code = format!("This example runs with edition {edition}");
+ &edition_code
+ }
+ Tooltip::None => unreachable!(),
},
);
}
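
The tooltip is now emitted as a `title` attribute, and the `edition_code` binding lets one `match` arm format a dynamic message while the others return static strings. A self-contained sketch of that borrow pattern with a local enum (not rustdoc's `Tooltip`):

```rust
// Sketch: declaring `edition_code` before the match lets the dynamic arm format
// into it and hand back a `&str`, while the other arms stay `&'static str`.
enum Tooltip {
    Ignore,
    CompileFail,
    Edition(u16),
}

fn tooltip_title(tooltip: &Tooltip) -> String {
    let edition_code;
    let title = match tooltip {
        Tooltip::Ignore => "This example is not tested",
        Tooltip::CompileFail => "This example deliberately fails to compile",
        Tooltip::Edition(edition) => {
            edition_code = format!("This example runs with edition {edition}");
            &edition_code
        }
    };
    title.to_string()
}

fn main() {
    assert_eq!(
        tooltip_title(&Tooltip::Edition(2021)),
        "This example runs with edition 2021"
    );
}
```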
@@ -460,10 +466,8 @@ impl<'a> PeekIter<'a> {
}
/// Returns the next item after the current one. It doesn't interfere with `peek_next` output.
fn peek(&mut self) -> Option<&(TokenKind, &'a str)> {
- if self.stored.is_empty() {
- if let Some(next) = self.iter.next() {
- self.stored.push_back(next);
- }
+ if self.stored.is_empty() && let Some(next) = self.iter.next() {
+ self.stored.push_back(next);
}
self.stored.front()
}
diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs
index a60e7cb10..6ab849c92 100644
--- a/src/librustdoc/html/layout.rs
+++ b/src/librustdoc/html/layout.rs
@@ -30,7 +30,6 @@ pub(crate) struct Page<'a> {
pub(crate) root_path: &'a str,
pub(crate) static_root_path: Option<&'a str>,
pub(crate) description: &'a str,
- pub(crate) keywords: &'a str,
pub(crate) resource_suffix: &'a str,
}
diff --git a/src/librustdoc/html/length_limit.rs b/src/librustdoc/html/length_limit.rs
index bbdc91c8d..4c8db2c67 100644
--- a/src/librustdoc/html/length_limit.rs
+++ b/src/librustdoc/html/length_limit.rs
@@ -61,14 +61,14 @@ impl HtmlWithLimit {
/// and returns [`ControlFlow::Break`].
pub(super) fn push(&mut self, text: &str) -> ControlFlow<(), ()> {
if self.len + text.len() > self.limit {
- return ControlFlow::BREAK;
+ return ControlFlow::Break(());
}
self.flush_queue();
write!(self.buf, "{}", Escape(text)).unwrap();
self.len += text.len();
- ControlFlow::CONTINUE
+ ControlFlow::Continue(())
}
/// Open an HTML tag.
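
The unstable `ControlFlow::CONTINUE`/`BREAK` associated constants are replaced with the plain `Continue(())`/`Break(())` variants here and in `markdown.rs` below. A minimal stable-Rust sketch of the same length-limit pattern:

```rust
// Not rustdoc code: a minimal sketch using only the stable ControlFlow variants.
use std::ops::ControlFlow;

fn push_within_limit(buf: &mut String, text: &str, limit: usize) -> ControlFlow<()> {
    if buf.len() + text.len() > limit {
        // Previously spelled as the unstable associated constant `ControlFlow::BREAK`.
        return ControlFlow::Break(());
    }
    buf.push_str(text);
    // Previously `ControlFlow::CONTINUE`.
    ControlFlow::Continue(())
}

fn main() {
    let mut buf = String::new();
    assert!(push_within_limit(&mut buf, "hello", 10).is_continue());
    assert!(push_within_limit(&mut buf, " world!!!", 10).is_break());
    assert_eq!(buf, "hello");
}
```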
diff --git a/src/librustdoc/html/length_limit/tests.rs b/src/librustdoc/html/length_limit/tests.rs
index 2d02b8a16..2185c0348 100644
--- a/src/librustdoc/html/length_limit/tests.rs
+++ b/src/librustdoc/html/length_limit/tests.rs
@@ -83,7 +83,7 @@ fn past_the_limit() {
buf.push("word#")?;
buf.push(&n.to_string())?;
buf.close_tag();
- ControlFlow::CONTINUE
+ ControlFlow::Continue(())
});
buf.close_tag();
assert_eq!(
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 4ff67fe15..fe446ae3c 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -27,14 +27,14 @@
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::DefId;
-use rustc_hir::HirId;
use rustc_middle::ty::TyCtxt;
+pub(crate) use rustc_resolve::rustdoc::main_body_opts;
+use rustc_resolve::rustdoc::may_be_doc_link;
use rustc_span::edition::Edition;
use rustc_span::{Span, Symbol};
use once_cell::sync::Lazy;
use std::borrow::Cow;
-use std::cell::RefCell;
use std::collections::VecDeque;
use std::default::Default;
use std::fmt::Write;
@@ -47,6 +47,7 @@ use crate::html::escape::Escape;
use crate::html::format::Buffer;
use crate::html::highlight;
use crate::html::length_limit::HtmlWithLimit;
+use crate::html::render::small_url_encode;
use crate::html::toc::TocBuilder;
use pulldown_cmark::{
@@ -58,15 +59,6 @@ mod tests;
const MAX_HEADER_LEVEL: u32 = 6;
-/// Options for rendering Markdown in the main body of documentation.
-pub(crate) fn main_body_opts() -> Options {
- Options::ENABLE_TABLES
- | Options::ENABLE_FOOTNOTES
- | Options::ENABLE_STRIKETHROUGH
- | Options::ENABLE_TASKLISTS
- | Options::ENABLE_SMART_PUNCTUATION
-}
-
/// Options for rendering Markdown in summaries (e.g., in search results).
pub(crate) fn summary_opts() -> Options {
Options::ENABLE_TABLES
@@ -103,14 +95,14 @@ pub struct Markdown<'a> {
/// E.g. if `heading_offset: HeadingOffset::H2`, then `# something` renders an `<h2>`.
pub heading_offset: HeadingOffset,
}
-/// A tuple struct like `Markdown` that renders the markdown with a table of contents.
-pub(crate) struct MarkdownWithToc<'a>(
- pub(crate) &'a str,
- pub(crate) &'a mut IdMap,
- pub(crate) ErrorCodes,
- pub(crate) Edition,
- pub(crate) &'a Option<Playground>,
-);
+/// A struct like `Markdown` that renders the markdown with a table of contents.
+pub(crate) struct MarkdownWithToc<'a> {
+ pub(crate) content: &'a str,
+ pub(crate) ids: &'a mut IdMap,
+ pub(crate) error_codes: ErrorCodes,
+ pub(crate) edition: Edition,
+ pub(crate) playground: &'a Option<Playground>,
+}
/// A tuple struct like `Markdown` that renders the markdown escaping HTML tags
/// and includes no paragraph tags.
pub(crate) struct MarkdownItemInfo<'a>(pub(crate) &'a str, pub(crate) &'a mut IdMap);
@@ -295,30 +287,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
doctest::make_test(&test, krate, false, &Default::default(), edition, None);
let channel = if test.contains("#![feature(") { "&amp;version=nightly" } else { "" };
- // These characters don't need to be escaped in a URI.
- // FIXME: use a library function for percent encoding.
- fn dont_escape(c: u8) -> bool {
- (b'a' <= c && c <= b'z')
- || (b'A' <= c && c <= b'Z')
- || (b'0' <= c && c <= b'9')
- || c == b'-'
- || c == b'_'
- || c == b'.'
- || c == b'~'
- || c == b'!'
- || c == b'\''
- || c == b'('
- || c == b')'
- || c == b'*'
- }
- let mut test_escaped = String::new();
- for b in test.bytes() {
- if dont_escape(b) {
- test_escaped.push(char::from(b));
- } else {
- write!(test_escaped, "%{:02X}", b).unwrap();
- }
- }
+ let test_escaped = small_url_encode(test);
Some(format!(
r#"<a class="test-arrow" target="_blank" href="{}?code={}{}&amp;edition={}">Run</a>"#,
url, test_escaped, channel, edition,
@@ -391,6 +360,9 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
trace!("it matched");
assert!(self.shortcut_link.is_none(), "shortcut links cannot be nested");
self.shortcut_link = Some(link);
+ if title.is_empty() && !link.tooltip.is_empty() {
+ *title = CowStr::Borrowed(link.tooltip.as_ref());
+ }
}
}
// Now that we're done with the shortcut link, don't replace any more text.
@@ -441,9 +413,12 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
}
// If this is a link, but not a shortcut link,
// replace the URL, since the broken_link_callback was not called.
- Some(Event::Start(Tag::Link(_, dest, _))) => {
+ Some(Event::Start(Tag::Link(_, dest, title))) => {
if let Some(link) = self.links.iter().find(|&link| *link.original_text == **dest) {
*dest = CowStr::Borrowed(link.href.as_ref());
+ if title.is_empty() && !link.tooltip.is_empty() {
+ *title = CowStr::Borrowed(link.tooltip.as_ref());
+ }
}
}
// Anything else couldn't have been a valid Rust path, so no need to replace the text.
@@ -577,10 +552,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> SummaryLine<'a, I> {
}
fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
- matches!(
- t,
- Tag::Paragraph | Tag::Item | Tag::Emphasis | Tag::Strong | Tag::Link(..) | Tag::BlockQuote
- )
+ matches!(t, Tag::Paragraph | Tag::Emphasis | Tag::Strong | Tag::Link(..) | Tag::BlockQuote)
}
fn is_forbidden_tag(t: &Tag<'_>) -> bool {
@@ -771,11 +743,7 @@ pub(crate) fn find_testable_code<T: doctest::Tester>(
}
Event::Text(ref s) if register_header.is_some() => {
let level = register_header.unwrap();
- if s.is_empty() {
- tests.register_header("", level);
- } else {
- tests.register_header(s, level);
- }
+ tests.register_header(s, level);
register_header = None;
}
_ => {}
@@ -784,45 +752,26 @@ pub(crate) fn find_testable_code<T: doctest::Tester>(
}
pub(crate) struct ExtraInfo<'tcx> {
- id: ExtraInfoId,
+ def_id: DefId,
sp: Span,
tcx: TyCtxt<'tcx>,
}
-enum ExtraInfoId {
- Hir(HirId),
- Def(DefId),
-}
-
impl<'tcx> ExtraInfo<'tcx> {
- pub(crate) fn new(tcx: TyCtxt<'tcx>, hir_id: HirId, sp: Span) -> ExtraInfo<'tcx> {
- ExtraInfo { id: ExtraInfoId::Hir(hir_id), sp, tcx }
- }
-
- pub(crate) fn new_did(tcx: TyCtxt<'tcx>, did: DefId, sp: Span) -> ExtraInfo<'tcx> {
- ExtraInfo { id: ExtraInfoId::Def(did), sp, tcx }
+ pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: DefId, sp: Span) -> ExtraInfo<'tcx> {
+ ExtraInfo { def_id, sp, tcx }
}
fn error_invalid_codeblock_attr(&self, msg: &str, help: &str) {
- let hir_id = match self.id {
- ExtraInfoId::Hir(hir_id) => hir_id,
- ExtraInfoId::Def(item_did) => {
- match item_did.as_local() {
- Some(item_did) => self.tcx.hir().local_def_id_to_hir_id(item_did),
- None => {
- // If non-local, no need to check anything.
- return;
- }
- }
- }
- };
- self.tcx.struct_span_lint_hir(
- crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
- hir_id,
- self.sp,
- msg,
- |lint| lint.help(help),
- );
+ if let Some(def_id) = self.def_id.as_local() {
+ self.tcx.struct_span_lint_hir(
+ crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
+ self.tcx.hir().local_def_id_to_hir_id(def_id),
+ self.sp,
+ msg,
+ |lint| lint.help(help),
+ );
+ }
}
}
@@ -1029,8 +978,8 @@ impl Markdown<'_> {
let mut replacer = |broken_link: BrokenLink<'_>| {
links
.iter()
- .find(|link| link.original_text.as_str() == &*broken_link.reference)
- .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ .find(|link| &*link.original_text == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
};
let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer));
@@ -1051,7 +1000,7 @@ impl Markdown<'_> {
impl MarkdownWithToc<'_> {
pub(crate) fn into_string(self) -> String {
- let MarkdownWithToc(md, ids, codes, edition, playground) = self;
+ let MarkdownWithToc { content: md, ids, error_codes: codes, edition, playground } = self;
let p = Parser::new_ext(md, main_body_opts()).into_offset_iter();
@@ -1112,8 +1061,8 @@ impl MarkdownSummaryLine<'_> {
let mut replacer = |broken_link: BrokenLink<'_>| {
links
.iter()
- .find(|link| link.original_text.as_str() == &*broken_link.reference)
- .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ .find(|link| &*link.original_text == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
};
let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer))
@@ -1159,8 +1108,8 @@ fn markdown_summary_with_limit(
let mut replacer = |broken_link: BrokenLink<'_>| {
link_names
.iter()
- .find(|link| link.original_text.as_str() == &*broken_link.reference)
- .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ .find(|link| &*link.original_text == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
};
let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
@@ -1191,18 +1140,18 @@ fn markdown_summary_with_limit(
Event::Start(tag) => match tag {
Tag::Emphasis => buf.open_tag("em"),
Tag::Strong => buf.open_tag("strong"),
- Tag::CodeBlock(..) => return ControlFlow::BREAK,
+ Tag::CodeBlock(..) => return ControlFlow::Break(()),
_ => {}
},
Event::End(tag) => match tag {
Tag::Emphasis | Tag::Strong => buf.close_tag(),
- Tag::Paragraph | Tag::Heading(..) => return ControlFlow::BREAK,
+ Tag::Paragraph | Tag::Heading(..) => return ControlFlow::Break(()),
_ => {}
},
Event::HardBreak | Event::SoftBreak => buf.push(" ")?,
_ => {}
};
- ControlFlow::CONTINUE
+ ControlFlow::Continue(())
});
(buf.finish(), stopped_early)
@@ -1230,14 +1179,23 @@ pub(crate) fn short_markdown_summary(markdown: &str, link_names: &[RenderedLink]
/// - Headings, links, and formatting are stripped.
/// - Inline code is rendered as-is, surrounded by backticks.
/// - HTML and code blocks are ignored.
-pub(crate) fn plain_text_summary(md: &str) -> String {
+pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> String {
if md.is_empty() {
return String::new();
}
let mut s = String::with_capacity(md.len() * 3 / 2);
- for event in Parser::new_ext(md, summary_opts()) {
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ link_names
+ .iter()
+ .find(|link| &*link.original_text == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
+ };
+
+ let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
+
+ for event in p {
match &event {
Event::Text(text) => s.push_str(text),
Event::Code(code) => {
@@ -1265,14 +1223,12 @@ pub(crate) struct MarkdownLink {
pub(crate) fn markdown_links<R>(
md: &str,
- filter_map: impl Fn(MarkdownLink) -> Option<R>,
+ preprocess_link: impl Fn(MarkdownLink) -> Option<R>,
) -> Vec<R> {
if md.is_empty() {
return vec![];
}
- let links = RefCell::new(vec![]);
-
// FIXME: remove this function once pulldown_cmark can provide spans for link definitions.
let locate = |s: &str, fallback: Range<usize>| unsafe {
let s_start = s.as_ptr();
@@ -1304,46 +1260,23 @@ pub(crate) fn markdown_links<R>(
}
};
- let mut push = |link: BrokenLink<'_>| {
- let span = span_for_link(&link.reference, link.span);
- filter_map(MarkdownLink {
- kind: LinkType::ShortcutUnknown,
- link: link.reference.to_string(),
- range: span,
- })
- .map(|link| links.borrow_mut().push(link));
- None
- };
- let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut push))
- .into_offset_iter();
-
- // There's no need to thread an IdMap through to here because
- // the IDs generated aren't going to be emitted anywhere.
- let mut ids = IdMap::new();
- let iter = Footnotes::new(HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1));
-
- for ev in iter {
- if let Event::Start(Tag::Link(
- // `<>` links cannot be intra-doc links so we skip them.
- kind @ (LinkType::Inline
- | LinkType::Reference
- | LinkType::ReferenceUnknown
- | LinkType::Collapsed
- | LinkType::CollapsedUnknown
- | LinkType::Shortcut
- | LinkType::ShortcutUnknown),
- dest,
- _,
- )) = ev.0
- {
- debug!("found link: {dest}");
- let span = span_for_link(&dest, ev.1);
- filter_map(MarkdownLink { kind, link: dest.into_string(), range: span })
- .map(|link| links.borrow_mut().push(link));
+ Parser::new_with_broken_link_callback(
+ md,
+ main_body_opts(),
+ Some(&mut |link: BrokenLink<'_>| Some((link.reference, "".into()))),
+ )
+ .into_offset_iter()
+ .filter_map(|(event, span)| match event {
+ Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => {
+ preprocess_link(MarkdownLink {
+ kind: link_type,
+ range: span_for_link(&dest, span),
+ link: dest.into_string(),
+ })
}
- }
-
- links.into_inner()
+ _ => None,
+ })
+ .collect()
}
#[derive(Debug)]
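
`markdown_links` now streams pulldown-cmark events directly through `filter_map` instead of collecting into a `RefCell`. A reduced sketch of that pipeline, assuming pulldown-cmark 0.9 (the `Tag::Link(link_type, dest, title)` shape used above):

```rust
// Sketch only: walk the events once and keep each link destination together
// with its byte range in the Markdown source.
use pulldown_cmark::{Event, Options, Parser, Tag};
use std::ops::Range;

fn markdown_link_targets(md: &str) -> Vec<(String, Range<usize>)> {
    Parser::new_ext(md, Options::empty())
        .into_offset_iter()
        .filter_map(|(event, span)| match event {
            Event::Start(Tag::Link(_, dest, _)) => Some((dest.into_string(), span)),
            _ => None,
        })
        .collect()
}

fn main() {
    let links = markdown_link_targets("See [the docs](https://docs.rs).");
    assert_eq!(links.len(), 1);
    assert_eq!(links[0].0, "https://docs.rs");
}
```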
diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs
index 5878c5826..e05635a02 100644
--- a/src/librustdoc/html/markdown/tests.rs
+++ b/src/librustdoc/html/markdown/tests.rs
@@ -249,7 +249,7 @@ fn test_short_markdown_summary() {
#[test]
fn test_plain_text_summary() {
fn t(input: &str, expect: &str) {
- let output = plain_text_summary(input);
+ let output = plain_text_summary(input, &[]);
assert_eq!(output, expect, "original: {}", input);
}
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
index 5cefe9475..5e4a59562 100644
--- a/src/librustdoc/html/render/context.rs
+++ b/src/librustdoc/html/render/context.rs
@@ -6,7 +6,7 @@ use std::rc::Rc;
use std::sync::mpsc::{channel, Receiver};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_span::edition::Edition;
@@ -18,7 +18,7 @@ use super::search_index::build_index;
use super::write_shared::write_shared;
use super::{
collect_spans_and_sources, print_sidebar, scrape_examples_help, sidebar_module_like, AllTypes,
- LinkFromSrc, NameDoc, StylePath, BASIC_KEYWORDS,
+ LinkFromSrc, StylePath,
};
use crate::clean::{self, types::ExternalLocation, ExternalCrate};
@@ -56,7 +56,7 @@ pub(crate) struct Context<'tcx> {
pub(super) render_redirect_pages: bool,
/// Tracks section IDs for `Deref` targets so they match in both the main
/// body and the sidebar.
- pub(super) deref_id_map: FxHashMap<DefId, String>,
+ pub(super) deref_id_map: DefIdMap<String>,
/// The map used to ensure all generated 'id=' attributes are unique.
pub(super) id_map: IdMap,
/// Shared mutable state.
@@ -182,7 +182,10 @@ impl<'tcx> Context<'tcx> {
};
title.push_str(" - Rust");
let tyname = it.type_();
- let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc));
+ let desc = it
+ .doc_value()
+ .as_ref()
+ .map(|doc| plain_text_summary(doc, &it.link_names(&self.cache())));
let desc = if let Some(desc) = desc {
desc
} else if it.is_crate() {
@@ -195,7 +198,6 @@ impl<'tcx> Context<'tcx> {
self.shared.layout.krate
)
};
- let keywords = make_item_keywords(it);
let name;
let tyname_s = if it.is_crate() {
name = format!("{} crate", tyname);
@@ -212,7 +214,6 @@ impl<'tcx> Context<'tcx> {
static_root_path: clone_shared.static_root_path.as_deref(),
title: &title,
description: &desc,
- keywords: &keywords,
resource_suffix: &clone_shared.resource_suffix,
};
let mut page_buffer = Buffer::html();
@@ -258,7 +259,7 @@ impl<'tcx> Context<'tcx> {
}
/// Construct a map of items shown in the sidebar to a plain-text summary of their docs.
- fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
+ fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<String>> {
// BTreeMap instead of HashMap to get a sorted output
let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new();
let mut inserted: FxHashMap<ItemType, FxHashSet<Symbol>> = FxHashMap::default();
@@ -276,10 +277,7 @@ impl<'tcx> Context<'tcx> {
if inserted.entry(short).or_default().insert(myname) {
let short = short.to_string();
let myname = myname.to_string();
- map.entry(short).or_default().push((
- myname,
- Some(item.doc_value().map_or_else(String::new, |s| plain_text_summary(&s))),
- ));
+ map.entry(short).or_default().push(myname);
}
}
@@ -544,7 +542,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
dst,
render_redirect_pages: false,
id_map,
- deref_id_map: FxHashMap::default(),
+ deref_id_map: Default::default(),
shared: Rc::new(scx),
include_sources,
types_with_notable_traits: FxHashSet::default(),
@@ -572,7 +570,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
current: self.current.clone(),
dst: self.dst.clone(),
render_redirect_pages: self.render_redirect_pages,
- deref_id_map: FxHashMap::default(),
+ deref_id_map: Default::default(),
id_map: IdMap::new(),
shared: Rc::clone(&self.shared),
include_sources: self.include_sources,
@@ -598,7 +596,6 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
root_path: "../",
static_root_path: shared.static_root_path.as_deref(),
description: "List of all items in this crate",
- keywords: BASIC_KEYWORDS,
resource_suffix: &shared.resource_suffix,
};
let all = shared.all.replace(AllTypes::new());
@@ -708,14 +705,12 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
shared.fs.write(scrape_examples_help_file, v)?;
}
- if let Some(ref redirections) = shared.redirections {
- if !redirections.borrow().is_empty() {
- let redirect_map_path =
- self.dst.join(crate_name.as_str()).join("redirect-map.json");
- let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
- shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
- shared.fs.write(redirect_map_path, paths)?;
- }
+ if let Some(ref redirections) = shared.redirections && !redirections.borrow().is_empty() {
+ let redirect_map_path =
+ self.dst.join(crate_name.as_str()).join("redirect-map.json");
+ let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
+ shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
+ shared.fs.write(redirect_map_path, paths)?;
}
// No need for it anymore.
@@ -828,7 +823,3 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
&self.shared.cache
}
}
-
-fn make_item_keywords(it: &clean::Item) -> String {
- format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())
-}
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index 4fa33e890..e6a040d02 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -38,7 +38,7 @@ pub(crate) use self::span_map::{collect_spans_and_sources, LinkFromSrc};
use std::collections::VecDeque;
use std::default::Default;
-use std::fmt;
+use std::fmt::{self, Write};
use std::fs;
use std::iter::Peekable;
use std::path::PathBuf;
@@ -50,7 +50,7 @@ use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::def::CtorKind;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_hir::Mutability;
use rustc_middle::middle::stability;
use rustc_middle::ty;
@@ -83,9 +83,6 @@ use crate::scrape_examples::{CallData, CallLocation};
use crate::try_none;
use crate::DOC_RUST_LANG_ORG_CHANNEL;
-/// A pair of name and its optional document.
-pub(crate) type NameDoc = (String, Option<String>);
-
pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) }
@@ -1115,7 +1112,7 @@ fn render_assoc_items(
it: DefId,
what: AssocItemRender<'_>,
) {
- let mut derefs = FxHashSet::default();
+ let mut derefs = DefIdSet::default();
derefs.insert(it);
render_assoc_items_inner(w, cx, containing_item, it, what, &mut derefs)
}
@@ -1126,7 +1123,7 @@ fn render_assoc_items_inner(
containing_item: &clean::Item,
it: DefId,
what: AssocItemRender<'_>,
- derefs: &mut FxHashSet<DefId>,
+ derefs: &mut DefIdSet,
) {
info!("Documenting associated items of {:?}", containing_item.name);
let shared = Rc::clone(&cx.shared);
@@ -1215,7 +1212,7 @@ fn render_deref_methods(
impl_: &Impl,
container_item: &clean::Item,
deref_mut: bool,
- derefs: &mut FxHashSet<DefId>,
+ derefs: &mut DefIdSet,
) {
let cache = cx.cache();
let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
@@ -1313,7 +1310,7 @@ pub(crate) fn notable_traits_button(ty: &clean::Type, cx: &mut Context<'_>) -> O
if has_notable_trait {
cx.types_with_notable_traits.insert(ty.clone());
Some(format!(
- " <a href=\"#\" class=\"notable-traits\" data-ty=\"{ty}\">ⓘ</a>",
+ " <a href=\"#\" class=\"tooltip\" data-notable-ty=\"{ty}\">ⓘ</a>",
ty = Escape(&format!("{:#}", ty.print(cx))),
))
} else {
@@ -1528,11 +1525,7 @@ fn render_impl(
})
})
.map(|item| format!("{}.{}", item.type_(), name));
- write!(
- w,
- "<section id=\"{}\" class=\"{}{} has-srclink\">",
- id, item_type, in_trait_class,
- );
+ write!(w, "<section id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class,);
render_rightside(w, cx, item, containing_item, render_mode);
if trait_.is_some() {
// Anchors are only used on trait impls.
@@ -1554,11 +1547,7 @@ fn render_impl(
kind @ (clean::TyAssocConstItem(ty) | clean::AssocConstItem(ty, _)) => {
let source_id = format!("{}.{}", item_type, name);
let id = cx.derive_id(source_id.clone());
- write!(
- w,
- "<section id=\"{}\" class=\"{}{} has-srclink\">",
- id, item_type, in_trait_class
- );
+ write!(w, "<section id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class);
render_rightside(w, cx, item, containing_item, render_mode);
if trait_.is_some() {
// Anchors are only used on trait impls.
@@ -1606,11 +1595,7 @@ fn render_impl(
clean::AssocTypeItem(tydef, _bounds) => {
let source_id = format!("{}.{}", item_type, name);
let id = cx.derive_id(source_id.clone());
- write!(
- w,
- "<section id=\"{}\" class=\"{}{} has-srclink\">",
- id, item_type, in_trait_class
- );
+ write!(w, "<section id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class);
if trait_.is_some() {
// Anchors are only used on trait impls.
write!(w, "<a href=\"#{}\" class=\"anchor\">§</a>", id);
@@ -1844,7 +1829,7 @@ pub(crate) fn render_impl_summary(
} else {
format!(" data-aliases=\"{}\"", aliases.join(","))
};
- write!(w, "<section id=\"{}\" class=\"impl has-srclink\"{}>", id, aliases);
+ write!(w, "<section id=\"{}\" class=\"impl\"{}>", id, aliases);
render_rightside(w, cx, &i.impl_item, containing_item, RenderMode::Normal);
write!(w, "<a href=\"#{}\" class=\"anchor\">§</a>", id);
write!(w, "<h3 class=\"code-header\">");
@@ -2032,31 +2017,60 @@ fn get_associated_constants(
.collect::<Vec<_>>()
}
-// The point is to url encode any potential character from a type with genericity.
-fn small_url_encode(s: String) -> String {
+pub(crate) fn small_url_encode(s: String) -> String {
+ // These characters don't need to be escaped in a URI.
+ // See https://url.spec.whatwg.org/#query-percent-encode-set
+ // and https://url.spec.whatwg.org/#urlencoded-parsing
+ // and https://url.spec.whatwg.org/#url-code-points
+ fn dont_escape(c: u8) -> bool {
+ (b'a' <= c && c <= b'z')
+ || (b'A' <= c && c <= b'Z')
+ || (b'0' <= c && c <= b'9')
+ || c == b'-'
+ || c == b'_'
+ || c == b'.'
+ || c == b','
+ || c == b'~'
+ || c == b'!'
+ || c == b'\''
+ || c == b'('
+ || c == b')'
+ || c == b'*'
+ || c == b'/'
+ || c == b';'
+ || c == b':'
+ || c == b'?'
+ // As described in urlencoded-parsing, the
+ // first `=` is the one that separates key from
+ // value. Following `=`s are part of the value.
+ || c == b'='
+ }
let mut st = String::new();
let mut last_match = 0;
- for (idx, c) in s.char_indices() {
- let escaped = match c {
- '<' => "%3C",
- '>' => "%3E",
- ' ' => "%20",
- '?' => "%3F",
- '\'' => "%27",
- '&' => "%26",
- ',' => "%2C",
- ':' => "%3A",
- ';' => "%3B",
- '[' => "%5B",
- ']' => "%5D",
- '"' => "%22",
- _ => continue,
- };
+ for (idx, b) in s.bytes().enumerate() {
+ if dont_escape(b) {
+ continue;
+ }
- st += &s[last_match..idx];
- st += escaped;
- // NOTE: we only expect single byte characters here - which is fine as long as we
- // only match single byte characters
+ if last_match != idx {
+ // Invariant: `idx` must be the first byte in a character at this point.
+ st += &s[last_match..idx];
+ }
+ if b == b' ' {
+ // When URL queries are decoded, `+` is replaced by a space (SP).
+ // While the same is not true for hashes, rustdoc only needs to be
+ // consistent with itself when encoding them.
+ st += "+";
+ } else if b == b'%' {
+ st += "%%";
+ } else {
+ write!(st, "%{:02X}", b).unwrap();
+ }
+ // Invariant: if the current byte is not at the start of a multi-byte character,
+ // we need to get down here so that when the next turn of the loop comes around,
+ // last_match winds up equalling idx.
+ //
+ // In other words, dont_escape must always return `false` for every byte of a
+ // multi-byte character.
last_match = idx + 1;
}
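
For orientation, here is a self-contained sketch of the encoding rules the rewritten `small_url_encode` documents in the hunk above (the function name and `main` demo are illustrative, not part of the patch): unreserved bytes pass through, a space becomes `+`, `%` becomes `%%`, and every other byte, including each byte of a multi-byte UTF-8 character, is percent-encoded.

```rust
use std::fmt::Write;

// Sketch only: mirrors the documented rules, not rustdoc's exact implementation
// (which copies unescaped runs by slicing instead of pushing byte by byte).
fn sketch_url_encode(s: &str) -> String {
    // Bytes that pass through unescaped, per the allow-list in the hunk above.
    fn dont_escape(c: u8) -> bool {
        c.is_ascii_alphanumeric() || b"-_.,~!'()*/;:?=".contains(&c)
    }
    let mut out = String::new();
    for b in s.bytes() {
        if dont_escape(b) {
            out.push(b as char);
        } else if b == b' ' {
            out.push('+'); // queries decode `+` back to a space
        } else if b == b'%' {
            out.push_str("%%");
        } else {
            write!(out, "%{:02X}", b).unwrap(); // per-byte percent-encoding
        }
    }
    out
}

fn main() {
    assert_eq!(sketch_url_encode("Vec<String>"), "Vec%3CString%3E");
    assert_eq!(sketch_url_encode("impl Trait"), "impl+Trait");
}
```
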
@@ -2175,7 +2189,7 @@ fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
if let Some(impl_) =
v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
{
- let mut derefs = FxHashSet::default();
+ let mut derefs = DefIdSet::default();
derefs.insert(did);
sidebar_deref_methods(cx, out, impl_, v, &mut derefs, &mut used_links);
}
@@ -2195,7 +2209,7 @@ fn sidebar_deref_methods(
out: &mut Buffer,
impl_: &Impl,
v: &[Impl],
- derefs: &mut FxHashSet<DefId>,
+ derefs: &mut DefIdSet,
used_links: &mut FxHashSet<String>,
) {
let c = cx.cache();
@@ -2211,14 +2225,13 @@ fn sidebar_deref_methods(
})
{
debug!("found target, real_target: {:?} {:?}", target, real_target);
- if let Some(did) = target.def_id(c) {
- if let Some(type_did) = impl_.inner_impl().for_.def_id(c) {
- // `impl Deref<Target = S> for S`
- if did == type_did || !derefs.insert(did) {
- // Avoid infinite cycles
- return;
- }
- }
+ if let Some(did) = target.def_id(c) &&
+ let Some(type_did) = impl_.inner_impl().for_.def_id(c) &&
+ // `impl Deref<Target = S> for S`
+ (did == type_did || !derefs.insert(did))
+ {
+ // Avoid infinite cycles
+ return;
}
let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
let inner_impl = target
@@ -2252,25 +2265,24 @@ fn sidebar_deref_methods(
}
// Recurse into any further impls that might exist for `target`
- if let Some(target_did) = target.def_id(c) {
- if let Some(target_impls) = c.impls.get(&target_did) {
- if let Some(target_deref_impl) = target_impls.iter().find(|i| {
- i.inner_impl()
- .trait_
- .as_ref()
- .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
- .unwrap_or(false)
- }) {
- sidebar_deref_methods(
- cx,
- out,
- target_deref_impl,
- target_impls,
- derefs,
- used_links,
- );
- }
- }
+ if let Some(target_did) = target.def_id(c) &&
+ let Some(target_impls) = c.impls.get(&target_did) &&
+ let Some(target_deref_impl) = target_impls.iter().find(|i| {
+ i.inner_impl()
+ .trait_
+ .as_ref()
+ .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
+ .unwrap_or(false)
+ })
+ {
+ sidebar_deref_methods(
+ cx,
+ out,
+ target_deref_impl,
+ target_impls,
+ derefs,
+ used_links,
+ );
}
}
}
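
The two hunks above also collapse nested `if let` blocks into `&&`-chained conditions. Below is a minimal illustration of that pattern with made-up names (at the time of this release, `let_chains` is still a nightly feature, which the rustc tree is allowed to use); it is a sketch of the rewrite shape, not rustdoc code.

```rust
#![feature(let_chains)] // nightly-only; permitted inside the rustc/rustdoc tree

// Hypothetical helper showing the shape of the rewrite: the early `return`
// now hangs off a single flattened condition instead of two nested `if let`s.
fn deref_target_id(target: Option<u32>, for_ty: Option<u32>) -> Option<u32> {
    if let Some(did) = target
        && let Some(type_did) = for_ty
        && did != type_did
    {
        return Some(did);
    }
    None
}

fn main() {
    assert_eq!(deref_target_id(Some(1), Some(2)), Some(1));
    assert_eq!(deref_target_id(Some(1), Some(1)), None);
}
```
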
@@ -2755,8 +2767,6 @@ fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
}
}
-pub(crate) const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
-
/// Returns a list of all paths used in the type.
/// This is used to help deduplicate imported impls
/// for reexported types. If any of the contained
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index f824c9e3a..2869a3961 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -10,7 +10,7 @@ use rustc_middle::ty::layout::LayoutError;
use rustc_middle::ty::{self, Adt, TyCtxt};
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Symbol};
-use rustc_target::abi::{LayoutS, Primitive, TagEncoding, VariantIdx, Variants};
+use rustc_target::abi::{LayoutS, Primitive, TagEncoding, Variants};
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
@@ -39,10 +39,10 @@ use crate::html::{highlight, static_files};
use askama::Template;
use itertools::Itertools;
-const ITEM_TABLE_OPEN: &str = "<div class=\"item-table\">";
-const ITEM_TABLE_CLOSE: &str = "</div>";
-const ITEM_TABLE_ROW_OPEN: &str = "<div class=\"item-row\">";
-const ITEM_TABLE_ROW_CLOSE: &str = "</div>";
+const ITEM_TABLE_OPEN: &str = "<ul class=\"item-table\">";
+const ITEM_TABLE_CLOSE: &str = "</ul>";
+const ITEM_TABLE_ROW_OPEN: &str = "<li>";
+const ITEM_TABLE_ROW_CLOSE: &str = "</li>";
// A component in a `use` path, like `string` in std::string::ToString
struct PathComponent {
@@ -338,14 +338,14 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
match *src {
Some(src) => write!(
w,
- "<div class=\"item-left\"><code>{}extern crate {} as {};",
+ "<div class=\"item-name\"><code>{}extern crate {} as {};",
visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
anchor(myitem.item_id.expect_def_id(), src, cx),
myitem.name.unwrap(),
),
None => write!(
w,
- "<div class=\"item-left\"><code>{}extern crate {};",
+ "<div class=\"item-name\"><code>{}extern crate {};",
visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
anchor(myitem.item_id.expect_def_id(), myitem.name.unwrap(), cx),
),
@@ -355,7 +355,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
}
clean::ImportItem(ref import) => {
- let (stab, stab_tags) = if let Some(import_def_id) = import.source.did {
+ let stab_tags = if let Some(import_def_id) = import.source.did {
let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id);
let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));
@@ -367,15 +367,12 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
..myitem.clone()
};
- let stab = import_item.stability_class(cx.tcx());
let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()));
- (stab, stab_tags)
+ stab_tags
} else {
- (None, None)
+ None
};
- let add = if stab.is_some() { " " } else { "" };
-
w.write_str(ITEM_TABLE_ROW_OPEN);
let id = match import.kind {
clean::ImportKind::Simple(s) => {
@@ -387,15 +384,14 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
let (stab_tags_before, stab_tags_after) = if stab_tags.is_empty() {
("", "")
} else {
- ("<div class=\"item-right docblock-short\">", "</div>")
+ ("<div class=\"desc docblock-short\">", "</div>")
};
write!(
w,
- "<div class=\"item-left {stab}{add}import-item\"{id}>\
+ "<div class=\"item-name\"{id}>\
<code>{vis}{imp}</code>\
</div>\
{stab_tags_before}{stab_tags}{stab_tags_after}",
- stab = stab.unwrap_or_default(),
vis = visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
imp = import.print(cx),
);
@@ -417,9 +413,6 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
_ => "",
};
- let stab = myitem.stability_class(cx.tcx());
- let add = if stab.is_some() { " " } else { "" };
-
let visibility_emoji = match myitem.visibility(tcx) {
Some(ty::Visibility::Restricted(_)) => {
"<span title=\"Restricted Visibility\">&nbsp;🔒</span> "
@@ -433,11 +426,11 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
let (docs_before, docs_after) = if docs.is_empty() {
("", "")
} else {
- ("<div class=\"item-right docblock-short\">", "</div>")
+ ("<div class=\"desc docblock-short\">", "</div>")
};
write!(
w,
- "<div class=\"item-left {stab}{add}module-item\">\
+ "<div class=\"item-name\">\
<a class=\"{class}\" href=\"{href}\" title=\"{title}\">{name}</a>\
{visibility_emoji}\
{unsafety_flag}\
@@ -448,11 +441,9 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
visibility_emoji = visibility_emoji,
stab_tags = extra_info_tags(myitem, item, cx.tcx()),
class = myitem.type_(),
- add = add,
- stab = stab.unwrap_or_default(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
- title = [full_path(cx, myitem), myitem.type_().to_string()]
+ title = [myitem.type_().to_string(), full_path(cx, myitem)]
.iter()
.filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None })
.collect::<Vec<_>>()
@@ -530,26 +521,24 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle
let notable_traits =
f.decl.output.as_return().and_then(|output| notable_traits_button(output, cx));
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- w.reserve(header_len);
- write!(
- w,
- "{vis}{constness}{asyncness}{unsafety}{abi}fn \
- {name}{generics}{decl}{notable_traits}{where_clause}",
- vis = visibility,
- constness = constness,
- asyncness = asyncness,
- unsafety = unsafety,
- abi = abi,
- name = name,
- generics = f.generics.print(cx),
- where_clause = print_where_clause(&f.generics, cx, 0, Ending::Newline),
- decl = f.decl.full_print(header_len, 0, cx),
- notable_traits = notable_traits.unwrap_or_default(),
- );
- });
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ w.reserve(header_len);
+ write!(
+ w,
+ "{vis}{constness}{asyncness}{unsafety}{abi}fn \
+ {name}{generics}{decl}{notable_traits}{where_clause}",
+ vis = visibility,
+ constness = constness,
+ asyncness = asyncness,
+ unsafety = unsafety,
+ abi = abi,
+ name = name,
+ generics = f.generics.print(cx),
+ where_clause = print_where_clause(&f.generics, cx, 0, Ending::Newline),
+ decl = f.decl.full_print(header_len, 0, cx),
+ notable_traits = notable_traits.unwrap_or_default(),
+ );
});
document(w, cx, it, None, HeadingOffset::H2);
}
@@ -569,145 +558,140 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
let must_implement_one_of_functions = tcx.trait_def(t.def_id).must_implement_one_of.clone();
// Output the trait definition
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- write!(
- w,
- "{}{}{}trait {}{}{}",
- visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
- t.unsafety(tcx).print_with_space(),
- if t.is_auto(tcx) { "auto " } else { "" },
- it.name.unwrap(),
- t.generics.print(cx),
- bounds
- );
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "{}{}{}trait {}{}{}",
+ visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
+ t.unsafety(tcx).print_with_space(),
+ if t.is_auto(tcx) { "auto " } else { "" },
+ it.name.unwrap(),
+ t.generics.print(cx),
+ bounds
+ );
- if !t.generics.where_predicates.is_empty() {
- write!(w, "{}", print_where_clause(&t.generics, cx, 0, Ending::Newline));
- } else {
- w.write_str(" ");
- }
+ if !t.generics.where_predicates.is_empty() {
+ write!(w, "{}", print_where_clause(&t.generics, cx, 0, Ending::Newline));
+ } else {
+ w.write_str(" ");
+ }
- if t.items.is_empty() {
- w.write_str("{ }");
- } else {
- // FIXME: we should be using a derived_id for the Anchors here
- w.write_str("{\n");
- let mut toggle = false;
-
- // If there are too many associated types, hide _everything_
- if should_hide_fields(count_types) {
- toggle = true;
- toggle_open(
- w,
- format_args!(
- "{} associated items",
- count_types + count_consts + count_methods
- ),
- );
- }
- for types in [&required_types, &provided_types] {
- for t in types {
- render_assoc_item(
- w,
- t,
- AssocItemLink::Anchor(None),
- ItemType::Trait,
- cx,
- RenderMode::Normal,
- );
- w.write_str(";\n");
- }
- }
- // If there are too many associated constants, hide everything after them
- // We also do this if the types + consts is large because otherwise we could
- // render a bunch of types and _then_ a bunch of consts just because both were
- // _just_ under the limit
- if !toggle && should_hide_fields(count_types + count_consts) {
- toggle = true;
- toggle_open(
- w,
- format_args!(
- "{} associated constant{} and {} method{}",
- count_consts,
- pluralize(count_consts),
- count_methods,
- pluralize(count_methods),
- ),
- );
- }
- if count_types != 0 && (count_consts != 0 || count_methods != 0) {
- w.write_str("\n");
- }
- for consts in [&required_consts, &provided_consts] {
- for c in consts {
- render_assoc_item(
- w,
- c,
- AssocItemLink::Anchor(None),
- ItemType::Trait,
- cx,
- RenderMode::Normal,
- );
- w.write_str(";\n");
- }
- }
- if !toggle && should_hide_fields(count_methods) {
- toggle = true;
- toggle_open(w, format_args!("{} methods", count_methods));
- }
- if count_consts != 0 && count_methods != 0 {
- w.write_str("\n");
- }
- for (pos, m) in required_methods.iter().enumerate() {
+ if t.items.is_empty() {
+ w.write_str("{ }");
+ } else {
+ // FIXME: we should be using a derived_id for the Anchors here
+ w.write_str("{\n");
+ let mut toggle = false;
+
+ // If there are too many associated types, hide _everything_
+ if should_hide_fields(count_types) {
+ toggle = true;
+ toggle_open(
+ w,
+ format_args!("{} associated items", count_types + count_consts + count_methods),
+ );
+ }
+ for types in [&required_types, &provided_types] {
+ for t in types {
render_assoc_item(
w,
- m,
+ t,
AssocItemLink::Anchor(None),
ItemType::Trait,
cx,
RenderMode::Normal,
);
w.write_str(";\n");
-
- if pos < required_methods.len() - 1 {
- w.write_str("<span class=\"item-spacer\"></span>");
- }
- }
- if !required_methods.is_empty() && !provided_methods.is_empty() {
- w.write_str("\n");
}
- for (pos, m) in provided_methods.iter().enumerate() {
+ }
+ // If there are too many associated constants, hide everything after them.
+ // We also do this if the number of types + consts is large, because otherwise
+ // we could render a bunch of types and _then_ a bunch of consts just because
+ // both were _just_ under the limit.
+ if !toggle && should_hide_fields(count_types + count_consts) {
+ toggle = true;
+ toggle_open(
+ w,
+ format_args!(
+ "{} associated constant{} and {} method{}",
+ count_consts,
+ pluralize(count_consts),
+ count_methods,
+ pluralize(count_methods),
+ ),
+ );
+ }
+ if count_types != 0 && (count_consts != 0 || count_methods != 0) {
+ w.write_str("\n");
+ }
+ for consts in [&required_consts, &provided_consts] {
+ for c in consts {
render_assoc_item(
w,
- m,
+ c,
AssocItemLink::Anchor(None),
ItemType::Trait,
cx,
RenderMode::Normal,
);
- match *m.kind {
- clean::MethodItem(ref inner, _)
- if !inner.generics.where_predicates.is_empty() =>
- {
- w.write_str(",\n { ... }\n");
- }
- _ => {
- w.write_str(" { ... }\n");
- }
- }
+ w.write_str(";\n");
+ }
+ }
+ if !toggle && should_hide_fields(count_methods) {
+ toggle = true;
+ toggle_open(w, format_args!("{} methods", count_methods));
+ }
+ if count_consts != 0 && count_methods != 0 {
+ w.write_str("\n");
+ }
- if pos < provided_methods.len() - 1 {
- w.write_str("<span class=\"item-spacer\"></span>");
- }
+ if !required_methods.is_empty() {
+ write!(w, " // Required method{}\n", pluralize(required_methods.len()));
+ }
+ for (pos, m) in required_methods.iter().enumerate() {
+ render_assoc_item(
+ w,
+ m,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+ w.write_str(";\n");
+
+ if pos < required_methods.len() - 1 {
+ w.write_str("<span class=\"item-spacer\"></span>");
}
- if toggle {
- toggle_close(w);
+ }
+ if !required_methods.is_empty() && !provided_methods.is_empty() {
+ w.write_str("\n");
+ }
+
+ if !provided_methods.is_empty() {
+ write!(w, " // Provided method{}\n", pluralize(provided_methods.len()));
+ }
+ for (pos, m) in provided_methods.iter().enumerate() {
+ render_assoc_item(
+ w,
+ m,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+
+ w.write_str(" { ... }\n");
+
+ if pos < provided_methods.len() - 1 {
+ w.write_str("<span class=\"item-spacer\"></span>");
}
- w.write_str("}");
}
- });
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+ }
});
// Trait documentation
@@ -735,7 +719,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
let method_toggle_class = if item_type.is_method() { " method-toggle" } else { "" };
write!(w, "<details class=\"toggle{method_toggle_class}\" open><summary>");
}
- write!(w, "<section id=\"{}\" class=\"method has-srclink\">", id);
+ write!(w, "<section id=\"{}\" class=\"method\">", id);
render_rightside(w, cx, m, t, RenderMode::Normal);
write!(w, "<h4 class=\"code-header\">");
render_assoc_item(
@@ -1050,18 +1034,16 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
}
fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::TraitAlias) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- write!(
- w,
- "trait {}{}{} = {};",
- it.name.unwrap(),
- t.generics.print(cx),
- print_where_clause(&t.generics, cx, 0, Ending::Newline),
- bounds(&t.bounds, true, cx)
- );
- });
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "trait {}{}{} = {};",
+ it.name.unwrap(),
+ t.generics.print(cx),
+ print_where_clause(&t.generics, cx, 0, Ending::Newline),
+ bounds(&t.bounds, true, cx)
+ );
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1074,18 +1056,16 @@ fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &
}
fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- write!(
- w,
- "type {}{}{where_clause} = impl {bounds};",
- it.name.unwrap(),
- t.generics.print(cx),
- where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
- bounds = bounds(&t.bounds, false, cx),
- );
- });
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "type {}{}{where_clause} = impl {bounds};",
+ it.name.unwrap(),
+ t.generics.print(cx),
+ where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
+ bounds = bounds(&t.bounds, false, cx),
+ );
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1101,10 +1081,10 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea
fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) {
wrap_item(w, |w| {
render_attributes_in_pre(w, it, "");
- write!(w, "{}", visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx));
write!(
w,
- "type {}{}{where_clause} = {type_};",
+ "{}type {}{}{where_clause} = {type_};",
+ visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
it.name.unwrap(),
t.generics.print(cx),
where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
@@ -1113,7 +1093,7 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea
});
}
- wrap_into_item_decl(w, |w| write_content(w, cx, it, t));
+ write_content(w, cx, it, t);
document(w, cx, it, None, HeadingOffset::H2);
@@ -1127,11 +1107,9 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea
}
fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- render_union(w, it, Some(&s.generics), &s.fields, "", cx);
- });
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ render_union(w, it, Some(&s.generics), &s.fields, cx);
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1160,13 +1138,11 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
<a href=\"#{id}\" class=\"anchor field\">§</a>\
<code>{name}: {ty}</code>\
</span>",
- id = id,
- name = name,
shortty = ItemType::StructField,
ty = ty.print(cx),
);
if let Some(stability_class) = field.stability_class(cx.tcx()) {
- write!(w, "<span class=\"stab {stab}\"></span>", stab = stability_class);
+ write!(w, "<span class=\"stab {stability_class}\"></span>");
}
document(w, cx, field, Some(it), HeadingOffset::H3);
}
@@ -1179,7 +1155,7 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
fn print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item]) {
for (i, ty) in s.iter().enumerate() {
if i > 0 {
- w.write_str(",&nbsp;");
+ w.write_str(", ");
}
match *ty.kind {
clean::StrippedItem(box clean::StructFieldItem(_)) => w.write_str("_"),
@@ -1192,60 +1168,58 @@ fn print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item]
fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::Enum) {
let tcx = cx.tcx();
let count_variants = e.variants().count();
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_pre(w, it, "");
- write!(
- w,
- "{}enum {}{}",
- visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
- it.name.unwrap(),
- e.generics.print(cx),
- );
- if !print_where_clause_and_check(w, &e.generics, cx) {
- // If there wasn't a `where` clause, we add a whitespace.
- w.write_str(" ");
- }
+ wrap_item(w, |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "{}enum {}{}",
+ visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
+ it.name.unwrap(),
+ e.generics.print(cx),
+ );
+ if !print_where_clause_and_check(w, &e.generics, cx) {
+ // If there wasn't a `where` clause, we add a whitespace.
+ w.write_str(" ");
+ }
- let variants_stripped = e.has_stripped_entries();
- if count_variants == 0 && !variants_stripped {
- w.write_str("{}");
- } else {
- w.write_str("{\n");
- let toggle = should_hide_fields(count_variants);
- if toggle {
- toggle_open(w, format_args!("{} variants", count_variants));
- }
- for v in e.variants() {
- w.write_str(" ");
- let name = v.name.unwrap();
- match *v.kind {
- // FIXME(#101337): Show discriminant
- clean::VariantItem(ref var) => match var.kind {
- clean::VariantKind::CLike => write!(w, "{}", name),
- clean::VariantKind::Tuple(ref s) => {
- write!(w, "{}(", name);
- print_tuple_struct_fields(w, cx, s);
- w.write_str(")");
- }
- clean::VariantKind::Struct(ref s) => {
- render_struct(w, v, None, None, &s.fields, " ", false, cx);
- }
- },
- _ => unreachable!(),
- }
- w.write_str(",\n");
+ let variants_stripped = e.has_stripped_entries();
+ if count_variants == 0 && !variants_stripped {
+ w.write_str("{}");
+ } else {
+ w.write_str("{\n");
+ let toggle = should_hide_fields(count_variants);
+ if toggle {
+ toggle_open(w, format_args!("{} variants", count_variants));
+ }
+ for v in e.variants() {
+ w.write_str(" ");
+ let name = v.name.unwrap();
+ match *v.kind {
+ // FIXME(#101337): Show discriminant
+ clean::VariantItem(ref var) => match var.kind {
+ clean::VariantKind::CLike => write!(w, "{}", name),
+ clean::VariantKind::Tuple(ref s) => {
+ write!(w, "{}(", name);
+ print_tuple_struct_fields(w, cx, s);
+ w.write_str(")");
+ }
+ clean::VariantKind::Struct(ref s) => {
+ render_struct(w, v, None, None, &s.fields, " ", false, cx);
+ }
+ },
+ _ => unreachable!(),
}
+ w.write_str(",\n");
+ }
- if variants_stripped {
- w.write_str(" // some variants omitted\n");
- }
- if toggle {
- toggle_close(w);
- }
- w.write_str("}");
+ if variants_stripped {
+ w.write_str(" // some variants omitted\n");
}
- });
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+ }
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1266,7 +1240,6 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
w,
"<section id=\"{id}\" class=\"variant\">\
<a href=\"#{id}\" class=\"anchor\">§</a>",
- id = id,
);
render_stability_since_raw_with_extra(
w,
@@ -1304,8 +1277,11 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
if let Some((heading, fields)) = heading_and_fields {
let variant_id =
cx.derive_id(format!("{}.{}.fields", ItemType::Variant, variant.name.unwrap()));
- write!(w, "<div class=\"sub-variant\" id=\"{id}\">", id = variant_id);
- write!(w, "<h4>{heading}</h4>", heading = heading);
+ write!(
+ w,
+ "<div class=\"sub-variant\" id=\"{variant_id}\">\
+ <h4>{heading}</h4>",
+ );
document_non_exhaustive(w, variant);
for field in fields {
match *field.kind {
@@ -1321,9 +1297,8 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
"<div class=\"sub-variant-field\">\
<span id=\"{id}\" class=\"small-section-header\">\
<a href=\"#{id}\" class=\"anchor field\">§</a>\
- <code>{f}:&nbsp;{t}</code>\
+ <code>{f}: {t}</code>\
</span>",
- id = id,
f = field.name.unwrap(),
t = ty.print(cx)
);
@@ -1346,38 +1321,30 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
}
fn item_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Macro) {
- wrap_into_item_decl(w, |w| {
- highlight::render_macro_with_highlighting(&t.source, w);
- });
+ highlight::render_item_decl_with_highlighting(&t.source, w);
document(w, cx, it, None, HeadingOffset::H2)
}
fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &clean::ProcMacro) {
- wrap_into_item_decl(w, |w| {
+ wrap_item(w, |w| {
let name = it.name.expect("proc-macros always have names");
match m.kind {
MacroKind::Bang => {
- wrap_item(w, |w| {
- write!(w, "{}!() {{ /* proc-macro */ }}", name);
- });
+ write!(w, "{}!() {{ /* proc-macro */ }}", name);
}
MacroKind::Attr => {
- wrap_item(w, |w| {
- write!(w, "#[{}]", name);
- });
+ write!(w, "#[{}]", name);
}
MacroKind::Derive => {
- wrap_item(w, |w| {
- write!(w, "#[derive({})]", name);
- if !m.helpers.is_empty() {
- w.push_str("\n{\n");
- w.push_str(" // Attributes available to this derive:\n");
- for attr in &m.helpers {
- writeln!(w, " #[{}]", attr);
- }
- w.push_str("}\n");
+ write!(w, "#[derive({})]", name);
+ if !m.helpers.is_empty() {
+ w.push_str("\n{\n");
+ w.push_str(" // Attributes available to this derive:\n");
+ for attr in &m.helpers {
+ writeln!(w, " #[{}]", attr);
}
- });
+ w.push_str("}\n");
+ }
}
}
});
@@ -1400,61 +1367,57 @@ fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
}
fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- let tcx = cx.tcx();
- render_attributes_in_code(w, it);
+ wrap_item(w, |w| {
+ let tcx = cx.tcx();
+ render_attributes_in_code(w, it);
- write!(
- w,
- "{vis}const {name}: {typ}",
- vis = visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
- name = it.name.unwrap(),
- typ = c.type_.print(cx),
- );
+ write!(
+ w,
+ "{vis}const {name}: {typ}",
+ vis = visibility_print_with_space(it.visibility(tcx), it.item_id, cx),
+ name = it.name.unwrap(),
+ typ = c.type_.print(cx),
+ );
- // FIXME: The code below now prints
- // ` = _; // 100i32`
- // if the expression is
- // `50 + 50`
- // which looks just wrong.
- // Should we print
- // ` = 100i32;`
- // instead?
-
- let value = c.value(tcx);
- let is_literal = c.is_literal(tcx);
- let expr = c.expr(tcx);
- if value.is_some() || is_literal {
- write!(w, " = {expr};", expr = Escape(&expr));
- } else {
- w.write_str(";");
- }
+ // FIXME: The code below now prints
+ // ` = _; // 100i32`
+ // if the expression is
+ // `50 + 50`
+ // which looks just wrong.
+ // Should we print
+ // ` = 100i32;`
+ // instead?
+
+ let value = c.value(tcx);
+ let is_literal = c.is_literal(tcx);
+ let expr = c.expr(tcx);
+ if value.is_some() || is_literal {
+ write!(w, " = {expr};", expr = Escape(&expr));
+ } else {
+ w.write_str(";");
+ }
- if !is_literal {
- if let Some(value) = &value {
- let value_lowercase = value.to_lowercase();
- let expr_lowercase = expr.to_lowercase();
+ if !is_literal {
+ if let Some(value) = &value {
+ let value_lowercase = value.to_lowercase();
+ let expr_lowercase = expr.to_lowercase();
- if value_lowercase != expr_lowercase
- && value_lowercase.trim_end_matches("i32") != expr_lowercase
- {
- write!(w, " // {value}", value = Escape(value));
- }
+ if value_lowercase != expr_lowercase
+ && value_lowercase.trim_end_matches("i32") != expr_lowercase
+ {
+ write!(w, " // {value}", value = Escape(value));
}
}
- });
+ }
});
document(w, cx, it, None, HeadingOffset::H2)
}
fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_code(w, it);
- render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx);
- });
+ wrap_item(w, |w| {
+ render_attributes_in_code(w, it);
+ render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx);
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1486,11 +1449,9 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
w,
"<span id=\"{id}\" class=\"{item_type} small-section-header\">\
<a href=\"#{id}\" class=\"anchor field\">§</a>\
- <code>{name}: {ty}</code>\
+ <code>{field_name}: {ty}</code>\
</span>",
item_type = ItemType::StructField,
- id = id,
- name = field_name,
ty = ty.print(cx)
);
document(w, cx, field, Some(it), HeadingOffset::H3);
@@ -1503,34 +1464,30 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
}
fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- render_attributes_in_code(w, it);
- write!(
- w,
- "{vis}static {mutability}{name}: {typ}",
- vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
- mutability = s.mutability.print_with_space(),
- name = it.name.unwrap(),
- typ = s.type_.print(cx)
- );
- });
+ wrap_item(w, |w| {
+ render_attributes_in_code(w, it);
+ write!(
+ w,
+ "{vis}static {mutability}{name}: {typ}",
+ vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
+ mutability = s.mutability.print_with_space(),
+ name = it.name.unwrap(),
+ typ = s.type_.print(cx)
+ );
});
document(w, cx, it, None, HeadingOffset::H2)
}
fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
- wrap_into_item_decl(w, |w| {
- wrap_item(w, |w| {
- w.write_str("extern {\n");
- render_attributes_in_code(w, it);
- write!(
- w,
- " {}type {};\n}}",
- visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
- it.name.unwrap(),
- );
- });
+ wrap_item(w, |w| {
+ w.write_str("extern {\n");
+ render_attributes_in_code(w, it);
+ write!(
+ w,
+ " {}type {};\n}}",
+ visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
+ it.name.unwrap(),
+ );
});
document(w, cx, it, None, HeadingOffset::H2);
@@ -1609,20 +1566,11 @@ fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool, cx: &Context<'_>)
bounds
}
-fn wrap_into_item_decl<F>(w: &mut Buffer, f: F)
-where
- F: FnOnce(&mut Buffer),
-{
- w.write_str("<div class=\"item-decl\">");
- f(w);
- w.write_str("</div>")
-}
-
fn wrap_item<F>(w: &mut Buffer, f: F)
where
F: FnOnce(&mut Buffer),
{
- w.write_str(r#"<pre class="rust"><code>"#);
+ w.write_str(r#"<pre class="rust item-decl"><code>"#);
f(w);
w.write_str("</code></pre>");
}
@@ -1677,7 +1625,6 @@ fn render_union(
it: &clean::Item,
g: Option<&clean::Generics>,
fields: &[clean::Item],
- tab: &str,
cx: &Context<'_>,
) {
let tcx = cx.tcx();
@@ -1700,7 +1647,7 @@ fn render_union(
w.write_str(" ");
}
- write!(w, "{{\n{}", tab);
+ write!(w, "{{\n");
let count_fields =
fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count();
let toggle = should_hide_fields(count_fields);
@@ -1712,17 +1659,16 @@ fn render_union(
if let clean::StructFieldItem(ref ty) = *field.kind {
write!(
w,
- " {}{}: {},\n{}",
+ " {}{}: {},\n",
visibility_print_with_space(field.visibility(tcx), field.item_id, cx),
field.name.unwrap(),
- ty.print(cx),
- tab
+ ty.print(cx)
);
}
}
if it.has_stripped_entries().unwrap() {
- write!(w, " /* private fields */\n{}", tab);
+ write!(w, " /* private fields */\n");
}
if toggle {
toggle_close(w);
@@ -1887,12 +1833,12 @@ fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) {
}
fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
- fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS<VariantIdx>, tag_size: u64) {
+ fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) {
if layout.abi.is_unsized() {
write!(w, "(unsized)");
} else {
- let bytes = layout.size.bytes() - tag_size;
- write!(w, "{size} byte{pl}", size = bytes, pl = if bytes == 1 { "" } else { "s" },);
+ let size = layout.size.bytes() - tag_size;
+ write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" },);
}
}
@@ -1909,7 +1855,7 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
let tcx = cx.tcx();
let param_env = tcx.param_env(ty_def_id);
- let ty = tcx.type_of(ty_def_id);
+ let ty = tcx.type_of(ty_def_id).subst_identity();
match tcx.layout_of(param_env.and(ty)) {
Ok(ty_layout) => {
writeln!(
@@ -1947,7 +1893,7 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
for (index, layout) in variants.iter_enumerated() {
let name = adt.variant(index).name;
- write!(w, "<li><code>{name}</code>: ", name = name);
+ write!(w, "<li><code>{name}</code>: ");
write_size_of_layout(w, layout, tag_size);
writeln!(w, "</li>");
}
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
index 5b0caac09..090ea2cb1 100644
--- a/src/librustdoc/html/render/search_index.rs
+++ b/src/librustdoc/html/render/search_index.rs
@@ -236,7 +236,16 @@ pub(crate) fn build_index<'tcx>(
crate_data.serialize_field("doc", &self.doc)?;
crate_data.serialize_field(
"t",
- &self.items.iter().map(|item| &item.ty).collect::<Vec<_>>(),
+ &self
+ .items
+ .iter()
+ .map(|item| {
+ let n = item.ty as u8;
+ let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
+ assert!(c <= 'z', "item types must fit within ASCII printables");
+ c
+ })
+ .collect::<String>(),
)?;
crate_data.serialize_field(
"n",
diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs
index bc8badad3..54749e9a3 100644
--- a/src/librustdoc/html/render/write_shared.rs
+++ b/src/librustdoc/html/render/write_shared.rs
@@ -11,7 +11,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
-use super::{collect_paths_for_type, ensure_trailing_slash, Context, BASIC_KEYWORDS};
+use super::{collect_paths_for_type, ensure_trailing_slash, Context};
use crate::clean::Crate;
use crate::config::{EmitType, RenderOptions};
use crate::docfs::PathError;
@@ -138,7 +138,7 @@ pub(super) fn write_shared(
Ok((ret, krates))
}
- /// Read a file and return all lines that match the <code>"{crate}":{data},\</code> format,
+ /// Read a file and return all lines that match the <code>"{crate}":{data},\ </code> format,
/// and return a tuple `(Vec<DataString>, Vec<CrateNameString>)`.
///
/// This forms the payload of files that look like this:
@@ -340,7 +340,6 @@ if (typeof exports !== 'undefined') {exports.searchIndex = searchIndex};
root_path: "./",
static_root_path: shared.static_root_path.as_deref(),
description: "List of crates",
- keywords: BASIC_KEYWORDS,
resource_suffix: &shared.resource_suffix,
};
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index 799c497d1..2c90bf4fa 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -4,7 +4,7 @@ use crate::error::Error;
use crate::html::format::Buffer;
use crate::html::highlight;
use crate::html::layout;
-use crate::html::render::{Context, BASIC_KEYWORDS};
+use crate::html::render::Context;
use crate::visit::DocVisitor;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -228,7 +228,6 @@ impl SourceCollector<'_, '_> {
root_path: &root_path,
static_root_path: shared.static_root_path.as_deref(),
description: &desc,
- keywords: BASIC_KEYWORDS,
resource_suffix: &shared.resource_suffix,
};
let v = layout::render(
diff --git a/src/librustdoc/html/static/.eslintrc.js b/src/librustdoc/html/static/.eslintrc.js
index fcd925bb3..1a34530c2 100644
--- a/src/librustdoc/html/static/.eslintrc.js
+++ b/src/librustdoc/html/static/.eslintrc.js
@@ -90,7 +90,6 @@ module.exports = {
"no-return-assign": "error",
"no-script-url": "error",
"no-sequences": "error",
- "no-throw-literal": "error",
"no-div-regex": "error",
}
};
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
index a93f60da2..95528e70e 100644
--- a/src/librustdoc/html/static/css/rustdoc.css
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -1,3 +1,11 @@
+/* When static files are updated, their suffixes need to be updated.
+ 1. In the top directory run:
+ ./x.py doc --stage 1 library/core
+ 2. Find the directory containing files named with updated suffixes:
+ find build -path '*'/stage1-std/'*'/static.files
+ 3. Copy the filenames with updated suffixes from the directory.
+*/
+
/* See FiraSans-LICENSE.txt for the Fira Sans license. */
@font-face {
font-family: 'Fira Sans';
@@ -22,7 +30,7 @@
font-style: normal;
font-weight: 400;
src: local('Source Serif 4'),
- url("SourceSerif4-Regular-1f7d512b176f0f72.ttf.woff2") format("woff2");
+ url("SourceSerif4-Regular-46f98efaafac5295.ttf.woff2") format("woff2");
font-display: swap;
}
@font-face {
@@ -30,7 +38,7 @@
font-style: italic;
font-weight: 400;
src: local('Source Serif 4 Italic'),
- url("SourceSerif4-It-d034fe4ef9d0fa00.ttf.woff2") format("woff2");
+ url("SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2") format("woff2");
font-display: swap;
}
@font-face {
@@ -38,7 +46,7 @@
font-style: normal;
font-weight: 700;
src: local('Source Serif 4 Bold'),
- url("SourceSerif4-Bold-124a1ca42af929b6.ttf.woff2") format("woff2");
+ url("SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2") format("woff2");
font-display: swap;
}
@@ -166,6 +174,14 @@ h1, h2, h3, h4 {
.top-doc .docblock > h4 {
border-bottom: 1px solid var(--headings-border-bottom-color);
}
+/* while line-height 1.5 is required for any "block of text",
+ which WCAG defines as more than one sentence, it looks weird for
+ very large main headers */
+h1, h2 {
+ line-height: 1.25;
+ padding-top: 3px;
+ padding-bottom: 9px;
+}
h3.code-header {
font-size: 1.125rem; /* 18px */
}
@@ -176,6 +192,7 @@ h4.code-header {
font-weight: 600;
margin: 0;
padding: 0;
+ white-space: pre-wrap;
}
#crate-search,
@@ -184,7 +201,7 @@ h1, h2, h3, h4, h5, h6,
.mobile-topbar,
.search-input,
.search-results .result-name,
-.item-left > a,
+.item-name > a,
.out-of-band,
span.since,
a.srclink,
@@ -335,7 +352,7 @@ pre {
padding: 14px;
line-height: 1.5; /* https://github.com/rust-lang/rust/issues/105906 */
}
-.item-decl pre {
+pre.item-decl {
overflow-x: auto;
}
/* This rule allows to have scrolling on the X axis. */
@@ -533,7 +550,7 @@ ul.block, .block li {
.rustdoc .example-wrap > pre {
margin: 0;
flex-grow: 1;
- overflow-x: auto;
+ overflow: auto hidden;
}
.rustdoc .example-wrap > pre.example-line-numbers,
@@ -634,6 +651,7 @@ pre, .rustdoc.source .example-wrap {
.fn .where,
.where.fmt-newline {
display: block;
+ white-space: pre-wrap;
font-size: 0.875rem;
}
@@ -697,8 +715,8 @@ h2.small-section-header > .anchor {
.main-heading a:hover,
.example-wrap > pre.rust a:hover,
.all-items a:hover,
-.docblock a:not(.test-arrow):not(.scrape-help):hover,
-.docblock-short a:not(.test-arrow):not(.scrape-help):hover,
+.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover,
+.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover,
.item-info a {
text-decoration: underline;
}
@@ -732,14 +750,16 @@ table,
.item-table {
display: table;
+ padding: 0;
+ margin: 0;
}
-.item-row {
+.item-table > li {
display: table-row;
}
-.item-left, .item-right {
+.item-table > li > div {
display: table-cell;
}
-.item-left {
+.item-table > li > .item-name {
padding-right: 1.25rem;
}
@@ -806,8 +826,11 @@ so that we can apply CSS-filters to change the arrow color in themes */
background-repeat: no-repeat;
background-size: 20px;
background-position: calc(100% - 2px) 56%;
- /* image is black color */
- background-image: url("down-arrow-927217e04c7463ac.svg");
+ /* down arrow (image is black color) */
+ background-image: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
+ width="128" height="128" viewBox="-30 -20 176 176"><path d="M111,40.5L64,87.499L17,40.5" \
+ fill="none" stroke="black" strike-linecap="square" stroke-miterlimit="10" stroke-width="12"/> \
+ </svg>');
/* changes the arrow image color */
filter: var(--crate-search-div-filter);
}
@@ -941,7 +964,7 @@ so that we can apply CSS-filters to change the arrow color in themes */
padding: 3px;
margin-bottom: 5px;
}
-.item-left .stab {
+.item-name .stab {
margin-left: 0.3125em;
}
.stab {
@@ -977,11 +1000,6 @@ so that we can apply CSS-filters to change the arrow color in themes */
0 -1px 0 black;
}
-.module-item.unstable,
-.import-item.unstable {
- opacity: 0.65;
-}
-
.since {
font-weight: normal;
font-size: initial;
@@ -1093,44 +1111,8 @@ pre.rust .doccomment {
display: block;
left: -25px;
top: 5px;
-}
-
-.example-wrap .tooltip:hover::after {
- padding: 5px 3px 3px 3px;
- border-radius: 6px;
- margin-left: 5px;
- font-size: 1rem;
- border: 1px solid var(--border-color);
- position: absolute;
- width: max-content;
- top: -2px;
- z-index: 1;
- background-color: var(--tooltip-background-color);
- color: var(--tooltip-color);
-}
-
-.example-wrap .tooltip:hover::before {
- content: " ";
- position: absolute;
- top: 50%;
- left: 16px;
- margin-top: -5px;
- z-index: 1;
- border: 5px solid transparent;
- border-right-color: var(--tooltip-background-color);
-}
-
-.example-wrap.ignore .tooltip:hover::after {
- content: "This example is not tested";
-}
-.example-wrap.compile_fail .tooltip:hover::after {
- content: "This example deliberately fails to compile";
-}
-.example-wrap.should_panic .tooltip:hover::after {
- content: "This example panics";
-}
-.example-wrap.edition .tooltip:hover::after {
- content: "This code runs with edition " attr(data-edition);
+ margin: 0;
+ line-height: 1;
}
.example-wrap.compile_fail .tooltip,
@@ -1168,6 +1150,7 @@ a.test-arrow:hover {
.item-spacer {
width: 100%;
height: 12px;
+ display: block;
}
.out-of-band > span.since {
@@ -1196,7 +1179,7 @@ a.test-arrow:hover {
border-right: 3px solid var(--target-border-color);
}
-.notable-traits {
+.code-header a.tooltip {
color: inherit;
margin-right: 15px;
position: relative;
@@ -1205,7 +1188,7 @@ a.test-arrow:hover {
/* placeholder thunk so that the mouse can easily travel from "(i)" to popover
the resulting "hover tunnel" is a stepped triangle, approximating
https://bjk5.com/post/44698559168/breaking-down-amazons-mega-dropdown */
-.notable-traits:hover::after {
+a.tooltip:hover::after {
position: absolute;
top: calc(100% - 10px);
left: -15px;
@@ -1214,11 +1197,11 @@ a.test-arrow:hover {
content: "\00a0";
}
-.notable .content {
+.popover.tooltip .content {
margin: 0.25em 0.5em;
}
-.notable .content pre, .notable .content code {
+.popover.tooltip .content pre, .popover.tooltip .content code {
background: transparent;
margin: 0;
padding: 0;
@@ -1226,7 +1209,7 @@ a.test-arrow:hover {
white-space: pre-wrap;
}
-.notable .content > h3:first-child {
+.popover.tooltip .content > h3:first-child {
margin: 0 0 5px 0;
}
@@ -1263,12 +1246,25 @@ a.test-arrow:hover {
line-height: 1.5;
color: inherit;
}
+#search-tabs button:not(.selected) {
+ background-color: var(--search-tab-button-not-selected-background);
+ border-top-color: var(--search-tab-button-not-selected-border-top-color);
+}
+#search-tabs button:hover, #search-tabs button.selected {
+ background-color: var(--search-tab-button-selected-background);
+ border-top-color: var(--search-tab-button-selected-border-top-color);
+}
#search-tabs .count {
font-size: 1rem;
color: var(--search-tab-title-count-color);
}
+#search .error code {
+ border-radius: 3px;
+ background-color: var(--search-error-code-background-color);
+}
+
#src-sidebar-toggle {
position: sticky;
top: 0;
@@ -1433,7 +1429,10 @@ details.toggle > summary.hideme > span {
}
details.toggle > summary::before {
- background: url("toggle-plus-1092eb4930d581b0.svg") no-repeat top left;
+ /* toggle plus */
+ background: url('data:image/svg+xml,<svg width="17" height="17" \
+shape-rendering="crispEdges" stroke="black" fill="none" xmlns="http://www.w3.org/2000/svg"><path \
+d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7M8.5 12V8.625v0V5"/></svg>') no-repeat top left;
content: "";
cursor: pointer;
width: 16px;
@@ -1511,7 +1510,10 @@ details.toggle[open] > summary.hideme > span {
}
details.toggle[open] > summary::before {
- background: url("toggle-minus-31bbd6e4c77f5c96.svg") no-repeat top left;
+ /* toggle minus */
+ background: url('data:image/svg+xml,<svg width="17" height="17" \
+shape-rendering="crispEdges" stroke="black" fill="none" xmlns="http://www.w3.org/2000/svg"><path \
+d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7"/></svg>') no-repeat top left;
}
details.toggle[open] > summary::after {
@@ -1700,7 +1702,7 @@ in storage.js
}
/* Display an alternating layout on tablets and phones */
- .item-table, .item-row, .item-left, .item-right,
+ .item-table, .item-row, .item-table > li, .item-table > li > div,
.search-results > a, .search-results > a > div {
display: block;
}
@@ -1709,7 +1711,7 @@ in storage.js
.search-results > a {
padding: 5px 0px;
}
- .search-results > a > div.desc, .item-right {
+ .search-results > a > div.desc, .item-table > li > div.desc {
padding-left: 2em;
}
@@ -1897,19 +1899,25 @@ in storage.js
right: 0.25em;
}
-.scraped-example:not(.expanded) .code-wrapper:before,
-.scraped-example:not(.expanded) .code-wrapper:after {
+.scraped-example:not(.expanded) .code-wrapper::before,
+.scraped-example:not(.expanded) .code-wrapper::after {
content: " ";
width: 100%;
height: 5px;
position: absolute;
z-index: 1;
}
-.scraped-example:not(.expanded) .code-wrapper:before {
+.scraped-example:not(.expanded) .code-wrapper::before {
top: 0;
+ background: linear-gradient(to bottom,
+ var(--scrape-example-code-wrapper-background-start),
+ var(--scrape-example-code-wrapper-background-end));
}
-.scraped-example:not(.expanded) .code-wrapper:after {
+.scraped-example:not(.expanded) .code-wrapper::after {
bottom: 0;
+ background: linear-gradient(to top,
+ var(--scrape-example-code-wrapper-background-start),
+ var(--scrape-example-code-wrapper-background-end));
}
.scraped-example .code-wrapper .example-wrap {
diff --git a/src/librustdoc/html/static/css/settings.css b/src/librustdoc/html/static/css/settings.css
index 4e9803fe2..920f45c4b 100644
--- a/src/librustdoc/html/static/css/settings.css
+++ b/src/librustdoc/html/static/css/settings.css
@@ -3,8 +3,7 @@
position: relative;
}
-.setting-line .radio-line input,
-.setting-line .settings-toggle input {
+.setting-radio input, .setting-check input {
margin-right: 0.3em;
height: 1.2rem;
width: 1.2rem;
@@ -14,21 +13,20 @@
-webkit-appearance: none;
cursor: pointer;
}
-.setting-line .radio-line input {
+.setting-radio input {
border-radius: 50%;
}
-.setting-line .settings-toggle input:checked {
+.setting-check input:checked {
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 40 40">\
<path d="M7,25L17,32L33,12" fill="none" stroke="black" stroke-width="5"/>\
<path d="M7,23L17,30L33,10" fill="none" stroke="white" stroke-width="5"/></svg>');
}
-.setting-line .radio-line input + span,
-.setting-line .settings-toggle span {
+.setting-radio span, .setting-check span {
padding-bottom: 1px;
}
-.radio-line .choice {
+.setting-radio {
margin-top: 0.1em;
margin-bottom: 0.1em;
min-width: 3.8em;
@@ -37,36 +35,32 @@
align-items: center;
cursor: pointer;
}
-.radio-line .choice + .choice {
+.setting-radio + .setting-radio {
margin-left: 0.5em;
}
-.settings-toggle {
- position: relative;
- width: 100%;
+.setting-check {
margin-right: 20px;
display: flex;
align-items: center;
cursor: pointer;
}
-.setting-line .radio-line input:checked {
+.setting-radio input:checked {
box-shadow: inset 0 0 0 3px var(--main-background-color);
background-color: var(--settings-input-color);
}
-.setting-line .settings-toggle input:checked {
+.setting-check input:checked {
background-color: var(--settings-input-color);
}
-.setting-line .radio-line input:focus,
-.setting-line .settings-toggle input:focus {
+.setting-radio input:focus, .setting-check input:focus {
box-shadow: 0 0 1px 1px var(--settings-input-color);
}
/* In here we combine both `:focus` and `:checked` properties. */
-.setting-line .radio-line input:checked:focus {
+.setting-radio input:checked:focus {
box-shadow: inset 0 0 0 3px var(--main-background-color),
0 0 2px 2px var(--settings-input-color);
}
-.setting-line .radio-line input:hover,
-.setting-line .settings-toggle input:hover {
+.setting-radio input:hover, .setting-check input:hover {
border-color: var(--settings-input-color) !important;
}
diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css
index 979e7e0f9..90cf689ad 100644
--- a/src/librustdoc/html/static/css/themes/ayu.css
+++ b/src/librustdoc/html/static/css/themes/ayu.css
@@ -43,9 +43,14 @@ Original by Dempfi (https://github.com/dempfi/ayu)
--search-result-link-focus-background-color: #3c3c3c;
--search-result-border-color: #aaa3;
--search-color: #fff;
+ --search-error-code-background-color: #4f4c4c;
--search-results-alias-color: #c5c5c5;
--search-results-grey-color: #999;
--search-tab-title-count-color: #888;
+ --search-tab-button-not-selected-border-top-color: none;
+ --search-tab-button-not-selected-background: transparent !important;
+ --search-tab-button-selected-border-top-color: none;
+ --search-tab-button-selected-background: #141920 !important;
--stab-background-color: #314559;
--stab-code-color: #e6e1cf;
--code-highlight-kw-color: #ff7733;
@@ -70,8 +75,6 @@ Original by Dempfi (https://github.com/dempfi/ayu)
--test-arrow-hover-background-color: rgba(57, 175, 215, 0.368);
--target-background-color: rgba(255, 236, 164, 0.06);
--target-border-color: rgba(255, 180, 76, 0.85);
- --tooltip-background-color: #314559;
- --tooltip-color: #c5c5c5;
--kbd-color: #c5c5c5;
--kbd-background: #314559;
--kbd-box-shadow-color: #5c6773;
@@ -97,12 +100,13 @@ Original by Dempfi (https://github.com/dempfi/ayu)
--scrape-example-help-color: #eee;
--scrape-example-help-hover-border-color: #fff;
--scrape-example-help-hover-color: #fff;
+ --scrape-example-code-wrapper-background-start: rgba(15, 20, 25, 1);
+ --scrape-example-code-wrapper-background-end: rgba(15, 20, 25, 0);
}
-h1, h2, h3, h4 {
- color: white;
-}
-h1 a {
+h1, h2, h3, h4,
+h1 a, .sidebar h2 a, .sidebar h3 a,
+#source-sidebar > .title {
color: #fff;
}
h4 {
@@ -112,24 +116,22 @@ h4 {
.docblock code {
color: #ffb454;
}
-.code-header {
- color: #e6e1cf;
-}
-.docblock pre > code, pre > code {
- color: #e6e1cf;
-}
-.item-info code {
- color: #e6e1cf;
-}
.docblock a > code {
color: #39AFD7 !important;
}
-pre, .rustdoc.source .example-wrap {
+.code-header,
+.docblock pre > code,
+pre, pre > code,
+.item-info code,
+.rustdoc.source .example-wrap {
color: #e6e1cf;
}
.sidebar .current,
-.sidebar a:hover {
+.sidebar a:hover,
+#source-sidebar div.files > a:hover, details.dir-entry summary:hover,
+#source-sidebar div.files > a:focus, details.dir-entry summary:focus,
+#source-sidebar div.files > a.selected {
color: #ffb44c;
}
@@ -143,15 +145,12 @@ pre, .rustdoc.source .example-wrap {
border-right: 1px solid #ffb44c;
}
-.search-results a:hover {
- color: #fff !important;
- background-color: #3c3c3c;
-}
-
+.search-results a:hover,
.search-results a:focus {
color: #fff !important;
background-color: #3c3c3c;
}
+
.search-results a {
color: #0096cf;
}
@@ -159,54 +158,22 @@ pre, .rustdoc.source .example-wrap {
color: #c5c5c5;
}
-.sidebar h2 a,
-.sidebar h3 a {
- color: white;
-}
-
.result-name .primitive > i, .result-name .keyword > i {
color: #788797;
}
#search-tabs > button.selected {
- background-color: #141920 !important;
border-bottom: 1px solid #ffb44c !important;
border-top: none;
}
-
#search-tabs > button:not(.selected) {
- background-color: transparent !important;
border: none;
+ background-color: transparent !important;
}
-
#search-tabs > button:hover {
border-bottom: 1px solid rgba(242, 151, 24, 0.3);
}
-/* rules that this theme does not need to set, here to satisfy the rule checker */
-/* note that a lot of these are partially set in some way (meaning they are set
-individually rather than as a group) */
-/* FIXME: these rules should be at the bottom of the file but currently must be
-above the `@media (max-width: 700px)` rules due to a bug in the css checker */
-/* see https://github.com/rust-lang/rust/pull/71237#issuecomment-618170143 */
-#search-tabs > button:hover, #search-tabs > button.selected {}
-
#settings-menu > a img {
filter: invert(100);
}
-
-#source-sidebar > .title {
- color: #fff;
-}
-#source-sidebar div.files > a:hover, details.dir-entry summary:hover,
-#source-sidebar div.files > a:focus, details.dir-entry summary:focus,
-#source-sidebar div.files > a.selected {
- color: #ffb44c;
-}
-
-.scraped-example:not(.expanded) .code-wrapper::before {
- background: linear-gradient(to bottom, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
- background: linear-gradient(to top, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
-}
diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css
index fb15863b0..e8cd06931 100644
--- a/src/librustdoc/html/static/css/themes/dark.css
+++ b/src/librustdoc/html/static/css/themes/dark.css
@@ -38,9 +38,14 @@
--search-result-link-focus-background-color: #616161;
--search-result-border-color: #aaa3;
--search-color: #111;
+ --search-error-code-background-color: #484848;
--search-results-alias-color: #fff;
--search-results-grey-color: #ccc;
--search-tab-title-count-color: #888;
+ --search-tab-button-not-selected-border-top-color: #252525;
+ --search-tab-button-not-selected-background: #252525;
+ --search-tab-button-selected-border-top-color: #0089ff;
+ --search-tab-button-selected-background: #353535;
--stab-background-color: #314559;
--stab-code-color: #e6e1cf;
--code-highlight-kw-color: #ab8ac1;
@@ -65,8 +70,6 @@
--test-arrow-hover-background-color: #4e8bca;
--target-background-color: #494a3d;
--target-border-color: #bb7410;
- --tooltip-background-color: #000;
- --tooltip-color: #fff;
--kbd-color: #000;
--kbd-background: #fafbfc;
--kbd-box-shadow-color: #c6cbd1;
@@ -92,21 +95,6 @@
--scrape-example-help-color: #eee;
--scrape-example-help-hover-border-color: #fff;
--scrape-example-help-hover-color: #fff;
-}
-
-#search-tabs > button:not(.selected) {
- background-color: #252525;
- border-top-color: #252525;
-}
-
-#search-tabs > button:hover, #search-tabs > button.selected {
- border-top-color: #0089ff;
- background-color: #353535;
-}
-
-.scraped-example:not(.expanded) .code-wrapper::before {
- background: linear-gradient(to bottom, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
- background: linear-gradient(to top, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
+ --scrape-example-code-wrapper-background-start: rgba(53, 53, 53, 1);
+ --scrape-example-code-wrapper-background-end: rgba(53, 53, 53, 0);
}
diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css
index 053fa78d1..5e3f14e48 100644
--- a/src/librustdoc/html/static/css/themes/light.css
+++ b/src/librustdoc/html/static/css/themes/light.css
@@ -38,9 +38,14 @@
--search-result-link-focus-background-color: #ccc;
--search-result-border-color: #aaa3;
--search-color: #000;
+ --search-error-code-background-color: #d0cccc;
--search-results-alias-color: #000;
--search-results-grey-color: #999;
--search-tab-title-count-color: #888;
+ --search-tab-button-not-selected-border-top-color: #e6e6e6;
+ --search-tab-button-not-selected-background: #e6e6e6;
+ --search-tab-button-selected-border-top-color: #0089ff;
+ --search-tab-button-selected-background: #ffffff;
--stab-background-color: #fff5d6;
--stab-code-color: #000;
--code-highlight-kw-color: #8959a8;
@@ -65,8 +70,6 @@
--test-arrow-hover-background-color: #4e8bca;
--target-background-color: #fdffd3;
--target-border-color: #ad7c37;
- --tooltip-background-color: #000;
- --tooltip-color: #fff;
--kbd-color: #000;
--kbd-background: #fafbfc;
--kbd-box-shadow-color: #c6cbd1;
@@ -89,21 +92,6 @@
--scrape-example-help-color: #333;
--scrape-example-help-hover-border-color: #000;
--scrape-example-help-hover-color: #000;
-}
-
-#search-tabs > button:not(.selected) {
- background-color: #e6e6e6;
- border-top-color: #e6e6e6;
-}
-
-#search-tabs > button:hover, #search-tabs > button.selected {
- background-color: #ffffff;
- border-top-color: #0089ff;
-}
-
-.scraped-example:not(.expanded) .code-wrapper::before {
- background: linear-gradient(to bottom, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
- background: linear-gradient(to top, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
+ --scrape-example-code-wrapper-background-start: rgba(255, 255, 255, 1);
+ --scrape-example-code-wrapper-background-end: rgba(255, 255, 255, 0);
}
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2
index db57d2145..181a07f63 100644
--- a/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2
index 1cbc021a3..2ae08a7be 100644
--- a/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
index 68ea18924..5871e1f3d 100644
--- a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
@@ -1,4 +1,4 @@
-Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
+Copyright 2014 - 2023 Adobe (http://www.adobe.com/), with Reserved Font Name ‘Source’. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
This Font Software is licensed under the SIL Open Font License, Version 1.1.
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2
index 2db73fe2b..0263fc304 100644
--- a/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/images/down-arrow.svg b/src/librustdoc/html/static/images/down-arrow.svg
deleted file mode 100644
index 5d76a64e9..000000000
--- a/src/librustdoc/html/static/images/down-arrow.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" id="Layer_1" width="128" height="128" enable-background="new 0 0 128 128" version="1.1" viewBox="-30 -20 176 176" xml:space="preserve"><g><line x1="111" x2="64" y1="40.5" y2="87.499" fill="none" stroke="#000000" stroke-linecap="square" stroke-miterlimit="10" stroke-width="12"/><line x1="64" x2="17" y1="87.499" y2="40.5" fill="none" stroke="#000000" stroke-linecap="square" stroke-miterlimit="10" stroke-width="12"/></g></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/toggle-minus.svg b/src/librustdoc/html/static/images/toggle-minus.svg
deleted file mode 100644
index 73154788a..000000000
--- a/src/librustdoc/html/static/images/toggle-minus.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg width="17" height="17" shape-rendering="crispEdges" stroke="#000" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7"/></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/toggle-plus.svg b/src/librustdoc/html/static/images/toggle-plus.svg
deleted file mode 100644
index 08b17033e..000000000
--- a/src/librustdoc/html/static/images/toggle-plus.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg width="17" height="17" shape-rendering="crispEdges" stroke="#000" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7M8.5 12V8.625v0V5"/></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/wheel.svg b/src/librustdoc/html/static/images/wheel.svg
index 01da3b24c..83c07f63d 100644
--- a/src/librustdoc/html/static/images/wheel.svg
+++ b/src/librustdoc/html/static/images/wheel.svg
@@ -1 +1 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" id="Capa_1" width="27.434" height="29.5" enable-background="new 0 0 27.434 29.5" version="1.1" viewBox="0 0 27.434 29.5" xml:space="preserve"><g><path d="M27.315,18.389c-0.165-0.604-0.509-1.113-0.981-1.459c-0.042-0.144-0.083-0.429-0.015-0.761l0.037-0.177v-0.182V14.8 c0-1.247-0.006-1.277-0.048-1.472c-0.076-0.354-0.035-0.653,0.007-0.803c0.477-0.346,0.828-0.861,0.996-1.476 c0.261-0.956,0.076-2.091-0.508-3.114l-0.591-1.032c-0.746-1.307-1.965-2.119-3.182-2.119c-0.378,0-0.75,0.081-1.085,0.235 c-0.198-0.025-0.554-0.15-0.855-0.389l-0.103-0.082l-0.114-0.065l-1.857-1.067L18.92,3.36l-0.105-0.044 c-0.376-0.154-0.658-0.41-0.768-0.556C17.918,1.172,16.349,0,14.296,0H13.14c-2.043,0-3.608,1.154-3.749,2.721 C9.277,2.862,8.999,3.104,8.633,3.25l-0.1,0.039L8.439,3.341L6.495,4.406L6.363,4.479L6.245,4.573 C5.936,4.82,5.596,4.944,5.416,4.977c-0.314-0.139-0.66-0.21-1.011-0.21c-1.198,0-2.411,0.819-3.165,2.139L0.65,7.938 c-0.412,0.72-0.642,1.521-0.644,2.258c-0.003,0.952,0.362,1.756,1.013,2.256c0.034,0.155,0.061,0.448-0.016,0.786 c-0.038,0.168-0.062,0.28-0.062,1.563c0,1.148,0,1.148,0.015,1.262l0.009,0.073l0.017,0.073c0.073,0.346,0.045,0.643,0.011,0.802 C0.348,17.512-0.01,18.314,0,19.268c0.008,0.729,0.238,1.523,0.648,2.242l0.589,1.031c0.761,1.331,1.967,2.159,3.15,2.159 c0.324,0,0.645-0.064,0.938-0.187c0.167,0.038,0.492,0.156,0.813,0.416l0.11,0.088l0.124,0.07l2.045,1.156l0.102,0.057l0.107,0.043 c0.364,0.147,0.646,0.381,0.766,0.521c0.164,1.52,1.719,2.634,3.745,2.634h1.155c2.037,0,3.598-1.134,3.747-2.675 c0.117-0.145,0.401-0.393,0.774-0.549l0.111-0.047l0.105-0.062l1.96-1.159l0.105-0.062l0.097-0.075 c0.309-0.246,0.651-0.371,0.832-0.402c0.313,0.138,0.662,0.212,1.016,0.212c1.199,0,2.412-0.82,3.166-2.139l0.59-1.032 C27.387,20.48,27.575,19.342,27.315,18.389z M25.274,20.635l-0.59,1.032c-0.438,0.765-1.104,1.251-1.639,1.251 c-0.133,0-0.258-0.029-0.369-0.094c-0.15-0.086-0.346-0.127-0.566-0.127c-0.596,0-1.383,0.295-2.01,0.796l-1.96,1.157 c-1.016,0.425-1.846,1.291-1.846,1.929s-0.898,1.159-1.998,1.159H13.14c-1.1,0-1.998-0.514-1.998-1.141s-0.834-1.477-1.854-1.888 l-2.046-1.157c-0.636-0.511-1.425-0.814-2.006-0.814c-0.202,0-0.379,0.037-0.516,0.115c-0.101,0.057-0.214,0.084-0.333,0.084 c-0.518,0-1.179-0.498-1.62-1.271l-0.591-1.032c-0.545-0.954-0.556-1.983-0.024-2.286c0.532-0.305,0.78-1.432,0.551-2.506 c0,0,0-0.003,0-1.042c0-1.088,0.021-1.18,0.021-1.18c0.238-1.072-0.01-2.203-0.552-2.513C1.631,10.8,1.634,9.765,2.18,8.812 L2.769,7.78c0.438-0.766,1.103-1.251,1.636-1.251c0.131,0,0.255,0.029,0.365,0.092C4.92,6.707,5.114,6.747,5.334,6.747 c0.596,0,1.38-0.296,2.007-0.795l1.944-1.065c1.021-0.407,1.856-1.277,1.856-1.933c0-0.656,0.898-1.192,1.998-1.192h1.156V1.761 c1.1,0,1.998,0.545,1.998,1.211c0,0.667,0.832,1.554,1.849,1.973L20,6.013c0.618,0.489,1.401,0.775,2.012,0.775 c0.24,0,0.454-0.045,0.62-0.139c0.122-0.069,0.259-0.102,0.403-0.102c0.551,0,1.221,0.476,1.653,1.231l0.59,1.032 c0.544,0.953,0.518,2.004-0.062,2.334c-0.577,0.331-0.859,1.48-0.627,2.554c0,0,0.01,0.042,0.01,1.103c0,1.012,0,1.012,0,1.012 c-0.218,1.049,0.068,2.174,0.636,2.498C25.802,18.635,25.819,19.68,25.274,20.635z"/><path d="M13.61,7.611c-3.913,0-7.084,3.173-7.084,7.085c0,3.914,3.171,7.085,7.084,7.085s7.085-3.172,7.085-7.085 C20.695,10.784,17.523,7.611,13.61,7.611z M13.61,20.02c-2.936,0-5.323-2.388-5.323-5.323c0-2.935,2.388-5.323,5.323-5.323 s5.324,2.388,5.324,5.323C18.934,17.632,16.546,20.02,13.61,20.02z"/><path 
d="M13.682,9.908c-2.602,0-4.718,2.116-4.718,4.718c0,2.601,2.116,4.716,4.718,4.716c2.601,0,4.717-2.115,4.717-4.716 C18.399,12.024,16.283,9.908,13.682,9.908z M13.682,17.581c-1.633,0-2.956-1.323-2.956-2.955s1.323-2.956,2.956-2.956 c1.632,0,2.956,1.324,2.956,2.956S15.314,17.581,13.682,17.581z"/></g></svg> \ No newline at end of file
+<svg xmlns="http://www.w3.org/2000/svg" width="27.434" height="29.5" enable-background="new 0 0 27.434 29.5" viewBox="0 0 27.434 29.5"><path d="M27.316 18.39a2.696 2.696 0 0 0-.98-1.46 1.62 1.62 0 0 1-.016-.762l.035-.176v-1.191c0-1.246-.003-1.278-.046-1.473a1.717 1.717 0 0 1 .007-.805c.477-.343.829-.859.997-1.472.257-.957.074-2.094-.508-3.117l-.594-1.032c-.746-1.304-1.965-2.117-3.18-2.117-.379 0-.75.078-1.086.235a1.958 1.958 0 0 1-.855-.391l-.102-.082-.117-.063-1.855-1.07-.094-.055-.106-.043c-.378-.156-.66-.41-.77-.554C17.919 1.172 16.349 0 14.297 0h-1.155c-2.043 0-3.61 1.152-3.75 2.723-.114.14-.391.382-.758.527l-.102.04-.094.05-1.94 1.066-.134.074-.117.094a2.019 2.019 0 0 1-.832.403 2.518 2.518 0 0 0-1.008-.211c-1.199 0-2.414.82-3.168 2.14l-.59 1.032c-.41.718-.64 1.523-.64 2.257-.004.953.36 1.758 1.012 2.258.035.152.058.445-.016.785-.04.168-.063.282-.063 1.563 0 1.148 0 1.148.016 1.261l.008.075.015.074c.075.344.047.64.012.8-.644.5-1.004 1.302-.992 2.259.008.726.238 1.52.648 2.242l.59 1.027c.758 1.332 1.965 2.16 3.149 2.16.324 0 .644-.062.937-.187.168.039.492.156.813.418l.11.086.124.07 2.047 1.156.102.059.105.043c.363.144.648.379.766.52.164 1.519 1.718 2.632 3.746 2.632h1.156c2.035 0 3.598-1.133 3.746-2.672.117-.144.402-.394.773-.55l.114-.047.101-.063 1.961-1.156.106-.063.097-.078c.309-.246.653-.37.832-.398.313.136.66.21 1.016.21 1.2 0 2.41-.82 3.164-2.14l.594-1.031c.59-1.028.777-2.164.52-3.117Zm-2.043 2.247-.59 1.031c-.437.766-1.105 1.25-1.636 1.25a.7.7 0 0 1-.371-.094 1.146 1.146 0 0 0-.567-.129c-.593 0-1.382.297-2.007.797l-1.961 1.156c-1.016.426-1.848 1.293-1.848 1.93 0 .64-.898 1.16-1.996 1.16H13.14c-1.102 0-2-.515-2-1.14 0-.63-.832-1.477-1.852-1.887l-2.047-1.16c-.637-.512-1.426-.813-2.008-.813-.199 0-.379.035-.515.114a.648.648 0 0 1-.332.085c-.52 0-1.18-.5-1.621-1.273l-.59-1.031c-.543-.953-.555-1.98-.024-2.285.532-.305.782-1.434.551-2.504V14.8c0-1.09.02-1.18.02-1.18.238-1.074-.008-2.203-.551-2.516-.54-.304-.54-1.34.008-2.293l.59-1.03c.437-.766 1.101-1.255 1.636-1.255a.73.73 0 0 1 .364.094c.152.086.343.125.566.125.594 0 1.379-.297 2.004-.793l1.945-1.066c1.02-.407 1.856-1.278 1.856-1.934 0-.656.898-1.191 2-1.191h1.156c1.098 0 1.996.543 1.996 1.21 0 .669.832 1.555 1.848 1.973L20 6.012c.617.492 1.402.777 2.012.777.242 0 .453-.047.62-.14a.79.79 0 0 1 .403-.102c.55 0 1.223.476 1.652 1.23l.59 1.032c.543.953.52 2.004-.062 2.336-.574.332-.86 1.48-.625 2.554 0 0 .008.04.008 1.102v1.011c-.215 1.051.07 2.176.636 2.5.567.325.586 1.368.04 2.325Zm0 0"/><path d="M13.61 7.61a7.084 7.084 0 0 0-7.083 7.085 7.085 7.085 0 1 0 14.168 0A7.088 7.088 0 0 0 13.61 7.61Zm0 12.41a5.33 5.33 0 0 1-5.325-5.325 5.33 5.33 0 0 1 5.324-5.32 5.327 5.327 0 0 1 5.325 5.32 5.328 5.328 0 0 1-5.325 5.325Zm0 0"/><path d="M13.684 9.906a4.722 4.722 0 0 0-4.72 4.719 4.722 4.722 0 0 0 4.72 4.719 4.724 4.724 0 0 0 4.714-4.719 4.724 4.724 0 0 0-4.714-4.719Zm0 7.676a2.954 2.954 0 1 1 0-5.91 2.953 2.953 0 0 1 2.953 2.953 2.957 2.957 0 0 1-2.953 2.957Zm0 0"/></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
index 604ab147f..5e8c0e8d1 100644
--- a/src/librustdoc/html/static/js/main.js
+++ b/src/librustdoc/html/static/js/main.js
@@ -180,7 +180,6 @@ function browserSupportsHistoryApi() {
return window.history && typeof window.history.pushState === "function";
}
-// eslint-disable-next-line no-unused-vars
function loadCss(cssUrl) {
const link = document.createElement("link");
link.href = cssUrl;
@@ -379,7 +378,7 @@ function loadCss(cssUrl) {
}
ev.preventDefault();
searchState.defocus();
- window.hideAllModals(true); // true = reset focus for notable traits
+ window.hideAllModals(true); // true = reset focus for tooltips
}
function handleShortcut(ev) {
@@ -456,10 +455,7 @@ function loadCss(cssUrl) {
const ul = document.createElement("ul");
ul.className = "block " + shortty;
- for (const item of filtered) {
- const name = item[0];
- const desc = item[1]; // can be null
-
+ for (const name of filtered) {
let path;
if (shortty === "mod") {
path = name + "/index.html";
@@ -469,7 +465,6 @@ function loadCss(cssUrl) {
const current_page = document.location.href.split("/").pop();
const link = document.createElement("a");
link.href = path;
- link.title = desc;
if (path === current_page) {
link.className = "current";
}
@@ -789,17 +784,17 @@ function loadCss(cssUrl) {
// we need to switch away from mobile mode and make the main content area scrollable.
hideSidebar();
}
- if (window.CURRENT_NOTABLE_ELEMENT) {
- // As a workaround to the behavior of `contains: layout` used in doc togglers, the
- // notable traits popup is positioned using javascript.
+ if (window.CURRENT_TOOLTIP_ELEMENT) {
+ // As a workaround to the behavior of `contains: layout` used in doc togglers,
+ // tooltip popovers are positioned using javascript.
//
// This means when the window is resized, we need to redo the layout.
- const base = window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE;
- const force_visible = base.NOTABLE_FORCE_VISIBLE;
- hideNotable(false);
+ const base = window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE;
+ const force_visible = base.TOOLTIP_FORCE_VISIBLE;
+ hideTooltip(false);
if (force_visible) {
- showNotable(base);
- base.NOTABLE_FORCE_VISIBLE = true;
+ showTooltip(base);
+ base.TOOLTIP_FORCE_VISIBLE = true;
}
}
});
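
The resize handler above keeps the existing workaround for `contain: layout`: since the popover cannot be positioned purely in CSS, it is laid out from JavaScript, so a window resize has to tear it down and, if it was pinned open by a click, show it again so the position is recomputed. A minimal sketch of that pattern, using assumed helper names (`currentPopover`, `hidePopover`, `showPopover`) rather than rustdoc's identifiers:

// Illustrative sketch only; `currentPopover`, `hidePopover` and
// `showPopover` are assumed names, not rustdoc's identifiers.
window.addEventListener("resize", () => {
    if (!currentPopover) {
        return;
    }
    const base = currentPopover.base;   // element the popover belongs to
    const wasPinned = base.pinned;      // was it forced visible by a click?
    hidePopover(false);                 // drop the stale, mispositioned popover
    if (wasPinned) {
        showPopover(base);              // recompute the position from scratch
        base.pinned = true;
    }
});
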
@@ -827,27 +822,35 @@ function loadCss(cssUrl) {
});
});
- function showNotable(e) {
- if (!window.NOTABLE_TRAITS) {
+ function showTooltip(e) {
+ const notable_ty = e.getAttribute("data-notable-ty");
+ if (!window.NOTABLE_TRAITS && notable_ty) {
const data = document.getElementById("notable-traits-data");
if (data) {
window.NOTABLE_TRAITS = JSON.parse(data.innerText);
} else {
- throw new Error("showNotable() called on page without any notable traits!");
+ throw new Error("showTooltip() called with notable without any notable traits!");
}
}
- if (window.CURRENT_NOTABLE_ELEMENT && window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE === e) {
+ if (window.CURRENT_TOOLTIP_ELEMENT && window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE === e) {
// Make this function idempotent.
return;
}
window.hideAllModals(false);
- const ty = e.getAttribute("data-ty");
const wrapper = document.createElement("div");
- wrapper.innerHTML = "<div class=\"content\">" + window.NOTABLE_TRAITS[ty] + "</div>";
- wrapper.className = "notable popover";
+ if (notable_ty) {
+ wrapper.innerHTML = "<div class=\"content\">" +
+ window.NOTABLE_TRAITS[notable_ty] + "</div>";
+ } else if (e.getAttribute("title") !== undefined) {
+ const titleContent = document.createElement("div");
+ titleContent.className = "content";
+ titleContent.appendChild(document.createTextNode(e.getAttribute("title")));
+ wrapper.appendChild(titleContent);
+ }
+ wrapper.className = "tooltip popover";
const focusCatcher = document.createElement("div");
focusCatcher.setAttribute("tabindex", "0");
- focusCatcher.onfocus = hideNotable;
+ focusCatcher.onfocus = hideTooltip;
wrapper.appendChild(focusCatcher);
const pos = e.getBoundingClientRect();
// 5px overlap so that the mouse can easily travel from place to place
@@ -869,62 +872,62 @@ function loadCss(cssUrl) {
);
}
wrapper.style.visibility = "";
- window.CURRENT_NOTABLE_ELEMENT = wrapper;
- window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE = e;
+ window.CURRENT_TOOLTIP_ELEMENT = wrapper;
+ window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE = e;
wrapper.onpointerleave = function(ev) {
// If this is a synthetic touch event, ignore it. A click event will be along shortly.
if (ev.pointerType !== "mouse") {
return;
}
- if (!e.NOTABLE_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) {
- hideNotable(true);
+ if (!e.TOOLTIP_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) {
+ hideTooltip(true);
}
};
}
- function notableBlurHandler(event) {
- if (window.CURRENT_NOTABLE_ELEMENT &&
- !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT) &&
- !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT) &&
- !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE) &&
- !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE)
+ function tooltipBlurHandler(event) {
+ if (window.CURRENT_TOOLTIP_ELEMENT &&
+ !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT) &&
+ !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT) &&
+ !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) &&
+ !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE)
) {
// Work around a difference in the focus behaviour between Firefox, Chrome, and Safari.
- // When I click the button on an already-opened notable trait popover, Safari
+ // When I click the button on an already-opened tooltip popover, Safari
// hides the popover and then immediately shows it again, while everyone else hides it
// and it stays hidden.
//
// To work around this, make sure the click finishes being dispatched before
- // hiding the popover. Since `hideNotable()` is idempotent, this makes Safari behave
+ // hiding the popover. Since `hideTooltip()` is idempotent, this makes Safari behave
// consistently with the other two.
- setTimeout(() => hideNotable(false), 0);
+ setTimeout(() => hideTooltip(false), 0);
}
}
- function hideNotable(focus) {
- if (window.CURRENT_NOTABLE_ELEMENT) {
- if (window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE) {
+ function hideTooltip(focus) {
+ if (window.CURRENT_TOOLTIP_ELEMENT) {
+ if (window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE) {
if (focus) {
- window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.focus();
+ window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.focus();
}
- window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE = false;
+ window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE = false;
}
const body = document.getElementsByTagName("body")[0];
- body.removeChild(window.CURRENT_NOTABLE_ELEMENT);
- window.CURRENT_NOTABLE_ELEMENT = null;
+ body.removeChild(window.CURRENT_TOOLTIP_ELEMENT);
+ window.CURRENT_TOOLTIP_ELEMENT = null;
}
}
- onEachLazy(document.getElementsByClassName("notable-traits"), e => {
+ onEachLazy(document.getElementsByClassName("tooltip"), e => {
e.onclick = function() {
- this.NOTABLE_FORCE_VISIBLE = this.NOTABLE_FORCE_VISIBLE ? false : true;
- if (window.CURRENT_NOTABLE_ELEMENT && !this.NOTABLE_FORCE_VISIBLE) {
- hideNotable(true);
+ this.TOOLTIP_FORCE_VISIBLE = this.TOOLTIP_FORCE_VISIBLE ? false : true;
+ if (window.CURRENT_TOOLTIP_ELEMENT && !this.TOOLTIP_FORCE_VISIBLE) {
+ hideTooltip(true);
} else {
- showNotable(this);
- window.CURRENT_NOTABLE_ELEMENT.setAttribute("tabindex", "0");
- window.CURRENT_NOTABLE_ELEMENT.focus();
- window.CURRENT_NOTABLE_ELEMENT.onblur = notableBlurHandler;
+ showTooltip(this);
+ window.CURRENT_TOOLTIP_ELEMENT.setAttribute("tabindex", "0");
+ window.CURRENT_TOOLTIP_ELEMENT.focus();
+ window.CURRENT_TOOLTIP_ELEMENT.onblur = tooltipBlurHandler;
}
return false;
};
@@ -933,16 +936,16 @@ function loadCss(cssUrl) {
if (ev.pointerType !== "mouse") {
return;
}
- showNotable(this);
+ showTooltip(this);
};
e.onpointerleave = function(ev) {
// If this is a synthetic touch event, ignore it. A click event will be along shortly.
if (ev.pointerType !== "mouse") {
return;
}
- if (!this.NOTABLE_FORCE_VISIBLE &&
- !elemIsInParent(ev.relatedTarget, window.CURRENT_NOTABLE_ELEMENT)) {
- hideNotable(true);
+ if (!this.TOOLTIP_FORCE_VISIBLE &&
+ !elemIsInParent(ev.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT)) {
+ hideTooltip(true);
}
};
});
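
The blur handler above works around a Safari-specific focus quirk: clicking the button of an already-open popover makes Safari hide and immediately re-show it. Deferring the hide with `setTimeout(..., 0)` lets the click finish dispatching first, and because the show and hide functions are idempotent the extra re-show collapses into the same end state on every browser. A condensed sketch of the idea, with assumed names:

// `currentPopover` and `hidePopover` are assumed names for this sketch.
function popoverBlurHandler(event) {
    const active = document.activeElement;
    const related = event.relatedTarget;
    if (currentPopover &&
        !currentPopover.contains(active) && !currentPopover.contains(related)) {
        // Hide only after the click that caused the blur has finished
        // dispatching; hiding is idempotent, so Safari's extra re-show
        // does no harm.
        setTimeout(() => hidePopover(false), 0);
    }
}
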
@@ -1044,14 +1047,14 @@ function loadCss(cssUrl) {
}
/**
- * Hide popover menus, notable trait tooltips, and the sidebar (if applicable).
+ * Hide popover menus, clickable tooltips, and the sidebar (if applicable).
*
- * Pass "true" to reset focus for notable traits.
+ * Pass "true" to reset focus for tooltip popovers.
*/
window.hideAllModals = function(switchFocus) {
hideSidebar();
window.hidePopoverMenus();
- hideNotable(switchFocus);
+ hideTooltip(switchFocus);
};
/**
@@ -1142,7 +1145,11 @@ function loadCss(cssUrl) {
(function() {
let reset_button_timeout = null;
- window.copy_path = but => {
+ const but = document.getElementById("copy-path");
+ if (!but) {
+ return;
+ }
+ but.onclick = () => {
const parent = but.parentElement;
const path = [];
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
index 88592fa0c..b98bced41 100644
--- a/src/librustdoc/html/static/js/search.js
+++ b/src/librustdoc/html/static/js/search.js
@@ -112,7 +112,6 @@ function levenshtein(s1, s2) {
}
function initSearch(rawSearchIndex) {
- const MAX_LEV_DISTANCE = 3;
const MAX_RESULTS = 200;
const NO_TYPE_FILTER = -1;
/**
@@ -143,13 +142,11 @@ function initSearch(rawSearchIndex) {
}
function itemTypeFromName(typename) {
- for (let i = 0, len = itemTypes.length; i < len; ++i) {
- if (itemTypes[i] === typename) {
- return i;
- }
+ const index = itemTypes.findIndex(i => i === typename);
+ if (index < 0) {
+ throw ["Unknown type filter ", typename];
}
-
- throw new Error("Unknown type filter `" + typename + "`");
+ return index;
}
/**
@@ -167,21 +164,21 @@ function initSearch(rawSearchIndex) {
*/
function getStringElem(query, parserState, isInGenerics) {
if (isInGenerics) {
- throw new Error("`\"` cannot be used in generics");
+ throw ["Unexpected ", "\"", " in generics"];
} else if (query.literalSearch) {
- throw new Error("Cannot have more than one literal search element");
+ throw ["Cannot have more than one literal search element"];
} else if (parserState.totalElems - parserState.genericsElems > 0) {
- throw new Error("Cannot use literal search when there is more than one element");
+ throw ["Cannot use literal search when there is more than one element"];
}
parserState.pos += 1;
const start = parserState.pos;
const end = getIdentEndPosition(parserState);
if (parserState.pos >= parserState.length) {
- throw new Error("Unclosed `\"`");
+ throw ["Unclosed ", "\""];
} else if (parserState.userQuery[end] !== "\"") {
- throw new Error(`Unexpected \`${parserState.userQuery[end]}\` in a string element`);
+ throw ["Unexpected ", parserState.userQuery[end], " in a string element"];
} else if (start === end) {
- throw new Error("Cannot have empty string element");
+ throw ["Cannot have empty string element"];
}
// To skip the quote at the end.
parserState.pos += 1;
@@ -260,7 +257,7 @@ function initSearch(rawSearchIndex) {
return;
}
if (query.literalSearch && parserState.totalElems - parserState.genericsElems > 0) {
- throw new Error("You cannot have more than one element if you use quotes");
+ throw ["You cannot have more than one element if you use quotes"];
}
const pathSegments = name.split("::");
if (pathSegments.length > 1) {
@@ -269,17 +266,17 @@ function initSearch(rawSearchIndex) {
if (pathSegment.length === 0) {
if (i === 0) {
- throw new Error("Paths cannot start with `::`");
+ throw ["Paths cannot start with ", "::"];
} else if (i + 1 === len) {
- throw new Error("Paths cannot end with `::`");
+ throw ["Paths cannot end with ", "::"];
}
- throw new Error("Unexpected `::::`");
+ throw ["Unexpected ", "::::"];
}
}
}
// In case we only have something like `<p>`, there is no name.
if (pathSegments.length === 0 || (pathSegments.length === 1 && pathSegments[0] === "")) {
- throw new Error("Found generics without a path");
+ throw ["Found generics without a path"];
}
parserState.totalElems += 1;
if (isInGenerics) {
@@ -303,22 +300,23 @@ function initSearch(rawSearchIndex) {
* @return {integer}
*/
function getIdentEndPosition(parserState) {
+ const start = parserState.pos;
let end = parserState.pos;
- let foundExclamation = false;
+ let foundExclamation = -1;
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
if (!isIdentCharacter(c)) {
if (c === "!") {
- if (foundExclamation) {
- throw new Error("Cannot have more than one `!` in an ident");
+ if (foundExclamation !== -1) {
+ throw ["Cannot have more than one ", "!", " in an ident"];
} else if (parserState.pos + 1 < parserState.length &&
isIdentCharacter(parserState.userQuery[parserState.pos + 1])
) {
- throw new Error("`!` can only be at the end of an ident");
+ throw ["Unexpected ", "!", ": it can only be at the end of an ident"];
}
- foundExclamation = true;
+ foundExclamation = parserState.pos;
} else if (isErrorCharacter(c)) {
- throw new Error(`Unexpected \`${c}\``);
+ throw ["Unexpected ", c];
} else if (
isStopCharacter(c) ||
isSpecialStartCharacter(c) ||
@@ -329,16 +327,40 @@ function initSearch(rawSearchIndex) {
if (!isPathStart(parserState)) {
break;
}
+ if (foundExclamation !== -1) {
+ if (start <= (end - 2)) {
+ throw ["Cannot have associated items in macros"];
+ } else {
+ // if start == end - 1, we got the never type
+ // while the never type has no associated macros, we still
+ // can parse a path like that
+ foundExclamation = -1;
+ }
+ }
// Skip current ":".
parserState.pos += 1;
- foundExclamation = false;
} else {
- throw new Error(`Unexpected \`${c}\``);
+ throw ["Unexpected ", c];
}
}
parserState.pos += 1;
end = parserState.pos;
}
+ // if start == end - 1, we got the never type
+ if (foundExclamation !== -1 && start <= (end - 2)) {
+ if (parserState.typeFilter === null) {
+ parserState.typeFilter = "macro";
+ } else if (parserState.typeFilter !== "macro") {
+ throw [
+ "Invalid search type: macro ",
+ "!",
+ " and ",
+ parserState.typeFilter,
+ " both specified",
+ ];
+ }
+ end = foundExclamation;
+ }
return end;
}
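
getIdentEndPosition now records the position of the `!` instead of a boolean, which lets it tell the never type apart from a macro marker: a lone `!` still parses as a type, while `name!` forces the `macro` type filter and the identifier ends just before the `!`. A simplified sketch of that classification, under assumed names:

// Simplified sketch; `classifyIdent` is not a rustdoc function.
function classifyIdent(ident) {
    if (ident === "!") {
        // The never type: it has no associated macros, but the path still parses.
        return { name: "!", typeFilter: null };
    }
    if (ident.endsWith("!")) {
        // A trailing "!" means "search macros only".
        return { name: ident.slice(0, -1), typeFilter: "macro" };
    }
    return { name: ident, typeFilter: null };
}
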
@@ -365,9 +387,9 @@ function initSearch(rawSearchIndex) {
parserState.userQuery[parserState.pos] === "<"
) {
if (isInGenerics) {
- throw new Error("Unexpected `<` after `<`");
+ throw ["Unexpected ", "<", " after ", "<"];
} else if (start >= end) {
- throw new Error("Found generics without a path");
+ throw ["Found generics without a path"];
}
parserState.pos += 1;
getItemsBefore(query, parserState, generics, ">");
@@ -411,24 +433,51 @@ function initSearch(rawSearchIndex) {
foundStopChar = true;
continue;
} else if (c === ":" && isPathStart(parserState)) {
- throw new Error("Unexpected `::`: paths cannot start with `::`");
+ throw ["Unexpected ", "::", ": paths cannot start with ", "::"];
} else if (c === ":" || isEndCharacter(c)) {
let extra = "";
if (endChar === ">") {
- extra = "`<`";
+ extra = "<";
} else if (endChar === "") {
- extra = "`->`";
+ extra = "->";
+ } else {
+ extra = endChar;
}
- throw new Error("Unexpected `" + c + "` after " + extra);
+ throw ["Unexpected ", c, " after ", extra];
}
if (!foundStopChar) {
if (endChar !== "") {
- throw new Error(`Expected \`,\`, \` \` or \`${endChar}\`, found \`${c}\``);
+ throw [
+ "Expected ",
+ ",", // comma
+ ", ",
+ "&nbsp;", // whitespace
+ " or ",
+ endChar,
+ ", found ",
+ c,
+ ];
}
- throw new Error(`Expected \`,\` or \` \`, found \`${c}\``);
+ throw [
+ "Expected ",
+ ",", // comma
+ " or ",
+ "&nbsp;", // whitespace
+ ", found ",
+ c,
+ ];
}
const posBefore = parserState.pos;
getNextElem(query, parserState, elems, endChar === ">");
+ if (endChar !== "") {
+ if (parserState.pos >= parserState.length) {
+ throw ["Unclosed ", "<"];
+ }
+ const c2 = parserState.userQuery[parserState.pos];
+ if (!isSeparatorCharacter(c2) && c2 !== endChar) {
+ throw ["Expected ", endChar, ", found ", c2];
+ }
+ }
// This case can be encountered if `getNextElem` encountered a "stop character" right
// from the start. For example if you have `,,` or `<>`. In this case, we simply move up
// the current position to continue the parsing.
@@ -437,7 +486,10 @@ function initSearch(rawSearchIndex) {
}
foundStopChar = false;
}
- // We are either at the end of the string or on the `endChar`` character, let's move forward
+ if (parserState.pos >= parserState.length && endChar !== "") {
+ throw ["Unclosed ", "<"];
+ }
+ // We are either at the end of the string or on the `endChar` character, let's move forward
// in any case.
parserState.pos += 1;
}
@@ -453,7 +505,7 @@ function initSearch(rawSearchIndex) {
for (let pos = 0; pos < parserState.pos; ++pos) {
if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) {
- throw new Error(`Unexpected \`${query[pos]}\` in type filter`);
+ throw ["Unexpected ", query[pos], " in type filter"];
}
}
}
@@ -466,11 +518,10 @@ function initSearch(rawSearchIndex) {
* @param {ParserState} parserState
*/
function parseInput(query, parserState) {
- let c, before;
let foundStopChar = true;
while (parserState.pos < parserState.length) {
- c = parserState.userQuery[parserState.pos];
+ const c = parserState.userQuery[parserState.pos];
if (isStopCharacter(c)) {
foundStopChar = true;
if (isSeparatorCharacter(c)) {
@@ -480,19 +531,19 @@ function initSearch(rawSearchIndex) {
if (isReturnArrow(parserState)) {
break;
}
- throw new Error(`Unexpected \`${c}\` (did you mean \`->\`?)`);
+ throw ["Unexpected ", c, " (did you mean ", "->", "?)"];
}
- throw new Error(`Unexpected \`${c}\``);
+ throw ["Unexpected ", c];
} else if (c === ":" && !isPathStart(parserState)) {
if (parserState.typeFilter !== null) {
- throw new Error("Unexpected `:`");
+ throw ["Unexpected ", ":"];
}
if (query.elems.length === 0) {
- throw new Error("Expected type filter before `:`");
+ throw ["Expected type filter before ", ":"];
} else if (query.elems.length !== 1 || parserState.totalElems !== 1) {
- throw new Error("Unexpected `:`");
+ throw ["Unexpected ", ":"];
} else if (query.literalSearch) {
- throw new Error("You cannot use quotes on type filter");
+ throw ["You cannot use quotes on type filter"];
}
checkExtraTypeFilterCharacters(parserState);
// The type filter doesn't count as an element since it's a modifier.
@@ -505,11 +556,31 @@ function initSearch(rawSearchIndex) {
}
if (!foundStopChar) {
if (parserState.typeFilter !== null) {
- throw new Error(`Expected \`,\`, \` \` or \`->\`, found \`${c}\``);
+ throw [
+ "Expected ",
+ ",", // comma
+ ", ",
+ "&nbsp;", // whitespace
+ " or ",
+ "->", // arrow
+ ", found ",
+ c,
+ ];
}
- throw new Error(`Expected \`,\`, \` \`, \`:\` or \`->\`, found \`${c}\``);
- }
- before = query.elems.length;
+ throw [
+ "Expected ",
+ ",", // comma
+ ", ",
+ "&nbsp;", // whitespace
+ ", ",
+ ":", // colon
+ " or ",
+ "->", // arrow
+ ", found ",
+ c,
+ ];
+ }
+ const before = query.elems.length;
getNextElem(query, parserState, query.elems, false);
if (query.elems.length === before) {
// Nothing was added, weird... Let's increase the position to not remain stuck.
@@ -518,14 +589,13 @@ function initSearch(rawSearchIndex) {
foundStopChar = false;
}
while (parserState.pos < parserState.length) {
- c = parserState.userQuery[parserState.pos];
if (isReturnArrow(parserState)) {
parserState.pos += 2;
// Get returned elements.
getItemsBefore(query, parserState, query.returned, "");
// Nothing can come afterward!
if (query.returned.length === 0) {
- throw new Error("Expected at least one item after `->`");
+ throw ["Expected at least one item after ", "->"];
}
break;
} else {
@@ -594,8 +664,8 @@ function initSearch(rawSearchIndex) {
*
* The supported syntax by this parser is as follow:
*
- * ident = *(ALPHA / DIGIT / "_") [!]
- * path = ident *(DOUBLE-COLON ident)
+ * ident = *(ALPHA / DIGIT / "_")
+ * path = ident *(DOUBLE-COLON ident) [!]
* arg = path [generics]
* arg-without-generic = path
* type-sep = COMMA/WS *(COMMA/WS)
@@ -679,7 +749,7 @@ function initSearch(rawSearchIndex) {
}
} catch (err) {
query = newParsedQuery(userQuery);
- query.error = err.message;
+ query.error = err;
query.typeFilter = -1;
return query;
}
@@ -897,13 +967,13 @@ function initSearch(rawSearchIndex) {
* @param {QueryElement} elem - The element from the parsed query.
* @param {integer} defaultLev - This is the value to return in case there are no generics.
*
- * @return {integer} - Returns the best match (if any) or `MAX_LEV_DISTANCE + 1`.
+ * @return {integer} - Returns the best match (if any) or `maxLevDistance + 1`.
*/
- function checkGenerics(row, elem, defaultLev) {
+ function checkGenerics(row, elem, defaultLev, maxLevDistance) {
if (row.generics.length === 0) {
- return elem.generics.length === 0 ? defaultLev : MAX_LEV_DISTANCE + 1;
+ return elem.generics.length === 0 ? defaultLev : maxLevDistance + 1;
} else if (row.generics.length > 0 && row.generics[0].name === null) {
- return checkGenerics(row.generics[0], elem, defaultLev);
+ return checkGenerics(row.generics[0], elem, defaultLev, maxLevDistance);
}
// The names match, but we need to be sure that all generics kinda
// match as well.
@@ -914,8 +984,8 @@ function initSearch(rawSearchIndex) {
elem_name = entry.name;
if (elem_name === "") {
// Pure generic, needs to check into it.
- if (checkGenerics(entry, elem, MAX_LEV_DISTANCE + 1) !== 0) {
- return MAX_LEV_DISTANCE + 1;
+ if (checkGenerics(entry, elem, maxLevDistance + 1, maxLevDistance) !== 0) {
+ return maxLevDistance + 1;
}
continue;
}
@@ -942,7 +1012,7 @@ function initSearch(rawSearchIndex) {
}
}
if (match === null) {
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
}
elems[match] -= 1;
if (elems[match] === 0) {
@@ -951,7 +1021,7 @@ function initSearch(rawSearchIndex) {
}
return 0;
}
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
}
/**
@@ -963,10 +1033,10 @@ function initSearch(rawSearchIndex) {
*
* @return {integer} - Returns a Levenshtein distance to the best match.
*/
- function checkIfInGenerics(row, elem) {
- let lev = MAX_LEV_DISTANCE + 1;
+ function checkIfInGenerics(row, elem, maxLevDistance) {
+ let lev = maxLevDistance + 1;
for (const entry of row.generics) {
- lev = Math.min(checkType(entry, elem, true), lev);
+ lev = Math.min(checkType(entry, elem, true, maxLevDistance), lev);
if (lev === 0) {
break;
}
@@ -983,15 +1053,15 @@ function initSearch(rawSearchIndex) {
* @param {boolean} literalSearch
*
* @return {integer} - Returns a Levenshtein distance to the best match. If there is
- * no match, returns `MAX_LEV_DISTANCE + 1`.
+ * no match, returns `maxLevDistance + 1`.
*/
- function checkType(row, elem, literalSearch) {
+ function checkType(row, elem, literalSearch, maxLevDistance) {
if (row.name === null) {
// This is a pure "generic" search, no need to run other checks.
if (row.generics.length > 0) {
- return checkIfInGenerics(row, elem);
+ return checkIfInGenerics(row, elem, maxLevDistance);
}
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
}
let lev = levenshtein(row.name, elem.name);
@@ -1005,9 +1075,9 @@ function initSearch(rawSearchIndex) {
return 0;
}
}
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
} else if (elem.generics.length > 0) {
- return checkGenerics(row, elem, MAX_LEV_DISTANCE + 1);
+ return checkGenerics(row, elem, maxLevDistance + 1, maxLevDistance);
}
return 0;
} else if (row.generics.length > 0) {
@@ -1017,22 +1087,20 @@ function initSearch(rawSearchIndex) {
}
// The name didn't match so we now check if the type we're looking for is inside
// the generics!
- lev = checkIfInGenerics(row, elem);
- // Now whatever happens, the returned distance is "less good" so we should mark
- // it as such, and so we add 0.5 to the distance to make it "less good".
- return lev + 0.5;
- } else if (lev > MAX_LEV_DISTANCE) {
+ lev = Math.min(lev, checkIfInGenerics(row, elem, maxLevDistance));
+ return lev;
+ } else if (lev > maxLevDistance) {
// So our item's name doesn't match at all and has generics.
//
// Maybe it's present in a sub generic? For example "f<A<B<C>>>()", if we're
// looking for "B<C>", we'll need to go down.
- return checkIfInGenerics(row, elem);
+ return checkIfInGenerics(row, elem, maxLevDistance);
} else {
// At this point, the name kinda match and we have generics to check, so
// let's go!
- const tmp_lev = checkGenerics(row, elem, lev);
- if (tmp_lev > MAX_LEV_DISTANCE) {
- return MAX_LEV_DISTANCE + 1;
+ const tmp_lev = checkGenerics(row, elem, lev, maxLevDistance);
+ if (tmp_lev > maxLevDistance) {
+ return maxLevDistance + 1;
}
// We compute the median value of both checks and return it.
return (tmp_lev + lev) / 2;
@@ -1040,7 +1108,7 @@ function initSearch(rawSearchIndex) {
} else if (elem.generics.length > 0) {
// In this case, we were expecting generics but there isn't so we simply reject this
// one.
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
}
// No generics on our query or on the target type so we can return without doing
// anything else.
@@ -1055,23 +1123,26 @@ function initSearch(rawSearchIndex) {
* @param {integer} typeFilter
*
* @return {integer} - Returns a Levenshtein distance to the best match. If there is no
- * match, returns `MAX_LEV_DISTANCE + 1`.
+ * match, returns `maxLevDistance + 1`.
*/
- function findArg(row, elem, typeFilter) {
- let lev = MAX_LEV_DISTANCE + 1;
+ function findArg(row, elem, typeFilter, maxLevDistance) {
+ let lev = maxLevDistance + 1;
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
for (const input of row.type.inputs) {
if (!typePassesFilter(typeFilter, input.ty)) {
continue;
}
- lev = Math.min(lev, checkType(input, elem, parsedQuery.literalSearch));
+ lev = Math.min(
+ lev,
+ checkType(input, elem, parsedQuery.literalSearch, maxLevDistance)
+ );
if (lev === 0) {
return 0;
}
}
}
- return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev;
+ return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
}
/**
@@ -1082,10 +1153,10 @@ function initSearch(rawSearchIndex) {
* @param {integer} typeFilter
*
* @return {integer} - Returns a Levenshtein distance to the best match. If there is no
- * match, returns `MAX_LEV_DISTANCE + 1`.
+ * match, returns `maxLevDistance + 1`.
*/
- function checkReturned(row, elem, typeFilter) {
- let lev = MAX_LEV_DISTANCE + 1;
+ function checkReturned(row, elem, typeFilter, maxLevDistance) {
+ let lev = maxLevDistance + 1;
if (row && row.type && row.type.output.length > 0) {
const ret = row.type.output;
@@ -1093,20 +1164,23 @@ function initSearch(rawSearchIndex) {
if (!typePassesFilter(typeFilter, ret_ty.ty)) {
continue;
}
- lev = Math.min(lev, checkType(ret_ty, elem, parsedQuery.literalSearch));
+ lev = Math.min(
+ lev,
+ checkType(ret_ty, elem, parsedQuery.literalSearch, maxLevDistance)
+ );
if (lev === 0) {
return 0;
}
}
}
- return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev;
+ return parsedQuery.literalSearch ? maxLevDistance + 1 : lev;
}
- function checkPath(contains, ty) {
+ function checkPath(contains, ty, maxLevDistance) {
if (contains.length === 0) {
return 0;
}
- let ret_lev = MAX_LEV_DISTANCE + 1;
+ let ret_lev = maxLevDistance + 1;
const path = ty.path.split("::");
if (ty.parent && ty.parent.name) {
@@ -1116,7 +1190,7 @@ function initSearch(rawSearchIndex) {
const length = path.length;
const clength = contains.length;
if (clength > length) {
- return MAX_LEV_DISTANCE + 1;
+ return maxLevDistance + 1;
}
for (let i = 0; i < length; ++i) {
if (i + clength > length) {
@@ -1126,7 +1200,7 @@ function initSearch(rawSearchIndex) {
let aborted = false;
for (let x = 0; x < clength; ++x) {
const lev = levenshtein(path[i + x], contains[x]);
- if (lev > MAX_LEV_DISTANCE) {
+ if (lev > maxLevDistance) {
aborted = true;
break;
}
@@ -1231,7 +1305,7 @@ function initSearch(rawSearchIndex) {
* following condition:
*
* * If it is a "literal search" (`parsedQuery.literalSearch`), then `lev` must be 0.
- * * If it is not a "literal search", `lev` must be <= `MAX_LEV_DISTANCE`.
+ * * If it is not a "literal search", `lev` must be <= `maxLevDistance`.
*
* The `results` map contains information which will be used to sort the search results:
*
@@ -1249,8 +1323,8 @@ function initSearch(rawSearchIndex) {
* @param {integer} lev
* @param {integer} path_lev
*/
- function addIntoResults(results, fullId, id, index, lev, path_lev) {
- const inBounds = lev <= MAX_LEV_DISTANCE || index !== -1;
+ function addIntoResults(results, fullId, id, index, lev, path_lev, maxLevDistance) {
+ const inBounds = lev <= maxLevDistance || index !== -1;
if (lev === 0 || (!parsedQuery.literalSearch && inBounds)) {
if (results[fullId] !== undefined) {
const result = results[fullId];
@@ -1289,7 +1363,8 @@ function initSearch(rawSearchIndex) {
elem,
results_others,
results_in_args,
- results_returned
+ results_returned,
+ maxLevDistance
) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
@@ -1298,13 +1373,13 @@ function initSearch(rawSearchIndex) {
const fullId = row.id;
const searchWord = searchWords[pos];
- const in_args = findArg(row, elem, parsedQuery.typeFilter);
- const returned = checkReturned(row, elem, parsedQuery.typeFilter);
+ const in_args = findArg(row, elem, parsedQuery.typeFilter, maxLevDistance);
+ const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxLevDistance);
// path_lev is 0 because no parent path information is currently stored
// in the search index
- addIntoResults(results_in_args, fullId, pos, -1, in_args, 0);
- addIntoResults(results_returned, fullId, pos, -1, returned, 0);
+ addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxLevDistance);
+ addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxLevDistance);
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
return;
@@ -1328,16 +1403,16 @@ function initSearch(rawSearchIndex) {
// No need to check anything else if it's a "pure" generics search.
if (elem.name.length === 0) {
if (row.type !== null) {
- lev = checkGenerics(row.type, elem, MAX_LEV_DISTANCE + 1);
+ lev = checkGenerics(row.type, elem, maxLevDistance + 1, maxLevDistance);
// path_lev is 0 because we know it's empty
- addIntoResults(results_others, fullId, pos, index, lev, 0);
+ addIntoResults(results_others, fullId, pos, index, lev, 0, maxLevDistance);
}
return;
}
if (elem.fullPath.length > 1) {
- path_lev = checkPath(elem.pathWithoutLast, row);
- if (path_lev > MAX_LEV_DISTANCE) {
+ path_lev = checkPath(elem.pathWithoutLast, row, maxLevDistance);
+ if (path_lev > maxLevDistance) {
return;
}
}
@@ -1351,11 +1426,11 @@ function initSearch(rawSearchIndex) {
lev = levenshtein(searchWord, elem.pathLast);
- if (index === -1 && lev + path_lev > MAX_LEV_DISTANCE) {
+ if (index === -1 && lev + path_lev > maxLevDistance) {
return;
}
- addIntoResults(results_others, fullId, pos, index, lev, path_lev);
+ addIntoResults(results_others, fullId, pos, index, lev, path_lev, maxLevDistance);
}
/**
@@ -1367,7 +1442,7 @@ function initSearch(rawSearchIndex) {
* @param {integer} pos - Position in the `searchIndex`.
* @param {Object} results
*/
- function handleArgs(row, pos, results) {
+ function handleArgs(row, pos, results, maxLevDistance) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
@@ -1379,7 +1454,7 @@ function initSearch(rawSearchIndex) {
function checkArgs(elems, callback) {
for (const elem of elems) {
// There is more than one parameter to the query so all checks should be "exact"
- const lev = callback(row, elem, NO_TYPE_FILTER);
+ const lev = callback(row, elem, NO_TYPE_FILTER, maxLevDistance);
if (lev <= 1) {
nbLev += 1;
totalLev += lev;
@@ -1400,12 +1475,21 @@ function initSearch(rawSearchIndex) {
return;
}
const lev = Math.round(totalLev / nbLev);
- addIntoResults(results, row.id, pos, 0, lev, 0);
+ addIntoResults(results, row.id, pos, 0, lev, 0, maxLevDistance);
}
function innerRunQuery() {
let elem, i, nSearchWords, in_returned, row;
+ let queryLen = 0;
+ for (const elem of parsedQuery.elems) {
+ queryLen += elem.name.length;
+ }
+ for (const elem of parsedQuery.returned) {
+ queryLen += elem.name.length;
+ }
+ const maxLevDistance = Math.floor(queryLen / 3);
+
if (parsedQuery.foundElems === 1) {
if (parsedQuery.elems.length === 1) {
elem = parsedQuery.elems[0];
@@ -1418,7 +1502,8 @@ function initSearch(rawSearchIndex) {
elem,
results_others,
results_in_args,
- results_returned
+ results_returned,
+ maxLevDistance
);
}
} else if (parsedQuery.returned.length === 1) {
@@ -1426,13 +1511,18 @@ function initSearch(rawSearchIndex) {
elem = parsedQuery.returned[0];
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
row = searchIndex[i];
- in_returned = checkReturned(row, elem, parsedQuery.typeFilter);
- addIntoResults(results_others, row.id, i, -1, in_returned);
+ in_returned = checkReturned(
+ row,
+ elem,
+ parsedQuery.typeFilter,
+ maxLevDistance
+ );
+ addIntoResults(results_others, row.id, i, -1, in_returned, maxLevDistance);
}
}
} else if (parsedQuery.foundElems > 0) {
for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
- handleArgs(searchIndex[i], i, results_others);
+ handleArgs(searchIndex[i], i, results_others, maxLevDistance);
}
}
}
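
innerRunQuery above replaces the fixed MAX_LEV_DISTANCE of 3 with a budget derived from the query itself: the total length of the element names divided by three, rounded down. Short queries therefore only match near-exactly, while longer ones tolerate proportionally more typos. The same computation, isolated as a standalone sketch (the function name is just for illustration):

// Mirrors the maxLevDistance computation in innerRunQuery.
function maxLevDistanceFor(parsedQuery) {
    let queryLen = 0;
    for (const elem of parsedQuery.elems) {
        queryLen += elem.name.length;
    }
    for (const elem of parsedQuery.returned) {
        queryLen += elem.name.length;
    }
    return Math.floor(queryLen / 3);
}
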
@@ -1470,7 +1560,7 @@ function initSearch(rawSearchIndex) {
*
* @return {boolean} - Whether the result is valid or not
*/
- function validateResult(name, path, keys, parent) {
+ function validateResult(name, path, keys, parent, maxLevDistance) {
if (!keys || !keys.length) {
return true;
}
@@ -1485,7 +1575,7 @@ function initSearch(rawSearchIndex) {
(parent !== undefined && parent.name !== undefined &&
parent.name.toLowerCase().indexOf(key) > -1) ||
// lastly check to see if the name was a levenshtein match
- levenshtein(name, key) <= MAX_LEV_DISTANCE)) {
+ levenshtein(name, key) <= maxLevDistance)) {
return false;
}
}
@@ -1725,7 +1815,16 @@ function initSearch(rawSearchIndex) {
let output = `<h1 class="search-results-title">Results${crates}</h1>`;
if (results.query.error !== null) {
- output += `<h3>Query parser error: "${results.query.error}".</h3>`;
+ const error = results.query.error;
+ error.forEach((value, index) => {
+ value = value.split("<").join("&lt;").split(">").join("&gt;");
+ if (index % 2 !== 0) {
+ error[index] = `<code>${value}</code>`;
+ } else {
+ error[index] = value;
+ }
+ });
+ output += `<h3 class="error">Query parser error: "${error.join("")}".</h3>`;
output += "<div id=\"search-tabs\">" +
makeTabHeader(0, "In Names", ret_others[1]) +
"</div>";
@@ -1922,7 +2021,7 @@ function initSearch(rawSearchIndex) {
* @type {Array<string>}
*/
const searchWords = [];
- let i, word;
+ const charA = "A".charCodeAt(0);
let currentIndex = 0;
let id = 0;
@@ -1936,7 +2035,7 @@ function initSearch(rawSearchIndex) {
/**
* The raw search data for a given crate. `n`, `t`, `d`, and `q`, `i`, and `f`
* are arrays with the same length. n[i] contains the name of an item.
- * t[i] contains the type of that item (as a small integer that represents an
+ * t[i] contains the type of that item (as a string of characters that represent an
* offset in `itemTypes`). d[i] contains the description of that item.
*
* q[i] contains the full path of the item, or an empty string indicating
@@ -1963,7 +2062,7 @@ function initSearch(rawSearchIndex) {
* doc: string,
* a: Object,
* n: Array<string>,
- * t: Array<Number>,
+ * t: String,
* d: Array<string>,
* q: Array<string>,
* i: Array<Number>,
@@ -1992,7 +2091,7 @@ function initSearch(rawSearchIndex) {
searchIndex.push(crateRow);
currentIndex += 1;
- // an array of (Number) item types
+ // a String of one character item type codes
const itemTypes = crateCorpus.t;
// an array of (String) item names
const itemNames = crateCorpus.n;
@@ -2017,7 +2116,7 @@ function initSearch(rawSearchIndex) {
// convert `rawPaths` entries into object form
// generate normalizedPaths for function search mode
let len = paths.length;
- for (i = 0; i < len; ++i) {
+ for (let i = 0; i < len; ++i) {
lowercasePaths.push({ty: paths[i][0], name: paths[i][1].toLowerCase()});
paths[i] = {ty: paths[i][0], name: paths[i][1]};
}
@@ -2031,19 +2130,17 @@ function initSearch(rawSearchIndex) {
// faster analysis operations
len = itemTypes.length;
let lastPath = "";
- for (i = 0; i < len; ++i) {
+ for (let i = 0; i < len; ++i) {
+ let word = "";
// This object should have exactly the same set of fields as the "crateRow"
// object defined above.
if (typeof itemNames[i] === "string") {
word = itemNames[i].toLowerCase();
- searchWords.push(word);
- } else {
- word = "";
- searchWords.push("");
}
+ searchWords.push(word);
const row = {
crate: crate,
- ty: itemTypes[i],
+ ty: itemTypes.charCodeAt(i) - charA,
name: itemNames[i],
path: itemPaths[i] ? itemPaths[i] : lastPath,
desc: itemDescs[i],
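
The search index now ships item types as a single string with one character per item ("A" for type 0, "B" for 1, and so on) instead of an array of numbers, and each row decodes its type with `charCodeAt`, as in the hunk above. A small sketch of the decoding on its own, outside the row-building loop:

// Decode a compact "one character per item" type string back to numbers.
const charA = "A".charCodeAt(0);
function decodeItemTypes(encoded) {
    const types = [];
    for (let i = 0; i < encoded.length; ++i) {
        types.push(encoded.charCodeAt(i) - charA);
    }
    return types;
}

// Example: decodeItemTypes("ABD") -> [0, 1, 3].
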
diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js
index 84df1b7d3..1cd552e7f 100644
--- a/src/librustdoc/html/static/js/settings.js
+++ b/src/librustdoc/html/static/js/settings.js
@@ -1,5 +1,5 @@
// Local js definitions:
-/* global getSettingValue, getVirtualKey, updateLocalStorage, updateSystemTheme */
+/* global getSettingValue, getVirtualKey, updateLocalStorage, updateTheme */
/* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */
/* global MAIN_ID, getVar, getSettingsButton */
@@ -19,7 +19,7 @@
case "theme":
case "preferred-dark-theme":
case "preferred-light-theme":
- updateSystemTheme();
+ updateTheme();
updateLightAndDark();
break;
case "line-numbers":
@@ -48,13 +48,13 @@
}
function showLightAndDark() {
- removeClass(document.getElementById("preferred-light-theme").parentElement, "hidden");
- removeClass(document.getElementById("preferred-dark-theme").parentElement, "hidden");
+ removeClass(document.getElementById("preferred-light-theme"), "hidden");
+ removeClass(document.getElementById("preferred-dark-theme"), "hidden");
}
function hideLightAndDark() {
- addClass(document.getElementById("preferred-light-theme").parentElement, "hidden");
- addClass(document.getElementById("preferred-dark-theme").parentElement, "hidden");
+ addClass(document.getElementById("preferred-light-theme"), "hidden");
+ addClass(document.getElementById("preferred-dark-theme"), "hidden");
}
function updateLightAndDark() {
@@ -80,17 +80,6 @@
toggle.onkeyup = handleKey;
toggle.onkeyrelease = handleKey;
});
- onEachLazy(settingsElement.getElementsByClassName("select-wrapper"), elem => {
- const select = elem.getElementsByTagName("select")[0];
- const settingId = select.id;
- const settingValue = getSettingValue(settingId);
- if (settingValue !== null) {
- select.value = settingValue;
- }
- select.onchange = function() {
- changeSetting(this.id, this.value);
- };
- });
onEachLazy(settingsElement.querySelectorAll("input[type=\"radio\"]"), elem => {
const settingId = elem.name;
let settingValue = getSettingValue(settingId);
@@ -127,38 +116,40 @@
let output = "";
for (const setting of settings) {
- output += "<div class=\"setting-line\">";
const js_data_name = setting["js_name"];
const setting_name = setting["name"];
if (setting["options"] !== undefined) {
// This is a select setting.
output += `\
-<div class="radio-line" id="${js_data_name}">
- <div class="setting-name">${setting_name}</div>
-<div class="choices">`;
+<div class="setting-line" id="${js_data_name}">
+ <div class="setting-radio-name">${setting_name}</div>
+ <div class="setting-radio-choices">`;
onEach(setting["options"], option => {
const checked = option === setting["default"] ? " checked" : "";
const full = `${js_data_name}-${option.replace(/ /g,"-")}`;
output += `\
-<label for="${full}" class="choice">
- <input type="radio" name="${js_data_name}"
- id="${full}" value="${option}"${checked}>
- <span>${option}</span>
-</label>`;
+ <label for="${full}" class="setting-radio">
+ <input type="radio" name="${js_data_name}"
+ id="${full}" value="${option}"${checked}>
+ <span>${option}</span>
+ </label>`;
});
- output += "</div></div>";
+ output += `\
+ </div>
+</div>`;
} else {
// This is a checkbox toggle.
const checked = setting["default"] === true ? " checked" : "";
output += `\
-<label class="settings-toggle">\
- <input type="checkbox" id="${js_data_name}"${checked}>\
- <span class="label">${setting_name}</span>\
-</label>`;
+<div class="setting-line">\
+ <label class="setting-check">\
+ <input type="checkbox" id="${js_data_name}"${checked}>\
+ <span>${setting_name}</span>\
+ </label>\
+</div>`;
}
- output += "</div>";
}
return output;
}
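
Each entry in the settings list above is rendered either as a radio group (when it declares `options`) or as a single checkbox, and both variants are now wrapped in a `setting-line` container instead of the old outer div plus `radio-line`/`settings-toggle` markup. A compact sketch of that per-setting branch, assuming a setting of the shape `{ js_name, name, default, options? }` as used in the loop above:

// Sketch only; `renderSetting` is a made-up name for the per-setting branch.
function renderSetting(setting) {
    if (setting["options"] !== undefined) {
        // Select-style setting: one radio button per option.
        let choices = "";
        for (const option of setting["options"]) {
            const checked = option === setting["default"] ? " checked" : "";
            const full = `${setting["js_name"]}-${option.replace(/ /g, "-")}`;
            choices += `<label for="${full}" class="setting-radio">` +
                `<input type="radio" name="${setting["js_name"]}" id="${full}"` +
                ` value="${option}"${checked}><span>${option}</span></label>`;
        }
        return `<div class="setting-line" id="${setting["js_name"]}">` +
            `<div class="setting-radio-name">${setting["name"]}</div>` +
            `<div class="setting-radio-choices">${choices}</div></div>`;
    }
    // Boolean setting: a single checkbox toggle.
    const checked = setting["default"] === true ? " checked" : "";
    return `<div class="setting-line"><label class="setting-check">` +
        `<input type="checkbox" id="${setting["js_name"]}"${checked}>` +
        `<span>${setting["name"]}</span></label></div>`;
}
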
diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js
index 0e1c864e6..6c0f03b5b 100644
--- a/src/librustdoc/html/static/js/source-script.js
+++ b/src/librustdoc/html/static/js/source-script.js
@@ -117,8 +117,7 @@ function createSourceSidebar() {
sidebar.appendChild(title);
Object.keys(sourcesIndex).forEach(key => {
sourcesIndex[key][NAME_OFFSET] = key;
- hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "",
- hasFoundFile);
+ hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "", hasFoundFile);
});
container.appendChild(sidebar);
diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js
index db2db83ca..c72ac254f 100644
--- a/src/librustdoc/html/static/js/storage.js
+++ b/src/librustdoc/html/static/js/storage.js
@@ -51,7 +51,6 @@ function hasClass(elem, className) {
return elem && elem.classList && elem.classList.contains(className);
}
-// eslint-disable-next-line no-unused-vars
function addClass(elem, className) {
if (!elem || !elem.classList) {
return;
@@ -153,79 +152,74 @@ function switchTheme(styleElem, mainStyleElem, newThemeName, saveTheme) {
}
}
-// This function is called from "main.js".
-// eslint-disable-next-line no-unused-vars
-function useSystemTheme(value) {
- if (value === undefined) {
- value = true;
- }
-
- updateLocalStorage("use-system-theme", value);
-
- // update the toggle if we're on the settings page
- const toggle = document.getElementById("use-system-theme");
- if (toggle && toggle instanceof HTMLInputElement) {
- toggle.checked = value;
- }
-}
-
-const updateSystemTheme = (function() {
- if (!window.matchMedia) {
- // fallback to the CSS computed value
- return () => {
- const cssTheme = getComputedStyle(document.documentElement)
- .getPropertyValue("content");
-
- switchTheme(
- window.currentTheme,
- window.mainTheme,
- JSON.parse(cssTheme) || "light",
- true
- );
+const updateTheme = (function() {
+ /**
+ * Update the current theme to match whatever the current combination of
+ * * the preference for using the system theme
+ * (if this is the case, the value of preferred-light-theme, if the
+ * system theme is light, otherwise if dark, the value of
+ * preferred-dark-theme.)
+ * * the preferred theme
+ * … dictates that it should be.
+ */
+ function updateTheme() {
+ const use = (theme, saveTheme) => {
+ switchTheme(window.currentTheme, window.mainTheme, theme, saveTheme);
};
- }
-
- // only listen to (prefers-color-scheme: dark) because light is the default
- const mql = window.matchMedia("(prefers-color-scheme: dark)");
- function handlePreferenceChange(mql) {
- const use = theme => {
- switchTheme(window.currentTheme, window.mainTheme, theme, true);
- };
// maybe the user has disabled the setting in the meantime!
if (getSettingValue("use-system-theme") !== "false") {
const lightTheme = getSettingValue("preferred-light-theme") || "light";
const darkTheme = getSettingValue("preferred-dark-theme") || "dark";
- if (mql.matches) {
- use(darkTheme);
+ if (isDarkMode()) {
+ use(darkTheme, true);
} else {
// prefers a light theme, or has no preference
- use(lightTheme);
+ use(lightTheme, true);
}
// note: we save the theme so that it doesn't suddenly change when
// the user disables "use-system-theme" and reloads the page or
// navigates to another page
} else {
- use(getSettingValue("theme"));
+ use(getSettingValue("theme"), false);
}
}
- mql.addListener(handlePreferenceChange);
+ // This is always updated below to a function () => bool.
+ let isDarkMode;
- return () => {
- handlePreferenceChange(mql);
- };
-})();
+ // Determine the function for isDarkMode, and if we have
+ // `window.matchMedia`, set up an event listener on the preferred color
+ // scheme.
+ //
+ // Otherwise, fall back to the prefers-color-scheme value CSS captured in
+ // the "content" property.
+ if (window.matchMedia) {
+ // only listen to (prefers-color-scheme: dark) because light is the default
+ const mql = window.matchMedia("(prefers-color-scheme: dark)");
-function switchToSavedTheme() {
- switchTheme(
- window.currentTheme,
- window.mainTheme,
- getSettingValue("theme") || "light",
- false
- );
-}
+ isDarkMode = () => mql.matches;
+
+ if (mql.addEventListener) {
+ mql.addEventListener("change", updateTheme);
+ } else {
+ // This is deprecated, see:
+ // https://developer.mozilla.org/en-US/docs/Web/API/MediaQueryList/addListener
+ mql.addListener(updateTheme);
+ }
+ } else {
+ // fallback to the CSS computed value
+ const cssContent = getComputedStyle(document.documentElement)
+ .getPropertyValue("content");
+    // (Note: the double-quotes are there because this is a CSS value, which
+    // might be a length, a string, etc.)
+ const cssColorScheme = cssContent || "\"light\"";
+ isDarkMode = () => (cssColorScheme === "\"dark\"");
+ }
+
+ return updateTheme;
+})();
if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) {
// update the preferred dark theme if the user is already using a dark theme
@@ -235,13 +229,10 @@ if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) {
&& darkThemes.indexOf(localStoredTheme) >= 0) {
updateLocalStorage("preferred-dark-theme", localStoredTheme);
}
-
- // call the function to initialize the theme at least once!
- updateSystemTheme();
-} else {
- switchToSavedTheme();
}
+updateTheme();
+
if (getSettingValue("source-sidebar-show") === "true") {
// At this point in page load, `document.body` is not available yet.
// Set a class on the `<html>` element instead.
@@ -259,6 +250,6 @@ if (getSettingValue("source-sidebar-show") === "true") {
// specifically when talking to a remote website with no caching.
window.addEventListener("pageshow", ev => {
if (ev.persisted) {
- setTimeout(switchToSavedTheme, 0);
+ setTimeout(updateTheme, 0);
}
});
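
The consolidated `updateTheme` above boils down to one decision: honor the system color scheme while "use-system-theme" is not disabled, otherwise fall back to the saved theme. A language-agnostic sketch of that decision as a pure function (written in Rust here; the names mirror the settings keys used in the JavaScript, everything else is illustrative):

```rust
/// Illustrative sketch only: models the choice made by `updateTheme`.
/// `use_system_theme` corresponds to the "use-system-theme" setting,
/// `system_is_dark` to the `(prefers-color-scheme: dark)` media query.
fn pick_theme(
    use_system_theme: bool,
    system_is_dark: bool,
    preferred_light: Option<&str>,
    preferred_dark: Option<&str>,
    saved_theme: Option<&str>,
) -> (String, bool) {
    // The second tuple field mirrors `saveTheme`: system-derived choices are
    // persisted so the theme does not jump when the setting is later disabled.
    if use_system_theme {
        let theme = if system_is_dark {
            preferred_dark.unwrap_or("dark")
        } else {
            preferred_light.unwrap_or("light")
        };
        (theme.to_string(), true)
    } else {
        (saved_theme.unwrap_or("light").to_string(), false)
    }
}

fn main() {
    let (theme, save) = pick_theme(true, true, None, None, None);
    assert_eq!(theme, "dark");
    assert!(save);

    let (theme, save) = pick_theme(false, true, None, None, Some("ayu"));
    assert_eq!(theme, "ayu");
    assert!(!save);
}
```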
diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs
index b48b82307..767b974cc 100644
--- a/src/librustdoc/html/static_files.rs
+++ b/src/librustdoc/html/static_files.rs
@@ -102,9 +102,6 @@ static_files! {
scrape_examples_js => "static/js/scrape-examples.js",
wheel_svg => "static/images/wheel.svg",
clipboard_svg => "static/images/clipboard.svg",
- down_arrow_svg => "static/images/down-arrow.svg",
- toggle_minus_png => "static/images/toggle-minus.svg",
- toggle_plus_png => "static/images/toggle-plus.svg",
copyright => "static/COPYRIGHT.txt",
license_apache => "static/LICENSE-APACHE.txt",
license_mit => "static/LICENSE-MIT.txt",
diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html
index fddda293b..7690d8f25 100644
--- a/src/librustdoc/html/templates/page.html
+++ b/src/librustdoc/html/templates/page.html
@@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0"> {#- -#}
<meta name="generator" content="rustdoc"> {#- -#}
<meta name="description" content="{{page.description}}"> {#- -#}
- <meta name="keywords" content="{{page.keywords}}"> {#- -#}
<title>{{page.title}}</title> {#- -#}
<link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.source_serif_4_regular}}"> {#- -#}
<link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}{{files.fira_sans_regular}}"> {#- -#}
@@ -24,11 +23,13 @@
{%- for theme in themes -%}
<link rel="stylesheet" disabled href="{{page.root_path|safe}}{{theme}}{{page.resource_suffix}}.css"> {#- -#}
{%- endfor -%}
+ {%- if !layout.default_settings.is_empty() -%}
<script id="default-settings" {# -#}
{% for (k, v) in layout.default_settings %}
data-{{k}}="{{v}}"
{%- endfor -%}
></script> {#- -#}
+ {%- endif -%}
<script src="{{static_root_path|safe}}{{files.storage_js}}"></script> {#- -#}
{%- if page.css_class.contains("crate") -%}
<script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {#- -#}
diff --git a/src/librustdoc/html/templates/print_item.html b/src/librustdoc/html/templates/print_item.html
index ee2880bf6..3a1867b7f 100644
--- a/src/librustdoc/html/templates/print_item.html
+++ b/src/librustdoc/html/templates/print_item.html
@@ -6,7 +6,7 @@
<a href="{{component.path|safe}}index.html">{{component.name}}</a>::<wbr>
{%- endfor -%}
<a class="{{item_type}}" href="#">{{name}}</a> {#- -#}
- <button id="copy-path" onclick="copy_path(this)" title="Copy item path to clipboard"> {#- -#}
+ <button id="copy-path" title="Copy item path to clipboard"> {#- -#}
<img src="{{static_root_path|safe}}{{clipboard_svg}}" {# -#}
width="19" height="18" {# -#}
alt="Copy item path"> {#- -#}
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
index bd95ec186..18c45fd69 100644
--- a/src/librustdoc/json/conversions.rs
+++ b/src/librustdoc/json/conversions.rs
@@ -38,7 +38,7 @@ impl JsonRenderer<'_> {
Some(UrlFragment::UserWritten(_)) | None => *page_id,
};
- (link.clone(), from_item_id(id.into(), self.tcx))
+ (String::from(&**link), id_from_item_default(id.into(), self.tcx))
})
.collect();
let docs = item.attrs.collapsed_doc_value();
@@ -50,7 +50,8 @@ impl JsonRenderer<'_> {
.collect();
let span = item.span(self.tcx);
let visibility = item.visibility(self.tcx);
- let clean::Item { name, attrs: _, kind: _, item_id, cfg: _, .. } = item;
+ let clean::Item { name, item_id, .. } = item;
+ let id = id_from_item(&item, self.tcx);
let inner = match *item.kind {
clean::KeywordItem => return None,
clean::StrippedItem(ref inner) => {
@@ -69,7 +70,7 @@ impl JsonRenderer<'_> {
_ => from_clean_item(item, self.tcx),
};
Some(Item {
- id: from_item_id_with_name(item_id, self.tcx, name),
+ id,
crate_id: item_id.krate().as_u32(),
name: name.map(|sym| sym.to_string()),
span: span.and_then(|span| self.convert_span(span)),
@@ -107,7 +108,7 @@ impl JsonRenderer<'_> {
Some(ty::Visibility::Public) => Visibility::Public,
Some(ty::Visibility::Restricted(did)) if did.is_crate_root() => Visibility::Crate,
Some(ty::Visibility::Restricted(did)) => Visibility::Restricted {
- parent: from_item_id(did.into(), self.tcx),
+ parent: id_from_item_default(did.into(), self.tcx),
path: self.tcx.def_path(did).to_string_no_crate_verbose(),
},
}
@@ -204,21 +205,42 @@ impl FromWithTcx<clean::TypeBindingKind> for TypeBindingKind {
}
}
-/// It generates an ID as follows:
-///
-/// `CRATE_ID:ITEM_ID[:NAME_ID]` (if there is no name, NAME_ID is not generated).
-pub(crate) fn from_item_id(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
- from_item_id_with_name(item_id, tcx, None)
+#[inline]
+pub(crate) fn id_from_item_default(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
+ id_from_item_inner(item_id, tcx, None, None)
}
-// FIXME: this function (and appending the name at the end of the ID) should be removed when
-// reexports are not inlined anymore for json format. It should be done in #93518.
-pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Option<Symbol>) -> Id {
- struct DisplayDefId<'a>(DefId, TyCtxt<'a>, Option<Symbol>);
+/// It generates an ID as follows:
+///
+/// `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]`:
+/// * If there is no `name`, `NAME_ID` is not generated.
+/// * If there is no `extra`, `EXTRA` is not generated.
+///
+/// * `name` is the item's name if available (it's not for impl blocks for example).
+/// * `extra` is used for reexports: it contains the ID of the reexported item. It allows items
+///   with the same name but different types to both appear in the generated JSON.
+pub(crate) fn id_from_item_inner(
+ item_id: ItemId,
+ tcx: TyCtxt<'_>,
+ name: Option<Symbol>,
+ extra: Option<&Id>,
+) -> Id {
+ struct DisplayDefId<'a, 'b>(DefId, TyCtxt<'a>, Option<&'b Id>, Option<Symbol>);
- impl<'a> fmt::Display for DisplayDefId<'a> {
+ impl<'a, 'b> fmt::Display for DisplayDefId<'a, 'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let DisplayDefId(def_id, tcx, name) = self;
+ let DisplayDefId(def_id, tcx, extra, name) = self;
+ // We need this workaround because primitive types' DefId actually refers to
+ // their parent module, which isn't present in the output JSON items. So
+ // instead, we directly get the primitive symbol and convert it to u32 to
+ // generate the ID.
+ let s;
+ let extra = if let Some(e) = extra {
+ s = format!("-{}", e.0);
+ &s
+ } else {
+ ""
+ };
let name = match name {
Some(name) => format!(":{}", name.as_u32()),
None => {
@@ -240,18 +262,33 @@ pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Opt
}
}
};
- write!(f, "{}:{}{}", self.0.krate.as_u32(), u32::from(self.0.index), name)
+ write!(f, "{}:{}{name}{extra}", def_id.krate.as_u32(), u32::from(def_id.index))
}
}
match item_id {
- ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, name))),
- ItemId::Blanket { for_, impl_id } => {
- Id(format!("b:{}-{}", DisplayDefId(impl_id, tcx, None), DisplayDefId(for_, tcx, name)))
- }
- ItemId::Auto { for_, trait_ } => {
- Id(format!("a:{}-{}", DisplayDefId(trait_, tcx, None), DisplayDefId(for_, tcx, name)))
+ ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, extra, name))),
+ ItemId::Blanket { for_, impl_id } => Id(format!(
+ "b:{}-{}",
+ DisplayDefId(impl_id, tcx, None, None),
+ DisplayDefId(for_, tcx, extra, name)
+ )),
+ ItemId::Auto { for_, trait_ } => Id(format!(
+ "a:{}-{}",
+ DisplayDefId(trait_, tcx, None, None),
+ DisplayDefId(for_, tcx, extra, name)
+ )),
+ }
+}
+
+pub(crate) fn id_from_item(item: &clean::Item, tcx: TyCtxt<'_>) -> Id {
+ match *item.kind {
+ clean::ItemKind::ImportItem(ref import) => {
+ let extra =
+ import.source.did.map(ItemId::from).map(|i| id_from_item_inner(i, tcx, None, None));
+ id_from_item_inner(item.item_id, tcx, item.name, extra.as_ref())
}
+ _ => id_from_item_inner(item.item_id, tcx, item.name, None),
}
}
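
Going by the new doc comment above, a JSON item ID is now assembled as `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]`, with the extra suffix reserved for reexports. A self-contained sketch of that formatting (plain integers stand in for the rustc `DefId`/`Symbol` internals, which are simplifications here):

```rust
/// Minimal sketch of the `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]` scheme described
/// in the doc comment above. The real code works on `DefId` and `Symbol`;
/// plain u32s and strings are used here purely for illustration.
fn format_id(krate: u32, index: u32, name: Option<u32>, extra: Option<&str>) -> String {
    let name = name.map(|n| format!(":{n}")).unwrap_or_default();
    let extra = extra.map(|e| format!("-{e}")).unwrap_or_default();
    format!("{krate}:{index}{name}{extra}")
}

fn main() {
    // An ordinary item: no interned name, no reexport suffix.
    assert_eq!(format_id(0, 7, None, None), "0:7");
    // A named item that is a reexport of another item.
    assert_eq!(format_id(0, 7, Some(42), Some("0:3")), "0:7:42-0:3");
}
```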
@@ -525,7 +562,7 @@ impl FromWithTcx<clean::Path> for Path {
fn from_tcx(path: clean::Path, tcx: TyCtxt<'_>) -> Path {
Path {
name: path.whole_name(),
- id: from_item_id(path.def_id().into(), tcx),
+ id: id_from_item_default(path.def_id().into(), tcx),
args: path.segments.last().map(|args| Box::new(args.clone().args.into_tcx(tcx))),
}
}
@@ -702,7 +739,7 @@ impl FromWithTcx<clean::Import> for Import {
Import {
source: import.source.path.whole_name(),
name,
- id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)),
+ id: import.source.did.map(ItemId::from).map(|i| id_from_item_default(i, tcx)),
glob,
}
}
@@ -791,7 +828,7 @@ fn ids(items: impl IntoIterator<Item = clean::Item>, tcx: TyCtxt<'_>) -> Vec<Id>
items
.into_iter()
.filter(|x| !x.is_stripped() && !x.is_keyword())
- .map(|i| from_item_id_with_name(i.item_id, tcx, i.name))
+ .map(|i| id_from_item(&i, tcx))
.collect()
}
@@ -801,12 +838,10 @@ fn ids_keeping_stripped(
) -> Vec<Option<Id>> {
items
.into_iter()
- .map(|i| {
- if !i.is_stripped() && !i.is_keyword() {
- Some(from_item_id_with_name(i.item_id, tcx, i.name))
- } else {
- None
- }
- })
+ .map(
+ |i| {
+ if !i.is_stripped() && !i.is_keyword() { Some(id_from_item(&i, tcx)) } else { None }
+ },
+ )
.collect()
}
diff --git a/src/librustdoc/json/import_finder.rs b/src/librustdoc/json/import_finder.rs
index c5c687df7..982370aa2 100644
--- a/src/librustdoc/json/import_finder.rs
+++ b/src/librustdoc/json/import_finder.rs
@@ -1,5 +1,4 @@
-use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::DefIdSet;
use crate::{
clean::{self, Import, ImportSource, Item},
@@ -14,14 +13,15 @@ use crate::{
/// See [#100973](https://github.com/rust-lang/rust/issues/100973) and
/// [#101103](https://github.com/rust-lang/rust/issues/101103) for times when
/// this information is needed.
-pub(crate) fn get_imports(krate: clean::Crate) -> (clean::Crate, FxHashSet<DefId>) {
- let mut finder = ImportFinder { imported: FxHashSet::default() };
+pub(crate) fn get_imports(krate: clean::Crate) -> (clean::Crate, DefIdSet) {
+ let mut finder = ImportFinder::default();
let krate = finder.fold_crate(krate);
(krate, finder.imported)
}
+#[derive(Default)]
struct ImportFinder {
- imported: FxHashSet<DefId>,
+ imported: DefIdSet,
}
impl DocFolder for ImportFinder {
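
The `#[derive(Default)]` added above is what lets `get_imports` build the visitor with `ImportFinder::default()` instead of spelling out each field. A tiny sketch of the pattern, with a plain `HashSet` standing in for `DefIdSet`:

```rust
use std::collections::HashSet;

// Deriving Default means adding a field later does not require touching every
// construction site, as long as the new field's type also implements Default.
#[derive(Default)]
struct ImportFinder {
    imported: HashSet<u32>, // stand-in for DefIdSet, a set keyed by DefId
}

fn main() {
    let finder = ImportFinder::default();
    assert!(finder.imported.is_empty());
}
```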
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index 1196f944f..08bceb59c 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -13,8 +13,8 @@ use std::io::{BufWriter, Write};
use std::path::PathBuf;
use std::rc::Rc;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::DefId;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_span::def_id::LOCAL_CRATE;
@@ -28,7 +28,7 @@ use crate::docfs::PathError;
use crate::error::Error;
use crate::formats::cache::Cache;
use crate::formats::FormatRenderer;
-use crate::json::conversions::{from_item_id, from_item_id_with_name, IntoWithTcx};
+use crate::json::conversions::{id_from_item, id_from_item_default, IntoWithTcx};
use crate::{clean, try_err};
#[derive(Clone)]
@@ -40,7 +40,7 @@ pub(crate) struct JsonRenderer<'tcx> {
/// The directory where the blob will be written to.
out_path: PathBuf,
cache: Rc<Cache>,
- imported_items: FxHashSet<DefId>,
+ imported_items: DefIdSet,
}
impl<'tcx> JsonRenderer<'tcx> {
@@ -58,7 +58,7 @@ impl<'tcx> JsonRenderer<'tcx> {
.map(|i| {
let item = &i.impl_item;
self.item(item.clone()).unwrap();
- from_item_id_with_name(item.item_id, self.tcx, item.name)
+ id_from_item(&item, self.tcx)
})
.collect()
})
@@ -80,17 +80,16 @@ impl<'tcx> JsonRenderer<'tcx> {
// document primitive items in an arbitrary crate by using
// `doc(primitive)`.
let mut is_primitive_impl = false;
- if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind {
- if impl_.trait_.is_none() {
- if let clean::types::Type::Primitive(_) = impl_.for_ {
- is_primitive_impl = true;
- }
- }
+ if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind &&
+ impl_.trait_.is_none() &&
+ let clean::types::Type::Primitive(_) = impl_.for_
+ {
+ is_primitive_impl = true;
}
if item.item_id.is_local() || is_primitive_impl {
self.item(item.clone()).unwrap();
- Some(from_item_id_with_name(item.item_id, self.tcx, item.name))
+ Some(id_from_item(&item, self.tcx))
} else {
None
}
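
The rewrite in the hunk above relies on the `let_chains` nightly feature (enabled in librustdoc's `lib.rs`, see further down this patch) to flatten the nested `if`s into a single condition. A reduced sketch of the same shape, with made-up types standing in for the `clean` ones:

```rust
// Requires a nightly compiler; librustdoc enables this feature crate-wide.
#![feature(let_chains)]
#![allow(dead_code)]

enum Type { Primitive(&'static str), Other }
struct Impl { trait_: Option<&'static str>, for_: Type }

// `let` bindings and ordinary boolean tests can be chained with `&&`, so the
// nested `if let` / `if` tower collapses into one readable condition.
fn is_primitive_inherent_impl(imp: &Impl) -> bool {
    if imp.trait_.is_none() && let Type::Primitive(_) = imp.for_ {
        true
    } else {
        false
    }
}

fn main() {
    let imp = Impl { trait_: None, for_: Type::Primitive("u32") };
    assert!(is_primitive_inherent_impl(&imp));
}
```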
@@ -151,7 +150,6 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
// Flatten items that recursively store other items
item.kind.inner_items().for_each(|i| self.item(i.clone()).unwrap());
- let name = item.name;
let item_id = item.item_id;
if let Some(mut new_item) = self.convert_item(item) {
let can_be_ignored = match new_item.inner {
@@ -194,10 +192,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
| types::ItemEnum::Macro(_)
| types::ItemEnum::ProcMacro(_) => false,
};
- let removed = self
- .index
- .borrow_mut()
- .insert(from_item_id_with_name(item_id, self.tcx, name), new_item.clone());
+ let removed = self.index.borrow_mut().insert(new_item.id.clone(), new_item.clone());
// FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check
             // to make sure the items are unique. The main place this happens is when an item is
@@ -208,6 +203,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
if !can_be_ignored {
assert_eq!(old_item, new_item);
}
+ trace!("replaced {:?}\nwith {:?}", old_item, new_item);
}
}
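
The index insertion above leans on `HashMap::insert` returning the previously stored value, which is what makes the duplicate-detection assertion possible. A small self-contained sketch of that pattern:

```rust
use std::collections::HashMap;

fn main() {
    let mut index: HashMap<String, String> = HashMap::new();

    index.insert("0:7".to_string(), "item".to_string());
    // `insert` hands back the old value when the key was already present,
    // so a duplicate can be checked against the freshly inserted one.
    if let Some(old_item) = index.insert("0:7".to_string(), "item".to_string()) {
        assert_eq!(old_item, "item");
    }
}
```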
@@ -222,7 +218,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
fn after_krate(&mut self) -> Result<(), Error> {
debug!("Done with crate");
- debug!("Adding Primitve impls");
+ debug!("Adding Primitive impls");
for primitive in Rc::clone(&self.cache).primitive_locations.values() {
self.get_impls(*primitive);
}
@@ -247,7 +243,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
.chain(&self.cache.external_paths)
.map(|(&k, &(ref path, kind))| {
(
- from_item_id(k.into(), self.tcx),
+ id_from_item_default(k.into(), self.tcx),
types::ItemSummary {
crate_id: k.krate.as_u32(),
path: path.iter().map(|s| s.to_string()).collect(),
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 86454e1f2..4fcf08736 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -6,7 +6,6 @@
#![feature(array_methods)]
#![feature(assert_matches)]
#![feature(box_patterns)]
-#![feature(control_flow_enum)]
#![feature(drain_filter)]
#![feature(is_terminal)]
#![feature(let_chains)]
@@ -21,6 +20,7 @@
#![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
#![allow(rustc::potential_query_instability)]
+extern crate thin_vec;
#[macro_use]
extern crate tracing;
@@ -32,6 +32,7 @@ extern crate tracing;
//
// Dependencies listed in Cargo.toml do not need `extern crate`.
+extern crate pulldown_cmark;
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_attr;
@@ -82,7 +83,6 @@ use rustc_session::getopts;
use rustc_session::{early_error, early_warn};
use crate::clean::utils::DOC_RUST_LANG_ORG_CHANNEL;
-use crate::passes::collect_intra_doc_links;
/// A macro to create a FxHashMap.
///
@@ -742,7 +742,7 @@ fn main_args(at_args: &[String]) -> MainResult {
(false, true) => {
let input = options.input.clone();
let edition = options.edition;
- let config = core::create_config(options);
+ let config = core::create_config(options, &render_options);
// `markdown::render` can invoke `doctest::make_test`, which
// requires session globals and a thread pool, so we use
@@ -775,7 +775,7 @@ fn main_args(at_args: &[String]) -> MainResult {
let scrape_examples_options = options.scrape_examples_options.clone();
let bin_crate = options.bin_crate;
- let config = core::create_config(options);
+ let config = core::create_config(options, &render_options);
interface::run_compiler(config, |compiler| {
let sess = compiler.session();
@@ -793,40 +793,14 @@ fn main_args(at_args: &[String]) -> MainResult {
}
compiler.enter(|queries| {
- // We need to hold on to the complete resolver, so we cause everything to be
- // cloned for the analysis passes to use. Suboptimal, but necessary in the
- // current architecture.
- // FIXME(#83761): Resolver cloning can lead to inconsistencies between data in the
- // two copies because one of the copies can be modified after `TyCtxt` construction.
- let (resolver, resolver_caches) = {
- let expansion = abort_on_err(queries.expansion(), sess);
- let (krate, resolver, _) = &*expansion.borrow();
- let resolver_caches = resolver.borrow_mut().access(|resolver| {
- collect_intra_doc_links::early_resolve_intra_doc_links(
- resolver,
- krate,
- render_options.document_private,
- )
- });
- (resolver.clone(), resolver_caches)
- };
-
+ let mut gcx = abort_on_err(queries.global_ctxt(), sess);
if sess.diagnostic().has_errors_or_lint_errors().is_some() {
sess.fatal("Compilation failed, aborting rustdoc");
}
- let global_ctxt = abort_on_err(queries.global_ctxt(), sess);
-
- global_ctxt.enter(|tcx| {
+ gcx.enter(|tcx| {
let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || {
- core::run_global_ctxt(
- tcx,
- resolver,
- resolver_caches,
- show_coverage,
- render_options,
- output_format,
- )
+ core::run_global_ctxt(tcx, show_coverage, render_options, output_format)
});
info!("finished with rustc");
diff --git a/src/librustdoc/lint.rs b/src/librustdoc/lint.rs
index 3aad97bc2..6d289eb99 100644
--- a/src/librustdoc/lint.rs
+++ b/src/librustdoc/lint.rs
@@ -194,7 +194,11 @@ pub(crate) fn register_lints(_sess: &Session, lint_store: &mut LintStore) {
true,
"rustdoc::all",
Some("rustdoc"),
- RUSTDOC_LINTS.iter().map(|&lint| LintId::of(lint)).collect(),
+ RUSTDOC_LINTS
+ .iter()
+ .filter(|lint| lint.feature_gate.is_none()) // only include stable lints
+ .map(|&lint| LintId::of(lint))
+ .collect(),
);
for lint in &*RUSTDOC_LINTS {
let name = lint.name_lower();
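
The lint-group change above registers only stable lints under `rustdoc::all` by filtering on `feature_gate`. A toy model of that filter step (the `Lint` struct and the gate name here are stand-ins, not the rustc types):

```rust
// Stand-in for the rustc `Lint` struct: only the field the filter cares about.
struct Lint {
    name: &'static str,
    feature_gate: Option<&'static str>,
}

fn stable_group_members(lints: &[Lint]) -> Vec<&'static str> {
    lints
        .iter()
        .filter(|lint| lint.feature_gate.is_none()) // unstable lints stay out of the group
        .map(|lint| lint.name)
        .collect()
}

fn main() {
    let lints = [
        Lint { name: "broken_intra_doc_links", feature_gate: None },
        Lint { name: "missing_doc_code_examples", feature_gate: Some("some_nightly_feature") },
    ];
    assert_eq!(stable_group_members(&lints), ["broken_intra_doc_links"]);
}
```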
diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs
index 5f4ad6d2a..4321d4aa3 100644
--- a/src/librustdoc/markdown.rs
+++ b/src/librustdoc/markdown.rs
@@ -72,7 +72,14 @@ pub(crate) fn render<P: AsRef<Path>>(
let mut ids = IdMap::new();
let error_codes = ErrorCodes::from(options.unstable_features.is_nightly_build());
let text = if !options.markdown_no_toc {
- MarkdownWithToc(text, &mut ids, error_codes, edition, &playground).into_string()
+ MarkdownWithToc {
+ content: text,
+ ids: &mut ids,
+ error_codes,
+ edition,
+ playground: &playground,
+ }
+ .into_string()
} else {
Markdown {
content: text,
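
`MarkdownWithToc` changes from a positional tuple struct to named fields, so call sites like the one above say what each argument is. A before/after sketch with simplified field types:

```rust
#![allow(dead_code)]

// Before: positional fields force readers to remember the argument order.
struct MarkdownWithTocTuple<'a>(&'a str, &'a mut Vec<String>, bool);

// After: named fields make the construction site self-describing.
struct MarkdownWithToc<'a> {
    content: &'a str,
    ids: &'a mut Vec<String>,
    playground_enabled: bool, // simplified stand-ins for the real field types
}

fn main() {
    let mut ids = Vec::new();
    let _old = MarkdownWithTocTuple("# Title", &mut ids, true);

    let mut ids = Vec::new();
    let _new = MarkdownWithToc { content: "# Title", ids: &mut ids, playground_enabled: true };
}
```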
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index 02b227896..0b22f943d 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -216,13 +216,7 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
);
let has_doc_example = tests.found_tests != 0;
- // The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
- // would presumably panic if a fake `DefIndex` were passed.
- let hir_id = self
- .ctx
- .tcx
- .hir()
- .local_def_id_to_hir_id(i.item_id.expect_def_id().expect_local());
+ let hir_id = DocContext::as_local_hir_id(self.ctx.tcx, i.item_id).unwrap();
let (level, source) = self.ctx.tcx.lint_level_at_node(MISSING_DOCS, hir_id);
// In case we have:
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 6aa2dda98..a39d57d42 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -14,8 +14,8 @@ use crate::visit::DocVisitor;
use crate::visit_ast::inherits_doc_hidden;
use rustc_hir as hir;
use rustc_middle::lint::LintLevelSource;
+use rustc_middle::ty::DefIdTree;
use rustc_session::lint;
-use rustc_span::symbol::sym;
pub(crate) const CHECK_DOC_TEST_VISIBILITY: Pass = Pass {
name: "check_doc_test_visibility",
@@ -79,30 +79,32 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
// The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
// would presumably panic if a fake `DefIndex` were passed.
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(item.item_id.expect_def_id().expect_local());
+ let def_id = item.item_id.expect_def_id().expect_local();
// check if parent is trait impl
- if let Some(parent_hir_id) = cx.tcx.hir().opt_parent_id(hir_id) {
- if let Some(parent_node) = cx.tcx.hir().find(parent_hir_id) {
- if matches!(
- parent_node,
- hir::Node::Item(hir::Item {
- kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }),
- ..
- })
- ) {
- return false;
- }
- }
+ if let Some(parent_def_id) = cx.tcx.opt_local_parent(def_id) &&
+ let Some(parent_node) = cx.tcx.hir().find_by_def_id(parent_def_id) &&
+ matches!(
+ parent_node,
+ hir::Node::Item(hir::Item {
+ kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }),
+ ..
+ })
+ )
+ {
+ return false;
}
- if cx.tcx.hir().attrs(hir_id).lists(sym::doc).has_word(sym::hidden)
- || inherits_doc_hidden(cx.tcx, hir_id)
- || cx.tcx.hir().span(hir_id).in_derive_expansion()
+ if cx.tcx.is_doc_hidden(def_id.to_def_id())
+ || inherits_doc_hidden(cx.tcx, def_id)
+ || cx.tcx.def_span(def_id.to_def_id()).in_derive_expansion()
{
return false;
}
- let (level, source) = cx.tcx.lint_level_at_node(crate::lint::MISSING_DOC_CODE_EXAMPLES, hir_id);
+ let (level, source) = cx.tcx.lint_level_at_node(
+ crate::lint::MISSING_DOC_CODE_EXAMPLES,
+ cx.tcx.hir().local_def_id_to_hir_id(def_id),
+ );
level != lint::Level::Allow || matches!(source, LintLevelSource::Default)
}
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 075951312..cbfc58138 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -15,7 +15,8 @@ use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
use rustc_hir::Mutability;
use rustc_middle::ty::{DefIdTree, Ty, TyCtxt};
use rustc_middle::{bug, ty};
-use rustc_resolve::ParentScope;
+use rustc_resolve::rustdoc::MalformedGenerics;
+use rustc_resolve::rustdoc::{prepare_to_doc_link_resolution, strip_generics_from_path};
use rustc_session::lint::Lint;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{sym, Ident, Symbol};
@@ -34,9 +35,6 @@ use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
use crate::passes::Pass;
use crate::visit::DocVisitor;
-mod early;
-pub(crate) use early::early_resolve_intra_doc_links;
-
pub(crate) const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
name: "collect-intra-doc-links",
run: collect_intra_doc_links,
@@ -179,47 +177,6 @@ enum ResolutionFailure<'a> {
NotResolved(UnresolvedPath<'a>),
}
-#[derive(Clone, Copy, Debug)]
-enum MalformedGenerics {
- /// This link has unbalanced angle brackets.
- ///
- /// For example, `Vec<T` should trigger this, as should `Vec<T>>`.
- UnbalancedAngleBrackets,
- /// The generics are not attached to a type.
- ///
- /// For example, `<T>` should trigger this.
- ///
- /// This is detected by checking if the path is empty after the generics are stripped.
- MissingType,
- /// The link uses fully-qualified syntax, which is currently unsupported.
- ///
- /// For example, `<Vec as IntoIterator>::into_iter` should trigger this.
- ///
- /// This is detected by checking if ` as ` (the keyword `as` with spaces around it) is inside
- /// angle brackets.
- HasFullyQualifiedSyntax,
- /// The link has an invalid path separator.
- ///
- /// For example, `Vec:<T>:new()` should trigger this. Note that `Vec:new()` will **not**
- /// trigger this because it has no generics and thus [`strip_generics_from_path`] will not be
- /// called.
- ///
- /// Note that this will also **not** be triggered if the invalid path separator is inside angle
- /// brackets because rustdoc mostly ignores what's inside angle brackets (except for
- /// [`HasFullyQualifiedSyntax`](MalformedGenerics::HasFullyQualifiedSyntax)).
- ///
- /// This is detected by checking if there is a colon followed by a non-colon in the link.
- InvalidPathSeparator,
- /// The link has too many angle brackets.
- ///
- /// For example, `Vec<<T>>` should trigger this.
- TooManyAngleBrackets,
- /// The link has empty angle brackets.
- ///
- /// For example, `Vec<>` should trigger this.
- EmptyAngleBrackets,
-}
-
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) enum UrlFragment {
Item(DefId),
@@ -271,7 +228,7 @@ struct ResolutionInfo {
item_id: ItemId,
module_id: DefId,
dis: Option<Disambiguator>,
- path_str: String,
+ path_str: Box<str>,
extra_fragment: Option<String>,
}
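
Several fields in this file move from `String` to `Box<str>`. For strings that are never mutated after construction, `Box<str>` drops the capacity word that `String` carries, which adds up when many preprocessed links are stored. A short sketch:

```rust
use std::mem::size_of;

fn main() {
    // String = pointer + length + capacity; Box<str> = pointer + length.
    assert_eq!(size_of::<String>(), 3 * size_of::<usize>());
    assert_eq!(size_of::<Box<str>>(), 2 * size_of::<usize>());

    // Conversion is a one-liner, and the result still derefs to &str.
    let path: Box<str> = Box::from("Vec::new");
    assert_eq!(&*path, "Vec::new");
}
```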
@@ -336,9 +293,10 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
let ty_res = self.resolve_path(&path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
match ty_res {
- Res::Def(DefKind::Enum, did) => match tcx.type_of(did).kind() {
+ Res::Def(DefKind::Enum, did) => match tcx.type_of(did).subst_identity().kind() {
ty::Adt(def, _) if def.is_enum() => {
- if let Some(field) = def.all_fields().find(|f| f.name == variant_field_name) {
+ if let Some(variant) = def.variants().iter().find(|v| v.name == variant_name)
+ && let Some(field) = variant.fields.iter().find(|f| f.name == variant_field_name) {
Ok((ty_res, field.did))
} else {
Err(UnresolvedPath {
@@ -401,16 +359,16 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
_ => def_id,
})
.and_then(|self_id| match tcx.def_kind(self_id) {
- DefKind::Impl => self.def_id_to_res(self_id),
+ DefKind::Impl { .. } => self.def_id_to_res(self_id),
DefKind::Use => None,
def_kind => Some(Res::Def(def_kind, self_id)),
})
}
- /// Convenience wrapper around `resolve_rustdoc_path`.
+ /// Convenience wrapper around `doc_link_resolutions`.
///
/// This also handles resolving `true` and `false` as booleans.
- /// NOTE: `resolve_rustdoc_path` knows only about paths, not about types.
+ /// NOTE: `doc_link_resolutions` knows only about paths, not about types.
/// Associated items will never be resolved by this function.
fn resolve_path(
&self,
@@ -426,17 +384,11 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// Resolver doesn't know about true, false, and types that aren't paths (e.g. `()`).
let result = self
.cx
- .resolver_caches
- .doc_link_resolutions
- .get(&(Symbol::intern(path_str), ns, module_id))
+ .tcx
+ .doc_link_resolutions(module_id)
+ .get(&(Symbol::intern(path_str), ns))
.copied()
- .unwrap_or_else(|| {
- self.cx.enter_resolver(|resolver| {
- let parent_scope =
- ParentScope::module(resolver.expect_module(module_id), resolver);
- resolver.resolve_rustdoc_path(path_str, ns, parent_scope)
- })
- })
+ .unwrap_or_else(|| panic!("no resolution for {:?} {:?} {:?}", path_str, ns, module_id))
.and_then(|res| res.try_into().ok())
.or_else(|| resolve_primitive(path_str, ns));
debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns);
@@ -519,7 +471,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
/// This is used for resolving type aliases.
fn def_id_to_res(&self, ty_id: DefId) -> Option<Res> {
use PrimitiveType::*;
- Some(match *self.cx.tcx.type_of(ty_id).kind() {
+ Some(match *self.cx.tcx.type_of(ty_id).subst_identity().kind() {
ty::Bool => Res::Primitive(Bool),
ty::Char => Res::Primitive(Char),
ty::Int(ity) => Res::Primitive(ity.into()),
@@ -542,6 +494,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
| ty::Closure(..)
| ty::Generator(..)
| ty::GeneratorWitness(_)
+ | ty::GeneratorWitnessMIR(..)
| ty::Dynamic(..)
| ty::Param(_)
| ty::Bound(..)
@@ -561,27 +514,27 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// FIXME: Only simple types are supported here, see if we can support
// other types such as Tuple, Array, Slice, etc.
// See https://github.com/rust-lang/rust/issues/90703#issuecomment-1004263455
- Some(tcx.mk_ty(match prim {
- Bool => ty::Bool,
- Str => ty::Str,
- Char => ty::Char,
- Never => ty::Never,
- I8 => ty::Int(ty::IntTy::I8),
- I16 => ty::Int(ty::IntTy::I16),
- I32 => ty::Int(ty::IntTy::I32),
- I64 => ty::Int(ty::IntTy::I64),
- I128 => ty::Int(ty::IntTy::I128),
- Isize => ty::Int(ty::IntTy::Isize),
- F32 => ty::Float(ty::FloatTy::F32),
- F64 => ty::Float(ty::FloatTy::F64),
- U8 => ty::Uint(ty::UintTy::U8),
- U16 => ty::Uint(ty::UintTy::U16),
- U32 => ty::Uint(ty::UintTy::U32),
- U64 => ty::Uint(ty::UintTy::U64),
- U128 => ty::Uint(ty::UintTy::U128),
- Usize => ty::Uint(ty::UintTy::Usize),
+ Some(match prim {
+ Bool => tcx.types.bool,
+ Str => tcx.types.str_,
+ Char => tcx.types.char,
+ Never => tcx.types.never,
+ I8 => tcx.types.i8,
+ I16 => tcx.types.i16,
+ I32 => tcx.types.i32,
+ I64 => tcx.types.i64,
+ I128 => tcx.types.i128,
+ Isize => tcx.types.isize,
+ F32 => tcx.types.f32,
+ F64 => tcx.types.f64,
+ U8 => tcx.types.u8,
+ U16 => tcx.types.u16,
+ U32 => tcx.types.u32,
+ U64 => tcx.types.u64,
+ U128 => tcx.types.u128,
+ Usize => tcx.types.usize,
_ => return None,
- }))
+ })
}
/// Resolve an associated item, returning its containing page's `Res`
@@ -619,7 +572,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
debug!("looking for associated item named {} for item {:?}", item_name, did);
// Checks if item_name is a variant of the `SomeItem` enum
if ns == TypeNS && def_kind == DefKind::Enum {
- match tcx.type_of(did).kind() {
+ match tcx.type_of(did).subst_identity().kind() {
ty::Adt(adt_def, _) => {
for variant in adt_def.variants() {
if variant.name == item_name {
@@ -653,7 +606,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// something like [`ambi_fn`](<SomeStruct as SomeTrait>::ambi_fn)
.or_else(|| {
resolve_associated_trait_item(
- tcx.type_of(did),
+ tcx.type_of(did).subst_identity(),
module_id,
item_name,
ns,
@@ -686,7 +639,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// they also look like associated items (`module::Type::Variant`),
// because they are real Rust syntax (unlike the intra-doc links
// field syntax) and are handled by the compiler's resolver.
- let def = match tcx.type_of(did).kind() {
+ let def = match tcx.type_of(did).subst_identity().kind() {
ty::Adt(def, _) if !def.is_enum() => def,
_ => return None,
};
@@ -736,12 +689,12 @@ fn resolve_associated_trait_item<'a>(
.find_by_name_and_namespace(cx.tcx, Ident::with_dummy_span(item_name), ns, trait_)
.map(|trait_assoc| {
trait_assoc_to_impl_assoc_item(cx.tcx, impl_, trait_assoc.def_id)
- .unwrap_or(trait_assoc)
+ .unwrap_or(*trait_assoc)
})
});
// FIXME(#74563): warn about ambiguity
debug!("the candidates were {:?}", candidates.clone().collect::<Vec<_>>());
- candidates.next().copied()
+ candidates.next()
}
/// Find the associated item in the impl `impl_id` that corresponds to the
@@ -758,7 +711,7 @@ fn trait_assoc_to_impl_assoc_item<'tcx>(
tcx: TyCtxt<'tcx>,
impl_id: DefId,
trait_assoc_id: DefId,
-) -> Option<&'tcx ty::AssocItem> {
+) -> Option<ty::AssocItem> {
let trait_to_impl_assoc_map = tcx.impl_item_implementor_ids(impl_id);
debug!(?trait_to_impl_assoc_map);
let impl_assoc_id = *trait_to_impl_assoc_map.get(&trait_assoc_id)?;
@@ -778,8 +731,7 @@ fn trait_impls_for<'a>(
module: DefId,
) -> FxHashSet<(DefId, DefId)> {
let tcx = cx.tcx;
- let iter = cx.resolver_caches.traits_in_scope[&module].iter().flat_map(|trait_candidate| {
- let trait_ = trait_candidate.def_id;
+ let iter = tcx.doc_link_traits_in_scope(module).iter().flat_map(|&trait_| {
trace!("considering explicit impl for trait {:?}", trait_);
// Look at each trait implementation to see if it's an impl for `did`
@@ -845,7 +797,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
// In the presence of re-exports, this is not the same as the module of the item.
// Rather than merging all documentation into one, resolve it one attribute at a time
// so we know which module it came from.
- for (parent_module, doc) in item.attrs.prepare_to_doc_link_resolution() {
+ for (parent_module, doc) in prepare_to_doc_link_resolution(&item.attrs.doc_strings) {
if !may_have_doc_links(&doc) {
continue;
}
@@ -853,22 +805,12 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
// NOTE: if there are links that start in one crate and end in another, this will not resolve them.
// This is a degenerate case and it's not supported by rustdoc.
let parent_node = parent_module.or(parent_node);
- let mut tmp_links = self
- .cx
- .resolver_caches
- .markdown_links
- .take()
- .expect("`markdown_links` are already borrowed");
- if !tmp_links.contains_key(&doc) {
- tmp_links.insert(doc.clone(), preprocessed_markdown_links(&doc));
- }
- for md_link in &tmp_links[&doc] {
- let link = self.resolve_link(item, &doc, parent_node, md_link);
+ for md_link in preprocessed_markdown_links(&doc) {
+ let link = self.resolve_link(item, &doc, parent_node, &md_link);
if let Some(link) = link {
self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link);
}
}
- self.cx.resolver_caches.markdown_links = Some(tmp_links);
}
if item.is_mod() {
@@ -907,10 +849,10 @@ impl PreprocessingError {
#[derive(Clone)]
struct PreprocessingInfo {
- path_str: String,
+ path_str: Box<str>,
disambiguator: Option<Disambiguator>,
extra_fragment: Option<String>,
- link_text: String,
+ link_text: Box<str>,
}
// Not a typedef to avoid leaking several private structures from this module.
@@ -942,7 +884,8 @@ fn preprocess_link(
let mut parts = stripped.split('#');
let link = parts.next().unwrap();
- if link.trim().is_empty() {
+ let link = link.trim();
+ if link.is_empty() {
// This is an anchor to an element of the current page, nothing to do in here!
return None;
}
@@ -955,7 +898,7 @@ fn preprocess_link(
// Parse and strip the disambiguator from the link, if present.
let (disambiguator, path_str, link_text) = match Disambiguator::from_str(link) {
Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()),
- Ok(None) => (None, link.trim(), link.trim()),
+ Ok(None) => (None, link, link),
Err((err_msg, relative_range)) => {
// Only report error if we would not have ignored this link. See issue #83859.
if !should_ignore_link_with_disambiguators(link) {
@@ -974,16 +917,12 @@ fn preprocess_link(
}
// Strip generics from the path.
- let path_str = if path_str.contains(['<', '>'].as_slice()) {
- match strip_generics_from_path(path_str) {
- Ok(path) => path,
- Err(err) => {
- debug!("link has malformed generics: {}", path_str);
- return Some(Err(PreprocessingError::MalformedGenerics(err, path_str.to_owned())));
- }
+ let path_str = match strip_generics_from_path(path_str) {
+ Ok(path) => path,
+ Err(err) => {
+ debug!("link has malformed generics: {}", path_str);
+ return Some(Err(PreprocessingError::MalformedGenerics(err, path_str.to_owned())));
}
- } else {
- path_str.to_owned()
};
// Sanity check to make sure we don't have any angle brackets after stripping generics.
@@ -998,7 +937,7 @@ fn preprocess_link(
path_str,
disambiguator,
extra_fragment: extra_fragment.map(|frag| frag.to_owned()),
- link_text: link_text.to_owned(),
+ link_text: Box::<str>::from(link_text),
}))
}
@@ -1054,7 +993,7 @@ impl LinkCollector<'_, '_> {
item_id: item.item_id,
module_id,
dis: disambiguator,
- path_str: path_str.to_owned(),
+ path_str: path_str.clone(),
extra_fragment: extra_fragment.clone(),
},
diag_info.clone(), // this struct should really be Copy, but Range is not :(
@@ -1128,7 +1067,7 @@ impl LinkCollector<'_, '_> {
}
res.def_id(self.cx.tcx).map(|page_id| ItemLink {
- link: ori_link.link.clone(),
+ link: Box::<str>::from(&*ori_link.link),
link_text: link_text.clone(),
page_id,
fragment,
@@ -1152,7 +1091,7 @@ impl LinkCollector<'_, '_> {
let page_id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));
Some(ItemLink {
- link: ori_link.link.clone(),
+ link: Box::<str>::from(&*ori_link.link),
link_text: link_text.clone(),
page_id,
fragment,
@@ -1194,14 +1133,9 @@ impl LinkCollector<'_, '_> {
}
// item can be non-local e.g. when using #[doc(primitive = "pointer")]
- if let Some((src_id, dst_id)) = id
- .as_local()
- // The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
- // would presumably panic if a fake `DefIndex` were passed.
- .and_then(|dst_id| {
- item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
- })
- {
+ if let Some((src_id, dst_id)) = id.as_local().and_then(|dst_id| {
+ item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
+ }) {
if self.cx.tcx.effective_visibilities(()).is_exported(src_id)
&& !self.cx.tcx.effective_visibilities(()).is_exported(dst_id)
{
@@ -1681,7 +1615,7 @@ fn resolution_failure(
// ignore duplicates
let mut variants_seen = SmallVec::<[_; 3]>::new();
for mut failure in kinds {
- let variant = std::mem::discriminant(&failure);
+ let variant = mem::discriminant(&failure);
if variants_seen.contains(&variant) {
continue;
}
@@ -1751,7 +1685,7 @@ fn resolution_failure(
if !path_str.contains("::") {
if disambiguator.map_or(true, |d| d.ns() == MacroNS)
- && let Some(&res) = collector.cx.resolver_caches.all_macro_rules
+ && let Some(&res) = collector.cx.tcx.resolutions(()).all_macro_rules
.get(&Symbol::intern(path_str))
{
diag.note(format!(
@@ -1772,15 +1706,35 @@ fn resolution_failure(
// Otherwise, it must be an associated item or variant
let res = partial_res.expect("None case was handled by `last_found_module`");
- let kind = match res {
- Res::Def(kind, _) => Some(kind),
+ let kind_did = match res {
+ Res::Def(kind, did) => Some((kind, did)),
Res::Primitive(_) => None,
};
- let path_description = if let Some(kind) = kind {
+ let is_struct_variant = |did| {
+ if let ty::Adt(def, _) = tcx.type_of(did).subst_identity().kind()
+ && def.is_enum()
+ && let Some(variant) = def.variants().iter().find(|v| v.name == res.name(tcx)) {
+ // ctor is `None` if variant is a struct
+ variant.ctor.is_none()
+ } else {
+ false
+ }
+ };
+ let path_description = if let Some((kind, did)) = kind_did {
match kind {
Mod | ForeignMod => "inner item",
Struct => "field or associated item",
Enum | Union => "variant or associated item",
+ Variant if is_struct_variant(did) => {
+ let variant = res.name(tcx);
+ let note = format!("variant `{variant}` has no such field");
+ if let Some(span) = sp {
+ diag.span_label(span, &note);
+ } else {
+ diag.note(&note);
+ }
+ return;
+ }
Variant
| Field
| Closure
@@ -1808,7 +1762,7 @@ fn resolution_failure(
}
Trait | TyAlias | ForeignTy | OpaqueTy | ImplTraitPlaceholder
| TraitAlias | TyParam | Static(_) => "associated item",
- Impl | GlobalAsm => unreachable!("not a path"),
+ Impl { .. } | GlobalAsm => unreachable!("not a path"),
}
} else {
"associated item"
@@ -2068,94 +2022,3 @@ fn resolve_primitive(path_str: &str, ns: Namespace) -> Option<Res> {
debug!("resolved primitives {:?}", prim);
Some(Res::Primitive(prim))
}
-
-fn strip_generics_from_path(path_str: &str) -> Result<String, MalformedGenerics> {
- let mut stripped_segments = vec![];
- let mut path = path_str.chars().peekable();
- let mut segment = Vec::new();
-
- while let Some(chr) = path.next() {
- match chr {
- ':' => {
- if path.next_if_eq(&':').is_some() {
- let stripped_segment =
- strip_generics_from_path_segment(mem::take(&mut segment))?;
- if !stripped_segment.is_empty() {
- stripped_segments.push(stripped_segment);
- }
- } else {
- return Err(MalformedGenerics::InvalidPathSeparator);
- }
- }
- '<' => {
- segment.push(chr);
-
- match path.next() {
- Some('<') => {
- return Err(MalformedGenerics::TooManyAngleBrackets);
- }
- Some('>') => {
- return Err(MalformedGenerics::EmptyAngleBrackets);
- }
- Some(chr) => {
- segment.push(chr);
-
- while let Some(chr) = path.next_if(|c| *c != '>') {
- segment.push(chr);
- }
- }
- None => break,
- }
- }
- _ => segment.push(chr),
- }
- trace!("raw segment: {:?}", segment);
- }
-
- if !segment.is_empty() {
- let stripped_segment = strip_generics_from_path_segment(segment)?;
- if !stripped_segment.is_empty() {
- stripped_segments.push(stripped_segment);
- }
- }
-
- debug!("path_str: {:?}\nstripped segments: {:?}", path_str, &stripped_segments);
-
- let stripped_path = stripped_segments.join("::");
-
- if !stripped_path.is_empty() { Ok(stripped_path) } else { Err(MalformedGenerics::MissingType) }
-}
-
-fn strip_generics_from_path_segment(segment: Vec<char>) -> Result<String, MalformedGenerics> {
- let mut stripped_segment = String::new();
- let mut param_depth = 0;
-
- let mut latest_generics_chunk = String::new();
-
- for c in segment {
- if c == '<' {
- param_depth += 1;
- latest_generics_chunk.clear();
- } else if c == '>' {
- param_depth -= 1;
- if latest_generics_chunk.contains(" as ") {
- // The segment tries to use fully-qualified syntax, which is currently unsupported.
- // Give a helpful error message instead of completely ignoring the angle brackets.
- return Err(MalformedGenerics::HasFullyQualifiedSyntax);
- }
- } else {
- if param_depth == 0 {
- stripped_segment.push(c);
- } else {
- latest_generics_chunk.push(c);
- }
- }
- }
-
- if param_depth == 0 {
- Ok(stripped_segment)
- } else {
- // The segment has unbalanced angle brackets, e.g. `Vec<T` or `Vec<T>>`
- Err(MalformedGenerics::UnbalancedAngleBrackets)
- }
-}
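
The `strip_generics_from_path` helpers deleted above now live in `rustc_resolve::rustdoc` (see the new import near the top of this file's diff). For reference, the core idea is simply to drop everything inside angle brackets from a path; a deliberately simplified, error-handling-free sketch:

```rust
/// Simplified sketch only: the real implementation (now in
/// `rustc_resolve::rustdoc`) also rejects malformed generics such as
/// unbalanced brackets or fully-qualified syntax.
fn strip_generics(path: &str) -> String {
    let mut out = String::with_capacity(path.len());
    let mut depth = 0usize;
    for c in path.chars() {
        match c {
            '<' => depth += 1,
            '>' => depth = depth.saturating_sub(1),
            _ if depth == 0 => out.push(c),
            _ => {} // inside angle brackets: dropped
        }
    }
    out
}

fn main() {
    assert_eq!(strip_generics("Vec<T>::new"), "Vec::new");
    assert_eq!(strip_generics("HashMap<K, V>"), "HashMap");
}
```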
diff --git a/src/librustdoc/passes/collect_intra_doc_links/early.rs b/src/librustdoc/passes/collect_intra_doc_links/early.rs
deleted file mode 100644
index 42677bd84..000000000
--- a/src/librustdoc/passes/collect_intra_doc_links/early.rs
+++ /dev/null
@@ -1,407 +0,0 @@
-use crate::clean::Attributes;
-use crate::core::ResolverCaches;
-use crate::passes::collect_intra_doc_links::preprocessed_markdown_links;
-use crate::passes::collect_intra_doc_links::{Disambiguator, PreprocessedMarkdownLink};
-
-use rustc_ast::visit::{self, AssocCtxt, Visitor};
-use rustc_ast::{self as ast, ItemKind};
-use rustc_data_structures::fx::FxHashMap;
-use rustc_hir::def::Namespace::*;
-use rustc_hir::def::{DefKind, Namespace, Res};
-use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, CRATE_DEF_ID};
-use rustc_hir::TraitCandidate;
-use rustc_middle::ty::{DefIdTree, Visibility};
-use rustc_resolve::{ParentScope, Resolver};
-use rustc_span::symbol::sym;
-use rustc_span::{Symbol, SyntaxContext};
-
-use std::collections::hash_map::Entry;
-use std::mem;
-
-pub(crate) fn early_resolve_intra_doc_links(
- resolver: &mut Resolver<'_>,
- krate: &ast::Crate,
- document_private_items: bool,
-) -> ResolverCaches {
- let parent_scope =
- ParentScope::module(resolver.expect_module(CRATE_DEF_ID.to_def_id()), resolver);
- let mut link_resolver = EarlyDocLinkResolver {
- resolver,
- parent_scope,
- visited_mods: Default::default(),
- markdown_links: Default::default(),
- doc_link_resolutions: Default::default(),
- traits_in_scope: Default::default(),
- all_trait_impls: Default::default(),
- all_macro_rules: Default::default(),
- document_private_items,
- };
-
- // Overridden `visit_item` below doesn't apply to the crate root,
- // so we have to visit its attributes and reexports separately.
- link_resolver.resolve_doc_links_local(&krate.attrs);
- link_resolver.process_module_children_or_reexports(CRATE_DEF_ID.to_def_id());
- visit::walk_crate(&mut link_resolver, krate);
-
- // FIXME: somehow rustdoc is still missing crates even though we loaded all
- // the known necessary crates. Load them all unconditionally until we find a way to fix this.
- // DO NOT REMOVE THIS without first testing on the reproducer in
- // https://github.com/jyn514/objr/commit/edcee7b8124abf0e4c63873e8422ff81beb11ebb
- for (extern_name, _) in
- link_resolver.resolver.sess().opts.externs.iter().filter(|(_, entry)| entry.add_prelude)
- {
- link_resolver.resolver.resolve_rustdoc_path(extern_name, TypeNS, parent_scope);
- }
-
- link_resolver.process_extern_impls();
-
- ResolverCaches {
- markdown_links: Some(link_resolver.markdown_links),
- doc_link_resolutions: link_resolver.doc_link_resolutions,
- traits_in_scope: link_resolver.traits_in_scope,
- all_trait_impls: Some(link_resolver.all_trait_impls),
- all_macro_rules: link_resolver.all_macro_rules,
- }
-}
-
-fn doc_attrs<'a>(attrs: impl Iterator<Item = &'a ast::Attribute>) -> Attributes {
- Attributes::from_ast_iter(attrs.map(|attr| (attr, None)), true)
-}
-
-struct EarlyDocLinkResolver<'r, 'ra> {
- resolver: &'r mut Resolver<'ra>,
- parent_scope: ParentScope<'ra>,
- visited_mods: DefIdSet,
- markdown_links: FxHashMap<String, Vec<PreprocessedMarkdownLink>>,
- doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option<Res<ast::NodeId>>>,
- traits_in_scope: DefIdMap<Vec<TraitCandidate>>,
- all_trait_impls: Vec<DefId>,
- all_macro_rules: FxHashMap<Symbol, Res<ast::NodeId>>,
- document_private_items: bool,
-}
-
-impl<'ra> EarlyDocLinkResolver<'_, 'ra> {
- fn add_traits_in_scope(&mut self, def_id: DefId) {
- // Calls to `traits_in_scope` are expensive, so try to avoid them if only possible.
- // Keys in the `traits_in_scope` cache are always module IDs.
- if let Entry::Vacant(entry) = self.traits_in_scope.entry(def_id) {
- let module = self.resolver.get_nearest_non_block_module(def_id);
- let module_id = module.def_id();
- let entry = if module_id == def_id {
- Some(entry)
- } else if let Entry::Vacant(entry) = self.traits_in_scope.entry(module_id) {
- Some(entry)
- } else {
- None
- };
- if let Some(entry) = entry {
- entry.insert(self.resolver.traits_in_scope(
- None,
- &ParentScope::module(module, self.resolver),
- SyntaxContext::root(),
- None,
- ));
- }
- }
- }
-
- /// Add traits in scope for links in impls collected by the `collect-intra-doc-links` pass.
- /// That pass filters impls using type-based information, but we don't yet have such
- /// information here, so we just conservatively calculate traits in scope for *all* modules
- /// having impls in them.
- fn process_extern_impls(&mut self) {
- // Resolving links in already existing crates may trigger loading of new crates.
- let mut start_cnum = 0;
- loop {
- let crates = Vec::from_iter(self.resolver.cstore().crates_untracked());
- for &cnum in &crates[start_cnum..] {
- let all_trait_impls =
- Vec::from_iter(self.resolver.cstore().trait_impls_in_crate_untracked(cnum));
- let all_inherent_impls =
- Vec::from_iter(self.resolver.cstore().inherent_impls_in_crate_untracked(cnum));
- let all_incoherent_impls = Vec::from_iter(
- self.resolver.cstore().incoherent_impls_in_crate_untracked(cnum),
- );
-
- // Querying traits in scope is expensive so we try to prune the impl lists using
- // privacy, private traits and impls from other crates are never documented in
- // the current crate, and links in their doc comments are not resolved.
- for &(trait_def_id, impl_def_id, simplified_self_ty) in &all_trait_impls {
- if self.resolver.cstore().visibility_untracked(trait_def_id).is_public()
- && simplified_self_ty.and_then(|ty| ty.def()).map_or(true, |ty_def_id| {
- self.resolver.cstore().visibility_untracked(ty_def_id).is_public()
- })
- {
- if self.visited_mods.insert(trait_def_id) {
- self.resolve_doc_links_extern_impl(trait_def_id, false);
- }
- self.resolve_doc_links_extern_impl(impl_def_id, false);
- }
- }
- for (ty_def_id, impl_def_id) in all_inherent_impls {
- if self.resolver.cstore().visibility_untracked(ty_def_id).is_public() {
- self.resolve_doc_links_extern_impl(impl_def_id, true);
- }
- }
- for impl_def_id in all_incoherent_impls {
- self.resolve_doc_links_extern_impl(impl_def_id, true);
- }
-
- self.all_trait_impls
- .extend(all_trait_impls.into_iter().map(|(_, def_id, _)| def_id));
- }
-
- if crates.len() > start_cnum {
- start_cnum = crates.len();
- } else {
- break;
- }
- }
- }
-
- fn resolve_doc_links_extern_impl(&mut self, def_id: DefId, is_inherent: bool) {
- self.resolve_doc_links_extern_outer_fixme(def_id, def_id);
- let assoc_item_def_ids = Vec::from_iter(
- self.resolver.cstore().associated_item_def_ids_untracked(def_id, self.resolver.sess()),
- );
- for assoc_def_id in assoc_item_def_ids {
- if !is_inherent || self.resolver.cstore().visibility_untracked(assoc_def_id).is_public()
- {
- self.resolve_doc_links_extern_outer_fixme(assoc_def_id, def_id);
- }
- }
- }
-
- // FIXME: replace all uses with `resolve_doc_links_extern_outer` to actually resolve links, not
- // just add traits in scope. This may be expensive and require benchmarking and optimization.
- fn resolve_doc_links_extern_outer_fixme(&mut self, def_id: DefId, scope_id: DefId) {
- if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
- return;
- }
- if let Some(parent_id) = self.resolver.opt_parent(scope_id) {
- self.add_traits_in_scope(parent_id);
- }
- }
-
- fn resolve_doc_links_extern_outer(&mut self, def_id: DefId, scope_id: DefId) {
- if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
- return;
- }
- let attrs = Vec::from_iter(
- self.resolver.cstore().item_attrs_untracked(def_id, self.resolver.sess()),
- );
- let parent_scope = ParentScope::module(
- self.resolver.get_nearest_non_block_module(
- self.resolver.opt_parent(scope_id).unwrap_or(scope_id),
- ),
- self.resolver,
- );
- self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope);
- }
-
- fn resolve_doc_links_extern_inner(&mut self, def_id: DefId) {
- if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
- return;
- }
- let attrs = Vec::from_iter(
- self.resolver.cstore().item_attrs_untracked(def_id, self.resolver.sess()),
- );
- let parent_scope = ParentScope::module(self.resolver.expect_module(def_id), self.resolver);
- self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope);
- }
-
- fn resolve_doc_links_local(&mut self, attrs: &[ast::Attribute]) {
- if !attrs.iter().any(|attr| attr.may_have_doc_links()) {
- return;
- }
- self.resolve_doc_links(doc_attrs(attrs.iter()), self.parent_scope);
- }
-
- fn resolve_and_cache(
- &mut self,
- path_str: &str,
- ns: Namespace,
- parent_scope: &ParentScope<'ra>,
- ) -> bool {
- // FIXME: This caching may be incorrect in case of multiple `macro_rules`
- // items with the same name in the same module.
- self.doc_link_resolutions
- .entry((Symbol::intern(path_str), ns, parent_scope.module.def_id()))
- .or_insert_with_key(|(path, ns, _)| {
- self.resolver.resolve_rustdoc_path(path.as_str(), *ns, *parent_scope)
- })
- .is_some()
- }
-
- fn resolve_doc_links(&mut self, attrs: Attributes, parent_scope: ParentScope<'ra>) {
- let mut need_traits_in_scope = false;
- for (doc_module, doc) in attrs.prepare_to_doc_link_resolution() {
- assert_eq!(doc_module, None);
- let mut tmp_links = mem::take(&mut self.markdown_links);
- let links =
- tmp_links.entry(doc).or_insert_with_key(|doc| preprocessed_markdown_links(doc));
- for PreprocessedMarkdownLink(pp_link, _) in links {
- if let Ok(pinfo) = pp_link {
- // The logic here is a conservative approximation for path resolution in
- // `resolve_with_disambiguator`.
- if let Some(ns) = pinfo.disambiguator.map(Disambiguator::ns) {
- if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) {
- continue;
- }
- }
-
- // Resolve all namespaces due to no disambiguator or for diagnostics.
- let mut any_resolved = false;
- let mut need_assoc = false;
- for ns in [TypeNS, ValueNS, MacroNS] {
- if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) {
- any_resolved = true;
- } else if ns != MacroNS {
- need_assoc = true;
- }
- }
-
- // Resolve all prefixes for type-relative resolution or for diagnostics.
- if need_assoc || !any_resolved {
- let mut path = &pinfo.path_str[..];
- while let Some(idx) = path.rfind("::") {
- path = &path[..idx];
- need_traits_in_scope = true;
- for ns in [TypeNS, ValueNS, MacroNS] {
- self.resolve_and_cache(path, ns, &parent_scope);
- }
- }
- }
- }
- }
- self.markdown_links = tmp_links;
- }
-
- if need_traits_in_scope {
- self.add_traits_in_scope(parent_scope.module.def_id());
- }
- }
-
- /// When reexports are inlined, they are replaced with item which they refer to, those items
- /// may have links in their doc comments, those links are resolved at the item definition site,
- /// so we need to know traits in scope at that definition site.
- fn process_module_children_or_reexports(&mut self, module_id: DefId) {
- if !self.visited_mods.insert(module_id) {
- return; // avoid infinite recursion
- }
-
- for child in self.resolver.module_children_or_reexports(module_id) {
- // This condition should give a superset of `denied` from `fn clean_use_statement`.
- if child.vis.is_public()
- || self.document_private_items
- && child.vis != Visibility::Restricted(module_id)
- && module_id.is_local()
- {
- if let Some(def_id) = child.res.opt_def_id() && !def_id.is_local() {
- let scope_id = match child.res {
- Res::Def(
- DefKind::Variant
- | DefKind::AssocTy
- | DefKind::AssocFn
- | DefKind::AssocConst,
- ..,
- ) => self.resolver.parent(def_id),
- _ => def_id,
- };
- self.resolve_doc_links_extern_outer(def_id, scope_id); // Outer attribute scope
- if let Res::Def(DefKind::Mod, ..) = child.res {
- self.resolve_doc_links_extern_inner(def_id); // Inner attribute scope
- }
- if let Res::Def(DefKind::Mod | DefKind::Enum | DefKind::Trait, ..) = child.res {
- self.process_module_children_or_reexports(def_id);
- }
- if let Res::Def(DefKind::Struct | DefKind::Union | DefKind::Variant, _) =
- child.res
- {
- let field_def_ids = Vec::from_iter(
- self.resolver
- .cstore()
- .associated_item_def_ids_untracked(def_id, self.resolver.sess()),
- );
- for field_def_id in field_def_ids {
- self.resolve_doc_links_extern_outer(field_def_id, scope_id);
- }
- }
- }
- }
- }
- }
-}
-
-impl Visitor<'_> for EarlyDocLinkResolver<'_, '_> {
- fn visit_item(&mut self, item: &ast::Item) {
- self.resolve_doc_links_local(&item.attrs); // Outer attribute scope
- if let ItemKind::Mod(..) = item.kind {
- let module_def_id = self.resolver.local_def_id(item.id).to_def_id();
- let module = self.resolver.expect_module(module_def_id);
- let old_module = mem::replace(&mut self.parent_scope.module, module);
- let old_macro_rules = self.parent_scope.macro_rules;
- self.resolve_doc_links_local(&item.attrs); // Inner attribute scope
- self.process_module_children_or_reexports(module_def_id);
- visit::walk_item(self, item);
- if item
- .attrs
- .iter()
- .all(|attr| !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape))
- {
- self.parent_scope.macro_rules = old_macro_rules;
- }
- self.parent_scope.module = old_module;
- } else {
- match &item.kind {
- ItemKind::Impl(box ast::Impl { of_trait: Some(trait_ref), .. }) => {
- if let Some(partial_res) = self.resolver.get_partial_res(trait_ref.ref_id)
- && let Some(res) = partial_res.full_res()
- && let Some(trait_def_id) = res.opt_def_id()
- && !trait_def_id.is_local()
- && self.visited_mods.insert(trait_def_id) {
- self.resolve_doc_links_extern_impl(trait_def_id, false);
- }
- self.all_trait_impls.push(self.resolver.local_def_id(item.id).to_def_id());
- }
- ItemKind::MacroDef(macro_def) if macro_def.macro_rules => {
- let (macro_rules_scope, res) =
- self.resolver.macro_rules_scope(self.resolver.local_def_id(item.id));
- self.parent_scope.macro_rules = macro_rules_scope;
- self.all_macro_rules.insert(item.ident.name, res);
- }
- _ => {}
- }
- visit::walk_item(self, item);
- }
- }
-
- fn visit_assoc_item(&mut self, item: &ast::AssocItem, ctxt: AssocCtxt) {
- self.resolve_doc_links_local(&item.attrs);
- visit::walk_assoc_item(self, item, ctxt)
- }
-
- fn visit_foreign_item(&mut self, item: &ast::ForeignItem) {
- self.resolve_doc_links_local(&item.attrs);
- visit::walk_foreign_item(self, item)
- }
-
- fn visit_variant(&mut self, v: &ast::Variant) {
- self.resolve_doc_links_local(&v.attrs);
- visit::walk_variant(self, v)
- }
-
- fn visit_field_def(&mut self, field: &ast::FieldDef) {
- self.resolve_doc_links_local(&field.attrs);
- visit::walk_field_def(self, field)
- }
-
- fn visit_block(&mut self, block: &ast::Block) {
- let old_macro_rules = self.parent_scope.macro_rules;
- visit::walk_block(self, block);
- self.parent_scope.macro_rules = old_macro_rules;
- }
-
- // NOTE: if doc-comments are ever allowed on other nodes (e.g. function parameters),
- // then this will have to implement other visitor methods too.
-}
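
For context on the traversal that the removed `process_module_children_or_reexports` performed: it walked the children of external modules, resolved doc links for each child, and recursed into modules, enums and traits, with `visited_mods` guarding against cycles introduced by re-exports. Below is a minimal, self-contained sketch of that cycle-safe walk, using a plain `u32` id and std collections instead of rustc's `DefId` and resolver; all names in it are hypothetical stand-ins, not rustdoc's API.

    use std::collections::{HashMap, HashSet};

    // Hypothetical stand-ins for `DefId` and the resolver's child lookup.
    type DefId = u32;

    struct ModuleGraph {
        children: HashMap<DefId, Vec<DefId>>,
    }

    struct Walker<'a> {
        graph: &'a ModuleGraph,
        visited: HashSet<DefId>, // plays the role of `visited_mods`
    }

    impl Walker<'_> {
        fn process(&mut self, module_id: DefId) {
            // Re-exports can form cycles (a module re-exporting an ancestor),
            // so bail out if this module was already visited.
            if !self.visited.insert(module_id) {
                return;
            }
            for &child in self.graph.children.get(&module_id).into_iter().flatten() {
                // ... resolve doc links for `child` here ...
                self.process(child);
            }
        }
    }

    fn main() {
        let graph = ModuleGraph {
            children: HashMap::from([(0, vec![1, 2]), (1, vec![0])]), // 1 re-exports 0
        };
        let mut walker = Walker { graph: &graph, visited: HashSet::new() };
        walker.process(0); // terminates despite the cycle
        assert_eq!(walker.visited.len(), 3);
    }
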
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 79db3c6c3..01ed4a60b 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -7,8 +7,8 @@ use crate::core::DocContext;
use crate::formats::cache::Cache;
use crate::visit::DocVisitor;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LOCAL_CRATE};
use rustc_middle::ty::{self, DefIdTree};
use rustc_span::symbol::sym;
@@ -45,18 +45,20 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
let mut new_items_local = Vec::new();
// External trait impls.
- cx.with_all_trait_impls(|cx, all_trait_impls| {
+ {
let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
- for &impl_def_id in all_trait_impls.iter().skip_while(|def_id| def_id.is_local()) {
- inline::build_impl(cx, None, impl_def_id, None, &mut new_items_external);
+ for &cnum in cx.tcx.crates(()) {
+ for &impl_def_id in cx.tcx.trait_impls_in_crate(cnum) {
+ inline::build_impl(cx, None, impl_def_id, None, &mut new_items_external);
+ }
}
- });
+ }
// Local trait impls.
- cx.with_all_trait_impls(|cx, all_trait_impls| {
+ {
let _prof_timer = cx.tcx.sess.prof.generic_activity("build_local_trait_impls");
let mut attr_buf = Vec::new();
- for &impl_def_id in all_trait_impls.iter().take_while(|def_id| def_id.is_local()) {
+ for &impl_def_id in cx.tcx.trait_impls_in_crate(LOCAL_CRATE) {
let mut parent = Some(cx.tcx.parent(impl_def_id));
while let Some(did) = parent {
attr_buf.extend(
@@ -76,7 +78,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
inline::build_impl(cx, None, impl_def_id, Some(&attr_buf), &mut new_items_local);
attr_buf.clear();
}
- });
+ }
cx.tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
for def_id in PrimitiveType::all_impls(cx.tcx) {
@@ -107,7 +109,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// `Generics`. To avoid relying on the `impl` block, these
// things would need to be created from wholecloth, in a
// form that is valid for use in type inference.
- let ty = tcx.type_of(def_id);
+ let ty = tcx.type_of(def_id).subst_identity();
match ty.kind() {
ty::Slice(ty)
| ty::Ref(_, ty, _)
@@ -126,14 +128,14 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
});
let mut cleaner = BadImplStripper { prims, items: crate_items, cache: &cx.cache };
- let mut type_did_to_deref_target: FxHashMap<DefId, &Type> = FxHashMap::default();
+ let mut type_did_to_deref_target: DefIdMap<&Type> = DefIdMap::default();
// Follow all `Deref` targets of included items and recursively add them as valid
fn add_deref_target(
cx: &DocContext<'_>,
- map: &FxHashMap<DefId, &Type>,
+ map: &DefIdMap<&Type>,
cleaner: &mut BadImplStripper<'_>,
- targets: &mut FxHashSet<DefId>,
+ targets: &mut DefIdSet,
type_did: DefId,
) {
if let Some(target) = map.get(&type_did) {
@@ -154,39 +156,38 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// scan through included items ahead of time to splice in Deref targets to the "valid" sets
for it in new_items_external.iter().chain(new_items_local.iter()) {
- if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind {
- if trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait()
- && cleaner.keep_impl(for_, true)
- {
- let target = items
- .iter()
- .find_map(|item| match *item.kind {
- AssocTypeItem(ref t, _) => Some(&t.type_),
- _ => None,
- })
- .expect("Deref impl without Target type");
+ if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
+ trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait() &&
+ cleaner.keep_impl(for_, true)
+ {
+ let target = items
+ .iter()
+ .find_map(|item| match *item.kind {
+ AssocTypeItem(ref t, _) => Some(&t.type_),
+ _ => None,
+ })
+ .expect("Deref impl without Target type");
- if let Some(prim) = target.primitive_type() {
- cleaner.prims.insert(prim);
- } else if let Some(did) = target.def_id(&cx.cache) {
- cleaner.items.insert(did.into());
- }
- if let Some(for_did) = for_.def_id(&cx.cache) {
- if type_did_to_deref_target.insert(for_did, target).is_none() {
- // Since only the `DefId` portion of the `Type` instances is known to be same for both the
- // `Deref` target type and the impl for type positions, this map of types is keyed by
- // `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
- if cleaner.keep_impl_with_def_id(for_did.into()) {
- let mut targets = FxHashSet::default();
- targets.insert(for_did);
- add_deref_target(
- cx,
- &type_did_to_deref_target,
- &mut cleaner,
- &mut targets,
- for_did,
- );
- }
+ if let Some(prim) = target.primitive_type() {
+ cleaner.prims.insert(prim);
+ } else if let Some(did) = target.def_id(&cx.cache) {
+ cleaner.items.insert(did.into());
+ }
+ if let Some(for_did) = for_.def_id(&cx.cache) {
+ if type_did_to_deref_target.insert(for_did, target).is_none() {
+                // Since only the `DefId` portion of the `Type` instances is known to be the same for both the
+ // `Deref` target type and the impl for type positions, this map of types is keyed by
+ // `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
+ if cleaner.keep_impl_with_def_id(for_did.into()) {
+ let mut targets = DefIdSet::default();
+ targets.insert(for_did);
+ add_deref_target(
+ cx,
+ &type_did_to_deref_target,
+ &mut cleaner,
+ &mut targets,
+ for_did,
+ );
}
}
}
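
The rewritten block above still computes the transitive closure of `Deref` targets: each kept type is mapped to its `Deref::Target`, and `add_deref_target` follows that chain so every reachable target is also kept. A rough standalone sketch of the same closure computation follows, with plain `u32` ids and std collections standing in for `DefIdMap`/`DefIdSet`; the stand-in names are assumptions for illustration, not rustdoc's real types.

    use std::collections::{HashMap, HashSet};

    /// Hypothetical item id, standing in for `DefId`.
    type Id = u32;

    /// Given `type -> Deref target` edges, collect every type transitively reachable
    /// through `Deref`, mirroring the role of `add_deref_target` above.
    fn add_deref_targets(map: &HashMap<Id, Id>, start: Id, keep: &mut HashSet<Id>) {
        let mut seen = HashSet::new(); // guards against `Deref` cycles
        let mut current = start;
        seen.insert(current);
        while let Some(&target) = map.get(&current) {
            if !seen.insert(target) {
                break; // cycle: stop following the chain
            }
            keep.insert(target);
            current = target;
        }
    }

    fn main() {
        // 1 derefs to 2, 2 derefs to 3 (think of a wrapper around `String -> str`).
        let map = HashMap::from([(1, 2), (2, 3)]);
        let mut keep = HashSet::new();
        add_deref_targets(&map, 1, &mut keep);
        assert_eq!(keep, HashSet::from([2, 3]));
    }
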
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
index 7158355ff..26fbb03a4 100644
--- a/src/librustdoc/passes/lint/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -19,8 +19,7 @@ use crate::passes::source_span_for_markdown_range;
pub(crate) fn visit_item(cx: &DocContext<'_>, item: &clean::Item) {
if let Some(dox) = &item.attrs.collapsed_doc_value() {
let sp = item.attr_span(cx.tcx);
- let extra =
- crate::html::markdown::ExtraInfo::new_did(cx.tcx, item.item_id.expect_def_id(), sp);
+ let extra = crate::html::markdown::ExtraInfo::new(cx.tcx, item.item_id.expect_def_id(), sp);
for code_block in markdown::rust_code_blocks(dox, &extra) {
check_rust_syntax(cx, item, dox, code_block);
}
@@ -34,8 +33,10 @@ fn check_rust_syntax(
code_block: RustCodeBlock,
) {
let buffer = Lrc::new(Lock::new(Buffer::default()));
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+ rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
+ false,
+ );
let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
@@ -73,7 +74,6 @@ fn check_rust_syntax(
return;
};
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
let empty_block = code_block.lang_string == Default::default() && code_block.is_fenced;
let is_ignore = code_block.lang_string.ignore != markdown::Ignore::None;
@@ -93,6 +93,7 @@ fn check_rust_syntax(
// Finally build and emit the completed diagnostic.
// All points of divergence have been handled earlier so this can be
// done the same way whether the span is precise or not.
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
cx.tcx.struct_span_lint_hir(crate::lint::INVALID_RUST_CODEBLOCKS, hir_id, sp, msg, |lint| {
let explanation = if is_ignore {
"`ignore` code blocks require valid Rust code for syntax highlighting; \
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 070c0aab5..4f72df5a5 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -113,7 +113,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
if let Some(link) =
link_names.iter().find(|link| *link.original_text == *broken_link.reference)
{
- Some((link.href.as_str().into(), link.new_text.as_str().into()))
+ Some((link.href.as_str().into(), link.new_text.to_string().into()))
} else if matches!(
&broken_link.link_type,
LinkType::Reference | LinkType::ReferenceUnknown
@@ -210,11 +210,9 @@ fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
.take_while(|(_, c)| is_id_start(*c) || is_id_continue(*c))
.reduce(|_accum, item| item)
.and_then(|(new_pos, c)| is_id_start(c).then_some(new_pos));
- if let Some(new_pos) = new_pos {
- if current_pos != new_pos {
- current_pos = new_pos;
- continue;
- }
+ if let Some(new_pos) = new_pos && current_pos != new_pos {
+ current_pos = new_pos;
+ continue;
}
break;
}
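
The `extract_path_backwards` loop shown above walks backwards from a position in the doc text while it keeps seeing identifier-like characters. The following is a rough, ASCII-only sketch of such a backwards scan; it omits the `::` handling and the Unicode identifier classes of the real function, and `ident_start_backwards` is a hypothetical helper, not rustdoc's API.

    /// Scan backwards from byte position `end` and return the start of the longest
    /// identifier ending there, if any.
    fn ident_start_backwards(text: &str, end: usize) -> Option<usize> {
        let bytes = text.as_bytes();
        let mut start = end;
        while start > 0 {
            let c = bytes[start - 1] as char;
            if c.is_ascii_alphanumeric() || c == '_' {
                start -= 1;
            } else {
                break;
            }
        }
        // Identifiers cannot begin with a digit, so skip any leading digits again.
        while start < end && (bytes[start] as char).is_ascii_digit() {
            start += 1;
        }
        (start < end).then_some(start)
    }

    fn main() {
        let s = "see Vec::new";
        assert_eq!(ident_start_backwards(s, s.len()), Some(9)); // "new" starts at byte 9
        assert_eq!(ident_start_backwards(s, 3), Some(0));       // "see"
        assert_eq!(ident_start_backwards(s, 4), None);          // just after a space
    }
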
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
index 634e70ec9..4b1ff68df 100644
--- a/src/librustdoc/passes/mod.rs
+++ b/src/librustdoc/passes/mod.rs
@@ -2,11 +2,12 @@
//! process.
use rustc_middle::ty::TyCtxt;
+use rustc_resolve::rustdoc::DocFragmentKind;
use rustc_span::{InnerSpan, Span, DUMMY_SP};
use std::ops::Range;
use self::Condition::*;
-use crate::clean::{self, DocFragmentKind};
+use crate::clean;
use crate::core::DocContext;
mod stripper;
diff --git a/src/librustdoc/passes/propagate_doc_cfg.rs b/src/librustdoc/passes/propagate_doc_cfg.rs
index de3a4b339..a4bc48690 100644
--- a/src/librustdoc/passes/propagate_doc_cfg.rs
+++ b/src/librustdoc/passes/propagate_doc_cfg.rs
@@ -9,6 +9,7 @@ use crate::fold::DocFolder;
use crate::passes::Pass;
use rustc_hir::def_id::LocalDefId;
+use rustc_middle::ty::DefIdTree;
pub(crate) const PROPAGATE_DOC_CFG: Pass = Pass {
name: "propagate-doc-cfg",
@@ -41,24 +42,22 @@ impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
let Some(def_id) = item.item_id.as_def_id().and_then(|def_id| def_id.as_local())
else { return };
- let hir = self.cx.tcx.hir();
- let hir_id = hir.local_def_id_to_hir_id(def_id);
-
if check_parent {
- let expected_parent = hir.get_parent_item(hir_id);
+ let expected_parent = self.cx.tcx.opt_local_parent(def_id);
// If parents are different, it means that `item` is a reexport and we need
// to compute the actual `cfg` by iterating through its "real" parents.
- if self.parent == Some(expected_parent.def_id) {
+ if self.parent.is_some() && self.parent == expected_parent {
return;
}
}
let mut attrs = Vec::new();
- for (parent_hir_id, _) in hir.parent_iter(hir_id) {
- if let Some(def_id) = hir.opt_local_def_id(parent_hir_id) {
- attrs.extend_from_slice(load_attrs(self.cx, def_id.to_def_id()));
- }
+ let mut next_def_id = def_id;
+ while let Some(parent_def_id) = self.cx.tcx.opt_local_parent(next_def_id) {
+ attrs.extend_from_slice(load_attrs(self.cx, parent_def_id.to_def_id()));
+ next_def_id = parent_def_id;
}
+
let (_, cfg) = merge_attrs(self.cx, None, item.attrs.other_attrs.as_slice(), Some(&attrs));
item.cfg = cfg;
}
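
The new `propagate_doc_cfg` loop climbs the `opt_local_parent` chain and accumulates the ancestors' attributes so their `#[doc(cfg(..))]` can be merged into the item's own `cfg`. A tiny sketch of that parent walk over a hypothetical tree is shown below, with plain `u32` ids and string attributes rather than rustdoc's actual data structures.

    use std::collections::HashMap;

    /// Hypothetical local item id, standing in for `LocalDefId`.
    type Id = u32;

    struct Tree {
        parent: HashMap<Id, Id>,
        attrs: HashMap<Id, Vec<&'static str>>,
    }

    /// Walk from `item` to the crate root, collecting the attributes of every ancestor,
    /// mirroring the `while let Some(parent_def_id) = ...opt_local_parent(..)` loop above.
    fn inherited_attrs(tree: &Tree, mut item: Id) -> Vec<&'static str> {
        let mut collected = Vec::new();
        while let Some(&parent) = tree.parent.get(&item) {
            if let Some(attrs) = tree.attrs.get(&parent) {
                collected.extend_from_slice(attrs);
            }
            item = parent;
        }
        collected
    }

    fn main() {
        // crate root (0) -> module `unix` (1) -> struct `Fd` (2)
        let tree = Tree {
            parent: HashMap::from([(1, 0), (2, 1)]),
            attrs: HashMap::from([(1, vec!["doc(cfg(unix))"])]),
        };
        // `Fd` picks up `doc(cfg(unix))` from its parent module.
        assert_eq!(inherited_attrs(&tree, 2), vec!["doc(cfg(unix))"]);
    }
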
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index e07a788a7..890b3e8d6 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -1,4 +1,6 @@
//! Strip all doc(hidden) items from the output.
+
+use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
use std::mem;
@@ -7,6 +9,7 @@ use crate::clean::{Item, ItemIdSet, NestedAttributesExt};
use crate::core::DocContext;
use crate::fold::{strip_item, DocFolder};
use crate::passes::{ImplStripper, Pass};
+use crate::visit_ast::inherits_doc_hidden;
pub(crate) const STRIP_HIDDEN: Pass = Pass {
name: "strip-hidden",
@@ -21,7 +24,12 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea
// strip all #[doc(hidden)] items
let krate = {
- let mut stripper = Stripper { retained: &mut retained, update_retained: true };
+ let mut stripper = Stripper {
+ retained: &mut retained,
+ update_retained: true,
+ tcx: cx.tcx,
+ is_in_hidden_item: false,
+ };
stripper.fold_crate(krate)
};
@@ -36,40 +44,97 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea
stripper.fold_crate(krate)
}
-struct Stripper<'a> {
+struct Stripper<'a, 'tcx> {
retained: &'a mut ItemIdSet,
update_retained: bool,
+ tcx: TyCtxt<'tcx>,
+ is_in_hidden_item: bool,
}
-impl<'a> DocFolder for Stripper<'a> {
+impl<'a, 'tcx> Stripper<'a, 'tcx> {
+ fn set_is_in_hidden_item_and_fold(&mut self, is_in_hidden_item: bool, i: Item) -> Item {
+ let prev = self.is_in_hidden_item;
+ self.is_in_hidden_item |= is_in_hidden_item;
+ let ret = self.fold_item_recur(i);
+ self.is_in_hidden_item = prev;
+ ret
+ }
+
+    /// In case `i` is a non-hidden impl block or an exported macro, we special-case it by
+    /// resetting `is_in_hidden_item` to `false` because its children inherit its visibility.
+ fn recurse_in_impl_or_exported_macro(&mut self, i: Item) -> Item {
+ let prev = mem::replace(&mut self.is_in_hidden_item, false);
+ let ret = self.fold_item_recur(i);
+ self.is_in_hidden_item = prev;
+ ret
+ }
+}
+
+impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
- if i.attrs.lists(sym::doc).has_word(sym::hidden) {
- debug!("strip_hidden: stripping {:?} {:?}", i.type_(), i.name);
- // Use a dedicated hidden item for fields, variants, and modules.
- // We need to keep private fields and variants, so that the docs
- // can show a placeholder "// some variants omitted". We need to keep
- // private modules, because they can contain impl blocks, and impl
- // block privacy is inherited from the type and trait, not from the
- // module it's defined in. Both of these are marked "stripped," and
- // not included in the final docs, but since they still have an effect
- // on the final doc, cannot be completely removed from the Clean IR.
- match *i.kind {
- clean::StructFieldItem(..) | clean::ModuleItem(..) | clean::VariantItem(..) => {
- // We need to recurse into stripped modules to
- // strip things like impl methods but when doing so
- // we must not add any items to the `retained` set.
- let old = mem::replace(&mut self.update_retained, false);
- let ret = strip_item(self.fold_item_recur(i));
- self.update_retained = old;
- return Some(ret);
- }
- _ => return None,
+ let has_doc_hidden = i.attrs.lists(sym::doc).has_word(sym::hidden);
+ let is_impl_or_exported_macro = match *i.kind {
+ clean::ImplItem(..) => true,
+ // If the macro has the `#[macro_export]` attribute, it means it's accessible at the
+            // crate level, so it should be handled differently.
+ clean::MacroItem(..) => {
+ i.attrs.other_attrs.iter().any(|attr| attr.has_name(sym::macro_export))
}
- } else {
+ _ => false,
+ };
+ let mut is_hidden = has_doc_hidden;
+ if !is_impl_or_exported_macro {
+ is_hidden = self.is_in_hidden_item || has_doc_hidden;
+ if !is_hidden && i.inline_stmt_id.is_none() {
+ // We don't need to check if it's coming from a reexport since the reexport itself was
+ // already checked.
+ is_hidden = i
+ .item_id
+ .as_def_id()
+ .and_then(|def_id| def_id.as_local())
+ .map(|def_id| inherits_doc_hidden(self.tcx, def_id))
+ .unwrap_or(false);
+ }
+ }
+ if !is_hidden {
if self.update_retained {
self.retained.insert(i.item_id);
}
+ return Some(if is_impl_or_exported_macro {
+ self.recurse_in_impl_or_exported_macro(i)
+ } else {
+ self.set_is_in_hidden_item_and_fold(false, i)
+ });
+ }
+ debug!("strip_hidden: stripping {:?} {:?}", i.type_(), i.name);
+ // Use a dedicated hidden item for fields, variants, and modules.
+ // We need to keep private fields and variants, so that the docs
+ // can show a placeholder "// some variants omitted". We need to keep
+ // private modules, because they can contain impl blocks, and impl
+ // block privacy is inherited from the type and trait, not from the
+ // module it's defined in. Both of these are marked "stripped," and
+ // not included in the final docs, but since they still have an effect
+ // on the final doc, cannot be completely removed from the Clean IR.
+ match *i.kind {
+ clean::StructFieldItem(..) | clean::ModuleItem(..) | clean::VariantItem(..) => {
+ // We need to recurse into stripped modules to
+ // strip things like impl methods but when doing so
+ // we must not add any items to the `retained` set.
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = strip_item(self.set_is_in_hidden_item_and_fold(true, i));
+ self.update_retained = old;
+ Some(ret)
+ }
+ _ => {
+ let ret = self.set_is_in_hidden_item_and_fold(true, i);
+ if has_doc_hidden {
+ // If the item itself has `#[doc(hidden)]`, then we simply remove it.
+ None
+ } else {
+                    // However, if it's a "descendant" of a `#[doc(hidden)]` item, then we strip it.
+ Some(strip_item(ret))
+ }
+ }
}
- Some(self.fold_item_recur(i))
}
}
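
To illustrate the flag handling in the new `Stripper`: `is_in_hidden_item` is saved, combined with the current item's hiddenness, used for the recursive fold, and restored afterwards, while impl blocks (and exported macros) reset it because their children inherit the block's own visibility. Here is a simplified, self-contained sketch of that save/combine/restore pattern; the item tree, field names, and counts are hypothetical and much coarser than the real pass.

    /// Hypothetical, simplified item tree.
    struct Item {
        doc_hidden: bool,
        is_impl: bool,
        children: Vec<Item>,
    }

    struct Stripper {
        is_in_hidden_item: bool,
        kept: usize,
    }

    impl Stripper {
        fn fold(&mut self, item: &Item) {
            let hidden_here = item.doc_hidden;
            // Impl blocks (and exported macros in the real pass) ignore the inherited flag:
            // their children are visible unless the block itself is `#[doc(hidden)]`.
            let entering = if item.is_impl {
                hidden_here
            } else {
                self.is_in_hidden_item || hidden_here
            };
            if !entering {
                self.kept += 1;
            }
            let prev = std::mem::replace(&mut self.is_in_hidden_item, entering);
            for child in &item.children {
                self.fold(child);
            }
            self.is_in_hidden_item = prev; // restore on the way out
        }
    }

    fn main() {
        // A hidden module containing an impl block: the impl and its child stay visible.
        let tree = Item {
            doc_hidden: true,
            is_impl: false,
            children: vec![Item {
                doc_hidden: false,
                is_impl: true,
                children: vec![Item { doc_hidden: false, is_impl: false, children: vec![] }],
            }],
        };
        let mut stripper = Stripper { is_in_hidden_item: false, kept: 0 };
        stripper.fold(&tree);
        assert_eq!(stripper.kept, 2); // the impl block and its child survive; the module does not
    }
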
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index 048ed2646..cba55e5fe 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -201,27 +201,25 @@ impl<'a> DocFolder for ImplStripper<'a, '_> {
// Because we don't inline in `maybe_inline_local` if the output format is JSON,
// we need to make a special check for JSON output: we want to keep it unless it has
// a `#[doc(hidden)]` attribute if the `for_` type is exported.
- if let Some(did) = imp.for_.def_id(self.cache) {
- if !imp.for_.is_assoc_ty() && !self.should_keep_impl(&i, did) {
- debug!("ImplStripper: impl item for stripped type; removing");
- return None;
- }
+ if let Some(did) = imp.for_.def_id(self.cache) &&
+ !imp.for_.is_assoc_ty() && !self.should_keep_impl(&i, did)
+ {
+ debug!("ImplStripper: impl item for stripped type; removing");
+ return None;
}
- if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) {
- if !self.should_keep_impl(&i, did) {
- debug!("ImplStripper: impl item for stripped trait; removing");
- return None;
- }
+ if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) &&
+ !self.should_keep_impl(&i, did) {
+ debug!("ImplStripper: impl item for stripped trait; removing");
+ return None;
}
if let Some(generics) = imp.trait_.as_ref().and_then(|t| t.generics()) {
for typaram in generics {
- if let Some(did) = typaram.def_id(self.cache) {
- if !self.should_keep_impl(&i, did) {
- debug!(
- "ImplStripper: stripped item in trait's generics; removing impl"
- );
- return None;
- }
+ if let Some(did) = typaram.def_id(self.cache) && !self.should_keep_impl(&i, did)
+ {
+ debug!(
+ "ImplStripper: stripped item in trait's generics; removing impl"
+ );
+ return None;
}
}
}
diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs
index f2ee99cd9..f28c164d6 100644
--- a/src/librustdoc/scrape_examples.rs
+++ b/src/librustdoc/scrape_examples.rs
@@ -169,7 +169,7 @@ where
};
let ident_span = path.ident.span;
- (tcx.type_of(def_id), call_span, ident_span)
+ (tcx.type_of(def_id).subst_identity(), call_span, ident_span)
}
_ => {
return;
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 22068ebe0..157e042e4 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -1,13 +1,14 @@
//! The Rust AST Visitor. Extracts useful information and massages it into a form
//! usable for `clean`.
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::DefId;
-use rustc_hir::Node;
-use rustc_hir::CRATE_HIR_ID;
-use rustc_middle::ty::TyCtxt;
+use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId, LocalDefIdSet};
+use rustc_hir::intravisit::{walk_item, Visitor};
+use rustc_hir::{Node, CRATE_HIR_ID};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::{DefIdTree, TyCtxt};
use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
@@ -24,19 +25,30 @@ pub(crate) struct Module<'hir> {
pub(crate) name: Symbol,
pub(crate) where_inner: Span,
pub(crate) mods: Vec<Module<'hir>>,
- pub(crate) id: hir::HirId,
- // (item, renamed, import_id)
- pub(crate) items: Vec<(&'hir hir::Item<'hir>, Option<Symbol>, Option<hir::HirId>)>,
+ pub(crate) def_id: LocalDefId,
+    /// The key is the item's `LocalDefId` plus its optional rename; the value is (item, renamed, import_id).
+    /// We use `FxIndexMap` to keep the insertion order.
+ pub(crate) items: FxIndexMap<
+ (LocalDefId, Option<Symbol>),
+ (&'hir hir::Item<'hir>, Option<Symbol>, Option<LocalDefId>),
+ >,
pub(crate) foreigns: Vec<(&'hir hir::ForeignItem<'hir>, Option<Symbol>)>,
}
impl Module<'_> {
- pub(crate) fn new(name: Symbol, id: hir::HirId, where_inner: Span) -> Self {
- Module { name, id, where_inner, mods: Vec::new(), items: Vec::new(), foreigns: Vec::new() }
+ pub(crate) fn new(name: Symbol, def_id: LocalDefId, where_inner: Span) -> Self {
+ Module {
+ name,
+ def_id,
+ where_inner,
+ mods: Vec::new(),
+ items: FxIndexMap::default(),
+ foreigns: Vec::new(),
+ }
}
pub(crate) fn where_outer(&self, tcx: TyCtxt<'_>) -> Span {
- tcx.hir().span(self.id)
+ tcx.def_span(self.def_id)
}
}
@@ -47,39 +59,54 @@ fn def_id_to_path(tcx: TyCtxt<'_>, did: DefId) -> Vec<Symbol> {
std::iter::once(crate_name).chain(relative).collect()
}
-pub(crate) fn inherits_doc_hidden(tcx: TyCtxt<'_>, mut node: hir::HirId) -> bool {
- while let Some(id) = tcx.hir().get_enclosing_scope(node) {
- node = id;
- if tcx.hir().attrs(node).lists(sym::doc).has_word(sym::hidden) {
+pub(crate) fn inherits_doc_hidden(tcx: TyCtxt<'_>, mut def_id: LocalDefId) -> bool {
+ let hir = tcx.hir();
+ while let Some(id) = tcx.opt_local_parent(def_id) {
+ def_id = id;
+ if tcx.is_doc_hidden(def_id.to_def_id()) {
return true;
+ } else if let Some(node) = hir.find_by_def_id(def_id) &&
+ matches!(
+ node,
+ hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(_), .. }),
+ )
+ {
+ // `impl` blocks stand a bit on their own: unless they have `#[doc(hidden)]` directly
+ // on them, they don't inherit it from the parent context.
+ return false;
}
}
false
}
-// Also, is there some reason that this doesn't use the 'visit'
-// framework from syntax?.
-
pub(crate) struct RustdocVisitor<'a, 'tcx> {
cx: &'a mut core::DocContext<'tcx>,
- view_item_stack: FxHashSet<hir::HirId>,
+ view_item_stack: LocalDefIdSet,
inlining: bool,
/// Are the current module and all of its parents public?
inside_public_path: bool,
- exact_paths: FxHashMap<DefId, Vec<Symbol>>,
+ exact_paths: DefIdMap<Vec<Symbol>>,
+ modules: Vec<Module<'tcx>>,
}
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
pub(crate) fn new(cx: &'a mut core::DocContext<'tcx>) -> RustdocVisitor<'a, 'tcx> {
// If the root is re-exported, terminate all recursion.
- let mut stack = FxHashSet::default();
- stack.insert(hir::CRATE_HIR_ID);
+ let mut stack = LocalDefIdSet::default();
+ stack.insert(CRATE_DEF_ID);
+ let om = Module::new(
+ cx.tcx.crate_name(LOCAL_CRATE),
+ CRATE_DEF_ID,
+ cx.tcx.hir().root_module().spans.inner_span,
+ );
+
RustdocVisitor {
cx,
view_item_stack: stack,
inlining: false,
inside_public_path: true,
- exact_paths: FxHashMap::default(),
+ exact_paths: Default::default(),
+ modules: vec![om],
}
}
@@ -89,12 +116,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}
pub(crate) fn visit(mut self) -> Module<'tcx> {
- let mut top_level_module = self.visit_mod_contents(
- hir::CRATE_HIR_ID,
- self.cx.tcx.hir().root_module(),
- self.cx.tcx.crate_name(LOCAL_CRATE),
- None,
- );
+ let root_module = self.cx.tcx.hir().root_module();
+ self.visit_mod_contents(CRATE_DEF_ID, root_module);
+
+ let mut top_level_module = self.modules.pop().unwrap();
// `#[macro_export] macro_rules!` items are reexported at the top level of the
// crate, regardless of where they're defined. We want to document the
@@ -109,15 +134,13 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// macro in the same module.
let mut inserted = FxHashSet::default();
for export in self.cx.tcx.module_reexports(CRATE_DEF_ID).unwrap_or(&[]) {
- if let Res::Def(DefKind::Macro(_), def_id) = export.res {
- if let Some(local_def_id) = def_id.as_local() {
- if self.cx.tcx.has_attr(def_id, sym::macro_export) {
- if inserted.insert(def_id) {
- let item = self.cx.tcx.hir().expect_item(local_def_id);
- top_level_module.items.push((item, None, None));
- }
- }
- }
+ if let Res::Def(DefKind::Macro(_), def_id) = export.res &&
+ let Some(local_def_id) = def_id.as_local() &&
+ self.cx.tcx.has_attr(def_id, sym::macro_export) &&
+ inserted.insert(def_id)
+ {
+ let item = self.cx.tcx.hir().expect_item(local_def_id);
+ top_level_module.items.insert((local_def_id, Some(item.ident.name)), (item, None, None));
}
}
@@ -151,24 +174,22 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
top_level_module
}
- fn visit_mod_contents(
- &mut self,
- id: hir::HirId,
- m: &'tcx hir::Mod<'tcx>,
- name: Symbol,
- parent_id: Option<hir::HirId>,
- ) -> Module<'tcx> {
- let mut om = Module::new(name, id, m.spans.inner_span);
- let def_id = self.cx.tcx.hir().local_def_id(id).to_def_id();
+ /// This method will go through the given module items in two passes:
+ /// 1. The items which are not glob imports/reexports.
+ /// 2. The glob imports/reexports.
+ fn visit_mod_contents(&mut self, def_id: LocalDefId, m: &'tcx hir::Mod<'tcx>) {
+ debug!("Going through module {:?}", m);
// Keep track of if there were any private modules in the path.
let orig_inside_public_path = self.inside_public_path;
- self.inside_public_path &= self.cx.tcx.visibility(def_id).is_public();
+ self.inside_public_path &= self.cx.tcx.local_visibility(def_id).is_public();
+
+        // Reimplementation of `walk_mod` because we need to do it in two passes (see the
+        // explanation above the second loop):
for &i in m.item_ids {
let item = self.cx.tcx.hir().item(i);
- if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
- continue;
+ if !matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
+ self.visit_item(item);
}
- self.visit_item(item, None, &mut om, parent_id);
}
for &i in m.item_ids {
let item = self.cx.tcx.hir().item(i);
@@ -176,11 +197,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// Later passes in rustdoc will de-duplicate by name and kind, so if glob-
// imported items appear last, then they'll be the ones that get discarded.
if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
- self.visit_item(item, None, &mut om, parent_id);
+ self.visit_item(item);
}
}
self.inside_public_path = orig_inside_public_path;
- om
+ debug!("Leaving module {:?}", m);
}
/// Tries to resolve the target of a `pub use` statement and inlines the
@@ -194,11 +215,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
/// Returns `true` if the target has been inlined.
fn maybe_inline_local(
&mut self,
- id: hir::HirId,
+ def_id: LocalDefId,
res: Res,
renamed: Option<Symbol>,
glob: bool,
- om: &mut Module<'tcx>,
please_inline: bool,
) -> bool {
debug!("maybe_inline_local res: {:?}", res);
@@ -208,11 +228,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
}
let tcx = self.cx.tcx;
- let Some(res_did) = res.opt_def_id() else {
+ let Some(ori_res_did) = res.opt_def_id() else {
return false;
};
- let use_attrs = tcx.hir().attrs(id);
+ let use_attrs = tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(def_id));
// Don't inline `doc(hidden)` imports so they can be stripped at a later stage.
let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline)
|| use_attrs.lists(sym::doc).has_word(sym::hidden);
@@ -221,69 +241,82 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// reachable in documentation -- a previously unreachable item can be
// made reachable by cross-crate inlining which we're checking here.
// (this is done here because we need to know this upfront).
- if !res_did.is_local() && !is_no_inline {
- crate::visit_lib::lib_embargo_visit_item(self.cx, res_did);
+ if !ori_res_did.is_local() && !is_no_inline {
+ crate::visit_lib::lib_embargo_visit_item(self.cx, ori_res_did);
return false;
}
- let res_hir_id = match res_did.as_local() {
- Some(n) => tcx.hir().local_def_id_to_hir_id(n),
- None => return false,
+ let Some(res_did) = ori_res_did.as_local() else {
+ return false;
};
let is_private =
- !self.cx.cache.effective_visibilities.is_directly_public(self.cx.tcx, res_did);
- let is_hidden = inherits_doc_hidden(self.cx.tcx, res_hir_id);
+ !self.cx.cache.effective_visibilities.is_directly_public(self.cx.tcx, ori_res_did);
+ let is_hidden = inherits_doc_hidden(self.cx.tcx, res_did);
// Only inline if requested or if the item would otherwise be stripped.
if (!please_inline && !is_private && !is_hidden) || is_no_inline {
return false;
}
- if !self.view_item_stack.insert(res_hir_id) {
+ if !self.view_item_stack.insert(res_did) {
return false;
}
- let ret = match tcx.hir().get(res_hir_id) {
+ let ret = match tcx.hir().get_by_def_id(res_did) {
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for &i in m.item_ids {
let i = self.cx.tcx.hir().item(i);
- self.visit_item(i, None, om, Some(id));
+ self.visit_item_inner(i, None, Some(def_id));
}
self.inlining = prev;
true
}
Node::Item(it) if !glob => {
let prev = mem::replace(&mut self.inlining, true);
- self.visit_item(it, renamed, om, Some(id));
+ self.visit_item_inner(it, renamed, Some(def_id));
self.inlining = prev;
true
}
Node::ForeignItem(it) if !glob => {
let prev = mem::replace(&mut self.inlining, true);
- self.visit_foreign_item(it, renamed, om);
+ self.visit_foreign_item_inner(it, renamed);
self.inlining = prev;
true
}
_ => false,
};
- self.view_item_stack.remove(&res_hir_id);
+ self.view_item_stack.remove(&res_did);
ret
}
- fn visit_item(
+ #[inline]
+ fn add_to_current_mod(
&mut self,
item: &'tcx hir::Item<'_>,
renamed: Option<Symbol>,
- om: &mut Module<'tcx>,
- parent_id: Option<hir::HirId>,
+ parent_id: Option<LocalDefId>,
) {
+ self.modules
+ .last_mut()
+ .unwrap()
+ .items
+ .insert((item.owner_id.def_id, renamed), (item, renamed, parent_id));
+ }
+
+ fn visit_item_inner(
+ &mut self,
+ item: &'tcx hir::Item<'_>,
+ renamed: Option<Symbol>,
+ import_id: Option<LocalDefId>,
+ ) -> bool {
debug!("visiting item {:?}", item);
let name = renamed.unwrap_or(item.ident.name);
+ let tcx = self.cx.tcx;
let def_id = item.owner_id.to_def_id();
- let is_pub = self.cx.tcx.visibility(def_id).is_public();
+ let is_pub = tcx.visibility(def_id).is_public();
if is_pub {
self.store_path(item.owner_id.to_def_id());
@@ -292,8 +325,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
match item.kind {
hir::ItemKind::ForeignMod { items, .. } => {
for item in items {
- let item = self.cx.tcx.hir().foreign_item(item.id);
- self.visit_foreign_item(item, None, om);
+ let item = tcx.hir().foreign_item(item.id);
+ self.visit_foreign_item_inner(item, None);
}
}
// If we're inlining, skip private items or item reexported as "_".
@@ -308,7 +341,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
continue;
}
- let attrs = self.cx.tcx.hir().attrs(item.hir_id());
+ let attrs =
+ tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(item.owner_id.def_id));
// If there was a private module in the current path then don't bother inlining
// anything as it will probably be stripped anyway.
@@ -322,18 +356,17 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
let is_glob = kind == hir::UseKind::Glob;
let ident = if is_glob { None } else { Some(name) };
if self.maybe_inline_local(
- item.hir_id(),
+ item.owner_id.def_id,
res,
ident,
is_glob,
- om,
please_inline,
) {
continue;
}
}
- om.items.push((item, renamed, parent_id))
+ self.add_to_current_mod(item, renamed, import_id);
}
}
hir::ItemKind::Macro(ref macro_def, _) => {
@@ -350,14 +383,14 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
let def_id = item.owner_id.to_def_id();
let is_macro_2_0 = !macro_def.macro_rules;
- let nonexported = !self.cx.tcx.has_attr(def_id, sym::macro_export);
+ let nonexported = !tcx.has_attr(def_id, sym::macro_export);
if is_macro_2_0 || nonexported || self.inlining {
- om.items.push((item, renamed, None));
+ self.add_to_current_mod(item, renamed, import_id);
}
}
hir::ItemKind::Mod(ref m) => {
- om.mods.push(self.visit_mod_contents(item.hir_id(), m, name, parent_id));
+ self.enter_mod(item.owner_id.def_id, m, name);
}
hir::ItemKind::Fn(..)
| hir::ItemKind::ExternCrate(..)
@@ -365,36 +398,98 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
| hir::ItemKind::Struct(..)
| hir::ItemKind::Union(..)
| hir::ItemKind::TyAlias(..)
- | hir::ItemKind::OpaqueTy(..)
+ | hir::ItemKind::OpaqueTy(hir::OpaqueTy {
+ origin: hir::OpaqueTyOrigin::TyAlias, ..
+ })
| hir::ItemKind::Static(..)
| hir::ItemKind::Trait(..)
- | hir::ItemKind::TraitAlias(..) => om.items.push((item, renamed, parent_id)),
+ | hir::ItemKind::TraitAlias(..) => {
+ self.add_to_current_mod(item, renamed, import_id);
+ }
+ hir::ItemKind::OpaqueTy(hir::OpaqueTy {
+ origin: hir::OpaqueTyOrigin::AsyncFn(_) | hir::OpaqueTyOrigin::FnReturn(_),
+ ..
+ }) => {
+ // return-position impl traits are never nameable, and should never be documented.
+ }
hir::ItemKind::Const(..) => {
// Underscore constants do not correspond to a nameable item and
// so are never useful in documentation.
if name != kw::Underscore {
- om.items.push((item, renamed, parent_id));
+ self.add_to_current_mod(item, renamed, import_id);
}
}
hir::ItemKind::Impl(impl_) => {
// Don't duplicate impls when inlining or if it's implementing a trait, we'll pick
// them up regardless of where they're located.
if !self.inlining && impl_.of_trait.is_none() {
- om.items.push((item, None, None));
+ self.add_to_current_mod(item, None, None);
}
}
}
+ true
}
- fn visit_foreign_item(
+ fn visit_foreign_item_inner(
&mut self,
item: &'tcx hir::ForeignItem<'_>,
renamed: Option<Symbol>,
- om: &mut Module<'tcx>,
) {
// If inlining we only want to include public functions.
if !self.inlining || self.cx.tcx.visibility(item.owner_id).is_public() {
- om.foreigns.push((item, renamed));
+ self.modules.last_mut().unwrap().foreigns.push((item, renamed));
+ }
+ }
+
+    /// This method creates a new module, pushes it onto the "modules stack", then calls
+    /// `visit_mod_contents`. Once done, it pops it from the "modules stack" and adds it
+    /// to the list of submodules of the (now topmost) current module.
+ fn enter_mod(&mut self, id: LocalDefId, m: &'tcx hir::Mod<'tcx>, name: Symbol) {
+ self.modules.push(Module::new(name, id, m.spans.inner_span));
+
+ self.visit_mod_contents(id, m);
+
+ let last = self.modules.pop().unwrap();
+ self.modules.last_mut().unwrap().mods.push(last);
+ }
+}
+
+// We need to implement this visitor so it'll go everywhere and retrieve items we're interested
+// in, such as impl blocks in const blocks.
+impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_item(&mut self, i: &'tcx hir::Item<'tcx>) {
+ if self.visit_item_inner(i, None, None) {
+ walk_item(self, i);
}
}
+
+ fn visit_mod(&mut self, _: &hir::Mod<'tcx>, _: Span, _: hir::HirId) {
+ // Handled in `visit_item_inner`
+ }
+
+ fn visit_use(&mut self, _: &hir::UsePath<'tcx>, _: hir::HirId) {
+ // Handled in `visit_item_inner`
+ }
+
+ fn visit_path(&mut self, _: &hir::Path<'tcx>, _: hir::HirId) {
+ // Handled in `visit_item_inner`
+ }
+
+ fn visit_label(&mut self, _: &rustc_ast::Label) {
+ // Unneeded.
+ }
+
+ fn visit_infer(&mut self, _: &hir::InferArg) {
+ // Unneeded.
+ }
+
+ fn visit_lifetime(&mut self, _: &hir::Lifetime) {
+ // Unneeded.
+ }
}
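
One detail worth illustrating from the rewritten `visit_mod_contents`: glob imports are visited after everything else so that the later name-and-kind de-duplication discards the glob-imported copy rather than the explicit definition. Below is a toy sketch of that ordering effect, assuming a simple first-insertion-wins de-duplication; the `Origin` enum and `collect` helper are invented for the example and do not exist in rustdoc.

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Origin {
        Explicit,
        Glob,
    }

    /// Insert items into a name-keyed map, keeping the first occurrence of each name.
    /// Visiting explicit items before glob imports therefore makes the explicit
    /// definition win, mirroring the two loops in `visit_mod_contents`.
    fn collect(items: &[(&str, Origin)]) -> HashMap<String, Origin> {
        let mut out = HashMap::new();
        for &(name, origin) in items {
            out.entry(name.to_string()).or_insert(origin);
        }
        out
    }

    fn main() {
        // `Foo` is both defined locally and pulled in by a glob import.
        let visited_in_order =
            [("Foo", Origin::Explicit), ("Foo", Origin::Glob), ("Bar", Origin::Glob)];
        let out = collect(&visited_in_order);
        assert_eq!(out["Foo"], Origin::Explicit); // the explicit definition is kept
        assert_eq!(out["Bar"], Origin::Glob);
    }
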
diff --git a/src/librustdoc/visit_lib.rs b/src/librustdoc/visit_lib.rs
index e490559b0..fd4f92541 100644
--- a/src/librustdoc/visit_lib.rs
+++ b/src/librustdoc/visit_lib.rs
@@ -1,14 +1,13 @@
use crate::core::DocContext;
-use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def::DefKind;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_middle::ty::TyCtxt;
// FIXME: this may not be exhaustive, but is sufficient for rustdocs current uses
#[derive(Default)]
pub(crate) struct RustdocEffectiveVisibilities {
- extern_public: FxHashSet<DefId>,
+ extern_public: DefIdSet,
}
macro_rules! define_method {
@@ -43,9 +42,9 @@ pub(crate) fn lib_embargo_visit_item(cx: &mut DocContext<'_>, def_id: DefId) {
struct LibEmbargoVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
// Effective visibilities for reachable nodes
- extern_public: &'a mut FxHashSet<DefId>,
+ extern_public: &'a mut DefIdSet,
// Keeps track of already visited modules, in case a module re-exports its parent
- visited_mods: FxHashSet<DefId>,
+ visited_mods: DefIdSet,
}
impl LibEmbargoVisitor<'_, '_> {