author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:25:56 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:25:56 +0000
commit     018c4950b9406055dec02ef0fb52f132e2bb1e2c (patch)
tree       a835ebdf2088ef88fa681f8fad45f09922c1ae9a /src/librustdoc
parent     Adding debian version 1.75.0+dfsg1-5. (diff)
Merging upstream version 1.76.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/librustdoc')
-rw-r--r--  src/librustdoc/Cargo.toml | 2
-rw-r--r--  src/librustdoc/clean/auto_trait.rs | 8
-rw-r--r--  src/librustdoc/clean/blanket_impl.rs | 3
-rw-r--r--  src/librustdoc/clean/cfg.rs | 5
-rw-r--r--  src/librustdoc/clean/cfg/tests.rs | 4
-rw-r--r--  src/librustdoc/clean/inline.rs | 20
-rw-r--r--  src/librustdoc/clean/mod.rs | 198
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs | 6
-rw-r--r--  src/librustdoc/clean/simplify.rs | 3
-rw-r--r--  src/librustdoc/clean/types.rs | 60
-rw-r--r--  src/librustdoc/clean/types/tests.rs | 5
-rw-r--r--  src/librustdoc/clean/utils.rs | 26
-rw-r--r--  src/librustdoc/config.rs | 94
-rw-r--r--  src/librustdoc/core.rs | 33
-rw-r--r--  src/librustdoc/docfs.rs | 1
-rw-r--r--  src/librustdoc/doctest.rs | 44
-rw-r--r--  src/librustdoc/externalfiles.rs | 22
-rw-r--r--  src/librustdoc/formats/cache.rs | 36
-rw-r--r--  src/librustdoc/formats/item_type.rs | 56
-rw-r--r--  src/librustdoc/formats/mod.rs | 15
-rw-r--r--  src/librustdoc/formats/renderer.rs | 8
-rw-r--r--  src/librustdoc/html/escape.rs | 36
-rw-r--r--  src/librustdoc/html/format.rs | 59
-rw-r--r--  src/librustdoc/html/highlight.rs | 47
-rw-r--r--  src/librustdoc/html/highlight/fixtures/dos_line.html | 2
-rw-r--r--  src/librustdoc/html/highlight/fixtures/sample.html | 8
-rw-r--r--  src/librustdoc/html/markdown.rs | 114
-rw-r--r--  src/librustdoc/html/render/context.rs | 21
-rw-r--r--  src/librustdoc/html/render/mod.rs | 89
-rw-r--r--  src/librustdoc/html/render/print_item.rs | 107
-rw-r--r--  src/librustdoc/html/render/search_index.rs | 433
-rw-r--r--  src/librustdoc/html/render/sidebar.rs | 24
-rw-r--r--  src/librustdoc/html/render/span_map.rs | 4
-rw-r--r--  src/librustdoc/html/render/type_layout.rs | 59
-rw-r--r--  src/librustdoc/html/render/write_shared.rs | 33
-rw-r--r--  src/librustdoc/html/sources.rs | 8
-rw-r--r--  src/librustdoc/html/static/css/noscript.css | 6
-rw-r--r--  src/librustdoc/html/static/css/rustdoc.css | 211
-rw-r--r--  src/librustdoc/html/static/js/externs.js | 6
-rw-r--r--  src/librustdoc/html/static/js/main.js | 303
-rw-r--r--  src/librustdoc/html/static/js/search.js | 1327
-rw-r--r--  src/librustdoc/html/static/js/settings.js | 23
-rw-r--r--  src/librustdoc/html/static/js/src-script.js | 50
-rw-r--r--  src/librustdoc/html/static/js/storage.js | 53
-rw-r--r--  src/librustdoc/html/templates/item_union.html | 4
-rw-r--r--  src/librustdoc/html/templates/page.html | 10
-rw-r--r--  src/librustdoc/html/templates/type_layout.html | 4
-rw-r--r--  src/librustdoc/json/conversions.rs | 15
-rw-r--r--  src/librustdoc/json/mod.rs | 6
-rw-r--r--  src/librustdoc/lib.rs | 60
-rw-r--r--  src/librustdoc/lint.rs | 4
-rw-r--r--  src/librustdoc/passes/check_custom_code_classes.rs | 7
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs | 12
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs | 27
-rw-r--r--  src/librustdoc/passes/collect_trait_impls.rs | 14
-rw-r--r--  src/librustdoc/passes/lint/bare_urls.rs | 2
-rw-r--r--  src/librustdoc/passes/lint/check_code_block_syntax.rs | 10
-rw-r--r--  src/librustdoc/passes/lint/html_tags.rs | 8
-rw-r--r--  src/librustdoc/passes/lint/redundant_explicit_links.rs | 4
-rw-r--r--  src/librustdoc/passes/lint/unescaped_backticks.rs | 2
-rw-r--r--  src/librustdoc/passes/stripper.rs | 17
-rw-r--r--  src/librustdoc/scrape_examples.rs | 17
-rw-r--r--  src/librustdoc/theme.rs | 6
-rw-r--r--  src/librustdoc/visit_ast.rs | 32
64 files changed, 2552 insertions(+), 1381 deletions(-)
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
index f3917b978..e13e95ef7 100644
--- a/src/librustdoc/Cargo.toml
+++ b/src/librustdoc/Cargo.toml
@@ -9,7 +9,7 @@ path = "lib.rs"
[dependencies]
arrayvec = { version = "0.7", default-features = false }
askama = { version = "0.12", default-features = false, features = ["config"] }
-itertools = "0.10.1"
+itertools = "0.11"
indexmap = "2"
minifier = "0.3.0"
once_cell = "1.10.0"
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index bdf6a0f6b..e692f4ef7 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -1,9 +1,7 @@
-use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
-use rustc_middle::ty::{self, Region, RegionVid, TypeFoldable};
+use rustc_middle::ty::{Region, RegionVid, TypeFoldable};
use rustc_trait_selection::traits::auto_trait::{self, AutoTraitResult};
-use thin_vec::ThinVec;
use std::fmt::Debug;
@@ -723,7 +721,7 @@ where
fn region_name(region: Region<'_>) -> Option<Symbol> {
match *region {
- ty::ReEarlyBound(r) => Some(r.name),
+ ty::ReEarlyParam(r) => Some(r.name),
_ => None,
}
}
@@ -743,7 +741,7 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for RegionReplacer<'a, 'tcx> {
match *r {
// These are the regions that can be seen in the AST.
ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned().unwrap_or(r),
- ty::ReEarlyBound(_) | ty::ReStatic | ty::ReLateBound(..) | ty::ReError(_) => r,
+ ty::ReEarlyParam(_) | ty::ReStatic | ty::ReBound(..) | ty::ReError(_) => r,
r => bug!("unexpected region: {r:?}"),
}
}
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
index dad2aa406..4da85885d 100644
--- a/src/librustdoc/clean/blanket_impl.rs
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -14,7 +14,6 @@ pub(crate) struct BlanketImplFinder<'a, 'tcx> {
impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
pub(crate) fn get_blanket_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
let cx = &mut self.cx;
- let param_env = cx.tcx.param_env(item_def_id);
let ty = cx.tcx.type_of(item_def_id);
trace!("get_blanket_impls({ty:?})");
@@ -40,7 +39,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
let infcx = cx.tcx.infer_ctxt().build();
let args = infcx.fresh_args_for_item(DUMMY_SP, item_def_id);
let impl_ty = ty.instantiate(infcx.tcx, args);
- let param_env = EarlyBinder::bind(param_env).instantiate(infcx.tcx, args);
+ let param_env = ty::ParamEnv::empty();
let impl_args = infcx.fresh_args_for_item(DUMMY_SP, impl_def_id);
let impl_trait_ref = trait_ref.instantiate(infcx.tcx, impl_args);
diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs
index ab5aec12f..0445daf0d 100644
--- a/src/librustdoc/clean/cfg.rs
+++ b/src/librustdoc/clean/cfg.rs
@@ -164,8 +164,8 @@ impl Cfg {
/// Renders the configuration for human display, as a short HTML description.
pub(crate) fn render_short_html(&self) -> String {
let mut msg = Display(self, Format::ShortHtml).to_string();
- if self.should_capitalize_first_letter() &&
- let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric())
+ if self.should_capitalize_first_letter()
+ && let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric())
{
msg[i..i + 1].make_ascii_uppercase();
}
@@ -516,7 +516,6 @@ impl<'a> fmt::Display for Display<'a> {
(sym::target_arch, Some(arch)) => match arch.as_str() {
"aarch64" => "AArch64",
"arm" => "ARM",
- "asmjs" => "JavaScript",
"loongarch64" => "LoongArch LA64",
"m68k" => "M68k",
"csky" => "CSKY",
diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs
index bb62660e1..20bcf1abf 100644
--- a/src/librustdoc/clean/cfg/tests.rs
+++ b/src/librustdoc/clean/cfg/tests.rs
@@ -1,8 +1,8 @@
use super::*;
-use rustc_ast::{LitKind, MetaItemLit, Path, StrStyle};
+use rustc_ast::{MetaItemLit, Path, StrStyle};
use rustc_span::create_default_session_globals_then;
-use rustc_span::symbol::{kw, Ident, Symbol};
+use rustc_span::symbol::{kw, Ident};
use rustc_span::DUMMY_SP;
use thin_vec::thin_vec;
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index 974ba1e3b..014bcb1a8 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -374,15 +374,17 @@ pub(crate) fn build_impl(
// Only inline impl if the implemented trait is
// reachable in rustdoc generated documentation
- if !did.is_local() && let Some(traitref) = associated_trait {
+ if !did.is_local()
+ && let Some(traitref) = associated_trait
+ {
let did = traitref.def_id;
if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
return;
}
- if let Some(stab) = tcx.lookup_stability(did) &&
- stab.is_unstable() &&
- stab.feature == sym::rustc_private
+ if let Some(stab) = tcx.lookup_stability(did)
+ && stab.is_unstable()
+ && stab.feature == sym::rustc_private
{
return;
}
@@ -514,7 +516,10 @@ pub(crate) fn build_impl(
}
while let Some(ty) = stack.pop() {
- if let Some(did) = ty.def_id(&cx.cache) && !document_hidden && tcx.is_doc_hidden(did) {
+ if let Some(did) = ty.def_id(&cx.cache)
+ && !document_hidden
+ && tcx.is_doc_hidden(did)
+ {
return;
}
if let Some(generics) = ty.generics() {
@@ -580,7 +585,8 @@ fn build_module_items(
let res = item.res.expect_non_local();
if let Some(def_id) = res.opt_def_id()
&& let Some(allowed_def_ids) = allowed_def_ids
- && !allowed_def_ids.contains(&def_id) {
+ && !allowed_def_ids.contains(&def_id)
+ {
continue;
}
if let Some(def_id) = res.mod_def_id() {
@@ -634,7 +640,7 @@ fn build_module_items(
pub(crate) fn print_inlined_const(tcx: TyCtxt<'_>, did: DefId) -> String {
if let Some(did) = did.as_local() {
- let hir_id = tcx.hir().local_def_id_to_hir_id(did);
+ let hir_id = tcx.local_def_id_to_hir_id(did);
rustc_hir_pretty::id_to_string(&tcx.hir(), hir_id)
} else {
tcx.rendered_const(did).clone()
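Many hunks in this file, and throughout the rest of the patch, are pure reformatting of `if` conditions that chain a boolean test with `let` bindings: the new rustfmt style puts each `&&`-joined condition on its own line and moves the opening brace to its own line once the chain wraps. A minimal standalone sketch of the syntax being reformatted (not part of the patch; let-chains are nightly-only outside the compiler, so the feature gate below is an assumption for illustration):

    #![feature(let_chains)] // nightly-only; shown purely to illustrate the style

    fn describe(name: Option<&str>, hidden: bool) -> &'static str {
        // Post-patch style: one `&&`-joined condition per line,
        // opening brace on its own line once the chain wraps.
        if let Some(name) = name
            && !hidden
            && !name.is_empty()
        {
            "documented"
        } else {
            "skipped"
        }
    }

    fn main() {
        assert_eq!(describe(Some("clean"), false), "documented");
        assert_eq!(describe(None, false), "skipped");
    }

The behaviour is unchanged in all of these hunks; only the line-breaking differs.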
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 1b7ca7bf7..75f9560f5 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -26,7 +26,7 @@ use rustc_middle::middle::resolve_bound_vars as rbv;
use rustc_middle::ty::fold::TypeFolder;
use rustc_middle::ty::GenericArgsRef;
use rustc_middle::ty::TypeVisitableExt;
-use rustc_middle::ty::{self, AdtKind, EarlyBinder, Ty, TyCtxt};
+use rustc_middle::ty::{self, AdtKind, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{AstPass, MacroKind};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -54,7 +54,9 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
let mut inserted = FxHashSet::default();
items.extend(doc.foreigns.iter().map(|(item, renamed)| {
let item = clean_maybe_renamed_foreign_item(cx, item, *renamed);
- if let Some(name) = item.name && (cx.render_options.document_hidden || !item.is_doc_hidden()) {
+ if let Some(name) = item.name
+ && (cx.render_options.document_hidden || !item.is_doc_hidden())
+ {
inserted.insert((item.type_(), name));
}
item
@@ -85,7 +87,9 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<
}
let v = clean_maybe_renamed_item(cx, item, *renamed, *import_id);
for item in &v {
- if let Some(name) = item.name && (cx.render_options.document_hidden || !item.is_doc_hidden()) {
+ if let Some(name) = item.name
+ && (cx.render_options.document_hidden || !item.is_doc_hidden())
+ {
inserted.insert((item.type_(), name));
}
}
@@ -180,22 +184,6 @@ fn clean_generic_bound<'tcx>(
) -> Option<GenericBound> {
Some(match *bound {
hir::GenericBound::Outlives(lt) => GenericBound::Outlives(clean_lifetime(lt, cx)),
- hir::GenericBound::LangItemTrait(lang_item, span, _, generic_args) => {
- let def_id = cx.tcx.require_lang_item(lang_item, Some(span));
-
- let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(cx.tcx, def_id));
-
- let generic_args = clean_generic_args(generic_args, cx);
- let GenericArgs::AngleBracketed { bindings, .. } = generic_args else {
- bug!("clean: parenthesized `GenericBound::LangItemTrait`");
- };
-
- let trait_ = clean_trait_ref_with_bindings(cx, trait_ref, bindings);
- GenericBound::TraitBound(
- PolyTrait { trait_, generic_params: vec![] },
- hir::TraitBoundModifier::None,
- )
- }
hir::GenericBound::Trait(ref t, modifier) => {
// `T: ~const Destruct` is hidden because `T: Destruct` is a no-op.
if modifier == hir::TraitBoundModifier::MaybeConst
@@ -286,12 +274,10 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Life
match *region {
ty::ReStatic => Some(Lifetime::statik()),
_ if !region.has_name() => None,
- ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
- Some(Lifetime(name))
- }
- ty::ReEarlyBound(ref data) => Some(Lifetime(data.name)),
- ty::ReLateBound(..)
- | ty::ReFree(..)
+ ty::ReBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => Some(Lifetime(name)),
+ ty::ReEarlyParam(ref data) => Some(Lifetime(data.name)),
+ ty::ReBound(..)
+ | ty::ReLateParam(..)
| ty::ReVar(..)
| ty::ReError(_)
| ty::RePlaceholder(..)
@@ -593,13 +579,13 @@ fn clean_generic_param<'tcx>(
},
)
}
- hir::GenericParamKind::Const { ty, default } => (
+ hir::GenericParamKind::Const { ty, default, is_host_effect } => (
param.name.ident().name,
GenericParamDefKind::Const {
ty: Box::new(clean_ty(ty, cx)),
default: default
.map(|ct| Box::new(ty::Const::from_anon_const(cx.tcx, ct.def_id).to_string())),
- is_host_effect: cx.tcx.has_attr(param.def_id, sym::rustc_host),
+ is_host_effect,
},
),
};
@@ -741,7 +727,7 @@ pub(crate) fn clean_generics<'tcx>(
.into_iter()
.map(|(lifetime, bounds)| WherePredicate::RegionPredicate { lifetime, bounds }),
)
- .chain(eq_predicates.into_iter())
+ .chain(eq_predicates)
.collect(),
}
}
@@ -819,12 +805,7 @@ fn clean_ty_generics<'tcx>(
{
let pred = clean_predicate(*pred, cx)?;
- bounds.extend(
- pred.get_bounds()
- .into_iter()
- .flatten()
- .cloned()
- );
+ bounds.extend(pred.get_bounds().into_iter().flatten().cloned());
if let Some(proj) = projection
&& let lhs = clean_projection(proj.map_bound(|p| p.projection_ty), cx, None)
@@ -991,10 +972,8 @@ fn clean_proc_macro<'tcx>(
cx: &mut DocContext<'tcx>,
) -> ItemKind {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- if kind == MacroKind::Derive &&
- let Some(derive_name) = attrs
- .lists(sym::proc_macro_derive)
- .find_map(|mi| mi.ident())
+ if kind == MacroKind::Derive
+ && let Some(derive_name) = attrs.lists(sym::proc_macro_derive).find_map(|mi| mi.ident())
{
*name = derive_name.name;
}
@@ -1156,7 +1135,9 @@ fn clean_fn_decl_with_args<'tcx>(
hir::FnRetTy::Return(typ) => clean_ty(typ, cx),
hir::FnRetTy::DefaultReturn(..) => Type::Tuple(Vec::new()),
};
- if let Some(header) = header && header.is_async() {
+ if let Some(header) = header
+ && header.is_async()
+ {
output = output.sugared_async_return_type();
}
FnDecl { inputs: args, output, c_variadic: decl.c_variadic }
@@ -1599,7 +1580,6 @@ fn first_non_private<'tcx>(
// Absolute paths are not. We start from the parent of the item.
[.., parent, leaf] => (parent.res.opt_def_id()?.as_local()?, leaf.ident),
};
- let hir = cx.tcx.hir();
// First we try to get the `DefId` of the item.
for child in
cx.tcx.module_children_local(parent_def_id).iter().filter(move |c| c.ident == ident)
@@ -1608,14 +1588,17 @@ fn first_non_private<'tcx>(
continue;
}
- if let Some(def_id) = child.res.opt_def_id() && target_def_id == def_id {
+ if let Some(def_id) = child.res.opt_def_id()
+ && target_def_id == def_id
+ {
let mut last_path_res = None;
'reexps: for reexp in child.reexport_chain.iter() {
- if let Some(use_def_id) = reexp.id() &&
- let Some(local_use_def_id) = use_def_id.as_local() &&
- let Some(hir::Node::Item(item)) = hir.find_by_def_id(local_use_def_id) &&
- !item.ident.name.is_empty() &&
- let hir::ItemKind::Use(path, _) = item.kind
+ if let Some(use_def_id) = reexp.id()
+ && let Some(local_use_def_id) = use_def_id.as_local()
+ && let Some(hir::Node::Item(item)) =
+ cx.tcx.opt_hir_node_by_def_id(local_use_def_id)
+ && !item.ident.name.is_empty()
+ && let hir::ItemKind::Use(path, _) = item.kind
{
for res in &path.res {
if let Res::Def(DefKind::Ctor(..), _) | Res::SelfCtor(..) = res {
@@ -1626,7 +1609,8 @@ fn first_non_private<'tcx>(
// We never check for "cx.render_options.document_private"
// because if a re-export is not fully public, it's never
// documented.
- cx.tcx.local_visibility(local_use_def_id).is_public() {
+ cx.tcx.local_visibility(local_use_def_id).is_public()
+ {
break 'reexps;
}
last_path_res = Some((path, res));
@@ -1641,7 +1625,12 @@ fn first_non_private<'tcx>(
// 1. We found a public reexport.
// 2. We didn't find a public reexport so it's the "end type" path.
if let Some((new_path, _)) = last_path_res {
- return Some(first_non_private_clean_path(cx, path, new_path.segments, new_path.span));
+ return Some(first_non_private_clean_path(
+ cx,
+ path,
+ new_path.segments,
+ new_path.span,
+ ));
}
// If `last_path_res` is `None`, it can mean two things:
//
@@ -1816,11 +1805,8 @@ fn maybe_expand_private_type_alias<'tcx>(
}
_ => None,
});
- if let Some(ct) = const_ {
- args.insert(
- param.def_id.to_def_id(),
- SubstParam::Constant(clean_const(ct, cx)),
- );
+ if let Some(_) = const_ {
+ args.insert(param.def_id.to_def_id(), SubstParam::Constant);
}
// FIXME(const_generics_defaults)
indices.consts += 1;
@@ -1930,15 +1916,13 @@ fn clean_trait_object_lifetime_bound<'tcx>(
// latter contrary to `clean_middle_region`.
match *region {
ty::ReStatic => Some(Lifetime::statik()),
- ty::ReEarlyBound(region) if region.name != kw::Empty => Some(Lifetime(region.name)),
- ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. })
- if name != kw::Empty =>
- {
+ ty::ReEarlyParam(region) if region.name != kw::Empty => Some(Lifetime(region.name)),
+ ty::ReBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) if name != kw::Empty => {
Some(Lifetime(name))
}
- ty::ReEarlyBound(_)
- | ty::ReLateBound(..)
- | ty::ReFree(_)
+ ty::ReEarlyParam(_)
+ | ty::ReBound(..)
+ | ty::ReLateParam(_)
| ty::ReVar(_)
| ty::RePlaceholder(_)
| ty::ReErased
@@ -2308,7 +2292,9 @@ fn clean_middle_opaque_bounds<'tcx>(
_ => return None,
};
- if let Some(sized) = cx.tcx.lang_items().sized_trait() && trait_ref.def_id() == sized {
+ if let Some(sized) = cx.tcx.lang_items().sized_trait()
+ && trait_ref.def_id() == sized
+ {
has_sized = true;
return None;
}
@@ -2493,8 +2479,8 @@ fn clean_variant_data<'tcx>(
.map(|disr| Discriminant { expr: Some(disr.body), value: disr.def_id.to_def_id() });
let kind = match variant {
- hir::VariantData::Struct(..) => VariantKind::Struct(VariantStruct {
- fields: variant.fields().iter().map(|x| clean_field(x, cx)).collect(),
+ hir::VariantData::Struct { fields, .. } => VariantKind::Struct(VariantStruct {
+ fields: fields.iter().map(|x| clean_field(x, cx)).collect(),
}),
hir::VariantData::Tuple(..) => {
VariantKind::Tuple(variant.fields().iter().map(|x| clean_field(x, cx)).collect())
@@ -2534,11 +2520,12 @@ fn clean_generic_args<'tcx>(
}
hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()),
hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)),
- // Checking for `#[rustc_host]` on the `AnonConst` not only accounts for the case
+ // Checking for `is_desugared_from_effects` on the `AnonConst` not only accounts for the case
// where the argument is `host` but for all possible cases (e.g., `true`, `false`).
- hir::GenericArg::Const(ct)
- if cx.tcx.has_attr(ct.value.def_id, sym::rustc_host) =>
- {
+ hir::GenericArg::Const(hir::ConstArg {
+ is_desugared_from_effects: true,
+ ..
+ }) => {
return None;
}
hir::GenericArg::Const(ct) => GenericArg::Const(Box::new(clean_const(ct, cx))),
@@ -2643,6 +2630,40 @@ fn filter_tokens_from_list(
tokens
}
+fn filter_doc_attr_ident(ident: Symbol, is_inline: bool) -> bool {
+ if is_inline {
+ ident == sym::hidden || ident == sym::inline || ident == sym::no_inline
+ } else {
+ ident == sym::cfg
+ }
+}
+
+/// Remove attributes from `normal` that should not be inherited by `use` re-export.
+/// Before calling this function, make sure `normal` is a `#[doc]` attribute.
+fn filter_doc_attr(normal: &mut ast::NormalAttr, is_inline: bool) {
+ match normal.item.args {
+ ast::AttrArgs::Delimited(ref mut args) => {
+ let tokens = filter_tokens_from_list(&args.tokens, |token| {
+ !matches!(
+ token,
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(
+ ident,
+ _,
+ ),
+ ..
+ },
+ _,
+ ) if filter_doc_attr_ident(*ident, is_inline),
+ )
+ });
+ args.tokens = TokenStream::new(tokens);
+ }
+ ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {}
+ }
+}
+
/// When inlining items, we merge their attributes (and all the reexports attributes too) with the
/// final reexport. For example:
///
@@ -2669,13 +2690,6 @@ fn add_without_unwanted_attributes<'hir>(
is_inline: bool,
import_parent: Option<DefId>,
) {
- // If it's not `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything.
- if !is_inline {
- for attr in new_attrs {
- attrs.push((Cow::Borrowed(attr), import_parent));
- }
- return;
- }
for attr in new_attrs {
if matches!(attr.kind, ast::AttrKind::DocComment(..)) {
attrs.push((Cow::Borrowed(attr), import_parent));
@@ -2684,34 +2698,14 @@ fn add_without_unwanted_attributes<'hir>(
let mut attr = attr.clone();
match attr.kind {
ast::AttrKind::Normal(ref mut normal) => {
- if let [ident] = &*normal.item.path.segments &&
- let ident = ident.ident.name &&
- ident == sym::doc
- {
- match normal.item.args {
- ast::AttrArgs::Delimited(ref mut args) => {
- let tokens =
- filter_tokens_from_list(&args.tokens, |token| {
- !matches!(
- token,
- TokenTree::Token(
- Token {
- kind: TokenKind::Ident(
- sym::hidden | sym::inline | sym::no_inline,
- _,
- ),
- ..
- },
- _,
- ),
- )
- });
- args.tokens = TokenStream::new(tokens);
- attrs.push((Cow::Owned(attr), import_parent));
- }
- ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
- attrs.push((Cow::Owned(attr), import_parent));
- }
+ if let [ident] = &*normal.item.path.segments {
+ let ident = ident.ident.name;
+ if ident == sym::doc {
+ filter_doc_attr(normal, is_inline);
+ attrs.push((Cow::Owned(attr), import_parent));
+ } else if ident != sym::cfg {
+ // If it's not a `cfg()` attribute, we keep it.
+ attrs.push((Cow::Owned(attr), import_parent));
}
}
}
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index 66d10f236..605f9e496 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -40,7 +40,7 @@ pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Stri
printer.zerobreak();
printer.ibox(0);
match matcher {
- TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
+ TokenTree::Delimited(_span, _spacing, _delim, tts) => print_tts(&mut printer, tts),
// Matcher which is not a Delimited is unexpected and should've failed
// to compile, but we render whatever it is wrapped in parens.
TokenTree::Token(..) => print_tt(&mut printer, matcher),
@@ -97,7 +97,7 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
printer.hardbreak()
}
}
- TokenTree::Delimited(_span, delim, tts) => {
+ TokenTree::Delimited(_span, _spacing, delim, tts) => {
let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
printer.word(open_delim);
if !tts.is_empty() {
@@ -158,7 +158,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(_, token::Pound) => (true, Pound),
(_, _) => (true, Other),
},
- TokenTree::Delimited(_, delim, _) => match (state, delim) {
+ TokenTree::Delimited(.., delim, _) => match (state, delim) {
(Dollar, Delimiter::Parenthesis) => (false, DollarParen),
(Pound | PoundBang, Delimiter::Bracket) => (false, Other),
(Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs
index 627f15e67..c35fb9ec7 100644
--- a/src/librustdoc/clean/simplify.rs
+++ b/src/librustdoc/clean/simplify.rs
@@ -146,7 +146,8 @@ pub(crate) fn move_bounds_to_generic_parameters(generics: &mut clean::Generics)
}) = generics.params.iter_mut().find(|param| &param.name == arg)
{
param_bounds.extend(bounds.drain(..));
- } else if let WherePredicate::RegionPredicate { lifetime: Lifetime(arg), bounds } = &mut pred
+ } else if let WherePredicate::RegionPredicate { lifetime: Lifetime(arg), bounds } =
+ &mut pred
&& let Some(GenericParamDef {
kind: GenericParamDefKind::Lifetime { outlives: param_bounds },
..
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 88ee4e3a2..150625c6d 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -31,7 +31,7 @@ use rustc_resolve::rustdoc::{
use rustc_session::Session;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{self, FileName, Loc, DUMMY_SP};
+use rustc_span::{FileName, Loc, DUMMY_SP};
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi::Abi;
@@ -345,7 +345,7 @@ pub(crate) fn rustc_span(def_id: DefId, tcx: TyCtxt<'_>) -> Span {
|| tcx.def_span(def_id),
|local| {
let hir = tcx.hir();
- hir.span_with_body(hir.local_def_id_to_hir_id(local))
+ hir.span_with_body(tcx.local_def_id_to_hir_id(local))
},
))
}
@@ -498,7 +498,7 @@ impl Item {
}
pub(crate) fn is_crate(&self) -> bool {
- self.is_mod() && self.def_id().map_or(false, |did| did.is_crate_root())
+ self.is_mod() && self.def_id().is_some_and(|did| did.is_crate_root())
}
pub(crate) fn is_mod(&self) -> bool {
self.type_() == ItemType::Module
@@ -1269,8 +1269,8 @@ impl GenericBound {
pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
use rustc_hir::TraitBoundModifier as TBM;
- if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self &&
- Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait()
+ if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self
+ && Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait()
{
return true;
}
@@ -1623,7 +1623,7 @@ impl Type {
/// functions.
pub(crate) fn sugared_async_return_type(self) -> Type {
if let Type::ImplTrait(mut v) = self
- && let Some(GenericBound::TraitBound(PolyTrait { mut trait_, .. }, _ )) = v.pop()
+ && let Some(GenericBound::TraitBound(PolyTrait { mut trait_, .. }, _)) = v.pop()
&& let Some(segment) = trait_.segments.pop()
&& let GenericArgs::AngleBracketed { mut bindings, .. } = segment.args
&& let Some(binding) = bindings.pop()
@@ -1651,6 +1651,13 @@ impl Type {
}
}
+ pub(crate) fn generic_args(&self) -> Option<&GenericArgs> {
+ match self {
+ Type::Path { path, .. } => path.generic_args(),
+ _ => None,
+ }
+ }
+
pub(crate) fn generics(&self) -> Option<Vec<&Type>> {
match self {
Type::Path { path, .. } => path.generics(),
@@ -2191,6 +2198,10 @@ impl Path {
}
}
+ pub(crate) fn generic_args(&self) -> Option<&GenericArgs> {
+ self.segments.last().map(|seg| &seg.args)
+ }
+
pub(crate) fn generics(&self) -> Option<Vec<&Type>> {
self.segments.last().and_then(|seg| {
if let GenericArgs::AngleBracketed { ref args, .. } = seg.args {
@@ -2232,6 +2243,39 @@ impl GenericArgs {
GenericArgs::Parenthesized { inputs, output } => inputs.is_empty() && output.is_none(),
}
}
+ pub(crate) fn bindings<'a>(&'a self) -> Box<dyn Iterator<Item = TypeBinding> + 'a> {
+ match self {
+ GenericArgs::AngleBracketed { bindings, .. } => Box::new(bindings.iter().cloned()),
+ GenericArgs::Parenthesized { output, .. } => Box::new(
+ output
+ .as_ref()
+ .map(|ty| TypeBinding {
+ assoc: PathSegment {
+ name: sym::Output,
+ args: GenericArgs::AngleBracketed {
+ args: Vec::new().into_boxed_slice(),
+ bindings: ThinVec::new(),
+ },
+ },
+ kind: TypeBindingKind::Equality { term: Term::Type((**ty).clone()) },
+ })
+ .into_iter(),
+ ),
+ }
+ }
+}
+
+impl<'a> IntoIterator for &'a GenericArgs {
+ type IntoIter = Box<dyn Iterator<Item = GenericArg> + 'a>;
+ type Item = GenericArg;
+ fn into_iter(self) -> Self::IntoIter {
+ match self {
+ GenericArgs::AngleBracketed { args, .. } => Box::new(args.iter().cloned()),
+ GenericArgs::Parenthesized { inputs, .. } => {
+ Box::new(inputs.iter().cloned().map(GenericArg::Type))
+ }
+ }
+ }
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
@@ -2443,7 +2487,7 @@ impl Import {
}
pub(crate) fn imported_item_is_doc_hidden(&self, tcx: TyCtxt<'_>) -> bool {
- self.source.did.map_or(false, |did| tcx.is_doc_hidden(did))
+ self.source.did.is_some_and(|did| tcx.is_doc_hidden(did))
}
}
@@ -2502,7 +2546,7 @@ pub(crate) enum TypeBindingKind {
pub(crate) enum SubstParam {
Type(Type),
Lifetime(Lifetime),
- Constant(Constant),
+ Constant,
}
impl SubstParam {
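The new `GenericArgs::bindings` accessor and the `IntoIterator` impl added above both return a `Box<dyn Iterator<…>>` so that the two enum variants, which produce differently typed concrete iterators, can share one signature. A simplified standalone sketch of that pattern, with stand-in types rather than rustdoc's own:

    enum Args {
        AngleBracketed { args: Vec<u32> },
        Parenthesized { inputs: Vec<u32>, output: Option<u32> },
    }

    impl Args {
        // Each variant yields a different concrete iterator type, so both are
        // unified behind a boxed trait object, as the rustdoc impl does.
        fn values<'a>(&'a self) -> Box<dyn Iterator<Item = u32> + 'a> {
            match self {
                Args::AngleBracketed { args } => Box::new(args.iter().copied()),
                Args::Parenthesized { inputs, output } => {
                    Box::new(inputs.iter().copied().chain(output.iter().copied()))
                }
            }
        }
    }

    fn main() {
        let a = Args::Parenthesized { inputs: vec![1, 2], output: Some(3) };
        assert_eq!(a.values().collect::<Vec<_>>(), vec![1, 2, 3]);
    }

The cost is one allocation per call, which is acceptable here because callers only walk the arguments once.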
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
index ee7c0068e..4befce071 100644
--- a/src/librustdoc/clean/types/tests.rs
+++ b/src/librustdoc/clean/types/tests.rs
@@ -1,8 +1,7 @@
use super::*;
-use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind};
-use rustc_span::symbol::Symbol;
-use rustc_span::{create_default_session_globals_then, DUMMY_SP};
+use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragmentKind};
+use rustc_span::create_default_session_globals_then;
fn create_doc_fragment(s: &str) -> Vec<DocFragment> {
vec![DocFragment {
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index 9ff00c194..bdfda07be 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -303,7 +303,8 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
debug!("trying to get a name from pattern: {p:?}");
Symbol::intern(&match p.kind {
- PatKind::Wild | PatKind::Struct(..) => return kw::Underscore,
+ // FIXME(never_patterns): does this make sense?
+ PatKind::Wild | PatKind::Never | PatKind::Struct(..) => return kw::Underscore,
PatKind::Binding(_, _, ident, _) => return ident.name,
PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
PatKind::Or(pats) => {
@@ -438,13 +439,13 @@ fn print_const_with_custom_print_scalar<'tcx>(
}
pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
- if let hir::Node::Expr(expr) = tcx.hir().get(hir_id) {
+ if let hir::Node::Expr(expr) = tcx.hir_node(hir_id) {
if let hir::ExprKind::Lit(_) = &expr.kind {
return true;
}
- if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind &&
- let hir::ExprKind::Lit(_) = &expr.kind
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind
+ && let hir::ExprKind::Lit(_) = &expr.kind
{
return true;
}
@@ -573,9 +574,8 @@ pub(crate) fn find_nearest_parent_module(tcx: TyCtxt<'_>, def_id: DefId) -> Opti
/// This function exists because it runs on `hir::Attributes` whereas the other is a
/// `clean::Attributes` method.
pub(crate) fn has_doc_flag(tcx: TyCtxt<'_>, did: DefId, flag: Symbol) -> bool {
- tcx.get_attrs(did, sym::doc).any(|attr| {
- attr.meta_item_list().map_or(false, |l| rustc_attr::list_contains_name(&l, flag))
- })
+ tcx.get_attrs(did, sym::doc)
+ .any(|attr| attr.meta_item_list().is_some_and(|l| rustc_attr::list_contains_name(&l, flag)))
}
/// A link to `doc.rust-lang.org` that includes the channel name. Use this instead of manual links
@@ -641,19 +641,17 @@ pub(crate) fn inherits_doc_hidden(
mut def_id: LocalDefId,
stop_at: Option<LocalDefId>,
) -> bool {
- let hir = tcx.hir();
while let Some(id) = tcx.opt_local_parent(def_id) {
- if let Some(stop_at) = stop_at && id == stop_at {
+ if let Some(stop_at) = stop_at
+ && id == stop_at
+ {
return false;
}
def_id = id;
if tcx.is_doc_hidden(def_id.to_def_id()) {
return true;
- } else if let Some(node) = hir.find_by_def_id(def_id) &&
- matches!(
- node,
- hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(_), .. }),
- )
+ } else if let Some(node) = tcx.opt_hir_node_by_def_id(def_id)
+ && matches!(node, hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(_), .. }),)
{
// `impl` blocks stand a bit on their own: unless they have `#[doc(hidden)]` directly
// on them, they don't inherit it from the parent context.
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index 99aa97902..fb09d399b 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -15,11 +15,11 @@ use rustc_session::config::{
use rustc_session::getopts;
use rustc_session::lint::Level;
use rustc_session::search_paths::SearchPath;
-use rustc_session::EarlyErrorHandler;
+use rustc_session::EarlyDiagCtxt;
use rustc_span::edition::Edition;
use rustc_target::spec::TargetTriple;
-use crate::core::new_handler;
+use crate::core::new_dcx;
use crate::externalfiles::ExternalHtml;
use crate::html;
use crate::html::markdown::IdMap;
@@ -320,38 +320,38 @@ impl Options {
/// Parses the given command-line for options. If an error message or other early-return has
/// been printed, returns `Err` with the exit code.
pub(crate) fn from_matches(
- handler: &mut EarlyErrorHandler,
+ early_dcx: &mut EarlyDiagCtxt,
matches: &getopts::Matches,
args: Vec<String>,
) -> Result<(Options, RenderOptions), i32> {
// Check for unstable options.
- nightly_options::check_nightly_options(handler, matches, &opts());
+ nightly_options::check_nightly_options(early_dcx, matches, &opts());
if args.is_empty() || matches.opt_present("h") || matches.opt_present("help") {
crate::usage("rustdoc");
return Err(0);
} else if matches.opt_present("version") {
- rustc_driver::version!(&handler, "rustdoc", matches);
+ rustc_driver::version!(&early_dcx, "rustdoc", matches);
return Err(0);
}
- if rustc_driver::describe_flag_categories(handler, &matches) {
+ if rustc_driver::describe_flag_categories(early_dcx, &matches) {
return Err(0);
}
- let color = config::parse_color(handler, matches);
+ let color = config::parse_color(early_dcx, matches);
let config::JsonConfig { json_rendered, json_unused_externs, .. } =
- config::parse_json(handler, matches);
- let error_format = config::parse_error_format(handler, matches, color, json_rendered);
+ config::parse_json(early_dcx, matches);
+ let error_format = config::parse_error_format(early_dcx, matches, color, json_rendered);
let diagnostic_width = matches.opt_get("diagnostic-width").unwrap_or_default();
- let codegen_options = CodegenOptions::build(handler, matches);
- let unstable_opts = UnstableOptions::build(handler, matches);
+ let codegen_options = CodegenOptions::build(early_dcx, matches);
+ let unstable_opts = UnstableOptions::build(early_dcx, matches);
- let diag = new_handler(error_format, None, diagnostic_width, &unstable_opts);
+ let dcx = new_dcx(error_format, None, diagnostic_width, &unstable_opts);
// check for deprecated options
- check_deprecated_options(matches, &diag);
+ check_deprecated_options(matches, &dcx);
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
@@ -391,7 +391,7 @@ impl Options {
match kind.parse() {
Ok(kind) => emit.push(kind),
Err(()) => {
- diag.err(format!("unrecognized emission type: {kind}"));
+ dcx.err(format!("unrecognized emission type: {kind}"));
return Err(1);
}
}
@@ -403,7 +403,7 @@ impl Options {
&& !matches.opt_present("show-coverage")
&& !nightly_options::is_unstable_enabled(matches)
{
- handler.early_error(
+ early_dcx.early_error(
"the -Z unstable-options flag must be passed to enable --output-format for documentation generation (see https://github.com/rust-lang/rust/issues/76578)",
);
}
@@ -421,7 +421,7 @@ impl Options {
let paths = match theme::load_css_paths(content) {
Ok(p) => p,
Err(e) => {
- diag.struct_err(e).emit();
+ dcx.struct_err(e).emit();
return Err(1);
}
};
@@ -430,7 +430,7 @@ impl Options {
println!("rustdoc: [check-theme] Starting tests! (Ignoring all other arguments)");
for theme_file in to_check.iter() {
print!(" - Checking \"{theme_file}\"...");
- let (success, differences) = theme::test_theme_against(theme_file, &paths, &diag);
+ let (success, differences) = theme::test_theme_against(theme_file, &paths, &dcx);
if !differences.is_empty() || !success {
println!(" FAILED");
errors += 1;
@@ -447,27 +447,27 @@ impl Options {
return Err(0);
}
- let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(handler, matches);
+ let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(early_dcx, matches);
let input = PathBuf::from(if describe_lints {
"" // dummy, this won't be used
} else if matches.free.is_empty() {
- diag.struct_err("missing file operand").emit();
+ dcx.struct_err("missing file operand").emit();
return Err(1);
} else if matches.free.len() > 1 {
- diag.struct_err("too many file operands").emit();
+ dcx.struct_err("too many file operands").emit();
return Err(1);
} else {
&matches.free[0]
});
let libs =
- matches.opt_strs("L").iter().map(|s| SearchPath::from_cli_opt(handler, s)).collect();
- let externs = parse_externs(handler, matches, &unstable_opts);
+ matches.opt_strs("L").iter().map(|s| SearchPath::from_cli_opt(early_dcx, s)).collect();
+ let externs = parse_externs(early_dcx, matches, &unstable_opts);
let extern_html_root_urls = match parse_extern_html_roots(matches) {
Ok(ex) => ex,
Err(err) => {
- diag.struct_err(err).emit();
+ dcx.struct_err(err).emit();
return Err(1);
}
};
@@ -526,7 +526,7 @@ impl Options {
let no_run = matches.opt_present("no-run");
if !should_test && no_run {
- diag.err("the `--test` flag must be passed to enable `--no-run`");
+ dcx.err("the `--test` flag must be passed to enable `--no-run`");
return Err(1);
}
@@ -534,7 +534,7 @@ impl Options {
let output = matches.opt_str("output").map(|s| PathBuf::from(&s));
let output = match (out_dir, output) {
(Some(_), Some(_)) => {
- diag.struct_err("cannot use both 'out-dir' and 'output' at once").emit();
+ dcx.struct_err("cannot use both 'out-dir' and 'output' at once").emit();
return Err(1);
}
(Some(out_dir), None) => out_dir,
@@ -549,7 +549,7 @@ impl Options {
if let Some(ref p) = extension_css {
if !p.is_file() {
- diag.struct_err("option --extend-css argument must be a file").emit();
+ dcx.struct_err("option --extend-css argument must be a file").emit();
return Err(1);
}
}
@@ -567,7 +567,7 @@ impl Options {
let paths = match theme::load_css_paths(content) {
Ok(p) => p,
Err(e) => {
- diag.struct_err(e).emit();
+ dcx.struct_err(e).emit();
return Err(1);
}
};
@@ -576,23 +576,23 @@ impl Options {
matches.opt_strs("theme").iter().map(|s| (PathBuf::from(&s), s.to_owned()))
{
if !theme_file.is_file() {
- diag.struct_err(format!("invalid argument: \"{theme_s}\""))
+ dcx.struct_err(format!("invalid argument: \"{theme_s}\""))
.help("arguments to --theme must be files")
.emit();
return Err(1);
}
if theme_file.extension() != Some(OsStr::new("css")) {
- diag.struct_err(format!("invalid argument: \"{theme_s}\""))
+ dcx.struct_err(format!("invalid argument: \"{theme_s}\""))
.help("arguments to --theme must have a .css extension")
.emit();
return Err(1);
}
- let (success, ret) = theme::test_theme_against(&theme_file, &paths, &diag);
+ let (success, ret) = theme::test_theme_against(&theme_file, &paths, &dcx);
if !success {
- diag.struct_err(format!("error loading theme file: \"{theme_s}\"")).emit();
+ dcx.struct_err(format!("error loading theme file: \"{theme_s}\"")).emit();
return Err(1);
} else if !ret.is_empty() {
- diag.struct_warn(format!(
+ dcx.struct_warn(format!(
"theme file \"{theme_s}\" is missing CSS rules from the default theme",
))
.warn("the theme may appear incorrect when loaded")
@@ -605,7 +605,7 @@ impl Options {
}
}
- let edition = config::parse_crate_edition(handler, matches);
+ let edition = config::parse_crate_edition(early_dcx, matches);
let mut id_map = html::markdown::IdMap::new();
let Some(external_html) = ExternalHtml::load(
@@ -615,7 +615,7 @@ impl Options {
&matches.opt_strs("markdown-before-content"),
&matches.opt_strs("markdown-after-content"),
nightly_options::match_is_nightly_build(matches),
- &diag,
+ &dcx,
&mut id_map,
edition,
&None,
@@ -626,7 +626,7 @@ impl Options {
match matches.opt_str("r").as_deref() {
Some("rust") | None => {}
Some(s) => {
- diag.struct_err(format!("unknown input format: {s}")).emit();
+ dcx.struct_err(format!("unknown input format: {s}")).emit();
return Err(1);
}
}
@@ -634,19 +634,19 @@ impl Options {
let index_page = matches.opt_str("index-page").map(|s| PathBuf::from(&s));
if let Some(ref index_page) = index_page {
if !index_page.is_file() {
- diag.struct_err("option `--index-page` argument must be a file").emit();
+ dcx.struct_err("option `--index-page` argument must be a file").emit();
return Err(1);
}
}
- let target = parse_target_triple(handler, matches);
+ let target = parse_target_triple(early_dcx, matches);
let show_coverage = matches.opt_present("show-coverage");
let crate_types = match parse_crate_types_from_list(matches.opt_strs("crate-type")) {
Ok(types) => types,
Err(e) => {
- diag.struct_err(format!("unknown crate type: {e}")).emit();
+ dcx.struct_err(format!("unknown crate type: {e}")).emit();
return Err(1);
}
};
@@ -655,7 +655,7 @@ impl Options {
Some(s) => match OutputFormat::try_from(s.as_str()) {
Ok(out_fmt) => {
if !out_fmt.is_json() && show_coverage {
- diag.struct_err(
+ dcx.struct_err(
"html output format isn't supported for the --show-coverage option",
)
.emit();
@@ -664,7 +664,7 @@ impl Options {
out_fmt
}
Err(e) => {
- diag.struct_err(e).emit();
+ dcx.struct_err(e).emit();
return Err(1);
}
},
@@ -709,16 +709,16 @@ impl Options {
let html_no_source = matches.opt_present("html-no-source");
if generate_link_to_definition && (show_coverage || output_format != OutputFormat::Html) {
- diag.struct_err(
+ dcx.struct_err(
"--generate-link-to-definition option can only be used with HTML output format",
)
.emit();
return Err(1);
}
- let scrape_examples_options = ScrapeExamplesOptions::new(matches, &diag)?;
+ let scrape_examples_options = ScrapeExamplesOptions::new(matches, &dcx)?;
let with_examples = matches.opt_strs("with-examples");
- let call_locations = crate::scrape_examples::load_call_locations(with_examples, &diag)?;
+ let call_locations = crate::scrape_examples::load_call_locations(with_examples, &dcx)?;
let unstable_features =
rustc_feature::UnstableFeatures::from_environment(crate_name.as_deref());
@@ -798,17 +798,17 @@ impl Options {
/// Returns `true` if the file given as `self.input` is a Markdown file.
pub(crate) fn markdown_input(&self) -> bool {
- self.input.extension().map_or(false, |e| e == "md" || e == "markdown")
+ self.input.extension().is_some_and(|e| e == "md" || e == "markdown")
}
}
/// Prints deprecation warnings for deprecated options
-fn check_deprecated_options(matches: &getopts::Matches, diag: &rustc_errors::Handler) {
+fn check_deprecated_options(matches: &getopts::Matches, dcx: &rustc_errors::DiagCtxt) {
let deprecated_flags = [];
for &flag in deprecated_flags.iter() {
if matches.opt_present(flag) {
- diag.struct_warn(format!("the `{flag}` flag is deprecated"))
+ dcx.struct_warn(format!("the `{flag}` flag is deprecated"))
.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
@@ -821,7 +821,7 @@ fn check_deprecated_options(matches: &getopts::Matches, diag: &rustc_errors::Han
for &flag in removed_flags.iter() {
if matches.opt_present(flag) {
- let mut err = diag.struct_warn(format!("the `{flag}` flag no longer functions"));
+ let mut err = dcx.struct_warn(format!("the `{flag}` flag no longer functions"));
err.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index 6d9f8b820..4e904ffd7 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -107,12 +107,12 @@ impl<'tcx> DocContext<'tcx> {
r
}
- /// Like `hir().local_def_id_to_hir_id()`, but skips calling it on fake DefIds.
+ /// Like `tcx.local_def_id_to_hir_id()`, but skips calling it on fake DefIds.
/// (This avoids a slice-index-out-of-bounds panic.)
pub(crate) fn as_local_hir_id(tcx: TyCtxt<'_>, item_id: ItemId) -> Option<HirId> {
match item_id {
ItemId::DefId(real_id) => {
- real_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
+ real_id.as_local().map(|def_id| tcx.local_def_id_to_hir_id(def_id))
}
// FIXME: Can this be `Some` for `Auto` or `Blanket`?
_ => None,
@@ -120,16 +120,16 @@ impl<'tcx> DocContext<'tcx> {
}
}
-/// Creates a new diagnostic `Handler` that can be used to emit warnings and errors.
+/// Creates a new `DiagCtxt` that can be used to emit warnings and errors.
///
/// If the given `error_format` is `ErrorOutputType::Json` and no `SourceMap` is given, a new one
-/// will be created for the handler.
-pub(crate) fn new_handler(
+/// will be created for the `DiagCtxt`.
+pub(crate) fn new_dcx(
error_format: ErrorOutputType,
source_map: Option<Lrc<source_map::SourceMap>>,
diagnostic_width: Option<usize>,
unstable_opts: &UnstableOptions,
-) -> rustc_errors::Handler {
+) -> rustc_errors::DiagCtxt {
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
@@ -169,8 +169,7 @@ pub(crate) fn new_handler(
}
};
- rustc_errors::Handler::with_emitter(emitter)
- .with_flags(unstable_opts.diagnostic_handler_flags(true))
+ rustc_errors::DiagCtxt::with_emitter(emitter).with_flags(unstable_opts.dcx_flags(true))
}
/// Parse, resolve, and typecheck the given crate.
@@ -380,13 +379,15 @@ pub(crate) fn run_global_ctxt(
crate::lint::MISSING_CRATE_LEVEL_DOCS,
DocContext::as_local_hir_id(tcx, krate.module.item_id).unwrap(),
"no documentation found for this crate's top-level module",
- |lint| lint.help(help),
+ |lint| {
+ lint.help(help);
+ },
);
}
- fn report_deprecated_attr(name: &str, diag: &rustc_errors::Handler, sp: Span) {
+ fn report_deprecated_attr(name: &str, dcx: &rustc_errors::DiagCtxt, sp: Span) {
let mut msg =
- diag.struct_span_warn(sp, format!("the `#![doc({name})]` attribute is deprecated"));
+ dcx.struct_span_warn(sp, format!("the `#![doc({name})]` attribute is deprecated"));
msg.note(
"see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
for more information",
@@ -406,19 +407,19 @@ pub(crate) fn run_global_ctxt(
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
for attr in krate.module.attrs.lists(sym::doc) {
- let diag = ctxt.sess().diagnostic();
+ let dcx = ctxt.sess().dcx();
let name = attr.name_or_empty();
// `plugins = "..."`, `no_default_passes`, and `passes = "..."` have no effect
if attr.is_word() && name == sym::no_default_passes {
- report_deprecated_attr("no_default_passes", diag, attr.span());
+ report_deprecated_attr("no_default_passes", dcx, attr.span());
} else if attr.value_str().is_some() {
match name {
sym::passes => {
- report_deprecated_attr("passes = \"...\"", diag, attr.span());
+ report_deprecated_attr("passes = \"...\"", dcx, attr.span());
}
sym::plugins => {
- report_deprecated_attr("plugins = \"...\"", diag, attr.span());
+ report_deprecated_attr("plugins = \"...\"", dcx, attr.span());
}
_ => (),
}
@@ -446,7 +447,7 @@ pub(crate) fn run_global_ctxt(
tcx.sess.time("check_lint_expectations", || tcx.check_expectations(Some(sym::rustdoc)));
- if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.dcx().has_errors_or_lint_errors().is_some() {
rustc_errors::FatalError.raise();
}
diff --git a/src/librustdoc/docfs.rs b/src/librustdoc/docfs.rs
index 82c1a5039..1f7abdfc3 100644
--- a/src/librustdoc/docfs.rs
+++ b/src/librustdoc/docfs.rs
@@ -12,7 +12,6 @@ use std::cmp::max;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
-use std::string::ToString;
use std::sync::mpsc::Sender;
use std::thread::available_parallelism;
use threadpool::ThreadPool;
diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs
index 241286580..c74f2ecb0 100644
--- a/src/librustdoc/doctest.rs
+++ b/src/librustdoc/doctest.rs
@@ -127,17 +127,17 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
options,
false,
opts,
- Some(compiler.session().parse_sess.clone_source_map()),
+ Some(compiler.sess.parse_sess.clone_source_map()),
None,
enable_per_target_ignores,
);
let mut hir_collector = HirCollector {
- sess: compiler.session(),
+ sess: &compiler.sess,
collector: &mut collector,
map: tcx.hir(),
codes: ErrorCodes::from(
- compiler.session().opts.unstable_features.is_nightly_build(),
+ compiler.sess.opts.unstable_features.is_nightly_build(),
),
tcx,
};
@@ -150,7 +150,7 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
collector
});
- if compiler.session().diagnostic().has_errors_or_lint_errors().is_some() {
+ if compiler.sess.dcx().has_errors_or_lint_errors().is_some() {
FatalError.raise();
}
@@ -558,7 +558,7 @@ pub(crate) fn make_test(
let result = rustc_driver::catch_fatal_errors(|| {
rustc_span::create_session_if_not_set_then(edition, |_| {
use rustc_errors::emitter::{Emitter, EmitterWriter};
- use rustc_errors::Handler;
+ use rustc_errors::DiagCtxt;
use rustc_parse::parser::ForceCollect;
use rustc_span::source_map::FilePathMapping;
@@ -579,8 +579,8 @@ pub(crate) fn make_test(
let emitter = EmitterWriter::new(Box::new(io::sink()), fallback_bundle);
// FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
- let handler = Handler::with_emitter(Box::new(emitter)).disable_warnings();
- let sess = ParseSess::with_span_handler(handler, sm);
+ let dcx = DiagCtxt::with_emitter(Box::new(emitter)).disable_warnings();
+ let sess = ParseSess::with_dcx(dcx, sm);
let mut found_main = false;
let mut found_extern_crate = crate_name.is_none();
@@ -597,15 +597,15 @@ pub(crate) fn make_test(
loop {
match parser.parse_item(ForceCollect::No) {
Ok(Some(item)) => {
- if !found_main &&
- let ast::ItemKind::Fn(..) = item.kind &&
- item.ident.name == sym::main
+ if !found_main
+ && let ast::ItemKind::Fn(..) = item.kind
+ && item.ident.name == sym::main
{
found_main = true;
}
- if !found_extern_crate &&
- let ast::ItemKind::ExternCrate(original) = item.kind
+ if !found_extern_crate
+ && let ast::ItemKind::ExternCrate(original) = item.kind
{
// This code will never be reached if `crate_name` is none because
// `found_extern_crate` is initialized to `true` if it is none.
@@ -638,10 +638,10 @@ pub(crate) fn make_test(
}
// Reset errors so that they won't be reported as compiler bugs when dropping the
- // handler. Any errors in the tests will be reported when the test file is compiled,
+ // dcx. Any errors in the tests will be reported when the test file is compiled,
// Note that we still need to cancel the errors above otherwise `DiagnosticBuilder`
// will panic on drop.
- sess.span_diagnostic.reset_err_count();
+ sess.dcx.reset_err_count();
(found_main, found_extern_crate, found_macro)
})
@@ -740,7 +740,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
rustc_driver::catch_fatal_errors(|| {
rustc_span::create_session_if_not_set_then(edition, |_| {
use rustc_errors::emitter::EmitterWriter;
- use rustc_errors::Handler;
+ use rustc_errors::DiagCtxt;
use rustc_span::source_map::FilePathMapping;
let filename = FileName::anon_source_code(source);
@@ -754,8 +754,8 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
let emitter = EmitterWriter::new(Box::new(io::sink()), fallback_bundle);
- let handler = Handler::with_emitter(Box::new(emitter)).disable_warnings();
- let sess = ParseSess::with_span_handler(handler, sm);
+ let dcx = DiagCtxt::with_emitter(Box::new(emitter)).disable_warnings();
+ let sess = ParseSess::with_dcx(dcx, sm);
let mut parser =
match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) {
Ok(p) => p,
@@ -957,10 +957,10 @@ impl Collector {
fn get_filename(&self) -> FileName {
if let Some(ref source_map) = self.source_map {
let filename = source_map.span_to_filename(self.position);
- if let FileName::Real(ref filename) = filename &&
- let Ok(cur_dir) = env::current_dir() &&
- let Some(local_path) = filename.local_path() &&
- let Ok(path) = local_path.strip_prefix(&cur_dir)
+ if let FileName::Real(ref filename) = filename
+ && let Ok(cur_dir) = env::current_dir()
+ && let Some(local_path) = filename.local_path()
+ && let Ok(path) = local_path.strip_prefix(&cur_dir)
{
return path.to_owned().into();
}
@@ -1207,7 +1207,7 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
sp: Span,
nested: F,
) {
- let ast_attrs = self.tcx.hir().attrs(self.tcx.hir().local_def_id_to_hir_id(def_id));
+ let ast_attrs = self.tcx.hir().attrs(self.tcx.local_def_id_to_hir_id(def_id));
if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) {
if !cfg.matches(&self.sess.parse_sess, Some(self.tcx.features())) {
return;
diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs
index b34b69b1f..8bc0cdf3a 100644
--- a/src/librustdoc/externalfiles.rs
+++ b/src/librustdoc/externalfiles.rs
@@ -27,15 +27,15 @@ impl ExternalHtml {
md_before_content: &[String],
md_after_content: &[String],
nightly_build: bool,
- diag: &rustc_errors::Handler,
+ dcx: &rustc_errors::DiagCtxt,
id_map: &mut IdMap,
edition: Edition,
playground: &Option<Playground>,
) -> Option<ExternalHtml> {
let codes = ErrorCodes::from(nightly_build);
- let ih = load_external_files(in_header, diag)?;
- let bc = load_external_files(before_content, diag)?;
- let m_bc = load_external_files(md_before_content, diag)?;
+ let ih = load_external_files(in_header, dcx)?;
+ let bc = load_external_files(before_content, dcx)?;
+ let m_bc = load_external_files(md_before_content, dcx)?;
let bc = format!(
"{bc}{}",
Markdown {
@@ -51,8 +51,8 @@ impl ExternalHtml {
}
.into_string()
);
- let ac = load_external_files(after_content, diag)?;
- let m_ac = load_external_files(md_after_content, diag)?;
+ let ac = load_external_files(after_content, dcx)?;
+ let m_ac = load_external_files(md_after_content, dcx)?;
let ac = format!(
"{ac}{}",
Markdown {
@@ -79,13 +79,13 @@ pub(crate) enum LoadStringError {
pub(crate) fn load_string<P: AsRef<Path>>(
file_path: P,
- diag: &rustc_errors::Handler,
+ dcx: &rustc_errors::DiagCtxt,
) -> Result<String, LoadStringError> {
let file_path = file_path.as_ref();
let contents = match fs::read(file_path) {
Ok(bytes) => bytes,
Err(e) => {
- diag.struct_err(format!(
+ dcx.struct_err(format!(
"error reading `{file_path}`: {e}",
file_path = file_path.display()
))
@@ -96,16 +96,16 @@ pub(crate) fn load_string<P: AsRef<Path>>(
match str::from_utf8(&contents) {
Ok(s) => Ok(s.to_string()),
Err(_) => {
- diag.struct_err(format!("error reading `{}`: not UTF-8", file_path.display())).emit();
+ dcx.struct_err(format!("error reading `{}`: not UTF-8", file_path.display())).emit();
Err(LoadStringError::BadUtf8)
}
}
}
-fn load_external_files(names: &[String], diag: &rustc_errors::Handler) -> Option<String> {
+fn load_external_files(names: &[String], dcx: &rustc_errors::DiagCtxt) -> Option<String> {
let mut out = String::new();
for name in names {
- let Ok(s) = load_string(name, diag) else { return None };
+ let Ok(s) = load_string(name, dcx) else { return None };
out.push_str(&s);
out.push('\n');
}
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index abff77253..9802097ea 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -230,14 +230,14 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
// If the impl is from a masked crate or references something from a
// masked crate then remove it completely.
- if let clean::ImplItem(ref i) = *item.kind &&
- (self.cache.masked_crates.contains(&item.item_id.krate())
+ if let clean::ImplItem(ref i) = *item.kind
+ && (self.cache.masked_crates.contains(&item.item_id.krate())
|| i.trait_
.as_ref()
- .map_or(false, |t| is_from_private_dep(self.tcx, self.cache, t.def_id()))
+ .is_some_and(|t| is_from_private_dep(self.tcx, self.cache, t.def_id()))
|| i.for_
.def_id(self.cache)
- .map_or(false, |d| is_from_private_dep(self.tcx, self.cache, d)))
+ .is_some_and(|d| is_from_private_dep(self.tcx, self.cache, d)))
{
return None;
}
@@ -249,9 +249,9 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
}
// Collect all the implementors of traits.
- if let clean::ImplItem(ref i) = *item.kind &&
- let Some(trait_) = &i.trait_ &&
- !i.kind.is_blanket()
+ if let clean::ImplItem(ref i) = *item.kind
+ && let Some(trait_) = &i.trait_
+ && !i.kind.is_blanket()
{
self.cache
.implementors
@@ -264,8 +264,9 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
if let Some(s) = item.name.or_else(|| {
if item.is_stripped() {
None
- } else if let clean::ImportItem(ref i) = *item.kind &&
- let clean::ImportKind::Simple(s) = i.kind {
+ } else if let clean::ImportItem(ref i) = *item.kind
+ && let clean::ImportKind::Simple(s) = i.kind
+ {
Some(s)
} else {
None
@@ -278,7 +279,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
.cache
.parent_stack
.last()
- .map_or(false, |parent| parent.is_trait_impl()) =>
+ .is_some_and(|parent| parent.is_trait_impl()) =>
{
// skip associated items in trait impls
((None, None), false)
@@ -340,7 +341,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
// A crate has a module at its root, containing all items,
// which should not be indexed. The crate-item itself is
// inserted later on when serializing the search-index.
- if item.item_id.as_def_id().map_or(false, |idx| !idx.is_crate_root())
+ if item.item_id.as_def_id().is_some_and(|idx| !idx.is_crate_root())
&& let ty = item.type_()
&& (ty != ItemType::StructField
|| u16::from_str_radix(s.as_str(), 10).is_err())
@@ -357,7 +358,9 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
desc,
parent,
parent_idx: None,
- impl_id: if let Some(ParentStackItem::Impl { item_id, .. }) = self.cache.parent_stack.last() {
+ impl_id: if let Some(ParentStackItem::Impl { item_id, .. }) =
+ self.cache.parent_stack.last()
+ {
item_id.as_def_id()
} else {
None
@@ -366,6 +369,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
&item,
self.tcx,
clean_impl_generics(self.cache.parent_stack.last()).as_ref(),
+ parent,
self.cache,
),
aliases: item.attrs.get_doc_aliases(),
@@ -493,9 +497,11 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
clean::Type::Path { ref path }
| clean::BorrowedRef { type_: box clean::Type::Path { ref path }, .. } => {
dids.insert(path.def_id());
- if let Some(generics) = path.generics() &&
- let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).instantiate_identity().kind() &&
- adt.is_fundamental() {
+ if let Some(generics) = path.generics()
+ && let ty::Adt(adt, _) =
+ self.tcx.type_of(path.def_id()).instantiate_identity().kind()
+ && adt.is_fundamental()
+ {
for ty in generics {
if let Some(did) = ty.def_id(self.cache) {
dids.insert(did);
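
A quick sketch of the `map_or(false, ..)` to `is_some_and(..)` cleanups applied throughout this file: the two spellings are equivalent for both `Some` and `None`, the newer one simply reads as a predicate. The `Option` values and the threshold below are made up for illustration.

    fn main() {
        let some_id: Option<u32> = Some(7);
        let no_id: Option<u32> = None;

        // The old and the new spelling always agree.
        assert_eq!(some_id.map_or(false, |id| id < 100), some_id.is_some_and(|id| id < 100));
        assert_eq!(no_id.map_or(false, |id| id < 100), no_id.is_some_and(|id| id < 100));
        println!("map_or(false, f) == is_some_and(f)");
    }
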
diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs
index def3a90c8..e80da46ad 100644
--- a/src/librustdoc/formats/item_type.rs
+++ b/src/librustdoc/formats/item_type.rs
@@ -16,6 +16,13 @@ use crate::clean;
/// Consequently, every change to this type should be synchronized to
/// the `itemTypes` mapping table in `html/static/js/search.js`.
///
+/// The search engine in search.js also uses item type numbers as a tie breaker when
+/// sorting results. Keywords and primitives are given first because we want them to be easily
+/// found by new users who don't know about advanced features like type filters. The rest are
+/// mostly in an arbitrary order, but it's easier to test the search engine when
+/// it's deterministic, and these are strictly finer-grained than language namespaces, so
+/// using the path and the item type together to sort ensures that search sorting is stable.
+///
/// In addition, code in `html::render` uses this enum to generate CSS classes, page prefixes, and
/// module headings. If you are adding to this enum and want to ensure that the sidebar also prints
/// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an
@@ -23,32 +30,34 @@ use crate::clean;
#[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)]
#[repr(u8)]
pub(crate) enum ItemType {
- Module = 0,
- ExternCrate = 1,
- Import = 2,
- Struct = 3,
- Enum = 4,
- Function = 5,
- TypeAlias = 6,
- Static = 7,
- Trait = 8,
- Impl = 9,
- TyMethod = 10,
- Method = 11,
- StructField = 12,
- Variant = 13,
- Macro = 14,
- Primitive = 15,
- AssocType = 16,
- Constant = 17,
- AssocConst = 18,
- Union = 19,
- ForeignType = 20,
- Keyword = 21,
+ Keyword = 0,
+ Primitive = 1,
+ Module = 2,
+ ExternCrate = 3,
+ Import = 4,
+ Struct = 5,
+ Enum = 6,
+ Function = 7,
+ TypeAlias = 8,
+ Static = 9,
+ Trait = 10,
+ Impl = 11,
+ TyMethod = 12,
+ Method = 13,
+ StructField = 14,
+ Variant = 15,
+ Macro = 16,
+ AssocType = 17,
+ Constant = 18,
+ AssocConst = 19,
+ Union = 20,
+ ForeignType = 21,
OpaqueTy = 22,
ProcAttribute = 23,
ProcDerive = 24,
TraitAlias = 25,
+ // This number is reserved for use in JavaScript
+ // Generic = 26,
}
impl Serialize for ItemType {
@@ -140,8 +149,7 @@ impl From<DefKind> for ItemType {
| DefKind::LifetimeParam
| DefKind::GlobalAsm
| DefKind::Impl { .. }
- | DefKind::Closure
- | DefKind::Coroutine => Self::ForeignType,
+ | DefKind::Closure => Self::ForeignType,
}
}
}
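
The renumbering above moves `Keyword` and `Primitive` to the front, and the new doc comment explains why: the discriminant doubles as a deterministic tie-breaker when search results are sorted. A minimal sketch of that sorting rule; the truncated enum and the `hits` data are illustrative, not rustdoc's real search records.

    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    #[repr(u8)]
    enum ItemType {
        Keyword = 0,
        Primitive = 1,
        Module = 2,
        Struct = 5,
    }

    fn main() {
        // Hypothetical search hits that tie on their path.
        let mut hits = vec![
            ("std::vec", ItemType::Module),
            ("std::vec", ItemType::Struct),
            ("bool", ItemType::Primitive),
            ("bool", ItemType::Keyword),
        ];
        // Sort by path first, then by the discriminant as a deterministic tie-breaker.
        hits.sort_by_key(|&(path, ty)| (path, ty as u8));
        // With Keyword = 0 and Primitive = 1, those kinds win ties against everything else.
        assert_eq!(hits[0], ("bool", ItemType::Keyword));
        assert_eq!(hits[1], ("bool", ItemType::Primitive));
        println!("{hits:?}");
    }
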
diff --git a/src/librustdoc/formats/mod.rs b/src/librustdoc/formats/mod.rs
index e607a16ad..0056fb485 100644
--- a/src/librustdoc/formats/mod.rs
+++ b/src/librustdoc/formats/mod.rs
@@ -9,21 +9,6 @@ pub(crate) use renderer::{run_format, FormatRenderer};
use crate::clean::{self, ItemId};
use crate::html::render::Context;
-/// Specifies whether rendering directly implemented trait items or ones from a certain Deref
-/// impl.
-pub(crate) enum AssocItemRender<'a> {
- All,
- DerefFor { trait_: &'a clean::Path, type_: &'a clean::Type, deref_mut_: bool },
-}
-
-/// For different handling of associated items from the Deref target of a type rather than the type
-/// itself.
-#[derive(Copy, Clone, PartialEq)]
-pub(crate) enum RenderMode {
- Normal,
- ForDeref { mut_: bool },
-}
-
/// Metadata about implementations for a type or trait.
#[derive(Clone, Debug)]
pub(crate) struct Impl {
diff --git a/src/librustdoc/formats/renderer.rs b/src/librustdoc/formats/renderer.rs
index c49f1a4d3..2535668b8 100644
--- a/src/librustdoc/formats/renderer.rs
+++ b/src/librustdoc/formats/renderer.rs
@@ -1,5 +1,4 @@
use rustc_middle::ty::TyCtxt;
-use rustc_span::Symbol;
use crate::clean;
use crate::config::RenderOptions;
@@ -68,7 +67,6 @@ pub(crate) fn run_format<'tcx, T: FormatRenderer<'tcx>>(
// Render the crate documentation
let mut work = vec![(format_renderer.make_child_renderer(), krate.module)];
- let unknown = Symbol::intern("<unknown item>");
while let Some((mut cx, item)) = work.pop() {
if item.is_mod() && T::RUN_ON_MODULE {
// modules are special because they add a namespace. We also need to
@@ -90,8 +88,10 @@ pub(crate) fn run_format<'tcx, T: FormatRenderer<'tcx>>(
cx.mod_item_out()?;
// FIXME: checking `item.name.is_some()` is very implicit and leads to lots of special
// cases. Use an explicit match instead.
- } else if item.name.is_some() && !item.is_extern_crate() {
- prof.generic_activity_with_arg("render_item", item.name.unwrap_or(unknown).as_str())
+ } else if let Some(item_name) = item.name
+ && !item.is_extern_crate()
+ {
+ prof.generic_activity_with_arg("render_item", item_name.as_str())
.run(|| cx.item(item))?;
}
}
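
The hunk above drops the `<unknown item>` placeholder symbol by binding the item's name before rendering it. A stable-Rust sketch of the same shape, written with a nested `if` because the single-condition form relies on the unstable `let_chains` feature enabled in the compiler tree; the `Item` struct here is a stand-in, not rustdoc's.

    struct Item {
        name: Option<String>,
        is_extern_crate: bool,
    }

    fn main() {
        let item = Item { name: Some("render_me".to_string()), is_extern_crate: false };

        // Binding the name up front removes the need for an "<unknown item>" fallback.
        if let Some(item_name) = &item.name {
            if !item.is_extern_crate {
                println!("render_item: {item_name}");
            }
        }
    }
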
diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs
index 4a19d0a44..ea4b573ae 100644
--- a/src/librustdoc/html/escape.rs
+++ b/src/librustdoc/html/escape.rs
@@ -38,3 +38,39 @@ impl<'a> fmt::Display for Escape<'a> {
Ok(())
}
}
+
+/// Wrapper struct which will emit the HTML-escaped version of the contained
+/// string when passed to a format string.
+///
+/// This is only safe to use for text nodes. If you need your output to be
+/// safely contained in an attribute, use [`Escape`]. If you don't know the
+/// difference, use [`Escape`].
+pub(crate) struct EscapeBodyText<'a>(pub &'a str);
+
+impl<'a> fmt::Display for EscapeBodyText<'a> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Because the internet is always right, turns out there's not that many
+ // characters to escape: http://stackoverflow.com/questions/7381974
+ let EscapeBodyText(s) = *self;
+ let pile_o_bits = s;
+ let mut last = 0;
+ for (i, ch) in s.char_indices() {
+ let s = match ch {
+ '>' => "&gt;",
+ '<' => "&lt;",
+ '&' => "&amp;",
+ _ => continue,
+ };
+ fmt.write_str(&pile_o_bits[last..i])?;
+ fmt.write_str(s)?;
+ // NOTE: we only expect single byte characters here - which is fine as long as we
+ // only match single byte characters
+ last = i + 1;
+ }
+
+ if last < s.len() {
+ fmt.write_str(&pile_o_bits[last..])?;
+ }
+ Ok(())
+ }
+}
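
The new `EscapeBodyText` wrapper escapes only `<`, `>` and `&`, which is enough for HTML text nodes but not for attribute values, hence the doc comment steering unsure callers back to `Escape`. A standalone sketch mirroring that rule; the free function below is illustrative and is not the rustdoc type itself.

    // Escape only the characters that matter inside an HTML text node.
    fn escape_body_text(s: &str) -> String {
        let mut out = String::with_capacity(s.len());
        for ch in s.chars() {
            match ch {
                '>' => out.push_str("&gt;"),
                '<' => out.push_str("&lt;"),
                '&' => out.push_str("&amp;"),
                _ => out.push(ch),
            }
        }
        out
    }

    fn main() {
        assert_eq!(escape_body_text("Vec<T> & friends"), "Vec&lt;T&gt; &amp; friends");
        // Quotes pass through untouched, so the output is only safe as a text node,
        // not inside an attribute value.
        assert_eq!(escape_body_text(r#"a "quoted" str"#), r#"a "quoted" str"#);
        println!("ok");
    }
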
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index 29fd880af..a9c0ab557 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -367,7 +367,7 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
if ending == Ending::Newline {
let mut clause = " ".repeat(indent.saturating_sub(1));
- write!(clause, "<span class=\"where fmt-newline\">where{where_preds},</span>")?;
+ write!(clause, "<div class=\"where\">where{where_preds},</div>")?;
clause
} else {
// insert a newline after a single space but before multiple spaces at the start
@@ -1166,13 +1166,17 @@ fn fmt_type<'cx>(
// we need to surround them with angle brackets in some cases (e.g. `<dyn …>::P`).
if f.alternate() {
- if let Some(trait_) = trait_ && should_show_cast {
+ if let Some(trait_) = trait_
+ && should_show_cast
+ {
write!(f, "<{:#} as {:#}>::", self_type.print(cx), trait_.print(cx))?
} else {
write!(f, "{:#}::", self_type.print(cx))?
}
} else {
- if let Some(trait_) = trait_ && should_show_cast {
+ if let Some(trait_) = trait_
+ && should_show_cast
+ {
write!(f, "&lt;{} as {}&gt;::", self_type.print(cx), trait_.print(cx))?
} else {
write!(f, "{}::", self_type.print(cx))?
@@ -1268,16 +1272,23 @@ impl clean::Impl {
write!(f, " for ")?;
}
- if let clean::Type::Tuple(types) = &self.for_ &&
- let [clean::Type::Generic(name)] = &types[..] &&
- (self.kind.is_fake_variadic() || self.kind.is_auto())
+ if let clean::Type::Tuple(types) = &self.for_
+ && let [clean::Type::Generic(name)] = &types[..]
+ && (self.kind.is_fake_variadic() || self.kind.is_auto())
{
// Hardcoded anchor library/core/src/primitive_docs.rs
// Link should match `# Trait implementations`
- primitive_link_fragment(f, PrimitiveType::Tuple, format_args!("({name}₁, {name}₂, …, {name}ₙ)"), "#trait-implementations-1", cx)?;
- } else if let clean::BareFunction(bare_fn) = &self.for_ &&
- let [clean::Argument { type_: clean::Type::Generic(name), .. }] = &bare_fn.decl.inputs.values[..] &&
- (self.kind.is_fake_variadic() || self.kind.is_auto())
+ primitive_link_fragment(
+ f,
+ PrimitiveType::Tuple,
+ format_args!("({name}₁, {name}₂, …, {name}ₙ)"),
+ "#trait-implementations-1",
+ cx,
+ )?;
+ } else if let clean::BareFunction(bare_fn) = &self.for_
+ && let [clean::Argument { type_: clean::Type::Generic(name), .. }] =
+ &bare_fn.decl.inputs.values[..]
+ && (self.kind.is_fake_variadic() || self.kind.is_auto())
{
// Hardcoded anchor library/core/src/primitive_docs.rs
// Link should match `# Trait implementations`
@@ -1286,22 +1297,18 @@ impl clean::Impl {
let unsafety = bare_fn.unsafety.print_with_space();
let abi = print_abi_with_space(bare_fn.abi);
if f.alternate() {
- write!(
- f,
- "{hrtb:#}{unsafety}{abi:#}",
- )?;
+ write!(f, "{hrtb:#}{unsafety}{abi:#}",)?;
} else {
- write!(
- f,
- "{hrtb}{unsafety}{abi}",
- )?;
+ write!(f, "{hrtb}{unsafety}{abi}",)?;
}
- let ellipsis = if bare_fn.decl.c_variadic {
- ", ..."
- } else {
- ""
- };
- primitive_link_fragment(f, PrimitiveType::Tuple, format_args!("fn ({name}₁, {name}₂, …, {name}ₙ{ellipsis})"), "#trait-implementations-1", cx)?;
+ let ellipsis = if bare_fn.decl.c_variadic { ", ..." } else { "" };
+ primitive_link_fragment(
+ f,
+ PrimitiveType::Tuple,
+ format_args!("fn({name}₁, {name}₂, …, {name}ₙ{ellipsis})"),
+ "#trait-implementations-1",
+ cx,
+ )?;
// Write output.
if !bare_fn.decl.output.is_unit() {
write!(f, " -> ")?;
@@ -1447,7 +1454,9 @@ impl clean::FnDecl {
let amp = if f.alternate() { "&" } else { "&amp;" };
write!(f, "(")?;
- if let Some(n) = line_wrapping_indent && !self.inputs.values.is_empty() {
+ if let Some(n) = line_wrapping_indent
+ && !self.inputs.values.is_empty()
+ {
write!(f, "\n{}", Indent(n + 4))?;
}
for (i, input) in self.inputs.values.iter().enumerate() {
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index d8e36139a..1cdc792a8 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -6,7 +6,7 @@
//! Use the `render_with_highlighting` to highlight some rust code.
use crate::clean::PrimitiveType;
-use crate::html::escape::Escape;
+use crate::html::escape::EscapeBodyText;
use crate::html::render::{Context, LinkFromSrc};
use std::collections::VecDeque;
@@ -185,22 +185,30 @@ impl<'a, 'tcx, F: Write> TokenHandler<'a, 'tcx, F> {
if self.pending_elems.is_empty() {
return false;
}
- if let Some((_, parent_class)) = self.closing_tags.last() &&
- can_merge(current_class, Some(*parent_class), "")
+ if let Some((_, parent_class)) = self.closing_tags.last()
+ && can_merge(current_class, Some(*parent_class), "")
{
for (text, class) in self.pending_elems.iter() {
- string(self.out, Escape(text), *class, &self.href_context, false);
+ string(self.out, EscapeBodyText(text), *class, &self.href_context, false);
}
} else {
// We only want to "open" the tag ourselves if we have more than one pending and if the
// current parent tag is not the same as our pending content.
- let close_tag = if self.pending_elems.len() > 1 && let Some(current_class) = current_class {
+ let close_tag = if self.pending_elems.len() > 1
+ && let Some(current_class) = current_class
+ {
Some(enter_span(self.out, current_class, &self.href_context))
} else {
None
};
for (text, class) in self.pending_elems.iter() {
- string(self.out, Escape(text), *class, &self.href_context, close_tag.is_none());
+ string(
+ self.out,
+ EscapeBodyText(text),
+ *class,
+ &self.href_context,
+ close_tag.is_none(),
+ );
}
if let Some(close_tag) = close_tag {
exit_span(self.out, close_tag);
@@ -260,10 +268,12 @@ pub(super) fn write_code(
Highlight::Token { text, class } => {
// If we received a `ExitSpan` event and then have a non-compatible `Class`, we
// need to close the `<span>`.
- let need_current_class_update = if let Some(pending) = token_handler.pending_exit_span &&
- !can_merge(Some(pending), class, text) {
- token_handler.handle_exit_span();
- true
+ let need_current_class_update = if let Some(pending) =
+ token_handler.pending_exit_span
+ && !can_merge(Some(pending), class, text)
+ {
+ token_handler.handle_exit_span();
+ true
// If the two `Class` are different, time to flush the current content and start
// a new one.
} else if !can_merge(token_handler.current_class, class, text) {
@@ -293,7 +303,8 @@ pub(super) fn write_code(
}
}
if should_add {
- let closing_tag = enter_span(token_handler.out, class, &token_handler.href_context);
+ let closing_tag =
+ enter_span(token_handler.out, class, &token_handler.href_context);
token_handler.closing_tags.push((closing_tag, class));
}
@@ -302,8 +313,14 @@ pub(super) fn write_code(
}
Highlight::ExitSpan => {
token_handler.current_class = None;
- token_handler.pending_exit_span =
- Some(token_handler.closing_tags.last().as_ref().expect("ExitSpan without EnterSpan").1);
+ token_handler.pending_exit_span = Some(
+ token_handler
+ .closing_tags
+ .last()
+ .as_ref()
+ .expect("ExitSpan without EnterSpan")
+ .1,
+ );
}
};
});
@@ -466,7 +483,9 @@ impl<'a> PeekIter<'a> {
}
/// Returns the next item after the current one. It doesn't interfere with `peek_next` output.
fn peek(&mut self) -> Option<&(TokenKind, &'a str)> {
- if self.stored.is_empty() && let Some(next) = self.iter.next() {
+ if self.stored.is_empty()
+ && let Some(next) = self.iter.next()
+ {
self.stored.push_back(next);
}
self.stored.front()
diff --git a/src/librustdoc/html/highlight/fixtures/dos_line.html b/src/librustdoc/html/highlight/fixtures/dos_line.html
index 30b50ca7c..b98e67125 100644
--- a/src/librustdoc/html/highlight/fixtures/dos_line.html
+++ b/src/librustdoc/html/highlight/fixtures/dos_line.html
@@ -1,3 +1,3 @@
<span class="kw">pub fn </span>foo() {
-<span class="macro">println!</span>(<span class="string">&quot;foo&quot;</span>);
+<span class="macro">println!</span>(<span class="string">"foo"</span>);
}
diff --git a/src/librustdoc/html/highlight/fixtures/sample.html b/src/librustdoc/html/highlight/fixtures/sample.html
index fced2eacd..aa735e815 100644
--- a/src/librustdoc/html/highlight/fixtures/sample.html
+++ b/src/librustdoc/html/highlight/fixtures/sample.html
@@ -8,12 +8,12 @@
.lifetime { color: #B76514; }
.question-mark { color: #ff9011; }
</style>
-<pre><code><span class="attr">#![crate_type = <span class="string">&quot;lib&quot;</span>]
+<pre><code><span class="attr">#![crate_type = <span class="string">"lib"</span>]
</span><span class="kw">use </span>std::path::{Path, PathBuf};
-<span class="attr">#[cfg(target_os = <span class="string">&quot;linux&quot;</span>)]
-#[cfg(target_os = <span class="string">&quot;windows&quot;</span>)]
+<span class="attr">#[cfg(target_os = <span class="string">"linux"</span>)]
+#[cfg(target_os = <span class="string">"windows"</span>)]
</span><span class="kw">fn </span>main() -&gt; () {
<span class="kw">let </span>foo = <span class="bool-val">true </span>&amp;&amp; <span class="bool-val">false </span>|| <span class="bool-val">true</span>;
<span class="kw">let _</span>: <span class="kw-2">*const </span>() = <span class="number">0</span>;
@@ -22,7 +22,7 @@
<span class="kw">let _ </span>= <span class="kw-2">*</span>foo;
<span class="macro">mac!</span>(foo, <span class="kw-2">&amp;mut </span>bar);
<span class="macro">assert!</span>(<span class="self">self</span>.length &lt; N &amp;&amp; index &lt;= <span class="self">self</span>.length);
- ::std::env::var(<span class="string">&quot;gateau&quot;</span>).is_ok();
+ ::std::env::var(<span class="string">"gateau"</span>).is_ok();
<span class="attr">#[rustfmt::skip]
</span><span class="kw">let </span>s:std::path::PathBuf = std::path::PathBuf::new();
<span class="kw">let </span><span class="kw-2">mut </span>s = String::new();
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 2807dfed0..642265f5f 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -27,7 +27,7 @@
//! ```
use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
+use rustc_errors::{DiagnosticBuilder, DiagnosticMessage};
use rustc_hir::def_id::DefId;
use rustc_middle::ty::TyCtxt;
pub(crate) use rustc_resolve::rustdoc::main_body_opts;
@@ -234,10 +234,6 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
fn next(&mut self) -> Option<Self::Item> {
let event = self.inner.next();
- let compile_fail;
- let should_panic;
- let ignore;
- let edition;
let Some(Event::Start(Tag::CodeBlock(kind))) = event else {
return event;
};
@@ -253,49 +249,44 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
}
}
- let parse_result = match kind {
- CodeBlockKind::Fenced(ref lang) => {
- let parse_result = LangString::parse_without_check(
- lang,
- self.check_error_codes,
- false,
- self.custom_code_classes_in_docs,
- );
- if !parse_result.rust {
- let added_classes = parse_result.added_classes;
- let lang_string = if let Some(lang) = parse_result.unknown.first() {
- format!("language-{}", lang)
- } else {
- String::new()
- };
- let whitespace = if added_classes.is_empty() { "" } else { " " };
- return Some(Event::Html(
- format!(
- "<div class=\"example-wrap\">\
+ let LangString { added_classes, compile_fail, should_panic, ignore, edition, .. } =
+ match kind {
+ CodeBlockKind::Fenced(ref lang) => {
+ let parse_result = LangString::parse_without_check(
+ lang,
+ self.check_error_codes,
+ false,
+ self.custom_code_classes_in_docs,
+ );
+ if !parse_result.rust {
+ let added_classes = parse_result.added_classes;
+ let lang_string = if let Some(lang) = parse_result.unknown.first() {
+ format!("language-{}", lang)
+ } else {
+ String::new()
+ };
+ let whitespace = if added_classes.is_empty() { "" } else { " " };
+ return Some(Event::Html(
+ format!(
+ "<div class=\"example-wrap\">\
<pre class=\"{lang_string}{whitespace}{added_classes}\">\
<code>{text}</code>\
</pre>\
</div>",
- added_classes = added_classes.join(" "),
- text = Escape(&original_text),
- )
- .into(),
- ));
+ added_classes = added_classes.join(" "),
+ text = Escape(&original_text),
+ )
+ .into(),
+ ));
+ }
+ parse_result
}
- parse_result
- }
- CodeBlockKind::Indented => Default::default(),
- };
+ CodeBlockKind::Indented => Default::default(),
+ };
- let added_classes = parse_result.added_classes;
let lines = original_text.lines().filter_map(|l| map_line(l).for_html());
let text = lines.intersperse("\n".into()).collect::<String>();
- compile_fail = parse_result.compile_fail;
- should_panic = parse_result.should_panic;
- ignore = parse_result.ignore;
- edition = parse_result.edition;
-
let explicit_edition = edition.is_some();
let edition = edition.unwrap_or(self.edition);
@@ -841,10 +832,10 @@ impl<'tcx> ExtraInfo<'tcx> {
if let Some(def_id) = self.def_id.as_local() {
self.tcx.struct_span_lint_hir(
crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
- self.tcx.hir().local_def_id_to_hir_id(def_id),
+ self.tcx.local_def_id_to_hir_id(def_id),
self.sp,
msg,
- |l| l,
+ |_| {},
);
}
}
@@ -852,15 +843,15 @@ impl<'tcx> ExtraInfo<'tcx> {
fn error_invalid_codeblock_attr_with_help(
&self,
msg: impl Into<DiagnosticMessage>,
- help: impl Into<SubdiagnosticMessage>,
+ f: impl for<'a, 'b> FnOnce(&'b mut DiagnosticBuilder<'a, ()>),
) {
if let Some(def_id) = self.def_id.as_local() {
self.tcx.struct_span_lint_hir(
crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
- self.tcx.hir().local_def_id_to_hir_id(def_id),
+ self.tcx.local_def_id_to_hir_id(def_id),
self.sp,
msg,
- |lint| lint.help(help),
+ f,
);
}
}
@@ -1119,10 +1110,10 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> {
return None;
}
let indices = self.parse_string(pos)?;
- if let Some((_, c)) = self.inner.peek().copied() &&
- c != '{' &&
- !is_separator(c) &&
- c != '('
+ if let Some((_, c)) = self.inner.peek().copied()
+ && c != '{'
+ && !is_separator(c)
+ && c != '('
{
self.emit_error(format!("expected ` `, `{{` or `,` after `\"`, found `{c}`"));
return None;
@@ -1294,6 +1285,21 @@ impl LangString {
data.edition = x[7..].parse::<Edition>().ok();
}
LangStringToken::LangToken(x)
+ if x.starts_with("rust") && x[4..].parse::<Edition>().is_ok() =>
+ {
+ if let Some(extra) = extra {
+ extra.error_invalid_codeblock_attr_with_help(
+ format!("unknown attribute `{x}`"),
+ |lint| {
+ lint.help(format!(
+ "there is an attribute with a similar name: `edition{}`",
+ &x[4..],
+ ));
+ },
+ );
+ }
+ }
+ LangStringToken::LangToken(x)
if allow_error_code_check && x.starts_with('E') && x.len() == 5 =>
{
if x[1..].parse::<u32>().is_ok() {
@@ -1337,8 +1343,13 @@ impl LangString {
} {
if let Some(extra) = extra {
extra.error_invalid_codeblock_attr_with_help(
- format!("unknown attribute `{x}`. Did you mean `{flag}`?"),
- help,
+ format!("unknown attribute `{x}`"),
+ |lint| {
+ lint.help(format!(
+ "there is an attribute with a similar name: `{flag}`"
+ ))
+ .help(help);
+ },
);
}
}
@@ -1370,7 +1381,7 @@ impl LangString {
};
if custom_code_classes_in_docs {
- call(&mut TagIterator::new(string, extra).into_iter())
+ call(&mut TagIterator::new(string, extra))
} else {
call(&mut tokens(string))
}
@@ -2000,6 +2011,7 @@ fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
map.insert("themeStyle".into(), 1);
map.insert("settings-menu".into(), 1);
map.insert("help-button".into(), 1);
+ map.insert("sidebar-button".into(), 1);
map.insert("main-content".into(), 1);
map.insert("toggle-all-docs".into(), 1);
map.insert("all-types".into(), 1);
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
index 50777134d..f0199703c 100644
--- a/src/librustdoc/html/render/context.rs
+++ b/src/librustdoc/html/render/context.rs
@@ -176,9 +176,9 @@ impl<'tcx> Context<'tcx> {
let mut render_redirect_pages = self.render_redirect_pages;
// If the item is stripped but inlined, links won't point to the item so no need to generate
// a file for it.
- if it.is_stripped() &&
- let Some(def_id) = it.def_id() &&
- def_id.is_local()
+ if it.is_stripped()
+ && let Some(def_id) = it.def_id()
+ && def_id.is_local()
{
if self.is_inside_inlined_module || self.shared.cache.inlined_items.contains(&def_id) {
// For now we're forced to generate a redirect page for stripped items until
@@ -371,7 +371,9 @@ impl<'tcx> Context<'tcx> {
path = href.into_inner().to_string_lossy().into_owned();
- if let Some(c) = path.as_bytes().last() && *c != b'/' {
+ if let Some(c) = path.as_bytes().last()
+ && *c != b'/'
+ {
path.push('/');
}
@@ -741,9 +743,10 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
shared.fs.write(scrape_examples_help_file, v)?;
}
- if let Some(ref redirections) = shared.redirections && !redirections.borrow().is_empty() {
- let redirect_map_path =
- self.dst.join(crate_name.as_str()).join("redirect-map.json");
+ if let Some(ref redirections) = shared.redirections
+ && !redirections.borrow().is_empty()
+ {
+ let redirect_map_path = self.dst.join(crate_name.as_str()).join("redirect-map.json");
let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
shared.fs.write(redirect_map_path, paths)?;
@@ -790,7 +793,9 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
}
}
if !self.is_inside_inlined_module {
- if let Some(def_id) = item.def_id() && self.cache().inlined_items.contains(&def_id) {
+ if let Some(def_id) = item.def_id()
+ && self.cache().inlined_items.contains(&def_id)
+ {
self.is_inside_inlined_module = true;
}
} else if !self.cache().document_hidden && item.is_doc_hidden() {
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index c52fa01bd..34350c2ed 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -45,7 +45,6 @@ use std::iter::Peekable;
use std::path::PathBuf;
use std::rc::Rc;
use std::str;
-use std::string::ToString;
use askama::Template;
use rustc_attr::{ConstStability, DeprecatedSince, Deprecation, StabilityLevel, StableSince};
@@ -66,7 +65,7 @@ use crate::clean::{self, ItemId, RenderedLink, SelfTy};
use crate::error::Error;
use crate::formats::cache::Cache;
use crate::formats::item_type::ItemType;
-use crate::formats::{AssocItemRender, Impl, RenderMode};
+use crate::formats::Impl;
use crate::html::escape::Escape;
use crate::html::format::{
display_fn, href, join_with_double_colon, print_abi_with_space, print_constness_with_space,
@@ -89,6 +88,21 @@ pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
})
}
+/// Specifies whether rendering directly implemented trait items or ones from a certain Deref
+/// impl.
+pub(crate) enum AssocItemRender<'a> {
+ All,
+ DerefFor { trait_: &'a clean::Path, type_: &'a clean::Type, deref_mut_: bool },
+}
+
+/// For different handling of associated items from the Deref target of a type rather than the type
+/// itself.
+#[derive(Copy, Clone, PartialEq)]
+pub(crate) enum RenderMode {
+ Normal,
+ ForDeref { mut_: bool },
+}
+
// Helper structs for rendering items/sidebars and carrying along contextual
// information
@@ -113,6 +127,7 @@ pub(crate) struct IndexItem {
pub(crate) struct RenderType {
id: Option<RenderTypeId>,
generics: Option<Vec<RenderType>>,
+ bindings: Option<Vec<(RenderTypeId, Vec<RenderType>)>>,
}
impl Serialize for RenderType {
@@ -129,10 +144,15 @@ impl Serialize for RenderType {
Some(RenderTypeId::Index(idx)) => *idx,
_ => panic!("must convert render types to indexes before serializing"),
};
- if let Some(generics) = &self.generics {
+ if self.generics.is_some() || self.bindings.is_some() {
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&id)?;
- seq.serialize_element(generics)?;
+ seq.serialize_element(self.generics.as_ref().map(Vec::as_slice).unwrap_or_default())?;
+ if self.bindings.is_some() {
+ seq.serialize_element(
+ self.bindings.as_ref().map(Vec::as_slice).unwrap_or_default(),
+ )?;
+ }
seq.end()
} else {
id.serialize(serializer)
@@ -140,13 +160,31 @@ impl Serialize for RenderType {
}
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Copy, Debug)]
pub(crate) enum RenderTypeId {
DefId(DefId),
Primitive(clean::PrimitiveType),
+ AssociatedType(Symbol),
Index(isize),
}
+impl Serialize for RenderTypeId {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let id = match &self {
+ // 0 is a sentinel, everything else is one-indexed
+ // concrete type
+ RenderTypeId::Index(idx) if *idx >= 0 => idx + 1,
+ // generic type parameter
+ RenderTypeId::Index(idx) => *idx,
+ _ => panic!("must convert render types to indexes before serializing"),
+ };
+ id.serialize(serializer)
+ }
+}
+
/// Full type of functions/methods in the search index.
#[derive(Debug)]
pub(crate) struct IndexItemFunctionType {
@@ -171,16 +209,23 @@ impl Serialize for IndexItemFunctionType {
} else {
let mut seq = serializer.serialize_seq(None)?;
match &self.inputs[..] {
- [one] if one.generics.is_none() => seq.serialize_element(one)?,
+ [one] if one.generics.is_none() && one.bindings.is_none() => {
+ seq.serialize_element(one)?
+ }
_ => seq.serialize_element(&self.inputs)?,
}
match &self.output[..] {
[] if self.where_clause.is_empty() => {}
- [one] if one.generics.is_none() => seq.serialize_element(one)?,
+ [one] if one.generics.is_none() && one.bindings.is_none() => {
+ seq.serialize_element(one)?
+ }
_ => seq.serialize_element(&self.output)?,
}
for constraint in &self.where_clause {
- if let [one] = &constraint[..] && one.generics.is_none() {
+ if let [one] = &constraint[..]
+ && one.generics.is_none()
+ && one.bindings.is_none()
+ {
seq.serialize_element(one)?;
} else {
seq.serialize_element(constraint)?;
@@ -627,7 +672,7 @@ fn short_item_info(
format!("Deprecating in {version}")
}
}
- DeprecatedSince::Future => String::from("Deprecating in a future Rust version"),
+ DeprecatedSince::Future => String::from("Deprecating in a future version"),
DeprecatedSince::NonStandard(since) => {
format!("Deprecated since {}", Escape(since.as_str()))
}
@@ -915,7 +960,9 @@ fn render_stability_since_raw_with_extra(
containing_const_ver: Option<StableSince>,
extra_class: &str,
) -> bool {
- let stable_version = if ver != containing_ver && let Some(ver) = &ver {
+ let stable_version = if ver != containing_ver
+ && let Some(ver) = &ver
+ {
since_to_string(ver)
} else {
None
@@ -1097,7 +1144,7 @@ impl<'a> AssocItemLink<'a> {
fn write_impl_section_heading(mut w: impl fmt::Write, title: &str, id: &str) {
write!(
w,
- "<h2 id=\"{id}\" class=\"small-section-header\">\
+ "<h2 id=\"{id}\" class=\"section-header\">\
{title}\
<a href=\"#{id}\" class=\"anchor\">§</a>\
</h2>"
@@ -1348,8 +1395,7 @@ pub(crate) fn notable_traits_button(ty: &clean::Type, cx: &mut Context<'_>) -> O
if let Some(trait_) = &impl_.trait_ {
let trait_did = trait_.def_id();
- if cx.cache().traits.get(&trait_did).map_or(false, |t| t.is_notable_trait(cx.tcx()))
- {
+ if cx.cache().traits.get(&trait_did).is_some_and(|t| t.is_notable_trait(cx.tcx())) {
has_notable_trait = true;
}
}
@@ -1384,7 +1430,7 @@ fn notable_traits_decl(ty: &clean::Type, cx: &Context<'_>) -> (String, String) {
if let Some(trait_) = &impl_.trait_ {
let trait_did = trait_.def_id();
- if cx.cache().traits.get(&trait_did).map_or(false, |t| t.is_notable_trait(cx.tcx())) {
+ if cx.cache().traits.get(&trait_did).is_some_and(|t| t.is_notable_trait(cx.tcx())) {
if out.is_empty() {
write!(
&mut out,
@@ -1394,15 +1440,10 @@ fn notable_traits_decl(ty: &clean::Type, cx: &Context<'_>) -> (String, String) {
);
}
- //use the "where" class here to make it small
- write!(
- &mut out,
- "<span class=\"where fmt-newline\">{}</span>",
- impl_.print(false, cx)
- );
+ write!(&mut out, "<div class=\"where\">{}</div>", impl_.print(false, cx));
for it in &impl_.items {
if let clean::AssocTypeItem(ref tydef, ref _bounds) = *it.kind {
- out.push_str("<span class=\"where fmt-newline\"> ");
+ out.push_str("<div class=\"where\"> ");
let empty_set = FxHashSet::default();
let src_link = AssocItemLink::GotoSource(trait_did.into(), &empty_set);
assoc_type(
@@ -1415,7 +1456,7 @@ fn notable_traits_decl(ty: &clean::Type, cx: &Context<'_>) -> (String, String) {
0,
cx,
);
- out.push_str(";</span>");
+ out.push_str(";</div>");
}
}
}
@@ -1901,7 +1942,7 @@ pub(crate) fn render_impl_summary(
if show_def_docs {
for it in &inner_impl.items {
if let clean::AssocTypeItem(ref tydef, ref _bounds) = *it.kind {
- w.write_str("<span class=\"where fmt-newline\"> ");
+ w.write_str("<div class=\"where\"> ");
assoc_type(
w,
it,
@@ -1912,7 +1953,7 @@ pub(crate) fn render_impl_summary(
0,
cx,
);
- w.write_str(";</span>");
+ w.write_str(";</div>");
}
}
}
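
The new `Serialize` impl for `RenderTypeId` encodes the id scheme its comments describe: concrete types are stored one-indexed so that 0 stays free as a sentinel, while generic type parameters keep the negative ids (-1, -2, ...) assigned during simplification. A minimal standalone sketch of that encoding, not the serializer itself.

    // Concrete types: zero-based index into the paths table, shifted up by one.
    // Generic type parameters: already negative, passed through unchanged.
    fn encode(idx: isize) -> isize {
        if idx >= 0 { idx + 1 } else { idx }
    }

    fn main() {
        assert_eq!(encode(0), 1);   // first entry in the paths table
        assert_eq!(encode(3), 4);
        assert_eq!(encode(-1), -1); // first generic parameter
        assert_eq!(encode(-2), -2);
        // 0 is never produced, so search.js can treat it as a sentinel.
        println!("ok");
    }
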
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index d226701ba..5ca623f01 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -1,5 +1,3 @@
-use clean::AttributesExt;
-
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
@@ -22,12 +20,13 @@ use super::{
item_ty_to_section, notable_traits_button, notable_traits_json, render_all_impls,
render_assoc_item, render_assoc_items, render_attributes_in_code, render_attributes_in_pre,
render_impl, render_rightside, render_stability_since_raw,
- render_stability_since_raw_with_extra, AssocItemLink, Context, ImplRenderingParameters,
+ render_stability_since_raw_with_extra, AssocItemLink, AssocItemRender, Context,
+ ImplRenderingParameters, RenderMode,
};
use crate::clean;
use crate::config::ModuleSorting;
use crate::formats::item_type::ItemType;
-use crate::formats::{AssocItemRender, Impl, RenderMode};
+use crate::formats::Impl;
use crate::html::escape::Escape;
use crate::html::format::{
display_fn, join_with_double_colon, print_abi_with_space, print_constness_with_space,
@@ -369,8 +368,8 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
if let (Some(a), Some(b)) = (s1, s2) {
match (a.is_stable(), b.is_stable()) {
(true, true) | (false, false) => {}
- (false, true) => return Ordering::Less,
- (true, false) => return Ordering::Greater,
+ (false, true) => return Ordering::Greater,
+ (true, false) => return Ordering::Less,
}
}
let lhs = i1.name.unwrap_or(kw::Empty);
@@ -429,7 +428,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
last_section = Some(my_section);
write!(
w,
- "<h2 id=\"{id}\" class=\"small-section-header\">\
+ "<h2 id=\"{id}\" class=\"section-header\">\
<a href=\"#{id}\">{name}</a>\
</h2>{ITEM_TABLE_OPEN}",
id = cx.derive_id(my_section.id()),
@@ -464,16 +463,9 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
clean::ImportItem(ref import) => {
let stab_tags = if let Some(import_def_id) = import.source.did {
- let ast_attrs = tcx.get_attrs_unchecked(import_def_id);
- let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));
-
// Just need an item with the correct def_id and attrs
- let import_item = clean::Item {
- item_id: import_def_id.into(),
- attrs: import_attrs,
- cfg: ast_attrs.cfg(tcx, &cx.cache().hidden_cfg),
- ..myitem.clone()
- };
+ let import_item =
+ clean::Item { item_id: import_def_id.into(), ..myitem.clone() };
let stab_tags = Some(extra_info_tags(&import_item, item, tcx).to_string());
stab_tags
@@ -596,8 +588,10 @@ fn extra_info_tags<'a, 'tcx: 'a>(
// The "rustc_private" crates are permanently unstable so it makes no sense
// to render "unstable" everywhere.
- if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
- == Some(true)
+ if item
+ .stability(tcx)
+ .as_ref()
+ .is_some_and(|s| s.is_unstable() && s.feature != sym::rustc_private)
{
write!(f, "{}", tag_html("unstable", "", "Experimental"))?;
}
@@ -824,7 +818,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
fn write_small_section_header(w: &mut Buffer, id: &str, title: &str, extra_content: &str) {
write!(
w,
- "<h2 id=\"{0}\" class=\"small-section-header\">\
+ "<h2 id=\"{0}\" class=\"section-header\">\
{1}<a href=\"#{0}\" class=\"anchor\">§</a>\
</h2>{2}",
id, title, extra_content
@@ -974,8 +968,9 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
// if any Types with the same name but different DefId have been found.
let mut implementor_dups: FxHashMap<Symbol, (DefId, bool)> = FxHashMap::default();
for implementor in implementors {
- if let Some(did) = implementor.inner_impl().for_.without_borrowed_ref().def_id(cache) &&
- !did.is_local() {
+ if let Some(did) = implementor.inner_impl().for_.without_borrowed_ref().def_id(cache)
+ && !did.is_local()
+ {
extern_crates.insert(did.krate);
}
match implementor.inner_impl().for_.without_borrowed_ref() {
@@ -1152,9 +1147,10 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
.take(cx.current.len())
.chain(std::iter::once("trait.impl"))
.collect();
- if let Some(did) = it.item_id.as_def_id() &&
- let get_extern = { || cache.external_paths.get(&did).map(|s| &s.0) } &&
- let Some(fqp) = cache.exact_paths.get(&did).or_else(get_extern) {
+ if let Some(did) = it.item_id.as_def_id()
+ && let get_extern = { || cache.external_paths.get(&did).map(|s| &s.0) }
+ && let Some(fqp) = cache.exact_paths.get(&did).or_else(get_extern)
+ {
js_src_path.extend(fqp[..fqp.len() - 1].iter().copied());
js_src_path.push_fmt(format_args!("{}.{}.js", it.type_(), fqp.last().unwrap()));
} else {
@@ -1255,7 +1251,7 @@ fn item_type_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &c
if let Some(inner_type) = &t.inner_type {
write!(
w,
- "<h2 id=\"aliased-type\" class=\"small-section-header\">\
+ "<h2 id=\"aliased-type\" class=\"section-header\">\
Aliased Type<a href=\"#aliased-type\" class=\"anchor\">§</a></h2>"
);
@@ -1499,10 +1495,12 @@ fn print_tuple_struct_fields<'a, 'cx: 'a>(
s: &'a [clean::Item],
) -> impl fmt::Display + 'a + Captures<'cx> {
display_fn(|f| {
- if s.iter()
- .all(|field| matches!(*field.kind, clean::StrippedItem(box clean::StructFieldItem(..))))
+ if !s.is_empty()
+ && s.iter().all(|field| {
+ matches!(*field.kind, clean::StrippedItem(box clean::StructFieldItem(..)))
+ })
{
- return f.write_str("/* private fields */");
+ return f.write_str("<span class=\"comment\">/* private fields */</span>");
}
for (i, ty) in s.iter().enumerate() {
@@ -1564,8 +1562,8 @@ fn should_show_enum_discriminant(
) -> bool {
let mut has_variants_with_value = false;
for variant in variants {
- if let clean::VariantItem(ref var) = *variant.kind &&
- matches!(var.kind, clean::VariantKind::CLike)
+ if let clean::VariantItem(ref var) = *variant.kind
+ && matches!(var.kind, clean::VariantKind::CLike)
{
has_variants_with_value |= var.discriminant.is_some();
} else {
@@ -1659,7 +1657,7 @@ fn render_enum_fields(
}
if variants_stripped && !is_non_exhaustive {
- w.write_str(" // some variants omitted\n");
+ w.write_str(" <span class=\"comment\">// some variants omitted</span>\n");
}
if toggle {
toggle_close(&mut w);
@@ -1678,7 +1676,7 @@ fn item_variants(
let tcx = cx.tcx();
write!(
w,
- "<h2 id=\"variants\" class=\"variants small-section-header\">\
+ "<h2 id=\"variants\" class=\"variants section-header\">\
Variants{}<a href=\"#variants\" class=\"anchor\">§</a>\
</h2>\
{}\
@@ -1706,8 +1704,8 @@ fn item_variants(
" rightside",
);
w.write_str("<h3 class=\"code-header\">");
- if let clean::VariantItem(ref var) = *variant.kind &&
- let clean::VariantKind::CLike = var.kind
+ if let clean::VariantItem(ref var) = *variant.kind
+ && let clean::VariantKind::CLike = var.kind
{
display_c_like_variant(
w,
@@ -1730,7 +1728,14 @@ fn item_variants(
w.write_str("</h3></section>");
let heading_and_fields = match &variant_data.kind {
- clean::VariantKind::Struct(s) => Some(("Fields", &s.fields)),
+ clean::VariantKind::Struct(s) => {
+ // If there is no field to display, no need to add the heading.
+ if s.fields.iter().any(|f| !f.is_doc_hidden()) {
+ Some(("Fields", &s.fields))
+ } else {
+ None
+ }
+ }
clean::VariantKind::Tuple(fields) => {
// Documentation on tuple variant fields is rare, so to reduce noise we only emit
// the section if at least one field is documented.
@@ -1765,7 +1770,7 @@ fn item_variants(
write!(
w,
"<div class=\"sub-variant-field\">\
- <span id=\"{id}\" class=\"small-section-header\">\
+ <span id=\"{id}\" class=\"section-header\">\
<a href=\"#{id}\" class=\"anchor field\">§</a>\
<code>{f}: {t}</code>\
</span>",
@@ -1804,7 +1809,8 @@ fn item_proc_macro(
let name = it.name.expect("proc-macros always have names");
match m.kind {
MacroKind::Bang => {
- write!(buffer, "{name}!() {{ /* proc-macro */ }}").unwrap();
+ write!(buffer, "{name}!() {{ <span class=\"comment\">/* proc-macro */</span> }}")
+ .unwrap();
}
MacroKind::Attr => {
write!(buffer, "#[{name}]").unwrap();
@@ -1812,7 +1818,12 @@ fn item_proc_macro(
MacroKind::Derive => {
write!(buffer, "#[derive({name})]").unwrap();
if !m.helpers.is_empty() {
- buffer.write_str("\n{\n // Attributes available to this derive:\n").unwrap();
+ buffer
+ .write_str(
+ "\n{\n \
+ <span class=\"comment\">// Attributes available to this derive:</span>\n",
+ )
+ .unwrap();
for attr in &m.helpers {
writeln!(buffer, " #[{attr}]").unwrap();
}
@@ -1922,7 +1933,7 @@ fn item_fields(
if fields.peek().is_some() {
write!(
w,
- "<h2 id=\"fields\" class=\"fields small-section-header\">\
+ "<h2 id=\"fields\" class=\"fields section-header\">\
{}{}<a href=\"#fields\" class=\"anchor\">§</a>\
</h2>\
{}",
@@ -1936,7 +1947,7 @@ fn item_fields(
let id = cx.derive_id(format!("{typ}.{field_name}", typ = ItemType::StructField));
write!(
w,
- "<span id=\"{id}\" class=\"{item_type} small-section-header\">\
+ "<span id=\"{id}\" class=\"{item_type} section-header\">\
<a href=\"#{id}\" class=\"anchor field\">§</a>\
<code>{field_name}: {ty}</code>\
</span>",
@@ -2174,7 +2185,7 @@ fn render_union<'a, 'cx: 'a>(
}
if it.has_stripped_entries().unwrap() {
- write!(f, " /* private fields */\n")?;
+ write!(f, " <span class=\"comment\">/* private fields */</span>\n")?;
}
if toggle {
toggle_close(&mut f);
@@ -2260,11 +2271,11 @@ fn render_struct_fields(
if has_visible_fields {
if has_stripped_entries {
- write!(w, "\n{tab} /* private fields */");
+ write!(w, "\n{tab} <span class=\"comment\">/* private fields */</span>");
}
write!(w, "\n{tab}");
} else if has_stripped_entries {
- write!(w, " /* private fields */ ");
+ write!(w, " <span class=\"comment\">/* private fields */</span> ");
}
if toggle {
toggle_close(&mut w);
@@ -2273,10 +2284,12 @@ fn render_struct_fields(
}
Some(CtorKind::Fn) => {
w.write_str("(");
- if fields.iter().all(|field| {
- matches!(*field.kind, clean::StrippedItem(box clean::StructFieldItem(..)))
- }) {
- write!(w, "/* private fields */");
+ if !fields.is_empty()
+ && fields.iter().all(|field| {
+ matches!(*field.kind, clean::StrippedItem(box clean::StructFieldItem(..)))
+ })
+ {
+ write!(w, "<span class=\"comment\">/* private fields */</span>");
} else {
for (i, field) in fields.iter().enumerate() {
if i > 0 {
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
index af1dab594..a1029320d 100644
--- a/src/librustdoc/html/render/search_index.rs
+++ b/src/librustdoc/html/render/search_index.rs
@@ -3,8 +3,10 @@ use std::collections::BTreeMap;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_middle::ty::TyCtxt;
+use rustc_span::def_id::DefId;
use rustc_span::symbol::Symbol;
use serde::ser::{Serialize, SerializeSeq, SerializeStruct, Serializer};
+use thin_vec::ThinVec;
use crate::clean;
use crate::clean::types::{Function, Generics, ItemId, Type, WherePredicate};
@@ -22,6 +24,7 @@ pub(crate) fn build_index<'tcx>(
) -> String {
let mut itemid_to_pathid = FxHashMap::default();
let mut primitives = FxHashMap::default();
+ let mut associated_types = FxHashMap::default();
let mut crate_paths = vec![];
// Attach all orphan items to the type's definition if the type
@@ -38,7 +41,13 @@ pub(crate) fn build_index<'tcx>(
parent: Some(parent),
parent_idx: None,
impl_id,
- search_type: get_function_type_for_search(item, tcx, impl_generics.as_ref(), cache),
+ search_type: get_function_type_for_search(
+ item,
+ tcx,
+ impl_generics.as_ref(),
+ Some(parent),
+ cache,
+ ),
aliases: item.attrs.get_doc_aliases(),
deprecation: item.deprecation(tcx),
});
@@ -76,83 +85,139 @@ pub(crate) fn build_index<'tcx>(
let mut search_index = std::mem::replace(&mut cache.search_index, Vec::new());
for item in search_index.iter_mut() {
fn insert_into_map<F: std::hash::Hash + Eq>(
- ty: &mut RenderType,
map: &mut FxHashMap<F, isize>,
itemid: F,
lastpathid: &mut isize,
crate_paths: &mut Vec<(ItemType, Vec<Symbol>)>,
item_type: ItemType,
path: &[Symbol],
- ) {
+ ) -> RenderTypeId {
match map.entry(itemid) {
- Entry::Occupied(entry) => ty.id = Some(RenderTypeId::Index(*entry.get())),
+ Entry::Occupied(entry) => RenderTypeId::Index(*entry.get()),
Entry::Vacant(entry) => {
let pathid = *lastpathid;
entry.insert(pathid);
*lastpathid += 1;
crate_paths.push((item_type, path.to_vec()));
- ty.id = Some(RenderTypeId::Index(pathid));
+ RenderTypeId::Index(pathid)
}
}
}
- fn convert_render_type(
- ty: &mut RenderType,
+ fn convert_render_type_id(
+ id: RenderTypeId,
cache: &mut Cache,
itemid_to_pathid: &mut FxHashMap<ItemId, isize>,
primitives: &mut FxHashMap<Symbol, isize>,
+ associated_types: &mut FxHashMap<Symbol, isize>,
lastpathid: &mut isize,
crate_paths: &mut Vec<(ItemType, Vec<Symbol>)>,
- ) {
- if let Some(generics) = &mut ty.generics {
- for item in generics {
- convert_render_type(
- item,
- cache,
- itemid_to_pathid,
- primitives,
- lastpathid,
- crate_paths,
- );
- }
- }
+ ) -> Option<RenderTypeId> {
let Cache { ref paths, ref external_paths, .. } = *cache;
- let Some(id) = ty.id.clone() else {
- assert!(ty.generics.is_some());
- return;
- };
match id {
RenderTypeId::DefId(defid) => {
if let Some(&(ref fqp, item_type)) =
paths.get(&defid).or_else(|| external_paths.get(&defid))
{
- insert_into_map(
- ty,
+ Some(insert_into_map(
itemid_to_pathid,
ItemId::DefId(defid),
lastpathid,
crate_paths,
item_type,
fqp,
- );
+ ))
} else {
- ty.id = None;
+ None
}
}
RenderTypeId::Primitive(primitive) => {
let sym = primitive.as_sym();
- insert_into_map(
- ty,
+ Some(insert_into_map(
primitives,
sym,
lastpathid,
crate_paths,
ItemType::Primitive,
&[sym],
+ ))
+ }
+ RenderTypeId::Index(_) => Some(id),
+ RenderTypeId::AssociatedType(sym) => Some(insert_into_map(
+ associated_types,
+ sym,
+ lastpathid,
+ crate_paths,
+ ItemType::AssocType,
+ &[sym],
+ )),
+ }
+ }
+
+ fn convert_render_type(
+ ty: &mut RenderType,
+ cache: &mut Cache,
+ itemid_to_pathid: &mut FxHashMap<ItemId, isize>,
+ primitives: &mut FxHashMap<Symbol, isize>,
+ associated_types: &mut FxHashMap<Symbol, isize>,
+ lastpathid: &mut isize,
+ crate_paths: &mut Vec<(ItemType, Vec<Symbol>)>,
+ ) {
+ if let Some(generics) = &mut ty.generics {
+ for item in generics {
+ convert_render_type(
+ item,
+ cache,
+ itemid_to_pathid,
+ primitives,
+ associated_types,
+ lastpathid,
+ crate_paths,
);
}
- RenderTypeId::Index(_) => {}
}
+ if let Some(bindings) = &mut ty.bindings {
+ bindings.retain_mut(|(associated_type, constraints)| {
+ let converted_associated_type = convert_render_type_id(
+ *associated_type,
+ cache,
+ itemid_to_pathid,
+ primitives,
+ associated_types,
+ lastpathid,
+ crate_paths,
+ );
+ let Some(converted_associated_type) = converted_associated_type else {
+ return false;
+ };
+ *associated_type = converted_associated_type;
+ for constraint in constraints {
+ convert_render_type(
+ constraint,
+ cache,
+ itemid_to_pathid,
+ primitives,
+ associated_types,
+ lastpathid,
+ crate_paths,
+ );
+ }
+ true
+ });
+ }
+ let Some(id) = ty.id.clone() else {
+ assert!(ty.generics.is_some());
+ return;
+ };
+ ty.id = convert_render_type_id(
+ id,
+ cache,
+ itemid_to_pathid,
+ primitives,
+ associated_types,
+ lastpathid,
+ crate_paths,
+ );
}
if let Some(search_type) = &mut item.search_type {
for item in &mut search_type.inputs {
@@ -161,6 +226,7 @@ pub(crate) fn build_index<'tcx>(
cache,
&mut itemid_to_pathid,
&mut primitives,
+ &mut associated_types,
&mut lastpathid,
&mut crate_paths,
);
@@ -171,6 +237,7 @@ pub(crate) fn build_index<'tcx>(
cache,
&mut itemid_to_pathid,
&mut primitives,
+ &mut associated_types,
&mut lastpathid,
&mut crate_paths,
);
@@ -182,6 +249,7 @@ pub(crate) fn build_index<'tcx>(
cache,
&mut itemid_to_pathid,
&mut primitives,
+ &mut associated_types,
&mut lastpathid,
&mut crate_paths,
);
@@ -228,10 +296,11 @@ pub(crate) fn build_index<'tcx>(
let mut associated_item_duplicates = FxHashMap::<(isize, ItemType, Symbol), usize>::default();
for &item in &crate_items {
- if item.impl_id.is_some() && let Some(parent_idx) = item.parent_idx {
- let count = associated_item_duplicates
- .entry((parent_idx, item.ty, item.name))
- .or_insert(0);
+ if item.impl_id.is_some()
+ && let Some(parent_idx) = item.parent_idx
+ {
+ let count =
+ associated_item_duplicates.entry((parent_idx, item.ty, item.name)).or_insert(0);
*count += 1;
}
}
@@ -419,7 +488,7 @@ pub(crate) fn build_index<'tcx>(
// Collect the index into a string
format!(
- r#""{}":{}"#,
+ r#"["{}",{}]"#,
krate.name(tcx),
serde_json::to_string(&CrateData {
doc: crate_doc,
@@ -441,12 +510,39 @@ pub(crate) fn get_function_type_for_search<'tcx>(
item: &clean::Item,
tcx: TyCtxt<'tcx>,
impl_generics: Option<&(clean::Type, clean::Generics)>,
+ parent: Option<DefId>,
cache: &Cache,
) -> Option<IndexItemFunctionType> {
+ let mut trait_info = None;
+ let impl_or_trait_generics = impl_generics.or_else(|| {
+ if let Some(def_id) = parent
+ && let Some(trait_) = cache.traits.get(&def_id)
+ && let Some((path, _)) =
+ cache.paths.get(&def_id).or_else(|| cache.external_paths.get(&def_id))
+ {
+ let path = clean::Path {
+ res: rustc_hir::def::Res::Def(rustc_hir::def::DefKind::Trait, def_id),
+ segments: path
+ .iter()
+ .map(|name| clean::PathSegment {
+ name: *name,
+ args: clean::GenericArgs::AngleBracketed {
+ args: Vec::new().into_boxed_slice(),
+ bindings: ThinVec::new(),
+ },
+ })
+ .collect(),
+ };
+ trait_info = Some((clean::Type::Path { path }, trait_.generics.clone()));
+ Some(trait_info.as_ref().unwrap())
+ } else {
+ None
+ }
+ });
let (mut inputs, mut output, where_clause) = match *item.kind {
- clean::FunctionItem(ref f) => get_fn_inputs_and_outputs(f, tcx, impl_generics, cache),
- clean::MethodItem(ref m, _) => get_fn_inputs_and_outputs(m, tcx, impl_generics, cache),
- clean::TyMethodItem(ref m) => get_fn_inputs_and_outputs(m, tcx, impl_generics, cache),
+ clean::FunctionItem(ref f) | clean::MethodItem(ref f, _) | clean::TyMethodItem(ref f) => {
+ get_fn_inputs_and_outputs(f, tcx, impl_or_trait_generics, cache)
+ }
_ => return None,
};
@@ -456,14 +552,23 @@ pub(crate) fn get_function_type_for_search<'tcx>(
Some(IndexItemFunctionType { inputs, output, where_clause })
}
-fn get_index_type(clean_type: &clean::Type, generics: Vec<RenderType>) -> RenderType {
+fn get_index_type(
+ clean_type: &clean::Type,
+ generics: Vec<RenderType>,
+ rgen: &mut FxHashMap<SimplifiedParam, (isize, Vec<RenderType>)>,
+) -> RenderType {
RenderType {
- id: get_index_type_id(clean_type),
+ id: get_index_type_id(clean_type, rgen),
generics: if generics.is_empty() { None } else { Some(generics) },
+ bindings: None,
}
}
-fn get_index_type_id(clean_type: &clean::Type) -> Option<RenderTypeId> {
+fn get_index_type_id(
+ clean_type: &clean::Type,
+ rgen: &mut FxHashMap<SimplifiedParam, (isize, Vec<RenderType>)>,
+) -> Option<RenderTypeId> {
+ use rustc_hir::def::{DefKind, Res};
match *clean_type {
clean::Type::Path { ref path, .. } => Some(RenderTypeId::DefId(path.def_id())),
clean::DynTrait(ref bounds, _) => {
@@ -471,18 +576,27 @@ fn get_index_type_id(clean_type: &clean::Type) -> Option<RenderTypeId> {
}
clean::Primitive(p) => Some(RenderTypeId::Primitive(p)),
clean::BorrowedRef { ref type_, .. } | clean::RawPointer(_, ref type_) => {
- get_index_type_id(type_)
+ get_index_type_id(type_, rgen)
}
// The type parameters are converted to generics in `simplify_fn_type`
clean::Slice(_) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Slice)),
clean::Array(_, _) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Array)),
clean::Tuple(_) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Tuple)),
+ clean::QPath(ref data) => {
+ if data.self_type.is_self_type()
+ && let Some(clean::Path { res: Res::Def(DefKind::Trait, trait_), .. }) = data.trait_
+ {
+ let idx = -isize::try_from(rgen.len() + 1).unwrap();
+ let (idx, _) = rgen
+ .entry(SimplifiedParam::AssociatedType(trait_, data.assoc.name))
+ .or_insert_with(|| (idx, Vec::new()));
+ Some(RenderTypeId::Index(*idx))
+ } else {
+ None
+ }
+ }
// Not supported yet
- clean::BareFunction(_)
- | clean::Generic(_)
- | clean::ImplTrait(_)
- | clean::QPath { .. }
- | clean::Infer => None,
+ clean::BareFunction(_) | clean::Generic(_) | clean::ImplTrait(_) | clean::Infer => None,
}
}
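
The new `clean::QPath` arm gives `<Self as Trait>::Assoc` its own synthetic, negatively indexed type parameter keyed by the trait and the associated type's name, the same trick the later comments describe for `Iterator::next`. A standalone sketch of treating an associated type as an extra type parameter; `Iter`, `Counter` and `next_like` are made up for illustration.

    trait Iter {
        type Item;
        fn next_item(&mut self) -> Option<Self::Item>;
    }

    struct Counter(u8);

    impl Iter for Counter {
        type Item = u8;
        fn next_item(&mut self) -> Option<u8> {
            self.0 = self.0.checked_sub(1)?;
            Some(self.0)
        }
    }

    // The shape the search index pretends such a method has: the associated type
    // becomes a fresh type parameter T, tied to the receiver through `Item = T`.
    fn next_like<I: Iter<Item = T>, T>(it: &mut I) -> Option<T> {
        it.next_item()
    }

    fn main() {
        let mut c = Counter(2);
        assert_eq!(next_like(&mut c), Some(1));
        assert_eq!(c.next_item(), Some(0));
        assert_eq!(c.next_item(), None);
    }
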
@@ -492,6 +606,9 @@ enum SimplifiedParam {
Symbol(Symbol),
// every argument-position impl trait is its own type parameter
Anonymous(isize),
+ // in a trait definition, the associated types are all bound to
+ // their own type parameter
+ AssociatedType(DefId, Symbol),
}
/// The point of this function is to lower generics and types into the simplified form that the
@@ -522,10 +639,17 @@ fn simplify_fn_type<'tcx, 'a>(
}
// First, check if it's "Self".
+ let mut is_self = false;
let mut arg = if let Some(self_) = self_ {
match &*arg {
- Type::BorrowedRef { type_, .. } if type_.is_self_type() => self_,
- type_ if type_.is_self_type() => self_,
+ Type::BorrowedRef { type_, .. } if type_.is_self_type() => {
+ is_self = true;
+ self_
+ }
+ type_ if type_.is_self_type() => {
+ is_self = true;
+ self_
+ }
arg => arg,
}
} else {
@@ -584,11 +708,19 @@ fn simplify_fn_type<'tcx, 'a>(
}
}
if let Some((idx, _)) = rgen.get(&SimplifiedParam::Symbol(arg_s)) {
- res.push(RenderType { id: Some(RenderTypeId::Index(*idx)), generics: None });
+ res.push(RenderType {
+ id: Some(RenderTypeId::Index(*idx)),
+ generics: None,
+ bindings: None,
+ });
} else {
let idx = -isize::try_from(rgen.len() + 1).unwrap();
rgen.insert(SimplifiedParam::Symbol(arg_s), (idx, type_bounds));
- res.push(RenderType { id: Some(RenderTypeId::Index(idx)), generics: None });
+ res.push(RenderType {
+ id: Some(RenderTypeId::Index(idx)),
+ generics: None,
+ bindings: None,
+ });
}
} else if let Type::ImplTrait(ref bounds) = *arg {
let mut type_bounds = Vec::new();
@@ -610,12 +742,16 @@ fn simplify_fn_type<'tcx, 'a>(
}
if is_return && !type_bounds.is_empty() {
// In parameter position, `impl Trait` is a unique thing.
- res.push(RenderType { id: None, generics: Some(type_bounds) });
+ res.push(RenderType { id: None, generics: Some(type_bounds), bindings: None });
} else {
// In parameter position, `impl Trait` is the same as an unnamed generic parameter.
let idx = -isize::try_from(rgen.len() + 1).unwrap();
rgen.insert(SimplifiedParam::Anonymous(idx), (idx, type_bounds));
- res.push(RenderType { id: Some(RenderTypeId::Index(idx)), generics: None });
+ res.push(RenderType {
+ id: Some(RenderTypeId::Index(idx)),
+ generics: None,
+ bindings: None,
+ });
}
} else if let Type::Slice(ref ty) = *arg {
let mut ty_generics = Vec::new();
@@ -630,7 +766,7 @@ fn simplify_fn_type<'tcx, 'a>(
is_return,
cache,
);
- res.push(get_index_type(arg, ty_generics));
+ res.push(get_index_type(arg, ty_generics, rgen));
} else if let Type::Array(ref ty, _) = *arg {
let mut ty_generics = Vec::new();
simplify_fn_type(
@@ -644,7 +780,7 @@ fn simplify_fn_type<'tcx, 'a>(
is_return,
cache,
);
- res.push(get_index_type(arg, ty_generics));
+ res.push(get_index_type(arg, ty_generics, rgen));
} else if let Type::Tuple(ref tys) = *arg {
let mut ty_generics = Vec::new();
for ty in tys {
@@ -660,7 +796,7 @@ fn simplify_fn_type<'tcx, 'a>(
cache,
);
}
- res.push(get_index_type(arg, ty_generics));
+ res.push(get_index_type(arg, ty_generics, rgen));
} else {
// This is not a type parameter. So for example if we have `T, U: Option<T>`, and we're
// looking at `Option`, we enter this "else" condition, otherwise if it's `T`, we don't.
@@ -668,12 +804,16 @@ fn simplify_fn_type<'tcx, 'a>(
// So in here, we can add it directly and look for its own type parameters (so for `Option`,
// we will look for them but not for `T`).
let mut ty_generics = Vec::new();
- if let Some(arg_generics) = arg.generics() {
- for gen in arg_generics.iter() {
+ let mut ty_bindings = Vec::new();
+ if let Some(arg_generics) = arg.generic_args() {
+ for ty in arg_generics.into_iter().filter_map(|gen| match gen {
+ clean::GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ }) {
simplify_fn_type(
self_,
generics,
- gen,
+ &ty,
tcx,
recurse + 1,
&mut ty_generics,
@@ -682,17 +822,180 @@ fn simplify_fn_type<'tcx, 'a>(
cache,
);
}
+ for binding in arg_generics.bindings() {
+ simplify_fn_binding(
+ self_,
+ generics,
+ &binding,
+ tcx,
+ recurse + 1,
+ &mut ty_bindings,
+ rgen,
+ is_return,
+ cache,
+ );
+ }
+ }
+ // Every trait associated type on self gets assigned to a type parameter index
+ // this same one is used later for any appearances of these types
+ //
+ // for example, Iterator::next is:
+ //
+ // trait Iterator {
+ // fn next(&mut self) -> Option<Self::Item>
+ // }
+ //
+ // Self is technically just Iterator, but we want to pretend it's more like this:
+ //
+ // fn next<T>(self: Iterator<Item=T>) -> Option<T>
+ if is_self
+ && let Type::Path { path } = arg
+ && let def_id = path.def_id()
+ && let Some(trait_) = cache.traits.get(&def_id)
+ && trait_.items.iter().any(|at| at.is_ty_associated_type())
+ {
+ for assoc_ty in &trait_.items {
+ if let clean::ItemKind::TyAssocTypeItem(_generics, bounds) = &*assoc_ty.kind
+ && let Some(name) = assoc_ty.name
+ {
+ let idx = -isize::try_from(rgen.len() + 1).unwrap();
+ let (idx, stored_bounds) = rgen
+ .entry(SimplifiedParam::AssociatedType(def_id, name))
+ .or_insert_with(|| (idx, Vec::new()));
+ let idx = *idx;
+ if stored_bounds.is_empty() {
+ // Can't just pass stored_bounds to simplify_fn_type,
+ // because it also accepts rgen as a parameter.
+ // Instead, have it fill in this local, then copy it into the map afterward.
+ let mut type_bounds = Vec::new();
+ for bound in bounds {
+ if let Some(path) = bound.get_trait_path() {
+ let ty = Type::Path { path };
+ simplify_fn_type(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut type_bounds,
+ rgen,
+ is_return,
+ cache,
+ );
+ }
+ }
+ let stored_bounds = &mut rgen
+ .get_mut(&SimplifiedParam::AssociatedType(def_id, name))
+ .unwrap()
+ .1;
+ if stored_bounds.is_empty() {
+ *stored_bounds = type_bounds;
+ }
+ }
+ ty_bindings.push((
+ RenderTypeId::AssociatedType(name),
+ vec![RenderType {
+ id: Some(RenderTypeId::Index(idx)),
+ generics: None,
+ bindings: None,
+ }],
+ ))
+ }
+ }
}
- let id = get_index_type_id(&arg);
+ let id = get_index_type_id(&arg, rgen);
if id.is_some() || !ty_generics.is_empty() {
res.push(RenderType {
id,
+ bindings: if ty_bindings.is_empty() { None } else { Some(ty_bindings) },
generics: if ty_generics.is_empty() { None } else { Some(ty_generics) },
});
}
}
}
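+/// Simplify an associated type binding (the `Item = u32` in a bound like
+/// `Iterator<Item = u32>`) into the `(RenderTypeId, Vec<RenderType>)` pairs
+/// stored in the search index.
+///
+/// Illustrative example: for `impl Iterator<Item = u32>`, this pushes
+/// roughly `(AssociatedType(Item), [u32])` onto `res`.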
+fn simplify_fn_binding<'tcx, 'a>(
+ self_: Option<&'a Type>,
+ generics: &Generics,
+ binding: &'a clean::TypeBinding,
+ tcx: TyCtxt<'tcx>,
+ recurse: usize,
+ res: &mut Vec<(RenderTypeId, Vec<RenderType>)>,
+ rgen: &mut FxHashMap<SimplifiedParam, (isize, Vec<RenderType>)>,
+ is_return: bool,
+ cache: &Cache,
+) {
+ let mut ty_binding_constraints = Vec::new();
+ let ty_binding_assoc = RenderTypeId::AssociatedType(binding.assoc.name);
+ for gen in &binding.assoc.args {
+ match gen {
+ clean::GenericArg::Type(arg) => simplify_fn_type(
+ self_,
+ generics,
+ &arg,
+ tcx,
+ recurse + 1,
+ &mut ty_binding_constraints,
+ rgen,
+ is_return,
+ cache,
+ ),
+ clean::GenericArg::Lifetime(_)
+ | clean::GenericArg::Const(_)
+ | clean::GenericArg::Infer => {}
+ }
+ }
+ for binding in binding.assoc.args.bindings() {
+ simplify_fn_binding(
+ self_,
+ generics,
+ &binding,
+ tcx,
+ recurse + 1,
+ res,
+ rgen,
+ is_return,
+ cache,
+ );
+ }
+ match &binding.kind {
+ clean::TypeBindingKind::Equality { term } => {
+ if let clean::Term::Type(arg) = &term {
+ simplify_fn_type(
+ self_,
+ generics,
+ arg,
+ tcx,
+ recurse + 1,
+ &mut ty_binding_constraints,
+ rgen,
+ is_return,
+ cache,
+ );
+ }
+ }
+ clean::TypeBindingKind::Constraint { bounds } => {
+ for bound in &bounds[..] {
+ if let Some(path) = bound.get_trait_path() {
+ let ty = Type::Path { path };
+ simplify_fn_type(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_binding_constraints,
+ rgen,
+ is_return,
+ cache,
+ );
+ }
+ }
+ }
+ }
+ res.push((ty_binding_assoc, ty_binding_constraints));
+}
+
/// Return the full list of types when bounds have been resolved.
///
/// i.e. `fn foo<A: Display, B: Option<A>>(x: u32, y: B)` will return
@@ -700,13 +1003,15 @@ fn simplify_fn_type<'tcx, 'a>(
fn get_fn_inputs_and_outputs<'tcx>(
func: &Function,
tcx: TyCtxt<'tcx>,
- impl_generics: Option<&(clean::Type, clean::Generics)>,
+ impl_or_trait_generics: Option<&(clean::Type, clean::Generics)>,
cache: &Cache,
) -> (Vec<RenderType>, Vec<RenderType>, Vec<Vec<RenderType>>) {
let decl = &func.decl;
+ let mut rgen: FxHashMap<SimplifiedParam, (isize, Vec<RenderType>)> = Default::default();
+
let combined_generics;
- let (self_, generics) = if let Some((impl_self, impl_generics)) = impl_generics {
+ let (self_, generics) = if let Some((impl_self, impl_generics)) = impl_or_trait_generics {
match (impl_generics.is_empty(), func.generics.is_empty()) {
(true, _) => (Some(impl_self), &func.generics),
(_, true) => (Some(impl_self), impl_generics),
@@ -728,8 +1033,6 @@ fn get_fn_inputs_and_outputs<'tcx>(
(None, &func.generics)
};
- let mut rgen: FxHashMap<SimplifiedParam, (isize, Vec<RenderType>)> = Default::default();
-
let mut arg_types = Vec::new();
for arg in decl.inputs.values.iter() {
simplify_fn_type(
diff --git a/src/librustdoc/html/render/sidebar.rs b/src/librustdoc/html/render/sidebar.rs
index ba4aaaff5..3d28937eb 100644
--- a/src/librustdoc/html/render/sidebar.rs
+++ b/src/librustdoc/html/render/sidebar.rs
@@ -435,9 +435,9 @@ fn sidebar_deref_methods<'a>(
}
// Recurse into any further impls that might exist for `target`
- if let Some(target_did) = target.def_id(c) &&
- let Some(target_impls) = c.impls.get(&target_did) &&
- let Some(target_deref_impl) = target_impls.iter().find(|i| {
+ if let Some(target_did) = target.def_id(c)
+ && let Some(target_impls) = c.impls.get(&target_did)
+ && let Some(target_deref_impl) = target_impls.iter().find(|i| {
i.inner_impl()
.trait_
.as_ref()
@@ -445,14 +445,7 @@ fn sidebar_deref_methods<'a>(
.unwrap_or(false)
})
{
- sidebar_deref_methods(
- cx,
- out,
- target_deref_impl,
- target_impls,
- derefs,
- used_links,
- );
+ sidebar_deref_methods(cx, out, target_deref_impl, target_impls, derefs, used_links);
}
}
}
@@ -494,8 +487,13 @@ fn sidebar_module(items: &[clean::Item]) -> LinkBlock<'static> {
&& it
.name
.or_else(|| {
- if let clean::ImportItem(ref i) = *it.kind &&
- let clean::ImportKind::Simple(s) = i.kind { Some(s) } else { None }
+ if let clean::ImportItem(ref i) = *it.kind
+ && let clean::ImportKind::Simple(s) = i.kind
+ {
+ Some(s)
+ } else {
+ None
+ }
})
.is_some()
})
diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs
index 5f130f187..d1ece7337 100644
--- a/src/librustdoc/html/render/span_map.rs
+++ b/src/librustdoc/html/render/span_map.rs
@@ -94,7 +94,7 @@ impl<'tcx> SpanMapVisitor<'tcx> {
/// Used to generate links on items' definition to go to their documentation page.
pub(crate) fn extract_info_from_hir_id(&mut self, hir_id: HirId) {
- if let Some(Node::Item(item)) = self.tcx.hir().find(hir_id) {
+ if let Some(Node::Item(item)) = self.tcx.opt_hir_node(hir_id) {
if let Some(span) = self.tcx.def_ident_span(item.owner_id) {
let cspan = clean::Span::new(span);
// If the span isn't from the current crate, we ignore it.
@@ -177,7 +177,7 @@ impl<'tcx> Visitor<'tcx> for SpanMapVisitor<'tcx> {
if !span.overlaps(m.spans.inner_span) {
// Now that we confirmed it's a file import, we want to get the span for the module
// name only and not all the "mod foo;".
- if let Some(Node::Item(item)) = self.tcx.hir().find(id) {
+ if let Some(Node::Item(item)) = self.tcx.opt_hir_node(id) {
self.matches.insert(
item.ident.span,
LinkFromSrc::Local(clean::Span::new(m.spans.inner_span)),
diff --git a/src/librustdoc/html/render/type_layout.rs b/src/librustdoc/html/render/type_layout.rs
index 377daaeb9..738ea0aee 100644
--- a/src/librustdoc/html/render/type_layout.rs
+++ b/src/librustdoc/html/render/type_layout.rs
@@ -42,37 +42,35 @@ pub(crate) fn document_type_layout<'a, 'cx: 'a>(
let ty = tcx.type_of(ty_def_id).instantiate_identity();
let type_layout = tcx.layout_of(param_env.and(ty));
- let variants =
- if let Ok(type_layout) = type_layout &&
- let Variants::Multiple { variants, tag, tag_encoding, .. } =
- type_layout.layout.variants() &&
- !variants.is_empty()
- {
- let tag_size =
- if let TagEncoding::Niche { .. } = tag_encoding {
- 0
- } else if let Primitive::Int(i, _) = tag.primitive() {
- i.size().bytes()
- } else {
- span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
- };
- variants
- .iter_enumerated()
- .map(|(variant_idx, variant_layout)| {
- let Adt(adt, _) = type_layout.ty.kind() else {
- span_bug!(tcx.def_span(ty_def_id), "not an adt")
- };
- let name = adt.variant(variant_idx).name;
- let is_unsized = variant_layout.abi.is_unsized();
- let is_uninhabited = variant_layout.abi.is_uninhabited();
- let size = variant_layout.size.bytes() - tag_size;
- let type_layout_size = TypeLayoutSize { is_unsized, is_uninhabited, size };
- (name, type_layout_size)
- })
- .collect()
+ let variants = if let Ok(type_layout) = type_layout
+ && let Variants::Multiple { variants, tag, tag_encoding, .. } =
+ type_layout.layout.variants()
+ && !variants.is_empty()
+ {
+ let tag_size = if let TagEncoding::Niche { .. } = tag_encoding {
+ 0
+ } else if let Primitive::Int(i, _) = tag.primitive() {
+ i.size().bytes()
} else {
- Vec::new()
+ span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
};
+ variants
+ .iter_enumerated()
+ .map(|(variant_idx, variant_layout)| {
+ let Adt(adt, _) = type_layout.ty.kind() else {
+ span_bug!(tcx.def_span(ty_def_id), "not an adt")
+ };
+ let name = adt.variant(variant_idx).name;
+ let is_unsized = variant_layout.abi.is_unsized();
+ let is_uninhabited = variant_layout.abi.is_uninhabited();
+ let size = variant_layout.size.bytes() - tag_size;
+ let type_layout_size = TypeLayoutSize { is_unsized, is_uninhabited, size };
+ (name, type_layout_size)
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
let type_layout_size = tcx.layout_of(param_env.and(ty)).map(|layout| {
let is_unsized = layout.abi.is_unsized();
@@ -81,6 +79,7 @@ pub(crate) fn document_type_layout<'a, 'cx: 'a>(
TypeLayoutSize { is_unsized, is_uninhabited, size }
});
- Ok(TypeLayout { variants, type_layout_size }.render_into(f).unwrap())
+ TypeLayout { variants, type_layout_size }.render_into(f).unwrap();
+ Ok(())
})
}
diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs
index d2c7c578c..6408e97df 100644
--- a/src/librustdoc/html/render/write_shared.rs
+++ b/src/librustdoc/html/render/write_shared.rs
@@ -15,14 +15,14 @@ use rustc_span::Symbol;
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
-use super::{collect_paths_for_type, ensure_trailing_slash, Context};
+use super::{collect_paths_for_type, ensure_trailing_slash, Context, RenderMode};
use crate::clean::{Crate, Item, ItemId, ItemKind};
use crate::config::{EmitType, RenderOptions};
use crate::docfs::PathError;
use crate::error::Error;
use crate::formats::cache::Cache;
use crate::formats::item_type::ItemType;
-use crate::formats::{Impl, RenderMode};
+use crate::formats::Impl;
use crate::html::format::Buffer;
use crate::html::render::{AssocItemLink, ImplRenderingParameters};
use crate::html::{layout, static_files};
@@ -167,23 +167,24 @@ pub(super) fn write_shared(
let mut krates = Vec::new();
if path.exists() {
- let prefix = format!("\"{krate}\"");
+ let prefix = format!("[\"{krate}\"");
for line in BufReader::new(File::open(path)?).lines() {
let line = line?;
- if !line.starts_with('"') {
+ if !line.starts_with("[\"") {
continue;
}
if line.starts_with(&prefix) {
continue;
}
- if line.ends_with(",\\") {
+ if line.ends_with("],\\") {
ret.push(line[..line.len() - 2].to_string());
} else {
// Ends with "\\" (it's the case for the last added crate line)
ret.push(line[..line.len() - 1].to_string());
}
krates.push(
- line.split('"')
+ line[1..] // We skip the `[` at the beginning of the line.
+ .split('"')
.find(|s| !s.is_empty())
.map(|s| s.to_owned())
.unwrap_or_else(String::new),
@@ -285,7 +286,7 @@ pub(super) fn write_shared(
let (mut all_sources, _krates) =
try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
all_sources.push(format!(
- r#""{}":{}"#,
+ r#"["{}",{}]"#,
&krate.name(cx.tcx()),
hierarchy
.to_json_string()
@@ -296,9 +297,12 @@ pub(super) fn write_shared(
.replace("\\\"", "\\\\\"")
));
all_sources.sort();
- let mut v = String::from("var srcIndex = JSON.parse('{\\\n");
+ // This needs to be `var`, not `const`.
+ // This variable needs to be declared in the current global scope so that if
+ // src-script.js loads first, it can pick it up.
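+ // The generated file ends up looking roughly like this (illustrative):
+ //
+ //     var srcIndex = new Map(JSON.parse('[\
+ //     ["crate_a",[...]],\
+ //     ["crate_b",[...]]\
+ //     ]'));
+ //     createSrcSidebar();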
+ let mut v = String::from("var srcIndex = new Map(JSON.parse('[\\\n");
v.push_str(&all_sources.join(",\\\n"));
- v.push_str("\\\n}');\ncreateSrcSidebar();\n");
+ v.push_str("\\\n]'));\ncreateSrcSidebar();\n");
Ok(v.into_bytes())
};
write_invocation_specific("src-files.js", &make_sources)?;
@@ -316,13 +320,16 @@ pub(super) fn write_shared(
// with rustdoc running in parallel.
all_indexes.sort();
write_invocation_specific("search-index.js", &|| {
- let mut v = String::from("var searchIndex = JSON.parse('{\\\n");
+ // This needs to be `var`, not `const`.
+ // This variable needs to be declared in the current global scope so that if
+ // search.js loads first, it can pick it up.
+ let mut v = String::from("var searchIndex = new Map(JSON.parse('[\\\n");
v.push_str(&all_indexes.join(",\\\n"));
v.push_str(
r#"\
-}');
-if (typeof window !== 'undefined' && window.initSearch) {window.initSearch(searchIndex)};
-if (typeof exports !== 'undefined') {exports.searchIndex = searchIndex};
+]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);
"#,
);
Ok(v.into_bytes())
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index ce620c226..e160ec12f 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -90,7 +90,9 @@ impl LocalSourcesCollector<'_, '_> {
);
let mut href = href.into_inner().to_string_lossy().into_owned();
- if let Some(c) = href.as_bytes().last() && *c != b'/' {
+ if let Some(c) = href.as_bytes().last()
+ && *c != b'/'
+ {
href.push('/');
}
let mut src_fname = p.file_name().expect("source has no filename").to_os_string();
@@ -212,7 +214,9 @@ impl SourceCollector<'_, '_> {
let root_path = PathBuf::from("../../").join(root_path.into_inner());
let mut root_path = root_path.to_string_lossy();
- if let Some(c) = root_path.as_bytes().last() && *c != b'/' {
+ if let Some(c) = root_path.as_bytes().last()
+ && *c != b'/'
+ {
root_path += "/";
}
let mut cur = self.dst.join(cur.into_inner());
diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css
index fe0cf6dc8..390e81277 100644
--- a/src/librustdoc/html/static/css/noscript.css
+++ b/src/librustdoc/html/static/css/noscript.css
@@ -9,7 +9,7 @@ rules.
margin-left: 0 !important;
}
-#copy-path {
+#copy-path, #sidebar-button, .sidebar-resizer {
/* It requires JS to work so no need to display it in this case. */
display: none;
}
@@ -132,6 +132,8 @@ nav.sub {
--scrape-example-help-hover-color: #000;
--scrape-example-code-wrapper-background-start: rgba(255, 255, 255, 1);
--scrape-example-code-wrapper-background-end: rgba(255, 255, 255, 0);
+ --sidebar-resizer-hover: hsl(207, 90%, 66%);
+ --sidebar-resizer-active: hsl(207, 90%, 54%);
}
/* End theme: light */
@@ -238,6 +240,8 @@ nav.sub {
--scrape-example-help-hover-color: #fff;
--scrape-example-code-wrapper-background-start: rgba(53, 53, 53, 1);
--scrape-example-code-wrapper-background-end: rgba(53, 53, 53, 0);
+ --sidebar-resizer-hover: hsl(207, 30%, 54%);
+ --sidebar-resizer-active: hsl(207, 90%, 54%);
}
/* End theme: dark */
}
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
index 9efdcd601..c4e97ded1 100644
--- a/src/librustdoc/html/static/css/rustdoc.css
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -9,6 +9,12 @@
:root {
--nav-sub-mobile-padding: 8px;
--search-typename-width: 6.75rem;
+ /* DEFAULT_SIDEBAR_WIDTH
+ see main.js for information on these values
+ and on the RUSTDOC_MOBILE_BREAKPOINT */
+ --desktop-sidebar-width: 200px;
+ --src-sidebar-width: 300px;
+ --desktop-sidebar-z-index: 100;
}
/* See FiraSans-LICENSE.txt for the Fira Sans license. */
@@ -205,7 +211,7 @@ ul.all-items {
#toggle-all-docs,
a.anchor,
-.small-section-header a,
+.section-header a,
#src-sidebar a,
.rust a,
.sidebar h2 a,
@@ -383,13 +389,15 @@ img {
.sidebar {
font-size: 0.875rem;
- flex: 0 0 200px;
+ flex: 0 0 var(--desktop-sidebar-width);
+ width: var(--desktop-sidebar-width);
overflow-y: scroll;
overscroll-behavior: contain;
position: sticky;
height: 100vh;
top: 0;
left: 0;
+ z-index: var(--desktop-sidebar-z-index);
}
.rustdoc.src .sidebar {
@@ -398,7 +406,94 @@ img {
overflow-x: hidden;
/* The sidebar is by default hidden */
overflow-y: hidden;
- z-index: 1;
+}
+
+.hide-sidebar .sidebar,
+.hide-sidebar .sidebar-resizer {
+ display: none;
+}
+
+.sidebar-resizer {
+ touch-action: none;
+ width: 9px;
+ cursor: col-resize;
+ z-index: calc(var(--desktop-sidebar-z-index) + 1);
+ position: fixed;
+ height: 100%;
+ /* make sure there's a 1px gap between the scrollbar and resize handle */
+ left: calc(var(--desktop-sidebar-width) + 1px);
+}
+
+.rustdoc.src .sidebar-resizer {
+ /* when closed, place resizer glow on top of the normal src sidebar border (no need to worry
+ about sidebar) */
+ left: 49px;
+}
+
+.src-sidebar-expanded .rustdoc.src .sidebar-resizer {
+ /* for src sidebar, gap is already provided by 1px border on sidebar itself, so place resizer
+ to right of it */
+ left: var(--src-sidebar-width);
+}
+
+.sidebar-resizing {
+ -moz-user-select: none;
+ -webkit-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+
+.sidebar-resizing * {
+ cursor: col-resize !important;
+}
+
+.sidebar-resizing .sidebar {
+ position: fixed;
+}
+.sidebar-resizing > body {
+ padding-left: var(--resizing-sidebar-width);
+}
+
+.sidebar-resizer:hover,
+.sidebar-resizer:active,
+.sidebar-resizer:focus,
+.sidebar-resizer.active {
+ width: 10px;
+ margin: 0;
+ /* when active or hovered, place resizer glow on top of the sidebar (right next to, or even
+ on top of, the scrollbar) */
+ left: var(--desktop-sidebar-width);
+ border-left: solid 1px var(--sidebar-resizer-hover);
+}
+
+.src-sidebar-expanded .rustdoc.src .sidebar-resizer:hover,
+.src-sidebar-expanded .rustdoc.src .sidebar-resizer:active,
+.src-sidebar-expanded .rustdoc.src .sidebar-resizer:focus,
+.src-sidebar-expanded .rustdoc.src .sidebar-resizer.active {
+ /* when active or hovered, place resizer glow on top of the normal src sidebar border */
+ left: calc(var(--src-sidebar-width) - 1px);
+}
+
+@media (pointer: coarse) {
+ .sidebar-resizer {
+ /* too easy to hit the resizer while trying to hit the [-] toggle */
+ display: none !important;
+ }
+}
+
+.sidebar-resizer.active {
+ /* make the resize tool bigger when actually resizing, to avoid :hover styles on other stuff
+ while resizing */
+ padding: 0 140px;
+ width: 2px;
+ margin-left: -140px;
+ border-left: none;
+}
+.sidebar-resizer.active:before {
+ border-left: solid 2px var(--sidebar-resizer-active);
+ display: block;
+ height: 100%;
+ content: "";
}
.sidebar, .mobile-topbar, .sidebar-menu-toggle,
@@ -416,7 +511,8 @@ img {
.src-sidebar-expanded .src .sidebar {
overflow-y: auto;
- flex-basis: 300px;
+ flex-basis: var(--src-sidebar-width);
+ width: var(--src-sidebar-width);
}
.src-sidebar-expanded .src .sidebar > *:not(#src-sidebar-toggle) {
@@ -477,6 +573,7 @@ ul.block, .block li {
display: block;
padding: 0.25rem; /* 4px */
margin-left: -0.25rem;
+ margin-right: 0.25rem;
}
.sidebar h2 {
@@ -703,11 +800,8 @@ pre, .rustdoc.src .example-wrap {
background: var(--table-alt-row-background-color);
}
-/* Shift "where ..." part of method or fn definition down a line */
-.method .where,
-.fn .where,
-.where.fmt-newline {
- display: block;
+/* "where ..." clauses with block display are also smaller */
+div.where {
white-space: pre-wrap;
font-size: 0.875rem;
}
@@ -742,13 +836,13 @@ nav.sub {
margin: 0 0 15px 0;
}
-.small-section-header {
+.section-header {
/* fields use <span> tags, but should get their own lines */
display: block;
position: relative;
}
-.small-section-header:hover > .anchor, .impl:hover > .anchor,
+.section-header:hover > .anchor, .impl:hover > .anchor,
.trait-impl:hover > .anchor, .variant:hover > .anchor {
display: initial;
}
@@ -761,11 +855,11 @@ nav.sub {
.anchor.field {
left: -5px;
}
-.small-section-header > .anchor {
+.section-header > .anchor {
left: -15px;
padding-right: 8px;
}
-h2.small-section-header > .anchor {
+h2.section-header > .anchor {
padding-right: 6px;
}
@@ -778,7 +872,7 @@ h2.small-section-header > .anchor {
text-decoration: underline;
}
-.crate.block a.current { font-weight: 500; }
+.crate.block li.current a { font-weight: 500; }
/* In most contexts we use `overflow-wrap: anywhere` to ensure that we can wrap
as much as needed on mobile (see
@@ -965,7 +1059,7 @@ so that we can apply CSS-filters to change the arrow color in themes */
position: absolute;
top: 100%;
right: 0;
- z-index: 2;
+ z-index: calc(var(--desktop-sidebar-z-index) + 1);
margin-top: 7px;
border-radius: 3px;
border: 1px solid var(--border-color);
@@ -1084,15 +1178,9 @@ so that we can apply CSS-filters to change the arrow color in themes */
}
.item-info .stab {
- /* This min-height is needed to unify the height of the stab elements because some of them
- have emojis.
- */
- min-height: 36px;
- display: flex;
+ display: block;
padding: 3px;
margin-bottom: 5px;
- align-items: center;
- vertical-align: text-bottom;
}
.item-name .stab {
margin-left: 0.3125em;
@@ -1115,17 +1203,26 @@ so that we can apply CSS-filters to change the arrow color in themes */
color: var(--stab-code-color);
}
-.stab .emoji {
+.stab .emoji, .item-info .stab::before {
font-size: 1.25rem;
+}
+.stab .emoji {
margin-right: 0.3rem;
}
+.item-info .stab::before {
+ /* ensure badges with and without emoji have the same height */
+ content: "\0";
+ width: 0;
+ display: inline-block;
+ color: transparent;
+}
/* Black one-pixel outline around emoji shapes */
.emoji {
text-shadow:
1px 0 0 black,
-1px 0 0 black,
- 0 1px 0 black,
+ 0 1px 0 black,
0 -1px 0 black;
}
@@ -1478,7 +1575,20 @@ a.tooltip:hover::after {
margin-left: 4px;
display: flex;
}
-#settings-menu > a, #help-button > a {
+#sidebar-button {
+ display: none;
+}
+.hide-sidebar #sidebar-button {
+ display: flex;
+ margin-right: 4px;
+ position: fixed;
+ left: 6px;
+ height: 34px;
+ width: 34px;
+ background-color: var(--main-background-color);
+ z-index: 1;
+}
+#settings-menu > a, #help-button > a, #sidebar-button > a {
display: flex;
align-items: center;
justify-content: center;
@@ -1493,10 +1603,21 @@ a.tooltip:hover::after {
}
#settings-menu > a:hover, #settings-menu > a:focus,
-#help-button > a:hover, #help-button > a:focus {
+#help-button > a:hover, #help-button > a:focus,
+#sidebar-button > a:hover, #sidebar-button > a:focus {
border-color: var(--settings-button-border-focus);
}
+#sidebar-button > a:before {
+ content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 22 22" \
+ fill="none" stroke="black">\
+ <rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5"/>\
+ <circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
+ <path d="m7.6121 3v16 M5.375 7.625h-2 m2 3h-2 m2 3h-2" stroke-width="1.25"/></svg>');
+ width: 22px;
+ height: 22px;
+}
+
#copy-path {
color: var(--copy-path-button-color);
background: var(--main-background-color);
@@ -1711,7 +1832,7 @@ However, it's not needed with smaller screen width because the doc/code block is
/*
WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
If you update this line, then you also need to update the line with the same warning
-in src-script.js
+in src-script.js and main.js
*/
@media (max-width: 700px) {
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
@@ -1722,6 +1843,10 @@ in src-script.js
scroll-margin-top: 45px;
}
+ .hide-sidebar #sidebar-button {
+ position: static;
+ }
+
.rustdoc {
/* Sidebar should overlay main content, rather than pushing main content to the right.
Turn off `display: flex` on the body element. */
@@ -1750,7 +1875,8 @@ in src-script.js
/* Hide the logo and item name from the sidebar. Those are displayed
in the mobile-topbar instead. */
.sidebar .logo-container,
- .sidebar .location {
+ .sidebar .location,
+ .sidebar-resizer {
display: none;
}
@@ -1818,6 +1944,10 @@ in src-script.js
top: 0;
}
+ .hide-sidebar .mobile-topbar {
+ display: none;
+ }
+
.sidebar-menu-toggle {
width: 45px;
/* Rare exception to specifying font sizes in rem. Since this is acting
@@ -1827,6 +1957,10 @@ in src-script.js
color: var(--main-color);
}
+ .hide-sidebar .sidebar-menu-toggle {
+ display: none;
+ }
+
.sidebar-elems {
margin-top: 1em;
}
@@ -1870,6 +2004,17 @@ in src-script.js
display: none;
}
+ /* sidebar button becomes topbar button */
+ #sidebar-button > a:before {
+ content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
+ viewBox="0 0 22 22" fill="none" stroke="black">\
+ <rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5"/>\
+ <circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
+ <path d="m3 7.375h16m0-3h-4" stroke-width="1.25"/></svg>');
+ width: 22px;
+ height: 22px;
+ }
+
/* Display an alternating layout on tablets and phones */
.item-table, .item-row, .item-table > li, .item-table > li > div,
.search-results > a, .search-results > a > div {
@@ -2274,6 +2419,8 @@ in src-script.js
--scrape-example-help-hover-color: #000;
--scrape-example-code-wrapper-background-start: rgba(255, 255, 255, 1);
--scrape-example-code-wrapper-background-end: rgba(255, 255, 255, 0);
+ --sidebar-resizer-hover: hsl(207, 90%, 66%);
+ --sidebar-resizer-active: hsl(207, 90%, 54%);
}
/* End theme: light */
@@ -2379,6 +2526,8 @@ in src-script.js
--scrape-example-help-hover-color: #fff;
--scrape-example-code-wrapper-background-start: rgba(53, 53, 53, 1);
--scrape-example-code-wrapper-background-end: rgba(53, 53, 53, 0);
+ --sidebar-resizer-hover: hsl(207, 30%, 54%);
+ --sidebar-resizer-active: hsl(207, 90%, 54%);
}
/* End theme: dark */
@@ -2488,6 +2637,8 @@ Original by Dempfi (https://github.com/dempfi/ayu)
--scrape-example-help-hover-color: #fff;
--scrape-example-code-wrapper-background-start: rgba(15, 20, 25, 1);
--scrape-example-code-wrapper-background-end: rgba(15, 20, 25, 0);
+ --sidebar-resizer-hover: hsl(34, 50%, 33%);
+ --sidebar-resizer-active: hsl(34, 100%, 66%);
}
:root[data-theme="ayu"] h1,
@@ -2519,6 +2670,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
}
:root[data-theme="ayu"] .sidebar .current,
+:root[data-theme="ayu"] .sidebar .current a,
:root[data-theme="ayu"] .sidebar a:hover,
:root[data-theme="ayu"] #src-sidebar div.files > a:hover,
:root[data-theme="ayu"] details.dir-entry summary:hover,
@@ -2569,7 +2721,8 @@ Original by Dempfi (https://github.com/dempfi/ayu)
border-bottom: 1px solid rgba(242, 151, 24, 0.3);
}
-:root[data-theme="ayu"] #settings-menu > a img {
+:root[data-theme="ayu"] #settings-menu > a img,
+:root[data-theme="ayu"] #sidebar-button > a:before {
filter: invert(100);
}
/* End theme: ayu */
diff --git a/src/librustdoc/html/static/js/externs.js b/src/librustdoc/html/static/js/externs.js
index c7811b43d..93709e4e8 100644
--- a/src/librustdoc/html/static/js/externs.js
+++ b/src/librustdoc/html/static/js/externs.js
@@ -14,6 +14,7 @@ function initSearch(searchIndex){}
* pathWithoutLast: Array<string>,
* pathLast: string,
* generics: Array<QueryElement>,
+ * bindings: Map<integer, Array<QueryElement>>,
* }}
*/
let QueryElement;
@@ -24,6 +25,7 @@ let QueryElement;
* totalElems: number,
* typeFilter: (null|string),
* userQuery: string,
+ * isInBinding: (null|string),
* }}
*/
let ParserState;
@@ -40,6 +42,7 @@ let ParserState;
* totalElems: number,
* literalSearch: boolean,
* corrections: Array<{from: string, to: integer}>,
+ * typeFingerprint: Uint32Array,
* }}
*/
let ParsedQuery;
@@ -191,8 +194,9 @@ let FunctionSearchType;
/**
* @typedef {{
* id: (null|number),
- * ty: (null|number),
+ * ty: number,
* generics: Array<FunctionType>,
+ * bindings: Map<integer, Array<FunctionType>>,
* }}
*/
let FunctionType;
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
index 7c052606a..63ab56053 100644
--- a/src/librustdoc/html/static/js/main.js
+++ b/src/librustdoc/html/static/js/main.js
@@ -1,5 +1,5 @@
// Local js definitions:
-/* global addClass, getSettingValue, hasClass, searchState */
+/* global addClass, getSettingValue, hasClass, searchState, updateLocalStorage */
/* global onEach, onEachLazy, removeClass, getVar */
"use strict";
@@ -25,19 +25,9 @@ function showMain() {
removeClass(document.getElementById(MAIN_ID), "hidden");
}
-function elemIsInParent(elem, parent) {
- while (elem && elem !== document.body) {
- if (elem === parent) {
- return true;
- }
- elem = elem.parentElement;
- }
- return false;
-}
-
function blurHandler(event, parentElem, hideCallback) {
- if (!elemIsInParent(document.activeElement, parentElem) &&
- !elemIsInParent(event.relatedTarget, parentElem)
+ if (!parentElem.contains(document.activeElement) &&
+ !parentElem.contains(event.relatedTarget)
) {
hideCallback();
}
@@ -54,7 +44,7 @@ function setMobileTopbar() {
if (mobileTopbar) {
const mobileTitle = document.createElement("h2");
mobileTitle.className = "location";
- if (hasClass(document.body, "crate")) {
+ if (hasClass(document.querySelector(".rustdoc"), "crate")) {
mobileTitle.innerText = `Crate ${window.currentCrate}`;
} else if (locationTitle) {
mobileTitle.innerHTML = locationTitle.innerHTML;
@@ -485,7 +475,7 @@ function preLoadCss(cssUrl) {
return;
}
- const modpath = hasClass(document.body, "mod") ? "../" : "";
+ const modpath = hasClass(document.querySelector(".rustdoc"), "mod") ? "../" : "";
const h3 = document.createElement("h3");
h3.innerHTML = `<a href="${modpath}index.html#${id}">${longty}</a>`;
@@ -505,7 +495,7 @@ function preLoadCss(cssUrl) {
}
const link = document.createElement("a");
link.href = path;
- if (link.href === current_page) {
+ if (path === current_page) {
link.className = "current";
}
link.textContent = name;
@@ -867,12 +857,12 @@ function preLoadCss(cssUrl) {
for (const crate of window.ALL_CRATES) {
const link = document.createElement("a");
link.href = window.rootPath + crate + "/index.html";
- if (window.rootPath !== "./" && crate === window.currentCrate) {
- link.className = "current";
- }
link.textContent = crate;
const li = document.createElement("li");
+ if (window.rootPath !== "./" && crate === window.currentCrate) {
+ li.className = "current";
+ }
li.appendChild(link);
ul.appendChild(li);
}
@@ -1118,7 +1108,7 @@ function preLoadCss(cssUrl) {
if (ev.pointerType !== "mouse") {
return;
}
- if (!e.TOOLTIP_FORCE_VISIBLE && !elemIsInParent(ev.relatedTarget, e)) {
+ if (!e.TOOLTIP_FORCE_VISIBLE && !e.contains(ev.relatedTarget)) {
// See "Tooltip pointer leave gesture" below.
setTooltipHoverTimeout(e, false);
addClass(wrapper, "fade-out");
@@ -1178,10 +1168,10 @@ function preLoadCss(cssUrl) {
function tooltipBlurHandler(event) {
if (window.CURRENT_TOOLTIP_ELEMENT &&
- !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT) &&
- !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT) &&
- !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) &&
- !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE)
+ !window.CURRENT_TOOLTIP_ELEMENT.contains(document.activeElement) &&
+ !window.CURRENT_TOOLTIP_ELEMENT.contains(event.relatedTarget) &&
+ !window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(document.activeElement) &&
+ !window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(event.relatedTarget)
) {
// Work around a difference in the focus behaviour between Firefox, Chrome, and Safari.
// When I click the button on an already-opened tooltip popover, Safari
@@ -1248,8 +1238,8 @@ function preLoadCss(cssUrl) {
if (ev.pointerType !== "mouse") {
return;
}
- if (!e.TOOLTIP_FORCE_VISIBLE &&
- !elemIsInParent(ev.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT)) {
+ if (!e.TOOLTIP_FORCE_VISIBLE && window.CURRENT_TOOLTIP_ELEMENT &&
+ !window.CURRENT_TOOLTIP_ELEMENT.contains(ev.relatedTarget)) {
// Tooltip pointer leave gesture:
//
// Designing a good hover microinteraction is a matter of guessing user
@@ -1328,8 +1318,7 @@ function preLoadCss(cssUrl) {
const infos = [
`For a full list of all search features, take a look <a \
-href="https://doc.rust-lang.org/${channel}/rustdoc/how-to-read-rustdoc.html\
-#the-search-interface">here</a>.`,
+href="https://doc.rust-lang.org/${channel}/rustdoc/read-documentation/search.html">here</a>.`,
"Prefix searches with a type followed by a colon (e.g., <code>fn:</code>) to \
restrict the search to a given item kind.",
"Accepted kinds are: <code>fn</code>, <code>mod</code>, <code>struct</code>, \
@@ -1484,6 +1473,264 @@ href="https://doc.rust-lang.org/${channel}/rustdoc/how-to-read-rustdoc.html\
searchState.setup();
}());
+// Hide, show, and resize the sidebar
+//
+// The body class and CSS variable are initially set up in storage.js,
+// but in this file, we implement:
+//
+// * the show sidebar button, which appears if the sidebar is hidden
+// and, by clicking on it, will bring it back
+// * the sidebar resize handle, which appears only on large viewports
+// with a [fine precision pointer] to allow the user to change
+// the size of the sidebar
+//
+// [fine precision pointer]: https://developer.mozilla.org/en-US/docs/Web/CSS/@media/pointer
+(function() {
+ // 100 is the size of the logo
+ // don't let the sidebar get smaller than that, or it'll get squished
+ const SIDEBAR_MIN = 100;
+ // Don't let the sidebar get bigger than this
+ const SIDEBAR_MAX = 500;
+ // Don't let the body (including the gutter) get smaller than this
+ //
+ // WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
+ // Acceptable values for BODY_MIN are constrained by the mobile breakpoint
+ // (which is the minimum size of the whole page where the sidebar exists)
+ // and the default sidebar width:
+ //
+ // BODY_MIN <= RUSTDOC_MOBILE_BREAKPOINT - DEFAULT_SIDEBAR_WIDTH
+ //
+ // At the time of this writing, the DEFAULT_SIDEBAR_WIDTH on src pages is
+ // 300px, and the RUSTDOC_MOBILE_BREAKPOINT is 700px, so BODY_MIN must be
+ // at most 400px. Otherwise, it would start out at the default size, then
+ // grabbing the resize handle would suddenly cause it to jank to
+ // its constraint-generated maximum.
+ const RUSTDOC_MOBILE_BREAKPOINT = 700;
+ const BODY_MIN = 400;
+ // At half-way past the minimum size, vanish the sidebar entirely
+ const SIDEBAR_VANISH_THRESHOLD = SIDEBAR_MIN / 2;
+
+ // Toolbar button to show the sidebar.
+ //
+ // On small, "mobile-sized" viewports, it's not persistent and it
+ // can only be activated by going into Settings and hiding the nav bar.
+ // On larger, "desktop-sized" viewports (though that includes many
+ // tablets), it's fixed-position, appears in the left side margin,
+ // and it can be activated by resizing the sidebar into nothing.
+ const sidebarButton = document.getElementById("sidebar-button");
+ if (sidebarButton) {
+ sidebarButton.addEventListener("click", e => {
+ removeClass(document.documentElement, "hide-sidebar");
+ updateLocalStorage("hide-sidebar", "false");
+ e.preventDefault();
+ });
+ }
+
+ // Pointer capture.
+ //
+ // Resizing is a single-pointer gesture. Any secondary pointer is ignored.
+ let currentPointerId = null;
+
+ // "Desired" sidebar size.
+ //
+ // This is stashed here for window resizing. If the sidebar gets
+ // shrunk to maintain BODY_MIN, and then the user grows the window again,
+ // this lets the sidebar restore its size.
+ let desiredSidebarSize = null;
+
+ // Sidebar resize debouncer.
+ //
+ // The sidebar itself is resized instantly, but the body HTML can be too
+ // big for that, causing reflow jank. To reduce this, we queue up a separate
+ // animation frame and throttle it.
+ let pendingSidebarResizingFrame = false;
+
+ // If this page has no sidebar at all, bail out.
+ const resizer = document.querySelector(".sidebar-resizer");
+ const sidebar = document.querySelector(".sidebar");
+ if (!resizer || !sidebar) {
+ return;
+ }
+
+ // src page and docs page use different variables, because the contents of
+ // the sidebar are so different that it's reasonable to think the user
+ // would want them to have different sizes.
+ const isSrcPage = hasClass(document.body, "src");
+
+ // Call this function to hide the sidebar when using the resize handle
+ //
+ // This function also nulls out the sidebar width CSS variable and setting,
+ // causing it to return to its default. This does not happen if you do it
+ // from settings.js, which uses a separate function. It's done here because
+ // the minimum sidebar size is rather uncomfortable, and it must pass
+ // through that size when using the shrink-to-nothing gesture.
+ function hideSidebar() {
+ if (isSrcPage) {
+ window.rustdocCloseSourceSidebar();
+ updateLocalStorage("src-sidebar-width", null);
+ // [RUSTDOCIMPL] CSS variable fast path
+ //
+ // The sidebar width variable is attached to the <html> element by
+ // storage.js, because the sidebar and resizer don't exist yet.
+ // But the resize code, in `resize()`, sets the property on the
+ // sidebar and resizer elements (which are the only elements that
+ // use the variable) to avoid recalculating CSS on the entire
+ // document on every frame.
+ //
+ // So, to clear it, we need to clear all three.
+ document.documentElement.style.removeProperty("--src-sidebar-width");
+ sidebar.style.removeProperty("--src-sidebar-width");
+ resizer.style.removeProperty("--src-sidebar-width");
+ } else {
+ addClass(document.documentElement, "hide-sidebar");
+ updateLocalStorage("hide-sidebar", "true");
+ updateLocalStorage("desktop-sidebar-width", null);
+ document.documentElement.style.removeProperty("--desktop-sidebar-width");
+ sidebar.style.removeProperty("--desktop-sidebar-width");
+ resizer.style.removeProperty("--desktop-sidebar-width");
+ }
+ }
+
+ // Call this function to show the sidebar from the resize handle.
+ // On docs pages, this can only happen if the user has grabbed the resize
+ // handle, shrunk the sidebar down to nothing, and then pulled back into
+ // the visible range without releasing it. You can, however, grab the
+ // resize handle on a source page with the sidebar closed, because it
+ // remains visible there at all times.
+ function showSidebar() {
+ if (isSrcPage) {
+ window.rustdocShowSourceSidebar();
+ } else {
+ removeClass(document.documentElement, "hide-sidebar");
+ updateLocalStorage("hide-sidebar", "false");
+ }
+ }
+
+ /**
+ * Call this to set the correct CSS variable and setting.
+ * This function doesn't enforce size constraints. Do that before calling it!
+ *
+ * @param {number} size - CSS px width of the sidebar.
+ */
+ function changeSidebarSize(size) {
+ if (isSrcPage) {
+ updateLocalStorage("src-sidebar-width", size);
+ // [RUSTDOCIMPL] CSS variable fast path
+ //
+ // While this property is set on the HTML element at load time,
+ // because the sidebar isn't actually loaded yet,
+ // we scope this update to the sidebar to avoid hitting a slow
+ // path in WebKit.
+ sidebar.style.setProperty("--src-sidebar-width", size + "px");
+ resizer.style.setProperty("--src-sidebar-width", size + "px");
+ } else {
+ updateLocalStorage("desktop-sidebar-width", size);
+ sidebar.style.setProperty("--desktop-sidebar-width", size + "px");
+ resizer.style.setProperty("--desktop-sidebar-width", size + "px");
+ }
+ }
+
+ // Check if the sidebar is hidden. Since src pages and doc pages have
+ // different settings, this function has to check that.
+ function isSidebarHidden() {
+ return isSrcPage ?
+ !hasClass(document.documentElement, "src-sidebar-expanded") :
+ hasClass(document.documentElement, "hide-sidebar");
+ }
+
+ // Respond to the resize handle event.
+ // This function enforces size constraints, and implements the
+ // shrink-to-nothing gesture based on thresholds defined above.
+ function resize(e) {
+ if (currentPointerId === null || currentPointerId !== e.pointerId) {
+ return;
+ }
+ e.preventDefault();
+ const pos = e.clientX - sidebar.offsetLeft - 3;
+ if (pos < SIDEBAR_VANISH_THRESHOLD) {
+ hideSidebar();
+ } else if (pos >= SIDEBAR_MIN) {
+ if (isSidebarHidden()) {
+ showSidebar();
+ }
+ // don't let the sidebar get wider than SIDEBAR_MAX, or the body narrower
+ // than BODY_MIN
+ const constrainedPos = Math.min(pos, window.innerWidth - BODY_MIN, SIDEBAR_MAX);
+ changeSidebarSize(constrainedPos);
+ desiredSidebarSize = constrainedPos;
+ if (pendingSidebarResizingFrame !== false) {
+ clearTimeout(pendingSidebarResizingFrame);
+ }
+ pendingSidebarResizingFrame = setTimeout(() => {
+ if (currentPointerId === null || pendingSidebarResizingFrame === false) {
+ return;
+ }
+ pendingSidebarResizingFrame = false;
+ document.documentElement.style.setProperty(
+ "--resizing-sidebar-width",
+ desiredSidebarSize + "px"
+ );
+ }, 100);
+ }
+ }
+ // Respond to the window resize event.
+ window.addEventListener("resize", () => {
+ if (window.innerWidth < RUSTDOC_MOBILE_BREAKPOINT) {
+ return;
+ }
+ stopResize();
+ if (desiredSidebarSize >= (window.innerWidth - BODY_MIN)) {
+ changeSidebarSize(window.innerWidth - BODY_MIN);
+ } else if (desiredSidebarSize !== null && desiredSidebarSize > SIDEBAR_MIN) {
+ changeSidebarSize(desiredSidebarSize);
+ }
+ });
+ function stopResize(e) {
+ if (currentPointerId === null) {
+ return;
+ }
+ if (e) {
+ e.preventDefault();
+ }
+ desiredSidebarSize = sidebar.getBoundingClientRect().width;
+ removeClass(resizer, "active");
+ window.removeEventListener("pointermove", resize, false);
+ window.removeEventListener("pointerup", stopResize, false);
+ removeClass(document.documentElement, "sidebar-resizing");
+ document.documentElement.style.removeProperty("--resizing-sidebar-width");
+ if (resizer.releasePointerCapture) {
+ resizer.releasePointerCapture(currentPointerId);
+ currentPointerId = null;
+ }
+ }
+ function initResize(e) {
+ if (currentPointerId !== null || e.altKey || e.ctrlKey || e.metaKey || e.button !== 0) {
+ return;
+ }
+ if (resizer.setPointerCapture) {
+ resizer.setPointerCapture(e.pointerId);
+ if (!resizer.hasPointerCapture(e.pointerId)) {
+ // unable to capture pointer; something else has it
+ // on iOS, this usually means you long-clicked a link instead
+ resizer.releasePointerCapture(e.pointerId);
+ return;
+ }
+ currentPointerId = e.pointerId;
+ }
+ e.preventDefault();
+ window.addEventListener("pointermove", resize, false);
+ window.addEventListener("pointercancel", stopResize, false);
+ window.addEventListener("pointerup", stopResize, false);
+ addClass(resizer, "active");
+ addClass(document.documentElement, "sidebar-resizing");
+ const pos = e.clientX - sidebar.offsetLeft - 3;
+ document.documentElement.style.setProperty("--resizing-sidebar-width", pos + "px");
+ desiredSidebarSize = null;
+ }
+ resizer.addEventListener("pointerdown", initResize, false);
+}());
+
+// This section handles the copy button that appears next to the path breadcrumbs
(function() {
let reset_button_timeout = null;
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
index 48c9a53a2..e824a1fd4 100644
--- a/src/librustdoc/html/static/js/search.js
+++ b/src/librustdoc/html/static/js/search.js
@@ -18,36 +18,38 @@ if (!Array.prototype.toSpliced) {
// This mapping table should match the discriminants of
// `rustdoc::formats::item_type::ItemType` type in Rust.
const itemTypes = [
+ "keyword",
+ "primitive",
"mod",
"externcrate",
"import",
- "struct",
+ "struct", // 5
"enum",
"fn",
"type",
"static",
- "trait",
+ "trait", // 10
"impl",
"tymethod",
"method",
"structfield",
- "variant",
+ "variant", // 15
"macro",
- "primitive",
"associatedtype",
"constant",
"associatedconstant",
- "union",
+ "union", // 20
"foreigntype",
- "keyword",
"existential",
"attr",
"derive",
- "traitalias",
+ "traitalias", // 25
"generic",
];
const longItemTypes = [
+ "keyword",
+ "primitive type",
"module",
"extern crate",
"re-export",
@@ -63,13 +65,11 @@ const longItemTypes = [
"struct field",
"enum variant",
"macro",
- "primitive type",
"assoc type",
"constant",
"assoc const",
"union",
"foreign type",
- "keyword",
"existential type",
"attribute macro",
"derive macro",
@@ -77,15 +77,9 @@ const longItemTypes = [
];
// used for special search precedence
-const TY_PRIMITIVE = itemTypes.indexOf("primitive");
-const TY_KEYWORD = itemTypes.indexOf("keyword");
const TY_GENERIC = itemTypes.indexOf("generic");
const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../";
-function hasOwnPropertyRustdoc(obj, property) {
- return Object.prototype.hasOwnProperty.call(obj, property);
-}
-
// In the search display, allows to switch between tabs.
function printTab(nb) {
let iter = 0;
@@ -240,12 +234,16 @@ function initSearch(rawSearchIndex) {
* @type {Array<Row>}
*/
let searchIndex;
+ /**
+ * @type {Uint32Array}
+ */
+ let functionTypeFingerprint;
let currentResults;
/**
* Map from normalized type names to integers. Used to make type search
* more efficient.
*
- * @type {Map<string, integer>}
+ * @type {Map<string, {id: integer, assocOnly: boolean}>}
*/
let typeNameIdMap;
const ALIASES = new Map();
@@ -272,37 +270,32 @@ function initSearch(rawSearchIndex) {
* get the same ID.
*
* @param {string} name
+ * @param {boolean} isAssocType - True if this is an assoc type
*
* @returns {integer}
*/
- function buildTypeMapIndex(name) {
+ function buildTypeMapIndex(name, isAssocType) {
if (name === "" || name === null) {
return null;
}
if (typeNameIdMap.has(name)) {
- return typeNameIdMap.get(name);
+ const obj = typeNameIdMap.get(name);
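+ // `assocOnly` stays true only while every occurrence of this name has
+ // been an associated type; any non-associated use clears it for good.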
+ obj.assocOnly = isAssocType && obj.assocOnly;
+ return obj.id;
} else {
const id = typeNameIdMap.size;
- typeNameIdMap.set(name, id);
+ typeNameIdMap.set(name, {id, assocOnly: isAssocType});
return id;
}
}
- function isWhitespace(c) {
- return " \t\n\r".indexOf(c) !== -1;
- }
-
function isSpecialStartCharacter(c) {
return "<\"".indexOf(c) !== -1;
}
function isEndCharacter(c) {
- return ",>-]".indexOf(c) !== -1;
- }
-
- function isStopCharacter(c) {
- return isEndCharacter(c);
+ return "=,>-]".indexOf(c) !== -1;
}
function isErrorCharacter(c) {
@@ -398,7 +391,7 @@ function initSearch(rawSearchIndex) {
* @return {boolean}
*/
function isSeparatorCharacter(c) {
- return c === ",";
+ return c === "," || c === "=";
}
/**
@@ -410,7 +403,7 @@ function initSearch(rawSearchIndex) {
* @return {boolean}
*/
function isPathSeparator(c) {
- return c === ":" || isWhitespace(c);
+ return c === ":" || c === " ";
}
/**
@@ -427,7 +420,7 @@ function initSearch(rawSearchIndex) {
const c = parserState.userQuery[pos - 1];
if (c === lookingFor) {
return true;
- } else if (!isWhitespace(c)) {
+ } else if (c !== " ") {
break;
}
pos -= 1;
@@ -456,7 +449,7 @@ function initSearch(rawSearchIndex) {
function skipWhitespace(parserState) {
while (parserState.pos < parserState.userQuery.length) {
const c = parserState.userQuery[parserState.pos];
- if (!isWhitespace(c)) {
+ if (c !== " ") {
break;
}
parserState.pos += 1;
@@ -475,8 +468,6 @@ function initSearch(rawSearchIndex) {
const path = name.trim();
if (path.length === 0 && generics.length === 0) {
throw ["Unexpected ", parserState.userQuery[parserState.pos]];
- } else if (path === "*") {
- throw ["Unexpected ", "*"];
}
if (query.literalSearch && parserState.totalElems - parserState.genericsElems > 0) {
throw ["Cannot have more than one element if you use quotes"];
@@ -500,28 +491,30 @@ function initSearch(rawSearchIndex) {
" does not accept generic parameters",
];
}
+ const bindingName = parserState.isInBinding;
+ parserState.isInBinding = null;
return {
name: "never",
id: null,
fullPath: ["never"],
pathWithoutLast: [],
pathLast: "never",
+ normalizedPathLast: "never",
generics: [],
+ bindings: new Map(),
typeFilter: "primitive",
+ bindingName,
};
}
+ const quadcolon = /::\s*::/.exec(path);
if (path.startsWith("::")) {
throw ["Paths cannot start with ", "::"];
} else if (path.endsWith("::")) {
throw ["Paths cannot end with ", "::"];
- } else if (path.includes("::::")) {
- throw ["Unexpected ", "::::"];
- } else if (path.includes(" ::")) {
- throw ["Unexpected ", " ::"];
- } else if (path.includes(":: ")) {
- throw ["Unexpected ", ":: "];
- }
- const pathSegments = path.split(/::|\s+/);
+ } else if (quadcolon !== null) {
+ throw ["Unexpected ", quadcolon[0]];
+ }
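+ // Paths are split on `::` and/or whitespace, so (for example) `a::b`,
+ // `a :: b` and `a b` all yield `["a", "b"]` here.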
+ const pathSegments = path.split(/(?:::\s*)|(?:\s+(?:::\s*)?)/);
// In case we only have something like `<p>`, there is no name.
if (pathSegments.length === 0 || (pathSegments.length === 1 && pathSegments[0] === "")) {
if (generics.length > 0 || prevIs(parserState, ">")) {
@@ -542,14 +535,29 @@ function initSearch(rawSearchIndex) {
if (isInGenerics) {
parserState.genericsElems += 1;
}
+ const bindingName = parserState.isInBinding;
+ parserState.isInBinding = null;
+ const bindings = new Map();
+ const pathLast = pathSegments[pathSegments.length - 1];
return {
name: name.trim(),
id: null,
fullPath: pathSegments,
pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
- pathLast: pathSegments[pathSegments.length - 1],
- generics: generics,
+ pathLast,
+ normalizedPathLast: pathLast.replace(/_/g, ""),
+ generics: generics.filter(gen => {
+ // Syntactically, bindings are parsed as generics,
+ // but the query engine treats them differently.
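+ // For example, in `Iterator<Item=u32>` the `u32` element carries
+ // `bindingName` "item", so it is moved into `bindings` below instead.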
+ if (gen.bindingName !== null) {
+ bindings.set(gen.bindingName.name, [gen, ...gen.bindingName.generics]);
+ return false;
+ }
+ return true;
+ }),
+ bindings,
typeFilter,
+ bindingName,
};
}
@@ -589,7 +597,7 @@ function initSearch(rawSearchIndex) {
} else {
while (parserState.pos + 1 < parserState.length) {
const next_c = parserState.userQuery[parserState.pos + 1];
- if (!isWhitespace(next_c)) {
+ if (next_c !== " ") {
break;
}
parserState.pos += 1;
@@ -608,7 +616,7 @@ function initSearch(rawSearchIndex) {
}
} else if (
c === "[" ||
- isStopCharacter(c) ||
+ isEndCharacter(c) ||
isSpecialStartCharacter(c) ||
isSeparatorCharacter(c)
) {
@@ -657,6 +665,7 @@ function initSearch(rawSearchIndex) {
parserState.pos += 1;
getItemsBefore(query, parserState, generics, "]");
const typeFilter = parserState.typeFilter;
+ const isInBinding = parserState.isInBinding;
if (typeFilter !== null && typeFilter !== "primitive") {
throw [
"Invalid search type: primitive ",
@@ -667,18 +676,27 @@ function initSearch(rawSearchIndex) {
];
}
parserState.typeFilter = null;
+ parserState.isInBinding = null;
parserState.totalElems += 1;
if (isInGenerics) {
parserState.genericsElems += 1;
}
+ for (const gen of generics) {
+ if (gen.bindingName !== null) {
+ throw ["Type parameter ", "=", " cannot be within slice ", "[]"];
+ }
+ }
elems.push({
name: "[]",
id: null,
fullPath: ["[]"],
pathWithoutLast: [],
pathLast: "[]",
+ normalizedPathLast: "[]",
generics,
typeFilter: "primitive",
+ bindingName: isInBinding,
+ bindings: new Map(),
});
} else {
const isStringElem = parserState.userQuery[start] === "\"";
@@ -705,15 +723,38 @@ function initSearch(rawSearchIndex) {
if (start >= end && generics.length === 0) {
return;
}
- elems.push(
- createQueryElement(
- query,
- parserState,
- parserState.userQuery.slice(start, end),
- generics,
- isInGenerics
- )
- );
+ if (parserState.userQuery[parserState.pos] === "=") {
+ if (parserState.isInBinding) {
+ throw ["Cannot write ", "=", " twice in a binding"];
+ }
+ if (!isInGenerics) {
+ throw ["Type parameter ", "=", " must be within generics list"];
+ }
+ const name = parserState.userQuery.slice(start, end).trim();
+ if (name === "!") {
+ throw ["Type parameter ", "=", " key cannot be ", "!", " never type"];
+ }
+ if (name.includes("!")) {
+ throw ["Type parameter ", "=", " key cannot be ", "!", " macro"];
+ }
+ if (name.includes("::")) {
+ throw ["Type parameter ", "=", " key cannot contain ", "::", " path"];
+ }
+ if (name.includes(":")) {
+ throw ["Type parameter ", "=", " key cannot contain ", ":", " type"];
+ }
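+ // Stash the binding key (e.g. the `Item` of `Item=u32`); the next
+ // element parsed will pick it up as its `bindingName`.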
+ parserState.isInBinding = { name, generics };
+ } else {
+ elems.push(
+ createQueryElement(
+ query,
+ parserState,
+ parserState.userQuery.slice(start, end),
+ generics,
+ isInGenerics
+ )
+ );
+ }
}
}
@@ -737,6 +778,8 @@ function initSearch(rawSearchIndex) {
// If this is a generic, keep the outer item's type filter around.
const oldTypeFilter = parserState.typeFilter;
parserState.typeFilter = null;
+ const oldIsInBinding = parserState.isInBinding;
+ parserState.isInBinding = null;
let extra = "";
if (endChar === ">") {
@@ -752,6 +795,9 @@ function initSearch(rawSearchIndex) {
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
if (c === endChar) {
+ if (parserState.isInBinding) {
+ throw ["Unexpected ", endChar, " after ", "="];
+ }
break;
} else if (isSeparatorCharacter(c)) {
parserState.pos += 1;
@@ -791,7 +837,9 @@ function initSearch(rawSearchIndex) {
throw [
"Expected ",
",",
- " or ",
+ ", ",
+ "=",
+ ", or ",
endChar,
...extra,
", found ",
@@ -801,6 +849,8 @@ function initSearch(rawSearchIndex) {
throw [
"Expected ",
",",
+ " or ",
+ "=",
...extra,
", found ",
c,
@@ -828,6 +878,7 @@ function initSearch(rawSearchIndex) {
parserState.pos += 1;
parserState.typeFilter = oldTypeFilter;
+ parserState.isInBinding = oldIsInBinding;
}
/**
@@ -865,7 +916,7 @@ function initSearch(rawSearchIndex) {
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
- if (isStopCharacter(c)) {
+ if (isEndCharacter(c)) {
foundStopChar = true;
if (isSeparatorCharacter(c)) {
parserState.pos += 1;
@@ -900,7 +951,7 @@ function initSearch(rawSearchIndex) {
query.literalSearch = false;
foundStopChar = true;
continue;
- } else if (isWhitespace(c)) {
+ } else if (c === " ") {
skipWhitespace(parserState);
continue;
}
@@ -991,6 +1042,8 @@ function initSearch(rawSearchIndex) {
correction: null,
proposeCorrectionFrom: null,
proposeCorrectionTo: null,
+ // Bloom filter built from type ids
+ typeFingerprint: new Uint32Array(4),
};
}
@@ -1021,7 +1074,7 @@ function initSearch(rawSearchIndex) {
if (elem &&
elem.value !== "all crates" &&
- hasOwnPropertyRustdoc(rawSearchIndex, elem.value)
+ rawSearchIndex.has(elem.value)
) {
return elem.value;
}
@@ -1054,8 +1107,13 @@ function initSearch(rawSearchIndex) {
for (const elem2 of elem.generics) {
convertTypeFilterOnElem(elem2);
}
+ for (const constraints of elem.bindings.values()) {
+ for (const constraint of constraints) {
+ convertTypeFilterOnElem(constraint);
+ }
+ }
}
- userQuery = userQuery.trim();
+ userQuery = userQuery.trim().replace(/\r|\n|\t/g, " ");
const parserState = {
length: userQuery.length,
pos: 0,
@@ -1063,6 +1121,7 @@ function initSearch(rawSearchIndex) {
totalElems: 0,
genericsElems: 0,
typeFilter: null,
+ isInBinding: null,
userQuery: userQuery.toLowerCase(),
};
let query = newParsedQuery(userQuery);
@@ -1080,7 +1139,6 @@ function initSearch(rawSearchIndex) {
query.error = err;
return query;
}
-
if (!query.literalSearch) {
// If there is more than one element in the query, we switch to literalSearch in any
// case.
@@ -1114,13 +1172,12 @@ function initSearch(rawSearchIndex) {
* Executes the parsed query and builds a {ResultsTable}.
*
* @param {ParsedQuery} parsedQuery - The parsed user query
- * @param {Object} searchWords - The list of search words to query against
* @param {Object} [filterCrates] - Crate to search in if defined
* @param {Object} [currentCrate] - Current crate, to rank results from this crate higher
*
* @return {ResultsTable}
*/
- function execQuery(parsedQuery, searchWords, filterCrates, currentCrate) {
+ function execQuery(parsedQuery, filterCrates, currentCrate) {
const results_others = new Map(), results_in_args = new Map(),
results_returned = new Map();
@@ -1178,8 +1235,8 @@ function initSearch(rawSearchIndex) {
const userQuery = parsedQuery.userQuery;
const result_list = [];
for (const result of results.values()) {
- result.word = searchWords[result.id];
- result.item = searchIndex[result.id] || {};
+ result.item = searchIndex[result.id];
+ result.word = searchIndex[result.id].word;
result_list.push(result);
}
@@ -1251,16 +1308,6 @@ function initSearch(rawSearchIndex) {
return (a > b ? +1 : -1);
}
- // special precedence for primitive and keyword pages
- if ((aaa.item.ty === TY_PRIMITIVE && bbb.item.ty !== TY_KEYWORD) ||
- (aaa.item.ty === TY_KEYWORD && bbb.item.ty !== TY_PRIMITIVE)) {
- return -1;
- }
- if ((bbb.item.ty === TY_PRIMITIVE && aaa.item.ty !== TY_PRIMITIVE) ||
- (bbb.item.ty === TY_KEYWORD && aaa.item.ty !== TY_KEYWORD)) {
- return 1;
- }
-
// sort by description (no description goes later)
a = (aaa.item.desc === "");
b = (bbb.item.desc === "");
@@ -1286,59 +1333,10 @@ function initSearch(rawSearchIndex) {
return 0;
});
- let nameSplit = null;
- if (parsedQuery.elems.length === 1) {
- const hasPath = typeof parsedQuery.elems[0].path === "undefined";
- nameSplit = hasPath ? null : parsedQuery.elems[0].path;
- }
-
- for (const result of result_list) {
- // this validation does not make sense when searching by types
- if (result.dontValidate) {
- continue;
- }
- const name = result.item.name.toLowerCase(),
- path = result.item.path.toLowerCase(),
- parent = result.item.parent;
-
- if (!isType && !validateResult(name, path, nameSplit, parent)) {
- result.id = -1;
- }
- }
return transformResults(result_list);
}
/**
- * This function checks generics in search query `queryElem` can all be found in the
- * search index (`fnType`),
- *
- * This function returns `true` if it matches, and also writes the results to mgensInout.
- * It returns `false` if no match is found, and leaves mgensInout untouched.
- *
- * @param {FunctionType} fnType - The object to check.
- * @param {QueryElement} queryElem - The element from the parsed query.
- * @param {[FunctionType]} whereClause - Trait bounds for generic items.
- * @param {Map<number,number>|null} mgensInout - Map functions generics to query generics.
- *
- * @return {boolean} - Returns true if a match, false otherwise.
- */
- function checkGenerics(fnType, queryElem, whereClause, mgensInout) {
- return unifyFunctionTypes(
- fnType.generics,
- queryElem.generics,
- whereClause,
- mgensInout,
- mgens => {
- if (mgensInout) {
- for (const [fid, qid] of mgens.entries()) {
- mgensInout.set(fid, qid);
- }
- }
- return true;
- }
- );
- }
- /**
* This function checks if a list of search query `queryElems` can all be found in the
* search index (`fnTypes`).
*
@@ -1348,7 +1346,7 @@ function initSearch(rawSearchIndex) {
* then this function will try with a different solution, or bail with false if it
* runs out of candidates.
*
- * @param {Array<FunctionType>} fnTypes - The objects to check.
+ * @param {Array<FunctionType>} fnTypesIn - The objects to check.
* @param {Array<QueryElement>} queryElems - The elements from the parsed query.
* @param {[FunctionType]} whereClause - Trait bounds for generic items.
* @param {Map<number,number>|null} mgensIn
@@ -1359,9 +1357,9 @@ function initSearch(rawSearchIndex) {
*/
function unifyFunctionTypes(fnTypesIn, queryElems, whereClause, mgensIn, solutionCb) {
/**
- * @type Map<integer, integer>
+ * @type Map<integer, integer>|null
*/
- let mgens = new Map(mgensIn);
+ const mgens = mgensIn === null ? null : new Map(mgensIn);
if (queryElems.length === 0) {
return !solutionCb || solutionCb(mgens);
}
@@ -1369,204 +1367,249 @@ function initSearch(rawSearchIndex) {
return false;
}
const ql = queryElems.length;
- let fl = fnTypesIn.length;
+ const fl = fnTypesIn.length;
+
+ // One element fast path / base case
+ if (ql === 1 && queryElems[0].generics.length === 0
+ && queryElems[0].bindings.size === 0) {
+ const queryElem = queryElems[0];
+ for (const fnType of fnTypesIn) {
+ if (!unifyFunctionTypeIsMatchCandidate(fnType, queryElem, whereClause, mgens)) {
+ continue;
+ }
+ if (fnType.id < 0 && queryElem.id < 0) {
+ if (mgens && mgens.has(fnType.id) &&
+ mgens.get(fnType.id) !== queryElem.id) {
+ continue;
+ }
+ const mgensScratch = new Map(mgens);
+ mgensScratch.set(fnType.id, queryElem.id);
+ if (!solutionCb || solutionCb(mgensScratch)) {
+ return true;
+ }
+ } else if (!solutionCb || solutionCb(mgens ? new Map(mgens) : null)) {
+ // unifyFunctionTypeIsMatchCandidate already checks that ids match
+ return true;
+ }
+ }
+ for (const fnType of fnTypesIn) {
+ if (!unifyFunctionTypeIsUnboxCandidate(fnType, queryElem, whereClause, mgens)) {
+ continue;
+ }
+ if (fnType.id < 0) {
+ if (mgens && mgens.has(fnType.id) &&
+ mgens.get(fnType.id) !== 0) {
+ continue;
+ }
+ const mgensScratch = new Map(mgens);
+ mgensScratch.set(fnType.id, 0);
+ if (unifyFunctionTypes(
+ whereClause[(-fnType.id) - 1],
+ queryElems,
+ whereClause,
+ mgensScratch,
+ solutionCb
+ )) {
+ return true;
+ }
+ } else if (unifyFunctionTypes(
+ [...fnType.generics, ...Array.from(fnType.bindings.values()).flat() ],
+ queryElems,
+ whereClause,
+ mgens ? new Map(mgens) : null,
+ solutionCb
+ )) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // Multiple element recursive case
/**
* @type Array<FunctionType>
*/
- let fnTypes = fnTypesIn.slice();
+ const fnTypes = fnTypesIn.slice();
/**
- * loop works by building up a solution set in the working arrays
+ * The algorithm works by building up a solution set in the working arrays;
* fnTypes gets mutated in place to make this work, while queryElems
- * is left alone
+ * is left alone.
+ *
+ * It works backwards, because arrays can be cheaply truncated that way.
*
- * vvvvvvv `i` points here
- * queryElems = [ good, good, good, unknown, unknown ],
- * fnTypes = [ good, good, good, unknown, unknown ],
- * ---------------- ^^^^^^^^^^^^^^^^ `j` iterates after `i`,
- * | looking for candidates
- * everything before `i` is the
- * current working solution
+ * vvvvvvv `queryElem`
+ * queryElems = [ unknown, unknown, good, good, good ]
+ * fnTypes = [ unknown, unknown, good, good, good ]
+ * ^^^^^^^^^^^^^^^^ loop over these elements to find candidates
*
* Everything in the current working solution is known to be a good
* match, but it might not be the match we wind up going with, because
* there might be more than one candidate match, and we need to try them all
* before giving up. So, to handle this, it backtracks on failure.
- *
- * @type Array<{
- * "fnTypesScratch": Array<FunctionType>,
- * "queryElemsOffset": integer,
- * "fnTypesOffset": integer
- * }>
*/
- const backtracking = [];
- let i = 0;
- let j = 0;
- const backtrack = () => {
- while (backtracking.length !== 0) {
- // this session failed, but there are other possible solutions
- // to backtrack, reset to (a copy of) the old array, do the swap or unboxing
- const {
- fnTypesScratch,
- mgensScratch,
- queryElemsOffset,
- fnTypesOffset,
- unbox,
- } = backtracking.pop();
- mgens = new Map(mgensScratch);
- const fnType = fnTypesScratch[fnTypesOffset];
- const queryElem = queryElems[queryElemsOffset];
- if (unbox) {
- if (fnType.id < 0) {
- if (mgens.has(fnType.id) && mgens.get(fnType.id) !== 0) {
- continue;
- }
- mgens.set(fnType.id, 0);
- }
- const generics = fnType.id < 0 ?
- whereClause[(-fnType.id) - 1] :
- fnType.generics;
- fnTypes = fnTypesScratch.toSpliced(fnTypesOffset, 1, ...generics);
- fl = fnTypes.length;
- // re-run the matching algorithm on this item
- i = queryElemsOffset - 1;
- } else {
- if (fnType.id < 0) {
- if (mgens.has(fnType.id) && mgens.get(fnType.id) !== queryElem.id) {
- continue;
- }
- mgens.set(fnType.id, queryElem.id);
- }
- fnTypes = fnTypesScratch.slice();
- fl = fnTypes.length;
- const tmp = fnTypes[queryElemsOffset];
- fnTypes[queryElemsOffset] = fnTypes[fnTypesOffset];
- fnTypes[fnTypesOffset] = tmp;
- // this is known as a good match; go to the next one
- i = queryElemsOffset;
- }
- return true;
+ const flast = fl - 1;
+ const qlast = ql - 1;
+ const queryElem = queryElems[qlast];
+ let queryElemsTmp = null;
+ for (let i = flast; i >= 0; i -= 1) {
+ const fnType = fnTypes[i];
+ if (!unifyFunctionTypeIsMatchCandidate(fnType, queryElem, whereClause, mgens)) {
+ continue;
}
- return false;
- };
- for (i = 0; i !== ql; ++i) {
- const queryElem = queryElems[i];
- /**
- * list of potential function types that go with the current query element.
- * @type Array<integer>
- */
- const matchCandidates = [];
- let fnTypesScratch = null;
- let mgensScratch = null;
- // don't try anything before `i`, because they've already been
- // paired off with the other query elements
- for (j = i; j !== fl; ++j) {
- const fnType = fnTypes[j];
- if (unifyFunctionTypeIsMatchCandidate(fnType, queryElem, whereClause, mgens)) {
- if (!fnTypesScratch) {
- fnTypesScratch = fnTypes.slice();
+ let mgensScratch;
+ if (fnType.id < 0) {
+ mgensScratch = new Map(mgens);
+ if (mgensScratch.has(fnType.id)
+ && mgensScratch.get(fnType.id) !== queryElem.id) {
+ continue;
+ }
+ mgensScratch.set(fnType.id, queryElem.id);
+ } else {
+ mgensScratch = mgens;
+ }
+ // fnTypes[i] is a potential match
+ // fnTypes[flast] is the last item in the list
+ // swap them, and drop the potential match from the list
+ // check if the remaining function types also match
+ fnTypes[i] = fnTypes[flast];
+ fnTypes.length = flast;
+ if (!queryElemsTmp) {
+ queryElemsTmp = queryElems.slice(0, qlast);
+ }
+ const passesUnification = unifyFunctionTypes(
+ fnTypes,
+ queryElemsTmp,
+ whereClause,
+ mgensScratch,
+ mgensScratch => {
+ if (fnType.generics.length === 0 && queryElem.generics.length === 0
+ && fnType.bindings.size === 0 && queryElem.bindings.size === 0) {
+ return !solutionCb || solutionCb(mgensScratch);
}
- unifyFunctionTypes(
- fnType.generics,
- queryElem.generics,
+ const solution = unifyFunctionTypeCheckBindings(
+ fnType,
+ queryElem,
whereClause,
- mgens,
- mgensScratch => {
- matchCandidates.push({
- fnTypesScratch,
- mgensScratch,
- queryElemsOffset: i,
- fnTypesOffset: j,
- unbox: false,
- });
- return false; // "reject" all candidates to gather all of them
- }
+ mgensScratch
);
- }
- if (unifyFunctionTypeIsUnboxCandidate(fnType, queryElem, whereClause, mgens)) {
- if (!fnTypesScratch) {
- fnTypesScratch = fnTypes.slice();
+ if (!solution) {
+ return false;
}
- if (!mgensScratch) {
- mgensScratch = new Map(mgens);
+ const simplifiedGenerics = solution.simplifiedGenerics;
+ for (const simplifiedMgens of solution.mgens) {
+ const passesUnification = unifyFunctionTypes(
+ simplifiedGenerics,
+ queryElem.generics,
+ whereClause,
+ simplifiedMgens,
+ solutionCb
+ );
+ if (passesUnification) {
+ return true;
+ }
}
- backtracking.push({
- fnTypesScratch,
- mgensScratch,
- queryElemsOffset: i,
- fnTypesOffset: j,
- unbox: true,
- });
- }
- }
- if (matchCandidates.length === 0) {
- if (backtrack()) {
- continue;
- } else {
return false;
}
+ );
+ if (passesUnification) {
+ return true;
}
- // use the current candidate
- const {fnTypesOffset: candidate, mgensScratch: mgensNew} = matchCandidates.pop();
- if (fnTypes[candidate].id < 0 && queryElems[i].id < 0) {
- mgens.set(fnTypes[candidate].id, queryElems[i].id);
- }
- for (const [fid, qid] of mgensNew) {
- mgens.set(fid, qid);
- }
- // `i` and `j` are paired off
- // `queryElems[i]` is left in place
- // `fnTypes[j]` is swapped with `fnTypes[i]` to pair them off
- const tmp = fnTypes[candidate];
- fnTypes[candidate] = fnTypes[i];
- fnTypes[i] = tmp;
- // write other candidates to backtracking queue
- for (const otherCandidate of matchCandidates) {
- backtracking.push(otherCandidate);
- }
- // If we're on the last item, check the solution with the callback
- // backtrack if the callback says its unsuitable
- while (i === (ql - 1) && solutionCb && !solutionCb(mgens)) {
- if (!backtrack()) {
- return false;
+ // backtrack
+ fnTypes[flast] = fnTypes[i];
+ fnTypes[i] = fnType;
+ fnTypes.length = fl;
+ }
+ for (let i = flast; i >= 0; i -= 1) {
+ const fnType = fnTypes[i];
+ if (!unifyFunctionTypeIsUnboxCandidate(fnType, queryElem, whereClause, mgens)) {
+ continue;
+ }
+ let mgensScratch;
+ if (fnType.id < 0) {
+ mgensScratch = new Map(mgens);
+ if (mgensScratch.has(fnType.id) && mgensScratch.get(fnType.id) !== 0) {
+ continue;
}
+ mgensScratch.set(fnType.id, 0);
+ } else {
+ mgensScratch = mgens;
+ }
+ const generics = fnType.id < 0 ?
+ whereClause[(-fnType.id) - 1] :
+ fnType.generics;
+ const bindings = fnType.bindings ?
+ Array.from(fnType.bindings.values()).flat() :
+ [];
+ const passesUnification = unifyFunctionTypes(
+ fnTypes.toSpliced(i, 1, ...generics, ...bindings),
+ queryElems,
+ whereClause,
+ mgensScratch,
+ solutionCb
+ );
+ if (passesUnification) {
+ return true;
}
}
- return true;
+ return false;
}
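For illustration, here is a minimal standalone sketch of the swap-truncate-backtrack strategy used above, reduced to plain integer ids. It is not part of the patch; `unifySketch` is an invented name, and the real `unifyFunctionTypes` additionally tracks generics, bindings, unboxing, and the `mgens` map.

function unifySketch(fnIds, queryIds) {
    if (queryIds.length === 0) {
        return true;
    }
    if (queryIds.length > fnIds.length) {
        return false;
    }
    const flast = fnIds.length - 1;
    const q = queryIds[queryIds.length - 1];
    const rest = queryIds.slice(0, -1);
    for (let i = flast; i >= 0; i -= 1) {
        if (fnIds[i] !== q) {
            continue;
        }
        // swap the candidate into the last slot and truncate, so the
        // recursive call only sees the remaining, unpaired atoms
        const picked = fnIds[i];
        fnIds[i] = fnIds[flast];
        fnIds.length = flast;
        if (unifySketch(fnIds, rest)) {
            return true;
        }
        // backtrack: restore the array and try the next candidate
        fnIds[flast] = fnIds[i];
        fnIds[i] = picked;
    }
    return false;
}
// unifySketch([1, 2, 2, 3], [2, 3]) === true
// unifySketch([1, 2], [2, 2]) === false (each function atom pairs off only once)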
- function unifyFunctionTypeIsMatchCandidate(fnType, queryElem, whereClause, mgens) {
+ /**
+ * Check if this function is a match candidate.
+ *
+ * This function performs all the fast checks that don't require backtracking.
+ * It checks that the two items are not named differently; that check is load-bearing.
+ * It also checks that, if the query has generics, the function type has generics
+ * or associated type bindings: that's not load-bearing, but it prevents unnecessary
+ * backtracking later.
+ *
+ * @param {FunctionType} fnType
+ * @param {QueryElement} queryElem
+ * @param {[FunctionSearchType]} whereClause - Trait bounds for generic items.
+ * @param {Map<number,number>|null} mgensIn - Map functions generics to query generics.
+ * @returns {boolean}
+ */
+ function unifyFunctionTypeIsMatchCandidate(fnType, queryElem, whereClause, mgensIn) {
// type filters look like `trait:Read` or `enum:Result`
if (!typePassesFilter(queryElem.typeFilter, fnType.ty)) {
return false;
}
// fnType.id < 0 means generic
// queryElem.id < 0 does too
- // mgens[fnType.id] = queryElem.id
- // or, if mgens[fnType.id] = 0, then we've matched this generic with a bare trait
+ // mgensIn[fnType.id] = queryElem.id
+ // or, if mgensIn[fnType.id] = 0, then we've matched this generic with a bare trait
// and should make that same decision everywhere it appears
if (fnType.id < 0 && queryElem.id < 0) {
- if (mgens.has(fnType.id) && mgens.get(fnType.id) !== queryElem.id) {
- return false;
- }
- for (const [fid, qid] of mgens.entries()) {
- if (fnType.id !== fid && queryElem.id === qid) {
+ if (mgensIn) {
+ if (mgensIn.has(fnType.id) && mgensIn.get(fnType.id) !== queryElem.id) {
return false;
}
- if (fnType.id === fid && queryElem.id !== qid) {
- return false;
+ for (const [fid, qid] of mgensIn.entries()) {
+ if (fnType.id !== fid && queryElem.id === qid) {
+ return false;
+ }
+ if (fnType.id === fid && queryElem.id !== qid) {
+ return false;
+ }
}
}
+ return true;
} else {
if (queryElem.id === typeNameIdOfArrayOrSlice &&
(fnType.id === typeNameIdOfSlice || fnType.id === typeNameIdOfArray)
) {
// [] matches primitive:array or primitive:slice
// if it matches, then we're fine, and this is an appropriate match candidate
- } else if (fnType.id !== queryElem.id) {
+ } else if (fnType.id !== queryElem.id || queryElem.id === null) {
return false;
}
// If the query elem has generics, and the function doesn't,
// it can't match.
- if (fnType.generics.length === 0 && queryElem.generics.length !== 0) {
+ if ((fnType.generics.length + fnType.bindings.size) === 0 &&
+ queryElem.generics.length !== 0
+ ) {
+ return false;
+ }
+ if (fnType.bindings.size < queryElem.bindings.size) {
return false;
}
// If the query element is a path (it contains `::`), we need to check if this
@@ -1595,9 +1638,87 @@ function initSearch(rawSearchIndex) {
return false;
}
}
+ return true;
+ }
+ }
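A small sketch, not from the patch, of the consistency rule the generics map enforces in the branch above: `mgens` behaves like a partial bijection, so a function generic may be bound to at most one query generic and vice versa. `mgensAllowsSketch` is an invented helper name.

function mgensAllowsSketch(mgens, fnId, queryId) {
    if (mgens === null) {
        return true; // nothing committed yet, any pairing is allowed
    }
    for (const [fid, qid] of mgens.entries()) {
        if (fid === fnId && qid !== queryId) {
            return false; // this function generic is already bound elsewhere
        }
        if (fid !== fnId && qid === queryId) {
            return false; // this query generic is already taken
        }
    }
    return true;
}
// const m = new Map([[-1, -2]]);
// mgensAllowsSketch(m, -1, -2) === true
// mgensAllowsSketch(m, -1, -3) === false
// mgensAllowsSketch(m, -4, -2) === false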
+ /**
+ * This function checks the associated type bindings. Any that aren't matched get converted
+ * to generics, and this function returns an array of the function's generics with these
+ * simplified bindings added to them. That is, it takes a path like this:
+ *
+ * Iterator<Item=u32>
+ *
+ * ... if queryElem itself has an `Item=` in it, then this function returns an empty array.
+ * But if queryElem contains no Item=, then this function returns a one-item array with the
+ * ID of u32 in it, and the rest of the matching engine acts as if `Iterator<u32>` were
+ * the type instead.
+ *
+ * @param {FunctionType} fnType
+ * @param {QueryElement} queryElem
+ * @param {[FunctionType]} whereClause - Trait bounds for generic items.
+ * @param {Map<number,number>} mgensIn - Map functions generics to query generics.
+ * Never modified.
+ * @returns {false|{mgens: [Map<number,number>], simplifiedGenerics: [FunctionType]}}
+ */
+ function unifyFunctionTypeCheckBindings(fnType, queryElem, whereClause, mgensIn) {
+ if (fnType.bindings.size < queryElem.bindings.size) {
+ return false;
+ }
+ let simplifiedGenerics = fnType.generics || [];
+ if (fnType.bindings.size > 0) {
+ let mgensSolutionSet = [mgensIn];
+ for (const [name, constraints] of queryElem.bindings.entries()) {
+ if (mgensSolutionSet.length === 0) {
+ return false;
+ }
+ if (!fnType.bindings.has(name)) {
+ return false;
+ }
+ const fnTypeBindings = fnType.bindings.get(name);
+ mgensSolutionSet = mgensSolutionSet.flatMap(mgens => {
+ const newSolutions = [];
+ unifyFunctionTypes(
+ fnTypeBindings,
+ constraints,
+ whereClause,
+ mgens,
+ newMgens => {
+ newSolutions.push(newMgens);
+ // return `false` makes unifyFunctionTypes return the full set of
+ // possible solutions
+ return false;
+ }
+ );
+ return newSolutions;
+ });
+ }
+ if (mgensSolutionSet.length === 0) {
+ return false;
+ }
+ const binds = Array.from(fnType.bindings.entries()).flatMap(entry => {
+ const [name, constraints] = entry;
+ if (queryElem.bindings.has(name)) {
+ return [];
+ } else {
+ return constraints;
+ }
+ });
+ if (simplifiedGenerics.length > 0) {
+ simplifiedGenerics = [...simplifiedGenerics, ...binds];
+ } else {
+ simplifiedGenerics = binds;
+ }
+ return { simplifiedGenerics, mgens: mgensSolutionSet };
}
- return true;
+ return { simplifiedGenerics, mgens: [mgensIn] };
}
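A minimal sketch, separate from the patch, of just the simplification step described above: bindings the query does not name are folded into the generics list, so Iterator&lt;Item=u32&gt; is matched as if it were Iterator&lt;u32&gt;. The early size and missing-name rejections that unifyFunctionTypeCheckBindings also performs are left out, and `simplifyBindingsSketch` is an invented name.

function simplifyBindingsSketch(fnGenerics, fnBindings, queryBindings) {
    const unmatched = [];
    for (const [name, constraints] of fnBindings.entries()) {
        if (!queryBindings.has(name)) {
            // the query never mentions this binding, so its constraints
            // are treated as ordinary generic arguments
            unmatched.push(...constraints);
        }
    }
    return [...fnGenerics, ...unmatched];
}
// simplifyBindingsSketch([], new Map([["Item", ["u32"]]]), new Map())
//     returns ["u32"], so the type behaves like Iterator<u32>
// simplifyBindingsSketch([], new Map([["Item", ["u32"]]]), new Map([["Item", ["u32"]]]))
//     returns [], because the query checks Item= directly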
+ /**
+ * @param {FunctionType} fnType
+ * @param {QueryElement} queryElem
+ * @param {[FunctionType]} whereClause - Trait bounds for generic items.
+ * @param {Map<number,number>|null} mgens - Map functions generics to query generics.
+ * @returns {boolean}
+ */
function unifyFunctionTypeIsUnboxCandidate(fnType, queryElem, whereClause, mgens) {
if (fnType.id < 0 && queryElem.id >= 0) {
if (!whereClause) {
@@ -1605,16 +1726,29 @@ function initSearch(rawSearchIndex) {
}
// mgens[fnType.id] === 0 indicates that we committed to unboxing this generic
// mgens[fnType.id] === null indicates that we haven't decided yet
- if (mgens.has(fnType.id) && mgens.get(fnType.id) !== 0) {
+ if (mgens && mgens.has(fnType.id) && mgens.get(fnType.id) !== 0) {
return false;
}
+ // Where clauses can represent cyclical data.
+ // `null` prevents it from trying to unbox in an infinite loop
+ const mgensTmp = new Map(mgens);
+ mgensTmp.set(fnType.id, null);
// This is only a potential unbox if the search query appears in the where clause
// for example, searching `Read -> usize` should find
// `fn read_all<R: Read>(R) -> Result<usize>`
// generic `R` is considered "unboxed"
- return checkIfInList(whereClause[(-fnType.id) - 1], queryElem, whereClause);
- } else if (fnType.generics && fnType.generics.length > 0) {
- return checkIfInList(fnType.generics, queryElem, whereClause);
+ return checkIfInList(
+ whereClause[(-fnType.id) - 1],
+ queryElem,
+ whereClause,
+ mgensTmp
+ );
+ } else if (fnType.generics.length > 0 || fnType.bindings.size > 0) {
+ const simplifiedGenerics = [
+ ...fnType.generics,
+ ...Array.from(fnType.bindings.values()).flat(),
+ ];
+ return checkIfInList(simplifiedGenerics, queryElem, whereClause, mgens);
}
return false;
}
@@ -1626,12 +1760,13 @@ function initSearch(rawSearchIndex) {
* @param {Array<FunctionType>} list
* @param {QueryElement} elem - The element from the parsed query.
* @param {[FunctionType]} whereClause - Trait bounds for generic items.
+ * @param {Map<number,number>|null} mgens - Map functions generics to query generics.
*
* @return {boolean} - Returns true if found, false otherwise.
*/
- function checkIfInList(list, elem, whereClause) {
+ function checkIfInList(list, elem, whereClause, mgens) {
for (const entry of list) {
- if (checkType(entry, elem, whereClause)) {
+ if (checkType(entry, elem, whereClause, mgens)) {
return true;
}
}
@@ -1645,42 +1780,29 @@ function initSearch(rawSearchIndex) {
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
* @param {[FunctionType]} whereClause - Trait bounds for generic items.
+ * @param {Map<number,number>|null} mgens - Map functions generics to query generics.
*
* @return {boolean} - Returns true if the type matches, false otherwise.
*/
- function checkType(row, elem, whereClause) {
- if (row.id === null) {
- // This is a pure "generic" search, no need to run other checks.
- return row.generics.length > 0
- ? checkIfInList(row.generics, elem, whereClause)
- : false;
- }
-
- if (row.id < 0 && elem.id >= 0) {
- const gid = (-row.id) - 1;
- return checkIfInList(whereClause[gid], elem, whereClause);
- }
-
- if (row.id < 0 && elem.id < 0) {
- return true;
- }
-
- const matchesExact = row.id === elem.id;
- const matchesArrayOrSlice = elem.id === typeNameIdOfArrayOrSlice &&
- (row.id === typeNameIdOfSlice || row.id === typeNameIdOfArray);
-
- if ((matchesExact || matchesArrayOrSlice) &&
- typePassesFilter(elem.typeFilter, row.ty)) {
- if (elem.generics.length > 0) {
- return checkGenerics(row, elem, whereClause, new Map());
+ function checkType(row, elem, whereClause, mgens) {
+ if (row.bindings.size === 0 && elem.bindings.size === 0) {
+ if (elem.id < 0) {
+ return row.id < 0 || checkIfInList(row.generics, elem, whereClause, mgens);
+ }
+ if (row.id > 0 && elem.id > 0 && elem.pathWithoutLast.length === 0 &&
+ typePassesFilter(elem.typeFilter, row.ty) && elem.generics.length === 0 &&
+ // special case
+ elem.id !== typeNameIdOfArrayOrSlice
+ ) {
+ return row.id === elem.id || checkIfInList(
+ row.generics,
+ elem,
+ whereClause,
+ mgens
+ );
}
- return true;
}
-
- // If the current item does not match, try [unboxing] the generic.
- // [unboxing]:
- // https://ndmitchell.com/downloads/slides-hoogle_fast_type_searching-09_aug_2008.pdf
- return checkIfInList(row.generics, elem, whereClause);
+ return unifyFunctionTypes([row], [elem], whereClause, mgens);
}
function checkPath(contains, ty, maxEditDistance) {
@@ -1696,26 +1818,16 @@ function initSearch(rawSearchIndex) {
const length = path.length;
const clength = contains.length;
- if (clength > length) {
- return maxEditDistance + 1;
- }
- for (let i = 0; i < length; ++i) {
- if (i + clength > length) {
- break;
- }
+ pathiter: for (let i = length - clength; i >= 0; i -= 1) {
let dist_total = 0;
- let aborted = false;
for (let x = 0; x < clength; ++x) {
const dist = editDistance(path[i + x], contains[x], maxEditDistance);
if (dist > maxEditDistance) {
- aborted = true;
- break;
+ continue pathiter;
}
dist_total += dist;
}
- if (!aborted) {
- ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
- }
+ ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
}
return ret_dist;
}
@@ -1819,7 +1931,7 @@ function initSearch(rawSearchIndex) {
* The `results` map contains information which will be used to sort the search results:
*
* * `fullId` is a `string` used as the key of the object we use for the `results` map.
- * * `id` is the index in both `searchWords` and `searchIndex` arrays for this element.
+ * * `id` is the index in the `searchIndex` array for this element.
* * `index` is an `integer` used to sort by the position of the word in the item's name.
* * `dist` is the main metric used to sort the search results.
* * `path_dist` is zero if a single-component search query is used, otherwise it's the
@@ -1833,8 +1945,7 @@ function initSearch(rawSearchIndex) {
* @param {integer} path_dist
*/
function addIntoResults(results, fullId, id, index, dist, path_dist, maxEditDistance) {
- const inBounds = dist <= maxEditDistance || index !== -1;
- if (dist === 0 || (!parsedQuery.literalSearch && inBounds)) {
+ if (dist <= maxEditDistance || index !== -1) {
if (results.has(fullId)) {
const result = results.get(fullId);
if (result.dontValidate || result.dist <= dist) {
@@ -1878,40 +1989,44 @@ function initSearch(rawSearchIndex) {
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
return;
}
- let index = -1, path_dist = 0;
+ let path_dist = 0;
const fullId = row.id;
- const searchWord = searchWords[pos];
- const in_args = row.type && row.type.inputs
- && checkIfInList(row.type.inputs, elem, row.type.where_clause);
- if (in_args) {
- // path_dist is 0 because no parent path information is currently stored
- // in the search index
- addIntoResults(results_in_args, fullId, pos, -1, 0, 0, maxEditDistance);
- }
- const returned = row.type && row.type.output
- && checkIfInList(row.type.output, elem, row.type.where_clause);
- if (returned) {
- addIntoResults(results_returned, fullId, pos, -1, 0, 0, maxEditDistance);
+ // tfpDist is a minimum possible type distance, where "type distance" is the number of
+ // atoms in the function not present in the query
+ const tfpDist = compareTypeFingerprints(
+ fullId,
+ parsedQuery.typeFingerprint
+ );
+ if (tfpDist !== null) {
+ const in_args = row.type && row.type.inputs
+ && checkIfInList(row.type.inputs, elem, row.type.where_clause);
+ const returned = row.type && row.type.output
+ && checkIfInList(row.type.output, elem, row.type.where_clause);
+ if (in_args) {
+ results_in_args.max_dist = Math.max(results_in_args.max_dist || 0, tfpDist);
+ const maxDist = results_in_args.size < MAX_RESULTS ?
+ (tfpDist + 1) :
+ results_in_args.max_dist;
+ addIntoResults(results_in_args, fullId, pos, -1, tfpDist, 0, maxDist);
+ }
+ if (returned) {
+ results_returned.max_dist = Math.max(results_returned.max_dist || 0, tfpDist);
+ const maxDist = results_returned.size < MAX_RESULTS ?
+ (tfpDist + 1) :
+ results_returned.max_dist;
+ addIntoResults(results_returned, fullId, pos, -1, tfpDist, 0, maxDist);
+ }
}
if (!typePassesFilter(elem.typeFilter, row.ty)) {
return;
}
- const row_index = row.normalizedName.indexOf(elem.pathLast);
- const word_index = searchWord.indexOf(elem.pathLast);
-
- // lower indexes are "better" matches
- // rank based on the "best" match
- if (row_index === -1) {
- index = word_index;
- } else if (word_index === -1) {
- index = row_index;
- } else if (word_index < row_index) {
- index = word_index;
- } else {
- index = row_index;
+ let index = row.word.indexOf(elem.pathLast);
+ const normalizedIndex = row.normalizedName.indexOf(elem.pathLast);
+ if (index === -1 || (index > normalizedIndex && normalizedIndex !== -1)) {
+ index = normalizedIndex;
}
if (elem.fullPath.length > 1) {
@@ -1922,13 +2037,13 @@ function initSearch(rawSearchIndex) {
}
if (parsedQuery.literalSearch) {
- if (searchWord === elem.name) {
+ if (row.word === elem.pathLast) {
addIntoResults(results_others, fullId, pos, index, 0, path_dist);
}
return;
}
- const dist = editDistance(searchWord, elem.pathLast, maxEditDistance);
+ const dist = editDistance(row.normalizedName, elem.normalizedPathLast, maxEditDistance);
if (index === -1 && dist + path_dist > maxEditDistance) {
return;
@@ -1951,6 +2066,17 @@ function initSearch(rawSearchIndex) {
return;
}
+ const tfpDist = compareTypeFingerprints(
+ row.id,
+ parsedQuery.typeFingerprint
+ );
+ if (tfpDist === null) {
+ return;
+ }
+ if (results.size >= MAX_RESULTS && tfpDist > results.max_dist) {
+ return;
+ }
+
// If the result is too "bad", we return false and it ends this search.
if (!unifyFunctionTypes(
row.type.inputs,
@@ -1969,12 +2095,11 @@ function initSearch(rawSearchIndex) {
return;
}
- addIntoResults(results, row.id, pos, 0, 0, 0, Number.MAX_VALUE);
+ results.max_dist = Math.max(results.max_dist || 0, tfpDist);
+ addIntoResults(results, row.id, pos, 0, tfpDist, 0, Number.MAX_VALUE);
}
function innerRunQuery() {
- let elem, i, nSearchWords, in_returned, row;
-
let queryLen = 0;
for (const elem of parsedQuery.elems) {
queryLen += elem.name.length;
@@ -2000,17 +2125,20 @@ function initSearch(rawSearchIndex) {
* See `buildTypeMapIndex` for more information.
*
* @param {QueryElement} elem
+ * @param {boolean} isAssocType
*/
- function convertNameToId(elem) {
- if (typeNameIdMap.has(elem.pathLast)) {
- elem.id = typeNameIdMap.get(elem.pathLast);
+ function convertNameToId(elem, isAssocType) {
+ if (typeNameIdMap.has(elem.normalizedPathLast) &&
+ (isAssocType || !typeNameIdMap.get(elem.normalizedPathLast).assocOnly)) {
+ elem.id = typeNameIdMap.get(elem.normalizedPathLast).id;
} else if (!parsedQuery.literalSearch) {
let match = null;
let matchDist = maxEditDistance + 1;
let matchName = "";
- for (const [name, id] of typeNameIdMap) {
- const dist = editDistance(name, elem.pathLast, maxEditDistance);
- if (dist <= matchDist && dist <= maxEditDistance) {
+ for (const [name, {id, assocOnly}] of typeNameIdMap) {
+ const dist = editDistance(name, elem.normalizedPathLast, maxEditDistance);
+ if (dist <= matchDist && dist <= maxEditDistance &&
+ (isAssocType || !assocOnly)) {
if (dist === matchDist && matchName > name) {
continue;
}
@@ -2025,7 +2153,7 @@ function initSearch(rawSearchIndex) {
elem.id = match;
}
if ((elem.id === null && parsedQuery.totalElems > 1 && elem.typeFilter === -1
- && elem.generics.length === 0)
+ && elem.generics.length === 0 && elem.bindings.size === 0)
|| elem.typeFilter === TY_GENERIC) {
if (genericSymbols.has(elem.name)) {
elem.id = genericSymbols.get(elem.name);
@@ -2068,19 +2196,40 @@ function initSearch(rawSearchIndex) {
for (const elem2 of elem.generics) {
convertNameToId(elem2);
}
+ elem.bindings = new Map(Array.from(elem.bindings.entries())
+ .map(entry => {
+ const [name, constraints] = entry;
+ if (!typeNameIdMap.has(name)) {
+ parsedQuery.error = [
+ "Type parameter ",
+ name,
+ " does not exist",
+ ];
+ return [null, []];
+ }
+ for (const elem2 of constraints) {
+ convertNameToId(elem2);
+ }
+
+ return [typeNameIdMap.get(name).id, constraints];
+ })
+ );
}
+ const fps = new Set();
for (const elem of parsedQuery.elems) {
convertNameToId(elem);
+ buildFunctionTypeFingerprint(elem, parsedQuery.typeFingerprint, fps);
}
for (const elem of parsedQuery.returned) {
convertNameToId(elem);
+ buildFunctionTypeFingerprint(elem, parsedQuery.typeFingerprint, fps);
}
- if (parsedQuery.foundElems === 1) {
+ if (parsedQuery.foundElems === 1 && parsedQuery.returned.length === 0) {
if (parsedQuery.elems.length === 1) {
- elem = parsedQuery.elems[0];
- for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
+ const elem = parsedQuery.elems[0];
+ for (let i = 0, nSearchIndex = searchIndex.length; i < nSearchIndex; ++i) {
// It means we want to check for this element everywhere (in names, args and
// returned).
handleSingleArg(
@@ -2093,30 +2242,25 @@ function initSearch(rawSearchIndex) {
maxEditDistance
);
}
- } else if (parsedQuery.returned.length === 1) {
- // We received one returned argument to check, so looking into returned values.
- elem = parsedQuery.returned[0];
- for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
- row = searchIndex[i];
- in_returned = row.type && unifyFunctionTypes(
- row.type.output,
- parsedQuery.returned,
- row.type.where_clause
- );
- if (in_returned) {
- addIntoResults(
- results_others,
- row.id,
- i,
- -1,
- 0,
- Number.MAX_VALUE
- );
- }
- }
}
} else if (parsedQuery.foundElems > 0) {
- for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
+ // Sort input and output so that generic type variables go first and
+ // types with generic parameters go last.
+ // That's because of the way unification is structured: it eats off
+ // the end, and hits a fast path if the last item is a simple atom.
+ const sortQ = (a, b) => {
+ const ag = a.generics.length === 0 && a.bindings.size === 0;
+ const bg = b.generics.length === 0 && b.bindings.size === 0;
+ if (ag !== bg) {
+ return ag - bg;
+ }
+ const ai = a.id > 0;
+ const bi = b.id > 0;
+ return ai - bi;
+ };
+ parsedQuery.elems.sort(sortQ);
+ parsedQuery.returned.sort(sortQ);
+ for (let i = 0, nSearchIndex = searchIndex.length; i < nSearchIndex; ++i) {
handleArgs(searchIndex[i], i, results_others);
}
}
@@ -2139,44 +2283,6 @@ function initSearch(rawSearchIndex) {
return ret;
}
- /**
- * Validate performs the following boolean logic. For example:
- * "File::open" will give IF A PARENT EXISTS => ("file" && "open")
- * exists in (name || path || parent) OR => ("file" && "open") exists in
- * (name || path )
- *
- * This could be written functionally, but I wanted to minimise
- * functions on stack.
- *
- * @param {string} name - The name of the result
- * @param {string} path - The path of the result
- * @param {string} keys - The keys to be used (["file", "open"])
- * @param {Object} parent - The parent of the result
- *
- * @return {boolean} - Whether the result is valid or not
- */
- function validateResult(name, path, keys, parent, maxEditDistance) {
- if (!keys || !keys.length) {
- return true;
- }
- for (const key of keys) {
- // each check is for validation so we negate the conditions and invalidate
- if (!(
- // check for an exact name match
- name.indexOf(key) > -1 ||
- // then an exact path match
- path.indexOf(key) > -1 ||
- // next if there is a parent, check for exact parent match
- (parent !== undefined && parent.name !== undefined &&
- parent.name.toLowerCase().indexOf(key) > -1) ||
- // lastly check to see if the name was an editDistance match
- editDistance(name, key, maxEditDistance) <= maxEditDistance)) {
- return false;
- }
- }
- return true;
- }
-
function nextTab(direction) {
const next = (searchState.currentTab + direction + 3) % searchState.focusedByTab.length;
searchState.focusedByTab[searchState.currentTab] = document.activeElement;
@@ -2269,13 +2375,9 @@ function initSearch(rawSearchIndex) {
* @param {boolean} display - True if this is the active tab
*/
function addTab(array, query, display) {
- let extraClass = "";
- if (display === true) {
- extraClass = " active";
- }
+ const extraClass = display ? " active" : "";
const output = document.createElement("div");
- let length = 0;
if (array.length > 0) {
output.className = "search-results " + extraClass;
@@ -2285,8 +2387,6 @@ function initSearch(rawSearchIndex) {
const longType = longItemTypes[item.ty];
const typeName = longType.length !== 0 ? `${longType}` : "?";
- length += 1;
-
const link = document.createElement("a");
link.className = "result-" + type;
link.href = item.href;
@@ -2334,7 +2434,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
"href=\"https://docs.rs\">Docs.rs</a> for documentation of crates released on" +
" <a href=\"https://crates.io/\">crates.io</a>.</li></ul>";
}
- return [output, length];
+ return [output, array.length];
}
function makeTabHeader(tabNb, text, nbElems) {
@@ -2413,11 +2513,10 @@ ${item.displayPath}<span class="${type}">${name}</span>\
}
let crates = "";
- const crates_list = Object.keys(rawSearchIndex);
- if (crates_list.length > 1) {
+ if (rawSearchIndex.size > 1) {
crates = " in&nbsp;<div id=\"crate-search-div\"><select id=\"crate-search\">" +
"<option value=\"all crates\">all crates</option>";
- for (const c of crates_list) {
+ for (const c of rawSearchIndex.keys()) {
crates += `<option value="${c}" ${c === filterCrates && "selected"}>${c}</option>`;
}
crates += "</select></div>";
@@ -2514,13 +2613,9 @@ ${item.displayPath}<span class="${type}">${name}</span>\
/**
* Perform a search based on the current state of the search input element
* and display the results.
- * @param {Event} [e] - The event that triggered this search, if any
* @param {boolean} [forced]
*/
- function search(e, forced) {
- if (e) {
- e.preventDefault();
- }
+ function search(forced) {
const query = parseQuery(searchState.input.value.trim());
let filterCrates = getFilterCrates();
@@ -2549,7 +2644,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
updateSearchHistory(buildUrl(query.original, filterCrates));
showResults(
- execQuery(query, searchWords, filterCrates, window.currentCrate),
+ execQuery(query, filterCrates, window.currentCrate),
params.go_to_first,
filterCrates);
}
@@ -2581,19 +2676,42 @@ ${item.displayPath}<span class="${type}">${name}</span>\
*
* @param {RawFunctionType} type
*/
- function buildItemSearchType(type, lowercasePaths) {
+ function buildItemSearchType(type, lowercasePaths, isAssocType) {
const PATH_INDEX_DATA = 0;
const GENERICS_DATA = 1;
- let pathIndex, generics;
+ const BINDINGS_DATA = 2;
+ let pathIndex, generics, bindings;
if (typeof type === "number") {
pathIndex = type;
generics = [];
+ bindings = new Map();
} else {
pathIndex = type[PATH_INDEX_DATA];
generics = buildItemSearchTypeAll(
type[GENERICS_DATA],
lowercasePaths
);
+ if (type.length > BINDINGS_DATA) {
+ bindings = new Map(type[BINDINGS_DATA].map(binding => {
+ const [assocType, constraints] = binding;
+ // Associated type constructors are represented sloppily in rustdoc's
+ // type search, to make the engine simpler.
+ //
+ // MyType<Output<T>=Result<T>> is equivalent to MyType<Output<Result<T>>=T>
+ // and both are, essentially
+ // MyType<Output=(T, Result<T>)>, except the tuple isn't actually there.
+ // It's more like the value of a type binding is naturally an array,
+ // which rustdoc calls "constraints".
+ //
+ // As a result, the key should never have generics on it.
+ return [
+ buildItemSearchType(assocType, lowercasePaths, true).id,
+ buildItemSearchTypeAll(constraints, lowercasePaths),
+ ];
+ }));
+ } else {
+ bindings = new Map();
+ }
}
if (pathIndex < 0) {
// types less than 0 are generic parameters
@@ -2603,6 +2721,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
ty: TY_GENERIC,
path: null,
generics,
+ bindings,
};
}
if (pathIndex === 0) {
@@ -2612,14 +2731,16 @@ ${item.displayPath}<span class="${type}">${name}</span>\
ty: null,
path: null,
generics,
+ bindings,
};
}
const item = lowercasePaths[pathIndex - 1];
return {
- id: buildTypeMapIndex(item.name),
+ id: buildTypeMapIndex(item.name, isAssocType),
ty: item.ty,
path: item.path,
generics,
+ bindings,
};
}
@@ -2679,14 +2800,119 @@ ${item.displayPath}<span class="${type}">${name}</span>\
};
}
+ /**
+ * Type fingerprints allow fast, approximate matching of types.
+ *
+ * This algo creates a compact representation of the type set using a Bloom filter.
+ * This fingerprint is used two ways:
+ *
+ * - It accelerates the matching algorithm by checking the function fingerprint against the
+ * query fingerprint. If any bits are set in the query but not in the function, it can't
+ * match.
+ *
+ * - The fourth section has the number of distinct items in the set.
+ * This is the distance function[^1], used for filtering and for sorting.
+ *
+ * [^1]: Distance is the relatively naive metric of counting the number of distinct items in
+ * the function that are not present in the query.
+ *
+ * @param {FunctionType|QueryElement} type - a single type
+ * @param {Uint32Array} output - write the fingerprint to this data structure: uses 128 bits
+ * @param {Set<number>} fps - Set of distinct items
+ */
+ function buildFunctionTypeFingerprint(type, output, fps) {
+ let input = type.id;
+ // All forms of `[]` get collapsed down to one thing in the bloom filter.
+ // Differentiating between arrays and slices, if the user asks for it, is
+ // still done in the matching algorithm.
+ if (input === typeNameIdOfArray || input === typeNameIdOfSlice) {
+ input = typeNameIdOfArrayOrSlice;
+ }
+ // http://burtleburtle.net/bob/hash/integer.html
+ // ~~ is toInt32. It's used before adding, so
+ // the number stays in safe integer range.
+ const hashint1 = k => {
+ k = (~~k + 0x7ed55d16) + (k << 12);
+ k = (k ^ 0xc761c23c) ^ (k >>> 19);
+ k = (~~k + 0x165667b1) + (k << 5);
+ k = (~~k + 0xd3a2646c) ^ (k << 9);
+ k = (~~k + 0xfd7046c5) + (k << 3);
+ return (k ^ 0xb55a4f09) ^ (k >>> 16);
+ };
+ const hashint2 = k => {
+ k = ~k + (k << 15);
+ k ^= k >>> 12;
+ k += k << 2;
+ k ^= k >>> 4;
+ k = Math.imul(k, 2057);
+ return k ^ (k >> 16);
+ };
+ if (input !== null) {
+ const h0a = hashint1(input);
+ const h0b = hashint2(input);
+ // Less Hashing, Same Performance: Building a Better Bloom Filter
+ // doi=10.1.1.72.2442
+ const h1a = ~~(h0a + Math.imul(h0b, 2));
+ const h1b = ~~(h0a + Math.imul(h0b, 3));
+ const h2a = ~~(h0a + Math.imul(h0b, 4));
+ const h2b = ~~(h0a + Math.imul(h0b, 5));
+ output[0] |= (1 << (h0a % 32)) | (1 << (h1b % 32));
+ output[1] |= (1 << (h1a % 32)) | (1 << (h2b % 32));
+ output[2] |= (1 << (h2a % 32)) | (1 << (h0b % 32));
+ fps.add(input);
+ }
+ for (const g of type.generics) {
+ buildFunctionTypeFingerprint(g, output, fps);
+ }
+ const fb = {
+ id: null,
+ ty: 0,
+ generics: [],
+ bindings: new Map(),
+ };
+ for (const [k, v] of type.bindings.entries()) {
+ fb.id = k;
+ fb.generics = v;
+ buildFunctionTypeFingerprint(fb, output, fps);
+ }
+ output[3] = fps.size;
+ }
+
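A standalone sketch of the fingerprint construction described above, not part of the patch: several bit positions are derived from two base hashes (the "less hashing" trick cited in the function above) and OR-ed into a small filter. The hash mixes and the `fingerprintSketch` name are stand-ins, not rustdoc's hashint1/hashint2.

function fingerprintSketch(ids) {
    const bits = new Uint32Array(3); // 96 bits of filter for the sketch
    const seen = new Set();
    for (const id of ids) {
        const h1 = Math.imul(id, 0x9e3779b1) >>> 0; // stand-in hash #1
        const h2 = (Math.imul(id, 0x85ebca77) ^ (id >>> 13)) >>> 0; // stand-in hash #2
        for (let k = 0; k < 3; k += 1) {
            // derive hash k from the two base hashes instead of rehashing
            const h = (h1 + Math.imul(h2, k + 2)) >>> 0;
            bits[k] |= 1 << (h % 32);
        }
        seen.add(id);
    }
    return { bits, count: seen.size }; // count plays the role of output[3]
}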
+ /**
+ * Compare the query fingerprint with the function fingerprint.
+ *
+ * @param {number} fullId - The function
+ * @param {Uint32Array} queryFingerprint - The query
+ * @returns {number|null} - Null if non-match, number if distance
+ * This function might return 0!
+ */
+ function compareTypeFingerprints(fullId, queryFingerprint) {
+ const fh0 = functionTypeFingerprint[fullId * 4];
+ const fh1 = functionTypeFingerprint[(fullId * 4) + 1];
+ const fh2 = functionTypeFingerprint[(fullId * 4) + 2];
+ const [qh0, qh1, qh2] = queryFingerprint;
+ // Approximate set intersection with bloom filters.
+ // This can be larger than reality, not smaller, because hashes have
+ // the property that if they've got the same value, they hash to the
+ // same thing. False positives exist, but not false negatives.
+ const [in0, in1, in2] = [fh0 & qh0, fh1 & qh1, fh2 & qh2];
+ // Approximate the set of items in the query but not the function.
+ // This might be smaller than reality, but cannot be bigger.
+ //
+ // | in_ | qh_ | XOR | Meaning |
+ // | --- | --- | --- | ------------------------------------------------ |
+ // | 0 | 0 | 0 | Not present |
+ // | 1 | 0 | 1 | IMPOSSIBLE because `in_` is `fh_ & qh_` |
+ // | 1 | 1 | 0 | If one or both is false positive, false negative |
+ // | 0 | 1 | 1 | Since in_ has no false negatives, must be real |
+ if ((in0 ^ qh0) || (in1 ^ qh1) || (in2 ^ qh2)) {
+ return null;
+ }
+ return functionTypeFingerprint[(fullId * 4) + 3];
+ }
+
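And a matching sketch, also not from the patch, of the superset test and distance readout that compareTypeFingerprints performs, written against the fingerprintSketch shape above rather than the flat per-function Uint32Array.

function compareFingerprintSketch(fnFp, queryFp) {
    for (let k = 0; k < 3; k += 1) {
        const inBoth = fnFp.bits[k] & queryFp.bits[k];
        // a bit set in the query but missing from the function rules the
        // function out: bloom filters never produce false negatives
        if (inBoth ^ queryFp.bits[k]) {
            return null;
        }
    }
    // surviving candidates are ranked by the function's distinct-atom count
    return fnFp.count;
}
// const fn = fingerprintSketch([1, 2, 3]);
// const q = fingerprintSketch([2]);
// compareFingerprintSketch(fn, q) === 3 (kept as a candidate)
// compareFingerprintSketch(q, fn) is almost certainly null
//     (a rare filter collision could let it through)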
function buildIndex(rawSearchIndex) {
searchIndex = [];
- /**
- * List of normalized search words (ASCII lowercased, and undescores removed).
- *
- * @type {Array<string>}
- */
- const searchWords = [];
typeNameIdMap = new Map();
const charA = "A".charCodeAt(0);
let currentIndex = 0;
@@ -2698,81 +2924,86 @@ ${item.displayPath}<span class="${type}">${name}</span>\
typeNameIdOfSlice = buildTypeMapIndex("slice");
typeNameIdOfArrayOrSlice = buildTypeMapIndex("[]");
- for (const crate in rawSearchIndex) {
- if (!hasOwnPropertyRustdoc(rawSearchIndex, crate)) {
- continue;
- }
-
- let crateSize = 0;
-
- /**
- * The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
- * are arrays with the same length. `q`, `a`, and `c` use a sparse
- * representation for compactness.
- *
- * `n[i]` contains the name of an item.
- *
- * `t[i]` contains the type of that item
- * (as a string of characters that represent an offset in `itemTypes`).
- *
- * `d[i]` contains the description of that item.
- *
- * `q` contains the full paths of the items. For compactness, it is a set of
- * (index, path) pairs used to create a map. If a given index `i` is
- * not present, this indicates "same as the last index present".
- *
- * `i[i]` contains an item's parent, usually a module. For compactness,
- * it is a set of indexes into the `p` array.
- *
- * `f[i]` contains function signatures, or `0` if the item isn't a function.
- * Functions are themselves encoded as arrays. The first item is a list of
- * types representing the function's inputs, and the second list item is a list
- * of types representing the function's output. Tuples are flattened.
- * Types are also represented as arrays; the first item is an index into the `p`
- * array, while the second is a list of types representing any generic parameters.
- *
- * b[i] contains an item's impl disambiguator. This is only present if an item
- * is defined in an impl block and, the impl block's type has more than one associated
- * item with the same name.
- *
- * `a` defines aliases with an Array of pairs: [name, offset], where `offset`
- * points into the n/t/d/q/i/f arrays.
- *
- * `doc` contains the description of the crate.
- *
- * `p` is a list of path/type pairs. It is used for parents and function parameters.
- *
- * `c` is an array of item indices that are deprecated.
- *
- * @type {{
- * doc: string,
- * a: Object,
- * n: Array<string>,
- * t: String,
- * d: Array<string>,
- * q: Array<[Number, string]>,
- * i: Array<Number>,
- * f: Array<RawFunctionSearchType>,
- * p: Array<Object>,
- * b: Array<[Number, String]>,
- * c: Array<Number>
- * }}
- */
- const crateCorpus = rawSearchIndex[crate];
+ // Function type fingerprints are 128-bit bloom filters that are used to
+ // estimate the distance between function and query.
+ // This loop counts the number of items to allocate a fingerprint for.
+ for (const crate of rawSearchIndex.values()) {
+ // Each item gets an entry in the fingerprint array, and the crate
+ // does, too
+ id += crate.t.length + 1;
+ }
+ functionTypeFingerprint = new Uint32Array((id + 1) * 4);
- searchWords.push(crate);
+ // This loop actually generates the search item indexes, including
+ // normalized names, type signature objects and fingerprints, and aliases.
+ id = 0;
+ /**
+ * The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
+ * are arrays with the same length. `q`, `a`, and `c` use a sparse
+ * representation for compactness.
+ *
+ * `n[i]` contains the name of an item.
+ *
+ * `t[i]` contains the type of that item
+ * (as a string of characters that represent an offset in `itemTypes`).
+ *
+ * `d[i]` contains the description of that item.
+ *
+ * `q` contains the full paths of the items. For compactness, it is a set of
+ * (index, path) pairs used to create a map. If a given index `i` is
+ * not present, this indicates "same as the last index present".
+ *
+ * `i[i]` contains an item's parent, usually a module. For compactness,
+ * it is a set of indexes into the `p` array.
+ *
+ * `f[i]` contains function signatures, or `0` if the item isn't a function.
+ * Functions are themselves encoded as arrays. The first item is a list of
+ * types representing the function's inputs, and the second list item is a list
+ * of types representing the function's output. Tuples are flattened.
+ * Types are also represented as arrays; the first item is an index into the `p`
+ * array, while the second is a list of types representing any generic parameters.
+ *
+ * `b[i]` contains an item's impl disambiguator. This is only present if an item
+ * is defined in an impl block and the impl block's type has more than one associated
+ * item with the same name.
+ *
+ * `a` defines aliases with an Array of pairs: [name, offset], where `offset`
+ * points into the n/t/d/q/i/f arrays.
+ *
+ * `doc` contains the description of the crate.
+ *
+ * `p` is a list of path/type pairs. It is used for parents and function parameters.
+ *
+ * `c` is an array of item indices that are deprecated.
+ *
+ * @type {{
+ * doc: string,
+ * a: Object,
+ * n: Array<string>,
+ * t: String,
+ * d: Array<string>,
+ * q: Array<[Number, string]>,
+ * i: Array<Number>,
+ * f: Array<RawFunctionSearchType>,
+ * p: Array<Object>,
+ * b: Array<[Number, String]>,
+ * c: Array<Number>
+ * }}
+ */
+ for (const [crate, crateCorpus] of rawSearchIndex) {
// This object should have exactly the same set of fields as the "row"
// object defined below. Your JavaScript runtime will thank you.
// https://mathiasbynens.be/notes/shapes-ics
const crateRow = {
crate: crate,
- ty: 1, // == ExternCrate
+ ty: 3, // == ExternCrate
name: crate,
path: "",
desc: crateCorpus.doc,
parent: undefined,
type: null,
id: id,
+ word: crate,
normalizedName: crate.indexOf("_") === -1 ? crate : crate.replace(/_/g, ""),
deprecated: null,
implDisambiguator: null,
@@ -2840,13 +3071,34 @@ ${item.displayPath}<span class="${type}">${name}</span>\
len = itemTypes.length;
for (let i = 0; i < len; ++i) {
let word = "";
- // This object should have exactly the same set of fields as the "crateRow"
- // object defined above.
if (typeof itemNames[i] === "string") {
word = itemNames[i].toLowerCase();
}
- searchWords.push(word);
const path = itemPaths.has(i) ? itemPaths.get(i) : lastPath;
+ let type = null;
+ if (itemFunctionSearchTypes[i] !== 0) {
+ type = buildFunctionSearchType(
+ itemFunctionSearchTypes[i],
+ lowercasePaths
+ );
+ if (type) {
+ const fp = functionTypeFingerprint.subarray(id * 4, (id + 1) * 4);
+ const fps = new Set();
+ for (const t of type.inputs) {
+ buildFunctionTypeFingerprint(t, fp, fps);
+ }
+ for (const t of type.output) {
+ buildFunctionTypeFingerprint(t, fp, fps);
+ }
+ for (const w of type.where_clause) {
+ for (const t of w) {
+ buildFunctionTypeFingerprint(t, fp, fps);
+ }
+ }
+ }
+ }
+ // This object should have exactly the same set of fields as the "crateRow"
+ // object defined above.
const row = {
crate: crate,
ty: itemTypes.charCodeAt(i) - charA,
@@ -2854,11 +3106,9 @@ ${item.displayPath}<span class="${type}">${name}</span>\
path: path,
desc: itemDescs[i],
parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
- type: buildFunctionSearchType(
- itemFunctionSearchTypes[i],
- lowercasePaths
- ),
+ type,
id: id,
+ word,
normalizedName: word.indexOf("_") === -1 ? word : word.replace(/_/g, ""),
deprecated: deprecatedItems.has(i),
implDisambiguator: implDisambiguator.has(i) ? implDisambiguator.get(i) : null,
@@ -2866,14 +3116,13 @@ ${item.displayPath}<span class="${type}">${name}</span>\
id += 1;
searchIndex.push(row);
lastPath = row.path;
- crateSize += 1;
}
if (aliases) {
const currentCrateAliases = new Map();
ALIASES.set(crate, currentCrateAliases);
for (const alias_name in aliases) {
- if (!hasOwnPropertyRustdoc(aliases, alias_name)) {
+ if (!Object.prototype.hasOwnProperty.call(aliases, alias_name)) {
continue;
}
@@ -2889,9 +3138,8 @@ ${item.displayPath}<span class="${type}">${name}</span>\
}
}
}
- currentIndex += crateSize;
+ currentIndex += itemTypes.length;
}
- return searchWords;
}
/**
@@ -3031,7 +3279,8 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// popping a state (Firefox), which is why search() is
// called both here and at the end of the startSearch()
// function.
- search(e);
+ e.preventDefault();
+ search();
} else {
searchState.input.value = "";
// When browsing back from search results the main page
@@ -3066,13 +3315,10 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// before paste back the previous search, you get the old search results without
// the filter. To prevent this, we need to remove the previous results.
currentResults = null;
- search(undefined, true);
+ search(true);
}
- /**
- * @type {Array<string>}
- */
- const searchWords = buildIndex(rawSearchIndex);
+ buildIndex(rawSearchIndex);
if (typeof window !== "undefined") {
registerSearchEvents();
// If there's a search term in the URL, execute the search now.
@@ -3086,7 +3332,6 @@ ${item.displayPath}<span class="${type}">${name}</span>\
exports.execQuery = execQuery;
exports.parseQuery = parseQuery;
}
- return searchWords;
}
if (typeof window !== "undefined") {
@@ -3097,7 +3342,7 @@ if (typeof window !== "undefined") {
} else {
// Running in Node, not a browser. Run initSearch just to produce the
// exports.
- initSearch({});
+ initSearch(new Map());
}
diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js
index 63947789c..2b42fbebb 100644
--- a/src/librustdoc/html/static/js/settings.js
+++ b/src/librustdoc/html/static/js/settings.js
@@ -1,6 +1,6 @@
// Local js definitions:
/* global getSettingValue, updateLocalStorage, updateTheme */
-/* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */
+/* global addClass, removeClass, onEach, onEachLazy, blurHandler */
/* global MAIN_ID, getVar, getSettingsButton */
"use strict";
@@ -29,6 +29,13 @@
window.rustdoc_remove_line_numbers_from_examples();
}
break;
+ case "hide-sidebar":
+ if (value === true) {
+ addClass(document.documentElement, "hide-sidebar");
+ } else {
+ removeClass(document.documentElement, "hide-sidebar");
+ }
+ break;
}
}
@@ -187,6 +194,11 @@
"default": false,
},
{
+ "name": "Hide persistent navigation bar",
+ "js_name": "hide-sidebar",
+ "default": false,
+ },
+ {
"name": "Disable keyboard shortcuts",
"js_name": "disable-shortcuts",
"default": false,
@@ -216,6 +228,13 @@
function displaySettings() {
settingsMenu.style.display = "";
+ onEachLazy(settingsMenu.querySelectorAll("input[type='checkbox']"), el => {
+ const val = getSettingValue(el.id);
+ const checked = val === "true";
+ if (checked !== el.checked && val !== null) {
+ el.checked = checked;
+ }
+ });
}
function settingsBlurHandler(event) {
@@ -232,7 +251,7 @@
const settingsButton = getSettingsButton();
const settingsMenu = document.getElementById("settings");
settingsButton.onclick = event => {
- if (elemIsInParent(event.target, settingsMenu)) {
+ if (settingsMenu.contains(event.target)) {
return;
}
event.preventDefault();
diff --git a/src/librustdoc/html/static/js/src-script.js b/src/librustdoc/html/static/js/src-script.js
index 679c2341f..fc1d2d378 100644
--- a/src/librustdoc/html/static/js/src-script.js
+++ b/src/librustdoc/html/static/js/src-script.js
@@ -71,16 +71,31 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
return hasFoundFile;
}
+let toggleLabel;
+
+function getToggleLabel() {
+ toggleLabel = toggleLabel || document.querySelector("#src-sidebar-toggle button");
+ return toggleLabel;
+}
+
+window.rustdocCloseSourceSidebar = () => {
+ removeClass(document.documentElement, "src-sidebar-expanded");
+ getToggleLabel().innerText = ">";
+ updateLocalStorage("source-sidebar-show", "false");
+};
+
+window.rustdocShowSourceSidebar = () => {
+ addClass(document.documentElement, "src-sidebar-expanded");
+ getToggleLabel().innerText = "<";
+ updateLocalStorage("source-sidebar-show", "true");
+};
+
function toggleSidebar() {
const child = this.parentNode.children[0];
if (child.innerText === ">") {
- addClass(document.documentElement, "src-sidebar-expanded");
- child.innerText = "<";
- updateLocalStorage("source-sidebar-show", "true");
+ window.rustdocShowSourceSidebar();
} else {
- removeClass(document.documentElement, "src-sidebar-expanded");
- child.innerText = ">";
- updateLocalStorage("source-sidebar-show", "false");
+ window.rustdocCloseSourceSidebar();
}
}
@@ -118,10 +133,10 @@ function createSrcSidebar() {
title.className = "title";
title.innerText = "Files";
sidebar.appendChild(title);
- Object.keys(srcIndex).forEach(key => {
- srcIndex[key][NAME_OFFSET] = key;
- hasFoundFile = createDirEntry(srcIndex[key], sidebar, "", hasFoundFile);
- });
+ for (const [key, source] of srcIndex) {
+ source[NAME_OFFSET] = key;
+ hasFoundFile = createDirEntry(source, sidebar, "", hasFoundFile);
+ }
container.appendChild(sidebar);
// Focus on the current file in the source files sidebar.
@@ -131,12 +146,8 @@ function createSrcSidebar() {
}
}
-const lineNumbersRegex = /^#?(\d+)(?:-(\d+))?$/;
-
-function highlightSrcLines(match) {
- if (typeof match === "undefined") {
- match = window.location.hash.match(lineNumbersRegex);
- }
+function highlightSrcLines() {
+ const match = window.location.hash.match(/^#?(\d+)(?:-(\d+))?$/);
if (!match) {
return;
}
@@ -218,12 +229,7 @@ const handleSrcHighlight = (function() {
};
}());
-window.addEventListener("hashchange", () => {
- const match = window.location.hash.match(lineNumbersRegex);
- if (match) {
- return highlightSrcLines(match);
- }
-});
+window.addEventListener("hashchange", highlightSrcLines);
onEachLazy(document.getElementsByClassName("src-line-numbers"), el => {
el.addEventListener("click", handleSrcHighlight);
diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js
index c69641092..ac9c6f377 100644
--- a/src/librustdoc/html/static/js/storage.js
+++ b/src/librustdoc/html/static/js/storage.js
@@ -51,22 +51,11 @@ function removeClass(elem, className) {
* Run a callback for every element of an Array.
* @param {Array<?>} arr - The array to iterate over
* @param {function(?)} func - The callback
- * @param {boolean} [reversed] - Whether to iterate in reverse
*/
-function onEach(arr, func, reversed) {
- if (arr && arr.length > 0) {
- if (reversed) {
- for (let i = arr.length - 1; i >= 0; --i) {
- if (func(arr[i])) {
- return true;
- }
- }
- } else {
- for (const elem of arr) {
- if (func(elem)) {
- return true;
- }
- }
+function onEach(arr, func) {
+ for (const elem of arr) {
+ if (func(elem)) {
+ return true;
}
}
return false;
@@ -80,14 +69,12 @@ function onEach(arr, func, reversed) {
* https://developer.mozilla.org/en-US/docs/Web/API/NodeList
* @param {NodeList<?>|HTMLCollection<?>} lazyArray - An array to iterate over
* @param {function(?)} func - The callback
- * @param {boolean} [reversed] - Whether to iterate in reverse
*/
// eslint-disable-next-line no-unused-vars
-function onEachLazy(lazyArray, func, reversed) {
+function onEachLazy(lazyArray, func) {
return onEach(
Array.prototype.slice.call(lazyArray),
- func,
- reversed);
+ func);
}
function updateLocalStorage(name, value) {
@@ -196,11 +183,38 @@ if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) {
updateTheme();
+// Hide, show, and resize the sidebar at page load time
+//
+// This needs to be done here because this JS is render-blocking,
+// so that the sidebar doesn't "jump" after appearing on screen.
+// The user interaction to change this is set up in main.js.
if (getSettingValue("source-sidebar-show") === "true") {
// At this point in page load, `document.body` is not available yet.
// Set a class on the `<html>` element instead.
addClass(document.documentElement, "src-sidebar-expanded");
}
+if (getSettingValue("hide-sidebar") === "true") {
+ // At this point in page load, `document.body` is not available yet.
+ // Set a class on the `<html>` element instead.
+ addClass(document.documentElement, "hide-sidebar");
+}
+function updateSidebarWidth() {
+ const desktopSidebarWidth = getSettingValue("desktop-sidebar-width");
+ if (desktopSidebarWidth && desktopSidebarWidth !== "null") {
+ document.documentElement.style.setProperty(
+ "--desktop-sidebar-width",
+ desktopSidebarWidth + "px"
+ );
+ }
+ const srcSidebarWidth = getSettingValue("src-sidebar-width");
+ if (srcSidebarWidth && srcSidebarWidth !== "null") {
+ document.documentElement.style.setProperty(
+ "--src-sidebar-width",
+ srcSidebarWidth + "px"
+ );
+ }
+}
+updateSidebarWidth();
// If we navigate away (for example to a settings page), and then use the back or
// forward button to get back to a page, the theme may have changed in the meantime.
@@ -214,5 +228,6 @@ if (getSettingValue("source-sidebar-show") === "true") {
window.addEventListener("pageshow", ev => {
if (ev.persisted) {
setTimeout(updateTheme, 0);
+ setTimeout(updateSidebarWidth, 0);
}
});
diff --git a/src/librustdoc/html/templates/item_union.html b/src/librustdoc/html/templates/item_union.html
index f6d2fa348..8db7986fa 100644
--- a/src/librustdoc/html/templates/item_union.html
+++ b/src/librustdoc/html/templates/item_union.html
@@ -4,13 +4,13 @@
</code></pre>
{{ self.document() | safe }}
{% if self.fields_iter().peek().is_some() %}
- <h2 id="fields" class="fields small-section-header"> {# #}
+ <h2 id="fields" class="fields section-header"> {# #}
Fields<a href="#fields" class="anchor">§</a> {# #}
</h2>
{% for (field, ty) in self.fields_iter() %}
{% let name = field.name.expect("union field name") %}
<span id="structfield.{{ name }}" {#+ #}
- class="{{ ItemType::StructField +}} small-section-header"> {# #}
+ class="{{ ItemType::StructField +}} section-header"> {# #}
<a href="#structfield.{{ name }}" class="anchor field">§</a> {# #}
<code>{{ name }}: {{+ self.print_ty(ty) | safe }}</code> {# #}
</span>
diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html
index 3f6147bb9..60ca5660c 100644
--- a/src/librustdoc/html/templates/page.html
+++ b/src/librustdoc/html/templates/page.html
@@ -114,6 +114,7 @@
{% endif %}
{{ sidebar|safe }}
</nav> {# #}
+ <div class="sidebar-resizer"></div>
<main> {# #}
{% if page.css_class != "src" %}<div class="width-limiter">{% endif %}
<nav class="sub"> {# #}
@@ -128,6 +129,11 @@
{% endif %}
<form class="search-form"> {# #}
<span></span> {# This empty span is a hacky fix for Safari - See #93184 #}
+ {% if page.css_class != "src" %}
+ <div id="sidebar-button" tabindex="-1"> {# #}
+ <a href="{{page.root_path|safe}}{{layout.krate|safe}}/all.html" title="show sidebar"></a> {# #}
+ </div> {# #}
+ {% endif %}
<input {#+ #}
class="search-input" {#+ #}
name="search" {#+ #}
@@ -136,8 +142,8 @@
spellcheck="false" {#+ #}
placeholder="Click or press ‘S’ to search, ‘?’ for more options…" {#+ #}
type="search"> {# #}
- <div id="help-button" title="help" tabindex="-1"> {# #}
- <a href="{{page.root_path|safe}}help.html">?</a> {# #}
+ <div id="help-button" tabindex="-1"> {# #}
+ <a href="{{page.root_path|safe}}help.html" title="help">?</a> {# #}
</div> {# #}
<div id="settings-menu" tabindex="-1"> {# #}
<a href="{{page.root_path|safe}}settings.html" title="settings"> {# #}
diff --git a/src/librustdoc/html/templates/type_layout.html b/src/librustdoc/html/templates/type_layout.html
index b8b7785a2..e0516bb42 100644
--- a/src/librustdoc/html/templates/type_layout.html
+++ b/src/librustdoc/html/templates/type_layout.html
@@ -1,4 +1,4 @@
-<h2 id="layout" class="small-section-header"> {# #}
+<h2 id="layout" class="section-header"> {# #}
Layout<a href="#layout" class="anchor">§</a> {# #}
</h2> {# #}
<div class="docblock"> {# #}
@@ -54,7 +54,7 @@
<strong>Note:</strong> Encountered an error during type layout; {#+ #}
the type failed to be normalized. {# #}
</p> {# #}
- {% when Err(LayoutError::Cycle) %}
+ {% when Err(LayoutError::Cycle(_)) %}
<p> {# #}
<strong>Note:</strong> Encountered an error during type layout; {#+ #}
the type's layout depended on the type's layout itself. {# #}
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
index 285923251..61376ab31 100644
--- a/src/librustdoc/json/conversions.rs
+++ b/src/librustdoc/json/conversions.rs
@@ -250,15 +250,16 @@ pub(crate) fn id_from_item_inner(
// their parent module, which isn't present in the output JSON items. So
// instead, we directly get the primitive symbol and convert it to u32 to
// generate the ID.
- if matches!(tcx.def_kind(def_id), DefKind::Mod) &&
- let Some(prim) = tcx.get_attrs(*def_id, sym::rustc_doc_primitive)
- .find_map(|attr| attr.value_str()) {
+ if matches!(tcx.def_kind(def_id), DefKind::Mod)
+ && let Some(prim) = tcx
+ .get_attrs(*def_id, sym::rustc_doc_primitive)
+ .find_map(|attr| attr.value_str())
+ {
format!(":{}", prim.as_u32())
} else {
- tcx
- .opt_item_name(*def_id)
- .map(|n| format!(":{}", n.as_u32()))
- .unwrap_or_default()
+ tcx.opt_item_name(*def_id)
+ .map(|n| format!(":{}", n.as_u32()))
+ .unwrap_or_default()
}
}
};
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index 27e8a27ba..c1d90020e 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -80,9 +80,9 @@ impl<'tcx> JsonRenderer<'tcx> {
// document primitive items in an arbitrary crate by using
// `rustc_doc_primitive`.
let mut is_primitive_impl = false;
- if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind &&
- impl_.trait_.is_none() &&
- let clean::types::Type::Primitive(_) = impl_.for_
+ if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind
+ && impl_.trait_.is_none()
+ && let clean::types::Type::Primitive(_) = impl_.for_
{
is_primitive_impl = true;
}
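
The two json hunks above (and several hunks further down) only reformat `if let … &&` chains so that each `&&` leads its own continuation line and the opening brace sits alone on the next line. As a rough standalone illustration of that layout (all names below are made up, and let chains still require the unstable `let_chains` feature on nightly outside the compiler tree at this Rust version):

    // Illustrative only: compiles on nightly with the unstable `let_chains`
    // feature enabled; nothing here is taken from the patch itself.
    #![feature(let_chains)]

    fn sum_if_even(a: Option<i32>, b: Option<i32>) -> Option<i32> {
        // Each `&&` starts its own line, matching the style adopted above.
        if let Some(x) = a
            && let Some(y) = b
            && (x + y) % 2 == 0
        {
            Some(x + y)
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(sum_if_even(Some(1), Some(3)), Some(4));
        assert_eq!(sum_if_even(Some(1), Some(2)), None);
        assert_eq!(sum_if_even(None, Some(2)), None);
    }
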
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index dda06d4c9..52803c2b0 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -53,6 +53,7 @@ extern crate rustc_interface;
extern crate rustc_lexer;
extern crate rustc_lint;
extern crate rustc_lint_defs;
+extern crate rustc_log;
extern crate rustc_macros;
extern crate rustc_metadata;
extern crate rustc_middle;
@@ -81,7 +82,7 @@ use rustc_errors::ErrorGuaranteed;
use rustc_interface::interface;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{make_crate_type_option, ErrorOutputType, RustcOptGroup};
-use rustc_session::{getopts, EarlyErrorHandler};
+use rustc_session::{getopts, EarlyDiagCtxt};
use crate::clean::utils::DOC_RUST_LANG_ORG_CHANNEL;
@@ -156,7 +157,7 @@ pub fn main() {
}
}
- let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
+ let mut early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
let using_internal_features = rustc_driver::install_ice_hook(
"https://github.com/rust-lang/rust/issues/new\
@@ -174,11 +175,11 @@ pub fn main() {
// `debug_logging = true` is because all rustc logging goes to its version of tracing (the one
// in the sysroot), and all of rustdoc's logging goes to its version (the one in Cargo.toml).
- init_logging(&handler);
- rustc_driver::init_env_logger(&handler, "RUSTDOC_LOG");
+ init_logging(&early_dcx);
+ rustc_driver::init_logger(&early_dcx, rustc_log::LoggerConfig::from_env("RUSTDOC_LOG"));
- let exit_code = rustc_driver::catch_with_exit_code(|| match get_args(&handler) {
- Some(args) => main_args(&mut handler, &args, using_internal_features),
+ let exit_code = rustc_driver::catch_with_exit_code(|| match get_args(&early_dcx) {
+ Some(args) => main_args(&mut early_dcx, &args, using_internal_features),
_ =>
{
#[allow(deprecated)]
@@ -188,15 +189,15 @@ pub fn main() {
process::exit(exit_code);
}
-fn init_logging(handler: &EarlyErrorHandler) {
+fn init_logging(early_dcx: &EarlyDiagCtxt) {
let color_logs = match std::env::var("RUSTDOC_LOG_COLOR").as_deref() {
Ok("always") => true,
Ok("never") => false,
Ok("auto") | Err(VarError::NotPresent) => io::stdout().is_terminal(),
- Ok(value) => handler.early_error(format!(
+ Ok(value) => early_dcx.early_error(format!(
"invalid log color value '{value}': expected one of always, never, or auto",
)),
- Err(VarError::NotUnicode(value)) => handler.early_error(format!(
+ Err(VarError::NotUnicode(value)) => early_dcx.early_error(format!(
"invalid log color value '{}': expected one of always, never, or auto",
value.to_string_lossy()
)),
@@ -219,13 +220,13 @@ fn init_logging(handler: &EarlyErrorHandler) {
tracing::subscriber::set_global_default(subscriber).unwrap();
}
-fn get_args(handler: &EarlyErrorHandler) -> Option<Vec<String>> {
+fn get_args(early_dcx: &EarlyDiagCtxt) -> Option<Vec<String>> {
env::args_os()
.enumerate()
.map(|(i, arg)| {
arg.into_string()
.map_err(|arg| {
- handler.early_warn(format!("Argument {i} is not valid Unicode: {arg:?}"));
+ early_dcx.early_warn(format!("Argument {i} is not valid Unicode: {arg:?}"));
})
.ok()
})
@@ -672,11 +673,11 @@ fn usage(argv0: &str) {
/// A result type used by several functions under `main()`.
type MainResult = Result<(), ErrorGuaranteed>;
-fn wrap_return(diag: &rustc_errors::Handler, res: Result<(), String>) -> MainResult {
+fn wrap_return(dcx: &rustc_errors::DiagCtxt, res: Result<(), String>) -> MainResult {
match res {
- Ok(()) => diag.has_errors().map_or(Ok(()), Err),
+ Ok(()) => dcx.has_errors().map_or(Ok(()), Err),
Err(err) => {
- let reported = diag.struct_err(err).emit();
+ let reported = dcx.struct_err(err).emit();
Err(reported)
}
}
@@ -703,7 +704,7 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
}
fn main_args(
- handler: &mut EarlyErrorHandler,
+ early_dcx: &mut EarlyDiagCtxt,
at_args: &[String],
using_internal_features: Arc<AtomicBool>,
) -> MainResult {
@@ -717,7 +718,7 @@ fn main_args(
// the compiler with @empty_file as argv[0] and no more arguments.
let at_args = at_args.get(1..).unwrap_or_default();
- let args = rustc_driver::args::arg_expand_all(handler, at_args);
+ let args = rustc_driver::args::arg_expand_all(early_dcx, at_args);
let mut options = getopts::Options::new();
for option in opts() {
@@ -726,13 +727,13 @@ fn main_args(
let matches = match options.parse(&args) {
Ok(m) => m,
Err(err) => {
- handler.early_error(err.to_string());
+ early_dcx.early_error(err.to_string());
}
};
// Note that we discard any distinction between different non-zero exit
// codes from `from_matches` here.
- let (options, render_options) = match config::Options::from_matches(handler, &matches, args) {
+ let (options, render_options) = match config::Options::from_matches(early_dcx, &matches, args) {
Ok(opts) => opts,
Err(code) => {
return if code == 0 {
@@ -744,12 +745,8 @@ fn main_args(
}
};
- let diag = core::new_handler(
- options.error_format,
- None,
- options.diagnostic_width,
- &options.unstable_opts,
- );
+ let diag =
+ core::new_dcx(options.error_format, None, options.diagnostic_width, &options.unstable_opts);
match (options.should_test, options.markdown_input()) {
(true, true) => return wrap_return(&diag, markdown::test(options)),
@@ -773,7 +770,7 @@ fn main_args(
}
// need to move these items separately because we lose them by the time the closure is called,
- // but we can't create the Handler ahead of time because it's not Send
+ // but we can't create the dcx ahead of time because it's not Send
let show_coverage = options.show_coverage;
let run_check = options.run_check;
@@ -793,23 +790,16 @@ fn main_args(
let config = core::create_config(options, &render_options, using_internal_features);
interface::run_compiler(config, |compiler| {
- let sess = compiler.session();
+ let sess = &compiler.sess;
if sess.opts.describe_lints {
- let mut lint_store = rustc_lint::new_lint_store(sess.enable_internal_lints());
- let registered_lints = if let Some(register_lints) = compiler.register_lints() {
- register_lints(sess, &mut lint_store);
- true
- } else {
- false
- };
- rustc_driver::describe_lints(sess, &lint_store, registered_lints);
+ rustc_driver::describe_lints(sess);
return Ok(());
}
compiler.enter(|queries| {
let mut gcx = abort_on_err(queries.global_ctxt(), sess);
- if sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if sess.dcx().has_errors_or_lint_errors().is_some() {
sess.fatal("Compilation failed, aborting rustdoc");
}
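
The `init_logging` changes above only rename the diagnostics handle (`handler` to `early_dcx`); the surrounding `RUSTDOC_LOG_COLOR` parsing is unchanged. For readers unfamiliar with that pattern, here is a rough standalone sketch of the same always/never/auto matching using only the standard library (the variable name and error handling are illustrative, not taken from rustdoc):

    use std::env::VarError;
    use std::io::{self, IsTerminal};

    // Decide whether to colorize logs from an environment variable,
    // mirroring the matching shape shown in the hunk above.
    fn color_logs_enabled() -> Result<bool, String> {
        match std::env::var("MY_TOOL_LOG_COLOR").as_deref() {
            Ok("always") => Ok(true),
            Ok("never") => Ok(false),
            // Default: color only when stdout is a terminal.
            Ok("auto") | Err(VarError::NotPresent) => Ok(io::stdout().is_terminal()),
            Ok(value) => Err(format!(
                "invalid log color value '{value}': expected one of always, never, or auto"
            )),
            Err(VarError::NotUnicode(value)) => Err(format!(
                "invalid log color value '{}': expected one of always, never, or auto",
                value.to_string_lossy()
            )),
        }
    }

    fn main() {
        println!("color logs: {:?}", color_logs_enabled());
    }
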
diff --git a/src/librustdoc/lint.rs b/src/librustdoc/lint.rs
index d45040e34..267f1cb0b 100644
--- a/src/librustdoc/lint.rs
+++ b/src/librustdoc/lint.rs
@@ -33,7 +33,7 @@ where
let lints = || {
lint::builtin::HardwiredLints::get_lints()
.into_iter()
- .chain(rustc_lint::SoftLints::get_lints().into_iter())
+ .chain(rustc_lint::SoftLints::get_lints())
};
let lint_opts = lints()
@@ -46,7 +46,7 @@ where
filter_call(lint)
}
})
- .chain(lint_opts.into_iter())
+ .chain(lint_opts)
.collect::<Vec<_>>();
let lint_caps = lints()
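
The two lint.rs edits above drop redundant `.into_iter()` calls: `Iterator::chain` already accepts any `IntoIterator`, so the collection can be passed directly. A tiny self-contained illustration (the lint names are placeholders):

    fn main() {
        let hardwired = vec!["dead_code", "unreachable_code"];
        let extra = vec!["missing_docs", "unused_imports"];
        // `chain` takes any `IntoIterator`, so the second Vec can be passed
        // directly; wrapping it in `extra.into_iter()` behaves identically.
        let all: Vec<&str> = hardwired.into_iter().chain(extra).collect();
        assert_eq!(all, ["dead_code", "unreachable_code", "missing_docs", "unused_imports"]);
    }
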
diff --git a/src/librustdoc/passes/check_custom_code_classes.rs b/src/librustdoc/passes/check_custom_code_classes.rs
index 6266d3ff5..73f71cc06 100644
--- a/src/librustdoc/passes/check_custom_code_classes.rs
+++ b/src/librustdoc/passes/check_custom_code_classes.rs
@@ -48,7 +48,7 @@ struct TestsWithCustomClasses {
impl crate::doctest::Tester for TestsWithCustomClasses {
fn add_test(&mut self, _: String, config: LangString, _: usize) {
- self.custom_classes_found.extend(config.added_classes.into_iter());
+ self.custom_classes_found.extend(config.added_classes);
}
}
@@ -66,9 +66,8 @@ pub(crate) fn look_for_custom_classes<'tcx>(cx: &DocContext<'tcx>, item: &Item)
if !tests.custom_classes_found.is_empty() {
let span = item.attr_span(cx.tcx);
let sess = &cx.tcx.sess.parse_sess;
- let mut err = sess
- .span_diagnostic
- .struct_span_warn(span, "custom classes in code blocks will change behaviour");
+ let mut err =
+ sess.dcx.struct_span_warn(span, "custom classes in code blocks will change behaviour");
add_feature_diagnostics_for_issue(
&mut err,
sess,
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index d1c4cc1f5..a931e8804 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -79,9 +79,9 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
let def_id = item.item_id.expect_def_id().expect_local();
// check if parent is trait impl
- if let Some(parent_def_id) = cx.tcx.opt_local_parent(def_id) &&
- let Some(parent_node) = cx.tcx.hir().find_by_def_id(parent_def_id) &&
- matches!(
+ if let Some(parent_def_id) = cx.tcx.opt_local_parent(def_id)
+ && let Some(parent_node) = cx.tcx.opt_hir_node_by_def_id(parent_def_id)
+ && matches!(
parent_node,
hir::Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }),
@@ -100,7 +100,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
}
let (level, source) = cx.tcx.lint_level_at_node(
crate::lint::MISSING_DOC_CODE_EXAMPLES,
- cx.tcx.hir().local_def_id_to_hir_id(def_id),
+ cx.tcx.local_def_id_to_hir_id(def_id),
);
level != lint::Level::Allow || matches!(source, LintLevelSource::Default)
}
@@ -131,7 +131,7 @@ pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item
hir_id,
sp,
"missing code example in this documentation",
- |lint| lint,
+ |_| {},
);
}
} else if tests.found_tests > 0
@@ -142,7 +142,7 @@ pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item
hir_id,
item.attr_span(cx.tcx),
"documentation test in private item",
- |lint| lint,
+ |_| {},
);
}
}
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index fcd078858..ee185ab98 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -303,7 +303,9 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
Res::Def(DefKind::Enum, did) => match tcx.type_of(did).instantiate_identity().kind() {
ty::Adt(def, _) if def.is_enum() => {
if let Some(variant) = def.variants().iter().find(|v| v.name == variant_name)
- && let Some(field) = variant.fields.iter().find(|f| f.name == variant_field_name) {
+ && let Some(field) =
+ variant.fields.iter().find(|f| f.name == variant_field_name)
+ {
Ok((ty_res, field.did))
} else {
Err(UnresolvedPath {
@@ -973,7 +975,8 @@ impl LinkCollector<'_, '_> {
&& let Some(def_id) = item.item_id.as_def_id()
&& let Some(def_id) = def_id.as_local()
&& !self.cx.tcx.effective_visibilities(()).is_exported(def_id)
- && !has_primitive_or_keyword_docs(&item.attrs.other_attrs) {
+ && !has_primitive_or_keyword_docs(&item.attrs.other_attrs)
+ {
// Skip link resolution for non-exported items.
return;
}
@@ -1250,9 +1253,10 @@ impl LinkCollector<'_, '_> {
// FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
// However I'm not sure how to check that across crates.
- if let Some(candidate) = candidates.get(0) &&
- candidate.0 == Res::Primitive(PrimitiveType::RawPointer) &&
- key.path_str.contains("::") // We only want to check this if this is an associated item.
+ if let Some(candidate) = candidates.get(0)
+ && candidate.0 == Res::Primitive(PrimitiveType::RawPointer)
+ && key.path_str.contains("::")
+ // We only want to check this if this is an associated item.
{
if key.item_id.is_local() && !self.cx.tcx.features().intra_doc_pointers {
self.report_rawptr_assoc_feature_gate(diag.dox, &diag.link_range, diag.item);
@@ -1318,8 +1322,8 @@ impl LinkCollector<'_, '_> {
for other_ns in [TypeNS, ValueNS, MacroNS] {
if other_ns != expected_ns {
if let Ok(res) =
- self.resolve(path_str, other_ns, item_id, module_id) &&
- !res.is_empty()
+ self.resolve(path_str, other_ns, item_id, module_id)
+ && !res.is_empty()
{
err = ResolutionFailure::WrongNamespace {
res: full_res(self.cx.tcx, res[0]),
@@ -1751,8 +1755,6 @@ fn report_diagnostic(
}
decorate(lint, span, link_range);
-
- lint
});
}
@@ -1892,8 +1894,10 @@ fn resolution_failure(
};
let is_struct_variant = |did| {
if let ty::Adt(def, _) = tcx.type_of(did).instantiate_identity().kind()
- && def.is_enum()
- && let Some(variant) = def.variants().iter().find(|v| v.name == res.name(tcx)) {
+ && def.is_enum()
+ && let Some(variant) =
+ def.variants().iter().find(|v| v.name == res.name(tcx))
+ {
// ctor is `None` if variant is a struct
variant.ctor.is_none()
} else {
@@ -1918,7 +1922,6 @@ fn resolution_failure(
Variant
| Field
| Closure
- | Coroutine
| AssocTy
| AssocConst
| AssocFn
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index a57321b58..df2e8584b 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -22,7 +22,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
let tcx = cx.tcx;
// We need to check if there are errors before running this pass because it would crash when
// we try to get auto and blanket implementations.
- if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.dcx().has_errors_or_lint_errors().is_some() {
return krate;
}
@@ -154,9 +154,9 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// scan through included items ahead of time to splice in Deref targets to the "valid" sets
for it in new_items_external.iter().chain(new_items_local.iter()) {
- if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
- trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() &&
- cleaner.keep_impl(for_, true)
+ if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind
+ && trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait()
+ && cleaner.keep_impl(for_, true)
{
let target = items
.iter()
@@ -198,7 +198,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
cleaner.keep_impl(
for_,
trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait(),
- ) || trait_.as_ref().map_or(false, |t| cleaner.keep_impl_with_def_id(t.def_id().into()))
+ ) || trait_.as_ref().is_some_and(|t| cleaner.keep_impl_with_def_id(t.def_id().into()))
|| kind.is_blanket()
} else {
true
@@ -250,8 +250,8 @@ impl<'cache> DocVisitor for ItemAndAliasCollector<'cache> {
fn visit_item(&mut self, i: &Item) {
self.items.insert(i.item_id);
- if let TypeAliasItem(alias) = &*i.kind &&
- let Some(did) = alias.type_.def_id(self.cache)
+ if let TypeAliasItem(alias) = &*i.kind
+ && let Some(did) = alias.type_.def_id(self.cache)
{
self.items.insert(ItemId::DefId(did));
}
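
Besides the let-chain reformatting, the collect_trait_impls.rs hunk above replaces `Option::map_or(false, …)` with `Option::is_some_and(…)`, which expresses the same predicate without the dummy `false` default and has been stable since Rust 1.70. A minimal illustration with placeholder data, not the types from the patch:

    fn main() {
        let deref_target: Option<u32> = Some(7);
        // The two forms are equivalent; `is_some_and` reads as a single predicate.
        let old_style = deref_target.map_or(false, |id| id > 5);
        let new_style = deref_target.is_some_and(|id| id > 5);
        assert_eq!(old_style, new_style);
        assert!(new_style);
    }
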
diff --git a/src/librustdoc/passes/lint/bare_urls.rs b/src/librustdoc/passes/lint/bare_urls.rs
index 0c5cfffe1..bffa17da3 100644
--- a/src/librustdoc/passes/lint/bare_urls.rs
+++ b/src/librustdoc/passes/lint/bare_urls.rs
@@ -31,7 +31,7 @@ pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item) {
"use an automatic link instead",
format!("<{url}>"),
Applicability::MachineApplicable,
- )
+ );
});
};
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
index ac8a75a4f..ce42b9c20 100644
--- a/src/librustdoc/passes/lint/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -3,7 +3,7 @@ use rustc_data_structures::sync::{Lock, Lrc};
use rustc_errors::{
emitter::Emitter,
translation::{to_fluent_args, Translate},
- Applicability, Diagnostic, Handler, LazyFallbackBundle,
+ Applicability, DiagCtxt, Diagnostic, LazyFallbackBundle,
};
use rustc_parse::parse_stream_from_source_str;
use rustc_resolve::rustdoc::source_span_for_markdown_range;
@@ -42,9 +42,9 @@ fn check_rust_syntax(
let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
- let handler = Handler::with_emitter(Box::new(emitter)).disable_warnings();
+ let dcx = DiagCtxt::with_emitter(Box::new(emitter)).disable_warnings();
let source = dox[code_block.code].to_owned();
- let sess = ParseSess::with_span_handler(handler, sm);
+ let sess = ParseSess::with_dcx(dcx, sm);
let edition = code_block.lang_string.edition.unwrap_or_else(|| cx.tcx.sess.edition());
let expn_data =
@@ -98,7 +98,7 @@ fn check_rust_syntax(
// Finally build and emit the completed diagnostic.
// All points of divergence have been handled earlier so this can be
// done the same way whether the span is precise or not.
- let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
+ let hir_id = cx.tcx.local_def_id_to_hir_id(local_id);
cx.tcx.struct_span_lint_hir(crate::lint::INVALID_RUST_CODEBLOCKS, hir_id, sp, msg, |lint| {
let explanation = if is_ignore {
"`ignore` code blocks require valid Rust code for syntax highlighting; \
@@ -131,8 +131,6 @@ fn check_rust_syntax(
for message in buffer.messages.iter() {
lint.note(message.clone());
}
-
- lint
});
}
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 00d15a3ca..90874c011 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -89,7 +89,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
if (generics_start > 0 && dox.as_bytes()[generics_start - 1] == b'<')
|| (generics_end < dox.len() && dox.as_bytes()[generics_end] == b'>')
{
- return lint;
+ return;
}
// multipart form is chosen here because ``Vec<i32>`` would be confusing.
lint.multipart_suggestion(
@@ -101,8 +101,6 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
Applicability::MaybeIncorrect,
);
}
-
- lint
});
};
@@ -213,7 +211,9 @@ fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
.take_while(|(_, c)| is_id_start(*c) || is_id_continue(*c))
.reduce(|_accum, item| item)
.and_then(|(new_pos, c)| is_id_start(c).then_some(new_pos));
- if let Some(new_pos) = new_pos && current_pos != new_pos {
+ if let Some(new_pos) = new_pos
+ && current_pos != new_pos
+ {
current_pos = new_pos;
continue;
}
diff --git a/src/librustdoc/passes/lint/redundant_explicit_links.rs b/src/librustdoc/passes/lint/redundant_explicit_links.rs
index 472781e7d..4491d20b4 100644
--- a/src/librustdoc/passes/lint/redundant_explicit_links.rs
+++ b/src/librustdoc/passes/lint/redundant_explicit_links.rs
@@ -181,8 +181,6 @@ fn check_inline_or_reference_unknown_redundancy(
.span_label(display_span, "because label contains path that resolves to same destination")
.note("when a link's destination is not specified,\nthe label is used to resolve intra-doc links")
.span_suggestion_with_style(link_span, "remove explicit link target", format!("[{}]", link_data.display_link), Applicability::MaybeIncorrect, SuggestionStyle::ShowAlways);
-
- lint
});
}
@@ -234,8 +232,6 @@ fn check_reference_redundancy(
.span_note(def_span, "referenced explicit link target defined here")
.note("when a link's destination is not specified,\nthe label is used to resolve intra-doc links")
.span_suggestion_with_style(link_span, "remove explicit link target", format!("[{}]", link_data.display_link), Applicability::MaybeIncorrect, SuggestionStyle::ShowAlways);
-
- lint
});
}
diff --git a/src/librustdoc/passes/lint/unescaped_backticks.rs b/src/librustdoc/passes/lint/unescaped_backticks.rs
index 8b7fdd6ab..0893cd0b4 100644
--- a/src/librustdoc/passes/lint/unescaped_backticks.rs
+++ b/src/librustdoc/passes/lint/unescaped_backticks.rs
@@ -111,8 +111,6 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
}
suggest_insertion(cx, item, &dox, lint, backtick_index, '\\', "if you meant to use a literal backtick, escape it");
-
- lint
});
}
Event::Code(_) => {
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index b35618415..df955421b 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -208,24 +208,25 @@ impl<'a> DocFolder for ImplStripper<'a, '_> {
// Because we don't inline in `maybe_inline_local` if the output format is JSON,
// we need to make a special check for JSON output: we want to keep it unless it has
// a `#[doc(hidden)]` attribute if the `for_` type is exported.
- if let Some(did) = imp.for_.def_id(self.cache) &&
- !imp.for_.is_assoc_ty() && !self.should_keep_impl(&i, did)
+ if let Some(did) = imp.for_.def_id(self.cache)
+ && !imp.for_.is_assoc_ty()
+ && !self.should_keep_impl(&i, did)
{
debug!("ImplStripper: impl item for stripped type; removing");
return None;
}
- if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) &&
- !self.should_keep_impl(&i, did) {
+ if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id())
+ && !self.should_keep_impl(&i, did)
+ {
debug!("ImplStripper: impl item for stripped trait; removing");
return None;
}
if let Some(generics) = imp.trait_.as_ref().and_then(|t| t.generics()) {
for typaram in generics {
- if let Some(did) = typaram.def_id(self.cache) && !self.should_keep_impl(&i, did)
+ if let Some(did) = typaram.def_id(self.cache)
+ && !self.should_keep_impl(&i, did)
{
- debug!(
- "ImplStripper: stripped item in trait's generics; removing impl"
- );
+ debug!("ImplStripper: stripped item in trait's generics; removing impl");
return None;
}
}
diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs
index dd52deef6..a343d7afc 100644
--- a/src/librustdoc/scrape_examples.rs
+++ b/src/librustdoc/scrape_examples.rs
@@ -40,7 +40,7 @@ pub(crate) struct ScrapeExamplesOptions {
impl ScrapeExamplesOptions {
pub(crate) fn new(
matches: &getopts::Matches,
- diag: &rustc_errors::Handler,
+ dcx: &rustc_errors::DiagCtxt,
) -> Result<Option<Self>, i32> {
let output_path = matches.opt_str("scrape-examples-output-path");
let target_crates = matches.opt_strs("scrape-examples-target-crate");
@@ -52,11 +52,11 @@ impl ScrapeExamplesOptions {
scrape_tests,
})),
(Some(_), false, _) | (None, true, _) => {
- diag.err("must use --scrape-examples-output-path and --scrape-examples-target-crate together");
+ dcx.err("must use --scrape-examples-output-path and --scrape-examples-target-crate together");
Err(1)
}
(None, false, true) => {
- diag.err("must use --scrape-examples-output-path and --scrape-examples-target-crate with --scrape-tests");
+ dcx.err("must use --scrape-examples-output-path and --scrape-examples-target-crate with --scrape-tests");
Err(1)
}
(None, false, false) => Ok(None),
@@ -311,7 +311,7 @@ pub(crate) fn run(
// The visitor might have found a type error, which we need to
// promote to a fatal error
- if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.dcx().has_errors_or_lint_errors().is_some() {
return Err(String::from("Compilation failed, aborting rustdoc"));
}
@@ -325,7 +325,7 @@ pub(crate) fn run(
// Save output to provided path
let mut encoder = FileEncoder::new(options.output_path).map_err(|e| e.to_string())?;
calls.encode(&mut encoder);
- encoder.finish().map_err(|e| e.to_string())?;
+ encoder.finish().map_err(|(_path, e)| e.to_string())?;
Ok(())
};
@@ -337,10 +337,11 @@ pub(crate) fn run(
Ok(())
}
-// Note: the Handler must be passed in explicitly because sess isn't available while parsing options
+// Note: the DiagCtxt must be passed in explicitly because sess isn't available while parsing
+// options.
pub(crate) fn load_call_locations(
with_examples: Vec<String>,
- diag: &rustc_errors::Handler,
+ dcx: &rustc_errors::DiagCtxt,
) -> Result<AllCallLocations, i32> {
let inner = || {
let mut all_calls: AllCallLocations = FxHashMap::default();
@@ -358,7 +359,7 @@ pub(crate) fn load_call_locations(
};
inner().map_err(|e: String| {
- diag.err(format!("failed to load examples: {e}"));
+ dcx.err(format!("failed to load examples: {e}"));
1
})
}
diff --git a/src/librustdoc/theme.rs b/src/librustdoc/theme.rs
index 8c1acbd73..98010b056 100644
--- a/src/librustdoc/theme.rs
+++ b/src/librustdoc/theme.rs
@@ -5,7 +5,7 @@ use std::iter::Peekable;
use std::path::Path;
use std::str::Chars;
-use rustc_errors::Handler;
+use rustc_errors::DiagCtxt;
#[cfg(test)]
mod tests;
@@ -236,7 +236,7 @@ pub(crate) fn get_differences(
pub(crate) fn test_theme_against<P: AsRef<Path>>(
f: &P,
origin: &FxHashMap<String, CssPath>,
- diag: &Handler,
+ dcx: &DiagCtxt,
) -> (bool, Vec<String>) {
let against = match fs::read_to_string(f)
.map_err(|e| e.to_string())
@@ -244,7 +244,7 @@ pub(crate) fn test_theme_against<P: AsRef<Path>>(
{
Ok(c) => c,
Err(e) => {
- diag.struct_err(e).emit();
+ dcx.struct_err(e).emit();
return (false, vec![]);
}
};
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 549fd67e3..907ea6d30 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -135,14 +135,16 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// macro in the same module.
let mut inserted = FxHashSet::default();
for child in self.cx.tcx.module_children_local(CRATE_DEF_ID) {
- if !child.reexport_chain.is_empty() &&
- let Res::Def(DefKind::Macro(_), def_id) = child.res &&
- let Some(local_def_id) = def_id.as_local() &&
- self.cx.tcx.has_attr(def_id, sym::macro_export) &&
- inserted.insert(def_id)
+ if !child.reexport_chain.is_empty()
+ && let Res::Def(DefKind::Macro(_), def_id) = child.res
+ && let Some(local_def_id) = def_id.as_local()
+ && self.cx.tcx.has_attr(def_id, sym::macro_export)
+ && inserted.insert(def_id)
{
let item = self.cx.tcx.hir().expect_item(local_def_id);
- top_level_module.items.insert((local_def_id, Some(item.ident.name)), (item, None, None));
+ top_level_module
+ .items
+ .insert((local_def_id, Some(item.ident.name)), (item, None, None));
}
}
@@ -161,15 +163,16 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
.iter()
.filter_map(|attr| {
Cfg::parse(attr.meta_item()?)
- .map_err(|e| self.cx.sess().diagnostic().span_err(e.span, e.msg))
+ .map_err(|e| self.cx.sess().dcx().span_err(e.span, e.msg))
.ok()
})
.collect::<Vec<_>>()
})
- .chain(
- [Cfg::Cfg(sym::test, None), Cfg::Cfg(sym::doc, None), Cfg::Cfg(sym::doctest, None)]
- .into_iter(),
- )
+ .chain([
+ Cfg::Cfg(sym::test, None),
+ Cfg::Cfg(sym::doc, None),
+ Cfg::Cfg(sym::doctest, None),
+ ])
.collect();
self.cx.cache.exact_paths = self.exact_paths;
@@ -240,7 +243,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
};
let document_hidden = self.cx.render_options.document_hidden;
- let use_attrs = tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(def_id));
+ let use_attrs = tcx.hir().attrs(tcx.local_def_id_to_hir_id(def_id));
// Don't inline `doc(hidden)` imports so they can be stripped at a later stage.
let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline)
|| (document_hidden && use_attrs.lists(sym::doc).has_word(sym::hidden));
@@ -273,7 +276,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
};
let is_private = !self.cx.cache.effective_visibilities.is_directly_public(tcx, ori_res_did);
- let item = tcx.hir().get_by_def_id(res_did);
+ let item = tcx.hir_node_by_def_id(res_did);
if !please_inline {
let inherits_hidden = !document_hidden && inherits_doc_hidden(tcx, res_did, None);
@@ -444,8 +447,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
continue;
}
- let attrs =
- tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(item.owner_id.def_id));
+ let attrs = tcx.hir().attrs(tcx.local_def_id_to_hir_id(item.owner_id.def_id));
// If there was a private module in the current path then don't bother inlining
// anything as it will probably be stripped anyway.