From 2e00214b3efbdfeefaa0fe9e8b8fd519de7adc35 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Wed, 17 Apr 2024 14:19:50 +0200 Subject: Merging upstream version 1.69.0+dfsg1. Signed-off-by: Daniel Baumann --- src/librustdoc/Cargo.toml | 2 - src/librustdoc/clean/auto_trait.rs | 6 +- src/librustdoc/clean/blanket_impl.rs | 2 +- src/librustdoc/clean/cfg.rs | 8 +- src/librustdoc/clean/cfg/tests.rs | 5 +- src/librustdoc/clean/inline.rs | 64 +- src/librustdoc/clean/mod.rs | 476 +++++++++------ src/librustdoc/clean/render_macro_matchers.rs | 3 +- src/librustdoc/clean/types.rs | 248 +------- src/librustdoc/clean/types/tests.rs | 1 + src/librustdoc/clean/utils.rs | 23 +- src/librustdoc/config.rs | 2 +- src/librustdoc/core.rs | 80 +-- src/librustdoc/doctest.rs | 117 ++-- src/librustdoc/formats/cache.rs | 41 +- src/librustdoc/formats/item_type.rs | 3 +- src/librustdoc/html/format.rs | 98 +-- src/librustdoc/html/highlight.rs | 30 +- src/librustdoc/html/layout.rs | 1 - src/librustdoc/html/length_limit.rs | 4 +- src/librustdoc/html/length_limit/tests.rs | 2 +- src/librustdoc/html/markdown.rs | 209 +++---- src/librustdoc/html/markdown/tests.rs | 2 +- src/librustdoc/html/render/context.rs | 43 +- src/librustdoc/html/render/mod.rs | 166 ++--- src/librustdoc/html/render/print_item.rs | 672 ++++++++++----------- src/librustdoc/html/render/search_index.rs | 11 +- src/librustdoc/html/render/write_shared.rs | 5 +- src/librustdoc/html/sources.rs | 3 +- src/librustdoc/html/static/.eslintrc.js | 1 - src/librustdoc/html/static/css/rustdoc.css | 148 ++--- src/librustdoc/html/static/css/settings.css | 30 +- src/librustdoc/html/static/css/themes/ayu.css | 77 +-- src/librustdoc/html/static/css/themes/dark.css | 26 +- src/librustdoc/html/static/css/themes/light.css | 26 +- .../html/static/fonts/SourceSerif4-Bold.ttf.woff2 | Bin 81320 -> 81540 bytes .../html/static/fonts/SourceSerif4-It.ttf.woff2 | Bin 59860 -> 59716 bytes .../html/static/fonts/SourceSerif4-LICENSE.md | 2 +- .../static/fonts/SourceSerif4-Regular.ttf.woff2 | Bin 76180 -> 76260 bytes src/librustdoc/html/static/images/down-arrow.svg | 1 - src/librustdoc/html/static/images/toggle-minus.svg | 1 - src/librustdoc/html/static/images/toggle-plus.svg | 1 - src/librustdoc/html/static/images/wheel.svg | 2 +- src/librustdoc/html/static/js/main.js | 125 ++-- src/librustdoc/html/static/js/search.js | 355 +++++++---- src/librustdoc/html/static/js/settings.js | 55 +- src/librustdoc/html/static/js/source-script.js | 3 +- src/librustdoc/html/static/js/storage.js | 113 ++-- src/librustdoc/html/static_files.rs | 3 - src/librustdoc/html/templates/page.html | 3 +- src/librustdoc/html/templates/print_item.html | 2 +- src/librustdoc/json/conversions.rs | 99 ++- src/librustdoc/json/import_finder.rs | 10 +- src/librustdoc/json/mod.rs | 34 +- src/librustdoc/lib.rs | 40 +- src/librustdoc/lint.rs | 6 +- src/librustdoc/markdown.rs | 9 +- src/librustdoc/passes/calculate_doc_coverage.rs | 8 +- src/librustdoc/passes/check_doc_test_visibility.rs | 38 +- src/librustdoc/passes/collect_intra_doc_links.rs | 313 +++------- .../passes/collect_intra_doc_links/early.rs | 407 ------------- src/librustdoc/passes/collect_trait_impls.rs | 91 +-- .../passes/lint/check_code_block_syntax.rs | 11 +- src/librustdoc/passes/lint/html_tags.rs | 10 +- src/librustdoc/passes/mod.rs | 3 +- src/librustdoc/passes/propagate_doc_cfg.rs | 17 +- src/librustdoc/passes/strip_hidden.rs | 117 +++- src/librustdoc/passes/stripper.rs | 32 +- src/librustdoc/scrape_examples.rs | 2 +- src/librustdoc/visit_ast.rs | 275 
++++++--- src/librustdoc/visit_lib.rs | 9 +- 71 files changed, 2156 insertions(+), 2676 deletions(-) delete mode 100644 src/librustdoc/html/static/images/down-arrow.svg delete mode 100644 src/librustdoc/html/static/images/toggle-minus.svg delete mode 100644 src/librustdoc/html/static/images/toggle-plus.svg delete mode 100644 src/librustdoc/passes/collect_intra_doc_links/early.rs (limited to 'src/librustdoc') diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 0271c27b4..c48f7998c 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -12,14 +12,12 @@ askama = { version = "0.11", default-features = false, features = ["config"] } itertools = "0.10.1" minifier = "0.2.2" once_cell = "1.10.0" -pulldown-cmark = { version = "0.9.2", default-features = false } regex = "1" rustdoc-json-types = { path = "../rustdoc-json-types" } serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } smallvec = "1.8.1" tempfile = "3" -thin-vec = "0.2.9" tracing = "0.1" tracing-tree = "0.2.0" diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs index a302750aa..9479b3ee0 100644 --- a/src/librustdoc/clean/auto_trait.rs +++ b/src/librustdoc/clean/auto_trait.rs @@ -137,7 +137,7 @@ where pub(crate) fn get_auto_trait_impls(&mut self, item_def_id: DefId) -> Vec { let tcx = self.cx.tcx; let param_env = tcx.param_env(item_def_id); - let ty = tcx.type_of(item_def_id); + let ty = tcx.type_of(item_def_id).subst_identity(); let f = auto_trait::AutoTraitFinder::new(tcx); debug!("get_auto_trait_impls({:?})", ty); @@ -734,8 +734,8 @@ struct RegionReplacer<'a, 'tcx> { tcx: TyCtxt<'tcx>, } -impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { +impl<'a, 'tcx> TypeFolder> for RegionReplacer<'a, 'tcx> { + fn interner(&self) -> TyCtxt<'tcx> { self.tcx } diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs index e6b2b2349..bcdbbcacc 100644 --- a/src/librustdoc/clean/blanket_impl.rs +++ b/src/librustdoc/clean/blanket_impl.rs @@ -15,7 +15,7 @@ impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> { pub(crate) fn get_blanket_impls(&mut self, item_def_id: DefId) -> Vec { let cx = &mut self.cx; let param_env = cx.tcx.param_env(item_def_id); - let ty = cx.tcx.bound_type_of(item_def_id); + let ty = cx.tcx.type_of(item_def_id); trace!("get_blanket_impls({:?})", ty); let mut impls = Vec::new(); diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index f1853f369..dd58a5b51 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -164,10 +164,10 @@ impl Cfg { /// Renders the configuration for human display, as a short HTML description. 
pub(crate) fn render_short_html(&self) -> String { let mut msg = Display(self, Format::ShortHtml).to_string(); - if self.should_capitalize_first_letter() { - if let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) { - msg[i..i + 1].make_ascii_uppercase(); - } + if self.should_capitalize_first_letter() && + let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) + { + msg[i..i + 1].make_ascii_uppercase(); } msg } diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs index 81f676724..bb62660e1 100644 --- a/src/librustdoc/clean/cfg/tests.rs +++ b/src/librustdoc/clean/cfg/tests.rs @@ -4,6 +4,7 @@ use rustc_ast::{LitKind, MetaItemLit, Path, StrStyle}; use rustc_span::create_default_session_globals_then; use rustc_span::symbol::{kw, Ident, Symbol}; use rustc_span::DUMMY_SP; +use thin_vec::thin_vec; fn word_cfg(s: &str) -> Cfg { Cfg::Cfg(Symbol::intern(s), None) @@ -34,7 +35,7 @@ macro_rules! dummy_meta_item_list { ($name:ident, [$($list:ident),* $(,)?]) => { MetaItem { path: Path::from_ident(Ident::from_str(stringify!($name))), - kind: MetaItemKind::List(vec![ + kind: MetaItemKind::List(thin_vec![ $( NestedMetaItem::MetaItem( dummy_meta_item_word(stringify!($list)), @@ -48,7 +49,7 @@ macro_rules! dummy_meta_item_list { ($name:ident, [$($list:expr),* $(,)?]) => { MetaItem { path: Path::from_ident(Ident::from_str(stringify!($name))), - kind: MetaItemKind::List(vec![ + kind: MetaItemKind::List(thin_vec![ $( NestedMetaItem::MetaItem($list), )* diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index b3b093312..148243683 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -9,7 +9,7 @@ use rustc_ast as ast; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::def_id::{DefId, DefIdSet, LocalDefId}; use rustc_hir::Mutability; use rustc_metadata::creader::{CStore, LoadedMacro}; use rustc_middle::ty::{self, TyCtxt}; @@ -45,7 +45,7 @@ pub(crate) fn try_inline( res: Res, name: Symbol, attrs: Option<&[ast::Attribute]>, - visited: &mut FxHashSet, + visited: &mut DefIdSet, ) -> Option> { let did = res.opt_def_id()?; if did.is_local() { @@ -163,7 +163,7 @@ pub(crate) fn try_inline_glob( cx: &mut DocContext<'_>, res: Res, current_mod: LocalDefId, - visited: &mut FxHashSet, + visited: &mut DefIdSet, inlined_names: &mut FxHashSet<(ItemType, Symbol)>, ) -> Option> { let did = res.opt_def_id()?; @@ -251,7 +251,7 @@ pub(crate) fn build_external_trait(cx: &mut DocContext<'_>, did: DefId) -> clean } fn build_external_function<'tcx>(cx: &mut DocContext<'tcx>, did: DefId) -> Box { - let sig = cx.tcx.fn_sig(did); + let sig = cx.tcx.fn_sig(did).subst_identity(); let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var { ty::BoundVariableKind::Region(ty::BrNamed(_, name)) if name != kw::UnderscoreLifetime => { @@ -303,7 +303,8 @@ fn build_union(cx: &mut DocContext<'_>, did: DefId) -> clean::Union { fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box { let predicates = cx.tcx.explicit_predicates_of(did); - let type_ = clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did)); + let type_ = + clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()), cx, Some(did)); Box::new(clean::Typedef { type_, @@ -390,18 +391,17 @@ pub(crate) fn build_impl( // Only inline impl if the implemented trait is // reachable in rustdoc generated documentation - if !did.is_local() { 
- if let Some(traitref) = associated_trait { - let did = traitref.def_id; - if !cx.cache.effective_visibilities.is_directly_public(tcx, did) { - return; - } + if !did.is_local() && let Some(traitref) = associated_trait { + let did = traitref.def_id; + if !cx.cache.effective_visibilities.is_directly_public(tcx, did) { + return; + } - if let Some(stab) = tcx.lookup_stability(did) { - if stab.is_unstable() && stab.feature == sym::rustc_private { - return; - } - } + if let Some(stab) = tcx.lookup_stability(did) && + stab.is_unstable() && + stab.feature == sym::rustc_private + { + return; } } @@ -415,7 +415,9 @@ pub(crate) fn build_impl( let for_ = match &impl_item { Some(impl_) => clean_ty(impl_.self_ty, cx), - None => clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did)), + None => { + clean_middle_ty(ty::Binder::dummy(tcx.type_of(did).subst_identity()), cx, Some(did)) + } }; // Only inline impl if the implementing type is @@ -525,10 +527,8 @@ pub(crate) fn build_impl( } while let Some(ty) = stack.pop() { - if let Some(did) = ty.def_id(&cx.cache) { - if tcx.is_doc_hidden(did) { - return; - } + if let Some(did) = ty.def_id(&cx.cache) && tcx.is_doc_hidden(did) { + return; } if let Some(generics) = ty.generics() { stack.extend(generics); @@ -568,11 +568,7 @@ pub(crate) fn build_impl( )); } -fn build_module( - cx: &mut DocContext<'_>, - did: DefId, - visited: &mut FxHashSet, -) -> clean::Module { +fn build_module(cx: &mut DocContext<'_>, did: DefId, visited: &mut DefIdSet) -> clean::Module { let items = build_module_items(cx, did, visited, &mut FxHashSet::default(), None); let span = clean::Span::new(cx.tcx.def_span(did)); @@ -582,9 +578,9 @@ fn build_module( fn build_module_items( cx: &mut DocContext<'_>, did: DefId, - visited: &mut FxHashSet, + visited: &mut DefIdSet, inlined_names: &mut FxHashSet<(ItemType, Symbol)>, - allowed_def_ids: Option<&FxHashSet>, + allowed_def_ids: Option<&DefIdSet>, ) -> Vec { let mut items = Vec::new(); @@ -659,14 +655,22 @@ pub(crate) fn print_inlined_const(tcx: TyCtxt<'_>, did: DefId) -> String { fn build_const(cx: &mut DocContext<'_>, def_id: DefId) -> clean::Constant { clean::Constant { - type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)), + type_: clean_middle_ty( + ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()), + cx, + Some(def_id), + ), kind: clean::ConstantKind::Extern { def_id }, } } fn build_static(cx: &mut DocContext<'_>, did: DefId, mutable: bool) -> clean::Static { clean::Static { - type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(did)), cx, Some(did)), + type_: clean_middle_ty( + ty::Binder::dummy(cx.tcx.type_of(did).subst_identity()), + cx, + Some(did), + ), mutability: if mutable { Mutability::Mut } else { Mutability::Not }, expr: None, } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 0f0e16265..3edc2cd2e 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -11,18 +11,20 @@ pub(crate) mod types; pub(crate) mod utils; use rustc_ast as ast; +use rustc_ast::token::{Token, TokenKind}; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_attr as attr; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind, Res}; -use rustc_hir::def_id::{DefId, LOCAL_CRATE}; +use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId, LOCAL_CRATE}; use rustc_hir::PredicateOrigin; use rustc_hir_analysis::hir_ty_to_ty; use 
rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData}; -use rustc_middle::middle::resolve_lifetime as rl; +use rustc_middle::middle::resolve_bound_vars as rbv; use rustc_middle::ty::fold::TypeFolder; use rustc_middle::ty::InternalSubsts; -use rustc_middle::ty::TypeVisitable; +use rustc_middle::ty::TypeVisitableExt; use rustc_middle::ty::{self, AdtKind, DefIdTree, EarlyBinder, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_span::hygiene::{AstPass, MacroKind}; @@ -37,6 +39,7 @@ use std::hash::Hash; use std::mem; use thin_vec::ThinVec; +use crate::clean::inline::merge_attrs; use crate::core::{self, DocContext, ImplTraitParam}; use crate::formats::item_type::ItemType; use crate::visit_ast::Module as DocModule; @@ -75,7 +78,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< // This covers the case where somebody does an import which should pull in an item, // but there's already an item with the same namespace and same name. Rust gives // priority to the not-imported one, so we should, too. - items.extend(doc.items.iter().flat_map(|(item, renamed, import_id)| { + items.extend(doc.items.values().flat_map(|(item, renamed, import_id)| { // First, lower everything other than imports. if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) { return Vec::new(); @@ -88,7 +91,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< } v })); - items.extend(doc.items.iter().flat_map(|(item, renamed, _)| { + items.extend(doc.items.values().flat_map(|(item, renamed, _)| { // Now we actually lower the imports, skipping everything else. if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind { let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id())); @@ -116,7 +119,8 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< } }); - Item::from_hir_id_and_parts(doc.id, Some(doc.name), ModuleItem(Module { items, span }), cx) + let kind = ModuleItem(Module { items, span }); + Item::from_def_id_and_parts(doc.def_id.to_def_id(), Some(doc.name), kind, cx) } fn clean_generic_bound<'tcx>( @@ -197,11 +201,11 @@ fn clean_poly_trait_ref_with_bindings<'tcx>( } fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime { - let def = cx.tcx.named_region(lifetime.hir_id); + let def = cx.tcx.named_bound_var(lifetime.hir_id); if let Some( - rl::Region::EarlyBound(node_id) - | rl::Region::LateBound(_, _, node_id) - | rl::Region::Free(_, node_id), + rbv::ResolvedArg::EarlyBound(node_id) + | rbv::ResolvedArg::LateBound(_, _, node_id) + | rbv::ResolvedArg::Free(_, node_id), ) = def { if let Some(lt) = cx.substs.get(&node_id).and_then(|p| p.as_lt()).cloned() { @@ -214,7 +218,11 @@ fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> pub(crate) fn clean_const<'tcx>(constant: &hir::ConstArg, cx: &mut DocContext<'tcx>) -> Constant { let def_id = cx.tcx.hir().body_owner_def_id(constant.value.body).to_def_id(); Constant { - type_: clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(def_id)), cx, Some(def_id)), + type_: clean_middle_ty( + ty::Binder::dummy(cx.tcx.type_of(def_id).subst_identity()), + cx, + Some(def_id), + ), kind: ConstantKind::Anonymous { body: constant.value.body }, } } @@ -241,6 +249,7 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option { debug!("cannot clean region {:?}", region); @@ -309,10 +318,13 @@ pub(crate) fn clean_predicate<'tcx>( 
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => { Some(clean_projection_predicate(bound_predicate.rebind(pred), cx)) } + // FIXME(generic_const_exprs): should this do something? ty::PredicateKind::ConstEvaluatable(..) => None, ty::PredicateKind::WellFormed(..) => None, + ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..)) => None, ty::PredicateKind::Subtype(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::Coerce(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) @@ -382,13 +394,10 @@ fn clean_middle_term<'tcx>( fn clean_hir_term<'tcx>(term: &hir::Term<'tcx>, cx: &mut DocContext<'tcx>) -> Term { match term { hir::Term::Ty(ty) => Term::Type(clean_ty(ty, cx)), - hir::Term::Const(c) => { - let def_id = cx.tcx.hir().local_def_id(c.hir_id); - Term::Constant(clean_middle_const( - ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, def_id)), - cx, - )) - } + hir::Term::Const(c) => Term::Constant(clean_middle_const( + ty::Binder::dummy(ty::Const::from_anon_const(cx.tcx, c.def_id)), + cx, + )), } } @@ -479,7 +488,7 @@ fn clean_generic_param_def<'tcx>( ty::GenericParamDefKind::Type { has_default, synthetic, .. } => { let default = if has_default { Some(clean_middle_ty( - ty::Binder::dummy(cx.tcx.type_of(def.def_id)), + ty::Binder::dummy(cx.tcx.type_of(def.def_id).subst_identity()), cx, Some(def.def_id), )) @@ -501,7 +510,12 @@ fn clean_generic_param_def<'tcx>( GenericParamDefKind::Const { did: def.def_id, ty: Box::new(clean_middle_ty( - ty::Binder::dummy(cx.tcx.type_of(def.def_id)), + ty::Binder::dummy( + cx.tcx + .type_of(def.def_id) + .no_bound_vars() + .expect("const parameter types cannot be generic"), + ), cx, Some(def.def_id), )), @@ -523,12 +537,11 @@ fn clean_generic_param<'tcx>( generics: Option<&hir::Generics<'tcx>>, param: &hir::GenericParam<'tcx>, ) -> GenericParamDef { - let did = cx.tcx.hir().local_def_id(param.hir_id); let (name, kind) = match param.kind { hir::GenericParamKind::Lifetime { .. 
} => { let outlives = if let Some(generics) = generics { generics - .outlives_for_param(did) + .outlives_for_param(param.def_id) .filter(|bp| !bp.in_where_clause) .flat_map(|bp| bp.bounds) .map(|bound| match bound { @@ -544,7 +557,7 @@ fn clean_generic_param<'tcx>( hir::GenericParamKind::Type { ref default, synthetic } => { let bounds = if let Some(generics) = generics { generics - .bounds_for_param(did) + .bounds_for_param(param.def_id) .filter(|bp| bp.origin != PredicateOrigin::WhereClause) .flat_map(|bp| bp.bounds) .filter_map(|x| clean_generic_bound(x, cx)) @@ -555,7 +568,7 @@ fn clean_generic_param<'tcx>( ( param.name.ident().name, GenericParamDefKind::Type { - did: did.to_def_id(), + did: param.def_id.to_def_id(), bounds, default: default.map(|t| clean_ty(t, cx)).map(Box::new), synthetic, @@ -565,12 +578,10 @@ fn clean_generic_param<'tcx>( hir::GenericParamKind::Const { ty, default } => ( param.name.ident().name, GenericParamDefKind::Const { - did: did.to_def_id(), + did: param.def_id.to_def_id(), ty: Box::new(clean_ty(ty, cx)), - default: default.map(|ct| { - let def_id = cx.tcx.hir().local_def_id(ct.hir_id); - Box::new(ty::Const::from_anon_const(cx.tcx, def_id).to_string()) - }), + default: default + .map(|ct| Box::new(ty::Const::from_anon_const(cx.tcx, ct.def_id).to_string())), }, ), }; @@ -789,43 +800,43 @@ fn clean_ty_generics<'tcx>( None })(); - if let Some(param_idx) = param_idx { - if let Some(b) = impl_trait.get_mut(¶m_idx.into()) { - let p: WherePredicate = clean_predicate(*p, cx)?; + if let Some(param_idx) = param_idx + && let Some(b) = impl_trait.get_mut(¶m_idx.into()) + { + let p: WherePredicate = clean_predicate(*p, cx)?; + + b.extend( + p.get_bounds() + .into_iter() + .flatten() + .cloned() + .filter(|b| !b.is_sized_bound(cx)), + ); - b.extend( - p.get_bounds() + let proj = projection.map(|p| { + ( + clean_projection(p.map_bound(|p| p.projection_ty), cx, None), + p.map_bound(|p| p.term), + ) + }); + if let Some(((_, trait_did, name), rhs)) = proj + .as_ref() + .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs))) + { + // FIXME(...): Remove this unwrap() + impl_trait_proj.entry(param_idx).or_default().push(( + trait_did, + name, + rhs.map_bound(|rhs| rhs.ty().unwrap()), + p.get_bound_params() .into_iter() .flatten() - .cloned() - .filter(|b| !b.is_sized_bound(cx)), - ); - - let proj = projection.map(|p| { - ( - clean_projection(p.map_bound(|p| p.projection_ty), cx, None), - p.map_bound(|p| p.term), - ) - }); - if let Some(((_, trait_did, name), rhs)) = proj - .as_ref() - .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs))) - { - // FIXME(...): Remove this unwrap() - impl_trait_proj.entry(param_idx).or_default().push(( - trait_did, - name, - rhs.map_bound(|rhs| rhs.ty().unwrap()), - p.get_bound_params() - .into_iter() - .flatten() - .map(|param| GenericParamDef::lifetime(param.0)) - .collect(), - )); - } - - return None; + .map(|param| GenericParamDef::lifetime(param.0)) + .collect(), + )); } + + return None; } Some(p) @@ -888,7 +899,7 @@ fn clean_ty_generics<'tcx>( // `?Sized` bound for each one we didn't find to be `Sized`. for tp in &stripped_params { if let types::GenericParamDefKind::Type { .. 
} = tp.kind - && !sized_params.contains(&tp.name) + && !sized_params.contains(&tp.name) { where_predicates.push(WherePredicate::BoundPredicate { ty: Type::Generic(tp.name), @@ -1214,7 +1225,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>( let kind = match assoc_item.kind { ty::AssocKind::Const => { let ty = clean_middle_ty( - ty::Binder::dummy(tcx.type_of(assoc_item.def_id)), + ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()), cx, Some(assoc_item.def_id), ); @@ -1230,7 +1241,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>( } } ty::AssocKind::Fn => { - let sig = tcx.fn_sig(assoc_item.def_id); + let sig = tcx.fn_sig(assoc_item.def_id).subst_identity(); let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var { ty::BoundVariableKind::Region(ty::BrNamed(_, name)) @@ -1253,7 +1264,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>( if assoc_item.fn_has_self_parameter { let self_ty = match assoc_item.container { - ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)), + ty::ImplContainer => tcx.type_of(assoc_item.container_id(tcx)).subst_identity(), ty::TraitContainer => tcx.types.self_param, }; let self_arg_ty = sig.input(0).skip_binder(); @@ -1400,7 +1411,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>( AssocTypeItem( Box::new(Typedef { type_: clean_middle_ty( - ty::Binder::dummy(tcx.type_of(assoc_item.def_id)), + ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()), cx, Some(assoc_item.def_id), ), @@ -1418,7 +1429,7 @@ pub(crate) fn clean_middle_assoc_item<'tcx>( AssocTypeItem( Box::new(Typedef { type_: clean_middle_ty( - ty::Binder::dummy(tcx.type_of(assoc_item.def_id)), + ty::Binder::dummy(tcx.type_of(assoc_item.def_id).subst_identity()), cx, Some(assoc_item.def_id), ), @@ -1463,10 +1474,10 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type // Try to normalize `::T` to a type let ty = hir_ty_to_ty(cx.tcx, hir_ty); // `hir_to_ty` can return projection types with escaping vars for GATs, e.g. `<() as Trait>::Gat<'_>` - if !ty.has_escaping_bound_vars() { - if let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty)) { - return clean_middle_ty(normalized_value, cx, None); - } + if !ty.has_escaping_bound_vars() + && let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty)) + { + return clean_middle_ty(normalized_value, cx, None); } let trait_segments = &p.segments[..p.segments.len() - 1]; @@ -1528,7 +1539,7 @@ fn maybe_expand_private_type_alias<'tcx>( let hir::ItemKind::TyAlias(ty, generics) = alias else { return None }; let provided_params = &path.segments.last().expect("segments were empty"); - let mut substs = FxHashMap::default(); + let mut substs = DefIdMap::default(); let generic_args = provided_params.args(); let mut indices: hir::GenericParamCount = Default::default(); @@ -1547,18 +1558,16 @@ fn maybe_expand_private_type_alias<'tcx>( _ => None, }); if let Some(lt) = lifetime { - let lt_def_id = cx.tcx.hir().local_def_id(param.hir_id); let cleaned = if !lt.is_anonymous() { clean_lifetime(lt, cx) } else { Lifetime::elided() }; - substs.insert(lt_def_id.to_def_id(), SubstParam::Lifetime(cleaned)); + substs.insert(param.def_id.to_def_id(), SubstParam::Lifetime(cleaned)); } indices.lifetimes += 1; } hir::GenericParamKind::Type { ref default, .. 
} => { - let ty_param_def_id = cx.tcx.hir().local_def_id(param.hir_id); let mut j = 0; let type_ = generic_args.args.iter().find_map(|arg| match arg { hir::GenericArg::Type(ty) => { @@ -1571,17 +1580,14 @@ fn maybe_expand_private_type_alias<'tcx>( _ => None, }); if let Some(ty) = type_ { - substs.insert(ty_param_def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx))); + substs.insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx))); } else if let Some(default) = *default { - substs.insert( - ty_param_def_id.to_def_id(), - SubstParam::Type(clean_ty(default, cx)), - ); + substs + .insert(param.def_id.to_def_id(), SubstParam::Type(clean_ty(default, cx))); } indices.types += 1; } hir::GenericParamKind::Const { .. } => { - let const_param_def_id = cx.tcx.hir().local_def_id(param.hir_id); let mut j = 0; let const_ = generic_args.args.iter().find_map(|arg| match arg { hir::GenericArg::Const(ct) => { @@ -1595,7 +1601,7 @@ fn maybe_expand_private_type_alias<'tcx>( }); if let Some(ct) = const_ { substs.insert( - const_param_def_id.to_def_id(), + param.def_id.to_def_id(), SubstParam::Constant(clean_const(ct, cx)), ); } @@ -1623,7 +1629,6 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T let length = match length { hir::ArrayLen::Infer(_, _) => "_".to_string(), hir::ArrayLen::Body(anon_const) => { - let def_id = cx.tcx.hir().local_def_id(anon_const.hir_id); // NOTE(min_const_generics): We can't use `const_eval_poly` for constants // as we currently do not supply the parent generics to anonymous constants // but do allow `ConstKind::Param`. @@ -1631,8 +1636,8 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T // `const_eval_poly` tries to first substitute generic parameters which // results in an ICE while manually constructing the constant and using `eval` // does nothing for `ConstKind::Param`. - let ct = ty::Const::from_anon_const(cx.tcx, def_id); - let param_env = cx.tcx.param_env(def_id); + let ct = ty::Const::from_anon_const(cx.tcx, anon_const.def_id); + let param_env = cx.tcx.param_env(anon_const.def_id); print_const(cx, ct.eval(cx.tcx, param_env)) } }; @@ -1657,7 +1662,7 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T } TyKind::BareFn(barefn) => BareFunction(Box::new(clean_bare_fn_ty(barefn, cx))), // Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s. - TyKind::Infer | TyKind::Err | TyKind::Typeof(..) => Infer, + TyKind::Infer | TyKind::Err(_) | TyKind::Typeof(..) => Infer, } } @@ -1854,6 +1859,7 @@ pub(crate) fn clean_middle_ty<'tcx>( ty::Bound(..) => panic!("Bound"), ty::Placeholder(..) => panic!("Placeholder"), ty::GeneratorWitness(..) => panic!("GeneratorWitness"), + ty::GeneratorWitnessMIR(..) => panic!("GeneratorWitnessMIR"), ty::Infer(..) 
=> panic!("Infer"), ty::Error(_) => rustc_errors::FatalError.raise(), } @@ -1885,11 +1891,9 @@ fn clean_middle_opaque_bounds<'tcx>( _ => return None, }; - if let Some(sized) = cx.tcx.lang_items().sized_trait() { - if trait_ref.def_id() == sized { - has_sized = true; - return None; - } + if let Some(sized) = cx.tcx.lang_items().sized_trait() && trait_ref.def_id() == sized { + has_sized = true; + return None; } let bindings: ThinVec<_> = bounds @@ -1928,15 +1932,18 @@ fn clean_middle_opaque_bounds<'tcx>( } pub(crate) fn clean_field<'tcx>(field: &hir::FieldDef<'tcx>, cx: &mut DocContext<'tcx>) -> Item { - let def_id = cx.tcx.hir().local_def_id(field.hir_id).to_def_id(); - clean_field_with_def_id(def_id, field.ident.name, clean_ty(field.ty, cx), cx) + clean_field_with_def_id(field.def_id.to_def_id(), field.ident.name, clean_ty(field.ty, cx), cx) } pub(crate) fn clean_middle_field<'tcx>(field: &ty::FieldDef, cx: &mut DocContext<'tcx>) -> Item { clean_field_with_def_id( field.did, field.name, - clean_middle_ty(ty::Binder::dummy(cx.tcx.type_of(field.did)), cx, Some(field.did)), + clean_middle_ty( + ty::Binder::dummy(cx.tcx.type_of(field.did).subst_identity()), + cx, + Some(field.did), + ), cx, ) } @@ -1979,10 +1986,8 @@ fn clean_variant_data<'tcx>( disr_expr: &Option, cx: &mut DocContext<'tcx>, ) -> Variant { - let discriminant = disr_expr.map(|disr| Discriminant { - expr: Some(disr.body), - value: cx.tcx.hir().local_def_id(disr.hir_id).to_def_id(), - }); + let discriminant = disr_expr + .map(|disr| Discriminant { expr: Some(disr.body), value: disr.def_id.to_def_id() }); let kind = match variant { hir::VariantData::Struct(..) => VariantKind::Struct(VariantStruct { @@ -2067,12 +2072,12 @@ struct OneLevelVisitor<'hir> { map: rustc_middle::hir::map::Map<'hir>, item: Option<&'hir hir::Item<'hir>>, looking_for: Ident, - target_hir_id: hir::HirId, + target_def_id: LocalDefId, } impl<'hir> OneLevelVisitor<'hir> { - fn new(map: rustc_middle::hir::map::Map<'hir>, target_hir_id: hir::HirId) -> Self { - Self { map, item: None, looking_for: Ident::empty(), target_hir_id } + fn new(map: rustc_middle::hir::map::Map<'hir>, target_def_id: LocalDefId) -> Self { + Self { map, item: None, looking_for: Ident::empty(), target_def_id } } fn reset(&mut self, looking_for: Ident) { @@ -2091,8 +2096,8 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> { fn visit_item(&mut self, item: &'hir hir::Item<'hir>) { if self.item.is_none() && item.ident == self.looking_for - && matches!(item.kind, hir::ItemKind::Use(_, _)) - || item.hir_id() == self.target_hir_id + && (matches!(item.kind, hir::ItemKind::Use(_, _)) + || item.owner_id.def_id == self.target_def_id) { self.item = Some(item); } @@ -2106,41 +2111,168 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> { fn get_all_import_attributes<'hir>( mut item: &hir::Item<'hir>, tcx: TyCtxt<'hir>, - target_hir_id: hir::HirId, + target_def_id: LocalDefId, attributes: &mut Vec, + is_inline: bool, ) { + let mut first = true; let hir_map = tcx.hir(); - let mut visitor = OneLevelVisitor::new(hir_map, target_hir_id); + let mut visitor = OneLevelVisitor::new(hir_map, target_def_id); let mut visited = FxHashSet::default(); + // If the item is an import and has at least a path with two parts, we go into it. 
- while let hir::ItemKind::Use(path, _) = item.kind && - path.segments.len() > 1 && - let hir::def::Res::Def(_, def_id) = path.segments[path.segments.len() - 2].res && - visited.insert(def_id) - { - if let Some(hir::Node::Item(parent_item)) = hir_map.get_if_local(def_id) { - // We add the attributes from this import into the list. + while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) { + if first { + // This is the "original" reexport so we get all its attributes without filtering them. attributes.extend_from_slice(hir_map.attrs(item.hir_id())); - // We get the `Ident` we will be looking for into `item`. - let looking_for = path.segments[path.segments.len() - 1].ident; - visitor.reset(looking_for); - hir::intravisit::walk_item(&mut visitor, parent_item); - if let Some(i) = visitor.item { - item = i; - } else { - break; + first = false; + } else { + add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()), is_inline); + } + + let def_id = if let [.., parent_segment, _] = &path.segments { + match parent_segment.res { + hir::def::Res::Def(_, def_id) => def_id, + _ if parent_segment.ident.name == kw::Crate => { + // In case the "parent" is the crate, it'll give `Res::Err` so we need to + // circumvent it this way. + tcx.parent(item.owner_id.def_id.to_def_id()) + } + _ => break, } + } else { + // If the path doesn't have a parent, then the parent is the current module. + tcx.parent(item.owner_id.def_id.to_def_id()) + }; + + let Some(parent) = hir_map.get_if_local(def_id) else { break }; + + // We get the `Ident` we will be looking for into `item`. + let looking_for = path.segments[path.segments.len() - 1].ident; + visitor.reset(looking_for); + + match parent { + hir::Node::Item(parent_item) => { + hir::intravisit::walk_item(&mut visitor, parent_item); + } + hir::Node::Crate(m) => { + hir::intravisit::walk_mod( + &mut visitor, + m, + tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()), + ); + } + _ => break, + } + if let Some(i) = visitor.item { + item = i; } else { break; } } } +fn filter_tokens_from_list( + args_tokens: TokenStream, + should_retain: impl Fn(&TokenTree) -> bool, +) -> Vec { + let mut tokens = Vec::with_capacity(args_tokens.len()); + let mut skip_next_comma = false; + for token in args_tokens.into_trees() { + match token { + TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => { + skip_next_comma = false; + } + token if should_retain(&token) => { + skip_next_comma = false; + tokens.push(token); + } + _ => { + skip_next_comma = true; + } + } + } + tokens +} + +/// When inlining items, we merge its attributes (and all the reexports attributes too) with the +/// final reexport. For example: +/// +/// ```ignore (just an example) +/// #[doc(hidden, cfg(feature = "foo"))] +/// pub struct Foo; +/// +/// #[doc(cfg(feature = "bar"))] +/// #[doc(hidden, no_inline)] +/// pub use Foo as Foo1; +/// +/// #[doc(inline)] +/// pub use Foo2 as Bar; +/// ``` +/// +/// So `Bar` at the end will have both `cfg(feature = "...")`. However, we don't want to merge all +/// attributes so we filter out the following ones: +/// * `doc(inline)` +/// * `doc(no_inline)` +/// * `doc(hidden)` +fn add_without_unwanted_attributes( + attrs: &mut Vec, + new_attrs: &[ast::Attribute], + is_inline: bool, +) { + // If it's `#[doc(inline)]`, we don't want all attributes, otherwise we keep everything. 
+ if !is_inline { + attrs.extend_from_slice(new_attrs); + return; + } + for attr in new_attrs { + let mut attr = attr.clone(); + match attr.kind { + ast::AttrKind::Normal(ref mut normal) => { + if let [ident] = &*normal.item.path.segments && + let ident = ident.ident.name && + ident == sym::doc + { + match normal.item.args { + ast::AttrArgs::Delimited(ref mut args) => { + let tokens = + filter_tokens_from_list(args.tokens.clone(), |token| { + !matches!( + token, + TokenTree::Token( + Token { + kind: TokenKind::Ident( + sym::hidden | sym::inline | sym::no_inline, + _, + ), + .. + }, + _, + ), + ) + }); + args.tokens = TokenStream::new(tokens); + attrs.push(attr); + } + ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => { + attrs.push(attr); + continue; + } + } + } + } + ast::AttrKind::DocComment(..) => { + attrs.push(attr); + } + } + } +} + fn clean_maybe_renamed_item<'tcx>( cx: &mut DocContext<'tcx>, item: &hir::Item<'tcx>, renamed: Option, - import_id: Option, + import_id: Option, ) -> Vec { use hir::ItemKind; @@ -2185,7 +2317,7 @@ fn clean_maybe_renamed_item<'tcx>( generics: clean_generics(generics, cx), fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(), }), - ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx), + ItemKind::Impl(impl_) => return clean_impl(impl_, item.owner_id.def_id, cx), // proc macros can have a name set by attributes ItemKind::Fn(ref sig, generics, body_id) => { clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx) @@ -2218,40 +2350,38 @@ fn clean_maybe_renamed_item<'tcx>( _ => unreachable!("not yet converted"), }; - let mut extra_attrs = Vec::new(); - if let Some(hir::Node::Item(use_node)) = - import_id.and_then(|hir_id| cx.tcx.hir().find(hir_id)) + let mut import_attrs = Vec::new(); + let mut target_attrs = Vec::new(); + if let Some(import_id) = import_id && + let Some(hir::Node::Item(use_node)) = cx.tcx.hir().find_by_def_id(import_id) { - // We get all the various imports' attributes. - get_all_import_attributes(use_node, cx.tcx, item.hir_id(), &mut extra_attrs); - } - - if !extra_attrs.is_empty() { - extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id)); - let attrs = Attributes::from_ast(&extra_attrs); - let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg); - - vec![Item::from_def_id_and_attrs_and_parts( - def_id, - Some(name), - kind, - Box::new(attrs), - cfg, - )] + let is_inline = inline::load_attrs(cx, import_id.to_def_id()).lists(sym::doc).get_word_attr(sym::inline).is_some(); + // Then we get all the various imports' attributes. + get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut import_attrs, is_inline); + add_without_unwanted_attributes(&mut target_attrs, inline::load_attrs(cx, def_id), is_inline); } else { - vec![Item::from_def_id_and_parts(def_id, Some(name), kind, cx)] + // We only keep the item's attributes. 
+ target_attrs.extend_from_slice(inline::load_attrs(cx, def_id)); } + + let import_parent = import_id.map(|import_id| cx.tcx.local_parent(import_id).to_def_id()); + let (attrs, cfg) = merge_attrs(cx, import_parent, &target_attrs, Some(&import_attrs)); + + let mut item = + Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg); + item.inline_stmt_id = import_id.map(|def_id| def_id.to_def_id()); + vec![item] }) } fn clean_variant<'tcx>(variant: &hir::Variant<'tcx>, cx: &mut DocContext<'tcx>) -> Item { let kind = VariantItem(clean_variant_data(&variant.data, &variant.disr_expr, cx)); - Item::from_hir_id_and_parts(variant.hir_id, Some(variant.ident.name), kind, cx) + Item::from_def_id_and_parts(variant.def_id.to_def_id(), Some(variant.ident.name), kind, cx) } fn clean_impl<'tcx>( impl_: &hir::Impl<'tcx>, - hir_id: hir::HirId, + def_id: LocalDefId, cx: &mut DocContext<'tcx>, ) -> Vec { let tcx = cx.tcx; @@ -2262,7 +2392,6 @@ fn clean_impl<'tcx>( .iter() .map(|ii| clean_impl_item(tcx.hir().impl_item(ii.id), cx)) .collect::>(); - let def_id = tcx.hir().local_def_id(hir_id); // If this impl block is an implementation of the Deref trait, then we // need to try inlining the target's inherent impl blocks as well. @@ -2272,9 +2401,11 @@ fn clean_impl<'tcx>( let for_ = clean_ty(impl_.self_ty, cx); let type_alias = for_.def_id(&cx.cache).and_then(|did| match tcx.def_kind(did) { - DefKind::TyAlias => { - Some(clean_middle_ty(ty::Binder::dummy(tcx.type_of(did)), cx, Some(did))) - } + DefKind::TyAlias => Some(clean_middle_ty( + ty::Binder::dummy(tcx.type_of(did).subst_identity()), + cx, + Some(did), + )), _ => None, }); let mut make_item = |trait_: Option, for_: Type, items: Vec| { @@ -2291,7 +2422,7 @@ fn clean_impl<'tcx>( ImplKind::Normal }, })); - Item::from_hir_id_and_parts(hir_id, None, kind, cx) + Item::from_def_id_and_parts(def_id.to_def_id(), None, kind, cx) }; if let Some(type_alias) = type_alias { ret.push(make_item(trait_.clone(), type_alias, items.clone())); @@ -2323,7 +2454,7 @@ fn clean_extern_crate<'tcx>( let krate_owner_def_id = krate.owner_id.to_def_id(); if please_inline { - let mut visited = FxHashSet::default(); + let mut visited = DefIdSet::default(); let res = Res::Def(DefKind::Mod, crate_def_id); @@ -2405,17 +2536,15 @@ fn clean_use_statement_inner<'tcx>( let is_visible_from_parent_mod = visibility.is_accessible_from(parent_mod, cx.tcx) && !current_mod.is_top_level_module(); - if pub_underscore { - if let Some(ref inline) = inline_attr { - rustc_errors::struct_span_err!( - cx.tcx.sess, - inline.span(), - E0780, - "anonymous imports cannot be inlined" - ) - .span_label(import.span, "anonymous import") - .emit(); - } + if pub_underscore && let Some(ref inline) = inline_attr { + rustc_errors::struct_span_err!( + cx.tcx.sess, + inline.span(), + E0780, + "anonymous imports cannot be inlined" + ) + .span_label(import.span, "anonymous import") + .emit(); } // We consider inlining the documentation of `pub use` statements, but we @@ -2442,7 +2571,7 @@ fn clean_use_statement_inner<'tcx>( let path = clean_path(path, cx); let inner = if kind == hir::UseKind::Glob { if !denied { - let mut visited = FxHashSet::default(); + let mut visited = DefIdSet::default(); if let Some(items) = inline::try_inline_glob(cx, path.res, current_mod, &mut visited, inlined_names) { @@ -2451,17 +2580,16 @@ fn clean_use_statement_inner<'tcx>( } Import::new_glob(resolve_use_source(cx, path), true) } else { - if inline_attr.is_none() { - if let Res::Def(DefKind::Mod, did) = path.res { - if 
!did.is_local() && did.is_crate_root() { - // if we're `pub use`ing an extern crate root, don't inline it unless we - // were specifically asked for it - denied = true; - } - } + if inline_attr.is_none() + && let Res::Def(DefKind::Mod, did) = path.res + && !did.is_local() && did.is_crate_root() + { + // if we're `pub use`ing an extern crate root, don't inline it unless we + // were specifically asked for it + denied = true; } if !denied { - let mut visited = FxHashSet::default(); + let mut visited = DefIdSet::default(); let import_def_id = import.owner_id.to_def_id(); if let Some(mut items) = inline::try_inline( @@ -2512,8 +2640,8 @@ fn clean_maybe_renamed_foreign_item<'tcx>( hir::ForeignItemKind::Type => ForeignTypeItem, }; - Item::from_hir_id_and_parts( - item.hir_id(), + Item::from_def_id_and_parts( + item.owner_id.def_id.to_def_id(), Some(renamed.unwrap_or(item.ident.name)), kind, cx, diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs index ed7683e36..ef38ca3c1 100644 --- a/src/librustdoc/clean/render_macro_matchers.rs +++ b/src/librustdoc/clean/render_macro_matchers.rs @@ -63,7 +63,8 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option) -> bool { self.item_id .as_def_id() - .map(|did| tcx.get_attrs_unchecked(did).inner_docs()) + .map(|did| inner_docs(tcx.get_attrs_unchecked(did))) .unwrap_or(false) } @@ -439,17 +437,6 @@ impl Item { self.attrs.doc_value() } - /// Convenience wrapper around [`Self::from_def_id_and_parts`] which converts - /// `hir_id` to a [`DefId`] - pub(crate) fn from_hir_id_and_parts( - hir_id: hir::HirId, - name: Option, - kind: ItemKind, - cx: &mut DocContext<'_>, - ) -> Item { - Item::from_def_id_and_parts(cx.tcx.hir().local_def_id(hir_id).to_def_id(), name, kind, cx) - } - pub(crate) fn from_def_id_and_parts( def_id: DefId, name: Option, @@ -493,16 +480,16 @@ impl Item { } pub(crate) fn links(&self, cx: &Context<'_>) -> Vec { - use crate::html::format::href; + use crate::html::format::{href, link_tooltip}; cx.cache() .intra_doc_links .get(&self.item_id) .map_or(&[][..], |v| v.as_slice()) .iter() - .filter_map(|ItemLink { link: s, link_text, page_id: did, ref fragment }| { - debug!(?did); - if let Ok((mut href, ..)) = href(*did, cx) { + .filter_map(|ItemLink { link: s, link_text, page_id: id, ref fragment }| { + debug!(?id); + if let Ok((mut href, ..)) = href(*id, cx) { debug!(?href); if let Some(ref fragment) = *fragment { fragment.render(&mut href, cx.tcx()) @@ -510,6 +497,7 @@ impl Item { Some(RenderedLink { original_text: s.clone(), new_text: link_text.clone(), + tooltip: link_tooltip(*id, fragment, cx), href, }) } else { @@ -534,6 +522,7 @@ impl Item { original_text: s.clone(), new_text: link_text.clone(), href: String::new(), + tooltip: String::new(), }) .collect() } @@ -665,7 +654,7 @@ impl Item { tcx: TyCtxt<'_>, asyncness: hir::IsAsync, ) -> hir::FnHeader { - let sig = tcx.fn_sig(def_id); + let sig = tcx.fn_sig(def_id).skip_binder(); let constness = if tcx.is_const_fn(def_id) && is_unstable_const_fn(tcx, def_id).is_none() { hir::Constness::Const @@ -677,7 +666,7 @@ impl Item { let header = match *self.kind { ItemKind::ForeignFunctionItem(_) => { let def_id = self.item_id.as_def_id().unwrap(); - let abi = tcx.fn_sig(def_id).abi(); + let abi = tcx.fn_sig(def_id).skip_binder().abi(); hir::FnHeader { unsafety: if abi == Abi::RustIntrinsic { intrinsic_operation_unsafety(tcx, self.item_id.as_def_id().unwrap()) @@ -885,8 +874,6 @@ pub(crate) trait AttributesExt { fn span(&self) -> 
Option; - fn inner_docs(&self) -> bool; - fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet) -> Option>; } @@ -905,14 +892,6 @@ impl AttributesExt for [ast::Attribute] { self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span) } - /// Returns whether the first doc-comment is an inner attribute. - /// - //// If there are no doc-comments, return true. - /// FIXME(#78591): Support both inner and outer attributes on the same item. - fn inner_docs(&self) -> bool { - self.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == AttrStyle::Inner) - } - fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet) -> Option> { let sess = tcx.sess; let doc_cfg_active = tcx.features().doc_cfg; @@ -1021,58 +1000,6 @@ impl> NestedAttributesExt for I { } } -/// A portion of documentation, extracted from a `#[doc]` attribute. -/// -/// Each variant contains the line number within the complete doc-comment where the fragment -/// starts, as well as the Span where the corresponding doc comment or attribute is located. -/// -/// Included files are kept separate from inline doc comments so that proper line-number -/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are -/// kept separate because of issue #42760. -#[derive(Clone, PartialEq, Eq, Debug)] -pub(crate) struct DocFragment { - pub(crate) span: rustc_span::Span, - /// The module this doc-comment came from. - /// - /// This allows distinguishing between the original documentation and a pub re-export. - /// If it is `None`, the item was not re-exported. - pub(crate) parent_module: Option, - pub(crate) doc: Symbol, - pub(crate) kind: DocFragmentKind, - pub(crate) indent: usize, -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub(crate) enum DocFragmentKind { - /// A doc fragment created from a `///` or `//!` doc comment. - SugaredDoc, - /// A doc fragment created from a "raw" `#[doc=""]` attribute. - RawDoc, -} - -/// The goal of this function is to apply the `DocFragment` transformation that is required when -/// transforming into the final Markdown, which is applying the computed indent to each line in -/// each doc fragment (a `DocFragment` can contain multiple lines in case of `#[doc = ""]`). -/// -/// Note: remove the trailing newline where appropriate -fn add_doc_fragment(out: &mut String, frag: &DocFragment) { - let s = frag.doc.as_str(); - let mut iter = s.lines(); - if s.is_empty() { - out.push('\n'); - return; - } - while let Some(line) = iter.next() { - if line.chars().any(|c| !c.is_whitespace()) { - assert!(line.len() >= frag.indent); - out.push_str(&line[frag.indent..]); - } else { - out.push_str(line); - } - out.push('\n'); - } -} - /// Collapse a collection of [`DocFragment`]s into one string, /// handling indentation and newlines as needed. pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String { @@ -1084,98 +1011,18 @@ pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String { acc } -/// Removes excess indentation on comments in order for the Markdown -/// to be parsed correctly. This is necessary because the convention for -/// writing documentation is to provide a space between the /// or //! marker -/// and the doc text, but Markdown is whitespace-sensitive. 
For example, -/// a block of text with four-space indentation is parsed as a code block, -/// so if we didn't unindent comments, these list items -/// -/// /// A list: -/// /// -/// /// - Foo -/// /// - Bar -/// -/// would be parsed as if they were in a code block, which is likely not what the user intended. -fn unindent_doc_fragments(docs: &mut Vec) { - // `add` is used in case the most common sugared doc syntax is used ("/// "). The other - // fragments kind's lines are never starting with a whitespace unless they are using some - // markdown formatting requiring it. Therefore, if the doc block have a mix between the two, - // we need to take into account the fact that the minimum indent minus one (to take this - // whitespace into account). - // - // For example: - // - // /// hello! - // #[doc = "another"] - // - // In this case, you want "hello! another" and not "hello! another". - let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind) - && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc) - { - // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to - // "decide" how much the minimum indent will be. - 1 - } else { - 0 - }; - - // `min_indent` is used to know how much whitespaces from the start of each lines must be - // removed. Example: - // - // /// hello! - // #[doc = "another"] - // - // In here, the `min_indent` is 1 (because non-sugared fragment are always counted with minimum - // 1 whitespace), meaning that "hello!" will be considered a codeblock because it starts with 4 - // (5 - 1) whitespaces. - let Some(min_indent) = docs - .iter() - .map(|fragment| { - fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| { - if line.chars().all(|c| c.is_whitespace()) { - min_indent - } else { - // Compare against either space or tab, ignoring whether they are - // mixed or not. - let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count(); - cmp::min(min_indent, whitespace) - + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add } - } - }) - }) - .min() - else { - return; - }; - - for fragment in docs { - if fragment.doc == kw::Empty { - continue; - } - - let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 { - min_indent - add - } else { - min_indent - }; - - fragment.indent = min_indent; - } -} - /// A link that has not yet been rendered. /// /// This link will be turned into a rendered link by [`Item::links`]. #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct ItemLink { /// The original link written in the markdown - pub(crate) link: String, + pub(crate) link: Box, /// The link text displayed in the HTML. /// /// This may not be the same as `link` if there was a disambiguator /// in an intra-doc link (e.g. \[`fn@f`\]) - pub(crate) link_text: String, + pub(crate) link_text: Box, /// The `DefId` of the Item whose **HTML Page** contains the item being /// linked to. This will be different to `item_id` on item's that don't /// have their own page, such as struct fields and enum variants. @@ -1188,11 +1035,13 @@ pub struct RenderedLink { /// The text the link was original written as. /// /// This could potentially include disambiguators and backticks. - pub(crate) original_text: String, + pub(crate) original_text: Box, /// The text to display in the HTML - pub(crate) new_text: String, + pub(crate) new_text: Box, /// The URL to put in the `href` pub(crate) href: String, + /// The tooltip. 
+ pub(crate) tooltip: String, } /// The attributes on an [`Item`], including attributes like `#[derive(...)]` and `#[inline]`, @@ -1242,26 +1091,7 @@ impl Attributes { attrs: impl Iterator)>, doc_only: bool, ) -> Attributes { - let mut doc_strings = Vec::new(); - let mut other_attrs = ast::AttrVec::new(); - for (attr, parent_module) in attrs { - if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() { - trace!("got doc_str={doc_str:?}"); - let doc = beautify_doc_string(doc_str, comment_kind); - let kind = if attr.is_doc_comment() { - DocFragmentKind::SugaredDoc - } else { - DocFragmentKind::RawDoc - }; - let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 }; - doc_strings.push(fragment); - } else if !doc_only { - other_attrs.push(attr.clone()); - } - } - - unindent_doc_fragments(&mut doc_strings); - + let (doc_strings, other_attrs) = attrs_to_doc_fragments(attrs, doc_only); Attributes { doc_strings, other_attrs } } @@ -1280,20 +1110,6 @@ impl Attributes { if out.is_empty() { None } else { Some(out) } } - /// Return the doc-comments on this item, grouped by the module they came from. - /// The module can be different if this is a re-export with added documentation. - /// - /// The last newline is not trimmed so the produced strings are reusable between - /// early and late doc link resolution regardless of their position. - pub(crate) fn prepare_to_doc_link_resolution(&self) -> FxHashMap, String> { - let mut res = FxHashMap::default(); - for fragment in &self.doc_strings { - let out_str = res.entry(fragment.parent_module).or_default(); - add_doc_fragment(out_str, fragment); - } - res - } - /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined /// with newlines. pub(crate) fn collapsed_doc_value(&self) -> Option { @@ -1358,10 +1174,10 @@ impl GenericBound { pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool { use rustc_hir::TraitBoundModifier as TBM; - if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self { - if Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() { - return true; - } + if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self && + Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() + { + return true; } false } @@ -2416,10 +2232,7 @@ impl ConstantKind { pub(crate) fn is_literal(&self, tcx: TyCtxt<'_>) -> bool { match *self { - ConstantKind::TyConst { .. } => false, - ConstantKind::Extern { def_id } => def_id.as_local().map_or(false, |def_id| { - is_literal_expr(tcx, tcx.hir().local_def_id_to_hir_id(def_id)) - }), + ConstantKind::TyConst { .. } | ConstantKind::Extern { .. } => false, ConstantKind::Local { body, .. 
} | ConstantKind::Anonymous { body } => { is_literal_expr(tcx, body.hir_id) } @@ -2498,14 +2311,7 @@ impl Import { } pub(crate) fn imported_item_is_doc_hidden(&self, tcx: TyCtxt<'_>) -> bool { - match self.source.did { - Some(did) => tcx - .get_attrs(did, sym::doc) - .filter_map(ast::Attribute::meta_item_list) - .flatten() - .has_word(sym::hidden), - None => false, - } + self.source.did.map_or(false, |did| tcx.is_doc_hidden(did)) } } diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs index 71eddf434..20627c2cf 100644 --- a/src/librustdoc/clean/types/tests.rs +++ b/src/librustdoc/clean/types/tests.rs @@ -2,6 +2,7 @@ use super::*; use crate::clean::collapse_doc_fragments; +use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind}; use rustc_span::create_default_session_globals_then; use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::Symbol; diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index a12f764fa..c9c1c2c45 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -29,11 +29,6 @@ mod tests; pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate { let module = crate::visit_ast::RustdocVisitor::new(cx).visit(); - for &cnum in cx.tcx.crates(()) { - // Analyze doc-reachability for extern items - crate::visit_lib::lib_embargo_visit_item(cx, cnum.as_def_id()); - } - // Clean the crate, translating the entire librustc_ast AST to one that is // understood by rustdoc. let mut module = clean_doc_module(&module, cx); @@ -134,7 +129,7 @@ fn external_generic_args<'tcx>( }); GenericArgs::Parenthesized { inputs, output } } else { - GenericArgs::AngleBracketed { args: args.into(), bindings: bindings.into() } + GenericArgs::AngleBracketed { args: args.into(), bindings } } } @@ -271,7 +266,7 @@ pub(crate) fn print_evaluated_const( underscores_and_type: bool, ) -> Option { tcx.const_eval_poly(def_id).ok().and_then(|val| { - let ty = tcx.type_of(def_id); + let ty = tcx.type_of(def_id).subst_identity(); match (val, ty.kind()) { (_, &ty::Ref(..)) => None, (ConstValue::Scalar(_), &ty::Adt(_, _)) => None, @@ -350,10 +345,10 @@ pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool { return true; } - if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind { - if let hir::ExprKind::Lit(_) = &expr.kind { - return true; - } + if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind && + let hir::ExprKind::Lit(_) = &expr.kind + { + return true; } } @@ -475,6 +470,12 @@ pub(crate) fn get_auto_trait_and_blanket_impls( cx: &mut DocContext<'_>, item_def_id: DefId, ) -> impl Iterator { + // FIXME: To be removed once `parallel_compiler` bugs are fixed! + // More information in . + if cfg!(parallel_compiler) { + return vec![].into_iter().chain(vec![].into_iter()); + } + let auto_impls = cx .sess() .prof diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 56b40d8c6..2c514a0c8 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -509,7 +509,7 @@ impl Options { // these values up both in `dataset` and in the storage API, so it needs to be able // to convert the names back and forth. Despite doing this kebab-case to // StudlyCaps transformation automatically, the JS DOM API does not provide a - // mechanism for doing the just transformation on a string. So we want to avoid + // mechanism for doing just the transformation on a string. So we want to avoid // the StudlyCaps representation in the `dataset` property. 
// // We solve this by replacing all the `-`s with `_`s. We do that here, when we diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 2153e7d8c..fbfc58a43 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -1,23 +1,22 @@ -use rustc_ast::NodeId; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::{self, Lrc}; use rustc_data_structures::unord::UnordSet; use rustc_errors::emitter::{Emitter, EmitterWriter}; use rustc_errors::json::JsonEmitter; +use rustc_errors::TerminalUrl; use rustc_feature::UnstableFeatures; -use rustc_hir::def::{Namespace, Res}; -use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId}; +use rustc_hir::def::Res; +use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId}; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{HirId, Path, TraitCandidate}; +use rustc_hir::{HirId, Path}; use rustc_interface::interface; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{ParamEnv, Ty, TyCtxt}; -use rustc_resolve as resolve; -use rustc_session::config::{self, CrateType, ErrorOutputType}; +use rustc_session::config::{self, CrateType, ErrorOutputType, ResolveDocLinks}; use rustc_session::lint; use rustc_session::Session; use rustc_span::symbol::sym; -use rustc_span::{source_map, Span, Symbol}; +use rustc_span::{source_map, Span}; use std::cell::RefCell; use std::mem; @@ -28,30 +27,12 @@ use crate::clean::inline::build_external_trait; use crate::clean::{self, ItemId}; use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions}; use crate::formats::cache::Cache; -use crate::passes::collect_intra_doc_links::PreprocessedMarkdownLink; use crate::passes::{self, Condition::*}; pub(crate) use rustc_session::config::{Input, Options, UnstableOptions}; -pub(crate) struct ResolverCaches { - pub(crate) markdown_links: Option>>, - pub(crate) doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option>>, - /// Traits in scope for a given module. - /// See `collect_intra_doc_links::traits_implemented_by` for more details. - pub(crate) traits_in_scope: DefIdMap>, - pub(crate) all_trait_impls: Option>, - pub(crate) all_macro_rules: FxHashMap>, -} - pub(crate) struct DocContext<'tcx> { pub(crate) tcx: TyCtxt<'tcx>, - /// Name resolver. Used for intra-doc links. - /// - /// The `Rc>` wrapping is needed because that is what's returned by - /// [`rustc_interface::Queries::expansion()`]. - // FIXME: see if we can get rid of this RefCell somehow - pub(crate) resolver: Rc>, - pub(crate) resolver_caches: ResolverCaches, /// Used for normalization. /// /// Most of this logic is copied from rustc_lint::late. @@ -60,11 +41,11 @@ pub(crate) struct DocContext<'tcx> { pub(crate) external_traits: Rc>>, /// Used while populating `external_traits` to ensure we don't process the same trait twice at /// the same time. - pub(crate) active_extern_traits: FxHashSet, + pub(crate) active_extern_traits: DefIdSet, // The current set of parameter substitutions, // for expanding type aliases at the HIR level: /// Table `DefId` of type, lifetime, or const parameter -> substituted type, lifetime, or const - pub(crate) substs: FxHashMap, + pub(crate) substs: DefIdMap, /// Table synthetic type parameter for `impl Trait` in argument position -> bounds pub(crate) impl_trait_bounds: FxHashMap>, /// Auto-trait or blanket impls processed so far, as `(self_ty, trait_def_id)`. 
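
The kebab-case/StudlyCaps note in config.rs above boils down to a one-character substitution on the setting name. A minimal illustrative sketch (not code from this patch; the helper name and example setting are invented):

fn normalize_setting_name(name: &str) -> String {
    // e.g. "line-numbers" -> "line_numbers": underscores survive the DOM
    // dataset kebab-case/StudlyCaps round trip, so the JS can use the same
    // key in both `dataset` and the storage API.
    name.replace('-', "_")
}
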
@@ -99,20 +80,9 @@ impl<'tcx> DocContext<'tcx> { ret } - pub(crate) fn enter_resolver(&self, f: F) -> R - where - F: FnOnce(&mut resolve::Resolver<'_>) -> R, - { - self.resolver.borrow_mut().access(f) - } - /// Call the closure with the given parameters set as /// the substitutions for a type alias' RHS. - pub(crate) fn enter_alias( - &mut self, - substs: FxHashMap, - f: F, - ) -> R + pub(crate) fn enter_alias(&mut self, substs: DefIdMap, f: F) -> R where F: FnOnce(&mut Self) -> R, { @@ -133,12 +103,6 @@ impl<'tcx> DocContext<'tcx> { _ => None, } } - - pub(crate) fn with_all_trait_impls(&mut self, f: impl FnOnce(&mut Self, &[DefId])) { - let all_trait_impls = self.resolver_caches.all_trait_impls.take(); - f(self, all_trait_impls.as_ref().expect("`all_trait_impls` are already borrowed")); - self.resolver_caches.all_trait_impls = all_trait_impls; - } } /// Creates a new diagnostic `Handler` that can be used to emit warnings and errors. @@ -151,8 +115,10 @@ pub(crate) fn new_handler( diagnostic_width: Option, unstable_opts: &UnstableOptions, ) -> rustc_errors::Handler { - let fallback_bundle = - rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false); + let fallback_bundle = rustc_errors::fallback_fluent_bundle( + rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), + false, + ); let emitter: Box = match error_format { ErrorOutputType::HumanReadable(kind) => { let (short, color_config) = kind.unzip(); @@ -167,6 +133,7 @@ pub(crate) fn new_handler( diagnostic_width, false, unstable_opts.track_diagnostics, + TerminalUrl::No, ) .ui_testing(unstable_opts.ui_testing), ) @@ -186,6 +153,7 @@ pub(crate) fn new_handler( diagnostic_width, false, unstable_opts.track_diagnostics, + TerminalUrl::No, ) .ui_testing(unstable_opts.ui_testing), ) @@ -221,6 +189,7 @@ pub(crate) fn create_config( scrape_examples_options, .. }: RustdocOptions, + RenderOptions { document_private, .. }: &RenderOptions, ) -> rustc_interface::Config { // Add the doc cfg into the doc build. cfgs.push("doc".to_string()); @@ -248,6 +217,13 @@ pub(crate) fn create_config( let crate_types = if proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] }; + let resolve_doc_links = if *document_private { + ResolveDocLinks::All + } else { + // Should be `ResolveDocLinks::Exported` in theory, but for some reason rustdoc + // still tries to request resolutions for links on private items. + ResolveDocLinks::All + }; let test = scrape_examples_options.map(|opts| opts.scrape_tests).unwrap_or(false); // plays with error output here! 
let sessopts = config::Options { @@ -261,6 +237,7 @@ pub(crate) fn create_config( target_triple: target, unstable_features: UnstableFeatures::from_environment(crate_name.as_deref()), actually_rustdoc: true, + resolve_doc_links, unstable_opts, error_format, diagnostic_width, @@ -279,6 +256,7 @@ pub(crate) fn create_config( output_file: None, output_dir: None, file_loader: None, + locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES, lint_caps, parse_sess_created: None, register_lints: Some(Box::new(crate::lint::register_lints)), @@ -316,8 +294,6 @@ pub(crate) fn create_config( pub(crate) fn run_global_ctxt( tcx: TyCtxt<'_>, - resolver: Rc>, - resolver_caches: ResolverCaches, show_coverage: bool, render_options: RenderOptions, output_format: OutputFormat, @@ -351,8 +327,6 @@ pub(crate) fn run_global_ctxt( let mut ctxt = DocContext { tcx, - resolver, - resolver_caches, param_env: ParamEnv::empty(), external_traits: Default::default(), active_extern_traits: Default::default(), @@ -367,6 +341,10 @@ pub(crate) fn run_global_ctxt( show_coverage, }; + for cnum in tcx.crates(()) { + crate::visit_lib::lib_embargo_visit_item(&mut ctxt, cnum.as_def_id()); + } + // Small hack to force the Sized trait to be present. // // Note that in case of `#![no_core]`, the trait is not available. diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index c1a652c75..9cf84acc7 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1,11 +1,9 @@ use rustc_ast as ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::Lrc; -use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError}; -use rustc_hir as hir; -use rustc_hir::def_id::LOCAL_CRATE; -use rustc_hir::intravisit; -use rustc_hir::{HirId, CRATE_HIR_ID}; +use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError, TerminalUrl}; +use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID, LOCAL_CRATE}; +use rustc_hir::{self as hir, intravisit, CRATE_HIR_ID}; use rustc_interface::interface; use rustc_middle::hir::map::Map; use rustc_middle::hir::nested_filter; @@ -98,6 +96,7 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> { output_file: None, output_dir: None, file_loader: None, + locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES, lint_caps, parse_sess_created: None, register_lints: Some(Box::new(crate::lint::register_lints)), @@ -140,7 +139,7 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> { }; hir_collector.visit_testable( "".to_string(), - CRATE_HIR_ID, + CRATE_DEF_ID, tcx.hir().span(CRATE_HIR_ID), |this| tcx.hir().walk_toplevel_module(this), ); @@ -231,11 +230,11 @@ fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions { if attr.has_name(sym::no_crate_inject) { opts.no_crate_inject = true; } - if attr.has_name(sym::attr) { - if let Some(l) = attr.meta_item_list() { - for item in l { - opts.attrs.push(pprust::meta_list_item_to_string(item)); - } + if attr.has_name(sym::attr) + && let Some(l) = attr.meta_item_list() + { + for item in l { + opts.attrs.push(pprust::meta_list_item_to_string(item)); } } } @@ -547,8 +546,10 @@ pub(crate) fn make_test( // Any errors in parsing should also appear when the doctest is compiled for real, so just // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. 
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let fallback_bundle = - rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false); + let fallback_bundle = rustc_errors::fallback_fluent_bundle( + rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), + false, + ); supports_color = EmitterWriter::stderr( ColorConfig::Auto, None, @@ -559,6 +560,7 @@ pub(crate) fn make_test( Some(80), false, false, + TerminalUrl::No, ) .supports_color(); @@ -573,6 +575,7 @@ pub(crate) fn make_test( None, false, false, + TerminalUrl::No, ); // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser @@ -594,31 +597,28 @@ pub(crate) fn make_test( loop { match parser.parse_item(ForceCollect::No) { Ok(Some(item)) => { - if !found_main { - if let ast::ItemKind::Fn(..) = item.kind { - if item.ident.name == sym::main { - found_main = true; - } - } + if !found_main && + let ast::ItemKind::Fn(..) = item.kind && + item.ident.name == sym::main + { + found_main = true; } - if !found_extern_crate { - if let ast::ItemKind::ExternCrate(original) = item.kind { - // This code will never be reached if `crate_name` is none because - // `found_extern_crate` is initialized to `true` if it is none. - let crate_name = crate_name.unwrap(); + if !found_extern_crate && + let ast::ItemKind::ExternCrate(original) = item.kind + { + // This code will never be reached if `crate_name` is none because + // `found_extern_crate` is initialized to `true` if it is none. + let crate_name = crate_name.unwrap(); - match original { - Some(name) => found_extern_crate = name.as_str() == crate_name, - None => found_extern_crate = item.ident.as_str() == crate_name, - } + match original { + Some(name) => found_extern_crate = name.as_str() == crate_name, + None => found_extern_crate = item.ident.as_str() == crate_name, } } - if !found_macro { - if let ast::ItemKind::MacCall(..) = item.kind { - found_macro = true; - } + if !found_macro && let ast::ItemKind::MacCall(..) = item.kind { + found_macro = true; } if found_main && found_extern_crate { @@ -744,8 +744,10 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { // Any errors in parsing should also appear when the doctest is compiled for real, so just // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let fallback_bundle = - rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false); + let fallback_bundle = rustc_errors::fallback_fluent_bundle( + rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), + false, + ); let emitter = EmitterWriter::new( Box::new(io::sink()), @@ -758,6 +760,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { None, false, false, + TerminalUrl::No, ); let handler = Handler::with_emitter(false, None, Box::new(emitter)); @@ -766,8 +769,8 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) { Ok(p) => p, Err(_) => { - debug!("Cannot build a parser to check mod attr so skipping..."); - return true; + // If there is an unclosed delimiter, an error will be returned by the tokentrees. + return false; } }; // If a parsing error happened, it's very likely that the attribute is incomplete. @@ -775,15 +778,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { e.cancel(); return false; } - // We now check if there is an unclosed delimiter for the attribute. 
To do so, we look at - // the `unclosed_delims` and see if the opening square bracket was closed. - parser - .unclosed_delims() - .get(0) - .map(|unclosed| { - unclosed.unclosed_span.map(|s| s.lo()).unwrap_or(BytePos(0)) != BytePos(2) - }) - .unwrap_or(true) + true }) }) .unwrap_or(false) @@ -971,14 +966,12 @@ impl Collector { fn get_filename(&self) -> FileName { if let Some(ref source_map) = self.source_map { let filename = source_map.span_to_filename(self.position); - if let FileName::Real(ref filename) = filename { - if let Ok(cur_dir) = env::current_dir() { - if let Some(local_path) = filename.local_path() { - if let Ok(path) = local_path.strip_prefix(&cur_dir) { - return path.to_owned().into(); - } - } - } + if let FileName::Real(ref filename) = filename && + let Ok(cur_dir) = env::current_dir() && + let Some(local_path) = filename.local_path() && + let Ok(path) = local_path.strip_prefix(&cur_dir) + { + return path.to_owned().into(); } filename } else if let Some(ref filename) = self.filename { @@ -1214,11 +1207,11 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> { fn visit_testable( &mut self, name: String, - hir_id: HirId, + def_id: LocalDefId, sp: Span, nested: F, ) { - let ast_attrs = self.tcx.hir().attrs(hir_id); + let ast_attrs = self.tcx.hir().attrs(self.tcx.hir().local_def_id_to_hir_id(def_id)); if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) { if !cfg.matches(&self.sess.parse_sess, Some(self.tcx.features())) { return; @@ -1247,7 +1240,7 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> { self.collector.enable_per_target_ignores, Some(&crate::html::markdown::ExtraInfo::new( self.tcx, - hir_id, + def_id.to_def_id(), span_of_attrs(&attrs).unwrap_or(sp), )), ); @@ -1276,37 +1269,37 @@ impl<'a, 'hir, 'tcx> intravisit::Visitor<'hir> for HirCollector<'a, 'hir, 'tcx> _ => item.ident.to_string(), }; - self.visit_testable(name, item.hir_id(), item.span, |this| { + self.visit_testable(name, item.owner_id.def_id, item.span, |this| { intravisit::walk_item(this, item); }); } fn visit_trait_item(&mut self, item: &'hir hir::TraitItem<'_>) { - self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| { + self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| { intravisit::walk_trait_item(this, item); }); } fn visit_impl_item(&mut self, item: &'hir hir::ImplItem<'_>) { - self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| { + self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| { intravisit::walk_impl_item(this, item); }); } fn visit_foreign_item(&mut self, item: &'hir hir::ForeignItem<'_>) { - self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| { + self.visit_testable(item.ident.to_string(), item.owner_id.def_id, item.span, |this| { intravisit::walk_foreign_item(this, item); }); } fn visit_variant(&mut self, v: &'hir hir::Variant<'_>) { - self.visit_testable(v.ident.to_string(), v.hir_id, v.span, |this| { + self.visit_testable(v.ident.to_string(), v.def_id, v.span, |this| { intravisit::walk_variant(this, v); }); } fn visit_field_def(&mut self, f: &'hir hir::FieldDef<'_>) { - self.visit_testable(f.ident.to_string(), f.hir_id, f.span, |this| { + self.visit_testable(f.ident.to_string(), f.def_id, f.span, |this| { intravisit::walk_field_def(this, f); }); } diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index 1c78c5b8d..8dbfaf4bb 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs 
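
For reference, the visit_testable signature change above means attributes are now looked up by converting the LocalDefId back to a HirId at the use site. A minimal sketch of that lookup (function name invented, not part of the patch):

fn doc_attrs_for<'tcx>(
    tcx: rustc_middle::ty::TyCtxt<'tcx>,
    def_id: rustc_hir::def_id::LocalDefId,
) -> &'tcx [rustc_ast::Attribute] {
    // Same conversion the patch performs: LocalDefId -> HirId, then read the attributes.
    let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
    tcx.hir().attrs(hir_id)
}
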
@@ -1,7 +1,7 @@ use std::mem; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_hir::def_id::{CrateNum, DefId}; +use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet}; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::Symbol; @@ -33,7 +33,7 @@ pub(crate) struct Cache { /// /// The values of the map are a list of implementations and documentation /// found on that implementation. - pub(crate) impls: FxHashMap>, + pub(crate) impls: DefIdMap>, /// Maintains a mapping of local crate `DefId`s to the fully qualified name /// and "short type description" of that node. This is used when generating @@ -56,7 +56,7 @@ pub(crate) struct Cache { /// to the path used if the corresponding type is inlined. By /// doing this, we can detect duplicate impls on a trait page, and only display /// the impl for the inlined type. - pub(crate) exact_paths: FxHashMap>, + pub(crate) exact_paths: DefIdMap>, /// This map contains information about all known traits of this crate. /// Implementations of a crate should inherit the documentation of the @@ -127,7 +127,7 @@ pub(crate) struct Cache { struct CacheBuilder<'a, 'tcx> { cache: &'a mut Cache, /// This field is used to prevent duplicated impl blocks. - impl_ids: FxHashMap>, + impl_ids: DefIdMap, tcx: TyCtxt<'tcx>, } @@ -173,7 +173,7 @@ impl Cache { let (krate, mut impl_ids) = { let mut cache_builder = - CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: FxHashMap::default() }; + CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: Default::default() }; krate = cache_builder.fold_crate(krate); (krate, cache_builder.impl_ids) }; @@ -229,16 +229,15 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> { } // Collect all the implementors of traits. - if let clean::ImplItem(ref i) = *item.kind { - if let Some(trait_) = &i.trait_ { - if !i.kind.is_blanket() { - self.cache - .implementors - .entry(trait_.def_id()) - .or_default() - .push(Impl { impl_item: item.clone() }); - } - } + if let clean::ImplItem(ref i) = *item.kind && + let Some(trait_) = &i.trait_ && + !i.kind.is_blanket() + { + self.cache + .implementors + .entry(trait_.def_id()) + .or_default() + .push(Impl { impl_item: item.clone() }); } // Index this method for searching later on. @@ -288,6 +287,16 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> { } else { let last = self.cache.parent_stack.last().expect("parent_stack is empty 2"); let did = match &*last { + ParentStackItem::Impl { + // impl Trait for &T { fn method(self); } + // + // When generating a function index with the above shape, we want it + // associated with `T`, not with the primitive reference type. It should + // show up as `T::method`, rather than `reference::method`, in the search + // results page. + for_: clean::Type::BorrowedRef { type_, .. }, + .. + } => type_.def_id(&self.cache), ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache), ParentStackItem::Type(item_id) => item_id.as_def_id(), }; @@ -454,7 +463,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> { | clean::BorrowedRef { type_: box clean::Type::Path { ref path }, .. 
} => { dids.insert(path.def_id()); if let Some(generics) = path.generics() && - let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).kind() && + let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).subst_identity().kind() && adt.is_fundamental() { for ty in generics { if let Some(did) = ty.def_id(self.cache) { diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs index 2f1f4cbf3..452e14918 100644 --- a/src/librustdoc/formats/item_type.rs +++ b/src/librustdoc/formats/item_type.rs @@ -21,6 +21,7 @@ use crate::clean; /// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an /// ordering based on a helper function inside `item_module`, in the same file. #[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)] +#[repr(u8)] pub(crate) enum ItemType { Module = 0, ExternCrate = 1, @@ -139,7 +140,7 @@ impl From for ItemType { | DefKind::Field | DefKind::LifetimeParam | DefKind::GlobalAsm - | DefKind::Impl + | DefKind::Impl { .. } | DefKind::Closure | DefKind::Generator => Self::ForeignType, } diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index d3dc4065d..0e4c5ed68 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -34,6 +34,7 @@ use crate::clean::{ use crate::formats::item_type::ItemType; use crate::html::escape::Escape; use crate::html::render::Context; +use crate::passes::collect_intra_doc_links::UrlFragment; use super::url_parts_builder::estimate_item_path_byte_length; use super::url_parts_builder::UrlPartsBuilder; @@ -208,7 +209,7 @@ impl clean::GenericParamDef { if f.alternate() { write!(f, ": {:#}", print_generic_bounds(bounds, cx))?; } else { - write!(f, ": {}", print_generic_bounds(bounds, cx))?; + write!(f, ": {}", print_generic_bounds(bounds, cx))?; } } @@ -216,7 +217,7 @@ impl clean::GenericParamDef { if f.alternate() { write!(f, " = {:#}", ty.print(cx))?; } else { - write!(f, " = {}", ty.print(cx))?; + write!(f, " = {}", ty.print(cx))?; } } @@ -226,14 +227,14 @@ impl clean::GenericParamDef { if f.alternate() { write!(f, "const {}: {:#}", self.name, ty.print(cx))?; } else { - write!(f, "const {}: {}", self.name, ty.print(cx))?; + write!(f, "const {}: {}", self.name, ty.print(cx))?; } if let Some(default) = default { if f.alternate() { write!(f, " = {:#}", default)?; } else { - write!(f, " = {}", default)?; + write!(f, " = {}", default)?; } } @@ -289,7 +290,7 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>( if f.alternate() { f.write_str(" ")?; } else { - f.write_str("
")?; + f.write_str("\n")?; } match pred { @@ -352,23 +353,31 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>( } } else { let mut br_with_padding = String::with_capacity(6 * indent + 28); - br_with_padding.push_str("
"); - for _ in 0..indent + 4 { - br_with_padding.push_str(" "); + br_with_padding.push_str("\n"); + + let padding_amout = + if ending == Ending::Newline { indent + 4 } else { indent + "fn where ".len() }; + + for _ in 0..padding_amout { + br_with_padding.push_str(" "); } - let where_preds = where_preds.to_string().replace("
", &br_with_padding); + let where_preds = where_preds.to_string().replace('\n', &br_with_padding); if ending == Ending::Newline { - let mut clause = " ".repeat(indent.saturating_sub(1)); + let mut clause = " ".repeat(indent.saturating_sub(1)); write!(clause, "where{where_preds},")?; clause } else { - // insert a
tag after a single space but before multiple spaces at the start + // insert a newline after a single space but before multiple spaces at the start if indent == 0 { - format!("
where{where_preds}") + format!("\nwhere{where_preds}") } else { + // put the first one on the same line as the 'where' keyword + let where_preds = where_preds.replacen(&br_with_padding, " ", 1); + let mut clause = br_with_padding; - clause.truncate(clause.len() - 4 * " ".len()); + clause.truncate(clause.len() - "where ".len()); + write!(clause, "where{where_preds}")?; clause } @@ -701,11 +710,9 @@ pub(crate) fn href_with_root_path( } } }; - if !is_remote { - if let Some(root_path) = root_path { - let root = root_path.trim_end_matches('/'); - url_parts.push_front(root); - } + if !is_remote && let Some(root_path) = root_path { + let root = root_path.trim_end_matches('/'); + url_parts.push_front(root); } debug!(?url_parts); match shortty { @@ -760,6 +767,28 @@ pub(crate) fn href_relative_parts<'fqp>( } } +pub(crate) fn link_tooltip(did: DefId, fragment: &Option, cx: &Context<'_>) -> String { + let cache = cx.cache(); + let Some((fqp, shortty)) = cache.paths.get(&did) + .or_else(|| cache.external_paths.get(&did)) + else { return String::new() }; + let mut buf = Buffer::new(); + if let &Some(UrlFragment::Item(id)) = fragment { + write!(buf, "{} ", cx.tcx().def_descr(id)); + for component in fqp { + write!(buf, "{component}::"); + } + write!(buf, "{}", cx.tcx().item_name(id)); + } else if !fqp.is_empty() { + let mut fqp_it = fqp.into_iter(); + write!(buf, "{shortty} {}", fqp_it.next().unwrap()); + for component in fqp_it { + write!(buf, "::{component}"); + } + } + buf.into_inner() +} + /// Used to render a [`clean::Path`]. fn resolved_path<'cx>( w: &mut fmt::Formatter<'_>, @@ -1064,14 +1093,8 @@ fn fmt_type<'cx>( fmt_type(ty, f, use_absolute, cx)?; write!(f, ")") } - clean::Generic(..) => { - primitive_link( - f, - PrimitiveType::Reference, - &format!("{}{}{}", amp, lt, m), - cx, - )?; - fmt_type(ty, f, use_absolute, cx) + clean::Generic(name) => { + primitive_link(f, PrimitiveType::Reference, &format!("{amp}{lt}{m}{name}"), cx) } _ => { write!(f, "{}{}{}", amp, lt, m)?; @@ -1313,7 +1336,8 @@ impl clean::FnDecl { /// * `header_len`: The length of the function header and name. In other words, the number of /// characters in the function declaration up to but not including the parentheses. - ///
<br>Used to determine line-wrapping.
+    ///   This is expected to go into a `<pre>`/`code-header` block, so indentation and newlines
+    ///   are preserved.
     /// * `indent`: The number of spaces to indent each successive line with, if line-wrapping is
     ///   necessary.
     pub(crate) fn full_print<'a, 'tcx: 'a>(
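
The line-wrapping decision the doc comment above refers to is driven by the plain-text length of the declaration. A minimal sketch of the rule applied later in this hunk (standalone names, not code from this patch):

fn needs_line_wrapping(header_len: usize, args_plain_len: usize, arrow_plain_len: usize) -> bool {
    // Wrap onto one argument per line once the plain-text form of the
    // declaration (header + arguments + return arrow) exceeds 80 characters;
    // `indent` then controls how far continuation lines are pushed in.
    header_len + args_plain_len + arrow_plain_len > 80
}
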
@@ -1361,7 +1385,7 @@ impl clean::FnDecl {
                 }
             } else {
                 if i > 0 {
-                    args.push_str("<br>");
+                    args.push_str("\n");
                 }
                 if input.is_const {
                     args.push_str("const ");
@@ -1387,7 +1411,7 @@ impl clean::FnDecl {
         let mut args = args.into_inner();
 
         if self.c_variadic {
-            args.push_str(",<br> ...");
+            args.push_str(",\n ...");
             args_plain.push_str(", ...");
         }
@@ -1397,24 +1421,20 @@
         let declaration_len = header_len + args_plain.len() + arrow_plain.len();
         let output = if declaration_len > 80 {
-            let full_pad = format!("<br>{}", "&nbsp;".repeat(indent + 4));
-            let close_pad = format!("<br>{}", "&nbsp;".repeat(indent));
+            let full_pad = format!("\n{}", " ".repeat(indent + 4));
+            let close_pad = format!("\n{}", " ".repeat(indent));
             format!(
                 "({pad}{args}{close}){arrow}",
                 pad = if self.inputs.values.is_empty() { "" } else { &full_pad },
-                args = args.replace("<br>", &full_pad),
+                args = args.replace('\n', &full_pad),
                 close = close_pad,
                 arrow = arrow
             )
         } else {
-            format!("({args}){arrow}", args = args.replace("<br>", " "), arrow = arrow)
+            format!("({args}){arrow}", args = args.replace('\n', " "), arrow = arrow)
         };
-        if f.alternate() {
-            write!(f, "{}", output.replace("<br>
", "\n")) - } else { - write!(f, "{}", output) - } + write!(f, "{}", output) } } @@ -1617,7 +1637,7 @@ impl clean::TypeBinding { if f.alternate() { write!(f, ": {:#}", print_generic_bounds(bounds, cx))?; } else { - write!(f, ": {}", print_generic_bounds(bounds, cx))?; + write!(f, ": {}", print_generic_bounds(bounds, cx))?; } } } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 8a9e6caf6..2c9fc4e3c 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -58,11 +58,11 @@ pub(crate) fn render_example_with_highlighting( write_footer(out, playground_button); } -/// Highlights `src` as a macro, returning the HTML output. -pub(crate) fn render_macro_with_highlighting(src: &str, out: &mut Buffer) { - write_header(out, "macro", None, Tooltip::None); +/// Highlights `src` as an item-decl, returning the HTML output. +pub(crate) fn render_item_decl_with_highlighting(src: &str, out: &mut Buffer) { + write!(out, "
");
     write_code(out, src, None, None);
-    write_footer(out, None);
+    write!(out, "
"); } /// Highlights `src` as a source code page, returning the HTML output. @@ -96,13 +96,19 @@ fn write_header(out: &mut Buffer, class: &str, extra_content: Option, to ); if tooltip != Tooltip::None { + let edition_code; write!( out, - "
ⓘ
", - if let Tooltip::Edition(edition_info) = tooltip { - format!(" data-edition=\"{}\"", edition_info) - } else { - String::new() + "ⓘ", + match tooltip { + Tooltip::Ignore => "This example is not tested", + Tooltip::CompileFail => "This example deliberately fails to compile", + Tooltip::ShouldPanic => "This example panics", + Tooltip::Edition(edition) => { + edition_code = format!("This example runs with edition {edition}"); + &edition_code + } + Tooltip::None => unreachable!(), }, ); } @@ -460,10 +466,8 @@ impl<'a> PeekIter<'a> { } /// Returns the next item after the current one. It doesn't interfere with `peek_next` output. fn peek(&mut self) -> Option<&(TokenKind, &'a str)> { - if self.stored.is_empty() { - if let Some(next) = self.iter.next() { - self.stored.push_back(next); - } + if self.stored.is_empty() && let Some(next) = self.iter.next() { + self.stored.push_back(next); } self.stored.front() } diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index a60e7cb10..6ab849c92 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -30,7 +30,6 @@ pub(crate) struct Page<'a> { pub(crate) root_path: &'a str, pub(crate) static_root_path: Option<&'a str>, pub(crate) description: &'a str, - pub(crate) keywords: &'a str, pub(crate) resource_suffix: &'a str, } diff --git a/src/librustdoc/html/length_limit.rs b/src/librustdoc/html/length_limit.rs index bbdc91c8d..4c8db2c67 100644 --- a/src/librustdoc/html/length_limit.rs +++ b/src/librustdoc/html/length_limit.rs @@ -61,14 +61,14 @@ impl HtmlWithLimit { /// and returns [`ControlFlow::Break`]. pub(super) fn push(&mut self, text: &str) -> ControlFlow<(), ()> { if self.len + text.len() > self.limit { - return ControlFlow::BREAK; + return ControlFlow::Break(()); } self.flush_queue(); write!(self.buf, "{}", Escape(text)).unwrap(); self.len += text.len(); - ControlFlow::CONTINUE + ControlFlow::Continue(()) } /// Open an HTML tag. diff --git a/src/librustdoc/html/length_limit/tests.rs b/src/librustdoc/html/length_limit/tests.rs index 2d02b8a16..2185c0348 100644 --- a/src/librustdoc/html/length_limit/tests.rs +++ b/src/librustdoc/html/length_limit/tests.rs @@ -83,7 +83,7 @@ fn past_the_limit() { buf.push("word#")?; buf.push(&n.to_string())?; buf.close_tag(); - ControlFlow::CONTINUE + ControlFlow::Continue(()) }); buf.close_tag(); assert_eq!( diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 4ff67fe15..fe446ae3c 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -27,14 +27,14 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; -use rustc_hir::HirId; use rustc_middle::ty::TyCtxt; +pub(crate) use rustc_resolve::rustdoc::main_body_opts; +use rustc_resolve::rustdoc::may_be_doc_link; use rustc_span::edition::Edition; use rustc_span::{Span, Symbol}; use once_cell::sync::Lazy; use std::borrow::Cow; -use std::cell::RefCell; use std::collections::VecDeque; use std::default::Default; use std::fmt::Write; @@ -47,6 +47,7 @@ use crate::html::escape::Escape; use crate::html::format::Buffer; use crate::html::highlight; use crate::html::length_limit::HtmlWithLimit; +use crate::html::render::small_url_encode; use crate::html::toc::TocBuilder; use pulldown_cmark::{ @@ -58,15 +59,6 @@ mod tests; const MAX_HEADER_LEVEL: u32 = 6; -/// Options for rendering Markdown in the main body of documentation. 
-pub(crate) fn main_body_opts() -> Options { - Options::ENABLE_TABLES - | Options::ENABLE_FOOTNOTES - | Options::ENABLE_STRIKETHROUGH - | Options::ENABLE_TASKLISTS - | Options::ENABLE_SMART_PUNCTUATION -} - /// Options for rendering Markdown in summaries (e.g., in search results). pub(crate) fn summary_opts() -> Options { Options::ENABLE_TABLES @@ -103,14 +95,14 @@ pub struct Markdown<'a> { /// E.g. if `heading_offset: HeadingOffset::H2`, then `# something` renders an `

`. pub heading_offset: HeadingOffset, } -/// A tuple struct like `Markdown` that renders the markdown with a table of contents. -pub(crate) struct MarkdownWithToc<'a>( - pub(crate) &'a str, - pub(crate) &'a mut IdMap, - pub(crate) ErrorCodes, - pub(crate) Edition, - pub(crate) &'a Option, -); +/// A struct like `Markdown` that renders the markdown with a table of contents. +pub(crate) struct MarkdownWithToc<'a> { + pub(crate) content: &'a str, + pub(crate) ids: &'a mut IdMap, + pub(crate) error_codes: ErrorCodes, + pub(crate) edition: Edition, + pub(crate) playground: &'a Option, +} /// A tuple struct like `Markdown` that renders the markdown escaping HTML tags /// and includes no paragraph tags. pub(crate) struct MarkdownItemInfo<'a>(pub(crate) &'a str, pub(crate) &'a mut IdMap); @@ -295,30 +287,7 @@ impl<'a, I: Iterator>> Iterator for CodeBlocks<'_, 'a, I> { doctest::make_test(&test, krate, false, &Default::default(), edition, None); let channel = if test.contains("#![feature(") { "&version=nightly" } else { "" }; - // These characters don't need to be escaped in a URI. - // FIXME: use a library function for percent encoding. - fn dont_escape(c: u8) -> bool { - (b'a' <= c && c <= b'z') - || (b'A' <= c && c <= b'Z') - || (b'0' <= c && c <= b'9') - || c == b'-' - || c == b'_' - || c == b'.' - || c == b'~' - || c == b'!' - || c == b'\'' - || c == b'(' - || c == b')' - || c == b'*' - } - let mut test_escaped = String::new(); - for b in test.bytes() { - if dont_escape(b) { - test_escaped.push(char::from(b)); - } else { - write!(test_escaped, "%{:02X}", b).unwrap(); - } - } + let test_escaped = small_url_encode(test); Some(format!( r#"Run"#, url, test_escaped, channel, edition, @@ -391,6 +360,9 @@ impl<'a, I: Iterator>> Iterator for LinkReplacer<'a, I> { trace!("it matched"); assert!(self.shortcut_link.is_none(), "shortcut links cannot be nested"); self.shortcut_link = Some(link); + if title.is_empty() && !link.tooltip.is_empty() { + *title = CowStr::Borrowed(link.tooltip.as_ref()); + } } } // Now that we're done with the shortcut link, don't replace any more text. @@ -441,9 +413,12 @@ impl<'a, I: Iterator>> Iterator for LinkReplacer<'a, I> { } // If this is a link, but not a shortcut link, // replace the URL, since the broken_link_callback was not called. - Some(Event::Start(Tag::Link(_, dest, _))) => { + Some(Event::Start(Tag::Link(_, dest, title))) => { if let Some(link) = self.links.iter().find(|&link| *link.original_text == **dest) { *dest = CowStr::Borrowed(link.href.as_ref()); + if title.is_empty() && !link.tooltip.is_empty() { + *title = CowStr::Borrowed(link.tooltip.as_ref()); + } } } // Anything else couldn't have been a valid Rust path, so no need to replace the text. @@ -577,10 +552,7 @@ impl<'a, I: Iterator>> SummaryLine<'a, I> { } fn check_if_allowed_tag(t: &Tag<'_>) -> bool { - matches!( - t, - Tag::Paragraph | Tag::Item | Tag::Emphasis | Tag::Strong | Tag::Link(..) | Tag::BlockQuote - ) + matches!(t, Tag::Paragraph | Tag::Emphasis | Tag::Strong | Tag::Link(..) 
| Tag::BlockQuote) } fn is_forbidden_tag(t: &Tag<'_>) -> bool { @@ -771,11 +743,7 @@ pub(crate) fn find_testable_code( } Event::Text(ref s) if register_header.is_some() => { let level = register_header.unwrap(); - if s.is_empty() { - tests.register_header("", level); - } else { - tests.register_header(s, level); - } + tests.register_header(s, level); register_header = None; } _ => {} @@ -784,45 +752,26 @@ pub(crate) fn find_testable_code( } pub(crate) struct ExtraInfo<'tcx> { - id: ExtraInfoId, + def_id: DefId, sp: Span, tcx: TyCtxt<'tcx>, } -enum ExtraInfoId { - Hir(HirId), - Def(DefId), -} - impl<'tcx> ExtraInfo<'tcx> { - pub(crate) fn new(tcx: TyCtxt<'tcx>, hir_id: HirId, sp: Span) -> ExtraInfo<'tcx> { - ExtraInfo { id: ExtraInfoId::Hir(hir_id), sp, tcx } - } - - pub(crate) fn new_did(tcx: TyCtxt<'tcx>, did: DefId, sp: Span) -> ExtraInfo<'tcx> { - ExtraInfo { id: ExtraInfoId::Def(did), sp, tcx } + pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: DefId, sp: Span) -> ExtraInfo<'tcx> { + ExtraInfo { def_id, sp, tcx } } fn error_invalid_codeblock_attr(&self, msg: &str, help: &str) { - let hir_id = match self.id { - ExtraInfoId::Hir(hir_id) => hir_id, - ExtraInfoId::Def(item_did) => { - match item_did.as_local() { - Some(item_did) => self.tcx.hir().local_def_id_to_hir_id(item_did), - None => { - // If non-local, no need to check anything. - return; - } - } - } - }; - self.tcx.struct_span_lint_hir( - crate::lint::INVALID_CODEBLOCK_ATTRIBUTES, - hir_id, - self.sp, - msg, - |lint| lint.help(help), - ); + if let Some(def_id) = self.def_id.as_local() { + self.tcx.struct_span_lint_hir( + crate::lint::INVALID_CODEBLOCK_ATTRIBUTES, + self.tcx.hir().local_def_id_to_hir_id(def_id), + self.sp, + msg, + |lint| lint.help(help), + ); + } } } @@ -1029,8 +978,8 @@ impl Markdown<'_> { let mut replacer = |broken_link: BrokenLink<'_>| { links .iter() - .find(|link| link.original_text.as_str() == &*broken_link.reference) - .map(|link| (link.href.as_str().into(), link.new_text.as_str().into())) + .find(|link| &*link.original_text == &*broken_link.reference) + .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into())) }; let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer)); @@ -1051,7 +1000,7 @@ impl Markdown<'_> { impl MarkdownWithToc<'_> { pub(crate) fn into_string(self) -> String { - let MarkdownWithToc(md, ids, codes, edition, playground) = self; + let MarkdownWithToc { content: md, ids, error_codes: codes, edition, playground } = self; let p = Parser::new_ext(md, main_body_opts()).into_offset_iter(); @@ -1112,8 +1061,8 @@ impl MarkdownSummaryLine<'_> { let mut replacer = |broken_link: BrokenLink<'_>| { links .iter() - .find(|link| link.original_text.as_str() == &*broken_link.reference) - .map(|link| (link.href.as_str().into(), link.new_text.as_str().into())) + .find(|link| &*link.original_text == &*broken_link.reference) + .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into())) }; let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer)) @@ -1159,8 +1108,8 @@ fn markdown_summary_with_limit( let mut replacer = |broken_link: BrokenLink<'_>| { link_names .iter() - .find(|link| link.original_text.as_str() == &*broken_link.reference) - .map(|link| (link.href.as_str().into(), link.new_text.as_str().into())) + .find(|link| &*link.original_text == &*broken_link.reference) + .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into())) }; let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut 
replacer)); @@ -1191,18 +1140,18 @@ fn markdown_summary_with_limit( Event::Start(tag) => match tag { Tag::Emphasis => buf.open_tag("em"), Tag::Strong => buf.open_tag("strong"), - Tag::CodeBlock(..) => return ControlFlow::BREAK, + Tag::CodeBlock(..) => return ControlFlow::Break(()), _ => {} }, Event::End(tag) => match tag { Tag::Emphasis | Tag::Strong => buf.close_tag(), - Tag::Paragraph | Tag::Heading(..) => return ControlFlow::BREAK, + Tag::Paragraph | Tag::Heading(..) => return ControlFlow::Break(()), _ => {} }, Event::HardBreak | Event::SoftBreak => buf.push(" ")?, _ => {} }; - ControlFlow::CONTINUE + ControlFlow::Continue(()) }); (buf.finish(), stopped_early) @@ -1230,14 +1179,23 @@ pub(crate) fn short_markdown_summary(markdown: &str, link_names: &[RenderedLink] /// - Headings, links, and formatting are stripped. /// - Inline code is rendered as-is, surrounded by backticks. /// - HTML and code blocks are ignored. -pub(crate) fn plain_text_summary(md: &str) -> String { +pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> String { if md.is_empty() { return String::new(); } let mut s = String::with_capacity(md.len() * 3 / 2); - for event in Parser::new_ext(md, summary_opts()) { + let mut replacer = |broken_link: BrokenLink<'_>| { + link_names + .iter() + .find(|link| &*link.original_text == &*broken_link.reference) + .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into())) + }; + + let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer)); + + for event in p { match &event { Event::Text(text) => s.push_str(text), Event::Code(code) => { @@ -1265,14 +1223,12 @@ pub(crate) struct MarkdownLink { pub(crate) fn markdown_links( md: &str, - filter_map: impl Fn(MarkdownLink) -> Option, + preprocess_link: impl Fn(MarkdownLink) -> Option, ) -> Vec { if md.is_empty() { return vec![]; } - let links = RefCell::new(vec![]); - // FIXME: remove this function once pulldown_cmark can provide spans for link definitions. let locate = |s: &str, fallback: Range| unsafe { let s_start = s.as_ptr(); @@ -1304,46 +1260,23 @@ pub(crate) fn markdown_links( } }; - let mut push = |link: BrokenLink<'_>| { - let span = span_for_link(&link.reference, link.span); - filter_map(MarkdownLink { - kind: LinkType::ShortcutUnknown, - link: link.reference.to_string(), - range: span, - }) - .map(|link| links.borrow_mut().push(link)); - None - }; - let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut push)) - .into_offset_iter(); - - // There's no need to thread an IdMap through to here because - // the IDs generated aren't going to be emitted anywhere. - let mut ids = IdMap::new(); - let iter = Footnotes::new(HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1)); - - for ev in iter { - if let Event::Start(Tag::Link( - // `<>` links cannot be intra-doc links so we skip them. 
- kind @ (LinkType::Inline - | LinkType::Reference - | LinkType::ReferenceUnknown - | LinkType::Collapsed - | LinkType::CollapsedUnknown - | LinkType::Shortcut - | LinkType::ShortcutUnknown), - dest, - _, - )) = ev.0 - { - debug!("found link: {dest}"); - let span = span_for_link(&dest, ev.1); - filter_map(MarkdownLink { kind, link: dest.into_string(), range: span }) - .map(|link| links.borrow_mut().push(link)); + Parser::new_with_broken_link_callback( + md, + main_body_opts(), + Some(&mut |link: BrokenLink<'_>| Some((link.reference, "".into()))), + ) + .into_offset_iter() + .filter_map(|(event, span)| match event { + Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => { + preprocess_link(MarkdownLink { + kind: link_type, + range: span_for_link(&dest, span), + link: dest.into_string(), + }) } - } - - links.into_inner() + _ => None, + }) + .collect() } #[derive(Debug)] diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs index 5878c5826..e05635a02 100644 --- a/src/librustdoc/html/markdown/tests.rs +++ b/src/librustdoc/html/markdown/tests.rs @@ -249,7 +249,7 @@ fn test_short_markdown_summary() { #[test] fn test_plain_text_summary() { fn t(input: &str, expect: &str) { - let output = plain_text_summary(input); + let output = plain_text_summary(input, &[]); assert_eq!(output, expect, "original: {}", input); } diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 5cefe9475..5e4a59562 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -6,7 +6,7 @@ use std::rc::Rc; use std::sync::mpsc::{channel, Receiver}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_hir::def_id::{DefId, LOCAL_CRATE}; +use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE}; use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_span::edition::Edition; @@ -18,7 +18,7 @@ use super::search_index::build_index; use super::write_shared::write_shared; use super::{ collect_spans_and_sources, print_sidebar, scrape_examples_help, sidebar_module_like, AllTypes, - LinkFromSrc, NameDoc, StylePath, BASIC_KEYWORDS, + LinkFromSrc, StylePath, }; use crate::clean::{self, types::ExternalLocation, ExternalCrate}; @@ -56,7 +56,7 @@ pub(crate) struct Context<'tcx> { pub(super) render_redirect_pages: bool, /// Tracks section IDs for `Deref` targets so they match in both the main /// body and the sidebar. - pub(super) deref_id_map: FxHashMap, + pub(super) deref_id_map: DefIdMap, /// The map used to ensure all generated 'id=' attributes are unique. pub(super) id_map: IdMap, /// Shared mutable state. 
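
The rewritten markdown_links above filters pulldown-cmark events directly instead of buffering links in a RefCell. A reduced sketch of that shape (illustrative only, assuming pulldown-cmark 0.9):

use pulldown_cmark::{Event, Options, Parser, Tag};

fn link_destinations(md: &str) -> Vec<String> {
    Parser::new_ext(md, Options::empty())
        .filter_map(|event| match event {
            // Every link start event carries its destination URL.
            Event::Start(Tag::Link(_, dest, _)) => Some(dest.into_string()),
            _ => None,
        })
        .collect()
}
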
@@ -182,7 +182,10 @@ impl<'tcx> Context<'tcx> { }; title.push_str(" - Rust"); let tyname = it.type_(); - let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc)); + let desc = it + .doc_value() + .as_ref() + .map(|doc| plain_text_summary(doc, &it.link_names(&self.cache()))); let desc = if let Some(desc) = desc { desc } else if it.is_crate() { @@ -195,7 +198,6 @@ impl<'tcx> Context<'tcx> { self.shared.layout.krate ) }; - let keywords = make_item_keywords(it); let name; let tyname_s = if it.is_crate() { name = format!("{} crate", tyname); @@ -212,7 +214,6 @@ impl<'tcx> Context<'tcx> { static_root_path: clone_shared.static_root_path.as_deref(), title: &title, description: &desc, - keywords: &keywords, resource_suffix: &clone_shared.resource_suffix, }; let mut page_buffer = Buffer::html(); @@ -258,7 +259,7 @@ impl<'tcx> Context<'tcx> { } /// Construct a map of items shown in the sidebar to a plain-text summary of their docs. - fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap> { + fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap> { // BTreeMap instead of HashMap to get a sorted output let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new(); let mut inserted: FxHashMap> = FxHashMap::default(); @@ -276,10 +277,7 @@ impl<'tcx> Context<'tcx> { if inserted.entry(short).or_default().insert(myname) { let short = short.to_string(); let myname = myname.to_string(); - map.entry(short).or_default().push(( - myname, - Some(item.doc_value().map_or_else(String::new, |s| plain_text_summary(&s))), - )); + map.entry(short).or_default().push(myname); } } @@ -544,7 +542,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { dst, render_redirect_pages: false, id_map, - deref_id_map: FxHashMap::default(), + deref_id_map: Default::default(), shared: Rc::new(scx), include_sources, types_with_notable_traits: FxHashSet::default(), @@ -572,7 +570,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { current: self.current.clone(), dst: self.dst.clone(), render_redirect_pages: self.render_redirect_pages, - deref_id_map: FxHashMap::default(), + deref_id_map: Default::default(), id_map: IdMap::new(), shared: Rc::clone(&self.shared), include_sources: self.include_sources, @@ -598,7 +596,6 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { root_path: "../", static_root_path: shared.static_root_path.as_deref(), description: "List of all items in this crate", - keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, }; let all = shared.all.replace(AllTypes::new()); @@ -708,14 +705,12 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { shared.fs.write(scrape_examples_help_file, v)?; } - if let Some(ref redirections) = shared.redirections { - if !redirections.borrow().is_empty() { - let redirect_map_path = - self.dst.join(crate_name.as_str()).join("redirect-map.json"); - let paths = serde_json::to_string(&*redirections.borrow()).unwrap(); - shared.ensure_dir(&self.dst.join(crate_name.as_str()))?; - shared.fs.write(redirect_map_path, paths)?; - } + if let Some(ref redirections) = shared.redirections && !redirections.borrow().is_empty() { + let redirect_map_path = + self.dst.join(crate_name.as_str()).join("redirect-map.json"); + let paths = serde_json::to_string(&*redirections.borrow()).unwrap(); + shared.ensure_dir(&self.dst.join(crate_name.as_str()))?; + shared.fs.write(redirect_map_path, paths)?; } // No need for it anymore. 
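
Many hunks in this patch, including the redirections write above, collapse nested `if let`/`if` blocks into let-chains. A minimal standalone sketch of the pattern (requires the nightly let_chains feature, as used inside rustc itself; names invented):

#![feature(let_chains)]

fn first_short_name(names: &[Option<&str>]) -> Option<usize> {
    for (idx, name) in names.iter().enumerate() {
        // Before: `if let Some(name) = name { if name.len() < 8 { ... } }`
        if let Some(name) = name && name.len() < 8 {
            return Some(idx);
        }
    }
    None
}
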
@@ -828,7 +823,3 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { &self.shared.cache } } - -fn make_item_keywords(it: &clean::Item) -> String { - format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap()) -} diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 4fa33e890..e6a040d02 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -38,7 +38,7 @@ pub(crate) use self::span_map::{collect_spans_and_sources, LinkFromSrc}; use std::collections::VecDeque; use std::default::Default; -use std::fmt; +use std::fmt::{self, Write}; use std::fs; use std::iter::Peekable; use std::path::PathBuf; @@ -50,7 +50,7 @@ use rustc_ast_pretty::pprust; use rustc_attr::{ConstStability, Deprecation, StabilityLevel}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def::CtorKind; -use rustc_hir::def_id::DefId; +use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::Mutability; use rustc_middle::middle::stability; use rustc_middle::ty; @@ -83,9 +83,6 @@ use crate::scrape_examples::{CallData, CallLocation}; use crate::try_none; use crate::DOC_RUST_LANG_ORG_CHANNEL; -/// A pair of name and its optional document. -pub(crate) type NameDoc = (String, Option); - pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ { crate::html::format::display_fn(move |f| { if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) } @@ -1115,7 +1112,7 @@ fn render_assoc_items( it: DefId, what: AssocItemRender<'_>, ) { - let mut derefs = FxHashSet::default(); + let mut derefs = DefIdSet::default(); derefs.insert(it); render_assoc_items_inner(w, cx, containing_item, it, what, &mut derefs) } @@ -1126,7 +1123,7 @@ fn render_assoc_items_inner( containing_item: &clean::Item, it: DefId, what: AssocItemRender<'_>, - derefs: &mut FxHashSet, + derefs: &mut DefIdSet, ) { info!("Documenting associated items of {:?}", containing_item.name); let shared = Rc::clone(&cx.shared); @@ -1215,7 +1212,7 @@ fn render_deref_methods( impl_: &Impl, container_item: &clean::Item, deref_mut: bool, - derefs: &mut FxHashSet, + derefs: &mut DefIdSet, ) { let cache = cx.cache(); let deref_type = impl_.inner_impl().trait_.as_ref().unwrap(); @@ -1313,7 +1310,7 @@ pub(crate) fn notable_traits_button(ty: &clean::Type, cx: &mut Context<'_>) -> O if has_notable_trait { cx.types_with_notable_traits.insert(ty.clone()); Some(format!( - " ⓘ", + " ⓘ", ty = Escape(&format!("{:#}", ty.print(cx))), )) } else { @@ -1528,11 +1525,7 @@ fn render_impl( }) }) .map(|item| format!("{}.{}", item.type_(), name)); - write!( - w, - "
", - id, item_type, in_trait_class, - ); + write!(w, "
", id, item_type, in_trait_class,); render_rightside(w, cx, item, containing_item, render_mode); if trait_.is_some() { // Anchors are only used on trait impls. @@ -1554,11 +1547,7 @@ fn render_impl( kind @ (clean::TyAssocConstItem(ty) | clean::AssocConstItem(ty, _)) => { let source_id = format!("{}.{}", item_type, name); let id = cx.derive_id(source_id.clone()); - write!( - w, - "
", - id, item_type, in_trait_class - ); + write!(w, "
", id, item_type, in_trait_class); render_rightside(w, cx, item, containing_item, render_mode); if trait_.is_some() { // Anchors are only used on trait impls. @@ -1606,11 +1595,7 @@ fn render_impl( clean::AssocTypeItem(tydef, _bounds) => { let source_id = format!("{}.{}", item_type, name); let id = cx.derive_id(source_id.clone()); - write!( - w, - "
", - id, item_type, in_trait_class - ); + write!(w, "
", id, item_type, in_trait_class); if trait_.is_some() { // Anchors are only used on trait impls. write!(w, "§", id); @@ -1844,7 +1829,7 @@ pub(crate) fn render_impl_summary( } else { format!(" data-aliases=\"{}\"", aliases.join(",")) }; - write!(w, "
", id, aliases); + write!(w, "
", id, aliases); render_rightside(w, cx, &i.impl_item, containing_item, RenderMode::Normal); write!(w, "§", id); write!(w, "

"); @@ -2032,31 +2017,60 @@ fn get_associated_constants( .collect::>() } -// The point is to url encode any potential character from a type with genericity. -fn small_url_encode(s: String) -> String { +pub(crate) fn small_url_encode(s: String) -> String { + // These characters don't need to be escaped in a URI. + // See https://url.spec.whatwg.org/#query-percent-encode-set + // and https://url.spec.whatwg.org/#urlencoded-parsing + // and https://url.spec.whatwg.org/#url-code-points + fn dont_escape(c: u8) -> bool { + (b'a' <= c && c <= b'z') + || (b'A' <= c && c <= b'Z') + || (b'0' <= c && c <= b'9') + || c == b'-' + || c == b'_' + || c == b'.' + || c == b',' + || c == b'~' + || c == b'!' + || c == b'\'' + || c == b'(' + || c == b')' + || c == b'*' + || c == b'/' + || c == b';' + || c == b':' + || c == b'?' + // As described in urlencoded-parsing, the + // first `=` is the one that separates key from + // value. Following `=`s are part of the value. + || c == b'=' + } let mut st = String::new(); let mut last_match = 0; - for (idx, c) in s.char_indices() { - let escaped = match c { - '<' => "%3C", - '>' => "%3E", - ' ' => "%20", - '?' => "%3F", - '\'' => "%27", - '&' => "%26", - ',' => "%2C", - ':' => "%3A", - ';' => "%3B", - '[' => "%5B", - ']' => "%5D", - '"' => "%22", - _ => continue, - }; + for (idx, b) in s.bytes().enumerate() { + if dont_escape(b) { + continue; + } - st += &s[last_match..idx]; - st += escaped; - // NOTE: we only expect single byte characters here - which is fine as long as we - // only match single byte characters + if last_match != idx { + // Invariant: `idx` must be the first byte in a character at this point. + st += &s[last_match..idx]; + } + if b == b' ' { + // URL queries are decoded with + replaced with SP. + // While the same is not true for hashes, rustdoc only needs to be + // consistent with itself when encoding them. + st += "+"; + } else if b == b'%' { + st += "%%"; + } else { + write!(st, "%{:02X}", b).unwrap(); + } + // Invariant: if the current byte is not at the start of a multi-byte character, + // we need to get down here so that when the next turn of the loop comes around, + // last_match winds up equalling idx. + // + // In other words, dont_escape must always return `false` in multi-byte character. 
last_match = idx + 1; } @@ -2175,7 +2189,7 @@ fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) { if let Some(impl_) = v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait()) { - let mut derefs = FxHashSet::default(); + let mut derefs = DefIdSet::default(); derefs.insert(did); sidebar_deref_methods(cx, out, impl_, v, &mut derefs, &mut used_links); } @@ -2195,7 +2209,7 @@ fn sidebar_deref_methods( out: &mut Buffer, impl_: &Impl, v: &[Impl], - derefs: &mut FxHashSet, + derefs: &mut DefIdSet, used_links: &mut FxHashSet, ) { let c = cx.cache(); @@ -2211,14 +2225,13 @@ fn sidebar_deref_methods( }) { debug!("found target, real_target: {:?} {:?}", target, real_target); - if let Some(did) = target.def_id(c) { - if let Some(type_did) = impl_.inner_impl().for_.def_id(c) { - // `impl Deref for S` - if did == type_did || !derefs.insert(did) { - // Avoid infinite cycles - return; - } - } + if let Some(did) = target.def_id(c) && + let Some(type_did) = impl_.inner_impl().for_.def_id(c) && + // `impl Deref for S` + (did == type_did || !derefs.insert(did)) + { + // Avoid infinite cycles + return; } let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait()); let inner_impl = target @@ -2252,25 +2265,24 @@ fn sidebar_deref_methods( } // Recurse into any further impls that might exist for `target` - if let Some(target_did) = target.def_id(c) { - if let Some(target_impls) = c.impls.get(&target_did) { - if let Some(target_deref_impl) = target_impls.iter().find(|i| { - i.inner_impl() - .trait_ - .as_ref() - .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait()) - .unwrap_or(false) - }) { - sidebar_deref_methods( - cx, - out, - target_deref_impl, - target_impls, - derefs, - used_links, - ); - } - } + if let Some(target_did) = target.def_id(c) && + let Some(target_impls) = c.impls.get(&target_did) && + let Some(target_deref_impl) = target_impls.iter().find(|i| { + i.inner_impl() + .trait_ + .as_ref() + .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait()) + .unwrap_or(false) + }) + { + sidebar_deref_methods( + cx, + out, + target_deref_impl, + target_impls, + derefs, + used_links, + ); } } } @@ -2755,8 +2767,6 @@ fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) { } } -pub(crate) const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang"; - /// Returns a list of all paths used in the type. /// This is used to help deduplicate imported impls /// for reexported types. If any of the contained diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index f824c9e3a..2869a3961 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -10,7 +10,7 @@ use rustc_middle::ty::layout::LayoutError; use rustc_middle::ty::{self, Adt, TyCtxt}; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Symbol}; -use rustc_target::abi::{LayoutS, Primitive, TagEncoding, VariantIdx, Variants}; +use rustc_target::abi::{LayoutS, Primitive, TagEncoding, Variants}; use std::cmp::Ordering; use std::fmt; use std::rc::Rc; @@ -39,10 +39,10 @@ use crate::html::{highlight, static_files}; use askama::Template; use itertools::Itertools; -const ITEM_TABLE_OPEN: &str = "
"; -const ITEM_TABLE_CLOSE: &str = "
"; -const ITEM_TABLE_ROW_OPEN: &str = "
"; -const ITEM_TABLE_ROW_CLOSE: &str = "
"; +const ITEM_TABLE_OPEN: &str = "
    "; +const ITEM_TABLE_CLOSE: &str = "
"; +const ITEM_TABLE_ROW_OPEN: &str = "
  • "; +const ITEM_TABLE_ROW_CLOSE: &str = "
  • "; // A component in a `use` path, like `string` in std::string::ToString struct PathComponent { @@ -338,14 +338,14 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: match *src { Some(src) => write!( w, - "
    {}extern crate {} as {};", + "
    {}extern crate {} as {};", visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx), anchor(myitem.item_id.expect_def_id(), src, cx), myitem.name.unwrap(), ), None => write!( w, - "
    {}extern crate {};", + "
    {}extern crate {};", visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx), anchor(myitem.item_id.expect_def_id(), myitem.name.unwrap(), cx), ), @@ -355,7 +355,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: } clean::ImportItem(ref import) => { - let (stab, stab_tags) = if let Some(import_def_id) = import.source.did { + let stab_tags = if let Some(import_def_id) = import.source.did { let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id); let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs)); @@ -367,15 +367,12 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: ..myitem.clone() }; - let stab = import_item.stability_class(cx.tcx()); let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx())); - (stab, stab_tags) + stab_tags } else { - (None, None) + None }; - let add = if stab.is_some() { " " } else { "" }; - w.write_str(ITEM_TABLE_ROW_OPEN); let id = match import.kind { clean::ImportKind::Simple(s) => { @@ -387,15 +384,14 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: let (stab_tags_before, stab_tags_after) = if stab_tags.is_empty() { ("", "") } else { - ("
    ", "
    ") + ("
    ", "
    ") }; write!( w, - "
    \ + "
    \ {vis}{imp}\
    \ {stab_tags_before}{stab_tags}{stab_tags_after}", - stab = stab.unwrap_or_default(), vis = visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx), imp = import.print(cx), ); @@ -417,9 +413,6 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: _ => "", }; - let stab = myitem.stability_class(cx.tcx()); - let add = if stab.is_some() { " " } else { "" }; - let visibility_emoji = match myitem.visibility(tcx) { Some(ty::Visibility::Restricted(_)) => { " ðŸ”’ " @@ -433,11 +426,11 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: let (docs_before, docs_after) = if docs.is_empty() { ("", "") } else { - ("
    ", "
    ") + ("
    ", "
    ") }; write!( w, - "
    \ + "
    \ {name}\ {visibility_emoji}\ {unsafety_flag}\ @@ -448,11 +441,9 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: visibility_emoji = visibility_emoji, stab_tags = extra_info_tags(myitem, item, cx.tcx()), class = myitem.type_(), - add = add, - stab = stab.unwrap_or_default(), unsafety_flag = unsafety_flag, href = item_path(myitem.type_(), myitem.name.unwrap().as_str()), - title = [full_path(cx, myitem), myitem.type_().to_string()] + title = [myitem.type_().to_string(), full_path(cx, myitem)] .iter() .filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None }) .collect::>() @@ -530,26 +521,24 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle let notable_traits = f.decl.output.as_return().and_then(|output| notable_traits_button(output, cx)); - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - w.reserve(header_len); - write!( - w, - "{vis}{constness}{asyncness}{unsafety}{abi}fn \ - {name}{generics}{decl}{notable_traits}{where_clause}", - vis = visibility, - constness = constness, - asyncness = asyncness, - unsafety = unsafety, - abi = abi, - name = name, - generics = f.generics.print(cx), - where_clause = print_where_clause(&f.generics, cx, 0, Ending::Newline), - decl = f.decl.full_print(header_len, 0, cx), - notable_traits = notable_traits.unwrap_or_default(), - ); - }); + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + w.reserve(header_len); + write!( + w, + "{vis}{constness}{asyncness}{unsafety}{abi}fn \ + {name}{generics}{decl}{notable_traits}{where_clause}", + vis = visibility, + constness = constness, + asyncness = asyncness, + unsafety = unsafety, + abi = abi, + name = name, + generics = f.generics.print(cx), + where_clause = print_where_clause(&f.generics, cx, 0, Ending::Newline), + decl = f.decl.full_print(header_len, 0, cx), + notable_traits = notable_traits.unwrap_or_default(), + ); }); document(w, cx, it, None, HeadingOffset::H2); } @@ -569,145 +558,140 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean: let must_implement_one_of_functions = tcx.trait_def(t.def_id).must_implement_one_of.clone(); // Output the trait definition - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - write!( - w, - "{}{}{}trait {}{}{}", - visibility_print_with_space(it.visibility(tcx), it.item_id, cx), - t.unsafety(tcx).print_with_space(), - if t.is_auto(tcx) { "auto " } else { "" }, - it.name.unwrap(), - t.generics.print(cx), - bounds - ); + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + write!( + w, + "{}{}{}trait {}{}{}", + visibility_print_with_space(it.visibility(tcx), it.item_id, cx), + t.unsafety(tcx).print_with_space(), + if t.is_auto(tcx) { "auto " } else { "" }, + it.name.unwrap(), + t.generics.print(cx), + bounds + ); - if !t.generics.where_predicates.is_empty() { - write!(w, "{}", print_where_clause(&t.generics, cx, 0, Ending::Newline)); - } else { - w.write_str(" "); - } + if !t.generics.where_predicates.is_empty() { + write!(w, "{}", print_where_clause(&t.generics, cx, 0, Ending::Newline)); + } else { + w.write_str(" "); + } - if t.items.is_empty() { - w.write_str("{ }"); - } else { - // FIXME: we should be using a derived_id for the Anchors here - w.write_str("{\n"); - let mut toggle = false; - - // If there are too many associated types, hide _everything_ - if should_hide_fields(count_types) { - toggle = true; - toggle_open( - w, - format_args!( - "{} associated items", - 
count_types + count_consts + count_methods - ), - ); - } - for types in [&required_types, &provided_types] { - for t in types { - render_assoc_item( - w, - t, - AssocItemLink::Anchor(None), - ItemType::Trait, - cx, - RenderMode::Normal, - ); - w.write_str(";\n"); - } - } - // If there are too many associated constants, hide everything after them - // We also do this if the types + consts is large because otherwise we could - // render a bunch of types and _then_ a bunch of consts just because both were - // _just_ under the limit - if !toggle && should_hide_fields(count_types + count_consts) { - toggle = true; - toggle_open( - w, - format_args!( - "{} associated constant{} and {} method{}", - count_consts, - pluralize(count_consts), - count_methods, - pluralize(count_methods), - ), - ); - } - if count_types != 0 && (count_consts != 0 || count_methods != 0) { - w.write_str("\n"); - } - for consts in [&required_consts, &provided_consts] { - for c in consts { - render_assoc_item( - w, - c, - AssocItemLink::Anchor(None), - ItemType::Trait, - cx, - RenderMode::Normal, - ); - w.write_str(";\n"); - } - } - if !toggle && should_hide_fields(count_methods) { - toggle = true; - toggle_open(w, format_args!("{} methods", count_methods)); - } - if count_consts != 0 && count_methods != 0 { - w.write_str("\n"); - } - for (pos, m) in required_methods.iter().enumerate() { + if t.items.is_empty() { + w.write_str("{ }"); + } else { + // FIXME: we should be using a derived_id for the Anchors here + w.write_str("{\n"); + let mut toggle = false; + + // If there are too many associated types, hide _everything_ + if should_hide_fields(count_types) { + toggle = true; + toggle_open( + w, + format_args!("{} associated items", count_types + count_consts + count_methods), + ); + } + for types in [&required_types, &provided_types] { + for t in types { render_assoc_item( w, - m, + t, AssocItemLink::Anchor(None), ItemType::Trait, cx, RenderMode::Normal, ); w.write_str(";\n"); - - if pos < required_methods.len() - 1 { - w.write_str(""); - } - } - if !required_methods.is_empty() && !provided_methods.is_empty() { - w.write_str("\n"); } - for (pos, m) in provided_methods.iter().enumerate() { + } + // If there are too many associated constants, hide everything after them + // We also do this if the types + consts is large because otherwise we could + // render a bunch of types and _then_ a bunch of consts just because both were + // _just_ under the limit + if !toggle && should_hide_fields(count_types + count_consts) { + toggle = true; + toggle_open( + w, + format_args!( + "{} associated constant{} and {} method{}", + count_consts, + pluralize(count_consts), + count_methods, + pluralize(count_methods), + ), + ); + } + if count_types != 0 && (count_consts != 0 || count_methods != 0) { + w.write_str("\n"); + } + for consts in [&required_consts, &provided_consts] { + for c in consts { render_assoc_item( w, - m, + c, AssocItemLink::Anchor(None), ItemType::Trait, cx, RenderMode::Normal, ); - match *m.kind { - clean::MethodItem(ref inner, _) - if !inner.generics.where_predicates.is_empty() => - { - w.write_str(",\n { ... }\n"); - } - _ => { - w.write_str(" { ... 
}\n"); - } - } + w.write_str(";\n"); + } + } + if !toggle && should_hide_fields(count_methods) { + toggle = true; + toggle_open(w, format_args!("{} methods", count_methods)); + } + if count_consts != 0 && count_methods != 0 { + w.write_str("\n"); + } - if pos < provided_methods.len() - 1 { - w.write_str(""); - } + if !required_methods.is_empty() { + write!(w, " // Required method{}\n", pluralize(required_methods.len())); + } + for (pos, m) in required_methods.iter().enumerate() { + render_assoc_item( + w, + m, + AssocItemLink::Anchor(None), + ItemType::Trait, + cx, + RenderMode::Normal, + ); + w.write_str(";\n"); + + if pos < required_methods.len() - 1 { + w.write_str(""); } - if toggle { - toggle_close(w); + } + if !required_methods.is_empty() && !provided_methods.is_empty() { + w.write_str("\n"); + } + + if !provided_methods.is_empty() { + write!(w, " // Provided method{}\n", pluralize(provided_methods.len())); + } + for (pos, m) in provided_methods.iter().enumerate() { + render_assoc_item( + w, + m, + AssocItemLink::Anchor(None), + ItemType::Trait, + cx, + RenderMode::Normal, + ); + + w.write_str(" { ... }\n"); + + if pos < provided_methods.len() - 1 { + w.write_str(""); } - w.write_str("}"); } - }); + if toggle { + toggle_close(w); + } + w.write_str("}"); + } }); // Trait documentation @@ -735,7 +719,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean: let method_toggle_class = if item_type.is_method() { " method-toggle" } else { "" }; write!(w, "
    "); } - write!(w, "
    ", id); + write!(w, "
    ", id); render_rightside(w, cx, m, t, RenderMode::Normal); write!(w, "

    "); render_assoc_item( @@ -1050,18 +1034,16 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean: } fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::TraitAlias) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - write!( - w, - "trait {}{}{} = {};", - it.name.unwrap(), - t.generics.print(cx), - print_where_clause(&t.generics, cx, 0, Ending::Newline), - bounds(&t.bounds, true, cx) - ); - }); + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + write!( + w, + "trait {}{}{} = {};", + it.name.unwrap(), + t.generics.print(cx), + print_where_clause(&t.generics, cx, 0, Ending::Newline), + bounds(&t.bounds, true, cx) + ); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1074,18 +1056,16 @@ fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: & } fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - write!( - w, - "type {}{}{where_clause} = impl {bounds};", - it.name.unwrap(), - t.generics.print(cx), - where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline), - bounds = bounds(&t.bounds, false, cx), - ); - }); + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + write!( + w, + "type {}{}{where_clause} = impl {bounds};", + it.name.unwrap(), + t.generics.print(cx), + where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline), + bounds = bounds(&t.bounds, false, cx), + ); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1101,10 +1081,10 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) { wrap_item(w, |w| { render_attributes_in_pre(w, it, ""); - write!(w, "{}", visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx)); write!( w, - "type {}{}{where_clause} = {type_};", + "{}type {}{}{where_clause} = {type_};", + visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), it.name.unwrap(), t.generics.print(cx), where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline), @@ -1113,7 +1093,7 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea }); } - wrap_into_item_decl(w, |w| write_content(w, cx, it, t)); + write_content(w, cx, it, t); document(w, cx, it, None, HeadingOffset::H2); @@ -1127,11 +1107,9 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea } fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - render_union(w, it, Some(&s.generics), &s.fields, "", cx); - }); + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + render_union(w, it, Some(&s.generics), &s.fields, cx); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1160,13 +1138,11 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean: §\ {name}: {ty}\ ", - id = id, - name = name, shortty = ItemType::StructField, ty = ty.print(cx), ); if let Some(stability_class) = field.stability_class(cx.tcx()) { - write!(w, "", stab = stability_class); + write!(w, ""); } document(w, cx, field, Some(it), HeadingOffset::H3); } @@ -1179,7 +1155,7 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean: fn 
print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item]) { for (i, ty) in s.iter().enumerate() { if i > 0 { - w.write_str(", "); + w.write_str(", "); } match *ty.kind { clean::StrippedItem(box clean::StructFieldItem(_)) => w.write_str("_"), @@ -1192,60 +1168,58 @@ fn print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item] fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::Enum) { let tcx = cx.tcx(); let count_variants = e.variants().count(); - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_pre(w, it, ""); - write!( - w, - "{}enum {}{}", - visibility_print_with_space(it.visibility(tcx), it.item_id, cx), - it.name.unwrap(), - e.generics.print(cx), - ); - if !print_where_clause_and_check(w, &e.generics, cx) { - // If there wasn't a `where` clause, we add a whitespace. - w.write_str(" "); - } + wrap_item(w, |w| { + render_attributes_in_pre(w, it, ""); + write!( + w, + "{}enum {}{}", + visibility_print_with_space(it.visibility(tcx), it.item_id, cx), + it.name.unwrap(), + e.generics.print(cx), + ); + if !print_where_clause_and_check(w, &e.generics, cx) { + // If there wasn't a `where` clause, we add a whitespace. + w.write_str(" "); + } - let variants_stripped = e.has_stripped_entries(); - if count_variants == 0 && !variants_stripped { - w.write_str("{}"); - } else { - w.write_str("{\n"); - let toggle = should_hide_fields(count_variants); - if toggle { - toggle_open(w, format_args!("{} variants", count_variants)); - } - for v in e.variants() { - w.write_str(" "); - let name = v.name.unwrap(); - match *v.kind { - // FIXME(#101337): Show discriminant - clean::VariantItem(ref var) => match var.kind { - clean::VariantKind::CLike => write!(w, "{}", name), - clean::VariantKind::Tuple(ref s) => { - write!(w, "{}(", name); - print_tuple_struct_fields(w, cx, s); - w.write_str(")"); - } - clean::VariantKind::Struct(ref s) => { - render_struct(w, v, None, None, &s.fields, " ", false, cx); - } - }, - _ => unreachable!(), - } - w.write_str(",\n"); + let variants_stripped = e.has_stripped_entries(); + if count_variants == 0 && !variants_stripped { + w.write_str("{}"); + } else { + w.write_str("{\n"); + let toggle = should_hide_fields(count_variants); + if toggle { + toggle_open(w, format_args!("{} variants", count_variants)); + } + for v in e.variants() { + w.write_str(" "); + let name = v.name.unwrap(); + match *v.kind { + // FIXME(#101337): Show discriminant + clean::VariantItem(ref var) => match var.kind { + clean::VariantKind::CLike => write!(w, "{}", name), + clean::VariantKind::Tuple(ref s) => { + write!(w, "{}(", name); + print_tuple_struct_fields(w, cx, s); + w.write_str(")"); + } + clean::VariantKind::Struct(ref s) => { + render_struct(w, v, None, None, &s.fields, " ", false, cx); + } + }, + _ => unreachable!(), } + w.write_str(",\n"); + } - if variants_stripped { - w.write_str(" // some variants omitted\n"); - } - if toggle { - toggle_close(w); - } - w.write_str("}"); + if variants_stripped { + w.write_str(" // some variants omitted\n"); } - }); + if toggle { + toggle_close(w); + } + w.write_str("}"); + } }); document(w, cx, it, None, HeadingOffset::H2); @@ -1266,7 +1240,6 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: w, "
    \ §", - id = id, ); render_stability_since_raw_with_extra( w, @@ -1304,8 +1277,11 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: if let Some((heading, fields)) = heading_and_fields { let variant_id = cx.derive_id(format!("{}.{}.fields", ItemType::Variant, variant.name.unwrap())); - write!(w, "
    ", id = variant_id); - write!(w, "

    {heading}

    ", heading = heading); + write!( + w, + "
    \ +

    {heading}

    ", + ); document_non_exhaustive(w, variant); for field in fields { match *field.kind { @@ -1321,9 +1297,8 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: "
    \ \ §\ - {f}: {t}\ + {f}: {t}\ ", - id = id, f = field.name.unwrap(), t = ty.print(cx) ); @@ -1346,38 +1321,30 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: } fn item_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Macro) { - wrap_into_item_decl(w, |w| { - highlight::render_macro_with_highlighting(&t.source, w); - }); + highlight::render_item_decl_with_highlighting(&t.source, w); document(w, cx, it, None, HeadingOffset::H2) } fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &clean::ProcMacro) { - wrap_into_item_decl(w, |w| { + wrap_item(w, |w| { let name = it.name.expect("proc-macros always have names"); match m.kind { MacroKind::Bang => { - wrap_item(w, |w| { - write!(w, "{}!() {{ /* proc-macro */ }}", name); - }); + write!(w, "{}!() {{ /* proc-macro */ }}", name); } MacroKind::Attr => { - wrap_item(w, |w| { - write!(w, "#[{}]", name); - }); + write!(w, "#[{}]", name); } MacroKind::Derive => { - wrap_item(w, |w| { - write!(w, "#[derive({})]", name); - if !m.helpers.is_empty() { - w.push_str("\n{\n"); - w.push_str(" // Attributes available to this derive:\n"); - for attr in &m.helpers { - writeln!(w, " #[{}]", attr); - } - w.push_str("}\n"); + write!(w, "#[derive({})]", name); + if !m.helpers.is_empty() { + w.push_str("\n{\n"); + w.push_str(" // Attributes available to this derive:\n"); + for attr in &m.helpers { + writeln!(w, " #[{}]", attr); } - }); + w.push_str("}\n"); + } } } }); @@ -1400,61 +1367,57 @@ fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) { } fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - let tcx = cx.tcx(); - render_attributes_in_code(w, it); + wrap_item(w, |w| { + let tcx = cx.tcx(); + render_attributes_in_code(w, it); - write!( - w, - "{vis}const {name}: {typ}", - vis = visibility_print_with_space(it.visibility(tcx), it.item_id, cx), - name = it.name.unwrap(), - typ = c.type_.print(cx), - ); + write!( + w, + "{vis}const {name}: {typ}", + vis = visibility_print_with_space(it.visibility(tcx), it.item_id, cx), + name = it.name.unwrap(), + typ = c.type_.print(cx), + ); - // FIXME: The code below now prints - // ` = _; // 100i32` - // if the expression is - // `50 + 50` - // which looks just wrong. - // Should we print - // ` = 100i32;` - // instead? - - let value = c.value(tcx); - let is_literal = c.is_literal(tcx); - let expr = c.expr(tcx); - if value.is_some() || is_literal { - write!(w, " = {expr};", expr = Escape(&expr)); - } else { - w.write_str(";"); - } + // FIXME: The code below now prints + // ` = _; // 100i32` + // if the expression is + // `50 + 50` + // which looks just wrong. + // Should we print + // ` = 100i32;` + // instead? 
+ + let value = c.value(tcx); + let is_literal = c.is_literal(tcx); + let expr = c.expr(tcx); + if value.is_some() || is_literal { + write!(w, " = {expr};", expr = Escape(&expr)); + } else { + w.write_str(";"); + } - if !is_literal { - if let Some(value) = &value { - let value_lowercase = value.to_lowercase(); - let expr_lowercase = expr.to_lowercase(); + if !is_literal { + if let Some(value) = &value { + let value_lowercase = value.to_lowercase(); + let expr_lowercase = expr.to_lowercase(); - if value_lowercase != expr_lowercase - && value_lowercase.trim_end_matches("i32") != expr_lowercase - { - write!(w, " // {value}", value = Escape(value)); - } + if value_lowercase != expr_lowercase + && value_lowercase.trim_end_matches("i32") != expr_lowercase + { + write!(w, " // {value}", value = Escape(value)); } } - }); + } }); document(w, cx, it, None, HeadingOffset::H2) } fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_code(w, it); - render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx); - }); + wrap_item(w, |w| { + render_attributes_in_code(w, it); + render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1486,11 +1449,9 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean w, "\ §\ - {name}: {ty}\ + {field_name}: {ty}\ ", item_type = ItemType::StructField, - id = id, - name = field_name, ty = ty.print(cx) ); document(w, cx, field, Some(it), HeadingOffset::H3); @@ -1503,34 +1464,30 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean } fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - render_attributes_in_code(w, it); - write!( - w, - "{vis}static {mutability}{name}: {typ}", - vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), - mutability = s.mutability.print_with_space(), - name = it.name.unwrap(), - typ = s.type_.print(cx) - ); - }); + wrap_item(w, |w| { + render_attributes_in_code(w, it); + write!( + w, + "{vis}static {mutability}{name}: {typ}", + vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), + mutability = s.mutability.print_with_space(), + name = it.name.unwrap(), + typ = s.type_.print(cx) + ); }); document(w, cx, it, None, HeadingOffset::H2) } fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) { - wrap_into_item_decl(w, |w| { - wrap_item(w, |w| { - w.write_str("extern {\n"); - render_attributes_in_code(w, it); - write!( - w, - " {}type {};\n}}", - visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), - it.name.unwrap(), - ); - }); + wrap_item(w, |w| { + w.write_str("extern {\n"); + render_attributes_in_code(w, it); + write!( + w, + " {}type {};\n}}", + visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), + it.name.unwrap(), + ); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1609,20 +1566,11 @@ fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool, cx: &Context<'_>) bounds } -fn wrap_into_item_decl(w: &mut Buffer, f: F) -where - F: FnOnce(&mut Buffer), -{ - w.write_str("
    "); - f(w); - w.write_str("
    ") -} - fn wrap_item(w: &mut Buffer, f: F) where F: FnOnce(&mut Buffer), { - w.write_str(r#"
    "#);
    +    w.write_str(r#"
    "#);
         f(w);
         w.write_str("
    "); } @@ -1677,7 +1625,6 @@ fn render_union( it: &clean::Item, g: Option<&clean::Generics>, fields: &[clean::Item], - tab: &str, cx: &Context<'_>, ) { let tcx = cx.tcx(); @@ -1700,7 +1647,7 @@ fn render_union( w.write_str(" "); } - write!(w, "{{\n{}", tab); + write!(w, "{{\n"); let count_fields = fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count(); let toggle = should_hide_fields(count_fields); @@ -1712,17 +1659,16 @@ fn render_union( if let clean::StructFieldItem(ref ty) = *field.kind { write!( w, - " {}{}: {},\n{}", + " {}{}: {},\n", visibility_print_with_space(field.visibility(tcx), field.item_id, cx), field.name.unwrap(), - ty.print(cx), - tab + ty.print(cx) ); } } if it.has_stripped_entries().unwrap() { - write!(w, " /* private fields */\n{}", tab); + write!(w, " /* private fields */\n"); } if toggle { toggle_close(w); @@ -1887,12 +1833,12 @@ fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) { } fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { - fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) { + fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) { if layout.abi.is_unsized() { write!(w, "(unsized)"); } else { - let bytes = layout.size.bytes() - tag_size; - write!(w, "{size} byte{pl}", size = bytes, pl = if bytes == 1 { "" } else { "s" },); + let size = layout.size.bytes() - tag_size; + write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" },); } } @@ -1909,7 +1855,7 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { let tcx = cx.tcx(); let param_env = tcx.param_env(ty_def_id); - let ty = tcx.type_of(ty_def_id); + let ty = tcx.type_of(ty_def_id).subst_identity(); match tcx.layout_of(param_env.and(ty)) { Ok(ty_layout) => { writeln!( @@ -1947,7 +1893,7 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { for (index, layout) in variants.iter_enumerated() { let name = adt.variant(index).name; - write!(w, "
  • {name}: ", name = name); + write!(w, "
  • {name}: "); write_size_of_layout(w, layout, tag_size); writeln!(w, "
  • "); } diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs index 5b0caac09..090ea2cb1 100644 --- a/src/librustdoc/html/render/search_index.rs +++ b/src/librustdoc/html/render/search_index.rs @@ -236,7 +236,16 @@ pub(crate) fn build_index<'tcx>( crate_data.serialize_field("doc", &self.doc)?; crate_data.serialize_field( "t", - &self.items.iter().map(|item| &item.ty).collect::>(), + &self + .items + .iter() + .map(|item| { + let n = item.ty as u8; + let c = char::try_from(n + b'A').expect("item types must fit in ASCII"); + assert!(c <= 'z', "item types must fit within ASCII printables"); + c + }) + .collect::(), )?; crate_data.serialize_field( "n", diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs index bc8badad3..54749e9a3 100644 --- a/src/librustdoc/html/render/write_shared.rs +++ b/src/librustdoc/html/render/write_shared.rs @@ -11,7 +11,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use serde::ser::SerializeSeq; use serde::{Serialize, Serializer}; -use super::{collect_paths_for_type, ensure_trailing_slash, Context, BASIC_KEYWORDS}; +use super::{collect_paths_for_type, ensure_trailing_slash, Context}; use crate::clean::Crate; use crate::config::{EmitType, RenderOptions}; use crate::docfs::PathError; @@ -138,7 +138,7 @@ pub(super) fn write_shared( Ok((ret, krates)) } - /// Read a file and return all lines that match the "{crate}":{data},\ format, + /// Read a file and return all lines that match the "{crate}":{data},\ format, /// and return a tuple `(Vec, Vec)`. /// /// This forms the payload of files that look like this: @@ -340,7 +340,6 @@ if (typeof exports !== 'undefined') {exports.searchIndex = searchIndex}; root_path: "./", static_root_path: shared.static_root_path.as_deref(), description: "List of crates", - keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, }; diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs index 799c497d1..2c90bf4fa 100644 --- a/src/librustdoc/html/sources.rs +++ b/src/librustdoc/html/sources.rs @@ -4,7 +4,7 @@ use crate::error::Error; use crate::html::format::Buffer; use crate::html::highlight; use crate::html::layout; -use crate::html::render::{Context, BASIC_KEYWORDS}; +use crate::html::render::Context; use crate::visit::DocVisitor; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -228,7 +228,6 @@ impl SourceCollector<'_, '_> { root_path: &root_path, static_root_path: shared.static_root_path.as_deref(), description: &desc, - keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, }; let v = layout::render( diff --git a/src/librustdoc/html/static/.eslintrc.js b/src/librustdoc/html/static/.eslintrc.js index fcd925bb3..1a34530c2 100644 --- a/src/librustdoc/html/static/.eslintrc.js +++ b/src/librustdoc/html/static/.eslintrc.js @@ -90,7 +90,6 @@ module.exports = { "no-return-assign": "error", "no-script-url": "error", "no-sequences": "error", - "no-throw-literal": "error", "no-div-regex": "error", } }; diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index a93f60da2..95528e70e 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -1,3 +1,11 @@ +/* When static files are updated, their suffixes need to be updated. + 1. In the top directory run: + ./x.py doc --stage 1 library/core + 2. 
Find the directory containing files named with updated suffixes: + find build -path '*'/stage1-std/'*'/static.files + 3. Copy the filenames with updated suffixes from the directory. +*/ + /* See FiraSans-LICENSE.txt for the Fira Sans license. */ @font-face { font-family: 'Fira Sans'; @@ -22,7 +30,7 @@ font-style: normal; font-weight: 400; src: local('Source Serif 4'), - url("SourceSerif4-Regular-1f7d512b176f0f72.ttf.woff2") format("woff2"); + url("SourceSerif4-Regular-46f98efaafac5295.ttf.woff2") format("woff2"); font-display: swap; } @font-face { @@ -30,7 +38,7 @@ font-style: italic; font-weight: 400; src: local('Source Serif 4 Italic'), - url("SourceSerif4-It-d034fe4ef9d0fa00.ttf.woff2") format("woff2"); + url("SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2") format("woff2"); font-display: swap; } @font-face { @@ -38,7 +46,7 @@ font-style: normal; font-weight: 700; src: local('Source Serif 4 Bold'), - url("SourceSerif4-Bold-124a1ca42af929b6.ttf.woff2") format("woff2"); + url("SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2") format("woff2"); font-display: swap; } @@ -166,6 +174,14 @@ h1, h2, h3, h4 { .top-doc .docblock > h4 { border-bottom: 1px solid var(--headings-border-bottom-color); } +/* while line-height 1.5 is required for any "block of text", + which WCAG defines as more than one sentence, it looks weird for + very large main headers */ +h1, h2 { + line-height: 1.25; + padding-top: 3px; + padding-bottom: 9px; +} h3.code-header { font-size: 1.125rem; /* 18px */ } @@ -176,6 +192,7 @@ h4.code-header { font-weight: 600; margin: 0; padding: 0; + white-space: pre-wrap; } #crate-search, @@ -184,7 +201,7 @@ h1, h2, h3, h4, h5, h6, .mobile-topbar, .search-input, .search-results .result-name, -.item-left > a, +.item-name > a, .out-of-band, span.since, a.srclink, @@ -335,7 +352,7 @@ pre { padding: 14px; line-height: 1.5; /* https://github.com/rust-lang/rust/issues/105906 */ } -.item-decl pre { +pre.item-decl { overflow-x: auto; } /* This rule allows to have scrolling on the X axis. 
*/ @@ -533,7 +550,7 @@ ul.block, .block li { .rustdoc .example-wrap > pre { margin: 0; flex-grow: 1; - overflow-x: auto; + overflow: auto hidden; } .rustdoc .example-wrap > pre.example-line-numbers, @@ -634,6 +651,7 @@ pre, .rustdoc.source .example-wrap { .fn .where, .where.fmt-newline { display: block; + white-space: pre-wrap; font-size: 0.875rem; } @@ -697,8 +715,8 @@ h2.small-section-header > .anchor { .main-heading a:hover, .example-wrap > pre.rust a:hover, .all-items a:hover, -.docblock a:not(.test-arrow):not(.scrape-help):hover, -.docblock-short a:not(.test-arrow):not(.scrape-help):hover, +.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, +.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, .item-info a { text-decoration: underline; } @@ -732,14 +750,16 @@ table, .item-table { display: table; + padding: 0; + margin: 0; } -.item-row { +.item-table > li { display: table-row; } -.item-left, .item-right { +.item-table > li > div { display: table-cell; } -.item-left { +.item-table > li > .item-name { padding-right: 1.25rem; } @@ -806,8 +826,11 @@ so that we can apply CSS-filters to change the arrow color in themes */ background-repeat: no-repeat; background-size: 20px; background-position: calc(100% - 2px) 56%; - /* image is black color */ - background-image: url("down-arrow-927217e04c7463ac.svg"); + /* down arrow (image is black color) */ + background-image: url('data:image/svg+xml, \ + '); /* changes the arrow image color */ filter: var(--crate-search-div-filter); } @@ -941,7 +964,7 @@ so that we can apply CSS-filters to change the arrow color in themes */ padding: 3px; margin-bottom: 5px; } -.item-left .stab { +.item-name .stab { margin-left: 0.3125em; } .stab { @@ -977,11 +1000,6 @@ so that we can apply CSS-filters to change the arrow color in themes */ 0 -1px 0 black; } -.module-item.unstable, -.import-item.unstable { - opacity: 0.65; -} - .since { font-weight: normal; font-size: initial; @@ -1093,44 +1111,8 @@ pre.rust .doccomment { display: block; left: -25px; top: 5px; -} - -.example-wrap .tooltip:hover::after { - padding: 5px 3px 3px 3px; - border-radius: 6px; - margin-left: 5px; - font-size: 1rem; - border: 1px solid var(--border-color); - position: absolute; - width: max-content; - top: -2px; - z-index: 1; - background-color: var(--tooltip-background-color); - color: var(--tooltip-color); -} - -.example-wrap .tooltip:hover::before { - content: " "; - position: absolute; - top: 50%; - left: 16px; - margin-top: -5px; - z-index: 1; - border: 5px solid transparent; - border-right-color: var(--tooltip-background-color); -} - -.example-wrap.ignore .tooltip:hover::after { - content: "This example is not tested"; -} -.example-wrap.compile_fail .tooltip:hover::after { - content: "This example deliberately fails to compile"; -} -.example-wrap.should_panic .tooltip:hover::after { - content: "This example panics"; -} -.example-wrap.edition .tooltip:hover::after { - content: "This code runs with edition " attr(data-edition); + margin: 0; + line-height: 1; } .example-wrap.compile_fail .tooltip, @@ -1168,6 +1150,7 @@ a.test-arrow:hover { .item-spacer { width: 100%; height: 12px; + display: block; } .out-of-band > span.since { @@ -1196,7 +1179,7 @@ a.test-arrow:hover { border-right: 3px solid var(--target-border-color); } -.notable-traits { +.code-header a.tooltip { color: inherit; margin-right: 15px; position: relative; @@ -1205,7 +1188,7 @@ a.test-arrow:hover { /* placeholder thunk so that the mouse can easily travel from "(i)" to popover the resulting 
"hover tunnel" is a stepped triangle, approximating https://bjk5.com/post/44698559168/breaking-down-amazons-mega-dropdown */ -.notable-traits:hover::after { +a.tooltip:hover::after { position: absolute; top: calc(100% - 10px); left: -15px; @@ -1214,11 +1197,11 @@ a.test-arrow:hover { content: "\00a0"; } -.notable .content { +.popover.tooltip .content { margin: 0.25em 0.5em; } -.notable .content pre, .notable .content code { +.popover.tooltip .content pre, .popover.tooltip .content code { background: transparent; margin: 0; padding: 0; @@ -1226,7 +1209,7 @@ a.test-arrow:hover { white-space: pre-wrap; } -.notable .content > h3:first-child { +.popover.tooltip .content > h3:first-child { margin: 0 0 5px 0; } @@ -1263,12 +1246,25 @@ a.test-arrow:hover { line-height: 1.5; color: inherit; } +#search-tabs button:not(.selected) { + background-color: var(--search-tab-button-not-selected-background); + border-top-color: var(--search-tab-button-not-selected-border-top-color); +} +#search-tabs button:hover, #search-tabs button.selected { + background-color: var(--search-tab-button-selected-background); + border-top-color: var(--search-tab-button-selected-border-top-color); +} #search-tabs .count { font-size: 1rem; color: var(--search-tab-title-count-color); } +#search .error code { + border-radius: 3px; + background-color: var(--search-error-code-background-color); +} + #src-sidebar-toggle { position: sticky; top: 0; @@ -1433,7 +1429,10 @@ details.toggle > summary.hideme > span { } details.toggle > summary::before { - background: url("toggle-plus-1092eb4930d581b0.svg") no-repeat top left; + /* toggle plus */ + background: url('data:image/svg+xml,') no-repeat top left; content: ""; cursor: pointer; width: 16px; @@ -1511,7 +1510,10 @@ details.toggle[open] > summary.hideme > span { } details.toggle[open] > summary::before { - background: url("toggle-minus-31bbd6e4c77f5c96.svg") no-repeat top left; + /* toggle minus */ + background: url('data:image/svg+xml,') no-repeat top left; } details.toggle[open] > summary::after { @@ -1700,7 +1702,7 @@ in storage.js } /* Display an alternating layout on tablets and phones */ - .item-table, .item-row, .item-left, .item-right, + .item-table, .item-row, .item-table > li, .item-table > li > div, .search-results > a, .search-results > a > div { display: block; } @@ -1709,7 +1711,7 @@ in storage.js .search-results > a { padding: 5px 0px; } - .search-results > a > div.desc, .item-right { + .search-results > a > div.desc, .item-table > li > div.desc { padding-left: 2em; } @@ -1897,19 +1899,25 @@ in storage.js right: 0.25em; } -.scraped-example:not(.expanded) .code-wrapper:before, -.scraped-example:not(.expanded) .code-wrapper:after { +.scraped-example:not(.expanded) .code-wrapper::before, +.scraped-example:not(.expanded) .code-wrapper::after { content: " "; width: 100%; height: 5px; position: absolute; z-index: 1; } -.scraped-example:not(.expanded) .code-wrapper:before { +.scraped-example:not(.expanded) .code-wrapper::before { top: 0; + background: linear-gradient(to bottom, + var(--scrape-example-code-wrapper-background-start), + var(--scrape-example-code-wrapper-background-end)); } -.scraped-example:not(.expanded) .code-wrapper:after { +.scraped-example:not(.expanded) .code-wrapper::after { bottom: 0; + background: linear-gradient(to top, + var(--scrape-example-code-wrapper-background-start), + var(--scrape-example-code-wrapper-background-end)); } .scraped-example .code-wrapper .example-wrap { diff --git a/src/librustdoc/html/static/css/settings.css 
b/src/librustdoc/html/static/css/settings.css index 4e9803fe2..920f45c4b 100644 --- a/src/librustdoc/html/static/css/settings.css +++ b/src/librustdoc/html/static/css/settings.css @@ -3,8 +3,7 @@ position: relative; } -.setting-line .radio-line input, -.setting-line .settings-toggle input { +.setting-radio input, .setting-check input { margin-right: 0.3em; height: 1.2rem; width: 1.2rem; @@ -14,21 +13,20 @@ -webkit-appearance: none; cursor: pointer; } -.setting-line .radio-line input { +.setting-radio input { border-radius: 50%; } -.setting-line .settings-toggle input:checked { +.setting-check input:checked { content: url('data:image/svg+xml,\ \ '); } -.setting-line .radio-line input + span, -.setting-line .settings-toggle span { +.setting-radio span, .setting-check span { padding-bottom: 1px; } -.radio-line .choice { +.setting-radio { margin-top: 0.1em; margin-bottom: 0.1em; min-width: 3.8em; @@ -37,36 +35,32 @@ align-items: center; cursor: pointer; } -.radio-line .choice + .choice { +.setting-radio + .setting-radio { margin-left: 0.5em; } -.settings-toggle { - position: relative; - width: 100%; +.setting-check { margin-right: 20px; display: flex; align-items: center; cursor: pointer; } -.setting-line .radio-line input:checked { +.setting-radio input:checked { box-shadow: inset 0 0 0 3px var(--main-background-color); background-color: var(--settings-input-color); } -.setting-line .settings-toggle input:checked { +.setting-check input:checked { background-color: var(--settings-input-color); } -.setting-line .radio-line input:focus, -.setting-line .settings-toggle input:focus { +.setting-radio input:focus, .setting-check input:focus { box-shadow: 0 0 1px 1px var(--settings-input-color); } /* In here we combine both `:focus` and `:checked` properties. 
*/ -.setting-line .radio-line input:checked:focus { +.setting-radio input:checked:focus { box-shadow: inset 0 0 0 3px var(--main-background-color), 0 0 2px 2px var(--settings-input-color); } -.setting-line .radio-line input:hover, -.setting-line .settings-toggle input:hover { +.setting-radio input:hover, .setting-check input:hover { border-color: var(--settings-input-color) !important; } diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css index 979e7e0f9..90cf689ad 100644 --- a/src/librustdoc/html/static/css/themes/ayu.css +++ b/src/librustdoc/html/static/css/themes/ayu.css @@ -43,9 +43,14 @@ Original by Dempfi (https://github.com/dempfi/ayu) --search-result-link-focus-background-color: #3c3c3c; --search-result-border-color: #aaa3; --search-color: #fff; + --search-error-code-background-color: #4f4c4c; --search-results-alias-color: #c5c5c5; --search-results-grey-color: #999; --search-tab-title-count-color: #888; + --search-tab-button-not-selected-border-top-color: none; + --search-tab-button-not-selected-background: transparent !important; + --search-tab-button-selected-border-top-color: none; + --search-tab-button-selected-background: #141920 !important; --stab-background-color: #314559; --stab-code-color: #e6e1cf; --code-highlight-kw-color: #ff7733; @@ -70,8 +75,6 @@ Original by Dempfi (https://github.com/dempfi/ayu) --test-arrow-hover-background-color: rgba(57, 175, 215, 0.368); --target-background-color: rgba(255, 236, 164, 0.06); --target-border-color: rgba(255, 180, 76, 0.85); - --tooltip-background-color: #314559; - --tooltip-color: #c5c5c5; --kbd-color: #c5c5c5; --kbd-background: #314559; --kbd-box-shadow-color: #5c6773; @@ -97,12 +100,13 @@ Original by Dempfi (https://github.com/dempfi/ayu) --scrape-example-help-color: #eee; --scrape-example-help-hover-border-color: #fff; --scrape-example-help-hover-color: #fff; + --scrape-example-code-wrapper-background-start: rgba(15, 20, 25, 1); + --scrape-example-code-wrapper-background-end: rgba(15, 20, 25, 0); } -h1, h2, h3, h4 { - color: white; -} -h1 a { +h1, h2, h3, h4, +h1 a, .sidebar h2 a, .sidebar h3 a, +#source-sidebar > .title { color: #fff; } h4 { @@ -112,24 +116,22 @@ h4 { .docblock code { color: #ffb454; } -.code-header { - color: #e6e1cf; -} -.docblock pre > code, pre > code { - color: #e6e1cf; -} -.item-info code { - color: #e6e1cf; -} .docblock a > code { color: #39AFD7 !important; } -pre, .rustdoc.source .example-wrap { +.code-header, +.docblock pre > code, +pre, pre > code, +.item-info code, +.rustdoc.source .example-wrap { color: #e6e1cf; } .sidebar .current, -.sidebar a:hover { +.sidebar a:hover, +#source-sidebar div.files > a:hover, details.dir-entry summary:hover, +#source-sidebar div.files > a:focus, details.dir-entry summary:focus, +#source-sidebar div.files > a.selected { color: #ffb44c; } @@ -143,15 +145,12 @@ pre, .rustdoc.source .example-wrap { border-right: 1px solid #ffb44c; } -.search-results a:hover { - color: #fff !important; - background-color: #3c3c3c; -} - +.search-results a:hover, .search-results a:focus { color: #fff !important; background-color: #3c3c3c; } + .search-results a { color: #0096cf; } @@ -159,54 +158,22 @@ pre, .rustdoc.source .example-wrap { color: #c5c5c5; } -.sidebar h2 a, -.sidebar h3 a { - color: white; -} - .result-name .primitive > i, .result-name .keyword > i { color: #788797; } #search-tabs > button.selected { - background-color: #141920 !important; border-bottom: 1px solid #ffb44c !important; border-top: none; } - #search-tabs > 
button:not(.selected) { - background-color: transparent !important; border: none; + background-color: transparent !important; } - #search-tabs > button:hover { border-bottom: 1px solid rgba(242, 151, 24, 0.3); } -/* rules that this theme does not need to set, here to satisfy the rule checker */ -/* note that a lot of these are partially set in some way (meaning they are set -individually rather than as a group) */ -/* FIXME: these rules should be at the bottom of the file but currently must be -above the `@media (max-width: 700px)` rules due to a bug in the css checker */ -/* see https://github.com/rust-lang/rust/pull/71237#issuecomment-618170143 */ -#search-tabs > button:hover, #search-tabs > button.selected {} - #settings-menu > a img { filter: invert(100); } - -#source-sidebar > .title { - color: #fff; -} -#source-sidebar div.files > a:hover, details.dir-entry summary:hover, -#source-sidebar div.files > a:focus, details.dir-entry summary:focus, -#source-sidebar div.files > a.selected { - color: #ffb44c; -} - -.scraped-example:not(.expanded) .code-wrapper::before { - background: linear-gradient(to bottom, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0)); -} -.scraped-example:not(.expanded) .code-wrapper::after { - background: linear-gradient(to top, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0)); -} diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css index fb15863b0..e8cd06931 100644 --- a/src/librustdoc/html/static/css/themes/dark.css +++ b/src/librustdoc/html/static/css/themes/dark.css @@ -38,9 +38,14 @@ --search-result-link-focus-background-color: #616161; --search-result-border-color: #aaa3; --search-color: #111; + --search-error-code-background-color: #484848; --search-results-alias-color: #fff; --search-results-grey-color: #ccc; --search-tab-title-count-color: #888; + --search-tab-button-not-selected-border-top-color: #252525; + --search-tab-button-not-selected-background: #252525; + --search-tab-button-selected-border-top-color: #0089ff; + --search-tab-button-selected-background: #353535; --stab-background-color: #314559; --stab-code-color: #e6e1cf; --code-highlight-kw-color: #ab8ac1; @@ -65,8 +70,6 @@ --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; - --tooltip-background-color: #000; - --tooltip-color: #fff; --kbd-color: #000; --kbd-background: #fafbfc; --kbd-box-shadow-color: #c6cbd1; @@ -92,21 +95,6 @@ --scrape-example-help-color: #eee; --scrape-example-help-hover-border-color: #fff; --scrape-example-help-hover-color: #fff; -} - -#search-tabs > button:not(.selected) { - background-color: #252525; - border-top-color: #252525; -} - -#search-tabs > button:hover, #search-tabs > button.selected { - border-top-color: #0089ff; - background-color: #353535; -} - -.scraped-example:not(.expanded) .code-wrapper::before { - background: linear-gradient(to bottom, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0)); -} -.scraped-example:not(.expanded) .code-wrapper::after { - background: linear-gradient(to top, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0)); + --scrape-example-code-wrapper-background-start: rgba(53, 53, 53, 1); + --scrape-example-code-wrapper-background-end: rgba(53, 53, 53, 0); } diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css index 053fa78d1..5e3f14e48 100644 --- a/src/librustdoc/html/static/css/themes/light.css +++ b/src/librustdoc/html/static/css/themes/light.css @@ -38,9 +38,14 @@ 
--search-result-link-focus-background-color: #ccc; --search-result-border-color: #aaa3; --search-color: #000; + --search-error-code-background-color: #d0cccc; --search-results-alias-color: #000; --search-results-grey-color: #999; --search-tab-title-count-color: #888; + --search-tab-button-not-selected-border-top-color: #e6e6e6; + --search-tab-button-not-selected-background: #e6e6e6; + --search-tab-button-selected-border-top-color: #0089ff; + --search-tab-button-selected-background: #ffffff; --stab-background-color: #fff5d6; --stab-code-color: #000; --code-highlight-kw-color: #8959a8; @@ -65,8 +70,6 @@ --test-arrow-hover-background-color: #4e8bca; --target-background-color: #fdffd3; --target-border-color: #ad7c37; - --tooltip-background-color: #000; - --tooltip-color: #fff; --kbd-color: #000; --kbd-background: #fafbfc; --kbd-box-shadow-color: #c6cbd1; @@ -89,21 +92,6 @@ --scrape-example-help-color: #333; --scrape-example-help-hover-border-color: #000; --scrape-example-help-hover-color: #000; -} - -#search-tabs > button:not(.selected) { - background-color: #e6e6e6; - border-top-color: #e6e6e6; -} - -#search-tabs > button:hover, #search-tabs > button.selected { - background-color: #ffffff; - border-top-color: #0089ff; -} - -.scraped-example:not(.expanded) .code-wrapper::before { - background: linear-gradient(to bottom, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0)); -} -.scraped-example:not(.expanded) .code-wrapper::after { - background: linear-gradient(to top, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0)); + --scrape-example-code-wrapper-background-start: rgba(255, 255, 255, 1); + --scrape-example-code-wrapper-background-end: rgba(255, 255, 255, 0); } diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 index db57d2145..181a07f63 100644 Binary files a/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 and b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 differ diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 index 1cbc021a3..2ae08a7be 100644 Binary files a/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 and b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 differ diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md index 68ea18924..5871e1f3d 100644 --- a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md +++ b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md @@ -1,4 +1,4 @@ -Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries. +Copyright 2014 - 2023 Adobe (http://www.adobe.com/), with Reserved Font Name ‘Source’. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries. This Font Software is licensed under the SIL Open Font License, Version 1.1. 
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 index 2db73fe2b..0263fc304 100644 Binary files a/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 and b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 differ diff --git a/src/librustdoc/html/static/images/down-arrow.svg b/src/librustdoc/html/static/images/down-arrow.svg deleted file mode 100644 index 5d76a64e9..000000000 --- a/src/librustdoc/html/static/images/down-arrow.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/src/librustdoc/html/static/images/toggle-minus.svg b/src/librustdoc/html/static/images/toggle-minus.svg deleted file mode 100644 index 73154788a..000000000 --- a/src/librustdoc/html/static/images/toggle-minus.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/src/librustdoc/html/static/images/toggle-plus.svg b/src/librustdoc/html/static/images/toggle-plus.svg deleted file mode 100644 index 08b17033e..000000000 --- a/src/librustdoc/html/static/images/toggle-plus.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/src/librustdoc/html/static/images/wheel.svg b/src/librustdoc/html/static/images/wheel.svg index 01da3b24c..83c07f63d 100644 --- a/src/librustdoc/html/static/images/wheel.svg +++ b/src/librustdoc/html/static/images/wheel.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 604ab147f..5e8c0e8d1 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -180,7 +180,6 @@ function browserSupportsHistoryApi() { return window.history && typeof window.history.pushState === "function"; } -// eslint-disable-next-line no-unused-vars function loadCss(cssUrl) { const link = document.createElement("link"); link.href = cssUrl; @@ -379,7 +378,7 @@ function loadCss(cssUrl) { } ev.preventDefault(); searchState.defocus(); - window.hideAllModals(true); // true = reset focus for notable traits + window.hideAllModals(true); // true = reset focus for tooltips } function handleShortcut(ev) { @@ -456,10 +455,7 @@ function loadCss(cssUrl) { const ul = document.createElement("ul"); ul.className = "block " + shortty; - for (const item of filtered) { - const name = item[0]; - const desc = item[1]; // can be null - + for (const name of filtered) { let path; if (shortty === "mod") { path = name + "/index.html"; @@ -469,7 +465,6 @@ function loadCss(cssUrl) { const current_page = document.location.href.split("/").pop(); const link = document.createElement("a"); link.href = path; - link.title = desc; if (path === current_page) { link.className = "current"; } @@ -789,17 +784,17 @@ function loadCss(cssUrl) { // we need to switch away from mobile mode and make the main content area scrollable. hideSidebar(); } - if (window.CURRENT_NOTABLE_ELEMENT) { - // As a workaround to the behavior of `contains: layout` used in doc togglers, the - // notable traits popup is positioned using javascript. + if (window.CURRENT_TOOLTIP_ELEMENT) { + // As a workaround to the behavior of `contains: layout` used in doc togglers, + // tooltip popovers are positioned using javascript. // // This means when the window is resized, we need to redo the layout. 
- const base = window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE; - const force_visible = base.NOTABLE_FORCE_VISIBLE; - hideNotable(false); + const base = window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE; + const force_visible = base.TOOLTIP_FORCE_VISIBLE; + hideTooltip(false); if (force_visible) { - showNotable(base); - base.NOTABLE_FORCE_VISIBLE = true; + showTooltip(base); + base.TOOLTIP_FORCE_VISIBLE = true; } } }); @@ -827,27 +822,35 @@ function loadCss(cssUrl) { }); }); - function showNotable(e) { - if (!window.NOTABLE_TRAITS) { + function showTooltip(e) { + const notable_ty = e.getAttribute("data-notable-ty"); + if (!window.NOTABLE_TRAITS && notable_ty) { const data = document.getElementById("notable-traits-data"); if (data) { window.NOTABLE_TRAITS = JSON.parse(data.innerText); } else { - throw new Error("showNotable() called on page without any notable traits!"); + throw new Error("showTooltip() called with notable without any notable traits!"); } } - if (window.CURRENT_NOTABLE_ELEMENT && window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE === e) { + if (window.CURRENT_TOOLTIP_ELEMENT && window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE === e) { // Make this function idempotent. return; } window.hideAllModals(false); - const ty = e.getAttribute("data-ty"); const wrapper = document.createElement("div"); - wrapper.innerHTML = "
    " + window.NOTABLE_TRAITS[ty] + "
    "; - wrapper.className = "notable popover"; + if (notable_ty) { + wrapper.innerHTML = "
    " + + window.NOTABLE_TRAITS[notable_ty] + "
    "; + } else if (e.getAttribute("title") !== undefined) { + const titleContent = document.createElement("div"); + titleContent.className = "content"; + titleContent.appendChild(document.createTextNode(e.getAttribute("title"))); + wrapper.appendChild(titleContent); + } + wrapper.className = "tooltip popover"; const focusCatcher = document.createElement("div"); focusCatcher.setAttribute("tabindex", "0"); - focusCatcher.onfocus = hideNotable; + focusCatcher.onfocus = hideTooltip; wrapper.appendChild(focusCatcher); const pos = e.getBoundingClientRect(); // 5px overlap so that the mouse can easily travel from place to place @@ -869,62 +872,62 @@ function loadCss(cssUrl) { ); } wrapper.style.visibility = ""; - window.CURRENT_NOTABLE_ELEMENT = wrapper; - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE = e; + window.CURRENT_TOOLTIP_ELEMENT = wrapper; + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE = e; wrapper.onpointerleave = function(ev) { // If this is a synthetic touch event, ignore it. A click event will be along shortly. if (ev.pointerType !== "mouse") { return; } - if (!e.NOTABLE_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) { - hideNotable(true); + if (!e.TOOLTIP_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) { + hideTooltip(true); } }; } - function notableBlurHandler(event) { - if (window.CURRENT_NOTABLE_ELEMENT && - !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT) && - !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT) && - !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE) && - !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE) + function tooltipBlurHandler(event) { + if (window.CURRENT_TOOLTIP_ELEMENT && + !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT) && + !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT) && + !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) && + !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) ) { // Work around a difference in the focus behaviour between Firefox, Chrome, and Safari. - // When I click the button on an already-opened notable trait popover, Safari + // When I click the button on an already-opened tooltip popover, Safari // hides the popover and then immediately shows it again, while everyone else hides it // and it stays hidden. // // To work around this, make sure the click finishes being dispatched before - // hiding the popover. Since `hideNotable()` is idempotent, this makes Safari behave + // hiding the popover. Since `hideTooltip()` is idempotent, this makes Safari behave // consistently with the other two. 
- setTimeout(() => hideNotable(false), 0); + setTimeout(() => hideTooltip(false), 0); } } - function hideNotable(focus) { - if (window.CURRENT_NOTABLE_ELEMENT) { - if (window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE) { + function hideTooltip(focus) { + if (window.CURRENT_TOOLTIP_ELEMENT) { + if (window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE) { if (focus) { - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.focus(); + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.focus(); } - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE = false; + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE = false; } const body = document.getElementsByTagName("body")[0]; - body.removeChild(window.CURRENT_NOTABLE_ELEMENT); - window.CURRENT_NOTABLE_ELEMENT = null; + body.removeChild(window.CURRENT_TOOLTIP_ELEMENT); + window.CURRENT_TOOLTIP_ELEMENT = null; } } - onEachLazy(document.getElementsByClassName("notable-traits"), e => { + onEachLazy(document.getElementsByClassName("tooltip"), e => { e.onclick = function() { - this.NOTABLE_FORCE_VISIBLE = this.NOTABLE_FORCE_VISIBLE ? false : true; - if (window.CURRENT_NOTABLE_ELEMENT && !this.NOTABLE_FORCE_VISIBLE) { - hideNotable(true); + this.TOOLTIP_FORCE_VISIBLE = this.TOOLTIP_FORCE_VISIBLE ? false : true; + if (window.CURRENT_TOOLTIP_ELEMENT && !this.TOOLTIP_FORCE_VISIBLE) { + hideTooltip(true); } else { - showNotable(this); - window.CURRENT_NOTABLE_ELEMENT.setAttribute("tabindex", "0"); - window.CURRENT_NOTABLE_ELEMENT.focus(); - window.CURRENT_NOTABLE_ELEMENT.onblur = notableBlurHandler; + showTooltip(this); + window.CURRENT_TOOLTIP_ELEMENT.setAttribute("tabindex", "0"); + window.CURRENT_TOOLTIP_ELEMENT.focus(); + window.CURRENT_TOOLTIP_ELEMENT.onblur = tooltipBlurHandler; } return false; }; @@ -933,16 +936,16 @@ function loadCss(cssUrl) { if (ev.pointerType !== "mouse") { return; } - showNotable(this); + showTooltip(this); }; e.onpointerleave = function(ev) { // If this is a synthetic touch event, ignore it. A click event will be along shortly. if (ev.pointerType !== "mouse") { return; } - if (!this.NOTABLE_FORCE_VISIBLE && - !elemIsInParent(ev.relatedTarget, window.CURRENT_NOTABLE_ELEMENT)) { - hideNotable(true); + if (!this.TOOLTIP_FORCE_VISIBLE && + !elemIsInParent(ev.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT)) { + hideTooltip(true); } }; }); @@ -1044,14 +1047,14 @@ function loadCss(cssUrl) { } /** - * Hide popover menus, notable trait tooltips, and the sidebar (if applicable). + * Hide popover menus, clickable tooltips, and the sidebar (if applicable). * - * Pass "true" to reset focus for notable traits. + * Pass "true" to reset focus for tooltip popovers. 
*/ window.hideAllModals = function(switchFocus) { hideSidebar(); window.hidePopoverMenus(); - hideNotable(switchFocus); + hideTooltip(switchFocus); }; /** @@ -1142,7 +1145,11 @@ function loadCss(cssUrl) { (function() { let reset_button_timeout = null; - window.copy_path = but => { + const but = document.getElementById("copy-path"); + if (!but) { + return; + } + but.onclick = () => { const parent = but.parentElement; const path = []; diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index 88592fa0c..b98bced41 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -112,7 +112,6 @@ function levenshtein(s1, s2) { } function initSearch(rawSearchIndex) { - const MAX_LEV_DISTANCE = 3; const MAX_RESULTS = 200; const NO_TYPE_FILTER = -1; /** @@ -143,13 +142,11 @@ function initSearch(rawSearchIndex) { } function itemTypeFromName(typename) { - for (let i = 0, len = itemTypes.length; i < len; ++i) { - if (itemTypes[i] === typename) { - return i; - } + const index = itemTypes.findIndex(i => i === typename); + if (index < 0) { + throw ["Unknown type filter ", typename]; } - - throw new Error("Unknown type filter `" + typename + "`"); + return index; } /** @@ -167,21 +164,21 @@ function initSearch(rawSearchIndex) { */ function getStringElem(query, parserState, isInGenerics) { if (isInGenerics) { - throw new Error("`\"` cannot be used in generics"); + throw ["Unexpected ", "\"", " in generics"]; } else if (query.literalSearch) { - throw new Error("Cannot have more than one literal search element"); + throw ["Cannot have more than one literal search element"]; } else if (parserState.totalElems - parserState.genericsElems > 0) { - throw new Error("Cannot use literal search when there is more than one element"); + throw ["Cannot use literal search when there is more than one element"]; } parserState.pos += 1; const start = parserState.pos; const end = getIdentEndPosition(parserState); if (parserState.pos >= parserState.length) { - throw new Error("Unclosed `\"`"); + throw ["Unclosed ", "\""]; } else if (parserState.userQuery[end] !== "\"") { - throw new Error(`Unexpected \`${parserState.userQuery[end]}\` in a string element`); + throw ["Unexpected ", parserState.userQuery[end], " in a string element"]; } else if (start === end) { - throw new Error("Cannot have empty string element"); + throw ["Cannot have empty string element"]; } // To skip the quote at the end. parserState.pos += 1; @@ -260,7 +257,7 @@ function initSearch(rawSearchIndex) { return; } if (query.literalSearch && parserState.totalElems - parserState.genericsElems > 0) { - throw new Error("You cannot have more than one element if you use quotes"); + throw ["You cannot have more than one element if you use quotes"]; } const pathSegments = name.split("::"); if (pathSegments.length > 1) { @@ -269,17 +266,17 @@ function initSearch(rawSearchIndex) { if (pathSegment.length === 0) { if (i === 0) { - throw new Error("Paths cannot start with `::`"); + throw ["Paths cannot start with ", "::"]; } else if (i + 1 === len) { - throw new Error("Paths cannot end with `::`"); + throw ["Paths cannot end with ", "::"]; } - throw new Error("Unexpected `::::`"); + throw ["Unexpected ", "::::"]; } } } // In case we only have something like `
<p>
    `, there is no name. if (pathSegments.length === 0 || (pathSegments.length === 1 && pathSegments[0] === "")) { - throw new Error("Found generics without a path"); + throw ["Found generics without a path"]; } parserState.totalElems += 1; if (isInGenerics) { @@ -303,22 +300,23 @@ function initSearch(rawSearchIndex) { * @return {integer} */ function getIdentEndPosition(parserState) { + const start = parserState.pos; let end = parserState.pos; - let foundExclamation = false; + let foundExclamation = -1; while (parserState.pos < parserState.length) { const c = parserState.userQuery[parserState.pos]; if (!isIdentCharacter(c)) { if (c === "!") { - if (foundExclamation) { - throw new Error("Cannot have more than one `!` in an ident"); + if (foundExclamation !== -1) { + throw ["Cannot have more than one ", "!", " in an ident"]; } else if (parserState.pos + 1 < parserState.length && isIdentCharacter(parserState.userQuery[parserState.pos + 1]) ) { - throw new Error("`!` can only be at the end of an ident"); + throw ["Unexpected ", "!", ": it can only be at the end of an ident"]; } - foundExclamation = true; + foundExclamation = parserState.pos; } else if (isErrorCharacter(c)) { - throw new Error(`Unexpected \`${c}\``); + throw ["Unexpected ", c]; } else if ( isStopCharacter(c) || isSpecialStartCharacter(c) || @@ -329,16 +327,40 @@ function initSearch(rawSearchIndex) { if (!isPathStart(parserState)) { break; } + if (foundExclamation !== -1) { + if (start <= (end - 2)) { + throw ["Cannot have associated items in macros"]; + } else { + // if start == end - 1, we got the never type + // while the never type has no associated macros, we still + // can parse a path like that + foundExclamation = -1; + } + } // Skip current ":". parserState.pos += 1; - foundExclamation = false; } else { - throw new Error(`Unexpected \`${c}\``); + throw ["Unexpected ", c]; } } parserState.pos += 1; end = parserState.pos; } + // if start == end - 1, we got the never type + if (foundExclamation !== -1 && start <= (end - 2)) { + if (parserState.typeFilter === null) { + parserState.typeFilter = "macro"; + } else if (parserState.typeFilter !== "macro") { + throw [ + "Invalid search type: macro ", + "!", + " and ", + parserState.typeFilter, + " both specified", + ]; + } + end = foundExclamation; + } return end; } @@ -365,9 +387,9 @@ function initSearch(rawSearchIndex) { parserState.userQuery[parserState.pos] === "<" ) { if (isInGenerics) { - throw new Error("Unexpected `<` after `<`"); + throw ["Unexpected ", "<", " after ", "<"]; } else if (start >= end) { - throw new Error("Found generics without a path"); + throw ["Found generics without a path"]; } parserState.pos += 1; getItemsBefore(query, parserState, generics, ">"); @@ -411,24 +433,51 @@ function initSearch(rawSearchIndex) { foundStopChar = true; continue; } else if (c === ":" && isPathStart(parserState)) { - throw new Error("Unexpected `::`: paths cannot start with `::`"); + throw ["Unexpected ", "::", ": paths cannot start with ", "::"]; } else if (c === ":" || isEndCharacter(c)) { let extra = ""; if (endChar === ">") { - extra = "`<`"; + extra = "<"; } else if (endChar === "") { - extra = "`->`"; + extra = "->"; + } else { + extra = endChar; } - throw new Error("Unexpected `" + c + "` after " + extra); + throw ["Unexpected ", c, " after ", extra]; } if (!foundStopChar) { if (endChar !== "") { - throw new Error(`Expected \`,\`, \` \` or \`${endChar}\`, found \`${c}\``); + throw [ + "Expected ", + ",", // comma + ", ", + " ", // whitespace + " or ", + endChar, + ", 
found ", + c, + ]; } - throw new Error(`Expected \`,\` or \` \`, found \`${c}\``); + throw [ + "Expected ", + ",", // comma + " or ", + " ", // whitespace + ", found ", + c, + ]; } const posBefore = parserState.pos; getNextElem(query, parserState, elems, endChar === ">"); + if (endChar !== "") { + if (parserState.pos >= parserState.length) { + throw ["Unclosed ", "<"]; + } + const c2 = parserState.userQuery[parserState.pos]; + if (!isSeparatorCharacter(c2) && c2 !== endChar) { + throw ["Expected ", endChar, ", found ", c2]; + } + } // This case can be encountered if `getNextElem` encountered a "stop character" right // from the start. For example if you have `,,` or `<>`. In this case, we simply move up // the current position to continue the parsing. @@ -437,7 +486,10 @@ function initSearch(rawSearchIndex) { } foundStopChar = false; } - // We are either at the end of the string or on the `endChar`` character, let's move forward + if (parserState.pos >= parserState.length && endChar !== "") { + throw ["Unclosed ", "<"]; + } + // We are either at the end of the string or on the `endChar` character, let's move forward // in any case. parserState.pos += 1; } @@ -453,7 +505,7 @@ function initSearch(rawSearchIndex) { for (let pos = 0; pos < parserState.pos; ++pos) { if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) { - throw new Error(`Unexpected \`${query[pos]}\` in type filter`); + throw ["Unexpected ", query[pos], " in type filter"]; } } } @@ -466,11 +518,10 @@ function initSearch(rawSearchIndex) { * @param {ParserState} parserState */ function parseInput(query, parserState) { - let c, before; let foundStopChar = true; while (parserState.pos < parserState.length) { - c = parserState.userQuery[parserState.pos]; + const c = parserState.userQuery[parserState.pos]; if (isStopCharacter(c)) { foundStopChar = true; if (isSeparatorCharacter(c)) { @@ -480,19 +531,19 @@ function initSearch(rawSearchIndex) { if (isReturnArrow(parserState)) { break; } - throw new Error(`Unexpected \`${c}\` (did you mean \`->\`?)`); + throw ["Unexpected ", c, " (did you mean ", "->", "?)"]; } - throw new Error(`Unexpected \`${c}\``); + throw ["Unexpected ", c]; } else if (c === ":" && !isPathStart(parserState)) { if (parserState.typeFilter !== null) { - throw new Error("Unexpected `:`"); + throw ["Unexpected ", ":"]; } if (query.elems.length === 0) { - throw new Error("Expected type filter before `:`"); + throw ["Expected type filter before ", ":"]; } else if (query.elems.length !== 1 || parserState.totalElems !== 1) { - throw new Error("Unexpected `:`"); + throw ["Unexpected ", ":"]; } else if (query.literalSearch) { - throw new Error("You cannot use quotes on type filter"); + throw ["You cannot use quotes on type filter"]; } checkExtraTypeFilterCharacters(parserState); // The type filter doesn't count as an element since it's a modifier. 
@@ -505,11 +556,31 @@ function initSearch(rawSearchIndex) { } if (!foundStopChar) { if (parserState.typeFilter !== null) { - throw new Error(`Expected \`,\`, \` \` or \`->\`, found \`${c}\``); + throw [ + "Expected ", + ",", // comma + ", ", + " ", // whitespace + " or ", + "->", // arrow + ", found ", + c, + ]; } - throw new Error(`Expected \`,\`, \` \`, \`:\` or \`->\`, found \`${c}\``); - } - before = query.elems.length; + throw [ + "Expected ", + ",", // comma + ", ", + " ", // whitespace + ", ", + ":", // colon + " or ", + "->", // arrow + ", found ", + c, + ]; + } + const before = query.elems.length; getNextElem(query, parserState, query.elems, false); if (query.elems.length === before) { // Nothing was added, weird... Let's increase the position to not remain stuck. @@ -518,14 +589,13 @@ function initSearch(rawSearchIndex) { foundStopChar = false; } while (parserState.pos < parserState.length) { - c = parserState.userQuery[parserState.pos]; if (isReturnArrow(parserState)) { parserState.pos += 2; // Get returned elements. getItemsBefore(query, parserState, query.returned, ""); // Nothing can come afterward! if (query.returned.length === 0) { - throw new Error("Expected at least one item after `->`"); + throw ["Expected at least one item after ", "->"]; } break; } else { @@ -594,8 +664,8 @@ function initSearch(rawSearchIndex) { * * The supported syntax by this parser is as follow: * - * ident = *(ALPHA / DIGIT / "_") [!] - * path = ident *(DOUBLE-COLON ident) + * ident = *(ALPHA / DIGIT / "_") + * path = ident *(DOUBLE-COLON ident) [!] * arg = path [generics] * arg-without-generic = path * type-sep = COMMA/WS *(COMMA/WS) @@ -679,7 +749,7 @@ function initSearch(rawSearchIndex) { } } catch (err) { query = newParsedQuery(userQuery); - query.error = err.message; + query.error = err; query.typeFilter = -1; return query; } @@ -897,13 +967,13 @@ function initSearch(rawSearchIndex) { * @param {QueryElement} elem - The element from the parsed query. * @param {integer} defaultLev - This is the value to return in case there are no generics. * - * @return {integer} - Returns the best match (if any) or `MAX_LEV_DISTANCE + 1`. + * @return {integer} - Returns the best match (if any) or `maxLevDistance + 1`. */ - function checkGenerics(row, elem, defaultLev) { + function checkGenerics(row, elem, defaultLev, maxLevDistance) { if (row.generics.length === 0) { - return elem.generics.length === 0 ? defaultLev : MAX_LEV_DISTANCE + 1; + return elem.generics.length === 0 ? defaultLev : maxLevDistance + 1; } else if (row.generics.length > 0 && row.generics[0].name === null) { - return checkGenerics(row.generics[0], elem, defaultLev); + return checkGenerics(row.generics[0], elem, defaultLev, maxLevDistance); } // The names match, but we need to be sure that all generics kinda // match as well. @@ -914,8 +984,8 @@ function initSearch(rawSearchIndex) { elem_name = entry.name; if (elem_name === "") { // Pure generic, needs to check into it. 
- if (checkGenerics(entry, elem, MAX_LEV_DISTANCE + 1) !== 0) { - return MAX_LEV_DISTANCE + 1; + if (checkGenerics(entry, elem, maxLevDistance + 1, maxLevDistance) !== 0) { + return maxLevDistance + 1; } continue; } @@ -942,7 +1012,7 @@ function initSearch(rawSearchIndex) { } } if (match === null) { - return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } elems[match] -= 1; if (elems[match] === 0) { @@ -951,7 +1021,7 @@ function initSearch(rawSearchIndex) { } return 0; } - return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } /** @@ -963,10 +1033,10 @@ function initSearch(rawSearchIndex) { * * @return {integer} - Returns a Levenshtein distance to the best match. */ - function checkIfInGenerics(row, elem) { - let lev = MAX_LEV_DISTANCE + 1; + function checkIfInGenerics(row, elem, maxLevDistance) { + let lev = maxLevDistance + 1; for (const entry of row.generics) { - lev = Math.min(checkType(entry, elem, true), lev); + lev = Math.min(checkType(entry, elem, true, maxLevDistance), lev); if (lev === 0) { break; } @@ -983,15 +1053,15 @@ function initSearch(rawSearchIndex) { * @param {boolean} literalSearch * * @return {integer} - Returns a Levenshtein distance to the best match. If there is - * no match, returns `MAX_LEV_DISTANCE + 1`. + * no match, returns `maxLevDistance + 1`. */ - function checkType(row, elem, literalSearch) { + function checkType(row, elem, literalSearch, maxLevDistance) { if (row.name === null) { // This is a pure "generic" search, no need to run other checks. if (row.generics.length > 0) { - return checkIfInGenerics(row, elem); + return checkIfInGenerics(row, elem, maxLevDistance); } - return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } let lev = levenshtein(row.name, elem.name); @@ -1005,9 +1075,9 @@ function initSearch(rawSearchIndex) { return 0; } } - return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } else if (elem.generics.length > 0) { - return checkGenerics(row, elem, MAX_LEV_DISTANCE + 1); + return checkGenerics(row, elem, maxLevDistance + 1, maxLevDistance); } return 0; } else if (row.generics.length > 0) { @@ -1017,22 +1087,20 @@ function initSearch(rawSearchIndex) { } // The name didn't match so we now check if the type we're looking for is inside // the generics! - lev = checkIfInGenerics(row, elem); - // Now whatever happens, the returned distance is "less good" so we should mark - // it as such, and so we add 0.5 to the distance to make it "less good". - return lev + 0.5; - } else if (lev > MAX_LEV_DISTANCE) { + lev = Math.min(lev, checkIfInGenerics(row, elem, maxLevDistance)); + return lev; + } else if (lev > maxLevDistance) { // So our item's name doesn't match at all and has generics. // // Maybe it's present in a sub generic? For example "f>>()", if we're // looking for "B", we'll need to go down. - return checkIfInGenerics(row, elem); + return checkIfInGenerics(row, elem, maxLevDistance); } else { // At this point, the name kinda match and we have generics to check, so // let's go! - const tmp_lev = checkGenerics(row, elem, lev); - if (tmp_lev > MAX_LEV_DISTANCE) { - return MAX_LEV_DISTANCE + 1; + const tmp_lev = checkGenerics(row, elem, lev, maxLevDistance); + if (tmp_lev > maxLevDistance) { + return maxLevDistance + 1; } // We compute the median value of both checks and return it. return (tmp_lev + lev) / 2; @@ -1040,7 +1108,7 @@ function initSearch(rawSearchIndex) { } else if (elem.generics.length > 0) { // In this case, we were expecting generics but there isn't so we simply reject this // one. 
- return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } // No generics on our query or on the target type so we can return without doing // anything else. @@ -1055,23 +1123,26 @@ function initSearch(rawSearchIndex) { * @param {integer} typeFilter * * @return {integer} - Returns a Levenshtein distance to the best match. If there is no - * match, returns `MAX_LEV_DISTANCE + 1`. + * match, returns `maxLevDistance + 1`. */ - function findArg(row, elem, typeFilter) { - let lev = MAX_LEV_DISTANCE + 1; + function findArg(row, elem, typeFilter, maxLevDistance) { + let lev = maxLevDistance + 1; if (row && row.type && row.type.inputs && row.type.inputs.length > 0) { for (const input of row.type.inputs) { if (!typePassesFilter(typeFilter, input.ty)) { continue; } - lev = Math.min(lev, checkType(input, elem, parsedQuery.literalSearch)); + lev = Math.min( + lev, + checkType(input, elem, parsedQuery.literalSearch, maxLevDistance) + ); if (lev === 0) { return 0; } } } - return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev; + return parsedQuery.literalSearch ? maxLevDistance + 1 : lev; } /** @@ -1082,10 +1153,10 @@ function initSearch(rawSearchIndex) { * @param {integer} typeFilter * * @return {integer} - Returns a Levenshtein distance to the best match. If there is no - * match, returns `MAX_LEV_DISTANCE + 1`. + * match, returns `maxLevDistance + 1`. */ - function checkReturned(row, elem, typeFilter) { - let lev = MAX_LEV_DISTANCE + 1; + function checkReturned(row, elem, typeFilter, maxLevDistance) { + let lev = maxLevDistance + 1; if (row && row.type && row.type.output.length > 0) { const ret = row.type.output; @@ -1093,20 +1164,23 @@ function initSearch(rawSearchIndex) { if (!typePassesFilter(typeFilter, ret_ty.ty)) { continue; } - lev = Math.min(lev, checkType(ret_ty, elem, parsedQuery.literalSearch)); + lev = Math.min( + lev, + checkType(ret_ty, elem, parsedQuery.literalSearch, maxLevDistance) + ); if (lev === 0) { return 0; } } } - return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev; + return parsedQuery.literalSearch ? maxLevDistance + 1 : lev; } - function checkPath(contains, ty) { + function checkPath(contains, ty, maxLevDistance) { if (contains.length === 0) { return 0; } - let ret_lev = MAX_LEV_DISTANCE + 1; + let ret_lev = maxLevDistance + 1; const path = ty.path.split("::"); if (ty.parent && ty.parent.name) { @@ -1116,7 +1190,7 @@ function initSearch(rawSearchIndex) { const length = path.length; const clength = contains.length; if (clength > length) { - return MAX_LEV_DISTANCE + 1; + return maxLevDistance + 1; } for (let i = 0; i < length; ++i) { if (i + clength > length) { @@ -1126,7 +1200,7 @@ function initSearch(rawSearchIndex) { let aborted = false; for (let x = 0; x < clength; ++x) { const lev = levenshtein(path[i + x], contains[x]); - if (lev > MAX_LEV_DISTANCE) { + if (lev > maxLevDistance) { aborted = true; break; } @@ -1231,7 +1305,7 @@ function initSearch(rawSearchIndex) { * following condition: * * * If it is a "literal search" (`parsedQuery.literalSearch`), then `lev` must be 0. - * * If it is not a "literal search", `lev` must be <= `MAX_LEV_DISTANCE`. + * * If it is not a "literal search", `lev` must be <= `maxLevDistance`. 
* * The `results` map contains information which will be used to sort the search results: * @@ -1249,8 +1323,8 @@ function initSearch(rawSearchIndex) { * @param {integer} lev * @param {integer} path_lev */ - function addIntoResults(results, fullId, id, index, lev, path_lev) { - const inBounds = lev <= MAX_LEV_DISTANCE || index !== -1; + function addIntoResults(results, fullId, id, index, lev, path_lev, maxLevDistance) { + const inBounds = lev <= maxLevDistance || index !== -1; if (lev === 0 || (!parsedQuery.literalSearch && inBounds)) { if (results[fullId] !== undefined) { const result = results[fullId]; @@ -1289,7 +1363,8 @@ function initSearch(rawSearchIndex) { elem, results_others, results_in_args, - results_returned + results_returned, + maxLevDistance ) { if (!row || (filterCrates !== null && row.crate !== filterCrates)) { return; @@ -1298,13 +1373,13 @@ function initSearch(rawSearchIndex) { const fullId = row.id; const searchWord = searchWords[pos]; - const in_args = findArg(row, elem, parsedQuery.typeFilter); - const returned = checkReturned(row, elem, parsedQuery.typeFilter); + const in_args = findArg(row, elem, parsedQuery.typeFilter, maxLevDistance); + const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxLevDistance); // path_lev is 0 because no parent path information is currently stored // in the search index - addIntoResults(results_in_args, fullId, pos, -1, in_args, 0); - addIntoResults(results_returned, fullId, pos, -1, returned, 0); + addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxLevDistance); + addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxLevDistance); if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) { return; @@ -1328,16 +1403,16 @@ function initSearch(rawSearchIndex) { // No need to check anything else if it's a "pure" generics search. if (elem.name.length === 0) { if (row.type !== null) { - lev = checkGenerics(row.type, elem, MAX_LEV_DISTANCE + 1); + lev = checkGenerics(row.type, elem, maxLevDistance + 1, maxLevDistance); // path_lev is 0 because we know it's empty - addIntoResults(results_others, fullId, pos, index, lev, 0); + addIntoResults(results_others, fullId, pos, index, lev, 0, maxLevDistance); } return; } if (elem.fullPath.length > 1) { - path_lev = checkPath(elem.pathWithoutLast, row); - if (path_lev > MAX_LEV_DISTANCE) { + path_lev = checkPath(elem.pathWithoutLast, row, maxLevDistance); + if (path_lev > maxLevDistance) { return; } } @@ -1351,11 +1426,11 @@ function initSearch(rawSearchIndex) { lev = levenshtein(searchWord, elem.pathLast); - if (index === -1 && lev + path_lev > MAX_LEV_DISTANCE) { + if (index === -1 && lev + path_lev > maxLevDistance) { return; } - addIntoResults(results_others, fullId, pos, index, lev, path_lev); + addIntoResults(results_others, fullId, pos, index, lev, path_lev, maxLevDistance); } /** @@ -1367,7 +1442,7 @@ function initSearch(rawSearchIndex) { * @param {integer} pos - Position in the `searchIndex`. 
* @param {Object} results */ - function handleArgs(row, pos, results) { + function handleArgs(row, pos, results, maxLevDistance) { if (!row || (filterCrates !== null && row.crate !== filterCrates)) { return; } @@ -1379,7 +1454,7 @@ function initSearch(rawSearchIndex) { function checkArgs(elems, callback) { for (const elem of elems) { // There is more than one parameter to the query so all checks should be "exact" - const lev = callback(row, elem, NO_TYPE_FILTER); + const lev = callback(row, elem, NO_TYPE_FILTER, maxLevDistance); if (lev <= 1) { nbLev += 1; totalLev += lev; @@ -1400,12 +1475,21 @@ function initSearch(rawSearchIndex) { return; } const lev = Math.round(totalLev / nbLev); - addIntoResults(results, row.id, pos, 0, lev, 0); + addIntoResults(results, row.id, pos, 0, lev, 0, maxLevDistance); } function innerRunQuery() { let elem, i, nSearchWords, in_returned, row; + let queryLen = 0; + for (const elem of parsedQuery.elems) { + queryLen += elem.name.length; + } + for (const elem of parsedQuery.returned) { + queryLen += elem.name.length; + } + const maxLevDistance = Math.floor(queryLen / 3); + if (parsedQuery.foundElems === 1) { if (parsedQuery.elems.length === 1) { elem = parsedQuery.elems[0]; @@ -1418,7 +1502,8 @@ function initSearch(rawSearchIndex) { elem, results_others, results_in_args, - results_returned + results_returned, + maxLevDistance ); } } else if (parsedQuery.returned.length === 1) { @@ -1426,13 +1511,18 @@ function initSearch(rawSearchIndex) { elem = parsedQuery.returned[0]; for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) { row = searchIndex[i]; - in_returned = checkReturned(row, elem, parsedQuery.typeFilter); - addIntoResults(results_others, row.id, i, -1, in_returned); + in_returned = checkReturned( + row, + elem, + parsedQuery.typeFilter, + maxLevDistance + ); + addIntoResults(results_others, row.id, i, -1, in_returned, maxLevDistance); } } } else if (parsedQuery.foundElems > 0) { for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) { - handleArgs(searchIndex[i], i, results_others); + handleArgs(searchIndex[i], i, results_others, maxLevDistance); } } } @@ -1470,7 +1560,7 @@ function initSearch(rawSearchIndex) { * * @return {boolean} - Whether the result is valid or not */ - function validateResult(name, path, keys, parent) { + function validateResult(name, path, keys, parent, maxLevDistance) { if (!keys || !keys.length) { return true; } @@ -1485,7 +1575,7 @@ function initSearch(rawSearchIndex) { (parent !== undefined && parent.name !== undefined && parent.name.toLowerCase().indexOf(key) > -1) || // lastly check to see if the name was a levenshtein match - levenshtein(name, key) <= MAX_LEV_DISTANCE)) { + levenshtein(name, key) <= maxLevDistance)) { return false; } } @@ -1725,7 +1815,16 @@ function initSearch(rawSearchIndex) { let output = `

<h1 class="search-results-title">Results${crates}</h1>
    `; if (results.query.error !== null) { - output += `

<h3 class="error">Query parser error: "${results.query.error}".</h3>
`; + const error = results.query.error; + error.forEach((value, index) => { + value = value.split("<").join("&lt;").split(">").join("&gt;"); + if (index % 2 !== 0) { + error[index] = `<code>${value}</code>`; + } else { + error[index] = value; + } + }); + output += `<h3 class="error">Query parser error: "${error.join("")}".</h3>
    `; output += "
    " + makeTabHeader(0, "In Names", ret_others[1]) + "
    "; @@ -1922,7 +2021,7 @@ function initSearch(rawSearchIndex) { * @type {Array} */ const searchWords = []; - let i, word; + const charA = "A".charCodeAt(0); let currentIndex = 0; let id = 0; @@ -1936,7 +2035,7 @@ function initSearch(rawSearchIndex) { /** * The raw search data for a given crate. `n`, `t`, `d`, and `q`, `i`, and `f` * are arrays with the same length. n[i] contains the name of an item. - * t[i] contains the type of that item (as a small integer that represents an + * t[i] contains the type of that item (as a string of characters that represent an * offset in `itemTypes`). d[i] contains the description of that item. * * q[i] contains the full path of the item, or an empty string indicating @@ -1963,7 +2062,7 @@ function initSearch(rawSearchIndex) { * doc: string, * a: Object, * n: Array, - * t: Array, + * t: String, * d: Array, * q: Array, * i: Array, @@ -1992,7 +2091,7 @@ function initSearch(rawSearchIndex) { searchIndex.push(crateRow); currentIndex += 1; - // an array of (Number) item types + // a String of one character item type codes const itemTypes = crateCorpus.t; // an array of (String) item names const itemNames = crateCorpus.n; @@ -2017,7 +2116,7 @@ function initSearch(rawSearchIndex) { // convert `rawPaths` entries into object form // generate normalizedPaths for function search mode let len = paths.length; - for (i = 0; i < len; ++i) { + for (let i = 0; i < len; ++i) { lowercasePaths.push({ty: paths[i][0], name: paths[i][1].toLowerCase()}); paths[i] = {ty: paths[i][0], name: paths[i][1]}; } @@ -2031,19 +2130,17 @@ function initSearch(rawSearchIndex) { // faster analysis operations len = itemTypes.length; let lastPath = ""; - for (i = 0; i < len; ++i) { + for (let i = 0; i < len; ++i) { + let word = ""; // This object should have exactly the same set of fields as the "crateRow" // object defined above. if (typeof itemNames[i] === "string") { word = itemNames[i].toLowerCase(); - searchWords.push(word); - } else { - word = ""; - searchWords.push(""); } + searchWords.push(word); const row = { crate: crate, - ty: itemTypes[i], + ty: itemTypes.charCodeAt(i) - charA, name: itemNames[i], path: itemPaths[i] ? 
itemPaths[i] : lastPath, desc: itemDescs[i], diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js index 84df1b7d3..1cd552e7f 100644 --- a/src/librustdoc/html/static/js/settings.js +++ b/src/librustdoc/html/static/js/settings.js @@ -1,5 +1,5 @@ // Local js definitions: -/* global getSettingValue, getVirtualKey, updateLocalStorage, updateSystemTheme */ +/* global getSettingValue, getVirtualKey, updateLocalStorage, updateTheme */ /* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */ /* global MAIN_ID, getVar, getSettingsButton */ @@ -19,7 +19,7 @@ case "theme": case "preferred-dark-theme": case "preferred-light-theme": - updateSystemTheme(); + updateTheme(); updateLightAndDark(); break; case "line-numbers": @@ -48,13 +48,13 @@ } function showLightAndDark() { - removeClass(document.getElementById("preferred-light-theme").parentElement, "hidden"); - removeClass(document.getElementById("preferred-dark-theme").parentElement, "hidden"); + removeClass(document.getElementById("preferred-light-theme"), "hidden"); + removeClass(document.getElementById("preferred-dark-theme"), "hidden"); } function hideLightAndDark() { - addClass(document.getElementById("preferred-light-theme").parentElement, "hidden"); - addClass(document.getElementById("preferred-dark-theme").parentElement, "hidden"); + addClass(document.getElementById("preferred-light-theme"), "hidden"); + addClass(document.getElementById("preferred-dark-theme"), "hidden"); } function updateLightAndDark() { @@ -80,17 +80,6 @@ toggle.onkeyup = handleKey; toggle.onkeyrelease = handleKey; }); - onEachLazy(settingsElement.getElementsByClassName("select-wrapper"), elem => { - const select = elem.getElementsByTagName("select")[0]; - const settingId = select.id; - const settingValue = getSettingValue(settingId); - if (settingValue !== null) { - select.value = settingValue; - } - select.onchange = function() { - changeSetting(this.id, this.value); - }; - }); onEachLazy(settingsElement.querySelectorAll("input[type=\"radio\"]"), elem => { const settingId = elem.name; let settingValue = getSettingValue(settingId); @@ -127,38 +116,40 @@ let output = ""; for (const setting of settings) { - output += "
    "; const js_data_name = setting["js_name"]; const setting_name = setting["name"]; if (setting["options"] !== undefined) { // This is a select setting. output += `\ -
    -
    ${setting_name}
    -
    `; +
    +
    ${setting_name}
    +
    `; onEach(setting["options"], option => { const checked = option === setting["default"] ? " checked" : ""; const full = `${js_data_name}-${option.replace(/ /g,"-")}`; output += `\ -`; + `; }); - output += "
    "; + output += `\ +
    +
    `; } else { // This is a checkbox toggle. const checked = setting["default"] === true ? " checked" : ""; output += `\ -`; +
    \ + \ +
    `; } - output += "
    "; } return output; } diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js index 0e1c864e6..6c0f03b5b 100644 --- a/src/librustdoc/html/static/js/source-script.js +++ b/src/librustdoc/html/static/js/source-script.js @@ -117,8 +117,7 @@ function createSourceSidebar() { sidebar.appendChild(title); Object.keys(sourcesIndex).forEach(key => { sourcesIndex[key][NAME_OFFSET] = key; - hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "", - hasFoundFile); + hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "", hasFoundFile); }); container.appendChild(sidebar); diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js index db2db83ca..c72ac254f 100644 --- a/src/librustdoc/html/static/js/storage.js +++ b/src/librustdoc/html/static/js/storage.js @@ -51,7 +51,6 @@ function hasClass(elem, className) { return elem && elem.classList && elem.classList.contains(className); } -// eslint-disable-next-line no-unused-vars function addClass(elem, className) { if (!elem || !elem.classList) { return; @@ -153,79 +152,74 @@ function switchTheme(styleElem, mainStyleElem, newThemeName, saveTheme) { } } -// This function is called from "main.js". -// eslint-disable-next-line no-unused-vars -function useSystemTheme(value) { - if (value === undefined) { - value = true; - } - - updateLocalStorage("use-system-theme", value); - - // update the toggle if we're on the settings page - const toggle = document.getElementById("use-system-theme"); - if (toggle && toggle instanceof HTMLInputElement) { - toggle.checked = value; - } -} - -const updateSystemTheme = (function() { - if (!window.matchMedia) { - // fallback to the CSS computed value - return () => { - const cssTheme = getComputedStyle(document.documentElement) - .getPropertyValue("content"); - - switchTheme( - window.currentTheme, - window.mainTheme, - JSON.parse(cssTheme) || "light", - true - ); +const updateTheme = (function() { + /** + * Update the current theme to match whatever the current combination of + * * the preference for using the system theme + * (if this is the case, the value of preferred-light-theme, if the + * system theme is light, otherwise if dark, the value of + * preferred-dark-theme.) + * * the preferred theme + * … dictates that it should be. + */ + function updateTheme() { + const use = (theme, saveTheme) => { + switchTheme(window.currentTheme, window.mainTheme, theme, saveTheme); }; - } - - // only listen to (prefers-color-scheme: dark) because light is the default - const mql = window.matchMedia("(prefers-color-scheme: dark)"); - function handlePreferenceChange(mql) { - const use = theme => { - switchTheme(window.currentTheme, window.mainTheme, theme, true); - }; // maybe the user has disabled the setting in the meantime! 
if (getSettingValue("use-system-theme") !== "false") { const lightTheme = getSettingValue("preferred-light-theme") || "light"; const darkTheme = getSettingValue("preferred-dark-theme") || "dark"; - if (mql.matches) { - use(darkTheme); + if (isDarkMode()) { + use(darkTheme, true); } else { // prefers a light theme, or has no preference - use(lightTheme); + use(lightTheme, true); } // note: we save the theme so that it doesn't suddenly change when // the user disables "use-system-theme" and reloads the page or // navigates to another page } else { - use(getSettingValue("theme")); + use(getSettingValue("theme"), false); } } - mql.addListener(handlePreferenceChange); + // This is always updated below to a function () => bool. + let isDarkMode; - return () => { - handlePreferenceChange(mql); - }; -})(); + // Determine the function for isDarkMode, and if we have + // `window.matchMedia`, set up an event listener on the preferred color + // scheme. + // + // Otherwise, fall back to the prefers-color-scheme value CSS captured in + // the "content" property. + if (window.matchMedia) { + // only listen to (prefers-color-scheme: dark) because light is the default + const mql = window.matchMedia("(prefers-color-scheme: dark)"); -function switchToSavedTheme() { - switchTheme( - window.currentTheme, - window.mainTheme, - getSettingValue("theme") || "light", - false - ); -} + isDarkMode = () => mql.matches; + + if (mql.addEventListener) { + mql.addEventListener("change", updateTheme); + } else { + // This is deprecated, see: + // https://developer.mozilla.org/en-US/docs/Web/API/MediaQueryList/addListener + mql.addListener(updateTheme); + } + } else { + // fallback to the CSS computed value + const cssContent = getComputedStyle(document.documentElement) + .getPropertyValue("content"); + // (Note: the double-quotes come from that this is a CSS value, which + // might be a length, string, etc.) + const cssColorScheme = cssContent || "\"light\""; + isDarkMode = () => (cssColorScheme === "\"dark\""); + } + + return updateTheme; +})(); if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) { // update the preferred dark theme if the user is already using a dark theme @@ -235,13 +229,10 @@ if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) { && darkThemes.indexOf(localStoredTheme) >= 0) { updateLocalStorage("preferred-dark-theme", localStoredTheme); } - - // call the function to initialize the theme at least once! - updateSystemTheme(); -} else { - switchToSavedTheme(); } +updateTheme(); + if (getSettingValue("source-sidebar-show") === "true") { // At this point in page load, `document.body` is not available yet. // Set a class on the `` element instead. @@ -259,6 +250,6 @@ if (getSettingValue("source-sidebar-show") === "true") { // specifically when talking to a remote website with no caching. window.addEventListener("pageshow", ev => { if (ev.persisted) { - setTimeout(switchToSavedTheme, 0); + setTimeout(updateTheme, 0); } }); diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs index b48b82307..767b974cc 100644 --- a/src/librustdoc/html/static_files.rs +++ b/src/librustdoc/html/static_files.rs @@ -102,9 +102,6 @@ static_files! 
{ scrape_examples_js => "static/js/scrape-examples.js", wheel_svg => "static/images/wheel.svg", clipboard_svg => "static/images/clipboard.svg", - down_arrow_svg => "static/images/down-arrow.svg", - toggle_minus_png => "static/images/toggle-minus.svg", - toggle_plus_png => "static/images/toggle-plus.svg", copyright => "static/COPYRIGHT.txt", license_apache => "static/LICENSE-APACHE.txt", license_mit => "static/LICENSE-MIT.txt", diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html index fddda293b..7690d8f25 100644 --- a/src/librustdoc/html/templates/page.html +++ b/src/librustdoc/html/templates/page.html @@ -5,7 +5,6 @@ {#- -#} {#- -#} {#- -#} - {#- -#} {{page.title}} {#- -#} {#- -#} {#- -#} @@ -24,11 +23,13 @@ {%- for theme in themes -%} {#- -#} {%- endfor -%} + {%- if !layout.default_settings.is_empty() -%} {#- -#} + {%- endif -%} {#- -#} {%- if page.css_class.contains("crate") -%} {#- -#} diff --git a/src/librustdoc/html/templates/print_item.html b/src/librustdoc/html/templates/print_item.html index ee2880bf6..3a1867b7f 100644 --- a/src/librustdoc/html/templates/print_item.html +++ b/src/librustdoc/html/templates/print_item.html @@ -6,7 +6,7 @@ {{component.name}}:: {%- endfor -%} {{name}} {#- -#} -
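(Finally, the storage.js rewrite shown above consolidates theme selection behind a single updateTheme() that consults an isDarkMode() helper, wired up once from matchMedia when available and from a CSS fallback otherwise. A condensed sketch of that shape, with illustrative names and a simplified fallback rather than the exact rustdoc code:)

    // Sketch: one entry point that re-reads the dark/light preference on demand.
    const updateTheme = (function() {
        let isDarkMode = () => false;                        // fallback default
        function applyTheme() {
            document.documentElement.dataset.theme = isDarkMode() ? "dark" : "light";
        }
        if (window.matchMedia) {
            const mql = window.matchMedia("(prefers-color-scheme: dark)");
            isDarkMode = () => mql.matches;
            if (mql.addEventListener) {
                mql.addEventListener("change", applyTheme);  // modern browsers
            } else {
                mql.addListener(applyTheme);                 // deprecated fallback
            }
        }
        return applyTheme;
    })();

    updateTheme();  // apply once on load, as the patched storage.js now does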