Diffstat (limited to 'src/librustdoc')
-rw-r--r--  src/librustdoc/Cargo.toml | 41
-rw-r--r--  src/librustdoc/README.md | 3
-rw-r--r--  src/librustdoc/askama.toml | 2
-rw-r--r--  src/librustdoc/clean/auto_trait.rs | 742
-rw-r--r--  src/librustdoc/clean/blanket_impl.rs | 136
-rw-r--r--  src/librustdoc/clean/cfg.rs | 588
-rw-r--r--  src/librustdoc/clean/cfg/tests.rs | 467
-rw-r--r--  src/librustdoc/clean/inline.rs | 727
-rw-r--r--  src/librustdoc/clean/mod.rs | 2242
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs | 238
-rw-r--r--  src/librustdoc/clean/simplify.rs | 139
-rw-r--r--  src/librustdoc/clean/types.rs | 2508
-rw-r--r--  src/librustdoc/clean/types/tests.rs | 70
-rw-r--r--  src/librustdoc/clean/utils.rs | 616
-rw-r--r--  src/librustdoc/clean/utils/tests.rs | 41
-rw-r--r--  src/librustdoc/config.rs | 828
-rw-r--r--  src/librustdoc/core.rs | 566
-rw-r--r--  src/librustdoc/docfs.rs | 78
-rw-r--r--  src/librustdoc/doctest.rs | 1311
-rw-r--r--  src/librustdoc/doctest/tests.rs | 300
-rw-r--r--  src/librustdoc/error.rs | 59
-rw-r--r--  src/librustdoc/externalfiles.rs | 107
-rw-r--r--  src/librustdoc/fold.rs | 103
-rw-r--r--  src/librustdoc/formats/cache.rs | 569
-rw-r--r--  src/librustdoc/formats/item_type.rs | 185
-rw-r--r--  src/librustdoc/formats/mod.rs | 101
-rw-r--r--  src/librustdoc/formats/renderer.rs | 97
-rw-r--r--  src/librustdoc/html/escape.rs | 40
-rw-r--r--  src/librustdoc/html/format.rs | 1688
-rw-r--r--  src/librustdoc/html/highlight.rs | 805
-rw-r--r--  src/librustdoc/html/highlight/fixtures/decorations.html | 2
-rw-r--r--  src/librustdoc/html/highlight/fixtures/dos_line.html | 3
-rw-r--r--  src/librustdoc/html/highlight/fixtures/highlight.html | 4
-rw-r--r--  src/librustdoc/html/highlight/fixtures/sample.html | 37
-rw-r--r--  src/librustdoc/html/highlight/fixtures/sample.rs | 26
-rw-r--r--  src/librustdoc/html/highlight/fixtures/union.html | 8
-rw-r--r--  src/librustdoc/html/highlight/fixtures/union.rs | 8
-rw-r--r--  src/librustdoc/html/highlight/tests.rs | 81
-rw-r--r--  src/librustdoc/html/layout.rs | 103
-rw-r--r--  src/librustdoc/html/length_limit.rs | 119
-rw-r--r--  src/librustdoc/html/length_limit/tests.rs | 120
-rw-r--r--  src/librustdoc/html/markdown.rs | 1510
-rw-r--r--  src/librustdoc/html/markdown/tests.rs | 312
-rw-r--r--  src/librustdoc/html/mod.rs | 15
-rw-r--r--  src/librustdoc/html/render/context.rs | 762
-rw-r--r--  src/librustdoc/html/render/mod.rs | 2849
-rw-r--r--  src/librustdoc/html/render/print_item.rs | 1974
-rw-r--r--  src/librustdoc/html/render/search_index.rs | 589
-rw-r--r--  src/librustdoc/html/render/span_map.rs | 203
-rw-r--r--  src/librustdoc/html/render/tests.rs | 54
-rw-r--r--  src/librustdoc/html/render/write_shared.rs | 600
-rw-r--r--  src/librustdoc/html/sources.rs | 303
-rw-r--r--  src/librustdoc/html/static/.eslintrc.js | 96
-rw-r--r--  src/librustdoc/html/static/COPYRIGHT.txt | 46
-rw-r--r--  src/librustdoc/html/static/LICENSE-APACHE.txt | 201
-rw-r--r--  src/librustdoc/html/static/LICENSE-MIT.txt | 23
-rw-r--r--  src/librustdoc/html/static/css/normalize.css | 2
-rw-r--r--  src/librustdoc/html/static/css/noscript.css | 20
-rw-r--r--  src/librustdoc/html/static/css/rustdoc.css | 2335
-rw-r--r--  src/librustdoc/html/static/css/settings.css | 90
-rw-r--r--  src/librustdoc/html/static/css/themes/ayu.css | 563
-rw-r--r--  src/librustdoc/html/static/css/themes/dark.css | 409
-rw-r--r--  src/librustdoc/html/static/css/themes/light.css | 395
-rw-r--r--  src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt | 94
-rw-r--r--  src/librustdoc/html/static/fonts/FiraSans-Medium.woff2 | bin 0 -> 132780 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/FiraSans-Regular.woff2 | bin 0 -> 129188 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt | 99
-rw-r--r--  src/librustdoc/html/static/fonts/NanumBarunGothic.ttf.woff2 | bin 0 -> 399468 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceCodePro-It.ttf.woff2 | bin 0 -> 44896 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt | 93
-rw-r--r--  src/librustdoc/html/static/fonts/SourceCodePro-Regular.ttf.woff2 | bin 0 -> 52228 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceCodePro-Semibold.ttf.woff2 | bin 0 -> 52348 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 | bin 0 -> 81320 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 | bin 0 -> 59860 bytes
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md | 93
-rw-r--r--  src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 | bin 0 -> 76180 bytes
-rw-r--r--  src/librustdoc/html/static/images/clipboard.svg | 1
-rw-r--r--  src/librustdoc/html/static/images/down-arrow.svg | 1
-rw-r--r--  src/librustdoc/html/static/images/favicon-16x16.png | bin 0 -> 715 bytes
-rw-r--r--  src/librustdoc/html/static/images/favicon-32x32.png | bin 0 -> 1125 bytes
-rw-r--r--  src/librustdoc/html/static/images/favicon.svg | 24
-rw-r--r--  src/librustdoc/html/static/images/rust-logo.svg | 61
-rw-r--r--  src/librustdoc/html/static/images/toggle-minus.svg | 1
-rw-r--r--  src/librustdoc/html/static/images/toggle-plus.svg | 1
-rw-r--r--  src/librustdoc/html/static/images/wheel.svg | 1
-rw-r--r--  src/librustdoc/html/static/js/README.md | 15
-rw-r--r--  src/librustdoc/html/static/js/externs.js | 142
-rw-r--r--  src/librustdoc/html/static/js/main.js | 974
-rw-r--r--  src/librustdoc/html/static/js/scrape-examples.js | 106
-rw-r--r--  src/librustdoc/html/static/js/search.js | 2297
-rw-r--r--  src/librustdoc/html/static/js/settings.js | 272
-rw-r--r--  src/librustdoc/html/static/js/source-script.js | 241
-rw-r--r--  src/librustdoc/html/static/js/storage.js | 268
-rw-r--r--  src/librustdoc/html/static/scrape-examples-help.md | 34
-rw-r--r--  src/librustdoc/html/static_files.rs | 168
-rw-r--r--  src/librustdoc/html/templates/STYLE.md | 37
-rw-r--r--  src/librustdoc/html/templates/page.html | 148
-rw-r--r--  src/librustdoc/html/templates/print_item.html | 30
-rw-r--r--  src/librustdoc/html/tests.rs | 50
-rw-r--r--  src/librustdoc/html/toc.rs | 191
-rw-r--r--  src/librustdoc/html/toc/tests.rs | 79
-rw-r--r--  src/librustdoc/html/url_parts_builder.rs | 180
-rw-r--r--  src/librustdoc/html/url_parts_builder/tests.rs | 64
-rw-r--r--  src/librustdoc/json/conversions.rs | 798
-rw-r--r--  src/librustdoc/json/mod.rs | 327
-rw-r--r--  src/librustdoc/lib.rs | 868
-rw-r--r--  src/librustdoc/lint.rs | 202
-rw-r--r--  src/librustdoc/markdown.rs | 151
-rw-r--r--  src/librustdoc/passes/bare_urls.rs | 112
-rw-r--r--  src/librustdoc/passes/calculate_doc_coverage.rs | 276
-rw-r--r--  src/librustdoc/passes/check_code_block_syntax.rs | 205
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs | 145
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs | 2161
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links/early.rs | 405
-rw-r--r--  src/librustdoc/passes/collect_trait_impls.rs | 273
-rw-r--r--  src/librustdoc/passes/html_tags.rs | 303
-rw-r--r--  src/librustdoc/passes/mod.rs | 212
-rw-r--r--  src/librustdoc/passes/propagate_doc_cfg.rs | 45
-rw-r--r--  src/librustdoc/passes/strip_hidden.rs | 68
-rw-r--r--  src/librustdoc/passes/strip_priv_imports.rs | 16
-rw-r--r--  src/librustdoc/passes/strip_private.rs | 35
-rw-r--r--  src/librustdoc/passes/stripper.rs | 188
-rw-r--r--  src/librustdoc/scrape_examples.rs | 359
-rw-r--r--  src/librustdoc/theme.rs | 271
-rw-r--r--  src/librustdoc/theme/tests.rs | 117
-rw-r--r--  src/librustdoc/visit.rs | 72
-rw-r--r--  src/librustdoc/visit_ast.rs | 396
-rw-r--r--  src/librustdoc/visit_lib.rs | 82
128 files changed, 43111 insertions, 0 deletions
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
new file mode 100644
index 000000000..ddaa7438e
--- /dev/null
+++ b/src/librustdoc/Cargo.toml
@@ -0,0 +1,41 @@
+[package]
+name = "rustdoc"
+version = "0.0.0"
+edition = "2021"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+arrayvec = { version = "0.7", default-features = false }
+askama = { version = "0.11", default-features = false, features = ["config"] }
+atty = "0.2"
+pulldown-cmark = { version = "0.9.2", default-features = false }
+minifier = "0.2.1"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+smallvec = "1.8.1"
+tempfile = "3"
+itertools = "0.10.1"
+regex = "1"
+rustdoc-json-types = { path = "../rustdoc-json-types" }
+tracing = "0.1"
+tracing-tree = "0.2.0"
+once_cell = "1.10.0"
+
+[dependencies.tracing-subscriber]
+version = "0.3.3"
+default-features = false
+features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"]
+
+[target.'cfg(windows)'.dependencies]
+rayon = "1.5.1"
+
+[dev-dependencies]
+expect-test = "1.0"
+
+[features]
+jemalloc = []
+
+[package.metadata.rust-analyzer]
+rustc_private = true
diff --git a/src/librustdoc/README.md b/src/librustdoc/README.md
new file mode 100644
index 000000000..5a5f54706
--- /dev/null
+++ b/src/librustdoc/README.md
@@ -0,0 +1,3 @@
+For more information about how `librustdoc` works, see the [rustc dev guide].
+
+[rustc dev guide]: https://rustc-dev-guide.rust-lang.org/rustdoc.html
diff --git a/src/librustdoc/askama.toml b/src/librustdoc/askama.toml
new file mode 100644
index 000000000..0c984f637
--- /dev/null
+++ b/src/librustdoc/askama.toml
@@ -0,0 +1,2 @@
+[general]
+dirs = ["html/templates"]
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
new file mode 100644
index 000000000..af33c1a6a
--- /dev/null
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -0,0 +1,742 @@
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir as hir;
+use rustc_hir::lang_items::LangItem;
+use rustc_middle::ty::{self, Region, RegionVid, TypeFoldable, TypeSuperFoldable};
+use rustc_trait_selection::traits::auto_trait::{self, AutoTraitResult};
+
+use std::fmt::Debug;
+
+use super::*;
+
+#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
+enum RegionTarget<'tcx> {
+ Region(Region<'tcx>),
+ RegionVid(RegionVid),
+}
+
+#[derive(Default, Debug, Clone)]
+struct RegionDeps<'tcx> {
+ larger: FxHashSet<RegionTarget<'tcx>>,
+ smaller: FxHashSet<RegionTarget<'tcx>>,
+}
+
+pub(crate) struct AutoTraitFinder<'a, 'tcx> {
+ pub(crate) cx: &'a mut core::DocContext<'tcx>,
+}
+
+impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx>
+where
+ 'tcx: 'a, // should be an implied bound; rustc bug #98852.
+{
+ pub(crate) fn new(cx: &'a mut core::DocContext<'tcx>) -> Self {
+ AutoTraitFinder { cx }
+ }
+
+ fn generate_for_trait(
+ &mut self,
+ ty: Ty<'tcx>,
+ trait_def_id: DefId,
+ param_env: ty::ParamEnv<'tcx>,
+ item_def_id: DefId,
+ f: &auto_trait::AutoTraitFinder<'tcx>,
+ // If this is set, show only negative trait implementations, not positive ones.
+ discard_positive_impl: bool,
+ ) -> Option<Item> {
+ let tcx = self.cx.tcx;
+ let trait_ref = ty::TraitRef { def_id: trait_def_id, substs: tcx.mk_substs_trait(ty, &[]) };
+ if !self.cx.generated_synthetics.insert((ty, trait_def_id)) {
+ debug!("get_auto_trait_impl_for({:?}): already generated, aborting", trait_ref);
+ return None;
+ }
+
+ let result = f.find_auto_trait_generics(ty, param_env, trait_def_id, |info| {
+ let region_data = info.region_data;
+
+ let names_map = tcx
+ .generics_of(item_def_id)
+ .params
+ .iter()
+ .filter_map(|param| match param.kind {
+ ty::GenericParamDefKind::Lifetime => Some(param.name),
+ _ => None,
+ })
+ .map(|name| (name, Lifetime(name)))
+ .collect();
+ let lifetime_predicates = Self::handle_lifetimes(&region_data, &names_map);
+ let new_generics = self.param_env_to_generics(
+ item_def_id,
+ info.full_user_env,
+ lifetime_predicates,
+ info.vid_to_region,
+ );
+
+ debug!(
+ "find_auto_trait_generics(item_def_id={:?}, trait_def_id={:?}): \
+ finished with {:?}",
+ item_def_id, trait_def_id, new_generics
+ );
+
+ new_generics
+ });
+
+ let polarity;
+ let new_generics = match result {
+ AutoTraitResult::PositiveImpl(new_generics) => {
+ polarity = ty::ImplPolarity::Positive;
+ if discard_positive_impl {
+ return None;
+ }
+ new_generics
+ }
+ AutoTraitResult::NegativeImpl => {
+ polarity = ty::ImplPolarity::Negative;
+
+ // For negative impls, we use the generic params, but *not* the predicates,
+ // from the original type. Otherwise, the displayed impl appears to be a
+ // conditional negative impl, when it's really unconditional.
+ //
+ // For example, consider the struct Foo<T: Copy>(*mut T). Using
+ // the original predicates in our impl would cause us to generate
+ // `impl !Send for Foo<T: Copy>`, which makes it appear that Foo
+ // implements `Send` where `T` is not `Copy`.
+ //
+ // Instead, we generate `impl !Send for Foo<T>`, which better
+ // expresses the fact that `Foo<T>` never implements `Send`,
+ // regardless of the choice of `T`.
+ let raw_generics = clean_ty_generics(
+ self.cx,
+ tcx.generics_of(item_def_id),
+ ty::GenericPredicates::default(),
+ );
+ let params = raw_generics.params;
+
+ Generics { params, where_predicates: Vec::new() }
+ }
+ AutoTraitResult::ExplicitImpl => return None,
+ };
+
+ Some(Item {
+ name: None,
+ attrs: Default::default(),
+ visibility: Inherited,
+ item_id: ItemId::Auto { trait_: trait_def_id, for_: item_def_id },
+ kind: Box::new(ImplItem(Box::new(Impl {
+ unsafety: hir::Unsafety::Normal,
+ generics: new_generics,
+ trait_: Some(clean_trait_ref_with_bindings(self.cx, trait_ref, &[])),
+ for_: clean_middle_ty(ty, self.cx, None),
+ items: Vec::new(),
+ polarity,
+ kind: ImplKind::Auto,
+ }))),
+ cfg: None,
+ })
+ }
+
+ pub(crate) fn get_auto_trait_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
+ let tcx = self.cx.tcx;
+ let param_env = tcx.param_env(item_def_id);
+ let ty = tcx.type_of(item_def_id);
+ let f = auto_trait::AutoTraitFinder::new(tcx);
+
+ debug!("get_auto_trait_impls({:?})", ty);
+ let auto_traits: Vec<_> = self.cx.auto_traits.iter().copied().collect();
+ let mut auto_traits: Vec<Item> = auto_traits
+ .into_iter()
+ .filter_map(|trait_def_id| {
+ self.generate_for_trait(ty, trait_def_id, param_env, item_def_id, &f, false)
+ })
+ .collect();
+ // We are only interested in the case where the type *doesn't* implement the Sized trait.
+ if !ty.is_sized(tcx.at(rustc_span::DUMMY_SP), param_env) {
+ // In case `#![no_core]` is used, `sized_trait` returns nothing.
+ if let Some(item) = tcx.lang_items().sized_trait().and_then(|sized_trait_did| {
+ self.generate_for_trait(ty, sized_trait_did, param_env, item_def_id, &f, true)
+ }) {
+ auto_traits.push(item);
+ }
+ }
+ auto_traits
+ }
+
+ fn get_lifetime(region: Region<'_>, names_map: &FxHashMap<Symbol, Lifetime>) -> Lifetime {
+ region_name(region)
+ .map(|name| {
+ names_map.get(&name).unwrap_or_else(|| {
+ panic!("Missing lifetime with name {:?} for {:?}", name.as_str(), region)
+ })
+ })
+ .unwrap_or(&Lifetime::statik())
+ .clone()
+ }
+
+ /// This method calculates two things: Lifetime constraints of the form `'a: 'b`,
+ /// and region constraints of the form `RegionVid: 'a`
+ ///
+ /// This is essentially a simplified version of lexical_region_resolve. However,
+ /// handle_lifetimes determines what *needs to be* true in order for an impl to hold.
+ /// lexical_region_resolve, along with much of the rest of the compiler, is concerned
+ /// with determining if a given set of constraints/predicates *are* met, given some
+ /// starting conditions (e.g., user-provided code). For this reason, it's easier
+ /// to perform the calculations we need on our own, rather than trying to make
+ /// existing inference/solver code do what we want.
+ fn handle_lifetimes<'cx>(
+ regions: &RegionConstraintData<'cx>,
+ names_map: &FxHashMap<Symbol, Lifetime>,
+ ) -> Vec<WherePredicate> {
+ // Our goal is to 'flatten' the list of constraints by eliminating
+ // all intermediate RegionVids. At the end, all constraints should
+ // be between Regions (aka region variables). This gives us the information
+ // we need to create the Generics.
+ let mut finished: FxHashMap<_, Vec<_>> = Default::default();
+
+ let mut vid_map: FxHashMap<RegionTarget<'_>, RegionDeps<'_>> = Default::default();
+
+ // Flattening is done in two parts. First, we insert all of the constraints
+ // into a map. Each RegionTarget (either a RegionVid or a Region) maps
+ // to its smaller and larger regions. Note that 'larger' regions correspond
+ // to sub-regions in Rust code (e.g., in 'a: 'b, 'a is the larger region).
+ for constraint in regions.constraints.keys() {
+ match *constraint {
+ Constraint::VarSubVar(r1, r2) => {
+ {
+ let deps1 = vid_map.entry(RegionTarget::RegionVid(r1)).or_default();
+ deps1.larger.insert(RegionTarget::RegionVid(r2));
+ }
+
+ let deps2 = vid_map.entry(RegionTarget::RegionVid(r2)).or_default();
+ deps2.smaller.insert(RegionTarget::RegionVid(r1));
+ }
+ Constraint::RegSubVar(region, vid) => {
+ let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
+ deps.smaller.insert(RegionTarget::Region(region));
+ }
+ Constraint::VarSubReg(vid, region) => {
+ let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
+ deps.larger.insert(RegionTarget::Region(region));
+ }
+ Constraint::RegSubReg(r1, r2) => {
+ // The constraint is already in the form that we want, so we're done with it.
+ // Desired order is 'larger, smaller', so flip them.
+ if region_name(r1) != region_name(r2) {
+ finished
+ .entry(region_name(r2).expect("no region_name found"))
+ .or_default()
+ .push(r1);
+ }
+ }
+ }
+ }
+
+ // Here, we 'flatten' the map one element at a time.
+ // All of the element's sub and super regions are connected
+ // to each other. For example, if we have a graph that looks like this:
+ //
+ // (A, B) - C - (D, E)
+ // Where (A, B) are subregions, and (D,E) are super-regions
+ //
+ // then after deleting 'C', the graph will look like this:
+ // ... - A - (D, E ...)
+ // ... - B - (D, E, ...)
+ // (A, B, ...) - D - ...
+ // (A, B, ...) - E - ...
+ //
+ // where '...' signifies the existing sub and super regions of an entry
+ // When two adjacent ty::Regions are encountered, we've computed a final
+ // constraint, and add it to our list. Since we make sure to never re-add
+ // deleted items, this process will always finish.
+ while !vid_map.is_empty() {
+ let target = *vid_map.keys().next().expect("Keys somehow empty");
+ let deps = vid_map.remove(&target).expect("Entry somehow missing");
+
+ for smaller in deps.smaller.iter() {
+ for larger in deps.larger.iter() {
+ match (smaller, larger) {
+ (&RegionTarget::Region(r1), &RegionTarget::Region(r2)) => {
+ if region_name(r1) != region_name(r2) {
+ finished
+ .entry(region_name(r2).expect("no region name found"))
+ .or_default()
+ .push(r1) // Larger, smaller
+ }
+ }
+ (&RegionTarget::RegionVid(_), &RegionTarget::Region(_)) => {
+ if let Entry::Occupied(v) = vid_map.entry(*smaller) {
+ let smaller_deps = v.into_mut();
+ smaller_deps.larger.insert(*larger);
+ smaller_deps.larger.remove(&target);
+ }
+ }
+ (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => {
+ if let Entry::Occupied(v) = vid_map.entry(*larger) {
+ let deps = v.into_mut();
+ deps.smaller.insert(*smaller);
+ deps.smaller.remove(&target);
+ }
+ }
+ (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => {
+ if let Entry::Occupied(v) = vid_map.entry(*smaller) {
+ let smaller_deps = v.into_mut();
+ smaller_deps.larger.insert(*larger);
+ smaller_deps.larger.remove(&target);
+ }
+
+ if let Entry::Occupied(v) = vid_map.entry(*larger) {
+ let larger_deps = v.into_mut();
+ larger_deps.smaller.insert(*smaller);
+ larger_deps.smaller.remove(&target);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ let lifetime_predicates = names_map
+ .iter()
+ .flat_map(|(name, lifetime)| {
+ let empty = Vec::new();
+ let bounds: FxHashSet<GenericBound> = finished
+ .get(name)
+ .unwrap_or(&empty)
+ .iter()
+ .map(|region| GenericBound::Outlives(Self::get_lifetime(*region, names_map)))
+ .collect();
+
+ if bounds.is_empty() {
+ return None;
+ }
+ Some(WherePredicate::RegionPredicate {
+ lifetime: lifetime.clone(),
+ bounds: bounds.into_iter().collect(),
+ })
+ })
+ .collect();
+
+ lifetime_predicates
+ }
+
+ fn extract_for_generics(&self, pred: ty::Predicate<'tcx>) -> FxHashSet<GenericParamDef> {
+ let bound_predicate = pred.kind();
+ let tcx = self.cx.tcx;
+ let regions = match bound_predicate.skip_binder() {
+ ty::PredicateKind::Trait(poly_trait_pred) => {
+ tcx.collect_referenced_late_bound_regions(&bound_predicate.rebind(poly_trait_pred))
+ }
+ ty::PredicateKind::Projection(poly_proj_pred) => {
+ tcx.collect_referenced_late_bound_regions(&bound_predicate.rebind(poly_proj_pred))
+ }
+ _ => return FxHashSet::default(),
+ };
+
+ regions
+ .into_iter()
+ .filter_map(|br| {
+ match br {
+ // We only care about named late bound regions, as we need to add them
+ // to the 'for<>' section
+ ty::BrNamed(_, name) => Some(GenericParamDef {
+ name,
+ kind: GenericParamDefKind::Lifetime { outlives: vec![] },
+ }),
+ _ => None,
+ }
+ })
+ .collect()
+ }
+
+ fn make_final_bounds(
+ &self,
+ ty_to_bounds: FxHashMap<Type, FxHashSet<GenericBound>>,
+ ty_to_fn: FxHashMap<Type, (PolyTrait, Option<Type>)>,
+ lifetime_to_bounds: FxHashMap<Lifetime, FxHashSet<GenericBound>>,
+ ) -> Vec<WherePredicate> {
+ ty_to_bounds
+ .into_iter()
+ .flat_map(|(ty, mut bounds)| {
+ if let Some((ref poly_trait, ref output)) = ty_to_fn.get(&ty) {
+ let mut new_path = poly_trait.trait_.clone();
+ let last_segment = new_path.segments.pop().expect("segments were empty");
+
+ let (old_input, old_output) = match last_segment.args {
+ GenericArgs::AngleBracketed { args, .. } => {
+ let types = args
+ .iter()
+ .filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty.clone()),
+ _ => None,
+ })
+ .collect();
+ (types, None)
+ }
+ GenericArgs::Parenthesized { inputs, output } => (inputs, output),
+ };
+
+ let output = output.as_ref().cloned().map(Box::new);
+ if old_output.is_some() && old_output != output {
+ panic!("Output mismatch for {:?} {:?} {:?}", ty, old_output, output);
+ }
+
+ let new_params = GenericArgs::Parenthesized { inputs: old_input, output };
+
+ new_path
+ .segments
+ .push(PathSegment { name: last_segment.name, args: new_params });
+
+ bounds.insert(GenericBound::TraitBound(
+ PolyTrait {
+ trait_: new_path,
+ generic_params: poly_trait.generic_params.clone(),
+ },
+ hir::TraitBoundModifier::None,
+ ));
+ }
+ if bounds.is_empty() {
+ return None;
+ }
+
+ let mut bounds_vec = bounds.into_iter().collect();
+ self.sort_where_bounds(&mut bounds_vec);
+
+ Some(WherePredicate::BoundPredicate {
+ ty,
+ bounds: bounds_vec,
+ bound_params: Vec::new(),
+ })
+ })
+ .chain(
+ lifetime_to_bounds.into_iter().filter(|&(_, ref bounds)| !bounds.is_empty()).map(
+ |(lifetime, bounds)| {
+ let mut bounds_vec = bounds.into_iter().collect();
+ self.sort_where_bounds(&mut bounds_vec);
+ WherePredicate::RegionPredicate { lifetime, bounds: bounds_vec }
+ },
+ ),
+ )
+ .collect()
+ }
+
+ /// Converts the calculated `ParamEnv` and lifetime information to a [`clean::Generics`](Generics), suitable for
+ /// display on the docs page. Cleaning the `Predicates` produces sub-optimal [`WherePredicate`]s,
+ /// so we fix them up:
+ ///
+ /// * Multiple bounds for the same type are coalesced into one: e.g., `T: Copy`, `T: Debug`
+ /// becomes `T: Copy + Debug`
+ /// * `Fn` bounds are handled specially - instead of leaving it as `T: Fn(), <T as Fn::Output> =
+ /// K`, we use the dedicated syntax `T: Fn() -> K`
+ /// * We explicitly add a `?Sized` bound if we didn't find any `Sized` predicates for a type
+ fn param_env_to_generics(
+ &mut self,
+ item_def_id: DefId,
+ param_env: ty::ParamEnv<'tcx>,
+ mut existing_predicates: Vec<WherePredicate>,
+ vid_to_region: FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
+ ) -> Generics {
+ debug!(
+ "param_env_to_generics(item_def_id={:?}, param_env={:?}, \
+ existing_predicates={:?})",
+ item_def_id, param_env, existing_predicates
+ );
+
+ let tcx = self.cx.tcx;
+
+ // The `Sized` trait must be handled specially, since we only display it when
+ // it is *not* required (i.e., '?Sized')
+ let sized_trait = tcx.require_lang_item(LangItem::Sized, None);
+
+ let mut replacer = RegionReplacer { vid_to_region: &vid_to_region, tcx };
+
+ let orig_bounds: FxHashSet<_> = tcx.param_env(item_def_id).caller_bounds().iter().collect();
+ let clean_where_predicates = param_env
+ .caller_bounds()
+ .iter()
+ .filter(|p| {
+ !orig_bounds.contains(p)
+ || match p.kind().skip_binder() {
+ ty::PredicateKind::Trait(pred) => pred.def_id() == sized_trait,
+ _ => false,
+ }
+ })
+ .map(|p| p.fold_with(&mut replacer));
+
+ let raw_generics = clean_ty_generics(
+ self.cx,
+ tcx.generics_of(item_def_id),
+ tcx.explicit_predicates_of(item_def_id),
+ );
+ let mut generic_params = raw_generics.params;
+
+ debug!("param_env_to_generics({:?}): generic_params={:?}", item_def_id, generic_params);
+
+ let mut has_sized = FxHashSet::default();
+ let mut ty_to_bounds: FxHashMap<_, FxHashSet<_>> = Default::default();
+ let mut lifetime_to_bounds: FxHashMap<_, FxHashSet<_>> = Default::default();
+ let mut ty_to_traits: FxHashMap<Type, FxHashSet<Path>> = Default::default();
+
+ let mut ty_to_fn: FxHashMap<Type, (PolyTrait, Option<Type>)> = Default::default();
+
+ for p in clean_where_predicates {
+ let (orig_p, p) = (p, p.clean(self.cx));
+ if p.is_none() {
+ continue;
+ }
+ let p = p.unwrap();
+ match p {
+ WherePredicate::BoundPredicate { ty, mut bounds, .. } => {
+ // Writing a projection trait bound of the form
+ // <T as Trait>::Name : ?Sized
+ // is illegal, because ?Sized bounds can only
+ // be written in the (here, nonexistent) definition
+ // of the type.
+ // Therefore, we make sure that we never add a ?Sized
+ // bound for projections
+ if let Type::QPath { .. } = ty {
+ has_sized.insert(ty.clone());
+ }
+
+ if bounds.is_empty() {
+ continue;
+ }
+
+ let mut for_generics = self.extract_for_generics(orig_p);
+
+ assert!(bounds.len() == 1);
+ let mut b = bounds.pop().expect("bounds were empty");
+
+ if b.is_sized_bound(self.cx) {
+ has_sized.insert(ty.clone());
+ } else if !b
+ .get_trait_path()
+ .and_then(|trait_| {
+ ty_to_traits
+ .get(&ty)
+ .map(|bounds| bounds.contains(&strip_path_generics(trait_)))
+ })
+ .unwrap_or(false)
+ {
+ // If we've already added a projection bound for the same type, don't add
+ // this, as it would be a duplicate
+
+ // Handle any 'Fn/FnOnce/FnMut' bounds specially,
+ // as we want to combine them with any 'Output' qpaths
+ // later
+
+ let is_fn = match b {
+ GenericBound::TraitBound(ref mut p, _) => {
+ // Insert regions into the for_generics hash map first, to ensure
+ // that we don't end up with duplicate bounds (e.g., for<'b, 'b>)
+ for_generics.extend(p.generic_params.clone());
+ p.generic_params = for_generics.into_iter().collect();
+ self.is_fn_trait(&p.trait_)
+ }
+ _ => false,
+ };
+
+ let poly_trait = b.get_poly_trait().expect("Cannot get poly trait");
+
+ if is_fn {
+ ty_to_fn
+ .entry(ty.clone())
+ .and_modify(|e| *e = (poly_trait.clone(), e.1.clone()))
+ .or_insert(((poly_trait.clone()), None));
+
+ ty_to_bounds.entry(ty.clone()).or_default();
+ } else {
+ ty_to_bounds.entry(ty.clone()).or_default().insert(b.clone());
+ }
+ }
+ }
+ WherePredicate::RegionPredicate { lifetime, bounds } => {
+ lifetime_to_bounds.entry(lifetime).or_default().extend(bounds);
+ }
+ WherePredicate::EqPredicate { lhs, rhs } => {
+ match lhs {
+ Type::QPath { ref assoc, ref self_type, ref trait_, .. } => {
+ let ty = &*self_type;
+ let mut new_trait = trait_.clone();
+
+ if self.is_fn_trait(trait_) && assoc.name == sym::Output {
+ ty_to_fn
+ .entry(*ty.clone())
+ .and_modify(|e| {
+ *e = (e.0.clone(), Some(rhs.ty().unwrap().clone()))
+ })
+ .or_insert((
+ PolyTrait {
+ trait_: trait_.clone(),
+ generic_params: Vec::new(),
+ },
+ Some(rhs.ty().unwrap().clone()),
+ ));
+ continue;
+ }
+
+ let args = &mut new_trait
+ .segments
+ .last_mut()
+ .expect("segments were empty")
+ .args;
+
+ match args {
+ // Convert something like '<T as Iterator::Item> = u8'
+ // to 'T: Iterator<Item=u8>'
+ GenericArgs::AngleBracketed { ref mut bindings, .. } => {
+ bindings.push(TypeBinding {
+ assoc: *assoc.clone(),
+ kind: TypeBindingKind::Equality { term: rhs },
+ });
+ }
+ GenericArgs::Parenthesized { .. } => {
+ existing_predicates.push(WherePredicate::EqPredicate {
+ lhs: lhs.clone(),
+ rhs,
+ });
+ continue; // If something other than a Fn ends up
+ // with parentheses, leave it alone
+ }
+ }
+
+ let bounds = ty_to_bounds.entry(*ty.clone()).or_default();
+
+ bounds.insert(GenericBound::TraitBound(
+ PolyTrait { trait_: new_trait, generic_params: Vec::new() },
+ hir::TraitBoundModifier::None,
+ ));
+
+ // Remove any existing 'plain' bound (e.g., 'T: Iterator`) so
+ // that we don't see a
+ // duplicate bound like `T: Iterator + Iterator<Item=u8>`
+ // on the docs page.
+ bounds.remove(&GenericBound::TraitBound(
+ PolyTrait { trait_: trait_.clone(), generic_params: Vec::new() },
+ hir::TraitBoundModifier::None,
+ ));
+ // Avoid creating any new duplicate bounds later in the outer
+ // loop
+ ty_to_traits.entry(*ty.clone()).or_default().insert(trait_.clone());
+ }
+ _ => panic!("Unexpected LHS {:?} for {:?}", lhs, item_def_id),
+ }
+ }
+ };
+ }
+
+ let final_bounds = self.make_final_bounds(ty_to_bounds, ty_to_fn, lifetime_to_bounds);
+
+ existing_predicates.extend(final_bounds);
+
+ for param in generic_params.iter_mut() {
+ match param.kind {
+ GenericParamDefKind::Type { ref mut default, ref mut bounds, .. } => {
+ // We never want something like `impl<T=Foo>`.
+ default.take();
+ let generic_ty = Type::Generic(param.name);
+ if !has_sized.contains(&generic_ty) {
+ bounds.insert(0, GenericBound::maybe_sized(self.cx));
+ }
+ }
+ GenericParamDefKind::Lifetime { .. } => {}
+ GenericParamDefKind::Const { ref mut default, .. } => {
+ // We never want something like `impl<const N: usize = 10>`
+ default.take();
+ }
+ }
+ }
+
+ self.sort_where_predicates(&mut existing_predicates);
+
+ Generics { params: generic_params, where_predicates: existing_predicates }
+ }
+
+ /// Ensure that the predicates are in a consistent order. The precise
+ /// ordering doesn't actually matter, but it's important that
+ /// a given set of predicates always appears in the same order -
+ /// both for visual consistency between 'rustdoc' runs, and to
+ /// make writing tests much easier
+ #[inline]
+ fn sort_where_predicates(&self, predicates: &mut Vec<WherePredicate>) {
+ // We should never have identical bounds - and if we do,
+ // they're visually identical as well. Therefore, using
+ // an unstable sort is fine.
+ self.unstable_debug_sort(predicates);
+ }
+
+ /// Ensure that the bounds are in a consistent order. The precise
+ /// ordering doesn't actually matter, but it's important that
+ /// a given set of bounds always appears in the same order -
+ /// both for visual consistency between 'rustdoc' runs, and to
+ /// make writing tests much easier
+ #[inline]
+ fn sort_where_bounds(&self, bounds: &mut Vec<GenericBound>) {
+ // We should never have identical bounds - and if we do,
+ // they're visually identical as well. Therefore, using
+ // an unstable sort is fine.
+ self.unstable_debug_sort(bounds);
+ }
+
+ /// This might look horrendously hacky, but it's actually not that bad.
+ ///
+ /// For performance reasons, we use several different FxHashMaps
+ /// in the process of computing the final set of where predicates.
+ /// However, the iteration order of a HashMap is completely unspecified.
+ /// In fact, the iteration order of an FxHashMap can even vary between platforms,
+ /// since FxHasher has different behavior for 32-bit and 64-bit platforms.
+ ///
+ /// Obviously, it's extremely undesirable for documentation rendering
+ /// to be dependent on the platform it's run on. Apart from being confusing
+ /// to end users, it makes writing tests much more difficult, as predicates
+ /// can appear in any order in the final result.
+ ///
+ /// To solve this problem, we sort WherePredicates and GenericBounds
+ /// by their Debug string. The thing to keep in mind is that we don't really
+ /// care what the final order is - we're synthesizing an impl or bound
+ /// ourselves, so any order can be considered equally valid. By sorting the
+ /// predicates and bounds, however, we ensure that for a given codebase, all
+ /// auto-trait impls always render in exactly the same way.
+ ///
+ /// Using the Debug implementation for sorting prevents us from needing to
+ /// write quite a bit of almost entirely useless code (e.g., how should two
+ /// Types be sorted relative to each other). It also allows us to solve the
+ /// problem for both WherePredicates and GenericBounds at the same time. This
+ /// approach is probably somewhat slower, but the small number of items
+ /// involved (impls rarely have more than a few bounds) means that it
+ /// shouldn't matter in practice.
+ fn unstable_debug_sort<T: Debug>(&self, vec: &mut Vec<T>) {
+ vec.sort_by_cached_key(|x| format!("{:?}", x))
+ }
+
+ fn is_fn_trait(&self, path: &Path) -> bool {
+ let tcx = self.cx.tcx;
+ let did = path.def_id();
+ did == tcx.require_lang_item(LangItem::Fn, None)
+ || did == tcx.require_lang_item(LangItem::FnMut, None)
+ || did == tcx.require_lang_item(LangItem::FnOnce, None)
+ }
+}
+
+fn region_name(region: Region<'_>) -> Option<Symbol> {
+ match *region {
+ ty::ReEarlyBound(r) => Some(r.name),
+ _ => None,
+ }
+}
+
+/// Replaces all [`ty::RegionVid`]s in a type with [`ty::Region`]s, using the provided map.
+struct RegionReplacer<'a, 'tcx> {
+ vid_to_region: &'a FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
+ tcx: TyCtxt<'tcx>,
+}
+
+impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ (match *r {
+ ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(),
+ _ => None,
+ })
+ .unwrap_or_else(|| r.super_fold_with(self))
+ }
+}
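
The `unstable_debug_sort` helper at the end of this file makes the synthesized predicates and bounds deterministic by comparing the `Debug` output of each item, as its doc comment explains. A minimal standalone sketch of that idea (not rustc code; the tuple data below is made up to stand in for the real `WherePredicate`/`GenericBound` values):

use std::fmt::Debug;

// Sort arbitrary `Debug` values by their debug string so the final order is
// deterministic regardless of the hash maps' iteration order.
fn debug_sort<T: Debug>(items: &mut Vec<T>) {
    // `sort_by_cached_key` formats each element at most once.
    items.sort_by_cached_key(|x| format!("{:?}", x));
}

fn main() {
    // Hypothetical (type parameter, bound) pairs collected in arbitrary order.
    let mut bounds = vec![("T", "Sync"), ("A", "Copy"), ("T", "Send")];
    debug_sort(&mut bounds);
    assert_eq!(bounds, vec![("A", "Copy"), ("T", "Send"), ("T", "Sync")]);
}
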
diff --git a/src/librustdoc/clean/blanket_impl.rs b/src/librustdoc/clean/blanket_impl.rs
new file mode 100644
index 000000000..01dd95e6e
--- /dev/null
+++ b/src/librustdoc/clean/blanket_impl.rs
@@ -0,0 +1,136 @@
+use crate::rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
+use rustc_hir as hir;
+use rustc_infer::infer::{InferOk, TyCtxtInferExt};
+use rustc_infer::traits;
+use rustc_middle::ty::subst::Subst;
+use rustc_middle::ty::ToPredicate;
+use rustc_span::DUMMY_SP;
+
+use super::*;
+
+pub(crate) struct BlanketImplFinder<'a, 'tcx> {
+ pub(crate) cx: &'a mut core::DocContext<'tcx>,
+}
+
+impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
+ pub(crate) fn get_blanket_impls(&mut self, item_def_id: DefId) -> Vec<Item> {
+ let param_env = self.cx.tcx.param_env(item_def_id);
+ let ty = self.cx.tcx.bound_type_of(item_def_id);
+
+ trace!("get_blanket_impls({:?})", ty);
+ let mut impls = Vec::new();
+ self.cx.with_all_traits(|cx, all_traits| {
+ for &trait_def_id in all_traits {
+ if !cx.cache.access_levels.is_public(trait_def_id)
+ || cx.generated_synthetics.get(&(ty.0, trait_def_id)).is_some()
+ {
+ continue;
+ }
+ // NOTE: doesn't use `for_each_relevant_impl` to avoid looking at anything besides blanket impls
+ let trait_impls = cx.tcx.trait_impls_of(trait_def_id);
+ for &impl_def_id in trait_impls.blanket_impls() {
+ trace!(
+ "get_blanket_impls: Considering impl for trait '{:?}' {:?}",
+ trait_def_id,
+ impl_def_id
+ );
+ let trait_ref = cx.tcx.bound_impl_trait_ref(impl_def_id).unwrap();
+ let is_param = matches!(trait_ref.0.self_ty().kind(), ty::Param(_));
+ let may_apply = is_param && cx.tcx.infer_ctxt().enter(|infcx| {
+ let substs = infcx.fresh_substs_for_item(DUMMY_SP, item_def_id);
+ let ty = ty.subst(infcx.tcx, substs);
+ let param_env = EarlyBinder(param_env).subst(infcx.tcx, substs);
+
+ let impl_substs = infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
+ let trait_ref = trait_ref.subst(infcx.tcx, impl_substs);
+
+ // Require the type the impl is implemented on to match
+ // our type, and ignore the impl if there was a mismatch.
+ let cause = traits::ObligationCause::dummy();
+ let eq_result = infcx.at(&cause, param_env).eq(trait_ref.self_ty(), ty);
+ if let Ok(InferOk { value: (), obligations }) = eq_result {
+ // FIXME(eddyb) ignoring `obligations` might cause false positives.
+ drop(obligations);
+
+ trace!(
+ "invoking predicate_may_hold: param_env={:?}, trait_ref={:?}, ty={:?}",
+ param_env,
+ trait_ref,
+ ty
+ );
+ let predicates = cx
+ .tcx
+ .predicates_of(impl_def_id)
+ .instantiate(cx.tcx, impl_substs)
+ .predicates
+ .into_iter()
+ .chain(Some(
+ ty::Binder::dummy(trait_ref)
+ .to_poly_trait_predicate()
+ .map_bound(ty::PredicateKind::Trait)
+ .to_predicate(infcx.tcx),
+ ));
+ for predicate in predicates {
+ debug!("testing predicate {:?}", predicate);
+ let obligation = traits::Obligation::new(
+ traits::ObligationCause::dummy(),
+ param_env,
+ predicate,
+ );
+ match infcx.evaluate_obligation(&obligation) {
+ Ok(eval_result) if eval_result.may_apply() => {}
+ Err(traits::OverflowError::Canonical) => {}
+ Err(traits::OverflowError::ErrorReporting) => {}
+ _ => {
+ return false;
+ }
+ }
+ }
+ true
+ } else {
+ false
+ }
+ });
+ debug!(
+ "get_blanket_impls: found applicable impl: {} for trait_ref={:?}, ty={:?}",
+ may_apply, trait_ref, ty
+ );
+ if !may_apply {
+ continue;
+ }
+
+ cx.generated_synthetics.insert((ty.0, trait_def_id));
+
+ impls.push(Item {
+ name: None,
+ attrs: Default::default(),
+ visibility: Inherited,
+ item_id: ItemId::Blanket { impl_id: impl_def_id, for_: item_def_id },
+ kind: Box::new(ImplItem(Box::new(Impl {
+ unsafety: hir::Unsafety::Normal,
+ generics: clean_ty_generics(
+ cx,
+ cx.tcx.generics_of(impl_def_id),
+ cx.tcx.explicit_predicates_of(impl_def_id),
+ ),
+ // FIXME(eddyb) compute both `trait_` and `for_` from
+ // the post-inference `trait_ref`, as it's more accurate.
+ trait_: Some(clean_trait_ref_with_bindings(cx, trait_ref.0, &[])),
+ for_: clean_middle_ty(ty.0, cx, None),
+ items: cx.tcx
+ .associated_items(impl_def_id)
+ .in_definition_order()
+ .map(|x| x.clean(cx))
+ .collect::<Vec<_>>(),
+ polarity: ty::ImplPolarity::Positive,
+ kind: ImplKind::Blanket(Box::new(clean_middle_ty(trait_ref.0.self_ty(), cx, None))),
+ }))),
+ cfg: None,
+ });
+ }
+ }
+ });
+
+ impls
+ }
+}
diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs
new file mode 100644
index 000000000..f33f5d27d
--- /dev/null
+++ b/src/librustdoc/clean/cfg.rs
@@ -0,0 +1,588 @@
+//! The representation of a `#[doc(cfg(...))]` attribute.
+
+// FIXME: Once the portability lint RFC is implemented (see tracking issue #41619),
+// switch to use those structures instead.
+
+use std::fmt::{self, Write};
+use std::mem;
+use std::ops;
+
+use rustc_ast::{LitKind, MetaItem, MetaItemKind, NestedMetaItem};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_feature::Features;
+use rustc_session::parse::ParseSess;
+use rustc_span::symbol::{sym, Symbol};
+
+use rustc_span::Span;
+
+use crate::html::escape::Escape;
+
+#[cfg(test)]
+mod tests;
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub(crate) enum Cfg {
+ /// Accepts all configurations.
+ True,
+ /// Denies all configurations.
+ False,
+ /// A generic configuration option, e.g., `test` or `target_os = "linux"`.
+ Cfg(Symbol, Option<Symbol>),
+ /// Negates a configuration requirement, i.e., `not(x)`.
+ Not(Box<Cfg>),
+ /// Union of a list of configuration requirements, i.e., `any(...)`.
+ Any(Vec<Cfg>),
+ /// Intersection of a list of configuration requirements, i.e., `all(...)`.
+ All(Vec<Cfg>),
+}
+
+#[derive(PartialEq, Debug)]
+pub(crate) struct InvalidCfgError {
+ pub(crate) msg: &'static str,
+ pub(crate) span: Span,
+}
+
+impl Cfg {
+ /// Parses a `NestedMetaItem` into a `Cfg`.
+ fn parse_nested(
+ nested_cfg: &NestedMetaItem,
+ exclude: &FxHashSet<Cfg>,
+ ) -> Result<Option<Cfg>, InvalidCfgError> {
+ match nested_cfg {
+ NestedMetaItem::MetaItem(ref cfg) => Cfg::parse_without(cfg, exclude),
+ NestedMetaItem::Literal(ref lit) => {
+ Err(InvalidCfgError { msg: "unexpected literal", span: lit.span })
+ }
+ }
+ }
+
+ pub(crate) fn parse_without(
+ cfg: &MetaItem,
+ exclude: &FxHashSet<Cfg>,
+ ) -> Result<Option<Cfg>, InvalidCfgError> {
+ let name = match cfg.ident() {
+ Some(ident) => ident.name,
+ None => {
+ return Err(InvalidCfgError {
+ msg: "expected a single identifier",
+ span: cfg.span,
+ });
+ }
+ };
+ match cfg.kind {
+ MetaItemKind::Word => {
+ let cfg = Cfg::Cfg(name, None);
+ if exclude.contains(&cfg) { Ok(None) } else { Ok(Some(cfg)) }
+ }
+ MetaItemKind::NameValue(ref lit) => match lit.kind {
+ LitKind::Str(value, _) => {
+ let cfg = Cfg::Cfg(name, Some(value));
+ if exclude.contains(&cfg) { Ok(None) } else { Ok(Some(cfg)) }
+ }
+ _ => Err(InvalidCfgError {
+ // FIXME: if the main #[cfg] syntax decided to support non-string literals,
+ // this should be changed as well.
+ msg: "value of cfg option should be a string literal",
+ span: lit.span,
+ }),
+ },
+ MetaItemKind::List(ref items) => {
+ let orig_len = items.len();
+ let sub_cfgs =
+ items.iter().filter_map(|i| Cfg::parse_nested(i, exclude).transpose());
+ let ret = match name {
+ sym::all => sub_cfgs.fold(Ok(Cfg::True), |x, y| Ok(x? & y?)),
+ sym::any => sub_cfgs.fold(Ok(Cfg::False), |x, y| Ok(x? | y?)),
+ sym::not => {
+ if orig_len == 1 {
+ let mut sub_cfgs = sub_cfgs.collect::<Vec<_>>();
+ if sub_cfgs.len() == 1 {
+ Ok(!sub_cfgs.pop().unwrap()?)
+ } else {
+ return Ok(None);
+ }
+ } else {
+ Err(InvalidCfgError { msg: "expected 1 cfg-pattern", span: cfg.span })
+ }
+ }
+ _ => Err(InvalidCfgError { msg: "invalid predicate", span: cfg.span }),
+ };
+ match ret {
+ Ok(c) => Ok(Some(c)),
+ Err(e) => Err(e),
+ }
+ }
+ }
+ }
+
+ /// Parses a `MetaItem` into a `Cfg`.
+ ///
+ /// The `MetaItem` should be the content of the `#[cfg(...)]`, e.g., `unix` or
+ /// `target_os = "redox"`.
+ ///
+ /// If the content is not properly formatted, it will return an error indicating what and where
+ /// the error is.
+ pub(crate) fn parse(cfg: &MetaItem) -> Result<Cfg, InvalidCfgError> {
+ Self::parse_without(cfg, &FxHashSet::default()).map(|ret| ret.unwrap())
+ }
+
+ /// Checks whether the given configuration can be matched in the current session.
+ ///
+ /// Equivalent to `attr::cfg_matches`.
+ // FIXME: Actually make use of `features`.
+ pub(crate) fn matches(&self, parse_sess: &ParseSess, features: Option<&Features>) -> bool {
+ match *self {
+ Cfg::False => false,
+ Cfg::True => true,
+ Cfg::Not(ref child) => !child.matches(parse_sess, features),
+ Cfg::All(ref sub_cfgs) => {
+ sub_cfgs.iter().all(|sub_cfg| sub_cfg.matches(parse_sess, features))
+ }
+ Cfg::Any(ref sub_cfgs) => {
+ sub_cfgs.iter().any(|sub_cfg| sub_cfg.matches(parse_sess, features))
+ }
+ Cfg::Cfg(name, value) => parse_sess.config.contains(&(name, value)),
+ }
+ }
+
+ /// Whether the configuration consists of just `Cfg` or `Not`.
+ fn is_simple(&self) -> bool {
+ match *self {
+ Cfg::False | Cfg::True | Cfg::Cfg(..) | Cfg::Not(..) => true,
+ Cfg::All(..) | Cfg::Any(..) => false,
+ }
+ }
+
+ /// Whether the configuration consists of just `Cfg`, `Not` or `All`.
+ fn is_all(&self) -> bool {
+ match *self {
+ Cfg::False | Cfg::True | Cfg::Cfg(..) | Cfg::Not(..) | Cfg::All(..) => true,
+ Cfg::Any(..) => false,
+ }
+ }
+
+ /// Renders the configuration for human display, as a short HTML description.
+ pub(crate) fn render_short_html(&self) -> String {
+ let mut msg = Display(self, Format::ShortHtml).to_string();
+ if self.should_capitalize_first_letter() {
+ if let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) {
+ msg[i..i + 1].make_ascii_uppercase();
+ }
+ }
+ msg
+ }
+
+ /// Renders the configuration for long display, as a long HTML description.
+ pub(crate) fn render_long_html(&self) -> String {
+ let on = if self.should_use_with_in_description() { "with" } else { "on" };
+
+ let mut msg =
+ format!("Available {on} <strong>{}</strong>", Display(self, Format::LongHtml));
+ if self.should_append_only_to_description() {
+ msg.push_str(" only");
+ }
+ msg.push('.');
+ msg
+ }
+
+ /// Renders the configuration for long display, as a long plain text description.
+ pub(crate) fn render_long_plain(&self) -> String {
+ let on = if self.should_use_with_in_description() { "with" } else { "on" };
+
+ let mut msg = format!("Available {on} {}", Display(self, Format::LongPlain));
+ if self.should_append_only_to_description() {
+ msg.push_str(" only");
+ }
+ msg
+ }
+
+ fn should_capitalize_first_letter(&self) -> bool {
+ match *self {
+ Cfg::False | Cfg::True | Cfg::Not(..) => true,
+ Cfg::Any(ref sub_cfgs) | Cfg::All(ref sub_cfgs) => {
+ sub_cfgs.first().map(Cfg::should_capitalize_first_letter).unwrap_or(false)
+ }
+ Cfg::Cfg(name, _) => name == sym::debug_assertions || name == sym::target_endian,
+ }
+ }
+
+ fn should_append_only_to_description(&self) -> bool {
+ match *self {
+ Cfg::False | Cfg::True => false,
+ Cfg::Any(..) | Cfg::All(..) | Cfg::Cfg(..) => true,
+ Cfg::Not(box Cfg::Cfg(..)) => true,
+ Cfg::Not(..) => false,
+ }
+ }
+
+ fn should_use_with_in_description(&self) -> bool {
+ matches!(self, Cfg::Cfg(sym::target_feature, _))
+ }
+
+ /// Attempts to simplify this cfg by assuming that `assume` is already known to be true. Returns
+ /// `None` if simplification managed to completely eliminate any requirements from this
+ /// `Cfg`.
+ ///
+ /// See `tests::test_simplify_with` for examples.
+ pub(crate) fn simplify_with(&self, assume: &Cfg) -> Option<Cfg> {
+ if self == assume {
+ return None;
+ }
+
+ if let Cfg::All(a) = self {
+ let mut sub_cfgs: Vec<Cfg> = if let Cfg::All(b) = assume {
+ a.iter().filter(|a| !b.contains(a)).cloned().collect()
+ } else {
+ a.iter().filter(|&a| a != assume).cloned().collect()
+ };
+ let len = sub_cfgs.len();
+ return match len {
+ 0 => None,
+ 1 => sub_cfgs.pop(),
+ _ => Some(Cfg::All(sub_cfgs)),
+ };
+ } else if let Cfg::All(b) = assume {
+ if b.contains(self) {
+ return None;
+ }
+ }
+
+ Some(self.clone())
+ }
+}
+
+impl ops::Not for Cfg {
+ type Output = Cfg;
+ fn not(self) -> Cfg {
+ match self {
+ Cfg::False => Cfg::True,
+ Cfg::True => Cfg::False,
+ Cfg::Not(cfg) => *cfg,
+ s => Cfg::Not(Box::new(s)),
+ }
+ }
+}
+
+impl ops::BitAndAssign for Cfg {
+ fn bitand_assign(&mut self, other: Cfg) {
+ match (self, other) {
+ (&mut Cfg::False, _) | (_, Cfg::True) => {}
+ (s, Cfg::False) => *s = Cfg::False,
+ (s @ &mut Cfg::True, b) => *s = b,
+ (&mut Cfg::All(ref mut a), Cfg::All(ref mut b)) => {
+ for c in b.drain(..) {
+ if !a.contains(&c) {
+ a.push(c);
+ }
+ }
+ }
+ (&mut Cfg::All(ref mut a), ref mut b) => {
+ if !a.contains(b) {
+ a.push(mem::replace(b, Cfg::True));
+ }
+ }
+ (s, Cfg::All(mut a)) => {
+ let b = mem::replace(s, Cfg::True);
+ if !a.contains(&b) {
+ a.push(b);
+ }
+ *s = Cfg::All(a);
+ }
+ (s, b) => {
+ if *s != b {
+ let a = mem::replace(s, Cfg::True);
+ *s = Cfg::All(vec![a, b]);
+ }
+ }
+ }
+ }
+}
+
+impl ops::BitAnd for Cfg {
+ type Output = Cfg;
+ fn bitand(mut self, other: Cfg) -> Cfg {
+ self &= other;
+ self
+ }
+}
+
+impl ops::BitOrAssign for Cfg {
+ fn bitor_assign(&mut self, other: Cfg) {
+ match (self, other) {
+ (Cfg::True, _) | (_, Cfg::False) | (_, Cfg::True) => {}
+ (s @ &mut Cfg::False, b) => *s = b,
+ (&mut Cfg::Any(ref mut a), Cfg::Any(ref mut b)) => {
+ for c in b.drain(..) {
+ if !a.contains(&c) {
+ a.push(c);
+ }
+ }
+ }
+ (&mut Cfg::Any(ref mut a), ref mut b) => {
+ if !a.contains(b) {
+ a.push(mem::replace(b, Cfg::True));
+ }
+ }
+ (s, Cfg::Any(mut a)) => {
+ let b = mem::replace(s, Cfg::True);
+ if !a.contains(&b) {
+ a.push(b);
+ }
+ *s = Cfg::Any(a);
+ }
+ (s, b) => {
+ if *s != b {
+ let a = mem::replace(s, Cfg::True);
+ *s = Cfg::Any(vec![a, b]);
+ }
+ }
+ }
+ }
+}
+
+impl ops::BitOr for Cfg {
+ type Output = Cfg;
+ fn bitor(mut self, other: Cfg) -> Cfg {
+ self |= other;
+ self
+ }
+}
+
+#[derive(Clone, Copy)]
+enum Format {
+ LongHtml,
+ LongPlain,
+ ShortHtml,
+}
+
+impl Format {
+ fn is_long(self) -> bool {
+ match self {
+ Format::LongHtml | Format::LongPlain => true,
+ Format::ShortHtml => false,
+ }
+ }
+
+ fn is_html(self) -> bool {
+ match self {
+ Format::LongHtml | Format::ShortHtml => true,
+ Format::LongPlain => false,
+ }
+ }
+}
+
+/// Pretty-print wrapper for a `Cfg`. Also indicates what form of rendering should be used.
+struct Display<'a>(&'a Cfg, Format);
+
+fn write_with_opt_paren<T: fmt::Display>(
+ fmt: &mut fmt::Formatter<'_>,
+ has_paren: bool,
+ obj: T,
+) -> fmt::Result {
+ if has_paren {
+ fmt.write_char('(')?;
+ }
+ obj.fmt(fmt)?;
+ if has_paren {
+ fmt.write_char(')')?;
+ }
+ Ok(())
+}
+
+impl<'a> fmt::Display for Display<'a> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self.0 {
+ Cfg::Not(ref child) => match **child {
+ Cfg::Any(ref sub_cfgs) => {
+ let separator =
+ if sub_cfgs.iter().all(Cfg::is_simple) { " nor " } else { ", nor " };
+ for (i, sub_cfg) in sub_cfgs.iter().enumerate() {
+ fmt.write_str(if i == 0 { "neither " } else { separator })?;
+ write_with_opt_paren(fmt, !sub_cfg.is_all(), Display(sub_cfg, self.1))?;
+ }
+ Ok(())
+ }
+ ref simple @ Cfg::Cfg(..) => write!(fmt, "non-{}", Display(simple, self.1)),
+ ref c => write!(fmt, "not ({})", Display(c, self.1)),
+ },
+
+ Cfg::Any(ref sub_cfgs) => {
+ let separator = if sub_cfgs.iter().all(Cfg::is_simple) { " or " } else { ", or " };
+
+ let short_longhand = self.1.is_long() && {
+ let all_crate_features = sub_cfgs
+ .iter()
+ .all(|sub_cfg| matches!(sub_cfg, Cfg::Cfg(sym::feature, Some(_))));
+ let all_target_features = sub_cfgs
+ .iter()
+ .all(|sub_cfg| matches!(sub_cfg, Cfg::Cfg(sym::target_feature, Some(_))));
+
+ if all_crate_features {
+ fmt.write_str("crate features ")?;
+ true
+ } else if all_target_features {
+ fmt.write_str("target features ")?;
+ true
+ } else {
+ false
+ }
+ };
+
+ for (i, sub_cfg) in sub_cfgs.iter().enumerate() {
+ if i != 0 {
+ fmt.write_str(separator)?;
+ }
+ if let (true, Cfg::Cfg(_, Some(feat))) = (short_longhand, sub_cfg) {
+ if self.1.is_html() {
+ write!(fmt, "<code>{}</code>", feat)?;
+ } else {
+ write!(fmt, "`{}`", feat)?;
+ }
+ } else {
+ write_with_opt_paren(fmt, !sub_cfg.is_all(), Display(sub_cfg, self.1))?;
+ }
+ }
+ Ok(())
+ }
+
+ Cfg::All(ref sub_cfgs) => {
+ let short_longhand = self.1.is_long() && {
+ let all_crate_features = sub_cfgs
+ .iter()
+ .all(|sub_cfg| matches!(sub_cfg, Cfg::Cfg(sym::feature, Some(_))));
+ let all_target_features = sub_cfgs
+ .iter()
+ .all(|sub_cfg| matches!(sub_cfg, Cfg::Cfg(sym::target_feature, Some(_))));
+
+ if all_crate_features {
+ fmt.write_str("crate features ")?;
+ true
+ } else if all_target_features {
+ fmt.write_str("target features ")?;
+ true
+ } else {
+ false
+ }
+ };
+
+ for (i, sub_cfg) in sub_cfgs.iter().enumerate() {
+ if i != 0 {
+ fmt.write_str(" and ")?;
+ }
+ if let (true, Cfg::Cfg(_, Some(feat))) = (short_longhand, sub_cfg) {
+ if self.1.is_html() {
+ write!(fmt, "<code>{}</code>", feat)?;
+ } else {
+ write!(fmt, "`{}`", feat)?;
+ }
+ } else {
+ write_with_opt_paren(fmt, !sub_cfg.is_simple(), Display(sub_cfg, self.1))?;
+ }
+ }
+ Ok(())
+ }
+
+ Cfg::True => fmt.write_str("everywhere"),
+ Cfg::False => fmt.write_str("nowhere"),
+
+ Cfg::Cfg(name, value) => {
+ let human_readable = match (name, value) {
+ (sym::unix, None) => "Unix",
+ (sym::windows, None) => "Windows",
+ (sym::debug_assertions, None) => "debug-assertions enabled",
+ (sym::target_os, Some(os)) => match os.as_str() {
+ "android" => "Android",
+ "dragonfly" => "DragonFly BSD",
+ "emscripten" => "Emscripten",
+ "freebsd" => "FreeBSD",
+ "fuchsia" => "Fuchsia",
+ "haiku" => "Haiku",
+ "hermit" => "HermitCore",
+ "illumos" => "illumos",
+ "ios" => "iOS",
+ "l4re" => "L4Re",
+ "linux" => "Linux",
+ "macos" => "macOS",
+ "netbsd" => "NetBSD",
+ "openbsd" => "OpenBSD",
+ "redox" => "Redox",
+ "solaris" => "Solaris",
+ "wasi" => "WASI",
+ "windows" => "Windows",
+ _ => "",
+ },
+ (sym::target_arch, Some(arch)) => match arch.as_str() {
+ "aarch64" => "AArch64",
+ "arm" => "ARM",
+ "asmjs" => "JavaScript",
+ "m68k" => "M68k",
+ "mips" => "MIPS",
+ "mips64" => "MIPS-64",
+ "msp430" => "MSP430",
+ "powerpc" => "PowerPC",
+ "powerpc64" => "PowerPC-64",
+ "riscv32" => "RISC-V RV32",
+ "riscv64" => "RISC-V RV64",
+ "s390x" => "s390x",
+ "sparc64" => "SPARC64",
+ "wasm32" | "wasm64" => "WebAssembly",
+ "x86" => "x86",
+ "x86_64" => "x86-64",
+ _ => "",
+ },
+ (sym::target_vendor, Some(vendor)) => match vendor.as_str() {
+ "apple" => "Apple",
+ "pc" => "PC",
+ "sun" => "Sun",
+ "fortanix" => "Fortanix",
+ _ => "",
+ },
+ (sym::target_env, Some(env)) => match env.as_str() {
+ "gnu" => "GNU",
+ "msvc" => "MSVC",
+ "musl" => "musl",
+ "newlib" => "Newlib",
+ "uclibc" => "uClibc",
+ "sgx" => "SGX",
+ _ => "",
+ },
+ (sym::target_endian, Some(endian)) => return write!(fmt, "{}-endian", endian),
+ (sym::target_pointer_width, Some(bits)) => return write!(fmt, "{}-bit", bits),
+ (sym::target_feature, Some(feat)) => match self.1 {
+ Format::LongHtml => {
+ return write!(fmt, "target feature <code>{}</code>", feat);
+ }
+ Format::LongPlain => return write!(fmt, "target feature `{}`", feat),
+ Format::ShortHtml => return write!(fmt, "<code>{}</code>", feat),
+ },
+ (sym::feature, Some(feat)) => match self.1 {
+ Format::LongHtml => {
+ return write!(fmt, "crate feature <code>{}</code>", feat);
+ }
+ Format::LongPlain => return write!(fmt, "crate feature `{}`", feat),
+ Format::ShortHtml => return write!(fmt, "<code>{}</code>", feat),
+ },
+ _ => "",
+ };
+ if !human_readable.is_empty() {
+ fmt.write_str(human_readable)
+ } else if let Some(v) = value {
+ if self.1.is_html() {
+ write!(
+ fmt,
+ r#"<code>{}="{}"</code>"#,
+ Escape(name.as_str()),
+ Escape(v.as_str())
+ )
+ } else {
+ write!(fmt, r#"`{}="{}"`"#, name, v)
+ }
+ } else if self.1.is_html() {
+ write!(fmt, "<code>{}</code>", Escape(name.as_str()))
+ } else {
+ write!(fmt, "`{}`", name)
+ }
+ }
+ }
+ }
+}
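
The `Not`/`BitAnd`/`BitOr` impls above combine `#[doc(cfg(...))]` clauses while flattening nested `all(...)`/`any(...)` lists. A minimal standalone sketch of that flattening idea, using plain strings instead of the rustc-internal `Symbol` type and skipping the duplicate elimination the real operators perform:

use std::ops;

#[derive(Clone, Debug, PartialEq)]
enum Cfg {
    True,
    False,
    Name(&'static str),
    Not(Box<Cfg>),
    All(Vec<Cfg>),
}

impl ops::Not for Cfg {
    type Output = Cfg;
    fn not(self) -> Cfg {
        match self {
            Cfg::False => Cfg::True,
            Cfg::True => Cfg::False,
            Cfg::Not(inner) => *inner, // double negation cancels
            other => Cfg::Not(Box::new(other)),
        }
    }
}

impl ops::BitAnd for Cfg {
    type Output = Cfg;
    fn bitand(self, other: Cfg) -> Cfg {
        match (self, other) {
            // `False` is absorbing, `True` is the identity element.
            (Cfg::False, _) | (_, Cfg::False) => Cfg::False,
            (Cfg::True, x) | (x, Cfg::True) => x,
            // Merge nested `all(...)` lists instead of nesting them.
            (Cfg::All(mut a), Cfg::All(b)) => {
                a.extend(b);
                Cfg::All(a)
            }
            (Cfg::All(mut a), x) | (x, Cfg::All(mut a)) => {
                a.push(x);
                Cfg::All(a)
            }
            (a, b) => Cfg::All(vec![a, b]),
        }
    }
}

fn main() {
    let unix = Cfg::Name("unix");
    let feature = Cfg::Name("feature = \"foo\"");
    assert_eq!(!!unix.clone(), unix.clone());
    assert_eq!(unix.clone() & Cfg::True, unix.clone());
    assert_eq!(
        unix & feature,
        Cfg::All(vec![Cfg::Name("unix"), Cfg::Name("feature = \"foo\"")])
    );
}
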
diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs
new file mode 100644
index 000000000..7f72d5d39
--- /dev/null
+++ b/src/librustdoc/clean/cfg/tests.rs
@@ -0,0 +1,467 @@
+use super::*;
+
+use rustc_ast::attr;
+use rustc_ast::Path;
+use rustc_span::create_default_session_globals_then;
+use rustc_span::symbol::{Ident, Symbol};
+use rustc_span::DUMMY_SP;
+
+fn word_cfg(s: &str) -> Cfg {
+ Cfg::Cfg(Symbol::intern(s), None)
+}
+
+fn name_value_cfg(name: &str, value: &str) -> Cfg {
+ Cfg::Cfg(Symbol::intern(name), Some(Symbol::intern(value)))
+}
+
+fn dummy_meta_item_word(name: &str) -> MetaItem {
+ MetaItem {
+ path: Path::from_ident(Ident::from_str(name)),
+ kind: MetaItemKind::Word,
+ span: DUMMY_SP,
+ }
+}
+
+macro_rules! dummy_meta_item_list {
+ ($name:ident, [$($list:ident),* $(,)?]) => {
+ MetaItem {
+ path: Path::from_ident(Ident::from_str(stringify!($name))),
+ kind: MetaItemKind::List(vec![
+ $(
+ NestedMetaItem::MetaItem(
+ dummy_meta_item_word(stringify!($list)),
+ ),
+ )*
+ ]),
+ span: DUMMY_SP,
+ }
+ };
+
+ ($name:ident, [$($list:expr),* $(,)?]) => {
+ MetaItem {
+ path: Path::from_ident(Ident::from_str(stringify!($name))),
+ kind: MetaItemKind::List(vec![
+ $(
+ NestedMetaItem::MetaItem($list),
+ )*
+ ]),
+ span: DUMMY_SP,
+ }
+ };
+}
+
+#[test]
+fn test_cfg_not() {
+ create_default_session_globals_then(|| {
+ assert_eq!(!Cfg::False, Cfg::True);
+ assert_eq!(!Cfg::True, Cfg::False);
+ assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test"))));
+ assert_eq!(
+ !Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
+ Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")])))
+ );
+ assert_eq!(
+ !Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
+ Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")])))
+ );
+ assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test"));
+ })
+}
+
+#[test]
+fn test_cfg_and() {
+ create_default_session_globals_then(|| {
+ let mut x = Cfg::False;
+ x &= Cfg::True;
+ assert_eq!(x, Cfg::False);
+
+ x = word_cfg("test");
+ x &= Cfg::False;
+ assert_eq!(x, Cfg::False);
+
+ x = word_cfg("test2");
+ x &= Cfg::True;
+ assert_eq!(x, word_cfg("test2"));
+
+ x = Cfg::True;
+ x &= word_cfg("test3");
+ assert_eq!(x, word_cfg("test3"));
+
+ x &= word_cfg("test3");
+ assert_eq!(x, word_cfg("test3"));
+
+ x &= word_cfg("test4");
+ assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")]));
+
+ x &= word_cfg("test4");
+ assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")]));
+
+ x &= word_cfg("test5");
+ assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
+
+ x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]);
+ assert_eq!(
+ x,
+ Cfg::All(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ ])
+ );
+
+ x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]);
+ assert_eq!(
+ x,
+ Cfg::All(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ ])
+ );
+
+ let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]);
+ y &= x;
+ assert_eq!(
+ y,
+ Cfg::All(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
+ ])
+ );
+
+ let mut z = word_cfg("test8");
+ z &= Cfg::All(vec![word_cfg("test9"), word_cfg("test10")]);
+ assert_eq!(z, Cfg::All(vec![word_cfg("test9"), word_cfg("test10"), word_cfg("test8")]));
+
+ let mut z = word_cfg("test11");
+ z &= Cfg::All(vec![word_cfg("test11"), word_cfg("test12")]);
+ assert_eq!(z, Cfg::All(vec![word_cfg("test11"), word_cfg("test12")]));
+
+ assert_eq!(
+ word_cfg("a") & word_cfg("b") & word_cfg("c"),
+ Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
+ );
+ })
+}
+
+#[test]
+fn test_cfg_or() {
+ create_default_session_globals_then(|| {
+ let mut x = Cfg::True;
+ x |= Cfg::False;
+ assert_eq!(x, Cfg::True);
+
+ x = word_cfg("test");
+ x |= Cfg::True;
+ assert_eq!(x, word_cfg("test"));
+
+ x = word_cfg("test2");
+ x |= Cfg::False;
+ assert_eq!(x, word_cfg("test2"));
+
+ x = Cfg::False;
+ x |= word_cfg("test3");
+ assert_eq!(x, word_cfg("test3"));
+
+ x |= word_cfg("test3");
+ assert_eq!(x, word_cfg("test3"));
+
+ x |= word_cfg("test4");
+ assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")]));
+
+ x |= word_cfg("test4");
+ assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")]));
+
+ x |= word_cfg("test5");
+ assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
+
+ x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]);
+ assert_eq!(
+ x,
+ Cfg::Any(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ ])
+ );
+
+ x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]);
+ assert_eq!(
+ x,
+ Cfg::Any(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ ])
+ );
+
+ let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]);
+ y |= x;
+ assert_eq!(
+ y,
+ Cfg::Any(vec![
+ word_cfg("test3"),
+ word_cfg("test4"),
+ word_cfg("test5"),
+ word_cfg("test6"),
+ word_cfg("test7"),
+ Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
+ ])
+ );
+
+ let mut z = word_cfg("test8");
+ z |= Cfg::Any(vec![word_cfg("test9"), word_cfg("test10")]);
+ assert_eq!(z, Cfg::Any(vec![word_cfg("test9"), word_cfg("test10"), word_cfg("test8")]));
+
+ let mut z = word_cfg("test11");
+ z |= Cfg::Any(vec![word_cfg("test11"), word_cfg("test12")]);
+ assert_eq!(z, Cfg::Any(vec![word_cfg("test11"), word_cfg("test12")]));
+
+ assert_eq!(
+ word_cfg("a") | word_cfg("b") | word_cfg("c"),
+ Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
+ );
+ })
+}
+
+#[test]
+fn test_parse_ok() {
+ create_default_session_globals_then(|| {
+ let mi = dummy_meta_item_word("all");
+ assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
+
+ let mi =
+ attr::mk_name_value_item_str(Ident::from_str("all"), Symbol::intern("done"), DUMMY_SP);
+ assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
+
+ let mi = dummy_meta_item_list!(all, [a, b]);
+ assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b")));
+
+ let mi = dummy_meta_item_list!(any, [a, b]);
+ assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b")));
+
+ let mi = dummy_meta_item_list!(not, [a]);
+ assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a")));
+
+ let mi = dummy_meta_item_list!(
+ not,
+ [dummy_meta_item_list!(
+ any,
+ [dummy_meta_item_word("a"), dummy_meta_item_list!(all, [b, c]),]
+ ),]
+ );
+ assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c")))));
+
+ let mi = dummy_meta_item_list!(all, [a, b, c]);
+ assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c")));
+ })
+}
+
+#[test]
+fn test_parse_err() {
+ create_default_session_globals_then(|| {
+ let mi = attr::mk_name_value_item(Ident::from_str("foo"), LitKind::Bool(false), DUMMY_SP);
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(not, [a, b]);
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(not, []);
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(foo, []);
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(
+ all,
+ [dummy_meta_item_list!(foo, []), dummy_meta_item_word("b"),]
+ );
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(
+ any,
+ [dummy_meta_item_word("a"), dummy_meta_item_list!(foo, []),]
+ );
+ assert!(Cfg::parse(&mi).is_err());
+
+ let mi = dummy_meta_item_list!(not, [dummy_meta_item_list!(foo, []),]);
+ assert!(Cfg::parse(&mi).is_err());
+ })
+}
+
+#[test]
+fn test_render_short_html() {
+ create_default_session_globals_then(|| {
+ assert_eq!(word_cfg("unix").render_short_html(), "Unix");
+ assert_eq!(name_value_cfg("target_os", "macos").render_short_html(), "macOS");
+ assert_eq!(name_value_cfg("target_pointer_width", "16").render_short_html(), "16-bit");
+ assert_eq!(name_value_cfg("target_endian", "little").render_short_html(), "Little-endian");
+ assert_eq!((!word_cfg("windows")).render_short_html(), "Non-Windows");
+ assert_eq!(
+ (word_cfg("unix") & word_cfg("windows")).render_short_html(),
+ "Unix and Windows"
+ );
+ assert_eq!((word_cfg("unix") | word_cfg("windows")).render_short_html(), "Unix or Windows");
+ assert_eq!(
+ (word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions"))
+ .render_short_html(),
+ "Unix and Windows and debug-assertions enabled"
+ );
+ assert_eq!(
+ (word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
+ .render_short_html(),
+ "Unix or Windows or debug-assertions enabled"
+ );
+ assert_eq!(
+ (!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")))
+ .render_short_html(),
+ "Neither Unix nor Windows nor debug-assertions enabled"
+ );
+ assert_eq!(
+ ((word_cfg("unix") & name_value_cfg("target_arch", "x86_64"))
+ | (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")))
+ .render_short_html(),
+ "Unix and x86-64, or Windows and 64-bit"
+ );
+ assert_eq!(
+ (!(word_cfg("unix") & word_cfg("windows"))).render_short_html(),
+ "Not (Unix and Windows)"
+ );
+ assert_eq!(
+ ((word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix"))
+ .render_short_html(),
+ "(Debug-assertions enabled or Windows) and Unix"
+ );
+ assert_eq!(
+ name_value_cfg("target_feature", "sse2").render_short_html(),
+ "<code>sse2</code>"
+ );
+ assert_eq!(
+ (name_value_cfg("target_arch", "x86_64") & name_value_cfg("target_feature", "sse2"))
+ .render_short_html(),
+ "x86-64 and <code>sse2</code>"
+ );
+ })
+}
+
+#[test]
+fn test_render_long_html() {
+ create_default_session_globals_then(|| {
+ assert_eq!(word_cfg("unix").render_long_html(), "Available on <strong>Unix</strong> only.");
+ assert_eq!(
+ name_value_cfg("target_os", "macos").render_long_html(),
+ "Available on <strong>macOS</strong> only."
+ );
+ assert_eq!(
+ name_value_cfg("target_os", "wasi").render_long_html(),
+ "Available on <strong>WASI</strong> only."
+ );
+ assert_eq!(
+ name_value_cfg("target_pointer_width", "16").render_long_html(),
+ "Available on <strong>16-bit</strong> only."
+ );
+ assert_eq!(
+ name_value_cfg("target_endian", "little").render_long_html(),
+ "Available on <strong>little-endian</strong> only."
+ );
+ assert_eq!(
+ (!word_cfg("windows")).render_long_html(),
+ "Available on <strong>non-Windows</strong> only."
+ );
+ assert_eq!(
+ (word_cfg("unix") & word_cfg("windows")).render_long_html(),
+ "Available on <strong>Unix and Windows</strong> only."
+ );
+ assert_eq!(
+ (word_cfg("unix") | word_cfg("windows")).render_long_html(),
+ "Available on <strong>Unix or Windows</strong> only."
+ );
+ assert_eq!(
+ (word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions"))
+ .render_long_html(),
+ "Available on <strong>Unix and Windows and debug-assertions enabled</strong> only."
+ );
+ assert_eq!(
+ (word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
+ .render_long_html(),
+ "Available on <strong>Unix or Windows or debug-assertions enabled</strong> only."
+ );
+ assert_eq!(
+ (!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")))
+ .render_long_html(),
+ "Available on <strong>neither Unix nor Windows nor debug-assertions enabled</strong>."
+ );
+ assert_eq!(
+ ((word_cfg("unix") & name_value_cfg("target_arch", "x86_64"))
+ | (word_cfg("windows") & name_value_cfg("target_pointer_width", "64")))
+ .render_long_html(),
+ "Available on <strong>Unix and x86-64, or Windows and 64-bit</strong> only."
+ );
+ assert_eq!(
+ (!(word_cfg("unix") & word_cfg("windows"))).render_long_html(),
+ "Available on <strong>not (Unix and Windows)</strong>."
+ );
+ assert_eq!(
+ ((word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix"))
+ .render_long_html(),
+ "Available on <strong>(debug-assertions enabled or Windows) and Unix</strong> only."
+ );
+ assert_eq!(
+ name_value_cfg("target_feature", "sse2").render_long_html(),
+ "Available with <strong>target feature <code>sse2</code></strong> only."
+ );
+ assert_eq!(
+ (name_value_cfg("target_arch", "x86_64") & name_value_cfg("target_feature", "sse2"))
+ .render_long_html(),
+ "Available on <strong>x86-64 and target feature <code>sse2</code></strong> only."
+ );
+ })
+}
+
+#[test]
+fn test_simplify_with() {
+ // This is a tiny subset of the things that could be simplified, but it likely covers 90% of
+ // real-world use cases well.
+ create_default_session_globals_then(|| {
+ let foo = word_cfg("foo");
+ let bar = word_cfg("bar");
+ let baz = word_cfg("baz");
+ let quux = word_cfg("quux");
+
+ let foobar = Cfg::All(vec![foo.clone(), bar.clone()]);
+ let barbaz = Cfg::All(vec![bar.clone(), baz.clone()]);
+ let foobarbaz = Cfg::All(vec![foo.clone(), bar.clone(), baz.clone()]);
+ let bazquux = Cfg::All(vec![baz.clone(), quux.clone()]);
+
+ // Unrelated cfgs don't affect each other
+ assert_eq!(foo.simplify_with(&bar).as_ref(), Some(&foo));
+ assert_eq!(foobar.simplify_with(&bazquux).as_ref(), Some(&foobar));
+
+ // Identical cfgs are eliminated
+ assert_eq!(foo.simplify_with(&foo), None);
+ assert_eq!(foobar.simplify_with(&foobar), None);
+
+ // Multiple cfgs eliminate a single assumed cfg
+ assert_eq!(foobar.simplify_with(&foo).as_ref(), Some(&bar));
+ assert_eq!(foobar.simplify_with(&bar).as_ref(), Some(&foo));
+
+ // A single cfg is eliminated by multiple assumed cfgs containing it
+ assert_eq!(foo.simplify_with(&foobar), None);
+
+ // Multiple cfgs eliminate the matching subset of multiple assumed cfgs
+ assert_eq!(foobar.simplify_with(&barbaz).as_ref(), Some(&foo));
+ assert_eq!(foobar.simplify_with(&foobarbaz), None);
+ });
+}
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
new file mode 100644
index 000000000..58d0aedb0
--- /dev/null
+++ b/src/librustdoc/clean/inline.rs
@@ -0,0 +1,727 @@
+//! Support for inlining external documentation into the current AST.
+
+use std::iter::once;
+use std::sync::Arc;
+
+use rustc_ast as ast;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::Mutability;
+use rustc_metadata::creader::{CStore, LoadedMacro};
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_span::hygiene::MacroKind;
+use rustc_span::symbol::{kw, sym, Symbol};
+
+use crate::clean::{
+ self, clean_fn_decl_from_did_and_sig, clean_middle_field, clean_middle_ty,
+ clean_trait_ref_with_bindings, clean_ty, clean_ty_generics, clean_variant_def,
+ clean_visibility, utils, Attributes, AttributesExt, Clean, ImplKind, ItemId, Type, Visibility,
+};
+use crate::core::DocContext;
+use crate::formats::item_type::ItemType;
+
+type Attrs<'hir> = &'hir [ast::Attribute];
+
+/// Attempt to inline a definition into this AST.
+///
+/// This function will fetch the definition specified, and if it is
+/// from another crate it will attempt to inline the documentation
+/// from the other crate into this crate.
+///
+/// This is primarily used for `pub use` statements which are, in general,
+/// implementation details. Inlining the documentation should help provide a
+/// better experience when reading the documentation in this use case.
+///
+/// The returned value is `None` if the definition could not be inlined,
+/// and `Some` of a vector of items if it was successfully expanded.
+///
+/// `parent_module` refers to the parent of the *re-export*, not the original item.
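+// Illustrative sketch (hypothetical crate and item names, not from this
+// commit): a re-export such as
+//
+//     pub use dep::widget::Widget;
+//
+// in crate `mycrate` reaches `try_inline` with `parent_module` set to the
+// module containing the `pub use`, `res` resolved to `dep::widget::Widget`,
+// and `name` set to `Widget`; on success the returned items are documented
+// as if `Widget` were defined in `mycrate`.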
+pub(crate) fn try_inline(
+ cx: &mut DocContext<'_>,
+ parent_module: DefId,
+ import_def_id: Option<DefId>,
+ res: Res,
+ name: Symbol,
+ attrs: Option<Attrs<'_>>,
+ visited: &mut FxHashSet<DefId>,
+) -> Option<Vec<clean::Item>> {
+ let did = res.opt_def_id()?;
+ if did.is_local() {
+ return None;
+ }
+ let mut ret = Vec::new();
+
+ debug!("attrs={:?}", attrs);
+ let attrs_clone = attrs;
+
+ let kind = match res {
+ Res::Def(DefKind::Trait, did) => {
+ record_extern_fqn(cx, did, ItemType::Trait);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::TraitItem(build_external_trait(cx, did))
+ }
+ Res::Def(DefKind::Fn, did) => {
+ record_extern_fqn(cx, did, ItemType::Function);
+ clean::FunctionItem(build_external_function(cx, did))
+ }
+ Res::Def(DefKind::Struct, did) => {
+ record_extern_fqn(cx, did, ItemType::Struct);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::StructItem(build_struct(cx, did))
+ }
+ Res::Def(DefKind::Union, did) => {
+ record_extern_fqn(cx, did, ItemType::Union);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::UnionItem(build_union(cx, did))
+ }
+ Res::Def(DefKind::TyAlias, did) => {
+ record_extern_fqn(cx, did, ItemType::Typedef);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::TypedefItem(build_type_alias(cx, did))
+ }
+ Res::Def(DefKind::Enum, did) => {
+ record_extern_fqn(cx, did, ItemType::Enum);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::EnumItem(build_enum(cx, did))
+ }
+ Res::Def(DefKind::ForeignTy, did) => {
+ record_extern_fqn(cx, did, ItemType::ForeignType);
+ build_impls(cx, Some(parent_module), did, attrs, &mut ret);
+ clean::ForeignTypeItem
+ }
+ // Never inline enum variants but leave them shown as re-exports.
+ Res::Def(DefKind::Variant, _) => return None,
+ // Assume that enum variants and struct types are re-exported next to
+ // their constructors.
+ Res::Def(DefKind::Ctor(..), _) | Res::SelfCtor(..) => return Some(Vec::new()),
+ Res::Def(DefKind::Mod, did) => {
+ record_extern_fqn(cx, did, ItemType::Module);
+ clean::ModuleItem(build_module(cx, did, visited))
+ }
+ Res::Def(DefKind::Static(_), did) => {
+ record_extern_fqn(cx, did, ItemType::Static);
+ clean::StaticItem(build_static(cx, did, cx.tcx.is_mutable_static(did)))
+ }
+ Res::Def(DefKind::Const, did) => {
+ record_extern_fqn(cx, did, ItemType::Constant);
+ clean::ConstantItem(build_const(cx, did))
+ }
+ Res::Def(DefKind::Macro(kind), did) => {
+ let mac = build_macro(cx, did, name, import_def_id);
+
+ let type_kind = match kind {
+ MacroKind::Bang => ItemType::Macro,
+ MacroKind::Attr => ItemType::ProcAttribute,
+ MacroKind::Derive => ItemType::ProcDerive,
+ };
+ record_extern_fqn(cx, did, type_kind);
+ mac
+ }
+ _ => return None,
+ };
+
+ let (attrs, cfg) = merge_attrs(cx, Some(parent_module), load_attrs(cx, did), attrs_clone);
+ cx.inlined.insert(did.into());
+ let mut item = clean::Item::from_def_id_and_attrs_and_parts(
+ did,
+ Some(name),
+ kind,
+ Box::new(attrs),
+ cx,
+ cfg,
+ );
+ if let Some(import_def_id) = import_def_id {
+ // The visibility needs to reflect the one from the re-export and not from the "source" DefId.
+ item.visibility = clean_visibility(cx.tcx.visibility(import_def_id));
+ }
+ ret.push(item);
+ Some(ret)
+}
+
+pub(crate) fn try_inline_glob(
+ cx: &mut DocContext<'_>,
+ res: Res,
+ visited: &mut FxHashSet<DefId>,
+ inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
+) -> Option<Vec<clean::Item>> {
+ let did = res.opt_def_id()?;
+ if did.is_local() {
+ return None;
+ }
+
+ match res {
+ Res::Def(DefKind::Mod, did) => {
+ let mut items = build_module_items(cx, did, visited, inlined_names);
+ items.drain_filter(|item| {
+ if let Some(name) = item.name {
+ // If an item with the same type and name already exists,
+ // it takes priority over the inlined stuff.
+ !inlined_names.insert((item.type_(), name))
+ } else {
+ false
+ }
+ });
+ Some(items)
+ }
+ // glob imports on things like enums aren't inlined even for local exports, so just bail
+ _ => None,
+ }
+}
+
+pub(crate) fn load_attrs<'hir>(cx: &DocContext<'hir>, did: DefId) -> Attrs<'hir> {
+ cx.tcx.get_attrs_unchecked(did)
+}
+
+/// Record an external fully qualified name in the external_paths cache.
+///
+/// These names are used later on by HTML rendering to generate things like
+/// source links back to the original item.
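+// Illustrative (hypothetical paths, not from this commit): for an item
+// `dep::module::Widget` in the external crate `dep`, the recorded path is
+// `["dep", "module", "Widget"]`. For an exported `macro_rules!` macro only
+// the last segment is kept, e.g. `["dep", "the_macro"]`, since such macros
+// are exported at the crate root regardless of where they are defined.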
+pub(crate) fn record_extern_fqn(cx: &mut DocContext<'_>, did: DefId, kind: ItemType) {
+ let crate_name = cx.tcx.crate_name(did.krate);
+
+ let relative =
+ cx.tcx.def_path(did).data.into_iter().filter_map(|elem| elem.data.get_opt_name());
+ let fqn = if let ItemType::Macro = kind {
+ // Check to see if it is a macro 2.0 or built-in macro
+ if matches!(
+ CStore::from_tcx(cx.tcx).load_macro_untracked(did, cx.sess()),
+ LoadedMacro::MacroDef(def, _)
+ if matches!(&def.kind, ast::ItemKind::MacroDef(ast_def)
+ if !ast_def.macro_rules)
+ ) {
+ once(crate_name).chain(relative).collect()
+ } else {
+ vec![crate_name, relative.last().expect("relative was empty")]
+ }
+ } else {
+ once(crate_name).chain(relative).collect()
+ };
+
+ if did.is_local() {
+ cx.cache.exact_paths.insert(did, fqn);
+ } else {
+ cx.cache.external_paths.insert(did, (fqn, kind));
+ }
+}
+
+pub(crate) fn build_external_trait(cx: &mut DocContext<'_>, did: DefId) -> clean::Trait {
+ let trait_items = cx
+ .tcx
+ .associated_items(did)
+ .in_definition_order()
+ .map(|item| {
+ // When building an external trait, the cleaned trait will have all items public,
+ // which causes methods to have a `pub` prefix, which is invalid since items in traits
+ // cannot have a visibility prefix. Thus we override the visibility here manually.
+ // See https://github.com/rust-lang/rust/issues/81274
+ clean::Item { visibility: Visibility::Inherited, ..item.clean(cx) }
+ })
+ .collect();
+
+ let predicates = cx.tcx.predicates_of(did);
+ let generics = clean_ty_generics(cx, cx.tcx.generics_of(did), predicates);
+ let generics = filter_non_trait_generics(did, generics);
+ let (generics, supertrait_bounds) = separate_supertrait_bounds(generics);
+ clean::Trait { def_id: did, generics, items: trait_items, bounds: supertrait_bounds }
+}
+
+fn build_external_function<'tcx>(cx: &mut DocContext<'tcx>, did: DefId) -> Box<clean::Function> {
+ let sig = cx.tcx.fn_sig(did);
+
+ let predicates = cx.tcx.predicates_of(did);
+ let (generics, decl) = clean::enter_impl_trait(cx, |cx| {
+ // NOTE: generics need to be cleaned before the decl!
+ let generics = clean_ty_generics(cx, cx.tcx.generics_of(did), predicates);
+ let decl = clean_fn_decl_from_did_and_sig(cx, Some(did), sig);
+ (generics, decl)
+ });
+ Box::new(clean::Function { decl, generics })
+}
+
+fn build_enum(cx: &mut DocContext<'_>, did: DefId) -> clean::Enum {
+ let predicates = cx.tcx.explicit_predicates_of(did);
+
+ clean::Enum {
+ generics: clean_ty_generics(cx, cx.tcx.generics_of(did), predicates),
+ variants: cx.tcx.adt_def(did).variants().iter().map(|v| clean_variant_def(v, cx)).collect(),
+ }
+}
+
+fn build_struct(cx: &mut DocContext<'_>, did: DefId) -> clean::Struct {
+ let predicates = cx.tcx.explicit_predicates_of(did);
+ let variant = cx.tcx.adt_def(did).non_enum_variant();
+
+ clean::Struct {
+ struct_type: variant.ctor_kind,
+ generics: clean_ty_generics(cx, cx.tcx.generics_of(did), predicates),
+ fields: variant.fields.iter().map(|x| clean_middle_field(x, cx)).collect(),
+ }
+}
+
+fn build_union(cx: &mut DocContext<'_>, did: DefId) -> clean::Union {
+ let predicates = cx.tcx.explicit_predicates_of(did);
+ let variant = cx.tcx.adt_def(did).non_enum_variant();
+
+ let generics = clean_ty_generics(cx, cx.tcx.generics_of(did), predicates);
+ let fields = variant.fields.iter().map(|x| clean_middle_field(x, cx)).collect();
+ clean::Union { generics, fields }
+}
+
+fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box<clean::Typedef> {
+ let predicates = cx.tcx.explicit_predicates_of(did);
+ let type_ = clean_middle_ty(cx.tcx.type_of(did), cx, Some(did));
+
+ Box::new(clean::Typedef {
+ type_,
+ generics: clean_ty_generics(cx, cx.tcx.generics_of(did), predicates),
+ item_type: None,
+ })
+}
+
+/// Builds all inherent implementations of an ADT (struct/union/enum) or Trait item/path/re-export.
+pub(crate) fn build_impls(
+ cx: &mut DocContext<'_>,
+ parent_module: Option<DefId>,
+ did: DefId,
+ attrs: Option<Attrs<'_>>,
+ ret: &mut Vec<clean::Item>,
+) {
+ let _prof_timer = cx.tcx.sess.prof.generic_activity("build_inherent_impls");
+ let tcx = cx.tcx;
+
+ // for each implementation of an item represented by `did`, build the clean::Item for that impl
+ for &did in tcx.inherent_impls(did).iter() {
+ build_impl(cx, parent_module, did, attrs, ret);
+ }
+}
+
+/// `parent_module` refers to the parent of the re-export, not the original item
+fn merge_attrs(
+ cx: &mut DocContext<'_>,
+ parent_module: Option<DefId>,
+ old_attrs: Attrs<'_>,
+ new_attrs: Option<Attrs<'_>>,
+) -> (clean::Attributes, Option<Arc<clean::cfg::Cfg>>) {
+ // NOTE: If we have additional attributes (from a re-export),
+ // always insert them first. This ensures that re-export
+ // doc comments show up before the original doc comments
+ // when we render them.
+ if let Some(inner) = new_attrs {
+ let mut both = inner.to_vec();
+ both.extend_from_slice(old_attrs);
+ (
+ if let Some(new_id) = parent_module {
+ Attributes::from_ast_with_additional(old_attrs, (inner, new_id))
+ } else {
+ Attributes::from_ast(&both)
+ },
+ both.cfg(cx.tcx, &cx.cache.hidden_cfg),
+ )
+ } else {
+ (Attributes::from_ast(&old_attrs), old_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg))
+ }
+}
+
+/// Inline an `impl`, inherent or of a trait. The `did` must be for an `impl`.
+pub(crate) fn build_impl(
+ cx: &mut DocContext<'_>,
+ parent_module: Option<DefId>,
+ did: DefId,
+ attrs: Option<Attrs<'_>>,
+ ret: &mut Vec<clean::Item>,
+) {
+ if !cx.inlined.insert(did.into()) {
+ return;
+ }
+
+ let _prof_timer = cx.tcx.sess.prof.generic_activity("build_impl");
+
+ let tcx = cx.tcx;
+ let associated_trait = tcx.impl_trait_ref(did);
+
+ // Only inline the impl if the implemented trait is
+ // reachable in the rustdoc-generated documentation.
+ if !did.is_local() {
+ if let Some(traitref) = associated_trait {
+ let did = traitref.def_id;
+ if !cx.cache.access_levels.is_public(did) {
+ return;
+ }
+
+ if let Some(stab) = tcx.lookup_stability(did) {
+ if stab.is_unstable() && stab.feature == sym::rustc_private {
+ return;
+ }
+ }
+ }
+ }
+
+ let impl_item = match did.as_local() {
+ Some(did) => match &tcx.hir().expect_item(did).kind {
+ hir::ItemKind::Impl(impl_) => Some(impl_),
+ _ => panic!("`DefId` passed to `build_impl` is not an `impl`"),
+ },
+ None => None,
+ };
+
+ let for_ = match &impl_item {
+ Some(impl_) => clean_ty(impl_.self_ty, cx),
+ None => clean_middle_ty(tcx.type_of(did), cx, Some(did)),
+ };
+
+ // Only inline the impl if the implementing type is
+ // reachable in the rustdoc-generated documentation.
+ if !did.is_local() {
+ if let Some(did) = for_.def_id(&cx.cache) {
+ if !cx.cache.access_levels.is_public(did) {
+ return;
+ }
+
+ if let Some(stab) = tcx.lookup_stability(did) {
+ if stab.is_unstable() && stab.feature == sym::rustc_private {
+ return;
+ }
+ }
+ }
+ }
+
+ let document_hidden = cx.render_options.document_hidden;
+ let predicates = tcx.explicit_predicates_of(did);
+ let (trait_items, generics) = match impl_item {
+ Some(impl_) => (
+ impl_
+ .items
+ .iter()
+ .map(|item| tcx.hir().impl_item(item.id))
+ .filter(|item| {
+ // Filter out impl items whose corresponding trait item has `doc(hidden)`
+ // so that such impl items are not documented.
+ // For inherent impls, we don't do any filtering, because that's already done in strip_hidden.rs.
+
+ // When `--document-hidden-items` is passed, we don't
+ // do any filtering either.
+ if document_hidden {
+ return true;
+ }
+ if let Some(associated_trait) = associated_trait {
+ let assoc_kind = match item.kind {
+ hir::ImplItemKind::Const(..) => ty::AssocKind::Const,
+ hir::ImplItemKind::Fn(..) => ty::AssocKind::Fn,
+ hir::ImplItemKind::TyAlias(..) => ty::AssocKind::Type,
+ };
+ let trait_item = tcx
+ .associated_items(associated_trait.def_id)
+ .find_by_name_and_kind(
+ tcx,
+ item.ident,
+ assoc_kind,
+ associated_trait.def_id,
+ )
+ .unwrap(); // SAFETY: For all impl items there exists a trait item with the same name.
+ !tcx.is_doc_hidden(trait_item.def_id)
+ } else {
+ true
+ }
+ })
+ .map(|item| item.clean(cx))
+ .collect::<Vec<_>>(),
+ impl_.generics.clean(cx),
+ ),
+ None => (
+ tcx.associated_items(did)
+ .in_definition_order()
+ .filter(|item| {
+ // If this is a trait impl, filter out associated items whose corresponding item
+ // in the associated trait is marked `doc(hidden)`.
+ // If this is an inherent impl, filter out private associated items.
+ if let Some(associated_trait) = associated_trait {
+ let trait_item = tcx
+ .associated_items(associated_trait.def_id)
+ .find_by_name_and_kind(
+ tcx,
+ item.ident(tcx),
+ item.kind,
+ associated_trait.def_id,
+ )
+ .unwrap(); // corresponding associated item has to exist
+ !tcx.is_doc_hidden(trait_item.def_id)
+ } else {
+ item.visibility(tcx).is_public()
+ }
+ })
+ .map(|item| item.clean(cx))
+ .collect::<Vec<_>>(),
+ clean::enter_impl_trait(cx, |cx| {
+ clean_ty_generics(cx, tcx.generics_of(did), predicates)
+ }),
+ ),
+ };
+ let polarity = tcx.impl_polarity(did);
+ let trait_ = associated_trait.map(|t| clean_trait_ref_with_bindings(cx, t, &[]));
+ if trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() {
+ super::build_deref_target_impls(cx, &trait_items, ret);
+ }
+
+ // Return early if the trait itself, the implementing type, or any of the generic parameter types is `doc(hidden)`.
+ let mut stack: Vec<&Type> = vec![&for_];
+
+ if let Some(did) = trait_.as_ref().map(|t| t.def_id()) {
+ if tcx.is_doc_hidden(did) {
+ return;
+ }
+ }
+ if let Some(generics) = trait_.as_ref().and_then(|t| t.generics()) {
+ stack.extend(generics);
+ }
+
+ while let Some(ty) = stack.pop() {
+ if let Some(did) = ty.def_id(&cx.cache) {
+ if tcx.is_doc_hidden(did) {
+ return;
+ }
+ }
+ if let Some(generics) = ty.generics() {
+ stack.extend(generics);
+ }
+ }
+
+ if let Some(did) = trait_.as_ref().map(|t| t.def_id()) {
+ record_extern_trait(cx, did);
+ }
+
+ let (merged_attrs, cfg) = merge_attrs(cx, parent_module, load_attrs(cx, did), attrs);
+ trace!("merged_attrs={:?}", merged_attrs);
+
+ trace!(
+ "build_impl: impl {:?} for {:?}",
+ trait_.as_ref().map(|t| t.def_id()),
+ for_.def_id(&cx.cache)
+ );
+ ret.push(clean::Item::from_def_id_and_attrs_and_parts(
+ did,
+ None,
+ clean::ImplItem(Box::new(clean::Impl {
+ unsafety: hir::Unsafety::Normal,
+ generics,
+ trait_,
+ for_,
+ items: trait_items,
+ polarity,
+ kind: if utils::has_doc_flag(tcx, did, sym::fake_variadic) {
+ ImplKind::FakeVaradic
+ } else {
+ ImplKind::Normal
+ },
+ })),
+ Box::new(merged_attrs),
+ cx,
+ cfg,
+ ));
+}
+
+fn build_module(
+ cx: &mut DocContext<'_>,
+ did: DefId,
+ visited: &mut FxHashSet<DefId>,
+) -> clean::Module {
+ let items = build_module_items(cx, did, visited, &mut FxHashSet::default());
+
+ let span = clean::Span::new(cx.tcx.def_span(did));
+ clean::Module { items, span }
+}
+
+fn build_module_items(
+ cx: &mut DocContext<'_>,
+ did: DefId,
+ visited: &mut FxHashSet<DefId>,
+ inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
+) -> Vec<clean::Item> {
+ let mut items = Vec::new();
+
+ // If we're re-exporting a re-export it may actually re-export something in
+ // two namespaces, so the target may be listed twice. Make sure we only
+ // visit each node at most once.
+ for &item in cx.tcx.module_children(did).iter() {
+ if item.vis.is_public() {
+ let res = item.res.expect_non_local();
+ if let Some(def_id) = res.mod_def_id() {
+ // If we're inlining a glob import, it's possible to have
+ // two distinct modules with the same name. We don't want to
+ // inline it, or mark any of its contents as visited.
+ if did == def_id
+ || inlined_names.contains(&(ItemType::Module, item.ident.name))
+ || !visited.insert(def_id)
+ {
+ continue;
+ }
+ }
+ if let Res::PrimTy(p) = res {
+ // Primitive types can't be inlined so generate an import instead.
+ let prim_ty = clean::PrimitiveType::from(p);
+ items.push(clean::Item {
+ name: None,
+ attrs: Box::new(clean::Attributes::default()),
+ item_id: ItemId::Primitive(prim_ty, did.krate),
+ visibility: clean::Public,
+ kind: Box::new(clean::ImportItem(clean::Import::new_simple(
+ item.ident.name,
+ clean::ImportSource {
+ path: clean::Path {
+ res,
+ segments: vec![clean::PathSegment {
+ name: prim_ty.as_sym(),
+ args: clean::GenericArgs::AngleBracketed {
+ args: Default::default(),
+ bindings: ThinVec::new(),
+ },
+ }],
+ },
+ did: None,
+ },
+ true,
+ ))),
+ cfg: None,
+ });
+ } else if let Some(i) = try_inline(cx, did, None, res, item.ident.name, None, visited) {
+ items.extend(i)
+ }
+ }
+ }
+
+ items
+}
+
+pub(crate) fn print_inlined_const(tcx: TyCtxt<'_>, did: DefId) -> String {
+ if let Some(did) = did.as_local() {
+ let hir_id = tcx.hir().local_def_id_to_hir_id(did);
+ rustc_hir_pretty::id_to_string(&tcx.hir(), hir_id)
+ } else {
+ tcx.rendered_const(did).clone()
+ }
+}
+
+fn build_const(cx: &mut DocContext<'_>, def_id: DefId) -> clean::Constant {
+ clean::Constant {
+ type_: clean_middle_ty(cx.tcx.type_of(def_id), cx, Some(def_id)),
+ kind: clean::ConstantKind::Extern { def_id },
+ }
+}
+
+fn build_static(cx: &mut DocContext<'_>, did: DefId, mutable: bool) -> clean::Static {
+ clean::Static {
+ type_: clean_middle_ty(cx.tcx.type_of(did), cx, Some(did)),
+ mutability: if mutable { Mutability::Mut } else { Mutability::Not },
+ expr: None,
+ }
+}
+
+fn build_macro(
+ cx: &mut DocContext<'_>,
+ def_id: DefId,
+ name: Symbol,
+ import_def_id: Option<DefId>,
+) -> clean::ItemKind {
+ match CStore::from_tcx(cx.tcx).load_macro_untracked(def_id, cx.sess()) {
+ LoadedMacro::MacroDef(item_def, _) => {
+ if let ast::ItemKind::MacroDef(ref def) = item_def.kind {
+ let vis = clean_visibility(cx.tcx.visibility(import_def_id.unwrap_or(def_id)));
+ clean::MacroItem(clean::Macro {
+ source: utils::display_macro_source(cx, name, def, def_id, vis),
+ })
+ } else {
+ unreachable!()
+ }
+ }
+ LoadedMacro::ProcMacro(ext) => clean::ProcMacroItem(clean::ProcMacro {
+ kind: ext.macro_kind(),
+ helpers: ext.helper_attrs,
+ }),
+ }
+}
+
+/// A trait's generics clause actually contains all of the predicates for all of
+/// its associated types as well. We specifically move these clauses to the
+/// associated types instead when displaying, so when we're generating the
+/// generics for the trait itself we need to be sure to remove them.
+/// We also need to remove the implied "recursive" Self: Trait bound.
+///
+/// The inverse of this filtering logic can be found in the `Clean`
+/// implementation for `AssociatedType`.
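+// Illustrative example (hypothetical trait, not from this commit): for
+//
+//     trait Foo where <Self as Foo>::Assoc: Clone { type Assoc; }
+//
+// the `<Self as Foo>::Assoc: Clone` predicate and the implied recursive
+// `Self: Foo` bound are removed from the trait's own generics here; the
+// associated-type bound is re-attached when `Assoc` itself is cleaned.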
+fn filter_non_trait_generics(trait_did: DefId, mut g: clean::Generics) -> clean::Generics {
+ for pred in &mut g.where_predicates {
+ match *pred {
+ clean::WherePredicate::BoundPredicate {
+ ty: clean::Generic(ref s),
+ ref mut bounds,
+ ..
+ } if *s == kw::SelfUpper => {
+ bounds.retain(|bound| match bound {
+ clean::GenericBound::TraitBound(clean::PolyTrait { trait_, .. }, _) => {
+ trait_.def_id() != trait_did
+ }
+ _ => true,
+ });
+ }
+ _ => {}
+ }
+ }
+
+ g.where_predicates.retain(|pred| match pred {
+ clean::WherePredicate::BoundPredicate {
+ ty: clean::QPath { self_type: box clean::Generic(ref s), trait_, .. },
+ bounds,
+ ..
+ } => !(bounds.is_empty() || *s == kw::SelfUpper && trait_.def_id() == trait_did),
+ _ => true,
+ });
+ g
+}
+
+/// Supertrait bounds for a trait are also listed in the generics coming from
+/// the metadata for a crate, so we want to separate those out and create a new
+/// list of explicit supertrait bounds to render nicely.
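+// Illustrative example (hypothetical trait, not from this commit): for
+// `trait Foo: Bar + Send {}` the metadata generics contain a
+// `Self: Bar + Send` where-predicate; it is split off here and returned as
+// explicit supertrait bounds so the item can be rendered as
+// `trait Foo: Bar + Send`.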
+fn separate_supertrait_bounds(
+ mut g: clean::Generics,
+) -> (clean::Generics, Vec<clean::GenericBound>) {
+ let mut ty_bounds = Vec::new();
+ g.where_predicates.retain(|pred| match *pred {
+ clean::WherePredicate::BoundPredicate { ty: clean::Generic(ref s), ref bounds, .. }
+ if *s == kw::SelfUpper =>
+ {
+ ty_bounds.extend(bounds.iter().cloned());
+ false
+ }
+ _ => true,
+ });
+ (g, ty_bounds)
+}
+
+pub(crate) fn record_extern_trait(cx: &mut DocContext<'_>, did: DefId) {
+ if did.is_local() {
+ return;
+ }
+
+ {
+ if cx.external_traits.borrow().contains_key(&did) || cx.active_extern_traits.contains(&did)
+ {
+ return;
+ }
+ }
+
+ {
+ cx.active_extern_traits.insert(did);
+ }
+
+ debug!("record_extern_trait: {:?}", did);
+ let trait_ = build_external_trait(cx, did);
+
+ let trait_ = clean::TraitWithExtraInfo {
+ trait_,
+ is_notable: clean::utils::has_doc_flag(cx.tcx, did, sym::notable_trait),
+ };
+ cx.external_traits.borrow_mut().insert(did, trait_);
+ cx.active_extern_traits.remove(&did);
+}
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
new file mode 100644
index 000000000..929f5f89b
--- /dev/null
+++ b/src/librustdoc/clean/mod.rs
@@ -0,0 +1,2242 @@
+//! This module contains the "cleaned" pieces of the AST, and the functions
+//! that clean them.
+
+mod auto_trait;
+mod blanket_impl;
+pub(crate) mod cfg;
+pub(crate) mod inline;
+mod render_macro_matchers;
+mod simplify;
+pub(crate) mod types;
+pub(crate) mod utils;
+
+use rustc_ast as ast;
+use rustc_attr as attr;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, DefKind, Res};
+use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::PredicateOrigin;
+use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData};
+use rustc_middle::middle::resolve_lifetime as rl;
+use rustc_middle::ty::fold::TypeFolder;
+use rustc_middle::ty::subst::{InternalSubsts, Subst};
+use rustc_middle::ty::{self, AdtKind, DefIdTree, EarlyBinder, Lift, Ty, TyCtxt};
+use rustc_middle::{bug, span_bug};
+use rustc_span::hygiene::{AstPass, MacroKind};
+use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::{self, ExpnKind};
+use rustc_typeck::hir_ty_to_ty;
+
+use std::assert_matches::assert_matches;
+use std::collections::hash_map::Entry;
+use std::collections::BTreeMap;
+use std::default::Default;
+use std::hash::Hash;
+use std::{mem, vec};
+
+use crate::core::{self, DocContext, ImplTraitParam};
+use crate::formats::item_type::ItemType;
+use crate::visit_ast::Module as DocModule;
+
+use utils::*;
+
+pub(crate) use self::types::*;
+pub(crate) use self::utils::{get_auto_trait_and_blanket_impls, krate, register_res};
+
+pub(crate) trait Clean<'tcx, T> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> T;
+}
+
+impl<'tcx> Clean<'tcx, Item> for DocModule<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Item {
+ let mut items: Vec<Item> = vec![];
+ let mut inserted = FxHashSet::default();
+ items.extend(self.foreigns.iter().map(|(item, renamed)| {
+ let item = clean_maybe_renamed_foreign_item(cx, item, *renamed);
+ if let Some(name) = item.name {
+ inserted.insert((item.type_(), name));
+ }
+ item
+ }));
+ items.extend(self.mods.iter().map(|x| {
+ inserted.insert((ItemType::Module, x.name));
+ x.clean(cx)
+ }));
+
+ // Split up imports from all other items.
+ //
+ // This covers the case where somebody does an import which should pull in an item,
+ // but there's already an item with the same namespace and same name. Rust gives
+ // priority to the not-imported one, so we should, too.
+ items.extend(self.items.iter().flat_map(|(item, renamed)| {
+ // First, lower everything other than imports.
+ if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
+ return Vec::new();
+ }
+ let v = clean_maybe_renamed_item(cx, item, *renamed);
+ for item in &v {
+ if let Some(name) = item.name {
+ inserted.insert((item.type_(), name));
+ }
+ }
+ v
+ }));
+ items.extend(self.items.iter().flat_map(|(item, renamed)| {
+ // Now we actually lower the imports, skipping everything else.
+ if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind {
+ let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id()));
+ clean_use_statement(item, name, path, hir::UseKind::Glob, cx, &mut inserted)
+ } else {
+ // skip everything else
+ Vec::new()
+ }
+ }));
+
+ // determine if we should display the inner contents or
+ // the outer `mod` item for the source code.
+
+ let span = Span::new({
+ let where_outer = self.where_outer(cx.tcx);
+ let sm = cx.sess().source_map();
+ let outer = sm.lookup_char_pos(where_outer.lo());
+ let inner = sm.lookup_char_pos(self.where_inner.lo());
+ if outer.file.start_pos == inner.file.start_pos {
+ // mod foo { ... }
+ where_outer
+ } else {
+ // mod foo; (and a separate SourceFile for the contents)
+ self.where_inner
+ }
+ });
+
+ Item::from_hir_id_and_parts(
+ self.id,
+ Some(self.name),
+ ModuleItem(Module { items, span }),
+ cx,
+ )
+ }
+}
+
+impl<'tcx> Clean<'tcx, Option<GenericBound>> for hir::GenericBound<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Option<GenericBound> {
+ Some(match *self {
+ hir::GenericBound::Outlives(lt) => GenericBound::Outlives(clean_lifetime(lt, cx)),
+ hir::GenericBound::LangItemTrait(lang_item, span, _, generic_args) => {
+ let def_id = cx.tcx.require_lang_item(lang_item, Some(span));
+
+ let trait_ref = ty::TraitRef::identity(cx.tcx, def_id).skip_binder();
+
+ let generic_args = generic_args.clean(cx);
+ let GenericArgs::AngleBracketed { bindings, .. } = generic_args
+ else {
+ bug!("clean: parenthesized `GenericBound::LangItemTrait`");
+ };
+
+ let trait_ = clean_trait_ref_with_bindings(cx, trait_ref, &bindings);
+ GenericBound::TraitBound(
+ PolyTrait { trait_, generic_params: vec![] },
+ hir::TraitBoundModifier::None,
+ )
+ }
+ hir::GenericBound::Trait(ref t, modifier) => {
+ // `T: ~const Destruct` is hidden because `T: Destruct` is a no-op.
+ if modifier == hir::TraitBoundModifier::MaybeConst
+ && cx.tcx.lang_items().destruct_trait()
+ == Some(t.trait_ref.trait_def_id().unwrap())
+ {
+ return None;
+ }
+
+ GenericBound::TraitBound(t.clean(cx), modifier)
+ }
+ })
+ }
+}
+
+pub(crate) fn clean_trait_ref_with_bindings<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+ bindings: &[TypeBinding],
+) -> Path {
+ let kind = cx.tcx.def_kind(trait_ref.def_id).into();
+ if !matches!(kind, ItemType::Trait | ItemType::TraitAlias) {
+ span_bug!(cx.tcx.def_span(trait_ref.def_id), "`TraitRef` had unexpected kind {:?}", kind);
+ }
+ inline::record_extern_fqn(cx, trait_ref.def_id, kind);
+ let path = external_path(cx, trait_ref.def_id, true, bindings.to_vec(), trait_ref.substs);
+
+ debug!("ty::TraitRef\n subst: {:?}\n", trait_ref.substs);
+
+ path
+}
+
+fn clean_poly_trait_ref_with_bindings<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ poly_trait_ref: ty::PolyTraitRef<'tcx>,
+ bindings: &[TypeBinding],
+) -> GenericBound {
+ let poly_trait_ref = poly_trait_ref.lift_to_tcx(cx.tcx).unwrap();
+
+ // collect any late bound regions
+ let late_bound_regions: Vec<_> = cx
+ .tcx
+ .collect_referenced_late_bound_regions(&poly_trait_ref)
+ .into_iter()
+ .filter_map(|br| match br {
+ ty::BrNamed(_, name) if name != kw::UnderscoreLifetime => Some(GenericParamDef {
+ name,
+ kind: GenericParamDefKind::Lifetime { outlives: vec![] },
+ }),
+ _ => None,
+ })
+ .collect();
+
+ let trait_ = clean_trait_ref_with_bindings(cx, poly_trait_ref.skip_binder(), bindings);
+ GenericBound::TraitBound(
+ PolyTrait { trait_, generic_params: late_bound_regions },
+ hir::TraitBoundModifier::None,
+ )
+}
+
+impl<'tcx> Clean<'tcx, GenericBound> for ty::PolyTraitRef<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> GenericBound {
+ clean_poly_trait_ref_with_bindings(cx, *self, &[])
+ }
+}
+
+fn clean_lifetime<'tcx>(lifetime: hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime {
+ let def = cx.tcx.named_region(lifetime.hir_id);
+ if let Some(
+ rl::Region::EarlyBound(_, node_id)
+ | rl::Region::LateBound(_, _, node_id)
+ | rl::Region::Free(_, node_id),
+ ) = def
+ {
+ if let Some(lt) = cx.substs.get(&node_id).and_then(|p| p.as_lt()).cloned() {
+ return lt;
+ }
+ }
+ Lifetime(lifetime.name.ident().name)
+}
+
+pub(crate) fn clean_const<'tcx>(constant: &hir::ConstArg, cx: &mut DocContext<'tcx>) -> Constant {
+ let def_id = cx.tcx.hir().body_owner_def_id(constant.value.body).to_def_id();
+ Constant {
+ type_: clean_middle_ty(cx.tcx.type_of(def_id), cx, Some(def_id)),
+ kind: ConstantKind::Anonymous { body: constant.value.body },
+ }
+}
+
+pub(crate) fn clean_middle_const<'tcx>(
+ constant: ty::Const<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> Constant {
+ // FIXME: instead of storing the stringified expression, store `self` directly.
+ Constant {
+ type_: clean_middle_ty(constant.ty(), cx, None),
+ kind: ConstantKind::TyConst { expr: constant.to_string() },
+ }
+}
+
+pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Lifetime> {
+ match *region {
+ ty::ReStatic => Some(Lifetime::statik()),
+ ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
+ if name != kw::UnderscoreLifetime { Some(Lifetime(name)) } else { None }
+ }
+ ty::ReEarlyBound(ref data) => {
+ if data.name != kw::UnderscoreLifetime {
+ Some(Lifetime(data.name))
+ } else {
+ None
+ }
+ }
+ ty::ReLateBound(..)
+ | ty::ReFree(..)
+ | ty::ReVar(..)
+ | ty::RePlaceholder(..)
+ | ty::ReEmpty(_)
+ | ty::ReErased => {
+ debug!("cannot clean region {:?}", region);
+ None
+ }
+ }
+}
+
+impl<'tcx> Clean<'tcx, Option<WherePredicate>> for hir::WherePredicate<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Option<WherePredicate> {
+ if !self.in_where_clause() {
+ return None;
+ }
+ Some(match *self {
+ hir::WherePredicate::BoundPredicate(ref wbp) => {
+ let bound_params = wbp
+ .bound_generic_params
+ .iter()
+ .map(|param| {
+ // Higher-ranked params must be lifetimes.
+ // Higher-ranked lifetimes can't have bounds.
+ assert_matches!(
+ param,
+ hir::GenericParam { kind: hir::GenericParamKind::Lifetime { .. }, .. }
+ );
+ Lifetime(param.name.ident().name)
+ })
+ .collect();
+ WherePredicate::BoundPredicate {
+ ty: clean_ty(wbp.bounded_ty, cx),
+ bounds: wbp.bounds.iter().filter_map(|x| x.clean(cx)).collect(),
+ bound_params,
+ }
+ }
+
+ hir::WherePredicate::RegionPredicate(ref wrp) => WherePredicate::RegionPredicate {
+ lifetime: clean_lifetime(wrp.lifetime, cx),
+ bounds: wrp.bounds.iter().filter_map(|x| x.clean(cx)).collect(),
+ },
+
+ hir::WherePredicate::EqPredicate(ref wrp) => WherePredicate::EqPredicate {
+ lhs: clean_ty(wrp.lhs_ty, cx),
+ rhs: clean_ty(wrp.rhs_ty, cx).into(),
+ },
+ })
+ }
+}
+
+impl<'tcx> Clean<'tcx, Option<WherePredicate>> for ty::Predicate<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Option<WherePredicate> {
+ let bound_predicate = self.kind();
+ match bound_predicate.skip_binder() {
+ ty::PredicateKind::Trait(pred) => {
+ clean_poly_trait_predicate(bound_predicate.rebind(pred), cx)
+ }
+ ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred),
+ ty::PredicateKind::TypeOutlives(pred) => clean_type_outlives_predicate(pred, cx),
+ ty::PredicateKind::Projection(pred) => Some(clean_projection_predicate(pred, cx)),
+ ty::PredicateKind::ConstEvaluatable(..) => None,
+ ty::PredicateKind::WellFormed(..) => None,
+
+ ty::PredicateKind::Subtype(..)
+ | ty::PredicateKind::Coerce(..)
+ | ty::PredicateKind::ObjectSafe(..)
+ | ty::PredicateKind::ClosureKind(..)
+ | ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::TypeWellFormedFromEnv(..) => panic!("not user writable"),
+ }
+ }
+}
+
+fn clean_poly_trait_predicate<'tcx>(
+ pred: ty::PolyTraitPredicate<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> Option<WherePredicate> {
+ // `T: ~const Destruct` is hidden because `T: Destruct` is a no-op.
+ if pred.skip_binder().constness == ty::BoundConstness::ConstIfConst
+ && Some(pred.skip_binder().def_id()) == cx.tcx.lang_items().destruct_trait()
+ {
+ return None;
+ }
+
+ let poly_trait_ref = pred.map_bound(|pred| pred.trait_ref);
+ Some(WherePredicate::BoundPredicate {
+ ty: clean_middle_ty(poly_trait_ref.skip_binder().self_ty(), cx, None),
+ bounds: vec![poly_trait_ref.clean(cx)],
+ bound_params: Vec::new(),
+ })
+}
+
+fn clean_region_outlives_predicate<'tcx>(
+ pred: ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>,
+) -> Option<WherePredicate> {
+ let ty::OutlivesPredicate(a, b) = pred;
+
+ if a.is_empty() && b.is_empty() {
+ return None;
+ }
+
+ Some(WherePredicate::RegionPredicate {
+ lifetime: clean_middle_region(a).expect("failed to clean lifetime"),
+ bounds: vec![GenericBound::Outlives(
+ clean_middle_region(b).expect("failed to clean bounds"),
+ )],
+ })
+}
+
+fn clean_type_outlives_predicate<'tcx>(
+ pred: ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>,
+ cx: &mut DocContext<'tcx>,
+) -> Option<WherePredicate> {
+ let ty::OutlivesPredicate(ty, lt) = pred;
+
+ if lt.is_empty() {
+ return None;
+ }
+
+ Some(WherePredicate::BoundPredicate {
+ ty: clean_middle_ty(ty, cx, None),
+ bounds: vec![GenericBound::Outlives(
+ clean_middle_region(lt).expect("failed to clean lifetimes"),
+ )],
+ bound_params: Vec::new(),
+ })
+}
+
+fn clean_middle_term<'tcx>(term: ty::Term<'tcx>, cx: &mut DocContext<'tcx>) -> Term {
+ match term {
+ ty::Term::Ty(ty) => Term::Type(clean_middle_ty(ty, cx, None)),
+ ty::Term::Const(c) => Term::Constant(clean_middle_const(c, cx)),
+ }
+}
+
+fn clean_hir_term<'tcx>(term: &hir::Term<'tcx>, cx: &mut DocContext<'tcx>) -> Term {
+ match term {
+ hir::Term::Ty(ty) => Term::Type(clean_ty(ty, cx)),
+ hir::Term::Const(c) => {
+ let def_id = cx.tcx.hir().local_def_id(c.hir_id);
+ Term::Constant(clean_middle_const(ty::Const::from_anon_const(cx.tcx, def_id), cx))
+ }
+ }
+}
+
+fn clean_projection_predicate<'tcx>(
+ pred: ty::ProjectionPredicate<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> WherePredicate {
+ let ty::ProjectionPredicate { projection_ty, term } = pred;
+ WherePredicate::EqPredicate {
+ lhs: clean_projection(projection_ty, cx, None),
+ rhs: clean_middle_term(term, cx),
+ }
+}
+
+fn clean_projection<'tcx>(
+ ty: ty::ProjectionTy<'tcx>,
+ cx: &mut DocContext<'tcx>,
+ def_id: Option<DefId>,
+) -> Type {
+ let lifted = ty.lift_to_tcx(cx.tcx).unwrap();
+ let trait_ = clean_trait_ref_with_bindings(cx, lifted.trait_ref(cx.tcx), &[]);
+ let self_type = clean_middle_ty(ty.self_ty(), cx, None);
+ let self_def_id = if let Some(def_id) = def_id {
+ cx.tcx.opt_parent(def_id).or(Some(def_id))
+ } else {
+ self_type.def_id(&cx.cache)
+ };
+ let should_show_cast = compute_should_show_cast(self_def_id, &trait_, &self_type);
+ Type::QPath {
+ assoc: Box::new(projection_to_path_segment(ty, cx)),
+ should_show_cast,
+ self_type: Box::new(self_type),
+ trait_,
+ }
+}
+
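+// Illustrative note (not from this commit): this decides whether a projection
+// is rendered with an explicit cast, e.g. `<Vec<T> as IntoIterator>::Item`,
+// rather than in the shorter `Self::Item` / `T::Item` form.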
+fn compute_should_show_cast(self_def_id: Option<DefId>, trait_: &Path, self_type: &Type) -> bool {
+ !trait_.segments.is_empty()
+ && self_def_id
+ .zip(Some(trait_.def_id()))
+ .map_or(!self_type.is_self_type(), |(id, trait_)| id != trait_)
+}
+
+fn projection_to_path_segment<'tcx>(
+ ty: ty::ProjectionTy<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> PathSegment {
+ let item = cx.tcx.associated_item(ty.item_def_id);
+ let generics = cx.tcx.generics_of(ty.item_def_id);
+ PathSegment {
+ name: item.name,
+ args: GenericArgs::AngleBracketed {
+ args: substs_to_args(cx, &ty.substs[generics.parent_count..], false).into(),
+ bindings: Default::default(),
+ },
+ }
+}
+
+fn clean_generic_param_def<'tcx>(
+ def: &ty::GenericParamDef,
+ cx: &mut DocContext<'tcx>,
+) -> GenericParamDef {
+ let (name, kind) = match def.kind {
+ ty::GenericParamDefKind::Lifetime => {
+ (def.name, GenericParamDefKind::Lifetime { outlives: vec![] })
+ }
+ ty::GenericParamDefKind::Type { has_default, synthetic, .. } => {
+ let default = if has_default {
+ Some(clean_middle_ty(cx.tcx.type_of(def.def_id), cx, Some(def.def_id)))
+ } else {
+ None
+ };
+ (
+ def.name,
+ GenericParamDefKind::Type {
+ did: def.def_id,
+ bounds: vec![], // These are filled in from the where-clauses.
+ default: default.map(Box::new),
+ synthetic,
+ },
+ )
+ }
+ ty::GenericParamDefKind::Const { has_default } => (
+ def.name,
+ GenericParamDefKind::Const {
+ did: def.def_id,
+ ty: Box::new(clean_middle_ty(cx.tcx.type_of(def.def_id), cx, Some(def.def_id))),
+ default: match has_default {
+ true => Some(Box::new(cx.tcx.const_param_default(def.def_id).to_string())),
+ false => None,
+ },
+ },
+ ),
+ };
+
+ GenericParamDef { name, kind }
+}
+
+fn clean_generic_param<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ generics: Option<&hir::Generics<'tcx>>,
+ param: &hir::GenericParam<'tcx>,
+) -> GenericParamDef {
+ let did = cx.tcx.hir().local_def_id(param.hir_id);
+ let (name, kind) = match param.kind {
+ hir::GenericParamKind::Lifetime { .. } => {
+ let outlives = if let Some(generics) = generics {
+ generics
+ .outlives_for_param(did)
+ .filter(|bp| !bp.in_where_clause)
+ .flat_map(|bp| bp.bounds)
+ .map(|bound| match bound {
+ hir::GenericBound::Outlives(lt) => clean_lifetime(*lt, cx),
+ _ => panic!(),
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
+ (param.name.ident().name, GenericParamDefKind::Lifetime { outlives })
+ }
+ hir::GenericParamKind::Type { ref default, synthetic } => {
+ let bounds = if let Some(generics) = generics {
+ generics
+ .bounds_for_param(did)
+ .filter(|bp| bp.origin != PredicateOrigin::WhereClause)
+ .flat_map(|bp| bp.bounds)
+ .filter_map(|x| x.clean(cx))
+ .collect()
+ } else {
+ Vec::new()
+ };
+ (
+ param.name.ident().name,
+ GenericParamDefKind::Type {
+ did: did.to_def_id(),
+ bounds,
+ default: default.map(|t| clean_ty(t, cx)).map(Box::new),
+ synthetic,
+ },
+ )
+ }
+ hir::GenericParamKind::Const { ty, default } => (
+ param.name.ident().name,
+ GenericParamDefKind::Const {
+ did: did.to_def_id(),
+ ty: Box::new(clean_ty(ty, cx)),
+ default: default.map(|ct| {
+ let def_id = cx.tcx.hir().local_def_id(ct.hir_id);
+ Box::new(ty::Const::from_anon_const(cx.tcx, def_id).to_string())
+ }),
+ },
+ ),
+ };
+
+ GenericParamDef { name, kind }
+}
+
+/// Synthetic type-parameters are inserted after normal ones.
+/// In order for normal parameters to be able to refer to synthetic ones,
+/// the synthetic parameters are scanned first.
+fn is_impl_trait(param: &hir::GenericParam<'_>) -> bool {
+ match param.kind {
+ hir::GenericParamKind::Type { synthetic, .. } => synthetic,
+ _ => false,
+ }
+}
+
+/// This can happen for `async fn`, e.g. `async fn f<'_>(&'_ self)`.
+///
+/// See `lifetime_to_generic_param` in `rustc_ast_lowering` for more information.
+fn is_elided_lifetime(param: &hir::GenericParam<'_>) -> bool {
+ matches!(param.kind, hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Elided })
+}
+
+impl<'tcx> Clean<'tcx, Generics> for hir::Generics<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Generics {
+ let impl_trait_params = self
+ .params
+ .iter()
+ .filter(|param| is_impl_trait(param))
+ .map(|param| {
+ let param = clean_generic_param(cx, Some(self), param);
+ match param.kind {
+ GenericParamDefKind::Lifetime { .. } => unreachable!(),
+ GenericParamDefKind::Type { did, ref bounds, .. } => {
+ cx.impl_trait_bounds.insert(did.into(), bounds.clone());
+ }
+ GenericParamDefKind::Const { .. } => unreachable!(),
+ }
+ param
+ })
+ .collect::<Vec<_>>();
+
+ let mut params = Vec::with_capacity(self.params.len());
+ for p in self.params.iter().filter(|p| !is_impl_trait(p) && !is_elided_lifetime(p)) {
+ let p = clean_generic_param(cx, Some(self), p);
+ params.push(p);
+ }
+ params.extend(impl_trait_params);
+
+ let mut generics = Generics {
+ params,
+ where_predicates: self.predicates.iter().filter_map(|x| x.clean(cx)).collect(),
+ };
+
+ // Some duplicates are generated for ?Sized bounds between type params and where
+ // predicates. The point here is to move the bounds definitions from type params
+ // to where predicates when such cases occur.
+ for where_pred in &mut generics.where_predicates {
+ match *where_pred {
+ WherePredicate::BoundPredicate {
+ ty: Generic(ref name), ref mut bounds, ..
+ } => {
+ if bounds.is_empty() {
+ for param in &mut generics.params {
+ match param.kind {
+ GenericParamDefKind::Lifetime { .. } => {}
+ GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => {
+ if &param.name == name {
+ mem::swap(bounds, ty_bounds);
+ break;
+ }
+ }
+ GenericParamDefKind::Const { .. } => {}
+ }
+ }
+ }
+ }
+ _ => continue,
+ }
+ }
+ generics
+ }
+}
+
+fn clean_ty_generics<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ gens: &ty::Generics,
+ preds: ty::GenericPredicates<'tcx>,
+) -> Generics {
+ // Don't populate `cx.impl_trait_bounds` before `clean`ning `where` clauses,
+ // since `Clean for ty::Predicate` would consume them.
+ let mut impl_trait = BTreeMap::<ImplTraitParam, Vec<GenericBound>>::default();
+
+ // Bounds in the type_params and lifetimes fields are repeated in the
+ // predicates field (see rustc_typeck::collect::ty_generics), so remove
+ // them.
+ let stripped_params = gens
+ .params
+ .iter()
+ .filter_map(|param| match param.kind {
+ ty::GenericParamDefKind::Lifetime if param.name == kw::UnderscoreLifetime => None,
+ ty::GenericParamDefKind::Lifetime => Some(clean_generic_param_def(param, cx)),
+ ty::GenericParamDefKind::Type { synthetic, .. } => {
+ if param.name == kw::SelfUpper {
+ assert_eq!(param.index, 0);
+ return None;
+ }
+ if synthetic {
+ impl_trait.insert(param.index.into(), vec![]);
+ return None;
+ }
+ Some(clean_generic_param_def(param, cx))
+ }
+ ty::GenericParamDefKind::Const { .. } => Some(clean_generic_param_def(param, cx)),
+ })
+ .collect::<Vec<GenericParamDef>>();
+
+ // param index -> [(DefId of trait, associated type name and generics, type)]
+ let mut impl_trait_proj = FxHashMap::<u32, Vec<(DefId, PathSegment, Ty<'_>)>>::default();
+
+ let where_predicates = preds
+ .predicates
+ .iter()
+ .flat_map(|(p, _)| {
+ let mut projection = None;
+ let param_idx = (|| {
+ let bound_p = p.kind();
+ match bound_p.skip_binder() {
+ ty::PredicateKind::Trait(pred) => {
+ if let ty::Param(param) = pred.self_ty().kind() {
+ return Some(param.index);
+ }
+ }
+ ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg)) => {
+ if let ty::Param(param) = ty.kind() {
+ return Some(param.index);
+ }
+ }
+ ty::PredicateKind::Projection(p) => {
+ if let ty::Param(param) = p.projection_ty.self_ty().kind() {
+ projection = Some(bound_p.rebind(p));
+ return Some(param.index);
+ }
+ }
+ _ => (),
+ }
+
+ None
+ })();
+
+ if let Some(param_idx) = param_idx {
+ if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
+ let p: WherePredicate = p.clean(cx)?;
+
+ b.extend(
+ p.get_bounds()
+ .into_iter()
+ .flatten()
+ .cloned()
+ .filter(|b| !b.is_sized_bound(cx)),
+ );
+
+ let proj = projection.map(|p| {
+ (
+ clean_projection(p.skip_binder().projection_ty, cx, None),
+ p.skip_binder().term,
+ )
+ });
+ if let Some(((_, trait_did, name), rhs)) = proj
+ .as_ref()
+ .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
+ {
+ // FIXME(...): Remove this unwrap()
+ impl_trait_proj.entry(param_idx).or_default().push((
+ trait_did,
+ name,
+ rhs.ty().unwrap(),
+ ));
+ }
+
+ return None;
+ }
+ }
+
+ Some(p)
+ })
+ .collect::<Vec<_>>();
+
+ for (param, mut bounds) in impl_trait {
+ // Move trait bounds to the front.
+ bounds.sort_by_key(|b| !matches!(b, GenericBound::TraitBound(..)));
+
+ if let crate::core::ImplTraitParam::ParamIndex(idx) = param {
+ if let Some(proj) = impl_trait_proj.remove(&idx) {
+ for (trait_did, name, rhs) in proj {
+ let rhs = clean_middle_ty(rhs, cx, None);
+ simplify::merge_bounds(cx, &mut bounds, trait_did, name, &Term::Type(rhs));
+ }
+ }
+ } else {
+ unreachable!();
+ }
+
+ cx.impl_trait_bounds.insert(param, bounds);
+ }
+
+ // Now that `cx.impl_trait_bounds` is populated, we can process
+ // remaining predicates which could contain `impl Trait`.
+ let mut where_predicates =
+ where_predicates.into_iter().flat_map(|p| p.clean(cx)).collect::<Vec<_>>();
+
+ // Type parameters have a Sized bound by default unless removed with
+ // ?Sized. Scan through the predicates and mark any type parameter with
+ // a Sized bound, removing the bounds as we find them.
+ //
+ // Note that associated types also have a sized bound by default, but we
+ // don't actually know the set of associated types right here, so that's
+ // handled when cleaning associated types.
+ let mut sized_params = FxHashSet::default();
+ where_predicates.retain(|pred| match *pred {
+ WherePredicate::BoundPredicate { ty: Generic(ref g), ref bounds, .. } => {
+ if bounds.iter().any(|b| b.is_sized_bound(cx)) {
+ sized_params.insert(*g);
+ false
+ } else {
+ true
+ }
+ }
+ _ => true,
+ });
+
+ // Run through the type parameters again and insert a ?Sized
+ // unbound for any we didn't find to be Sized.
+ for tp in &stripped_params {
+ if matches!(tp.kind, types::GenericParamDefKind::Type { .. })
+ && !sized_params.contains(&tp.name)
+ {
+ where_predicates.push(WherePredicate::BoundPredicate {
+ ty: Type::Generic(tp.name),
+ bounds: vec![GenericBound::maybe_sized(cx)],
+ bound_params: Vec::new(),
+ })
+ }
+ }
+
+ // It would be nice to collect all of the bounds on a type and recombine
+ // them if possible, to avoid, e.g., `where T: Foo, T: Bar, T: Sized, T: 'a`
+ // and instead see `where T: Foo + Bar + Sized + 'a`.
+
+ Generics {
+ params: stripped_params,
+ where_predicates: simplify::where_clauses(cx, where_predicates),
+ }
+}
+
+fn clean_fn_or_proc_macro<'tcx>(
+ item: &hir::Item<'tcx>,
+ sig: &hir::FnSig<'tcx>,
+ generics: &hir::Generics<'tcx>,
+ body_id: hir::BodyId,
+ name: &mut Symbol,
+ cx: &mut DocContext<'tcx>,
+) -> ItemKind {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let macro_kind = attrs.iter().find_map(|a| {
+ if a.has_name(sym::proc_macro) {
+ Some(MacroKind::Bang)
+ } else if a.has_name(sym::proc_macro_derive) {
+ Some(MacroKind::Derive)
+ } else if a.has_name(sym::proc_macro_attribute) {
+ Some(MacroKind::Attr)
+ } else {
+ None
+ }
+ });
+ match macro_kind {
+ Some(kind) => {
+ if kind == MacroKind::Derive {
+ *name = attrs
+ .lists(sym::proc_macro_derive)
+ .find_map(|mi| mi.ident())
+ .expect("proc-macro derives require a name")
+ .name;
+ }
+
+ let mut helpers = Vec::new();
+ for mi in attrs.lists(sym::proc_macro_derive) {
+ if !mi.has_name(sym::attributes) {
+ continue;
+ }
+
+ if let Some(list) = mi.meta_item_list() {
+ for inner_mi in list {
+ if let Some(ident) = inner_mi.ident() {
+ helpers.push(ident.name);
+ }
+ }
+ }
+ }
+ ProcMacroItem(ProcMacro { kind, helpers })
+ }
+ None => {
+ let mut func = clean_function(cx, sig, generics, body_id);
+ clean_fn_decl_legacy_const_generics(&mut func, attrs);
+ FunctionItem(func)
+ }
+ }
+}
+
+/// This is needed to make it more "readable" when documenting functions using
+/// `rustc_legacy_const_generics`. More information in
+/// <https://github.com/rust-lang/rust/issues/83167>.
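+///
+/// Rough, illustrative sketch (the names and exact rendering are examples, not
+/// verbatim rustdoc output): a function declared as
+/// `#[rustc_legacy_const_generics(1)] fn foo<const N: usize>(a: u8, b: u8)`
+/// is called as `foo(a, 5, b)`, so the const parameter is moved into the
+/// argument list at index 1 and displayed there as a const argument.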
+fn clean_fn_decl_legacy_const_generics(func: &mut Function, attrs: &[ast::Attribute]) {
+ for meta_item_list in attrs
+ .iter()
+ .filter(|a| a.has_name(sym::rustc_legacy_const_generics))
+ .filter_map(|a| a.meta_item_list())
+ {
+ for (pos, literal) in meta_item_list.iter().filter_map(|meta| meta.literal()).enumerate() {
+ match literal.kind {
+ ast::LitKind::Int(a, _) => {
+ let gen = func.generics.params.remove(0);
+ if let GenericParamDef { name, kind: GenericParamDefKind::Const { ty, .. } } =
+ gen
+ {
+ func.decl
+ .inputs
+ .values
+ .insert(a as _, Argument { name, type_: *ty, is_const: true });
+ } else {
+ panic!("unexpected non const in position {pos}");
+ }
+ }
+ _ => panic!("invalid arg index"),
+ }
+ }
+ }
+}
+
+fn clean_function<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ sig: &hir::FnSig<'tcx>,
+ generics: &hir::Generics<'tcx>,
+ body_id: hir::BodyId,
+) -> Box<Function> {
+ let (generics, decl) = enter_impl_trait(cx, |cx| {
+ // NOTE: generics must be cleaned before args
+ let generics = generics.clean(cx);
+ let args = clean_args_from_types_and_body_id(cx, sig.decl.inputs, body_id);
+ let decl = clean_fn_decl_with_args(cx, sig.decl, args);
+ (generics, decl)
+ });
+ Box::new(Function { decl, generics })
+}
+
+fn clean_args_from_types_and_names<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ types: &[hir::Ty<'tcx>],
+ names: &[Ident],
+) -> Arguments {
+ Arguments {
+ values: types
+ .iter()
+ .enumerate()
+ .map(|(i, ty)| {
+ let mut name = names.get(i).map_or(kw::Empty, |ident| ident.name);
+ if name.is_empty() {
+ name = kw::Underscore;
+ }
+ Argument { name, type_: clean_ty(ty, cx), is_const: false }
+ })
+ .collect(),
+ }
+}
+
+fn clean_args_from_types_and_body_id<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ types: &[hir::Ty<'tcx>],
+ body_id: hir::BodyId,
+) -> Arguments {
+ let body = cx.tcx.hir().body(body_id);
+
+ Arguments {
+ values: types
+ .iter()
+ .enumerate()
+ .map(|(i, ty)| Argument {
+ name: name_from_pat(body.params[i].pat),
+ type_: clean_ty(ty, cx),
+ is_const: false,
+ })
+ .collect(),
+ }
+}
+
+fn clean_fn_decl_with_args<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ decl: &hir::FnDecl<'tcx>,
+ args: Arguments,
+) -> FnDecl {
+ let output = match decl.output {
+ hir::FnRetTy::Return(typ) => Return(clean_ty(typ, cx)),
+ hir::FnRetTy::DefaultReturn(..) => DefaultReturn,
+ };
+ FnDecl { inputs: args, output, c_variadic: decl.c_variadic }
+}
+
+fn clean_fn_decl_from_did_and_sig<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ did: Option<DefId>,
+ sig: ty::PolyFnSig<'tcx>,
+) -> FnDecl {
+ let mut names = did.map_or(&[] as &[_], |did| cx.tcx.fn_arg_names(did)).iter();
+
+    // We assume all empty tuples are the default return type. This can theoretically discard an
+    // explicit `-> ()`, but it shouldn't change the meaning of any code.
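+    // For example, a signature of `fn f() -> ()` is rendered simply as `fn f()`.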
+ let output = match clean_middle_ty(sig.skip_binder().output(), cx, None) {
+ Type::Tuple(inner) if inner.is_empty() => DefaultReturn,
+ ty => Return(ty),
+ };
+
+ FnDecl {
+ output,
+ c_variadic: sig.skip_binder().c_variadic,
+ inputs: Arguments {
+ values: sig
+ .skip_binder()
+ .inputs()
+ .iter()
+ .map(|t| Argument {
+ type_: clean_middle_ty(*t, cx, None),
+ name: names.next().map_or(kw::Empty, |i| i.name),
+ is_const: false,
+ })
+ .collect(),
+ },
+ }
+}
+
+fn clean_trait_ref<'tcx>(trait_ref: &hir::TraitRef<'tcx>, cx: &mut DocContext<'tcx>) -> Path {
+ let path = clean_path(trait_ref.path, cx);
+ register_res(cx, path.res);
+ path
+}
+
+impl<'tcx> Clean<'tcx, PolyTrait> for hir::PolyTraitRef<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> PolyTrait {
+ PolyTrait {
+ trait_: clean_trait_ref(&self.trait_ref, cx),
+ generic_params: self
+ .bound_generic_params
+ .iter()
+ .filter(|p| !is_elided_lifetime(p))
+ .map(|x| clean_generic_param(cx, None, x))
+ .collect(),
+ }
+ }
+}
+
+impl<'tcx> Clean<'tcx, Item> for hir::TraitItem<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Item {
+ let local_did = self.def_id.to_def_id();
+ cx.with_param_env(local_did, |cx| {
+ let inner = match self.kind {
+ hir::TraitItemKind::Const(ty, Some(default)) => AssocConstItem(
+ clean_ty(ty, cx),
+ ConstantKind::Local { def_id: local_did, body: default },
+ ),
+ hir::TraitItemKind::Const(ty, None) => TyAssocConstItem(clean_ty(ty, cx)),
+ hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => {
+ let m = clean_function(cx, sig, self.generics, body);
+ MethodItem(m, None)
+ }
+ hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => {
+ let (generics, decl) = enter_impl_trait(cx, |cx| {
+ // NOTE: generics must be cleaned before args
+ let generics = self.generics.clean(cx);
+ let args = clean_args_from_types_and_names(cx, sig.decl.inputs, names);
+ let decl = clean_fn_decl_with_args(cx, sig.decl, args);
+ (generics, decl)
+ });
+ TyMethodItem(Box::new(Function { decl, generics }))
+ }
+ hir::TraitItemKind::Type(bounds, Some(default)) => {
+ let generics = enter_impl_trait(cx, |cx| self.generics.clean(cx));
+ let bounds = bounds.iter().filter_map(|x| x.clean(cx)).collect();
+ let item_type = clean_middle_ty(hir_ty_to_ty(cx.tcx, default), cx, None);
+ AssocTypeItem(
+ Box::new(Typedef {
+ type_: clean_ty(default, cx),
+ generics,
+ item_type: Some(item_type),
+ }),
+ bounds,
+ )
+ }
+ hir::TraitItemKind::Type(bounds, None) => {
+ let generics = enter_impl_trait(cx, |cx| self.generics.clean(cx));
+ let bounds = bounds.iter().filter_map(|x| x.clean(cx)).collect();
+ TyAssocTypeItem(Box::new(generics), bounds)
+ }
+ };
+ let what_rustc_thinks =
+ Item::from_def_id_and_parts(local_did, Some(self.ident.name), inner, cx);
+ // Trait items always inherit the trait's visibility -- we don't want to show `pub`.
+ Item { visibility: Inherited, ..what_rustc_thinks }
+ })
+ }
+}
+
+impl<'tcx> Clean<'tcx, Item> for hir::ImplItem<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Item {
+ let local_did = self.def_id.to_def_id();
+ cx.with_param_env(local_did, |cx| {
+ let inner = match self.kind {
+ hir::ImplItemKind::Const(ty, expr) => {
+ let default = ConstantKind::Local { def_id: local_did, body: expr };
+ AssocConstItem(clean_ty(ty, cx), default)
+ }
+ hir::ImplItemKind::Fn(ref sig, body) => {
+ let m = clean_function(cx, sig, self.generics, body);
+ let defaultness = cx.tcx.impl_defaultness(self.def_id);
+ MethodItem(m, Some(defaultness))
+ }
+ hir::ImplItemKind::TyAlias(hir_ty) => {
+ let type_ = clean_ty(hir_ty, cx);
+ let generics = self.generics.clean(cx);
+ let item_type = clean_middle_ty(hir_ty_to_ty(cx.tcx, hir_ty), cx, None);
+ AssocTypeItem(
+ Box::new(Typedef { type_, generics, item_type: Some(item_type) }),
+ Vec::new(),
+ )
+ }
+ };
+
+ let mut what_rustc_thinks =
+ Item::from_def_id_and_parts(local_did, Some(self.ident.name), inner, cx);
+
+ let impl_ref = cx.tcx.impl_trait_ref(cx.tcx.local_parent(self.def_id));
+
+ // Trait impl items always inherit the impl's visibility --
+ // we don't want to show `pub`.
+ if impl_ref.is_some() {
+ what_rustc_thinks.visibility = Inherited;
+ }
+
+ what_rustc_thinks
+ })
+ }
+}
+
+impl<'tcx> Clean<'tcx, Item> for ty::AssocItem {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Item {
+ let tcx = cx.tcx;
+ let kind = match self.kind {
+ ty::AssocKind::Const => {
+ let ty = clean_middle_ty(tcx.type_of(self.def_id), cx, Some(self.def_id));
+
+ let provided = match self.container {
+ ty::ImplContainer => true,
+ ty::TraitContainer => tcx.impl_defaultness(self.def_id).has_value(),
+ };
+ if provided {
+ AssocConstItem(ty, ConstantKind::Extern { def_id: self.def_id })
+ } else {
+ TyAssocConstItem(ty)
+ }
+ }
+ ty::AssocKind::Fn => {
+ let generics = clean_ty_generics(
+ cx,
+ tcx.generics_of(self.def_id),
+ tcx.explicit_predicates_of(self.def_id),
+ );
+ let sig = tcx.fn_sig(self.def_id);
+ let mut decl = clean_fn_decl_from_did_and_sig(cx, Some(self.def_id), sig);
+
+ if self.fn_has_self_parameter {
+ let self_ty = match self.container {
+ ty::ImplContainer => tcx.type_of(self.container_id(tcx)),
+ ty::TraitContainer => tcx.types.self_param,
+ };
+ let self_arg_ty = sig.input(0).skip_binder();
+ if self_arg_ty == self_ty {
+ decl.inputs.values[0].type_ = Generic(kw::SelfUpper);
+ } else if let ty::Ref(_, ty, _) = *self_arg_ty.kind() {
+ if ty == self_ty {
+ match decl.inputs.values[0].type_ {
+ BorrowedRef { ref mut type_, .. } => {
+ **type_ = Generic(kw::SelfUpper)
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+
+ let provided = match self.container {
+ ty::ImplContainer => true,
+ ty::TraitContainer => self.defaultness(tcx).has_value(),
+ };
+ if provided {
+ let defaultness = match self.container {
+ ty::ImplContainer => Some(self.defaultness(tcx)),
+ ty::TraitContainer => None,
+ };
+ MethodItem(Box::new(Function { generics, decl }), defaultness)
+ } else {
+ TyMethodItem(Box::new(Function { generics, decl }))
+ }
+ }
+ ty::AssocKind::Type => {
+ let my_name = self.name;
+
+ fn param_eq_arg(param: &GenericParamDef, arg: &GenericArg) -> bool {
+ match (&param.kind, arg) {
+ (GenericParamDefKind::Type { .. }, GenericArg::Type(Type::Generic(ty)))
+ if *ty == param.name =>
+ {
+ true
+ }
+ (
+ GenericParamDefKind::Lifetime { .. },
+ GenericArg::Lifetime(Lifetime(lt)),
+ ) if *lt == param.name => true,
+ (GenericParamDefKind::Const { .. }, GenericArg::Const(c)) => {
+ match &c.kind {
+ ConstantKind::TyConst { expr } => expr == param.name.as_str(),
+ _ => false,
+ }
+ }
+ _ => false,
+ }
+ }
+
+ if let ty::TraitContainer = self.container {
+ let bounds = tcx.explicit_item_bounds(self.def_id);
+ let predicates = ty::GenericPredicates { parent: None, predicates: bounds };
+ let mut generics =
+ clean_ty_generics(cx, tcx.generics_of(self.def_id), predicates);
+ // Filter out the bounds that are (likely?) directly attached to the associated type,
+ // as opposed to being located in the where clause.
+ let mut bounds = generics
+ .where_predicates
+ .drain_filter(|pred| match *pred {
+ WherePredicate::BoundPredicate {
+ ty: QPath { ref assoc, ref self_type, ref trait_, .. },
+ ..
+ } => {
+ if assoc.name != my_name {
+ return false;
+ }
+ if trait_.def_id() != self.container_id(tcx) {
+ return false;
+ }
+ match **self_type {
+ Generic(ref s) if *s == kw::SelfUpper => {}
+ _ => return false,
+ }
+ match &assoc.args {
+ GenericArgs::AngleBracketed { args, bindings } => {
+ if !bindings.is_empty()
+ || generics
+ .params
+ .iter()
+ .zip(args.iter())
+ .any(|(param, arg)| !param_eq_arg(param, arg))
+ {
+ return false;
+ }
+ }
+ GenericArgs::Parenthesized { .. } => {
+ // The only time this happens is if we're inside the rustdoc for Fn(),
+ // which only has one associated type, which is not a GAT, so whatever.
+ }
+ }
+ true
+ }
+ _ => false,
+ })
+ .flat_map(|pred| {
+ if let WherePredicate::BoundPredicate { bounds, .. } = pred {
+ bounds
+ } else {
+ unreachable!()
+ }
+ })
+ .collect::<Vec<_>>();
+ // Our Sized/?Sized bound didn't get handled when creating the generics
+ // because we didn't actually get our whole set of bounds until just now
+ // (some of them may have come from the trait). If we do have a sized
+ // bound, we remove it, and if we don't then we add the `?Sized` bound
+ // at the end.
+ match bounds.iter().position(|b| b.is_sized_bound(cx)) {
+ Some(i) => {
+ bounds.remove(i);
+ }
+ None => bounds.push(GenericBound::maybe_sized(cx)),
+ }
+
+ if tcx.impl_defaultness(self.def_id).has_value() {
+ AssocTypeItem(
+ Box::new(Typedef {
+ type_: clean_middle_ty(
+ tcx.type_of(self.def_id),
+ cx,
+ Some(self.def_id),
+ ),
+ generics,
+ // FIXME: should we obtain the Type from HIR and pass it on here?
+ item_type: None,
+ }),
+ bounds,
+ )
+ } else {
+ TyAssocTypeItem(Box::new(generics), bounds)
+ }
+ } else {
+ // FIXME: when could this happen? Associated items in inherent impls?
+ AssocTypeItem(
+ Box::new(Typedef {
+ type_: clean_middle_ty(tcx.type_of(self.def_id), cx, Some(self.def_id)),
+ generics: Generics { params: Vec::new(), where_predicates: Vec::new() },
+ item_type: None,
+ }),
+ Vec::new(),
+ )
+ }
+ }
+ };
+
+ let mut what_rustc_thinks =
+ Item::from_def_id_and_parts(self.def_id, Some(self.name), kind, cx);
+
+ let impl_ref = tcx.impl_trait_ref(tcx.parent(self.def_id));
+
+ // Trait impl items always inherit the impl's visibility --
+ // we don't want to show `pub`.
+ if impl_ref.is_some() {
+ what_rustc_thinks.visibility = Visibility::Inherited;
+ }
+
+ what_rustc_thinks
+ }
+}
+
+fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type {
+ let hir::Ty { hir_id: _, span, ref kind } = *hir_ty;
+ let hir::TyKind::Path(qpath) = kind else { unreachable!() };
+
+ match qpath {
+ hir::QPath::Resolved(None, path) => {
+ if let Res::Def(DefKind::TyParam, did) = path.res {
+ if let Some(new_ty) = cx.substs.get(&did).and_then(|p| p.as_ty()).cloned() {
+ return new_ty;
+ }
+ if let Some(bounds) = cx.impl_trait_bounds.remove(&did.into()) {
+ return ImplTrait(bounds);
+ }
+ }
+
+ if let Some(expanded) = maybe_expand_private_type_alias(cx, path) {
+ expanded
+ } else {
+ let path = clean_path(path, cx);
+ resolve_type(cx, path)
+ }
+ }
+ hir::QPath::Resolved(Some(qself), p) => {
+ // Try to normalize `<X as Y>::T` to a type
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ if let Some(normalized_value) = normalize(cx, ty) {
+ return clean_middle_ty(normalized_value, cx, None);
+ }
+
+ let trait_segments = &p.segments[..p.segments.len() - 1];
+ let trait_def = cx.tcx.associated_item(p.res.def_id()).container_id(cx.tcx);
+ let trait_ = self::Path {
+ res: Res::Def(DefKind::Trait, trait_def),
+ segments: trait_segments.iter().map(|x| x.clean(cx)).collect(),
+ };
+ register_res(cx, trait_.res);
+ let self_def_id = DefId::local(qself.hir_id.owner.local_def_index);
+ let self_type = clean_ty(qself, cx);
+ let should_show_cast = compute_should_show_cast(Some(self_def_id), &trait_, &self_type);
+ Type::QPath {
+ assoc: Box::new(p.segments.last().expect("segments were empty").clean(cx)),
+ should_show_cast,
+ self_type: Box::new(self_type),
+ trait_,
+ }
+ }
+ hir::QPath::TypeRelative(qself, segment) => {
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ let res = match ty.kind() {
+ ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id),
+ // Rustdoc handles `ty::Error`s by turning them into `Type::Infer`s.
+ ty::Error(_) => return Type::Infer,
+ _ => bug!("clean: expected associated type, found `{:?}`", ty),
+ };
+ let trait_ = clean_path(&hir::Path { span, res, segments: &[] }, cx);
+ register_res(cx, trait_.res);
+ let self_def_id = res.opt_def_id();
+ let self_type = clean_ty(qself, cx);
+ let should_show_cast = compute_should_show_cast(self_def_id, &trait_, &self_type);
+ Type::QPath {
+ assoc: Box::new(segment.clean(cx)),
+ should_show_cast,
+ self_type: Box::new(self_type),
+ trait_,
+ }
+ }
+ hir::QPath::LangItem(..) => bug!("clean: requiring documentation of lang item"),
+ }
+}
+
+fn maybe_expand_private_type_alias<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ path: &hir::Path<'tcx>,
+) -> Option<Type> {
+ let Res::Def(DefKind::TyAlias, def_id) = path.res else { return None };
+ // Substitute private type aliases
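+    //
+    // For instance (illustrative names): if a crate defines a non-exported
+    // `type Alias<T> = Vec<T>;` and a public `fn f() -> Alias<u8>`, the docs
+    // show the expanded `Vec<u8>` rather than the private alias.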
+ let def_id = def_id.as_local()?;
+ let alias = if !cx.cache.access_levels.is_exported(def_id.to_def_id()) {
+ &cx.tcx.hir().expect_item(def_id).kind
+ } else {
+ return None;
+ };
+ let hir::ItemKind::TyAlias(ty, generics) = alias else { return None };
+
+ let provided_params = &path.segments.last().expect("segments were empty");
+ let mut substs = FxHashMap::default();
+ let generic_args = provided_params.args();
+
+ let mut indices: hir::GenericParamCount = Default::default();
+ for param in generics.params.iter() {
+ match param.kind {
+ hir::GenericParamKind::Lifetime { .. } => {
+ let mut j = 0;
+ let lifetime = generic_args.args.iter().find_map(|arg| match arg {
+ hir::GenericArg::Lifetime(lt) => {
+ if indices.lifetimes == j {
+ return Some(lt);
+ }
+ j += 1;
+ None
+ }
+ _ => None,
+ });
+ if let Some(lt) = lifetime.cloned() {
+ let lt_def_id = cx.tcx.hir().local_def_id(param.hir_id);
+ let cleaned =
+ if !lt.is_elided() { clean_lifetime(lt, cx) } else { Lifetime::elided() };
+ substs.insert(lt_def_id.to_def_id(), SubstParam::Lifetime(cleaned));
+ }
+ indices.lifetimes += 1;
+ }
+ hir::GenericParamKind::Type { ref default, .. } => {
+ let ty_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
+ let mut j = 0;
+ let type_ = generic_args.args.iter().find_map(|arg| match arg {
+ hir::GenericArg::Type(ty) => {
+ if indices.types == j {
+ return Some(ty);
+ }
+ j += 1;
+ None
+ }
+ _ => None,
+ });
+ if let Some(ty) = type_ {
+ substs.insert(ty_param_def_id.to_def_id(), SubstParam::Type(clean_ty(ty, cx)));
+ } else if let Some(default) = *default {
+ substs.insert(
+ ty_param_def_id.to_def_id(),
+ SubstParam::Type(clean_ty(default, cx)),
+ );
+ }
+ indices.types += 1;
+ }
+ hir::GenericParamKind::Const { .. } => {
+ let const_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
+ let mut j = 0;
+ let const_ = generic_args.args.iter().find_map(|arg| match arg {
+ hir::GenericArg::Const(ct) => {
+ if indices.consts == j {
+ return Some(ct);
+ }
+ j += 1;
+ None
+ }
+ _ => None,
+ });
+ if let Some(ct) = const_ {
+ substs.insert(
+ const_param_def_id.to_def_id(),
+ SubstParam::Constant(clean_const(ct, cx)),
+ );
+ }
+ // FIXME(const_generics_defaults)
+ indices.consts += 1;
+ }
+ }
+ }
+
+ Some(cx.enter_alias(substs, |cx| clean_ty(ty, cx)))
+}
+
+pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type {
+ use rustc_hir::*;
+
+ match ty.kind {
+ TyKind::Never => Primitive(PrimitiveType::Never),
+ TyKind::Ptr(ref m) => RawPointer(m.mutbl, Box::new(clean_ty(m.ty, cx))),
+ TyKind::Rptr(ref l, ref m) => {
+ // There are two times a `Fresh` lifetime can be created:
+ // 1. For `&'_ x`, written by the user. This corresponds to `lower_lifetime` in `rustc_ast_lowering`.
+            // 2. For `&x` as a parameter to an `async fn`. This corresponds to `elided_ref_lifetime` in `rustc_ast_lowering`.
+ // See #59286 for more information.
+ // Ideally we would only hide the `'_` for case 2., but I don't know a way to distinguish it.
+ // Turning `fn f(&'_ self)` into `fn f(&self)` isn't the worst thing in the world, though;
+ // there's no case where it could cause the function to fail to compile.
+ let elided =
+ l.is_elided() || matches!(l.name, LifetimeName::Param(_, ParamName::Fresh));
+ let lifetime = if elided { None } else { Some(clean_lifetime(*l, cx)) };
+ BorrowedRef { lifetime, mutability: m.mutbl, type_: Box::new(clean_ty(m.ty, cx)) }
+ }
+ TyKind::Slice(ty) => Slice(Box::new(clean_ty(ty, cx))),
+ TyKind::Array(ty, ref length) => {
+ let length = match length {
+ hir::ArrayLen::Infer(_, _) => "_".to_string(),
+ hir::ArrayLen::Body(anon_const) => {
+ let def_id = cx.tcx.hir().local_def_id(anon_const.hir_id);
+ // NOTE(min_const_generics): We can't use `const_eval_poly` for constants
+ // as we currently do not supply the parent generics to anonymous constants
+ // but do allow `ConstKind::Param`.
+ //
+                    // `const_eval_poly` tries to first substitute generic parameters, which
+                    // results in an ICE, while manually constructing the constant and using `eval`
+                    // does nothing for `ConstKind::Param`.
+ let ct = ty::Const::from_anon_const(cx.tcx, def_id);
+ let param_env = cx.tcx.param_env(def_id);
+ print_const(cx, ct.eval(cx.tcx, param_env))
+ }
+ };
+
+ Array(Box::new(clean_ty(ty, cx)), length)
+ }
+ TyKind::Tup(tys) => Tuple(tys.iter().map(|ty| clean_ty(ty, cx)).collect()),
+ TyKind::OpaqueDef(item_id, _) => {
+ let item = cx.tcx.hir().item(item_id);
+ if let hir::ItemKind::OpaqueTy(ref ty) = item.kind {
+ ImplTrait(ty.bounds.iter().filter_map(|x| x.clean(cx)).collect())
+ } else {
+ unreachable!()
+ }
+ }
+ TyKind::Path(_) => clean_qpath(ty, cx),
+ TyKind::TraitObject(bounds, ref lifetime, _) => {
+ let bounds = bounds.iter().map(|bound| bound.clean(cx)).collect();
+ let lifetime =
+ if !lifetime.is_elided() { Some(clean_lifetime(*lifetime, cx)) } else { None };
+ DynTrait(bounds, lifetime)
+ }
+ TyKind::BareFn(barefn) => BareFunction(Box::new(barefn.clean(cx))),
+ // Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s.
+ TyKind::Infer | TyKind::Err => Infer,
+ TyKind::Typeof(..) => panic!("unimplemented type {:?}", ty.kind),
+ }
+}
+
+/// Returns `None` if the type could not be normalized
+fn normalize<'tcx>(cx: &mut DocContext<'tcx>, ty: Ty<'_>) -> Option<Ty<'tcx>> {
+ // HACK: low-churn fix for #79459 while we wait for a trait normalization fix
+ if !cx.tcx.sess.opts.unstable_opts.normalize_docs {
+ return None;
+ }
+
+ use crate::rustc_trait_selection::infer::TyCtxtInferExt;
+ use crate::rustc_trait_selection::traits::query::normalize::AtExt;
+ use rustc_middle::traits::ObligationCause;
+
+ // Try to normalize `<X as Y>::T` to a type
+ let lifted = ty.lift_to_tcx(cx.tcx).unwrap();
+ let normalized = cx.tcx.infer_ctxt().enter(|infcx| {
+ infcx
+ .at(&ObligationCause::dummy(), cx.param_env)
+ .normalize(lifted)
+ .map(|resolved| infcx.resolve_vars_if_possible(resolved.value))
+ });
+ match normalized {
+ Ok(normalized_value) => {
+ debug!("normalized {:?} to {:?}", ty, normalized_value);
+ Some(normalized_value)
+ }
+ Err(err) => {
+ debug!("failed to normalize {:?}: {:?}", ty, err);
+ None
+ }
+ }
+}
+
+pub(crate) fn clean_middle_ty<'tcx>(
+ this: Ty<'tcx>,
+ cx: &mut DocContext<'tcx>,
+ def_id: Option<DefId>,
+) -> Type {
+ trace!("cleaning type: {:?}", this);
+ let ty = normalize(cx, this).unwrap_or(this);
+ match *ty.kind() {
+ ty::Never => Primitive(PrimitiveType::Never),
+ ty::Bool => Primitive(PrimitiveType::Bool),
+ ty::Char => Primitive(PrimitiveType::Char),
+ ty::Int(int_ty) => Primitive(int_ty.into()),
+ ty::Uint(uint_ty) => Primitive(uint_ty.into()),
+ ty::Float(float_ty) => Primitive(float_ty.into()),
+ ty::Str => Primitive(PrimitiveType::Str),
+ ty::Slice(ty) => Slice(Box::new(clean_middle_ty(ty, cx, None))),
+ ty::Array(ty, n) => {
+ let mut n = cx.tcx.lift(n).expect("array lift failed");
+ n = n.eval(cx.tcx, ty::ParamEnv::reveal_all());
+ let n = print_const(cx, n);
+ Array(Box::new(clean_middle_ty(ty, cx, None)), n)
+ }
+ ty::RawPtr(mt) => RawPointer(mt.mutbl, Box::new(clean_middle_ty(mt.ty, cx, None))),
+ ty::Ref(r, ty, mutbl) => BorrowedRef {
+ lifetime: clean_middle_region(r),
+ mutability: mutbl,
+ type_: Box::new(clean_middle_ty(ty, cx, None)),
+ },
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let ty = cx.tcx.lift(this).expect("FnPtr lift failed");
+ let sig = ty.fn_sig(cx.tcx);
+ let decl = clean_fn_decl_from_did_and_sig(cx, None, sig);
+ BareFunction(Box::new(BareFunctionDecl {
+ unsafety: sig.unsafety(),
+ generic_params: Vec::new(),
+ decl,
+ abi: sig.abi(),
+ }))
+ }
+ ty::Adt(def, substs) => {
+ let did = def.did();
+ let kind = match def.adt_kind() {
+ AdtKind::Struct => ItemType::Struct,
+ AdtKind::Union => ItemType::Union,
+ AdtKind::Enum => ItemType::Enum,
+ };
+ inline::record_extern_fqn(cx, did, kind);
+ let path = external_path(cx, did, false, vec![], substs);
+ Type::Path { path }
+ }
+ ty::Foreign(did) => {
+ inline::record_extern_fqn(cx, did, ItemType::ForeignType);
+ let path = external_path(cx, did, false, vec![], InternalSubsts::empty());
+ Type::Path { path }
+ }
+ ty::Dynamic(obj, ref reg) => {
+ // HACK: pick the first `did` as the `did` of the trait object. Someone
+ // might want to implement "native" support for marker-trait-only
+ // trait objects.
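+            //
+            // For example (illustrative): for `dyn Iterator<Item = u32> + Send`,
+            // `did` below is the `Iterator` trait, and `Send` ends up among the
+            // additional `bounds`.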
+ let mut dids = obj.principal_def_id().into_iter().chain(obj.auto_traits());
+ let did = dids
+ .next()
+ .unwrap_or_else(|| panic!("found trait object `{:?}` with no traits?", this));
+ let substs = match obj.principal() {
+ Some(principal) => principal.skip_binder().substs,
+ // marker traits have no substs.
+ _ => cx.tcx.intern_substs(&[]),
+ };
+
+ inline::record_extern_fqn(cx, did, ItemType::Trait);
+
+ let lifetime = clean_middle_region(*reg);
+ let mut bounds = vec![];
+
+ for did in dids {
+ let empty = cx.tcx.intern_substs(&[]);
+ let path = external_path(cx, did, false, vec![], empty);
+ inline::record_extern_fqn(cx, did, ItemType::Trait);
+ let bound = PolyTrait { trait_: path, generic_params: Vec::new() };
+ bounds.push(bound);
+ }
+
+ let mut bindings = vec![];
+ for pb in obj.projection_bounds() {
+ bindings.push(TypeBinding {
+ assoc: projection_to_path_segment(
+ pb.skip_binder()
+ .lift_to_tcx(cx.tcx)
+ .unwrap()
+ // HACK(compiler-errors): Doesn't actually matter what self
+ // type we put here, because we're only using the GAT's substs.
+ .with_self_ty(cx.tcx, cx.tcx.types.self_param)
+ .projection_ty,
+ cx,
+ ),
+ kind: TypeBindingKind::Equality {
+ term: clean_middle_term(pb.skip_binder().term, cx),
+ },
+ });
+ }
+
+ let path = external_path(cx, did, false, bindings, substs);
+ bounds.insert(0, PolyTrait { trait_: path, generic_params: Vec::new() });
+
+ DynTrait(bounds, lifetime)
+ }
+ ty::Tuple(t) => Tuple(t.iter().map(|t| clean_middle_ty(t, cx, None)).collect()),
+
+ ty::Projection(ref data) => clean_projection(*data, cx, def_id),
+
+ ty::Param(ref p) => {
+ if let Some(bounds) = cx.impl_trait_bounds.remove(&p.index.into()) {
+ ImplTrait(bounds)
+ } else {
+ Generic(p.name)
+ }
+ }
+
+ ty::Opaque(def_id, substs) => {
+ // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
+ // by looking up the bounds associated with the def_id.
+ let substs = cx.tcx.lift(substs).expect("Opaque lift failed");
+ let bounds = cx
+ .tcx
+ .explicit_item_bounds(def_id)
+ .iter()
+ .map(|(bound, _)| EarlyBinder(*bound).subst(cx.tcx, substs))
+ .collect::<Vec<_>>();
+ let mut regions = vec![];
+ let mut has_sized = false;
+ let mut bounds = bounds
+ .iter()
+ .filter_map(|bound| {
+ let bound_predicate = bound.kind();
+ let trait_ref = match bound_predicate.skip_binder() {
+ ty::PredicateKind::Trait(tr) => bound_predicate.rebind(tr.trait_ref),
+ ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(_ty, reg)) => {
+ if let Some(r) = clean_middle_region(reg) {
+ regions.push(GenericBound::Outlives(r));
+ }
+ return None;
+ }
+ _ => return None,
+ };
+
+ if let Some(sized) = cx.tcx.lang_items().sized_trait() {
+ if trait_ref.def_id() == sized {
+ has_sized = true;
+ return None;
+ }
+ }
+
+ let bindings: Vec<_> = bounds
+ .iter()
+ .filter_map(|bound| {
+ if let ty::PredicateKind::Projection(proj) = bound.kind().skip_binder()
+ {
+ if proj.projection_ty.trait_ref(cx.tcx) == trait_ref.skip_binder() {
+ Some(TypeBinding {
+ assoc: projection_to_path_segment(proj.projection_ty, cx),
+ kind: TypeBindingKind::Equality {
+ term: clean_middle_term(proj.term, cx),
+ },
+ })
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ Some(clean_poly_trait_ref_with_bindings(cx, trait_ref, &bindings))
+ })
+ .collect::<Vec<_>>();
+ bounds.extend(regions);
+ if !has_sized && !bounds.is_empty() {
+ bounds.insert(0, GenericBound::maybe_sized(cx));
+ }
+ ImplTrait(bounds)
+ }
+
+ ty::Closure(..) => panic!("Closure"),
+ ty::Generator(..) => panic!("Generator"),
+ ty::Bound(..) => panic!("Bound"),
+ ty::Placeholder(..) => panic!("Placeholder"),
+ ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
+ ty::Infer(..) => panic!("Infer"),
+ ty::Error(_) => panic!("Error"),
+ }
+}
+
+pub(crate) fn clean_field<'tcx>(field: &hir::FieldDef<'tcx>, cx: &mut DocContext<'tcx>) -> Item {
+ let def_id = cx.tcx.hir().local_def_id(field.hir_id).to_def_id();
+ clean_field_with_def_id(def_id, field.ident.name, clean_ty(field.ty, cx), cx)
+}
+
+pub(crate) fn clean_middle_field<'tcx>(field: &ty::FieldDef, cx: &mut DocContext<'tcx>) -> Item {
+ clean_field_with_def_id(
+ field.did,
+ field.name,
+ clean_middle_ty(cx.tcx.type_of(field.did), cx, Some(field.did)),
+ cx,
+ )
+}
+
+pub(crate) fn clean_field_with_def_id(
+ def_id: DefId,
+ name: Symbol,
+ ty: Type,
+ cx: &mut DocContext<'_>,
+) -> Item {
+ let what_rustc_thinks =
+ Item::from_def_id_and_parts(def_id, Some(name), StructFieldItem(ty), cx);
+ if is_field_vis_inherited(cx.tcx, def_id) {
+ // Variant fields inherit their enum's visibility.
+ Item { visibility: Visibility::Inherited, ..what_rustc_thinks }
+ } else {
+ what_rustc_thinks
+ }
+}
+
+fn is_field_vis_inherited(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+ let parent = tcx.parent(def_id);
+ match tcx.def_kind(parent) {
+ DefKind::Struct | DefKind::Union => false,
+ DefKind::Variant => true,
+ parent_kind => panic!("unexpected parent kind: {:?}", parent_kind),
+ }
+}
+
+pub(crate) fn clean_visibility(vis: ty::Visibility) -> Visibility {
+ match vis {
+ ty::Visibility::Public => Visibility::Public,
+ // NOTE: this is not quite right: `ty` uses `Invisible` to mean 'private',
+ // while rustdoc really does mean inherited. That means that for enum variants, such as
+ // `pub enum E { V }`, `V` will be marked as `Public` by `ty`, but as `Inherited` by rustdoc.
+ // Various parts of clean override `tcx.visibility` explicitly to make sure this distinction is captured.
+ ty::Visibility::Invisible => Visibility::Inherited,
+ ty::Visibility::Restricted(module) => Visibility::Restricted(module),
+ }
+}
+
+pub(crate) fn clean_variant_def<'tcx>(variant: &ty::VariantDef, cx: &mut DocContext<'tcx>) -> Item {
+ let kind = match variant.ctor_kind {
+ CtorKind::Const => Variant::CLike,
+ CtorKind::Fn => Variant::Tuple(
+ variant.fields.iter().map(|field| clean_middle_field(field, cx)).collect(),
+ ),
+ CtorKind::Fictive => Variant::Struct(VariantStruct {
+ struct_type: CtorKind::Fictive,
+ fields: variant.fields.iter().map(|field| clean_middle_field(field, cx)).collect(),
+ }),
+ };
+ let what_rustc_thinks =
+ Item::from_def_id_and_parts(variant.def_id, Some(variant.name), VariantItem(kind), cx);
+ // don't show `pub` for variants, which always inherit visibility
+ Item { visibility: Inherited, ..what_rustc_thinks }
+}
+
+fn clean_variant_data<'tcx>(
+ variant: &hir::VariantData<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> Variant {
+ match variant {
+ hir::VariantData::Struct(..) => Variant::Struct(VariantStruct {
+ struct_type: CtorKind::from_hir(variant),
+ fields: variant.fields().iter().map(|x| clean_field(x, cx)).collect(),
+ }),
+ hir::VariantData::Tuple(..) => {
+ Variant::Tuple(variant.fields().iter().map(|x| clean_field(x, cx)).collect())
+ }
+ hir::VariantData::Unit(..) => Variant::CLike,
+ }
+}
+
+fn clean_path<'tcx>(path: &hir::Path<'tcx>, cx: &mut DocContext<'tcx>) -> Path {
+ Path { res: path.res, segments: path.segments.iter().map(|x| x.clean(cx)).collect() }
+}
+
+impl<'tcx> Clean<'tcx, GenericArgs> for hir::GenericArgs<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> GenericArgs {
+ if self.parenthesized {
+ let output = clean_ty(self.bindings[0].ty(), cx);
+ let output =
+ if output != Type::Tuple(Vec::new()) { Some(Box::new(output)) } else { None };
+ let inputs = self.inputs().iter().map(|x| clean_ty(x, cx)).collect::<Vec<_>>().into();
+ GenericArgs::Parenthesized { inputs, output }
+ } else {
+ let args = self
+ .args
+ .iter()
+ .map(|arg| match arg {
+ hir::GenericArg::Lifetime(lt) if !lt.is_elided() => {
+ GenericArg::Lifetime(clean_lifetime(*lt, cx))
+ }
+ hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()),
+ hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)),
+ hir::GenericArg::Const(ct) => GenericArg::Const(Box::new(clean_const(ct, cx))),
+ hir::GenericArg::Infer(_inf) => GenericArg::Infer,
+ })
+ .collect::<Vec<_>>()
+ .into();
+ let bindings =
+ self.bindings.iter().map(|x| clean_type_binding(x, cx)).collect::<Vec<_>>().into();
+ GenericArgs::AngleBracketed { args, bindings }
+ }
+ }
+}
+
+impl<'tcx> Clean<'tcx, PathSegment> for hir::PathSegment<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> PathSegment {
+ PathSegment { name: self.ident.name, args: self.args().clean(cx) }
+ }
+}
+
+impl<'tcx> Clean<'tcx, BareFunctionDecl> for hir::BareFnTy<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> BareFunctionDecl {
+ let (generic_params, decl) = enter_impl_trait(cx, |cx| {
+ // NOTE: generics must be cleaned before args
+ let generic_params = self
+ .generic_params
+ .iter()
+ .filter(|p| !is_elided_lifetime(p))
+ .map(|x| clean_generic_param(cx, None, x))
+ .collect();
+ let args = clean_args_from_types_and_names(cx, self.decl.inputs, self.param_names);
+ let decl = clean_fn_decl_with_args(cx, self.decl, args);
+ (generic_params, decl)
+ });
+ BareFunctionDecl { unsafety: self.unsafety, abi: self.abi, decl, generic_params }
+ }
+}
+
+fn clean_maybe_renamed_item<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ item: &hir::Item<'tcx>,
+ renamed: Option<Symbol>,
+) -> Vec<Item> {
+ use hir::ItemKind;
+
+ let def_id = item.def_id.to_def_id();
+ let mut name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id()));
+ cx.with_param_env(def_id, |cx| {
+ let kind = match item.kind {
+ ItemKind::Static(ty, mutability, body_id) => {
+ StaticItem(Static { type_: clean_ty(ty, cx), mutability, expr: Some(body_id) })
+ }
+ ItemKind::Const(ty, body_id) => ConstantItem(Constant {
+ type_: clean_ty(ty, cx),
+ kind: ConstantKind::Local { body: body_id, def_id },
+ }),
+ ItemKind::OpaqueTy(ref ty) => OpaqueTyItem(OpaqueTy {
+ bounds: ty.bounds.iter().filter_map(|x| x.clean(cx)).collect(),
+ generics: ty.generics.clean(cx),
+ }),
+ ItemKind::TyAlias(hir_ty, generics) => {
+ let rustdoc_ty = clean_ty(hir_ty, cx);
+ let ty = clean_middle_ty(hir_ty_to_ty(cx.tcx, hir_ty), cx, None);
+ TypedefItem(Box::new(Typedef {
+ type_: rustdoc_ty,
+ generics: generics.clean(cx),
+ item_type: Some(ty),
+ }))
+ }
+ ItemKind::Enum(ref def, generics) => EnumItem(Enum {
+ variants: def.variants.iter().map(|v| v.clean(cx)).collect(),
+ generics: generics.clean(cx),
+ }),
+ ItemKind::TraitAlias(generics, bounds) => TraitAliasItem(TraitAlias {
+ generics: generics.clean(cx),
+ bounds: bounds.iter().filter_map(|x| x.clean(cx)).collect(),
+ }),
+ ItemKind::Union(ref variant_data, generics) => UnionItem(Union {
+ generics: generics.clean(cx),
+ fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
+ }),
+ ItemKind::Struct(ref variant_data, generics) => StructItem(Struct {
+ struct_type: CtorKind::from_hir(variant_data),
+ generics: generics.clean(cx),
+ fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
+ }),
+ ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx),
+ // proc macros can have a name set by attributes
+ ItemKind::Fn(ref sig, generics, body_id) => {
+ clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
+ }
+ ItemKind::Macro(ref macro_def, _) => {
+ let ty_vis = clean_visibility(cx.tcx.visibility(def_id));
+ MacroItem(Macro {
+ source: display_macro_source(cx, name, macro_def, def_id, ty_vis),
+ })
+ }
+ ItemKind::Trait(_, _, generics, bounds, item_ids) => {
+ let items =
+ item_ids.iter().map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx)).collect();
+
+ TraitItem(Trait {
+ def_id,
+ items,
+ generics: generics.clean(cx),
+ bounds: bounds.iter().filter_map(|x| x.clean(cx)).collect(),
+ })
+ }
+ ItemKind::ExternCrate(orig_name) => {
+ return clean_extern_crate(item, name, orig_name, cx);
+ }
+ ItemKind::Use(path, kind) => {
+ return clean_use_statement(item, name, path, kind, cx, &mut FxHashSet::default());
+ }
+ _ => unreachable!("not yet converted"),
+ };
+
+ vec![Item::from_def_id_and_parts(def_id, Some(name), kind, cx)]
+ })
+}
+
+impl<'tcx> Clean<'tcx, Item> for hir::Variant<'tcx> {
+ fn clean(&self, cx: &mut DocContext<'tcx>) -> Item {
+ let kind = VariantItem(clean_variant_data(&self.data, cx));
+ let what_rustc_thinks =
+ Item::from_hir_id_and_parts(self.id, Some(self.ident.name), kind, cx);
+ // don't show `pub` for variants, which are always public
+ Item { visibility: Inherited, ..what_rustc_thinks }
+ }
+}
+
+fn clean_impl<'tcx>(
+ impl_: &hir::Impl<'tcx>,
+ hir_id: hir::HirId,
+ cx: &mut DocContext<'tcx>,
+) -> Vec<Item> {
+ let tcx = cx.tcx;
+ let mut ret = Vec::new();
+ let trait_ = impl_.of_trait.as_ref().map(|t| clean_trait_ref(t, cx));
+ let items =
+ impl_.items.iter().map(|ii| tcx.hir().impl_item(ii.id).clean(cx)).collect::<Vec<_>>();
+ let def_id = tcx.hir().local_def_id(hir_id);
+
+ // If this impl block is an implementation of the Deref trait, then we
+ // need to try inlining the target's inherent impl blocks as well.
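+    //
+    // For example (illustrative): given `impl Deref for Wrapper` with
+    // `type Target = Inner`, rustdoc also lists `Inner`'s inherent methods on
+    // `Wrapper`'s page under "Methods from Deref<Target = Inner>".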
+ if trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() {
+ build_deref_target_impls(cx, &items, &mut ret);
+ }
+
+ let for_ = clean_ty(impl_.self_ty, cx);
+ let type_alias = for_.def_id(&cx.cache).and_then(|did| match tcx.def_kind(did) {
+ DefKind::TyAlias => Some(clean_middle_ty(tcx.type_of(did), cx, Some(did))),
+ _ => None,
+ });
+ let mut make_item = |trait_: Option<Path>, for_: Type, items: Vec<Item>| {
+ let kind = ImplItem(Box::new(Impl {
+ unsafety: impl_.unsafety,
+ generics: impl_.generics.clean(cx),
+ trait_,
+ for_,
+ items,
+ polarity: tcx.impl_polarity(def_id),
+ kind: if utils::has_doc_flag(tcx, def_id.to_def_id(), sym::fake_variadic) {
+ ImplKind::FakeVaradic
+ } else {
+ ImplKind::Normal
+ },
+ }));
+ Item::from_hir_id_and_parts(hir_id, None, kind, cx)
+ };
+ if let Some(type_alias) = type_alias {
+ ret.push(make_item(trait_.clone(), type_alias, items.clone()));
+ }
+ ret.push(make_item(trait_, for_, items));
+ ret
+}
+
+fn clean_extern_crate<'tcx>(
+ krate: &hir::Item<'tcx>,
+ name: Symbol,
+ orig_name: Option<Symbol>,
+ cx: &mut DocContext<'tcx>,
+) -> Vec<Item> {
+ // this is the ID of the `extern crate` statement
+ let cnum = cx.tcx.extern_mod_stmt_cnum(krate.def_id).unwrap_or(LOCAL_CRATE);
+ // this is the ID of the crate itself
+ let crate_def_id = cnum.as_def_id();
+ let attrs = cx.tcx.hir().attrs(krate.hir_id());
+ let ty_vis = cx.tcx.visibility(krate.def_id);
+ let please_inline = ty_vis.is_public()
+ && attrs.iter().any(|a| {
+ a.has_name(sym::doc)
+ && match a.meta_item_list() {
+ Some(l) => attr::list_contains_name(&l, sym::inline),
+ None => false,
+ }
+ });
+
+ if please_inline {
+ let mut visited = FxHashSet::default();
+
+ let res = Res::Def(DefKind::Mod, crate_def_id);
+
+ if let Some(items) = inline::try_inline(
+ cx,
+ cx.tcx.parent_module(krate.hir_id()).to_def_id(),
+ Some(krate.def_id.to_def_id()),
+ res,
+ name,
+ Some(attrs),
+ &mut visited,
+ ) {
+ return items;
+ }
+ }
+
+ // FIXME: using `from_def_id_and_kind` breaks `rustdoc/masked` for some reason
+ vec![Item {
+ name: Some(name),
+ attrs: Box::new(Attributes::from_ast(attrs)),
+ item_id: crate_def_id.into(),
+ visibility: clean_visibility(ty_vis),
+ kind: Box::new(ExternCrateItem { src: orig_name }),
+ cfg: attrs.cfg(cx.tcx, &cx.cache.hidden_cfg),
+ }]
+}
+
+fn clean_use_statement<'tcx>(
+ import: &hir::Item<'tcx>,
+ name: Symbol,
+ path: &hir::Path<'tcx>,
+ kind: hir::UseKind,
+ cx: &mut DocContext<'tcx>,
+ inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
+) -> Vec<Item> {
+    // We need this comparison because some imports (for std types, for example)
+    // are also "inserted", but directly by the compiler, and those should not be
+    // taken into account.
+ if import.span.ctxt().outer_expn_data().kind == ExpnKind::AstPass(AstPass::StdImports) {
+ return Vec::new();
+ }
+
+ let visibility = cx.tcx.visibility(import.def_id);
+ let attrs = cx.tcx.hir().attrs(import.hir_id());
+ let inline_attr = attrs.lists(sym::doc).get_word_attr(sym::inline);
+ let pub_underscore = visibility.is_public() && name == kw::Underscore;
+ let current_mod = cx.tcx.parent_module_from_def_id(import.def_id);
+
+ // The parent of the module in which this import resides. This
+ // is the same as `current_mod` if that's already the top
+ // level module.
+ let parent_mod = cx.tcx.parent_module_from_def_id(current_mod);
+
+ // This checks if the import can be seen from a higher level module.
+ // In other words, it checks if the visibility is the equivalent of
+ // `pub(super)` or higher. If the current module is the top level
+ // module, there isn't really a parent module, which makes the results
+ // meaningless. In this case, we make sure the answer is `false`.
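+    //
+    // For instance (illustrative): a `pub(super) use foo::Bar;` inside a nested
+    // module satisfies this check, so with `--document-private-items` the import
+    // may still be considered for inlining below.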
+ let is_visible_from_parent_mod = visibility.is_accessible_from(parent_mod.to_def_id(), cx.tcx)
+ && !current_mod.is_top_level_module();
+
+ if pub_underscore {
+ if let Some(ref inline) = inline_attr {
+ rustc_errors::struct_span_err!(
+ cx.tcx.sess,
+ inline.span(),
+ E0780,
+ "anonymous imports cannot be inlined"
+ )
+ .span_label(import.span, "anonymous import")
+ .emit();
+ }
+ }
+
+ // We consider inlining the documentation of `pub use` statements, but we
+ // forcefully don't inline if this is not public or if the
+ // #[doc(no_inline)] attribute is present.
+ // Don't inline doc(hidden) imports so they can be stripped at a later stage.
+ let mut denied = cx.output_format.is_json()
+ || !(visibility.is_public()
+ || (cx.render_options.document_private && is_visible_from_parent_mod))
+ || pub_underscore
+ || attrs.iter().any(|a| {
+ a.has_name(sym::doc)
+ && match a.meta_item_list() {
+ Some(l) => {
+ attr::list_contains_name(&l, sym::no_inline)
+ || attr::list_contains_name(&l, sym::hidden)
+ }
+ None => false,
+ }
+ });
+
+ // Also check whether imports were asked to be inlined, in case we're trying to re-export a
+ // crate in Rust 2018+
+ let path = clean_path(path, cx);
+ let inner = if kind == hir::UseKind::Glob {
+ if !denied {
+ let mut visited = FxHashSet::default();
+ if let Some(items) = inline::try_inline_glob(cx, path.res, &mut visited, inlined_names)
+ {
+ return items;
+ }
+ }
+ Import::new_glob(resolve_use_source(cx, path), true)
+ } else {
+ if inline_attr.is_none() {
+ if let Res::Def(DefKind::Mod, did) = path.res {
+ if !did.is_local() && did.is_crate_root() {
+ // if we're `pub use`ing an extern crate root, don't inline it unless we
+ // were specifically asked for it
+ denied = true;
+ }
+ }
+ }
+ if !denied {
+ let mut visited = FxHashSet::default();
+ let import_def_id = import.def_id.to_def_id();
+
+ if let Some(mut items) = inline::try_inline(
+ cx,
+ cx.tcx.parent_module(import.hir_id()).to_def_id(),
+ Some(import_def_id),
+ path.res,
+ name,
+ Some(attrs),
+ &mut visited,
+ ) {
+ items.push(Item::from_def_id_and_parts(
+ import_def_id,
+ None,
+ ImportItem(Import::new_simple(name, resolve_use_source(cx, path), false)),
+ cx,
+ ));
+ return items;
+ }
+ }
+ Import::new_simple(name, resolve_use_source(cx, path), true)
+ };
+
+ vec![Item::from_def_id_and_parts(import.def_id.to_def_id(), None, ImportItem(inner), cx)]
+}
+
+fn clean_maybe_renamed_foreign_item<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ item: &hir::ForeignItem<'tcx>,
+ renamed: Option<Symbol>,
+) -> Item {
+ let def_id = item.def_id.to_def_id();
+ cx.with_param_env(def_id, |cx| {
+ let kind = match item.kind {
+ hir::ForeignItemKind::Fn(decl, names, generics) => {
+ let (generics, decl) = enter_impl_trait(cx, |cx| {
+ // NOTE: generics must be cleaned before args
+ let generics = generics.clean(cx);
+ let args = clean_args_from_types_and_names(cx, decl.inputs, names);
+ let decl = clean_fn_decl_with_args(cx, decl, args);
+ (generics, decl)
+ });
+ ForeignFunctionItem(Box::new(Function { decl, generics }))
+ }
+ hir::ForeignItemKind::Static(ty, mutability) => {
+ ForeignStaticItem(Static { type_: clean_ty(ty, cx), mutability, expr: None })
+ }
+ hir::ForeignItemKind::Type => ForeignTypeItem,
+ };
+
+ Item::from_hir_id_and_parts(
+ item.hir_id(),
+ Some(renamed.unwrap_or(item.ident.name)),
+ kind,
+ cx,
+ )
+ })
+}
+
+fn clean_type_binding<'tcx>(
+ type_binding: &hir::TypeBinding<'tcx>,
+ cx: &mut DocContext<'tcx>,
+) -> TypeBinding {
+ TypeBinding {
+ assoc: PathSegment { name: type_binding.ident.name, args: type_binding.gen_args.clean(cx) },
+ kind: match type_binding.kind {
+ hir::TypeBindingKind::Equality { ref term } => {
+ TypeBindingKind::Equality { term: clean_hir_term(term, cx) }
+ }
+ hir::TypeBindingKind::Constraint { bounds } => TypeBindingKind::Constraint {
+ bounds: bounds.iter().filter_map(|b| b.clean(cx)).collect(),
+ },
+ },
+ }
+}
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
new file mode 100644
index 000000000..ed7683e36
--- /dev/null
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -0,0 +1,238 @@
+use rustc_ast::token::{self, BinOpToken, Delimiter};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast_pretty::pprust::state::State as Printer;
+use rustc_ast_pretty::pprust::PrintState;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::FilePathMapping;
+use rustc_span::symbol::{kw, Ident, Symbol};
+use rustc_span::Span;
+
+/// Render a macro matcher in a format suitable for displaying to the user
+/// as part of an item declaration.
+pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> String {
+ if let Some(snippet) = snippet_equal_to_token(tcx, matcher) {
+ // If the original source code is known, we display the matcher exactly
+ // as present in the source code.
+ return snippet;
+ }
+
+    // If the matcher is macro-generated, or the source code snippet is not
+    // available for some other reason, we attempt to nicely render the token tree.
+ let mut printer = Printer::new();
+
+ // If the inner ibox fits on one line, we get:
+ //
+ // macro_rules! macroname {
+ // (the matcher) => {...};
+ // }
+ //
+ // If the inner ibox gets wrapped, the cbox will break and get indented:
+ //
+ // macro_rules! macroname {
+ // (
+ // the matcher ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!
+ // ) => {...};
+ // }
+ printer.cbox(8);
+ printer.word("(");
+ printer.zerobreak();
+ printer.ibox(0);
+ match matcher {
+ TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
+        // A matcher that is not a `Delimited` is unexpected and should have failed
+        // to compile, but we render whatever it is, wrapped in parens.
+ TokenTree::Token(..) => print_tt(&mut printer, matcher),
+ }
+ printer.end();
+ printer.break_offset_if_not_bol(0, -4);
+ printer.word(")");
+ printer.end();
+ printer.s.eof()
+}
+
+/// Find the source snippet for this token's Span, reparse it, and return the
+/// snippet if the reparsed TokenTree matches the argument TokenTree.
+fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String> {
+ // Find what rustc thinks is the source snippet.
+ // This may not actually be anything meaningful if this matcher was itself
+ // generated by a macro.
+ let source_map = tcx.sess.source_map();
+ let span = matcher.span();
+ let snippet = source_map.span_to_snippet(span).ok()?;
+
+ // Create a Parser.
+ let sess = ParseSess::new(FilePathMapping::empty());
+ let file_name = source_map.span_to_filename(span);
+ let mut parser =
+ match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
+ Ok(parser) => parser,
+ Err(diagnostics) => {
+ drop(diagnostics);
+ return None;
+ }
+ };
+
+ // Reparse a single token tree.
+ let mut reparsed_trees = match parser.parse_all_token_trees() {
+ Ok(reparsed_trees) => reparsed_trees,
+ Err(diagnostic) => {
+ diagnostic.cancel();
+ return None;
+ }
+ };
+ if reparsed_trees.len() != 1 {
+ return None;
+ }
+ let reparsed_tree = reparsed_trees.pop().unwrap();
+
+ // Compare against the original tree.
+ if reparsed_tree.eq_unspanned(matcher) { Some(snippet) } else { None }
+}
+
+fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
+ match tt {
+ TokenTree::Token(token, _) => {
+ let token_str = printer.token_to_string(token);
+ printer.word(token_str);
+ if let token::DocComment(..) = token.kind {
+ printer.hardbreak()
+ }
+ }
+ TokenTree::Delimited(_span, delim, tts) => {
+ let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
+ printer.word(open_delim);
+ if !tts.is_empty() {
+ if *delim == Delimiter::Brace {
+ printer.space();
+ }
+ print_tts(printer, tts);
+ if *delim == Delimiter::Brace {
+ printer.space();
+ }
+ }
+ let close_delim = printer.token_kind_to_string(&token::CloseDelim(*delim));
+ printer.word(close_delim);
+ }
+ }
+}
+
+fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
+ #[derive(Copy, Clone, PartialEq)]
+ enum State {
+ Start,
+ Dollar,
+ DollarIdent,
+ DollarIdentColon,
+ DollarParen,
+ DollarParenSep,
+ Pound,
+ PoundBang,
+ Ident,
+ Other,
+ }
+
+ use State::*;
+
+ let mut state = Start;
+ for tt in tts.trees() {
+ let (needs_space, next_state) = match &tt {
+ TokenTree::Token(tt, _) => match (state, &tt.kind) {
+ (Dollar, token::Ident(..)) => (false, DollarIdent),
+ (DollarIdent, token::Colon) => (false, DollarIdentColon),
+ (DollarIdentColon, token::Ident(..)) => (false, Other),
+ (
+ DollarParen,
+ token::BinOp(BinOpToken::Plus | BinOpToken::Star) | token::Question,
+ ) => (false, Other),
+ (DollarParen, _) => (false, DollarParenSep),
+ (DollarParenSep, token::BinOp(BinOpToken::Plus | BinOpToken::Star)) => {
+ (false, Other)
+ }
+ (Pound, token::Not) => (false, PoundBang),
+ (_, token::Ident(symbol, /* is_raw */ false))
+ if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
+ {
+ (true, Ident)
+ }
+ (_, token::Comma | token::Semi) => (false, Other),
+ (_, token::Dollar) => (true, Dollar),
+ (_, token::Pound) => (true, Pound),
+ (_, _) => (true, Other),
+ },
+ TokenTree::Delimited(_, delim, _) => match (state, delim) {
+ (Dollar, Delimiter::Parenthesis) => (false, DollarParen),
+ (Pound | PoundBang, Delimiter::Bracket) => (false, Other),
+ (Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
+ (_, _) => (true, Other),
+ },
+ };
+ if state != Start && needs_space {
+ printer.space();
+ }
+ print_tt(printer, tt);
+ state = next_state;
+ }
+}
+
+fn usually_needs_space_between_keyword_and_open_delim(symbol: Symbol, span: Span) -> bool {
+ let ident = Ident { name: symbol, span };
+ let is_keyword = ident.is_used_keyword() || ident.is_unused_keyword();
+ if !is_keyword {
+ // An identifier that is not a keyword usually does not need a space
+ // before an open delim. For example: `f(0)` or `f[0]`.
+ return false;
+ }
+
+ match symbol {
+ // No space after keywords that are syntactically an expression. For
+ // example: a tuple struct created with `let _ = Self(0, 0)`, or if
+ // someone has `impl Index<MyStruct> for bool` then `true[MyStruct]`.
+ kw::False | kw::SelfLower | kw::SelfUpper | kw::True => false,
+
+ // No space, as in `let _: fn();`
+ kw::Fn => false,
+
+ // No space, as in `pub(crate) type T;`
+ kw::Pub => false,
+
+ // No space for keywords that can end an expression, as in `fut.await()`
+ // where fut's Output type is `fn()`.
+ kw::Await => false,
+
+ // Otherwise space after keyword. Some examples:
+ //
+ // `expr as [T; 2]`
+ // ^
+ // `box (tuple,)`
+ // ^
+ // `break (tuple,)`
+ // ^
+ // `type T = dyn (Fn() -> dyn Trait) + Send;`
+ // ^
+ // `for (tuple,) in iter {}`
+ // ^
+ // `if (tuple,) == v {}`
+ // ^
+ // `impl [T] {}`
+ // ^
+ // `for x in [..] {}`
+ // ^
+ // `let () = unit;`
+ // ^
+ // `match [x, y] {...}`
+ // ^
+ // `&mut (x as T)`
+ // ^
+ // `return [];`
+ // ^
+ // `fn f<T>() where (): Into<T>`
+ // ^
+ // `while (a + b).what() {}`
+ // ^
+ // `yield [];`
+ // ^
+ _ => true,
+ }
+}
diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs
new file mode 100644
index 000000000..af7813a77
--- /dev/null
+++ b/src/librustdoc/clean/simplify.rs
@@ -0,0 +1,139 @@
+//! Simplification of where-clauses and parameter bounds into a prettier and
+//! more canonical form.
+//!
+//! Currently all cross-crate-inlined functions use `rustc_middle::ty` to reconstruct
+//! the AST (e.g., see all of `clean::inline`), but this is not always a
+//! lossless transformation. The current format of storage for where-clauses
+//! for functions and such is simply a list of predicates. One example of this
+//! is that the AST predicate of: `where T: Trait<Foo = Bar>` is encoded as:
+//! `where T: Trait, <T as Trait>::Foo = Bar`.
+//!
+//! This module attempts to reconstruct the original where and/or parameter
+//! bounds by special casing scenarios such as these. Fun!
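+//!
+//! Concrete sketch (illustrative signature): a clause written as
+//! `where T: Iterator<Item = u32>` reaches us as the two predicates
+//! `T: Iterator` and `<T as Iterator>::Item == u32`; `where_clauses` and
+//! `merge_bounds` below fold the equality back into the trait bound so the
+//! rendered documentation shows the original associated-type binding.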
+
+use rustc_data_structures::fx::FxIndexMap;
+use rustc_hir::def_id::DefId;
+use rustc_middle::ty;
+use rustc_span::Symbol;
+
+use crate::clean;
+use crate::clean::GenericArgs as PP;
+use crate::clean::WherePredicate as WP;
+use crate::core::DocContext;
+
+pub(crate) fn where_clauses(cx: &DocContext<'_>, clauses: Vec<WP>) -> Vec<WP> {
+ // First, partition the where clause into its separate components.
+ //
+    // We use `FxIndexMap` so that the insertion order is preserved, to avoid messing up
+    // the order of the generated bounds.
+ let mut params: FxIndexMap<Symbol, (Vec<_>, Vec<_>)> = FxIndexMap::default();
+ let mut lifetimes = Vec::new();
+ let mut equalities = Vec::new();
+ let mut tybounds = Vec::new();
+
+ for clause in clauses {
+ match clause {
+ WP::BoundPredicate { ty, bounds, bound_params } => match ty {
+ clean::Generic(s) => {
+ let (b, p) = params.entry(s).or_default();
+ b.extend(bounds);
+ p.extend(bound_params);
+ }
+ t => tybounds.push((t, (bounds, bound_params))),
+ },
+ WP::RegionPredicate { lifetime, bounds } => {
+ lifetimes.push((lifetime, bounds));
+ }
+ WP::EqPredicate { lhs, rhs } => equalities.push((lhs, rhs)),
+ }
+ }
+
+ // Look for equality predicates on associated types that can be merged into
+ // general bound predicates
+ equalities.retain(|&(ref lhs, ref rhs)| {
+ let Some((self_, trait_did, name)) = lhs.projection() else {
+ return true;
+ };
+ let clean::Generic(generic) = self_ else { return true };
+ let Some((bounds, _)) = params.get_mut(generic) else { return true };
+
+ merge_bounds(cx, bounds, trait_did, name, rhs)
+ });
+
+ // And finally, let's reassemble everything
+ let mut clauses = Vec::new();
+ clauses.extend(
+ lifetimes.into_iter().map(|(lt, bounds)| WP::RegionPredicate { lifetime: lt, bounds }),
+ );
+ clauses.extend(params.into_iter().map(|(k, (bounds, params))| WP::BoundPredicate {
+ ty: clean::Generic(k),
+ bounds,
+ bound_params: params,
+ }));
+ clauses.extend(tybounds.into_iter().map(|(ty, (bounds, bound_params))| WP::BoundPredicate {
+ ty,
+ bounds,
+ bound_params,
+ }));
+ clauses.extend(equalities.into_iter().map(|(lhs, rhs)| WP::EqPredicate { lhs, rhs }));
+ clauses
+}
+
+pub(crate) fn merge_bounds(
+ cx: &clean::DocContext<'_>,
+ bounds: &mut Vec<clean::GenericBound>,
+ trait_did: DefId,
+ assoc: clean::PathSegment,
+ rhs: &clean::Term,
+) -> bool {
+ !bounds.iter_mut().any(|b| {
+ let trait_ref = match *b {
+ clean::GenericBound::TraitBound(ref mut tr, _) => tr,
+ clean::GenericBound::Outlives(..) => return false,
+ };
+ // If this QPath's trait `trait_did` is the same as, or a supertrait
+ // of, the bound's trait `did` then we can keep going, otherwise
+ // this is just a plain old equality bound.
+ if !trait_is_same_or_supertrait(cx, trait_ref.trait_.def_id(), trait_did) {
+ return false;
+ }
+ let last = trait_ref.trait_.segments.last_mut().expect("segments were empty");
+ match last.args {
+ PP::AngleBracketed { ref mut bindings, .. } => {
+ bindings.push(clean::TypeBinding {
+ assoc: assoc.clone(),
+ kind: clean::TypeBindingKind::Equality { term: rhs.clone() },
+ });
+ }
+ PP::Parenthesized { ref mut output, .. } => match output {
+ Some(o) => assert_eq!(&clean::Term::Type(o.as_ref().clone()), rhs),
+ None => {
+ if *rhs != clean::Term::Type(clean::Type::Tuple(Vec::new())) {
+ *output = Some(Box::new(rhs.ty().unwrap().clone()));
+ }
+ }
+ },
+ };
+ true
+ })
+}
+
+fn trait_is_same_or_supertrait(cx: &DocContext<'_>, child: DefId, trait_: DefId) -> bool {
+ if child == trait_ {
+ return true;
+ }
+ let predicates = cx.tcx.super_predicates_of(child);
+ debug_assert!(cx.tcx.generics_of(child).has_self);
+ let self_ty = cx.tcx.types.self_param;
+ predicates
+ .predicates
+ .iter()
+ .filter_map(|(pred, _)| {
+ if let ty::PredicateKind::Trait(pred) = pred.kind().skip_binder() {
+ if pred.trait_ref.self_ty() == self_ty { Some(pred.def_id()) } else { None }
+ } else {
+ None
+ }
+ })
+ .any(|did| trait_is_same_or_supertrait(cx, did, trait_))
+}
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
new file mode 100644
index 000000000..0e6de842c
--- /dev/null
+++ b/src/librustdoc/clean/types.rs
@@ -0,0 +1,2508 @@
+use std::cell::RefCell;
+use std::default::Default;
+use std::hash::Hash;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::sync::Arc;
+use std::sync::OnceLock as OnceCell;
+use std::{cmp, fmt, iter};
+
+use arrayvec::ArrayVec;
+
+use rustc_ast::attr;
+use rustc_ast::util::comments::beautify_doc_string;
+use rustc_ast::{self as ast, AttrStyle};
+use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel};
+use rustc_const_eval::const_eval::is_unstable_const_fn;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, DefKind, Res};
+use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc_hir::lang_items::LangItem;
+use rustc_hir::{BodyId, Mutability};
+use rustc_index::vec::IndexVec;
+use rustc_middle::ty::fast_reject::SimplifiedType;
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_session::Session;
+use rustc_span::hygiene::MacroKind;
+use rustc_span::source_map::DUMMY_SP;
+use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::{self, FileName, Loc};
+use rustc_target::abi::VariantIdx;
+use rustc_target::spec::abi::Abi;
+use rustc_typeck::check::intrinsic::intrinsic_operation_unsafety;
+
+use crate::clean::cfg::Cfg;
+use crate::clean::clean_visibility;
+use crate::clean::external_path;
+use crate::clean::inline::{self, print_inlined_const};
+use crate::clean::utils::{is_literal_expr, print_const_expr, print_evaluated_const};
+use crate::core::DocContext;
+use crate::formats::cache::Cache;
+use crate::formats::item_type::ItemType;
+use crate::html::render::Context;
+use crate::passes::collect_intra_doc_links::UrlFragment;
+
+pub(crate) use self::FnRetTy::*;
+pub(crate) use self::ItemKind::*;
+pub(crate) use self::SelfTy::*;
+pub(crate) use self::Type::{
+ Array, BareFunction, BorrowedRef, DynTrait, Generic, ImplTrait, Infer, Primitive, QPath,
+ RawPointer, Slice, Tuple,
+};
+pub(crate) use self::Visibility::{Inherited, Public};
+
+#[cfg(test)]
+mod tests;
+
+pub(crate) type ItemIdSet = FxHashSet<ItemId>;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
+pub(crate) enum ItemId {
+ /// A "normal" item that uses a [`DefId`] for identification.
+ DefId(DefId),
+ /// Identifier that is used for auto traits.
+ Auto { trait_: DefId, for_: DefId },
+ /// Identifier that is used for blanket implementations.
+ Blanket { impl_id: DefId, for_: DefId },
+ /// Identifier for primitive types.
+ Primitive(PrimitiveType, CrateNum),
+}
+
+impl ItemId {
+ #[inline]
+ pub(crate) fn is_local(self) -> bool {
+ match self {
+ ItemId::Auto { for_: id, .. }
+ | ItemId::Blanket { for_: id, .. }
+ | ItemId::DefId(id) => id.is_local(),
+ ItemId::Primitive(_, krate) => krate == LOCAL_CRATE,
+ }
+ }
+
+ #[inline]
+ #[track_caller]
+ pub(crate) fn expect_def_id(self) -> DefId {
+ self.as_def_id()
+ .unwrap_or_else(|| panic!("ItemId::expect_def_id: `{:?}` isn't a DefId", self))
+ }
+
+ #[inline]
+ pub(crate) fn as_def_id(self) -> Option<DefId> {
+ match self {
+ ItemId::DefId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ #[inline]
+ pub(crate) fn krate(self) -> CrateNum {
+ match self {
+ ItemId::Auto { for_: id, .. }
+ | ItemId::Blanket { for_: id, .. }
+ | ItemId::DefId(id) => id.krate,
+ ItemId::Primitive(_, krate) => krate,
+ }
+ }
+}
+
+impl From<DefId> for ItemId {
+ fn from(id: DefId) -> Self {
+ Self::DefId(id)
+ }
+}
+
+/// The crate currently being documented.
+#[derive(Clone, Debug)]
+pub(crate) struct Crate {
+ pub(crate) module: Item,
+ pub(crate) primitives: ThinVec<(DefId, PrimitiveType)>,
+ /// Only here so that they can be filtered through the rustdoc passes.
+ pub(crate) external_traits: Rc<RefCell<FxHashMap<DefId, TraitWithExtraInfo>>>,
+}
+
+impl Crate {
+ pub(crate) fn name(&self, tcx: TyCtxt<'_>) -> Symbol {
+ ExternalCrate::LOCAL.name(tcx)
+ }
+
+ pub(crate) fn src(&self, tcx: TyCtxt<'_>) -> FileName {
+ ExternalCrate::LOCAL.src(tcx)
+ }
+}
+
+/// This struct is used to wrap additional information added by rustdoc on a `trait` item.
+#[derive(Clone, Debug)]
+pub(crate) struct TraitWithExtraInfo {
+ pub(crate) trait_: Trait,
+ pub(crate) is_notable: bool,
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct ExternalCrate {
+ pub(crate) crate_num: CrateNum,
+}
+
+impl ExternalCrate {
+ const LOCAL: Self = Self { crate_num: LOCAL_CRATE };
+
+ #[inline]
+ pub(crate) fn def_id(&self) -> DefId {
+ self.crate_num.as_def_id()
+ }
+
+ pub(crate) fn src(&self, tcx: TyCtxt<'_>) -> FileName {
+ let krate_span = tcx.def_span(self.def_id());
+ tcx.sess.source_map().span_to_filename(krate_span)
+ }
+
+ pub(crate) fn name(&self, tcx: TyCtxt<'_>) -> Symbol {
+ tcx.crate_name(self.crate_num)
+ }
+
+ pub(crate) fn src_root(&self, tcx: TyCtxt<'_>) -> PathBuf {
+ match self.src(tcx) {
+ FileName::Real(ref p) => match p.local_path_if_available().parent() {
+ Some(p) => p.to_path_buf(),
+ None => PathBuf::new(),
+ },
+ _ => PathBuf::new(),
+ }
+ }
+
+ /// Attempts to find where an external crate is located, given that we're
+ /// rendering into the specified source destination.
+ pub(crate) fn location(
+ &self,
+ extern_url: Option<&str>,
+ extern_url_takes_precedence: bool,
+ dst: &std::path::Path,
+ tcx: TyCtxt<'_>,
+ ) -> ExternalLocation {
+ use ExternalLocation::*;
+
+ fn to_remote(url: impl ToString) -> ExternalLocation {
+ let mut url = url.to_string();
+ if !url.ends_with('/') {
+ url.push('/');
+ }
+ Remote(url)
+ }
+
+ // See if there's documentation generated into the local directory
+ // WARNING: since rustdoc creates these directories as it generates documentation, this check is
+ // only accurate before rendering starts. Make sure to call `location()` before that happens.
+ let local_location = dst.join(self.name(tcx).as_str());
+ if local_location.is_dir() {
+ return Local;
+ }
+
+ if extern_url_takes_precedence {
+ if let Some(url) = extern_url {
+ return to_remote(url);
+ }
+ }
+
+ // Failing that, see if there's an attribute specifying where to find this
+ // external crate
+ let did = self.crate_num.as_def_id();
+ tcx.get_attrs(did, sym::doc)
+ .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
+ .filter(|a| a.has_name(sym::html_root_url))
+ .filter_map(|a| a.value_str())
+ .map(to_remote)
+ .next()
+ .or_else(|| extern_url.map(to_remote)) // NOTE: only matters if `extern_url_takes_precedence` is false
+ .unwrap_or(Unknown) // Well, at least we tried.
+ }
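+
+ // Illustrative summary (not part of the original source) of the precedence implemented
+ // above: an already-generated `dst/<crate name>/` directory yields `Local`; otherwise
+ // `extern_url` wins when `extern_url_takes_precedence` is set; otherwise the crate's own
+ // `#![doc(html_root_url = "...")]` attribute is used; otherwise `extern_url` is tried as
+ // a fallback; and `Unknown` is returned if nothing matched.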
+
+ pub(crate) fn keywords(&self, tcx: TyCtxt<'_>) -> ThinVec<(DefId, Symbol)> {
+ let root = self.def_id();
+
+ let as_keyword = |res: Res<!>| {
+ if let Res::Def(DefKind::Mod, def_id) = res {
+ let mut keyword = None;
+ let meta_items = tcx
+ .get_attrs(def_id, sym::doc)
+ .flat_map(|attr| attr.meta_item_list().unwrap_or_default());
+ for meta in meta_items {
+ if meta.has_name(sym::keyword) {
+ if let Some(v) = meta.value_str() {
+ keyword = Some(v);
+ break;
+ }
+ }
+ }
+ return keyword.map(|p| (def_id, p));
+ }
+ None
+ };
+ if root.is_local() {
+ tcx.hir()
+ .root_module()
+ .item_ids
+ .iter()
+ .filter_map(|&id| {
+ let item = tcx.hir().item(id);
+ match item.kind {
+ hir::ItemKind::Mod(_) => {
+ as_keyword(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
+ }
+ hir::ItemKind::Use(path, hir::UseKind::Single)
+ if tcx.visibility(id.def_id).is_public() =>
+ {
+ as_keyword(path.res.expect_non_local())
+ .map(|(_, prim)| (id.def_id.to_def_id(), prim))
+ }
+ _ => None,
+ }
+ })
+ .collect()
+ } else {
+ tcx.module_children(root).iter().map(|item| item.res).filter_map(as_keyword).collect()
+ }
+ }
+
+ pub(crate) fn primitives(&self, tcx: TyCtxt<'_>) -> ThinVec<(DefId, PrimitiveType)> {
+ let root = self.def_id();
+
+ // Collect all inner modules which are tagged as implementations of
+ // primitives.
+ //
+ // Note that this loop only searches the top-level items of the crate,
+ // and this is intentional. If we were to search the entire crate for an
+ // item tagged with `#[doc(primitive)]` then we would also have to
+ // search the entirety of external modules for items tagged
+ // `#[doc(primitive)]`, which is a pretty inefficient process (decoding
+ // all that metadata unconditionally).
+ //
+ // In order to keep the metadata load under control, the
+ // `#[doc(primitive)]` feature is explicitly designed to only allow the
+ // primitive tags to show up as the top level items in a crate.
+ //
+ // Also note that this does not attempt to deal with modules tagged
+ // duplicately for the same primitive. This is handled later on when
+ // rendering by delegating everything to a hash map.
+ let as_primitive = |res: Res<!>| {
+ if let Res::Def(DefKind::Mod, def_id) = res {
+ let mut prim = None;
+ let meta_items = tcx
+ .get_attrs(def_id, sym::doc)
+ .flat_map(|attr| attr.meta_item_list().unwrap_or_default());
+ for meta in meta_items {
+ if let Some(v) = meta.value_str() {
+ if meta.has_name(sym::primitive) {
+ prim = PrimitiveType::from_symbol(v);
+ if prim.is_some() {
+ break;
+ }
+ // FIXME: should warn on unknown primitives?
+ }
+ }
+ }
+ return prim.map(|p| (def_id, p));
+ }
+ None
+ };
+
+ if root.is_local() {
+ tcx.hir()
+ .root_module()
+ .item_ids
+ .iter()
+ .filter_map(|&id| {
+ let item = tcx.hir().item(id);
+ match item.kind {
+ hir::ItemKind::Mod(_) => {
+ as_primitive(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
+ }
+ hir::ItemKind::Use(path, hir::UseKind::Single)
+ if tcx.visibility(id.def_id).is_public() =>
+ {
+ as_primitive(path.res.expect_non_local()).map(|(_, prim)| {
+ // Pretend the primitive is local.
+ (id.def_id.to_def_id(), prim)
+ })
+ }
+ _ => None,
+ }
+ })
+ .collect()
+ } else {
+ tcx.module_children(root).iter().map(|item| item.res).filter_map(as_primitive).collect()
+ }
+ }
+}
+
+/// Indicates where an external crate can be found.
+#[derive(Debug)]
+pub(crate) enum ExternalLocation {
+ /// Remote URL root of the external crate
+ Remote(String),
+ /// This external crate can be found in the local doc/ folder
+ Local,
+ /// The external crate could not be found.
+ Unknown,
+}
+
+/// Anything with a source location and set of attributes and, optionally, a
+/// name. That is, anything that can be documented. This doesn't correspond
+/// directly to the AST's concept of an item; it's a strict superset.
+#[derive(Clone)]
+pub(crate) struct Item {
+ /// The name of this item.
+ /// Optional because not every item has a name, e.g. impls.
+ pub(crate) name: Option<Symbol>,
+ pub(crate) attrs: Box<Attributes>,
+ pub(crate) visibility: Visibility,
+ /// Information about this item that is specific to what kind of item it is.
+ /// E.g., struct vs enum vs function.
+ pub(crate) kind: Box<ItemKind>,
+ pub(crate) item_id: ItemId,
+
+ pub(crate) cfg: Option<Arc<Cfg>>,
+}
+
+/// NOTE: this does NOT unconditionally print every field of the item, to avoid thousands of lines of logs.
+/// If you want to see the debug output for attributes and the `kind` as well, use `{:#?}` instead of `{:?}`.
+impl fmt::Debug for Item {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let alternate = f.alternate();
+ // hand-picked fields that don't bloat the logs too much
+ let mut fmt = f.debug_struct("Item");
+ fmt.field("name", &self.name)
+ .field("visibility", &self.visibility)
+ .field("item_id", &self.item_id);
+ // allow printing the full item if someone really wants to
+ if alternate {
+ fmt.field("attrs", &self.attrs).field("kind", &self.kind).field("cfg", &self.cfg);
+ } else {
+ fmt.field("kind", &self.type_());
+ fmt.field("docs", &self.doc_value());
+ }
+ fmt.finish()
+ }
+}
+
+pub(crate) fn rustc_span(def_id: DefId, tcx: TyCtxt<'_>) -> Span {
+ Span::new(def_id.as_local().map_or_else(
+ || tcx.def_span(def_id),
+ |local| {
+ let hir = tcx.hir();
+ hir.span_with_body(hir.local_def_id_to_hir_id(local))
+ },
+ ))
+}
+
+impl Item {
+ pub(crate) fn stability<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<Stability> {
+ self.item_id.as_def_id().and_then(|did| tcx.lookup_stability(did))
+ }
+
+ pub(crate) fn const_stability<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<ConstStability> {
+ self.item_id.as_def_id().and_then(|did| tcx.lookup_const_stability(did))
+ }
+
+ pub(crate) fn deprecation(&self, tcx: TyCtxt<'_>) -> Option<Deprecation> {
+ self.item_id.as_def_id().and_then(|did| tcx.lookup_deprecation(did))
+ }
+
+ pub(crate) fn inner_docs(&self, tcx: TyCtxt<'_>) -> bool {
+ self.item_id
+ .as_def_id()
+ .map(|did| tcx.get_attrs_unchecked(did).inner_docs())
+ .unwrap_or(false)
+ }
+
+ pub(crate) fn span(&self, tcx: TyCtxt<'_>) -> Span {
+ let kind = match &*self.kind {
+ ItemKind::StrippedItem(k) => k,
+ _ => &*self.kind,
+ };
+ match kind {
+ ItemKind::ModuleItem(Module { span, .. }) => *span,
+ ItemKind::ImplItem(box Impl { kind: ImplKind::Auto, .. }) => Span::dummy(),
+ ItemKind::ImplItem(box Impl { kind: ImplKind::Blanket(_), .. }) => {
+ if let ItemId::Blanket { impl_id, .. } = self.item_id {
+ rustc_span(impl_id, tcx)
+ } else {
+ panic!("blanket impl item has non-blanket ID")
+ }
+ }
+ _ => {
+ self.item_id.as_def_id().map(|did| rustc_span(did, tcx)).unwrap_or_else(Span::dummy)
+ }
+ }
+ }
+
+ pub(crate) fn attr_span(&self, tcx: TyCtxt<'_>) -> rustc_span::Span {
+ crate::passes::span_of_attrs(&self.attrs).unwrap_or_else(|| self.span(tcx).inner())
+ }
+
+ /// Finds the `doc` attribute as a NameValue and returns the corresponding
+ /// value found.
+ pub(crate) fn doc_value(&self) -> Option<String> {
+ self.attrs.doc_value()
+ }
+
+ /// Convenience wrapper around [`Self::from_def_id_and_parts`] which converts
+ /// `hir_id` to a [`DefId`]
+ pub(crate) fn from_hir_id_and_parts(
+ hir_id: hir::HirId,
+ name: Option<Symbol>,
+ kind: ItemKind,
+ cx: &mut DocContext<'_>,
+ ) -> Item {
+ Item::from_def_id_and_parts(cx.tcx.hir().local_def_id(hir_id).to_def_id(), name, kind, cx)
+ }
+
+ pub(crate) fn from_def_id_and_parts(
+ def_id: DefId,
+ name: Option<Symbol>,
+ kind: ItemKind,
+ cx: &mut DocContext<'_>,
+ ) -> Item {
+ let ast_attrs = cx.tcx.get_attrs_unchecked(def_id);
+
+ Self::from_def_id_and_attrs_and_parts(
+ def_id,
+ name,
+ kind,
+ Box::new(Attributes::from_ast(ast_attrs)),
+ cx,
+ ast_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg),
+ )
+ }
+
+ pub(crate) fn from_def_id_and_attrs_and_parts(
+ def_id: DefId,
+ name: Option<Symbol>,
+ kind: ItemKind,
+ attrs: Box<Attributes>,
+ cx: &mut DocContext<'_>,
+ cfg: Option<Arc<Cfg>>,
+ ) -> Item {
+ trace!("name={:?}, def_id={:?}", name, def_id);
+
+ // Primitives and Keywords are written in the source code as private modules.
+ // The modules need to be private so that nobody actually uses them, but the
+ // keywords and primitives that they are documenting are public.
+ let visibility = if matches!(&kind, ItemKind::KeywordItem | ItemKind::PrimitiveItem(..)) {
+ Visibility::Public
+ } else {
+ clean_visibility(cx.tcx.visibility(def_id))
+ };
+
+ Item { item_id: def_id.into(), kind: Box::new(kind), name, attrs, visibility, cfg }
+ }
+
+ /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
+ /// with newlines.
+ pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
+ self.attrs.collapsed_doc_value()
+ }
+
+ pub(crate) fn links(&self, cx: &Context<'_>) -> Vec<RenderedLink> {
+ use crate::html::format::href;
+
+ cx.cache()
+ .intra_doc_links
+ .get(&self.item_id)
+ .map_or(&[][..], |v| v.as_slice())
+ .iter()
+ .filter_map(|ItemLink { link: s, link_text, did, ref fragment }| {
+ debug!(?did);
+ if let Ok((mut href, ..)) = href(*did, cx) {
+ debug!(?href);
+ if let Some(ref fragment) = *fragment {
+ fragment.render(&mut href, cx.tcx())
+ }
+ Some(RenderedLink {
+ original_text: s.clone(),
+ new_text: link_text.clone(),
+ href,
+ })
+ } else {
+ None
+ }
+ })
+ .collect()
+ }
+
+ /// Find a list of all link names, without finding their href.
+ ///
+ /// This is used for generating summary text, which does not include
+ /// the link text, but does need to know which `[]`-bracketed names
+ /// are actually links.
+ pub(crate) fn link_names(&self, cache: &Cache) -> Vec<RenderedLink> {
+ cache
+ .intra_doc_links
+ .get(&self.item_id)
+ .map_or(&[][..], |v| v.as_slice())
+ .iter()
+ .map(|ItemLink { link: s, link_text, .. }| RenderedLink {
+ original_text: s.clone(),
+ new_text: link_text.clone(),
+ href: String::new(),
+ })
+ .collect()
+ }
+
+ pub(crate) fn is_crate(&self) -> bool {
+ self.is_mod() && self.item_id.as_def_id().map_or(false, |did| did.is_crate_root())
+ }
+ pub(crate) fn is_mod(&self) -> bool {
+ self.type_() == ItemType::Module
+ }
+ pub(crate) fn is_trait(&self) -> bool {
+ self.type_() == ItemType::Trait
+ }
+ pub(crate) fn is_struct(&self) -> bool {
+ self.type_() == ItemType::Struct
+ }
+ pub(crate) fn is_enum(&self) -> bool {
+ self.type_() == ItemType::Enum
+ }
+ pub(crate) fn is_variant(&self) -> bool {
+ self.type_() == ItemType::Variant
+ }
+ pub(crate) fn is_associated_type(&self) -> bool {
+ matches!(&*self.kind, AssocTypeItem(..) | StrippedItem(box AssocTypeItem(..)))
+ }
+ pub(crate) fn is_ty_associated_type(&self) -> bool {
+ matches!(&*self.kind, TyAssocTypeItem(..) | StrippedItem(box TyAssocTypeItem(..)))
+ }
+ pub(crate) fn is_associated_const(&self) -> bool {
+ matches!(&*self.kind, AssocConstItem(..) | StrippedItem(box AssocConstItem(..)))
+ }
+ pub(crate) fn is_ty_associated_const(&self) -> bool {
+ matches!(&*self.kind, TyAssocConstItem(..) | StrippedItem(box TyAssocConstItem(..)))
+ }
+ pub(crate) fn is_method(&self) -> bool {
+ self.type_() == ItemType::Method
+ }
+ pub(crate) fn is_ty_method(&self) -> bool {
+ self.type_() == ItemType::TyMethod
+ }
+ pub(crate) fn is_typedef(&self) -> bool {
+ self.type_() == ItemType::Typedef
+ }
+ pub(crate) fn is_primitive(&self) -> bool {
+ self.type_() == ItemType::Primitive
+ }
+ pub(crate) fn is_union(&self) -> bool {
+ self.type_() == ItemType::Union
+ }
+ pub(crate) fn is_import(&self) -> bool {
+ self.type_() == ItemType::Import
+ }
+ pub(crate) fn is_extern_crate(&self) -> bool {
+ self.type_() == ItemType::ExternCrate
+ }
+ pub(crate) fn is_keyword(&self) -> bool {
+ self.type_() == ItemType::Keyword
+ }
+ pub(crate) fn is_stripped(&self) -> bool {
+ match *self.kind {
+ StrippedItem(..) => true,
+ ImportItem(ref i) => !i.should_be_displayed,
+ _ => false,
+ }
+ }
+ pub(crate) fn has_stripped_entries(&self) -> Option<bool> {
+ match *self.kind {
+ StructItem(ref struct_) => Some(struct_.has_stripped_entries()),
+ UnionItem(ref union_) => Some(union_.has_stripped_entries()),
+ EnumItem(ref enum_) => Some(enum_.has_stripped_entries()),
+ VariantItem(ref v) => v.has_stripped_entries(),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn stability_class(&self, tcx: TyCtxt<'_>) -> Option<String> {
+ self.stability(tcx).as_ref().and_then(|s| {
+ let mut classes = Vec::with_capacity(2);
+
+ if s.is_unstable() {
+ classes.push("unstable");
+ }
+
+ // FIXME: what about non-staged API items that are deprecated?
+ if self.deprecation(tcx).is_some() {
+ classes.push("deprecated");
+ }
+
+ if !classes.is_empty() { Some(classes.join(" ")) } else { None }
+ })
+ }
+
+ pub(crate) fn stable_since(&self, tcx: TyCtxt<'_>) -> Option<Symbol> {
+ match self.stability(tcx)?.level {
+ StabilityLevel::Stable { since, .. } => Some(since),
+ StabilityLevel::Unstable { .. } => None,
+ }
+ }
+
+ pub(crate) fn const_stable_since(&self, tcx: TyCtxt<'_>) -> Option<Symbol> {
+ match self.const_stability(tcx)?.level {
+ StabilityLevel::Stable { since, .. } => Some(since),
+ StabilityLevel::Unstable { .. } => None,
+ }
+ }
+
+ pub(crate) fn is_non_exhaustive(&self) -> bool {
+ self.attrs.other_attrs.iter().any(|a| a.has_name(sym::non_exhaustive))
+ }
+
+ /// Returns a documentation-level item type from the item.
+ pub(crate) fn type_(&self) -> ItemType {
+ ItemType::from(self)
+ }
+
+ pub(crate) fn is_default(&self) -> bool {
+ match *self.kind {
+ ItemKind::MethodItem(_, Some(defaultness)) => {
+ defaultness.has_value() && !defaultness.is_final()
+ }
+ _ => false,
+ }
+ }
+
+ /// Returns a `FnHeader` if `self` is a function item, otherwise returns `None`.
+ pub(crate) fn fn_header(&self, tcx: TyCtxt<'_>) -> Option<hir::FnHeader> {
+ fn build_fn_header(
+ def_id: DefId,
+ tcx: TyCtxt<'_>,
+ asyncness: hir::IsAsync,
+ ) -> hir::FnHeader {
+ let sig = tcx.fn_sig(def_id);
+ let constness =
+ if tcx.is_const_fn(def_id) && is_unstable_const_fn(tcx, def_id).is_none() {
+ hir::Constness::Const
+ } else {
+ hir::Constness::NotConst
+ };
+ hir::FnHeader { unsafety: sig.unsafety(), abi: sig.abi(), constness, asyncness }
+ }
+ let header = match *self.kind {
+ ItemKind::ForeignFunctionItem(_) => {
+ let abi = tcx.fn_sig(self.item_id.as_def_id().unwrap()).abi();
+ hir::FnHeader {
+ unsafety: if abi == Abi::RustIntrinsic {
+ intrinsic_operation_unsafety(self.name.unwrap())
+ } else {
+ hir::Unsafety::Unsafe
+ },
+ abi,
+ constness: hir::Constness::NotConst,
+ asyncness: hir::IsAsync::NotAsync,
+ }
+ }
+ ItemKind::FunctionItem(_) | ItemKind::MethodItem(_, _) => {
+ let def_id = self.item_id.as_def_id().unwrap();
+ build_fn_header(def_id, tcx, tcx.asyncness(def_id))
+ }
+ ItemKind::TyMethodItem(_) => {
+ build_fn_header(self.item_id.as_def_id().unwrap(), tcx, hir::IsAsync::NotAsync)
+ }
+ _ => return None,
+ };
+ Some(header)
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum ItemKind {
+ ExternCrateItem {
+ /// The crate's name, *not* the name it's imported as.
+ src: Option<Symbol>,
+ },
+ ImportItem(Import),
+ StructItem(Struct),
+ UnionItem(Union),
+ EnumItem(Enum),
+ FunctionItem(Box<Function>),
+ ModuleItem(Module),
+ TypedefItem(Box<Typedef>),
+ OpaqueTyItem(OpaqueTy),
+ StaticItem(Static),
+ ConstantItem(Constant),
+ TraitItem(Trait),
+ TraitAliasItem(TraitAlias),
+ ImplItem(Box<Impl>),
+ /// A required method in a trait declaration, meaning it's only a function signature.
+ TyMethodItem(Box<Function>),
+ /// A method in a trait impl or a provided method in a trait declaration.
+ ///
+ /// Compared to [TyMethodItem], it also contains a method body.
+ MethodItem(Box<Function>, Option<hir::Defaultness>),
+ StructFieldItem(Type),
+ VariantItem(Variant),
+ /// `fn`s from an extern block
+ ForeignFunctionItem(Box<Function>),
+ /// `static`s from an extern block
+ ForeignStaticItem(Static),
+ /// `type`s from an extern block
+ ForeignTypeItem,
+ MacroItem(Macro),
+ ProcMacroItem(ProcMacro),
+ PrimitiveItem(PrimitiveType),
+ /// A required associated constant in a trait declaration.
+ TyAssocConstItem(Type),
+ /// An associated constant in a trait impl or a provided one in a trait declaration.
+ AssocConstItem(Type, ConstantKind),
+ /// A required associated type in a trait declaration.
+ ///
+ /// The bounds may be non-empty if there is a `where` clause.
+ TyAssocTypeItem(Box<Generics>, Vec<GenericBound>),
+ /// An associated type in a trait impl or a provided one in a trait declaration.
+ AssocTypeItem(Box<Typedef>, Vec<GenericBound>),
+ /// An item that has been stripped by a rustdoc pass
+ StrippedItem(Box<ItemKind>),
+ KeywordItem,
+}
+
+impl ItemKind {
+ /// Some items contain others such as structs (for their fields) and Enums
+ /// (for their variants). This method returns those contained items.
+ pub(crate) fn inner_items(&self) -> impl Iterator<Item = &Item> {
+ match self {
+ StructItem(s) => s.fields.iter(),
+ UnionItem(u) => u.fields.iter(),
+ VariantItem(Variant::Struct(v)) => v.fields.iter(),
+ VariantItem(Variant::Tuple(v)) => v.iter(),
+ EnumItem(e) => e.variants.iter(),
+ TraitItem(t) => t.items.iter(),
+ ImplItem(i) => i.items.iter(),
+ ModuleItem(m) => m.items.iter(),
+ ExternCrateItem { .. }
+ | ImportItem(_)
+ | FunctionItem(_)
+ | TypedefItem(_)
+ | OpaqueTyItem(_)
+ | StaticItem(_)
+ | ConstantItem(_)
+ | TraitAliasItem(_)
+ | TyMethodItem(_)
+ | MethodItem(_, _)
+ | StructFieldItem(_)
+ | VariantItem(_)
+ | ForeignFunctionItem(_)
+ | ForeignStaticItem(_)
+ | ForeignTypeItem
+ | MacroItem(_)
+ | ProcMacroItem(_)
+ | PrimitiveItem(_)
+ | TyAssocConstItem(_)
+ | AssocConstItem(_, _)
+ | TyAssocTypeItem(..)
+ | AssocTypeItem(..)
+ | StrippedItem(_)
+ | KeywordItem => [].iter(),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Module {
+ pub(crate) items: Vec<Item>,
+ pub(crate) span: Span,
+}
+
+pub(crate) trait AttributesExt {
+ type AttributeIterator<'a>: Iterator<Item = ast::NestedMetaItem>
+ where
+ Self: 'a;
+
+ fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a>;
+
+ fn span(&self) -> Option<rustc_span::Span>;
+
+ fn inner_docs(&self) -> bool;
+
+ fn other_attrs(&self) -> Vec<ast::Attribute>;
+
+ fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>>;
+}
+
+impl AttributesExt for [ast::Attribute] {
+ type AttributeIterator<'a> = impl Iterator<Item = ast::NestedMetaItem> + 'a;
+
+ fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
+ self.iter()
+ .filter(move |attr| attr.has_name(name))
+ .filter_map(ast::Attribute::meta_item_list)
+ .flatten()
+ }
+
+ /// Return the span of the first doc-comment, if it exists.
+ fn span(&self) -> Option<rustc_span::Span> {
+ self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span)
+ }
+
+ /// Returns whether the first doc-comment is an inner attribute.
+ ///
+ /// If there are no doc-comments, return true.
+ /// FIXME(#78591): Support both inner and outer attributes on the same item.
+ fn inner_docs(&self) -> bool {
+ self.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == AttrStyle::Inner)
+ }
+
+ fn other_attrs(&self) -> Vec<ast::Attribute> {
+ self.iter().filter(|attr| attr.doc_str().is_none()).cloned().collect()
+ }
+
+ fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>> {
+ let sess = tcx.sess;
+ let doc_cfg_active = tcx.features().doc_cfg;
+ let doc_auto_cfg_active = tcx.features().doc_auto_cfg;
+
+ fn single<T: IntoIterator>(it: T) -> Option<T::Item> {
+ let mut iter = it.into_iter();
+ let item = iter.next()?;
+ if iter.next().is_some() {
+ return None;
+ }
+ Some(item)
+ }
+
+ let mut cfg = if doc_cfg_active || doc_auto_cfg_active {
+ let mut doc_cfg = self
+ .iter()
+ .filter(|attr| attr.has_name(sym::doc))
+ .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
+ .filter(|attr| attr.has_name(sym::cfg))
+ .peekable();
+ if doc_cfg.peek().is_some() && doc_cfg_active {
+ doc_cfg
+ .filter_map(|attr| Cfg::parse(attr.meta_item()?).ok())
+ .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg)
+ } else if doc_auto_cfg_active {
+ self.iter()
+ .filter(|attr| attr.has_name(sym::cfg))
+ .filter_map(|attr| single(attr.meta_item_list()?))
+ .filter_map(|attr| {
+ Cfg::parse_without(attr.meta_item()?, hidden_cfg).ok().flatten()
+ })
+ .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg)
+ } else {
+ Cfg::True
+ }
+ } else {
+ Cfg::True
+ };
+
+ for attr in self.iter() {
+ // #[doc]
+ if attr.doc_str().is_none() && attr.has_name(sym::doc) {
+ // #[doc(...)]
+ if let Some(list) = attr.meta().as_ref().and_then(|mi| mi.meta_item_list()) {
+ for item in list {
+ // #[doc(hidden)]
+ if !item.has_name(sym::cfg) {
+ continue;
+ }
+ // #[doc(cfg(...))]
+ if let Some(cfg_mi) = item
+ .meta_item()
+ .and_then(|item| rustc_expand::config::parse_cfg(item, sess))
+ {
+ match Cfg::parse(cfg_mi) {
+ Ok(new_cfg) => cfg &= new_cfg,
+ Err(e) => {
+ sess.span_err(e.span, e.msg);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // treat #[target_feature(enable = "feat")] attributes as if they were
+ // #[doc(cfg(target_feature = "feat"))] attributes as well
+ for attr in self.lists(sym::target_feature) {
+ if attr.has_name(sym::enable) {
+ if let Some(feat) = attr.value_str() {
+ let meta = attr::mk_name_value_item_str(
+ Ident::with_dummy_span(sym::target_feature),
+ feat,
+ DUMMY_SP,
+ );
+ if let Ok(feat_cfg) = Cfg::parse(&meta) {
+ cfg &= feat_cfg;
+ }
+ }
+ }
+ }
+
+ if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) }
+ }
+}
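+
+// Illustrative sketch (not part of the original source): with only the `doc_auto_cfg`
+// feature enabled, an item carrying
+//
+//     #[cfg(unix)]
+//     #[cfg(feature = "net")]
+//     pub fn bind() {}
+//
+// gets its plain `#[cfg]` attributes folded together with `&` above, producing a
+// combined `Cfg` equivalent to `all(unix, feature = "net")` that rustdoc can later
+// render as a portability note.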
+
+pub(crate) trait NestedAttributesExt {
+ /// Returns `true` if the attribute list contains a specific `word`
+ fn has_word(self, word: Symbol) -> bool
+ where
+ Self: std::marker::Sized,
+ {
+ <Self as NestedAttributesExt>::get_word_attr(self, word).is_some()
+ }
+
+ /// Returns `Some(attr)` if the attribute list contains an attribute
+ /// corresponding to the given `word`.
+ fn get_word_attr(self, word: Symbol) -> Option<ast::NestedMetaItem>;
+}
+
+impl<I: Iterator<Item = ast::NestedMetaItem>> NestedAttributesExt for I {
+ fn get_word_attr(mut self, word: Symbol) -> Option<ast::NestedMetaItem> {
+ self.find(|attr| attr.is_word() && attr.has_name(word))
+ }
+}
+
+/// A portion of documentation, extracted from a `#[doc]` attribute.
+///
+/// Each variant contains the line number within the complete doc-comment where the fragment
+/// starts, as well as the Span where the corresponding doc comment or attribute is located.
+///
+/// Included files are kept separate from inline doc comments so that proper line-number
+/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are
+/// kept separate because of issue #42760.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub(crate) struct DocFragment {
+ pub(crate) span: rustc_span::Span,
+ /// The module this doc-comment came from.
+ ///
+ /// This allows distinguishing between the original documentation and a pub re-export.
+ /// If it is `None`, the item was not re-exported.
+ pub(crate) parent_module: Option<DefId>,
+ pub(crate) doc: Symbol,
+ pub(crate) kind: DocFragmentKind,
+ pub(crate) indent: usize,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub(crate) enum DocFragmentKind {
+ /// A doc fragment created from a `///` or `//!` doc comment.
+ SugaredDoc,
+ /// A doc fragment created from a "raw" `#[doc=""]` attribute.
+ RawDoc,
+}
+
+/// Applies the `DocFragment` transformation required to produce the final Markdown: strips the
+/// computed indent from each line of each doc fragment (a `DocFragment` can contain multiple
+/// lines in the case of `#[doc = ""]`).
+///
+/// Note: remove the trailing newline where appropriate
+fn add_doc_fragment(out: &mut String, frag: &DocFragment) {
+ let s = frag.doc.as_str();
+ let mut iter = s.lines();
+ if s.is_empty() {
+ out.push('\n');
+ return;
+ }
+ while let Some(line) = iter.next() {
+ if line.chars().any(|c| !c.is_whitespace()) {
+ assert!(line.len() >= frag.indent);
+ out.push_str(&line[frag.indent..]);
+ } else {
+ out.push_str(line);
+ }
+ out.push('\n');
+ }
+}
+
+/// Collapse a collection of [`DocFragment`]s into one string,
+/// handling indentation and newlines as needed.
+pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String {
+ let mut acc = String::new();
+ for frag in doc_strings {
+ add_doc_fragment(&mut acc, frag);
+ }
+ acc.pop();
+ acc
+}
+
+/// Removes excess indentation on comments in order for the Markdown
+/// to be parsed correctly. This is necessary because the convention for
+/// writing documentation is to provide a space between the /// or //! marker
+/// and the doc text, but Markdown is whitespace-sensitive. For example,
+/// a block of text with four-space indentation is parsed as a code block,
+/// so if we didn't unindent comments, these list items
+///
+/// /// A list:
+/// ///
+/// /// - Foo
+/// /// - Bar
+///
+/// would be parsed as if they were in a code block, which is likely not what the user intended.
+fn unindent_doc_fragments(docs: &mut Vec<DocFragment>) {
+ // `add` is used in case the most common sugared doc syntax is used ("/// "). Lines from the
+ // other fragment kind never start with a whitespace unless some markdown formatting requires
+ // it. Therefore, if the doc block mixes the two kinds, the minimum indent needs to be reduced
+ // by one to take this leading whitespace into account.
+ //
+ // For example:
+ //
+ // /// hello!
+ // #[doc = "another"]
+ //
+ // In this case, you want "hello!\nanother" and not " hello!\nanother" (with a stray leading
+ // space).
+ let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind)
+ && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc)
+ {
+ // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to
+ // "decide" how much the minimum indent will be.
+ 1
+ } else {
+ 0
+ };
+
+ // `min_indent` is used to know how many whitespace characters must be removed from the start
+ // of each line. Example:
+ //
+ // ///     hello!
+ // #[doc = "another"]
+ //
+ // In here, the `min_indent` is 1 (because non-sugared fragments are always counted with a
+ // minimum of 1 whitespace), meaning that "hello!" will be considered a codeblock because it
+ // still starts with 4 (5 - 1) whitespaces after unindenting.
+ let Some(min_indent) = docs
+ .iter()
+ .map(|fragment| {
+ fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| {
+ if line.chars().all(|c| c.is_whitespace()) {
+ min_indent
+ } else {
+ // Compare against either space or tab, ignoring whether they are
+ // mixed or not.
+ let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
+ cmp::min(min_indent, whitespace)
+ + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add }
+ }
+ })
+ })
+ .min()
+ else {
+ return;
+ };
+
+ for fragment in docs {
+ if fragment.doc == kw::Empty {
+ continue;
+ }
+
+ let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 {
+ min_indent - add
+ } else {
+ min_indent
+ };
+
+ fragment.indent = min_indent;
+ }
+}
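+
+// Illustrative sketch (not part of the original source): for the mixed block
+//
+//     /// hello!
+//     #[doc = "another"]
+//
+// the sugared fragment stores " hello!" and the raw one stores "another". With
+// `add == 1` both fragments report a minimum indent of 1, so the sugared fragment is
+// assigned `indent = 1` and the raw one `indent = 0`, and `add_doc_fragment` later
+// produces "hello!\nanother" with the two lines aligned.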
+
+/// A link that has not yet been rendered.
+///
+/// This link will be turned into a rendered link by [`Item::links`].
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) struct ItemLink {
+ /// The original link written in the markdown
+ pub(crate) link: String,
+ /// The link text displayed in the HTML.
+ ///
+ /// This may not be the same as `link` if there was a disambiguator
+ /// in an intra-doc link (e.g. \[`fn@f`\])
+ pub(crate) link_text: String,
+ pub(crate) did: DefId,
+ /// The url fragment to append to the link
+ pub(crate) fragment: Option<UrlFragment>,
+}
+
+pub struct RenderedLink {
+ /// The text the link was originally written as.
+ ///
+ /// This could potentially include disambiguators and backticks.
+ pub(crate) original_text: String,
+ /// The text to display in the HTML
+ pub(crate) new_text: String,
+ /// The URL to put in the `href`
+ pub(crate) href: String,
+}
+
+/// The attributes on an [`Item`], including attributes like `#[derive(...)]` and `#[inline]`,
+/// as well as doc comments.
+#[derive(Clone, Debug, Default)]
+pub(crate) struct Attributes {
+ pub(crate) doc_strings: Vec<DocFragment>,
+ pub(crate) other_attrs: Vec<ast::Attribute>,
+}
+
+impl Attributes {
+ pub(crate) fn lists(&self, name: Symbol) -> impl Iterator<Item = ast::NestedMetaItem> + '_ {
+ self.other_attrs.lists(name)
+ }
+
+ pub(crate) fn has_doc_flag(&self, flag: Symbol) -> bool {
+ for attr in &self.other_attrs {
+ if !attr.has_name(sym::doc) {
+ continue;
+ }
+
+ if let Some(items) = attr.meta_item_list() {
+ if items.iter().filter_map(|i| i.meta_item()).any(|it| it.has_name(flag)) {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+
+ pub(crate) fn from_ast(attrs: &[ast::Attribute]) -> Attributes {
+ Attributes::from_ast_iter(attrs.iter().map(|attr| (attr, None)), false)
+ }
+
+ pub(crate) fn from_ast_with_additional(
+ attrs: &[ast::Attribute],
+ (additional_attrs, def_id): (&[ast::Attribute], DefId),
+ ) -> Attributes {
+ // Additional documentation should be shown before the original documentation.
+ let attrs1 = additional_attrs.iter().map(|attr| (attr, Some(def_id)));
+ let attrs2 = attrs.iter().map(|attr| (attr, None));
+ Attributes::from_ast_iter(attrs1.chain(attrs2), false)
+ }
+
+ pub(crate) fn from_ast_iter<'a>(
+ attrs: impl Iterator<Item = (&'a ast::Attribute, Option<DefId>)>,
+ doc_only: bool,
+ ) -> Attributes {
+ let mut doc_strings = Vec::new();
+ let mut other_attrs = Vec::new();
+ for (attr, parent_module) in attrs {
+ if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
+ trace!("got doc_str={doc_str:?}");
+ let doc = beautify_doc_string(doc_str, comment_kind);
+ let kind = if attr.is_doc_comment() {
+ DocFragmentKind::SugaredDoc
+ } else {
+ DocFragmentKind::RawDoc
+ };
+ let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 };
+ doc_strings.push(fragment);
+ } else if !doc_only {
+ other_attrs.push(attr.clone());
+ }
+ }
+
+ unindent_doc_fragments(&mut doc_strings);
+
+ Attributes { doc_strings, other_attrs }
+ }
+
+ /// Finds the `doc` attribute as a NameValue and returns the corresponding
+ /// value found.
+ pub(crate) fn doc_value(&self) -> Option<String> {
+ let mut iter = self.doc_strings.iter();
+
+ let ori = iter.next()?;
+ let mut out = String::new();
+ add_doc_fragment(&mut out, ori);
+ for new_frag in iter {
+ add_doc_fragment(&mut out, new_frag);
+ }
+ out.pop();
+ if out.is_empty() { None } else { Some(out) }
+ }
+
+ /// Return the doc-comments on this item, grouped by the module they came from.
+ /// The module can be different if this is a re-export with added documentation.
+ ///
+ /// The last newline is not trimmed so the produced strings are reusable between
+ /// early and late doc link resolution regardless of their position.
+ pub(crate) fn prepare_to_doc_link_resolution(&self) -> FxHashMap<Option<DefId>, String> {
+ let mut res = FxHashMap::default();
+ for fragment in &self.doc_strings {
+ let out_str = res.entry(fragment.parent_module).or_default();
+ add_doc_fragment(out_str, fragment);
+ }
+ res
+ }
+
+ /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
+ /// with newlines.
+ pub(crate) fn collapsed_doc_value(&self) -> Option<String> {
+ if self.doc_strings.is_empty() {
+ None
+ } else {
+ Some(collapse_doc_fragments(&self.doc_strings))
+ }
+ }
+
+ pub(crate) fn get_doc_aliases(&self) -> Box<[Symbol]> {
+ let mut aliases = FxHashSet::default();
+
+ for attr in self.other_attrs.lists(sym::doc).filter(|a| a.has_name(sym::alias)) {
+ if let Some(values) = attr.meta_item_list() {
+ for l in values {
+ match l.literal().unwrap().kind {
+ ast::LitKind::Str(s, _) => {
+ aliases.insert(s);
+ }
+ _ => unreachable!(),
+ }
+ }
+ } else {
+ aliases.insert(attr.value_str().unwrap());
+ }
+ }
+ aliases.into_iter().collect::<Vec<_>>().into()
+ }
+}
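+
+// Illustrative sketch (not part of the original source): `get_doc_aliases` accepts both
+// spellings of the alias attribute, so an item annotated with
+//
+//     #[doc(alias = "shorthand")]
+//     #[doc(alias("other", "names"))]
+//
+// ends up with the alias set {"shorthand", "other", "names"}; the list form goes through
+// the `meta_item_list` branch and the name-value form through `value_str`.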
+
+impl PartialEq for Attributes {
+ fn eq(&self, rhs: &Self) -> bool {
+ self.doc_strings == rhs.doc_strings
+ && self
+ .other_attrs
+ .iter()
+ .map(|attr| attr.id)
+ .eq(rhs.other_attrs.iter().map(|attr| attr.id))
+ }
+}
+
+impl Eq for Attributes {}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum GenericBound {
+ TraitBound(PolyTrait, hir::TraitBoundModifier),
+ Outlives(Lifetime),
+}
+
+impl GenericBound {
+ pub(crate) fn maybe_sized(cx: &mut DocContext<'_>) -> GenericBound {
+ let did = cx.tcx.require_lang_item(LangItem::Sized, None);
+ let empty = cx.tcx.intern_substs(&[]);
+ let path = external_path(cx, did, false, vec![], empty);
+ inline::record_extern_fqn(cx, did, ItemType::Trait);
+ GenericBound::TraitBound(
+ PolyTrait { trait_: path, generic_params: Vec::new() },
+ hir::TraitBoundModifier::Maybe,
+ )
+ }
+
+ pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
+ use rustc_hir::TraitBoundModifier as TBM;
+ if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self {
+ if Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() {
+ return true;
+ }
+ }
+ false
+ }
+
+ pub(crate) fn get_poly_trait(&self) -> Option<PolyTrait> {
+ if let GenericBound::TraitBound(ref p, _) = *self {
+ return Some(p.clone());
+ }
+ None
+ }
+
+ pub(crate) fn get_trait_path(&self) -> Option<Path> {
+ if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, _) = *self {
+ Some(trait_.clone())
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct Lifetime(pub Symbol);
+
+impl Lifetime {
+ pub(crate) fn statik() -> Lifetime {
+ Lifetime(kw::StaticLifetime)
+ }
+
+ pub(crate) fn elided() -> Lifetime {
+ Lifetime(kw::UnderscoreLifetime)
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum WherePredicate {
+ BoundPredicate { ty: Type, bounds: Vec<GenericBound>, bound_params: Vec<Lifetime> },
+ RegionPredicate { lifetime: Lifetime, bounds: Vec<GenericBound> },
+ EqPredicate { lhs: Type, rhs: Term },
+}
+
+impl WherePredicate {
+ pub(crate) fn get_bounds(&self) -> Option<&[GenericBound]> {
+ match *self {
+ WherePredicate::BoundPredicate { ref bounds, .. } => Some(bounds),
+ WherePredicate::RegionPredicate { ref bounds, .. } => Some(bounds),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum GenericParamDefKind {
+ Lifetime { outlives: Vec<Lifetime> },
+ Type { did: DefId, bounds: Vec<GenericBound>, default: Option<Box<Type>>, synthetic: bool },
+ Const { did: DefId, ty: Box<Type>, default: Option<Box<String>> },
+}
+
+impl GenericParamDefKind {
+ pub(crate) fn is_type(&self) -> bool {
+ matches!(self, GenericParamDefKind::Type { .. })
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct GenericParamDef {
+ pub(crate) name: Symbol,
+ pub(crate) kind: GenericParamDefKind,
+}
+
+impl GenericParamDef {
+ pub(crate) fn is_synthetic_type_param(&self) -> bool {
+ match self.kind {
+ GenericParamDefKind::Lifetime { .. } | GenericParamDefKind::Const { .. } => false,
+ GenericParamDefKind::Type { synthetic, .. } => synthetic,
+ }
+ }
+
+ pub(crate) fn is_type(&self) -> bool {
+ self.kind.is_type()
+ }
+
+ pub(crate) fn get_bounds(&self) -> Option<&[GenericBound]> {
+ match self.kind {
+ GenericParamDefKind::Type { ref bounds, .. } => Some(bounds),
+ _ => None,
+ }
+ }
+}
+
+// maybe use a Generic enum and use Vec<Generic>?
+#[derive(Clone, Debug, Default)]
+pub(crate) struct Generics {
+ pub(crate) params: Vec<GenericParamDef>,
+ pub(crate) where_predicates: Vec<WherePredicate>,
+}
+
+impl Generics {
+ pub(crate) fn is_empty(&self) -> bool {
+ self.params.is_empty() && self.where_predicates.is_empty()
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Function {
+ pub(crate) decl: FnDecl,
+ pub(crate) generics: Generics,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct FnDecl {
+ pub(crate) inputs: Arguments,
+ pub(crate) output: FnRetTy,
+ pub(crate) c_variadic: bool,
+}
+
+impl FnDecl {
+ pub(crate) fn self_type(&self) -> Option<SelfTy> {
+ self.inputs.values.get(0).and_then(|v| v.to_self())
+ }
+
+ /// Returns the sugared return type for an async function.
+ ///
+ /// For example, if the return type is `impl std::future::Future<Output = i32>`, this function
+ /// will return `i32`.
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if the return type does not match the expected sugaring for async
+ /// functions.
+ pub(crate) fn sugared_async_return_type(&self) -> FnRetTy {
+ match &self.output {
+ FnRetTy::Return(Type::ImplTrait(bounds)) => match &bounds[0] {
+ GenericBound::TraitBound(PolyTrait { trait_, .. }, ..) => {
+ let bindings = trait_.bindings().unwrap();
+ let ret_ty = bindings[0].term();
+ let ty = ret_ty.ty().expect("Unexpected constant return term");
+ FnRetTy::Return(ty.clone())
+ }
+ _ => panic!("unexpected desugaring of async function"),
+ },
+ _ => panic!("unexpected desugaring of async function"),
+ }
+ }
+}
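+
+// Illustrative sketch (not part of the original source): for an `async fn` cleaned as
+// returning `impl Future<Output = i32>`, `sugared_async_return_type` reads the single
+// trait bound's `Output = i32` binding and returns `FnRetTy::Return` of `i32`, letting
+// rustdoc display the signature as `async fn foo() -> i32` rather than the desugared
+// `impl Future` form.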
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct Arguments {
+ pub(crate) values: Vec<Argument>,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct Argument {
+ pub(crate) type_: Type,
+ pub(crate) name: Symbol,
+ /// This field is used to represent "const" arguments from the `rustc_legacy_const_generics`
+ /// feature. More information in <https://github.com/rust-lang/rust/issues/83167>.
+ pub(crate) is_const: bool,
+}
+
+#[derive(Clone, PartialEq, Debug)]
+pub(crate) enum SelfTy {
+ SelfValue,
+ SelfBorrowed(Option<Lifetime>, Mutability),
+ SelfExplicit(Type),
+}
+
+impl Argument {
+ pub(crate) fn to_self(&self) -> Option<SelfTy> {
+ if self.name != kw::SelfLower {
+ return None;
+ }
+ if self.type_.is_self_type() {
+ return Some(SelfValue);
+ }
+ match self.type_ {
+ BorrowedRef { ref lifetime, mutability, ref type_ } if type_.is_self_type() => {
+ Some(SelfBorrowed(lifetime.clone(), mutability))
+ }
+ _ => Some(SelfExplicit(self.type_.clone())),
+ }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum FnRetTy {
+ Return(Type),
+ DefaultReturn,
+}
+
+impl FnRetTy {
+ pub(crate) fn as_return(&self) -> Option<&Type> {
+ match self {
+ Return(ret) => Some(ret),
+ DefaultReturn => None,
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Trait {
+ pub(crate) def_id: DefId,
+ pub(crate) items: Vec<Item>,
+ pub(crate) generics: Generics,
+ pub(crate) bounds: Vec<GenericBound>,
+}
+
+impl Trait {
+ pub(crate) fn is_auto(&self, tcx: TyCtxt<'_>) -> bool {
+ tcx.trait_is_auto(self.def_id)
+ }
+ pub(crate) fn unsafety(&self, tcx: TyCtxt<'_>) -> hir::Unsafety {
+ tcx.trait_def(self.def_id).unsafety
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct TraitAlias {
+ pub(crate) generics: Generics,
+ pub(crate) bounds: Vec<GenericBound>,
+}
+
+/// A trait reference, which may have higher ranked lifetimes.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct PolyTrait {
+ pub(crate) trait_: Path,
+ pub(crate) generic_params: Vec<GenericParamDef>,
+}
+
+/// Rustdoc's representation of types, mostly based on the [`hir::Ty`].
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum Type {
+ /// A named type, which could be a trait.
+ ///
+ /// This is mostly Rustdoc's version of [`hir::Path`].
+ /// It has to be different because Rustdoc's [`PathSegment`] can contain cleaned generics.
+ Path { path: Path },
+ /// A `dyn Trait` object: `dyn for<'a> Trait<'a> + Send + 'static`
+ DynTrait(Vec<PolyTrait>, Option<Lifetime>),
+ /// A type parameter.
+ Generic(Symbol),
+ /// A primitive (aka, builtin) type.
+ Primitive(PrimitiveType),
+ /// A function pointer: `extern "ABI" fn(...) -> ...`
+ BareFunction(Box<BareFunctionDecl>),
+ /// A tuple type: `(i32, &str)`.
+ Tuple(Vec<Type>),
+ /// A slice type (does *not* include the `&`): `[i32]`
+ Slice(Box<Type>),
+ /// An array type.
+ ///
+ /// The `String` field is a stringified version of the array's length parameter.
+ Array(Box<Type>, String),
+ /// A raw pointer type: `*const i32`, `*mut i32`
+ RawPointer(Mutability, Box<Type>),
+ /// A reference type: `&i32`, `&'a mut Foo`
+ BorrowedRef { lifetime: Option<Lifetime>, mutability: Mutability, type_: Box<Type> },
+
+ /// A qualified path to an associated item: `<Type as Trait>::Name`
+ QPath {
+ assoc: Box<PathSegment>,
+ self_type: Box<Type>,
+ /// FIXME: compute this field on demand.
+ should_show_cast: bool,
+ trait_: Path,
+ },
+
+ /// A type that is inferred: `_`
+ Infer,
+
+ /// An `impl Trait`: `impl TraitA + TraitB + ...`
+ ImplTrait(Vec<GenericBound>),
+}
+
+impl Type {
+ /// When comparing types for equality, it can help to ignore `&` wrapping.
+ pub(crate) fn without_borrowed_ref(&self) -> &Type {
+ let mut result = self;
+ while let Type::BorrowedRef { type_, .. } = result {
+ result = &*type_;
+ }
+ result
+ }
+
+ /// Check if two types are "potentially the same".
+ /// This is different from `Eq`, because it knows that placeholders such as
+ /// `Infer` and bare `Generic` parameters are possible matches for everything.
+ pub(crate) fn is_same(&self, other: &Self, cache: &Cache) -> bool {
+ match (self, other) {
+ // Recursive cases.
+ (Type::Tuple(a), Type::Tuple(b)) => {
+ a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(b, cache))
+ }
+ (Type::Slice(a), Type::Slice(b)) => a.is_same(b, cache),
+ (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(b, cache),
+ (Type::RawPointer(mutability, type_), Type::RawPointer(b_mutability, b_type_)) => {
+ mutability == b_mutability && type_.is_same(b_type_, cache)
+ }
+ (
+ Type::BorrowedRef { mutability, type_, .. },
+ Type::BorrowedRef { mutability: b_mutability, type_: b_type_, .. },
+ ) => mutability == b_mutability && type_.is_same(b_type_, cache),
+ // Placeholders and generics are equal to all other types.
+ (Type::Infer, _) | (_, Type::Infer) => true,
+ (Type::Generic(_), _) | (_, Type::Generic(_)) => true,
+ // Other cases, such as primitives, just use recursion.
+ (a, b) => a
+ .def_id(cache)
+ .and_then(|a| Some((a, b.def_id(cache)?)))
+ .map(|(a, b)| a == b)
+ .unwrap_or(false),
+ }
+ }
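+
+ // Illustrative sketch (not part of the original source): under `is_same`,
+ // `&mut Vec<T>` matches `&mut Vec<u32>` because the `BorrowedRef` layers are peeled
+ // recursively and the inner paths share a `DefId`, a bare `Generic` or `Infer` type
+ // matches anything, and `*const u8` does not match `*mut u8` because the mutabilities
+ // differ.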
+
+ pub(crate) fn primitive_type(&self) -> Option<PrimitiveType> {
+ match *self {
+ Primitive(p) | BorrowedRef { type_: box Primitive(p), .. } => Some(p),
+ Slice(..) | BorrowedRef { type_: box Slice(..), .. } => Some(PrimitiveType::Slice),
+ Array(..) | BorrowedRef { type_: box Array(..), .. } => Some(PrimitiveType::Array),
+ Tuple(ref tys) => {
+ if tys.is_empty() {
+ Some(PrimitiveType::Unit)
+ } else {
+ Some(PrimitiveType::Tuple)
+ }
+ }
+ RawPointer(..) => Some(PrimitiveType::RawPointer),
+ BareFunction(..) => Some(PrimitiveType::Fn),
+ _ => None,
+ }
+ }
+
+ /// Checks if this is a `T::Name` path for an associated type.
+ pub(crate) fn is_assoc_ty(&self) -> bool {
+ match self {
+ Type::Path { path, .. } => path.is_assoc_ty(),
+ _ => false,
+ }
+ }
+
+ pub(crate) fn is_self_type(&self) -> bool {
+ match *self {
+ Generic(name) => name == kw::SelfUpper,
+ _ => false,
+ }
+ }
+
+ pub(crate) fn generics(&self) -> Option<Vec<&Type>> {
+ match self {
+ Type::Path { path, .. } => path.generics(),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn is_full_generic(&self) -> bool {
+ matches!(self, Type::Generic(_))
+ }
+
+ pub(crate) fn is_impl_trait(&self) -> bool {
+ matches!(self, Type::ImplTrait(_))
+ }
+
+ pub(crate) fn projection(&self) -> Option<(&Type, DefId, PathSegment)> {
+ if let QPath { self_type, trait_, assoc, .. } = self {
+ Some((self_type, trait_.def_id(), *assoc.clone()))
+ } else {
+ None
+ }
+ }
+
+ fn inner_def_id(&self, cache: Option<&Cache>) -> Option<DefId> {
+ let t: PrimitiveType = match *self {
+ Type::Path { ref path } => return Some(path.def_id()),
+ DynTrait(ref bounds, _) => return Some(bounds[0].trait_.def_id()),
+ Primitive(p) => return cache.and_then(|c| c.primitive_locations.get(&p).cloned()),
+ BorrowedRef { type_: box Generic(..), .. } => PrimitiveType::Reference,
+ BorrowedRef { ref type_, .. } => return type_.inner_def_id(cache),
+ Tuple(ref tys) => {
+ if tys.is_empty() {
+ PrimitiveType::Unit
+ } else {
+ PrimitiveType::Tuple
+ }
+ }
+ BareFunction(..) => PrimitiveType::Fn,
+ Slice(..) => PrimitiveType::Slice,
+ Array(..) => PrimitiveType::Array,
+ RawPointer(..) => PrimitiveType::RawPointer,
+ QPath { ref self_type, .. } => return self_type.inner_def_id(cache),
+ Generic(_) | Infer | ImplTrait(_) => return None,
+ };
+ cache.and_then(|c| Primitive(t).def_id(c))
+ }
+
+ /// Use this method to get the [DefId] of a [clean] AST node, including [PrimitiveType]s.
+ ///
+ /// [clean]: crate::clean
+ pub(crate) fn def_id(&self, cache: &Cache) -> Option<DefId> {
+ self.inner_def_id(Some(cache))
+ }
+}
+
+/// A primitive (aka, builtin) type.
+///
+/// This represents things like `i32`, `str`, etc.
+///
+/// N.B. This has to be different from [`hir::PrimTy`] because it also includes types that aren't
+/// paths, like [`Self::Unit`].
+#[derive(Clone, PartialEq, Eq, Hash, Copy, Debug)]
+pub(crate) enum PrimitiveType {
+ Isize,
+ I8,
+ I16,
+ I32,
+ I64,
+ I128,
+ Usize,
+ U8,
+ U16,
+ U32,
+ U64,
+ U128,
+ F32,
+ F64,
+ Char,
+ Bool,
+ Str,
+ Slice,
+ Array,
+ Tuple,
+ Unit,
+ RawPointer,
+ Reference,
+ Fn,
+ Never,
+}
+
+type SimplifiedTypes = FxHashMap<PrimitiveType, ArrayVec<SimplifiedType, 3>>;
+impl PrimitiveType {
+ pub(crate) fn from_hir(prim: hir::PrimTy) -> PrimitiveType {
+ use ast::{FloatTy, IntTy, UintTy};
+ match prim {
+ hir::PrimTy::Int(IntTy::Isize) => PrimitiveType::Isize,
+ hir::PrimTy::Int(IntTy::I8) => PrimitiveType::I8,
+ hir::PrimTy::Int(IntTy::I16) => PrimitiveType::I16,
+ hir::PrimTy::Int(IntTy::I32) => PrimitiveType::I32,
+ hir::PrimTy::Int(IntTy::I64) => PrimitiveType::I64,
+ hir::PrimTy::Int(IntTy::I128) => PrimitiveType::I128,
+ hir::PrimTy::Uint(UintTy::Usize) => PrimitiveType::Usize,
+ hir::PrimTy::Uint(UintTy::U8) => PrimitiveType::U8,
+ hir::PrimTy::Uint(UintTy::U16) => PrimitiveType::U16,
+ hir::PrimTy::Uint(UintTy::U32) => PrimitiveType::U32,
+ hir::PrimTy::Uint(UintTy::U64) => PrimitiveType::U64,
+ hir::PrimTy::Uint(UintTy::U128) => PrimitiveType::U128,
+ hir::PrimTy::Float(FloatTy::F32) => PrimitiveType::F32,
+ hir::PrimTy::Float(FloatTy::F64) => PrimitiveType::F64,
+ hir::PrimTy::Str => PrimitiveType::Str,
+ hir::PrimTy::Bool => PrimitiveType::Bool,
+ hir::PrimTy::Char => PrimitiveType::Char,
+ }
+ }
+
+ pub(crate) fn from_symbol(s: Symbol) -> Option<PrimitiveType> {
+ match s {
+ sym::isize => Some(PrimitiveType::Isize),
+ sym::i8 => Some(PrimitiveType::I8),
+ sym::i16 => Some(PrimitiveType::I16),
+ sym::i32 => Some(PrimitiveType::I32),
+ sym::i64 => Some(PrimitiveType::I64),
+ sym::i128 => Some(PrimitiveType::I128),
+ sym::usize => Some(PrimitiveType::Usize),
+ sym::u8 => Some(PrimitiveType::U8),
+ sym::u16 => Some(PrimitiveType::U16),
+ sym::u32 => Some(PrimitiveType::U32),
+ sym::u64 => Some(PrimitiveType::U64),
+ sym::u128 => Some(PrimitiveType::U128),
+ sym::bool => Some(PrimitiveType::Bool),
+ sym::char => Some(PrimitiveType::Char),
+ sym::str => Some(PrimitiveType::Str),
+ sym::f32 => Some(PrimitiveType::F32),
+ sym::f64 => Some(PrimitiveType::F64),
+ sym::array => Some(PrimitiveType::Array),
+ sym::slice => Some(PrimitiveType::Slice),
+ sym::tuple => Some(PrimitiveType::Tuple),
+ sym::unit => Some(PrimitiveType::Unit),
+ sym::pointer => Some(PrimitiveType::RawPointer),
+ sym::reference => Some(PrimitiveType::Reference),
+ kw::Fn => Some(PrimitiveType::Fn),
+ sym::never => Some(PrimitiveType::Never),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn simplified_types() -> &'static SimplifiedTypes {
+ use ty::fast_reject::SimplifiedTypeGen::*;
+ use ty::{FloatTy, IntTy, UintTy};
+ use PrimitiveType::*;
+ static CELL: OnceCell<SimplifiedTypes> = OnceCell::new();
+
+ let single = |x| iter::once(x).collect();
+ CELL.get_or_init(move || {
+ map! {
+ Isize => single(IntSimplifiedType(IntTy::Isize)),
+ I8 => single(IntSimplifiedType(IntTy::I8)),
+ I16 => single(IntSimplifiedType(IntTy::I16)),
+ I32 => single(IntSimplifiedType(IntTy::I32)),
+ I64 => single(IntSimplifiedType(IntTy::I64)),
+ I128 => single(IntSimplifiedType(IntTy::I128)),
+ Usize => single(UintSimplifiedType(UintTy::Usize)),
+ U8 => single(UintSimplifiedType(UintTy::U8)),
+ U16 => single(UintSimplifiedType(UintTy::U16)),
+ U32 => single(UintSimplifiedType(UintTy::U32)),
+ U64 => single(UintSimplifiedType(UintTy::U64)),
+ U128 => single(UintSimplifiedType(UintTy::U128)),
+ F32 => single(FloatSimplifiedType(FloatTy::F32)),
+ F64 => single(FloatSimplifiedType(FloatTy::F64)),
+ Str => single(StrSimplifiedType),
+ Bool => single(BoolSimplifiedType),
+ Char => single(CharSimplifiedType),
+ Array => single(ArraySimplifiedType),
+ Slice => single(SliceSimplifiedType),
+ // FIXME: If we ever add an inherent impl for tuples
+ // with different lengths, they won't show in rustdoc.
+ //
+ // Either manually update this arrayvec at this point
+ // or start with a more complex refactoring.
+ Tuple => [TupleSimplifiedType(1), TupleSimplifiedType(2), TupleSimplifiedType(3)].into(),
+ Unit => single(TupleSimplifiedType(0)),
+ RawPointer => [PtrSimplifiedType(Mutability::Not), PtrSimplifiedType(Mutability::Mut)].into_iter().collect(),
+ Reference => [RefSimplifiedType(Mutability::Not), RefSimplifiedType(Mutability::Mut)].into_iter().collect(),
+ // FIXME: This will be wrong if we ever add inherent impls
+ // for function pointers.
+ Fn => single(FunctionSimplifiedType(1)),
+ Never => single(NeverSimplifiedType),
+ }
+ })
+ }
+
+ pub(crate) fn impls<'tcx>(&self, tcx: TyCtxt<'tcx>) -> impl Iterator<Item = DefId> + 'tcx {
+ Self::simplified_types()
+ .get(self)
+ .into_iter()
+ .flatten()
+ .flat_map(move |&simp| tcx.incoherent_impls(simp))
+ .copied()
+ }
+
+ pub(crate) fn all_impls(tcx: TyCtxt<'_>) -> impl Iterator<Item = DefId> + '_ {
+ Self::simplified_types()
+ .values()
+ .flatten()
+ .flat_map(move |&simp| tcx.incoherent_impls(simp))
+ .copied()
+ }
+
+ pub(crate) fn as_sym(&self) -> Symbol {
+ use PrimitiveType::*;
+ match self {
+ Isize => sym::isize,
+ I8 => sym::i8,
+ I16 => sym::i16,
+ I32 => sym::i32,
+ I64 => sym::i64,
+ I128 => sym::i128,
+ Usize => sym::usize,
+ U8 => sym::u8,
+ U16 => sym::u16,
+ U32 => sym::u32,
+ U64 => sym::u64,
+ U128 => sym::u128,
+ F32 => sym::f32,
+ F64 => sym::f64,
+ Str => sym::str,
+ Bool => sym::bool,
+ Char => sym::char,
+ Array => sym::array,
+ Slice => sym::slice,
+ Tuple => sym::tuple,
+ Unit => sym::unit,
+ RawPointer => sym::pointer,
+ Reference => sym::reference,
+ Fn => kw::Fn,
+ Never => sym::never,
+ }
+ }
+
+ /// Returns the DefId of the module with `doc(primitive)` for this primitive type.
+ /// Panics if there is no such module.
+ ///
+ /// This gives precedence to primitives defined in the current crate, and deprioritizes primitives defined in `core`,
+ /// but otherwise, if multiple crates define the same primitive, there is no guarantee of which will be picked.
+ /// In particular, if a crate depends on both `std` and another crate that also defines `doc(primitive)`, then
+ /// it's entirely random whether `std` or the other crate is picked. (no_std crates are usually fine unless multiple dependencies define a primitive.)
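+ /// For example, if the crate graph contains both `core` and `std`, the `std` module for a
+ /// primitive such as `usize` ends up in the map: `core`'s entry is skipped whenever another
+ /// crate has already claimed that primitive (this is the `HACK` in the loop below).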
+ pub(crate) fn primitive_locations(tcx: TyCtxt<'_>) -> &FxHashMap<PrimitiveType, DefId> {
+ static PRIMITIVE_LOCATIONS: OnceCell<FxHashMap<PrimitiveType, DefId>> = OnceCell::new();
+ PRIMITIVE_LOCATIONS.get_or_init(|| {
+ let mut primitive_locations = FxHashMap::default();
+ // NOTE: technically this misses crates that are only passed with `--extern` and not loaded when checking the crate.
+ // This is a degenerate case that I don't plan to support.
+ for &crate_num in tcx.crates(()) {
+ let e = ExternalCrate { crate_num };
+ let crate_name = e.name(tcx);
+ debug!(?crate_num, ?crate_name);
+ for &(def_id, prim) in &e.primitives(tcx) {
+ // HACK: try to link to std instead where possible
+ if crate_name == sym::core && primitive_locations.contains_key(&prim) {
+ continue;
+ }
+ primitive_locations.insert(prim, def_id);
+ }
+ }
+ let local_primitives = ExternalCrate { crate_num: LOCAL_CRATE }.primitives(tcx);
+ for (def_id, prim) in local_primitives {
+ primitive_locations.insert(prim, def_id);
+ }
+ primitive_locations
+ })
+ }
+}
+
+impl From<ast::IntTy> for PrimitiveType {
+ fn from(int_ty: ast::IntTy) -> PrimitiveType {
+ match int_ty {
+ ast::IntTy::Isize => PrimitiveType::Isize,
+ ast::IntTy::I8 => PrimitiveType::I8,
+ ast::IntTy::I16 => PrimitiveType::I16,
+ ast::IntTy::I32 => PrimitiveType::I32,
+ ast::IntTy::I64 => PrimitiveType::I64,
+ ast::IntTy::I128 => PrimitiveType::I128,
+ }
+ }
+}
+
+impl From<ast::UintTy> for PrimitiveType {
+ fn from(uint_ty: ast::UintTy) -> PrimitiveType {
+ match uint_ty {
+ ast::UintTy::Usize => PrimitiveType::Usize,
+ ast::UintTy::U8 => PrimitiveType::U8,
+ ast::UintTy::U16 => PrimitiveType::U16,
+ ast::UintTy::U32 => PrimitiveType::U32,
+ ast::UintTy::U64 => PrimitiveType::U64,
+ ast::UintTy::U128 => PrimitiveType::U128,
+ }
+ }
+}
+
+impl From<ast::FloatTy> for PrimitiveType {
+ fn from(float_ty: ast::FloatTy) -> PrimitiveType {
+ match float_ty {
+ ast::FloatTy::F32 => PrimitiveType::F32,
+ ast::FloatTy::F64 => PrimitiveType::F64,
+ }
+ }
+}
+
+impl From<ty::IntTy> for PrimitiveType {
+ fn from(int_ty: ty::IntTy) -> PrimitiveType {
+ match int_ty {
+ ty::IntTy::Isize => PrimitiveType::Isize,
+ ty::IntTy::I8 => PrimitiveType::I8,
+ ty::IntTy::I16 => PrimitiveType::I16,
+ ty::IntTy::I32 => PrimitiveType::I32,
+ ty::IntTy::I64 => PrimitiveType::I64,
+ ty::IntTy::I128 => PrimitiveType::I128,
+ }
+ }
+}
+
+impl From<ty::UintTy> for PrimitiveType {
+ fn from(uint_ty: ty::UintTy) -> PrimitiveType {
+ match uint_ty {
+ ty::UintTy::Usize => PrimitiveType::Usize,
+ ty::UintTy::U8 => PrimitiveType::U8,
+ ty::UintTy::U16 => PrimitiveType::U16,
+ ty::UintTy::U32 => PrimitiveType::U32,
+ ty::UintTy::U64 => PrimitiveType::U64,
+ ty::UintTy::U128 => PrimitiveType::U128,
+ }
+ }
+}
+
+impl From<ty::FloatTy> for PrimitiveType {
+ fn from(float_ty: ty::FloatTy) -> PrimitiveType {
+ match float_ty {
+ ty::FloatTy::F32 => PrimitiveType::F32,
+ ty::FloatTy::F64 => PrimitiveType::F64,
+ }
+ }
+}
+
+impl From<hir::PrimTy> for PrimitiveType {
+ fn from(prim_ty: hir::PrimTy) -> PrimitiveType {
+ match prim_ty {
+ hir::PrimTy::Int(int_ty) => int_ty.into(),
+ hir::PrimTy::Uint(uint_ty) => uint_ty.into(),
+ hir::PrimTy::Float(float_ty) => float_ty.into(),
+ hir::PrimTy::Str => PrimitiveType::Str,
+ hir::PrimTy::Bool => PrimitiveType::Bool,
+ hir::PrimTy::Char => PrimitiveType::Char,
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) enum Visibility {
+ /// `pub`
+ Public,
+ /// Visibility inherited from parent.
+ ///
+ /// For example, this is the visibility of private items and of enum variants.
+ Inherited,
+ /// `pub(crate)`, `pub(super)`, or `pub(in path::to::somewhere)`
+ Restricted(DefId),
+}
+
+impl Visibility {
+ pub(crate) fn is_public(&self) -> bool {
+ matches!(self, Visibility::Public)
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Struct {
+ pub(crate) struct_type: CtorKind,
+ pub(crate) generics: Generics,
+ pub(crate) fields: Vec<Item>,
+}
+
+impl Struct {
+ pub(crate) fn has_stripped_entries(&self) -> bool {
+ self.fields.iter().any(|f| f.is_stripped())
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Union {
+ pub(crate) generics: Generics,
+ pub(crate) fields: Vec<Item>,
+}
+
+impl Union {
+ pub(crate) fn has_stripped_entries(&self) -> bool {
+ self.fields.iter().any(|f| f.is_stripped())
+ }
+}
+
+/// This is a more limited form of the standard Struct, different in that
+/// it lacks the things most items have (name, id, parameterization). Found
+/// only as a variant in an enum.
+#[derive(Clone, Debug)]
+pub(crate) struct VariantStruct {
+ pub(crate) struct_type: CtorKind,
+ pub(crate) fields: Vec<Item>,
+}
+
+impl VariantStruct {
+ pub(crate) fn has_stripped_entries(&self) -> bool {
+ self.fields.iter().any(|f| f.is_stripped())
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Enum {
+ pub(crate) variants: IndexVec<VariantIdx, Item>,
+ pub(crate) generics: Generics,
+}
+
+impl Enum {
+ pub(crate) fn has_stripped_entries(&self) -> bool {
+ self.variants.iter().any(|f| f.is_stripped())
+ }
+
+ pub(crate) fn variants(&self) -> impl Iterator<Item = &Item> {
+ self.variants.iter().filter(|v| !v.is_stripped())
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum Variant {
+ CLike,
+ Tuple(Vec<Item>),
+ Struct(VariantStruct),
+}
+
+impl Variant {
+ pub(crate) fn has_stripped_entries(&self) -> Option<bool> {
+ match *self {
+ Self::Struct(ref struct_) => Some(struct_.has_stripped_entries()),
+ Self::CLike | Self::Tuple(_) => None,
+ }
+ }
+}
+
+/// Small wrapper around [`rustc_span::Span`] that adds helper methods
+/// and enforces calling [`rustc_span::Span::source_callsite()`].
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct Span(rustc_span::Span);
+
+impl Span {
+ /// Wraps a [`rustc_span::Span`]. In case this span is the result of a macro expansion, the
+ /// span will be updated to point to the macro invocation instead of the macro definition.
+ ///
+ /// (See rust-lang/rust#39726)
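+ /// For example, a span that originates inside a `vec![…]` expansion is mapped back to the
+ /// `vec![…]` call in the user's code.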
+ pub(crate) fn new(sp: rustc_span::Span) -> Self {
+ Self(sp.source_callsite())
+ }
+
+ pub(crate) fn inner(&self) -> rustc_span::Span {
+ self.0
+ }
+
+ pub(crate) fn dummy() -> Self {
+ Self(rustc_span::DUMMY_SP)
+ }
+
+ pub(crate) fn is_dummy(&self) -> bool {
+ self.0.is_dummy()
+ }
+
+ pub(crate) fn filename(&self, sess: &Session) -> FileName {
+ sess.source_map().span_to_filename(self.0)
+ }
+
+ pub(crate) fn lo(&self, sess: &Session) -> Loc {
+ sess.source_map().lookup_char_pos(self.0.lo())
+ }
+
+ pub(crate) fn hi(&self, sess: &Session) -> Loc {
+ sess.source_map().lookup_char_pos(self.0.hi())
+ }
+
+ pub(crate) fn cnum(&self, sess: &Session) -> CrateNum {
+ // FIXME: is there a time when the lo and hi crate would be different?
+ self.lo(sess).file.cnum
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct Path {
+ pub(crate) res: Res,
+ pub(crate) segments: Vec<PathSegment>,
+}
+
+impl Path {
+ pub(crate) fn def_id(&self) -> DefId {
+ self.res.def_id()
+ }
+
+ pub(crate) fn last_opt(&self) -> Option<Symbol> {
+ self.segments.last().map(|s| s.name)
+ }
+
+ pub(crate) fn last(&self) -> Symbol {
+ self.last_opt().expect("segments were empty")
+ }
+
+ pub(crate) fn whole_name(&self) -> String {
+ self.segments
+ .iter()
+ .map(|s| if s.name == kw::PathRoot { "" } else { s.name.as_str() })
+ .intersperse("::")
+ .collect()
+ }
+
+ /// Checks if this is a `T::Name` path for an associated type.
+ pub(crate) fn is_assoc_ty(&self) -> bool {
+ match self.res {
+ Res::SelfTy { .. } if self.segments.len() != 1 => true,
+ Res::Def(DefKind::TyParam, _) if self.segments.len() != 1 => true,
+ Res::Def(DefKind::AssocTy, _) => true,
+ _ => false,
+ }
+ }
+
+ pub(crate) fn generics(&self) -> Option<Vec<&Type>> {
+ self.segments.last().and_then(|seg| {
+ if let GenericArgs::AngleBracketed { ref args, .. } = seg.args {
+ Some(
+ args.iter()
+ .filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ .collect(),
+ )
+ } else {
+ None
+ }
+ })
+ }
+
+ pub(crate) fn bindings(&self) -> Option<&[TypeBinding]> {
+ self.segments.last().and_then(|seg| {
+ if let GenericArgs::AngleBracketed { ref bindings, .. } = seg.args {
+ Some(&**bindings)
+ } else {
+ None
+ }
+ })
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum GenericArg {
+ Lifetime(Lifetime),
+ Type(Type),
+ Const(Box<Constant>),
+ Infer,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum GenericArgs {
+ AngleBracketed { args: Box<[GenericArg]>, bindings: ThinVec<TypeBinding> },
+ Parenthesized { inputs: Box<[Type]>, output: Option<Box<Type>> },
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct PathSegment {
+ pub(crate) name: Symbol,
+ pub(crate) args: GenericArgs,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Typedef {
+ pub(crate) type_: Type,
+ pub(crate) generics: Generics,
+ /// `type_` can come from either the HIR or from metadata. If it comes from HIR, it may be a type
+ /// alias instead of the final type. This will always have the final type, regardless of whether
+ /// `type_` came from HIR or from metadata.
+ ///
+ /// If `item_type.is_none()`, `type_` is guaranteed to come from metadata (and therefore hold the
+ /// final type).
+ pub(crate) item_type: Option<Type>,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct OpaqueTy {
+ pub(crate) bounds: Vec<GenericBound>,
+ pub(crate) generics: Generics,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct BareFunctionDecl {
+ pub(crate) unsafety: hir::Unsafety,
+ pub(crate) generic_params: Vec<GenericParamDef>,
+ pub(crate) decl: FnDecl,
+ pub(crate) abi: Abi,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Static {
+ pub(crate) type_: Type,
+ pub(crate) mutability: Mutability,
+ pub(crate) expr: Option<BodyId>,
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub(crate) struct Constant {
+ pub(crate) type_: Type,
+ pub(crate) kind: ConstantKind,
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub(crate) enum Term {
+ Type(Type),
+ Constant(Constant),
+}
+
+impl Term {
+ pub(crate) fn ty(&self) -> Option<&Type> {
+ if let Term::Type(ty) = self { Some(ty) } else { None }
+ }
+}
+
+impl From<Type> for Term {
+ fn from(ty: Type) -> Self {
+ Term::Type(ty)
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub(crate) enum ConstantKind {
+ /// This is the wrapper around `ty::Const` for a non-local constant. Because it doesn't have a
+ /// `BodyId`, we need to handle it on its own.
+ ///
+ /// Note that `ty::Const` includes generic parameters, and may not always be uniquely identified
+ /// by a `DefId`, so this variant must be kept separate from `Extern`.
+ TyConst { expr: String },
+ /// A constant (expression) that's not an item or associated item. These are usually found
+ /// nested inside types (e.g., array lengths) or expressions (e.g., repeat counts), and also
+ /// used to define explicit discriminant values for enum variants.
+ Anonymous { body: BodyId },
+ /// A constant from a different crate.
+ Extern { def_id: DefId },
+ /// `const FOO: u32 = ...;`
+ Local { def_id: DefId, body: BodyId },
+}
+
+impl Constant {
+ pub(crate) fn expr(&self, tcx: TyCtxt<'_>) -> String {
+ self.kind.expr(tcx)
+ }
+
+ pub(crate) fn value(&self, tcx: TyCtxt<'_>) -> Option<String> {
+ self.kind.value(tcx)
+ }
+
+ pub(crate) fn is_literal(&self, tcx: TyCtxt<'_>) -> bool {
+ self.kind.is_literal(tcx)
+ }
+}
+
+impl ConstantKind {
+ pub(crate) fn expr(&self, tcx: TyCtxt<'_>) -> String {
+ match *self {
+ ConstantKind::TyConst { ref expr } => expr.clone(),
+ ConstantKind::Extern { def_id } => print_inlined_const(tcx, def_id),
+ ConstantKind::Local { body, .. } | ConstantKind::Anonymous { body } => {
+ print_const_expr(tcx, body)
+ }
+ }
+ }
+
+ pub(crate) fn value(&self, tcx: TyCtxt<'_>) -> Option<String> {
+ match *self {
+ ConstantKind::TyConst { .. } | ConstantKind::Anonymous { .. } => None,
+ ConstantKind::Extern { def_id } | ConstantKind::Local { def_id, .. } => {
+ print_evaluated_const(tcx, def_id)
+ }
+ }
+ }
+
+ pub(crate) fn is_literal(&self, tcx: TyCtxt<'_>) -> bool {
+ match *self {
+ ConstantKind::TyConst { .. } => false,
+ ConstantKind::Extern { def_id } => def_id.as_local().map_or(false, |def_id| {
+ is_literal_expr(tcx, tcx.hir().local_def_id_to_hir_id(def_id))
+ }),
+ ConstantKind::Local { body, .. } | ConstantKind::Anonymous { body } => {
+ is_literal_expr(tcx, body.hir_id)
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Impl {
+ pub(crate) unsafety: hir::Unsafety,
+ pub(crate) generics: Generics,
+ pub(crate) trait_: Option<Path>,
+ pub(crate) for_: Type,
+ pub(crate) items: Vec<Item>,
+ pub(crate) polarity: ty::ImplPolarity,
+ pub(crate) kind: ImplKind,
+}
+
+impl Impl {
+ pub(crate) fn provided_trait_methods(&self, tcx: TyCtxt<'_>) -> FxHashSet<Symbol> {
+ self.trait_
+ .as_ref()
+ .map(|t| t.def_id())
+ .map(|did| tcx.provided_trait_methods(did).map(|meth| meth.name).collect())
+ .unwrap_or_default()
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum ImplKind {
+ Normal,
+ Auto,
+ FakeVaradic,
+ Blanket(Box<Type>),
+}
+
+impl ImplKind {
+ pub(crate) fn is_auto(&self) -> bool {
+ matches!(self, ImplKind::Auto)
+ }
+
+ pub(crate) fn is_blanket(&self) -> bool {
+ matches!(self, ImplKind::Blanket(_))
+ }
+
+ pub(crate) fn is_fake_variadic(&self) -> bool {
+ matches!(self, ImplKind::FakeVaradic)
+ }
+
+ pub(crate) fn as_blanket_ty(&self) -> Option<&Type> {
+ match self {
+ ImplKind::Blanket(ty) => Some(ty),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Import {
+ pub(crate) kind: ImportKind,
+ pub(crate) source: ImportSource,
+ pub(crate) should_be_displayed: bool,
+}
+
+impl Import {
+ pub(crate) fn new_simple(
+ name: Symbol,
+ source: ImportSource,
+ should_be_displayed: bool,
+ ) -> Self {
+ Self { kind: ImportKind::Simple(name), source, should_be_displayed }
+ }
+
+ pub(crate) fn new_glob(source: ImportSource, should_be_displayed: bool) -> Self {
+ Self { kind: ImportKind::Glob, source, should_be_displayed }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum ImportKind {
+ // use source as str;
+ Simple(Symbol),
+ // use source::*;
+ Glob,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct ImportSource {
+ pub(crate) path: Path,
+ pub(crate) did: Option<DefId>,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Macro {
+ pub(crate) source: String,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct ProcMacro {
+ pub(crate) kind: MacroKind,
+ pub(crate) helpers: Vec<Symbol>,
+}
+
+/// A type binding on an associated type (e.g., `A = Bar` in `Foo<A = Bar>` or
+/// `A: Send + Sync` in `Foo<A: Send + Sync>`).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct TypeBinding {
+ pub(crate) assoc: PathSegment,
+ pub(crate) kind: TypeBindingKind,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) enum TypeBindingKind {
+ Equality { term: Term },
+ Constraint { bounds: Vec<GenericBound> },
+}
+
+impl TypeBinding {
+ pub(crate) fn term(&self) -> &Term {
+ match self.kind {
+ TypeBindingKind::Equality { ref term } => term,
+ _ => panic!("expected equality type binding for parenthesized generic args"),
+ }
+ }
+}
+
+/// The type, lifetime, or constant that a private type alias's parameter should be
+/// replaced with when expanding a use of that type alias.
+///
+/// For example:
+///
+/// ```
+/// type PrivAlias<T> = Vec<T>;
+///
+/// pub fn public_fn() -> PrivAlias<i32> { vec![] }
+/// ```
+///
+/// `public_fn`'s docs will show it as returning `Vec<i32>`, since `PrivAlias` is private.
+/// [`SubstParam`] is used to record that `T` should be mapped to `i32`.
+pub(crate) enum SubstParam {
+ Type(Type),
+ Lifetime(Lifetime),
+ Constant(Constant),
+}
+
+impl SubstParam {
+ pub(crate) fn as_ty(&self) -> Option<&Type> {
+ if let Self::Type(ty) = self { Some(ty) } else { None }
+ }
+
+ pub(crate) fn as_lt(&self) -> Option<&Lifetime> {
+ if let Self::Lifetime(lt) = self { Some(lt) } else { None }
+ }
+}
+
+// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+mod size_asserts {
+ use super::*;
+ // These are in alphabetical order, which is easy to maintain.
+ rustc_data_structures::static_assert_size!(Crate, 72); // frequently moved by-value
+ rustc_data_structures::static_assert_size!(DocFragment, 32);
+ rustc_data_structures::static_assert_size!(GenericArg, 80);
+ rustc_data_structures::static_assert_size!(GenericArgs, 32);
+ rustc_data_structures::static_assert_size!(GenericParamDef, 56);
+ rustc_data_structures::static_assert_size!(Item, 56);
+ rustc_data_structures::static_assert_size!(ItemKind, 112);
+ rustc_data_structures::static_assert_size!(PathSegment, 40);
+ rustc_data_structures::static_assert_size!(Type, 72);
+}
diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs
new file mode 100644
index 000000000..71eddf434
--- /dev/null
+++ b/src/librustdoc/clean/types/tests.rs
@@ -0,0 +1,70 @@
+use super::*;
+
+use crate::clean::collapse_doc_fragments;
+
+use rustc_span::create_default_session_globals_then;
+use rustc_span::source_map::DUMMY_SP;
+use rustc_span::symbol::Symbol;
+
+fn create_doc_fragment(s: &str) -> Vec<DocFragment> {
+ vec![DocFragment {
+ span: DUMMY_SP,
+ parent_module: None,
+ doc: Symbol::intern(s),
+ kind: DocFragmentKind::SugaredDoc,
+ indent: 0,
+ }]
+}
+
+#[track_caller]
+fn run_test(input: &str, expected: &str) {
+ create_default_session_globals_then(|| {
+ let mut s = create_doc_fragment(input);
+ unindent_doc_fragments(&mut s);
+ assert_eq!(collapse_doc_fragments(&s), expected);
+ });
+}
+
+#[test]
+fn should_unindent() {
+ run_test(" line1\n line2", "line1\nline2");
+}
+
+#[test]
+fn should_unindent_multiple_paragraphs() {
+ run_test(" line1\n\n line2", "line1\n\nline2");
+}
+
+#[test]
+fn should_leave_multiple_indent_levels() {
+ // Line 2 is indented another level beyond the
+ // base indentation and should be preserved
+ run_test(" line1\n\n line2", "line1\n\n line2");
+}
+
+#[test]
+fn should_ignore_first_line_indent() {
+ run_test("line1\n line2", "line1\n line2");
+}
+
+#[test]
+fn should_not_ignore_first_line_indent_in_a_single_line_para() {
+ run_test("line1\n\n line2", "line1\n\n line2");
+}
+
+#[test]
+fn should_unindent_tabs() {
+ run_test("\tline1\n\tline2", "line1\nline2");
+}
+
+#[test]
+fn should_trim_mixed_indentation() {
+ run_test("\t line1\n\t line2", "line1\nline2");
+ run_test(" \tline1\n \tline2", "line1\nline2");
+}
+
+#[test]
+fn should_not_trim() {
+ run_test("\t line1 \n\t line2", "line1 \nline2");
+ run_test(" \tline1 \n \tline2", "line1 \nline2");
+}
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
new file mode 100644
index 000000000..43e71e90a
--- /dev/null
+++ b/src/librustdoc/clean/utils.rs
@@ -0,0 +1,616 @@
+use crate::clean::auto_trait::AutoTraitFinder;
+use crate::clean::blanket_impl::BlanketImplFinder;
+use crate::clean::render_macro_matchers::render_macro_matcher;
+use crate::clean::{
+ clean_middle_const, clean_middle_region, clean_middle_ty, inline, Clean, Crate, ExternalCrate,
+ Generic, GenericArg, GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment,
+ Primitive, PrimitiveType, Type, TypeBinding, Visibility,
+};
+use crate::core::DocContext;
+use crate::formats::item_type::ItemType;
+use crate::visit_lib::LibEmbargoVisitor;
+
+use rustc_ast as ast;
+use rustc_ast::tokenstream::TokenTree;
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_middle::mir;
+use rustc_middle::mir::interpret::ConstValue;
+use rustc_middle::ty::subst::{GenericArgKind, SubstsRef};
+use rustc_middle::ty::{self, DefIdTree, TyCtxt};
+use rustc_span::symbol::{kw, sym, Symbol};
+use std::fmt::Write as _;
+use std::mem;
+
+#[cfg(test)]
+mod tests;
+
+pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
+ let module = crate::visit_ast::RustdocVisitor::new(cx).visit();
+
+ for &cnum in cx.tcx.crates(()) {
+ // Analyze doc-reachability for extern items
+ LibEmbargoVisitor::new(cx).visit_lib(cnum);
+ }
+
+ // Clean the crate, translating the entire librustc_ast AST to one that is
+ // understood by rustdoc.
+ let mut module = module.clean(cx);
+
+ match *module.kind {
+ ItemKind::ModuleItem(ref module) => {
+ for it in &module.items {
+ // `compiler_builtins` should be masked too, but we can't apply
+ // `#[doc(masked)]` to the injected `extern crate` because it's unstable.
+ if it.is_extern_crate()
+ && (it.attrs.has_doc_flag(sym::masked)
+ || cx.tcx.is_compiler_builtins(it.item_id.krate()))
+ {
+ cx.cache.masked_crates.insert(it.item_id.krate());
+ }
+ }
+ }
+ _ => unreachable!(),
+ }
+
+ let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
+ let primitives = local_crate.primitives(cx.tcx);
+ let keywords = local_crate.keywords(cx.tcx);
+ {
+ let ItemKind::ModuleItem(ref mut m) = *module.kind
+ else { unreachable!() };
+ m.items.extend(primitives.iter().map(|&(def_id, prim)| {
+ Item::from_def_id_and_parts(
+ def_id,
+ Some(prim.as_sym()),
+ ItemKind::PrimitiveItem(prim),
+ cx,
+ )
+ }));
+ m.items.extend(keywords.into_iter().map(|(def_id, kw)| {
+ Item::from_def_id_and_parts(def_id, Some(kw), ItemKind::KeywordItem, cx)
+ }));
+ }
+
+ Crate { module, primitives, external_traits: cx.external_traits.clone() }
+}
+
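+/// Convert a list of `ty` substitutions into clean `GenericArg`s.
+///
+/// If `skip_first` is true, the first *type* argument encountered is dropped; callers such as
+/// `external_generic_args` below use this to omit the implicit `Self` type of trait paths.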
+pub(crate) fn substs_to_args<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ substs: &[ty::subst::GenericArg<'tcx>],
+ mut skip_first: bool,
+) -> Vec<GenericArg> {
+ let mut ret_val =
+ Vec::with_capacity(substs.len().saturating_sub(if skip_first { 1 } else { 0 }));
+ ret_val.extend(substs.iter().filter_map(|kind| match kind.unpack() {
+ GenericArgKind::Lifetime(lt) => {
+ Some(GenericArg::Lifetime(clean_middle_region(lt).unwrap_or(Lifetime::elided())))
+ }
+ GenericArgKind::Type(_) if skip_first => {
+ skip_first = false;
+ None
+ }
+ GenericArgKind::Type(ty) => Some(GenericArg::Type(clean_middle_ty(ty, cx, None))),
+ GenericArgKind::Const(ct) => Some(GenericArg::Const(Box::new(clean_middle_const(ct, cx)))),
+ }));
+ ret_val
+}
+
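+/// Build the clean `GenericArgs` for an external path, using the parenthesized `Fn(…) -> …`
+/// sugar for the `Fn`-family traits (e.g. `Fn(u32, u32)` rather than `Fn<(u32, u32)>`; the
+/// return type is currently omitted, see the FIXME below).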
+fn external_generic_args<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ did: DefId,
+ has_self: bool,
+ bindings: Vec<TypeBinding>,
+ substs: SubstsRef<'tcx>,
+) -> GenericArgs {
+ let args = substs_to_args(cx, substs, has_self);
+
+ if cx.tcx.fn_trait_kind_from_lang_item(did).is_some() {
+ let inputs =
+ // The trait's first substitution is the one after self, if there is one.
+ match substs.iter().nth(if has_self { 1 } else { 0 }).unwrap().expect_ty().kind() {
+ ty::Tuple(tys) => tys.iter().map(|t| clean_middle_ty(t, cx, None)).collect::<Vec<_>>().into(),
+ _ => return GenericArgs::AngleBracketed { args: args.into(), bindings: bindings.into() },
+ };
+ let output = None;
+ // FIXME(#20299) return type comes from a projection now
+ // match types[1].kind {
+ // ty::Tuple(ref v) if v.is_empty() => None, // -> ()
+ // _ => Some(types[1].clean(cx))
+ // };
+ GenericArgs::Parenthesized { inputs, output }
+ } else {
+ GenericArgs::AngleBracketed { args: args.into(), bindings: bindings.into() }
+ }
+}
+
+pub(super) fn external_path<'tcx>(
+ cx: &mut DocContext<'tcx>,
+ did: DefId,
+ has_self: bool,
+ bindings: Vec<TypeBinding>,
+ substs: SubstsRef<'tcx>,
+) -> Path {
+ let def_kind = cx.tcx.def_kind(did);
+ let name = cx.tcx.item_name(did);
+ Path {
+ res: Res::Def(def_kind, did),
+ segments: vec![PathSegment {
+ name,
+ args: external_generic_args(cx, did, has_self, bindings, substs),
+ }],
+ }
+}
+
+/// Remove the generic arguments from a path.
+pub(crate) fn strip_path_generics(mut path: Path) -> Path {
+ for ps in path.segments.iter_mut() {
+ ps.args = GenericArgs::AngleBracketed { args: Default::default(), bindings: ThinVec::new() }
+ }
+
+ path
+}
+
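+/// Render a `hir::QPath` as a plain string, e.g. a fully resolved path prints as
+/// `"std::vec::Vec"` while a type-relative path prints only its final segment (illustrative).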
+pub(crate) fn qpath_to_string(p: &hir::QPath<'_>) -> String {
+ let segments = match *p {
+ hir::QPath::Resolved(_, path) => &path.segments,
+ hir::QPath::TypeRelative(_, segment) => return segment.ident.to_string(),
+ hir::QPath::LangItem(lang_item, ..) => return lang_item.name().to_string(),
+ };
+
+ let mut s = String::new();
+ for (i, seg) in segments.iter().enumerate() {
+ if i > 0 {
+ s.push_str("::");
+ }
+ if seg.ident.name != kw::PathRoot {
+ s.push_str(seg.ident.as_str());
+ }
+ }
+ s
+}
+
+pub(crate) fn build_deref_target_impls(
+ cx: &mut DocContext<'_>,
+ items: &[Item],
+ ret: &mut Vec<Item>,
+) {
+ let tcx = cx.tcx;
+
+ for item in items {
+ let target = match *item.kind {
+ ItemKind::AssocTypeItem(ref t, _) => &t.type_,
+ _ => continue,
+ };
+
+ if let Some(prim) = target.primitive_type() {
+ let _prof_timer = cx.tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
+ for did in prim.impls(tcx).filter(|did| !did.is_local()) {
+ inline::build_impl(cx, None, did, None, ret);
+ }
+ } else if let Type::Path { path } = target {
+ let did = path.def_id();
+ if !did.is_local() {
+ inline::build_impls(cx, None, did, None, ret);
+ }
+ }
+ }
+}
+
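+/// Derive a display name from the pattern of a function parameter, e.g. `x` for a simple
+/// binding, `(a, b)` for a tuple pattern, and `_` for wildcard or struct patterns (illustrative).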
+pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
+ use rustc_hir::*;
+ debug!("trying to get a name from pattern: {:?}", p);
+
+ Symbol::intern(&match p.kind {
+ PatKind::Wild | PatKind::Struct(..) => return kw::Underscore,
+ PatKind::Binding(_, _, ident, _) => return ident.name,
+ PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
+ PatKind::Or(pats) => {
+ pats.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(" | ")
+ }
+ PatKind::Tuple(elts, _) => format!(
+ "({})",
+ elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
+ ),
+ PatKind::Box(p) => return name_from_pat(&*p),
+ PatKind::Ref(p, _) => return name_from_pat(&*p),
+ PatKind::Lit(..) => {
+ warn!(
+ "tried to get argument name from PatKind::Lit, which is silly in function arguments"
+ );
+ return Symbol::intern("()");
+ }
+ PatKind::Range(..) => return kw::Underscore,
+ PatKind::Slice(begin, ref mid, end) => {
+ let begin = begin.iter().map(|p| name_from_pat(p).to_string());
+ let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
+ let end = end.iter().map(|p| name_from_pat(p).to_string());
+ format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
+ }
+ })
+}
+
+pub(crate) fn print_const(cx: &DocContext<'_>, n: ty::Const<'_>) -> String {
+ match n.kind() {
+ ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs: _, promoted }) => {
+ let mut s = if let Some(def) = def.as_local() {
+ print_const_expr(cx.tcx, cx.tcx.hir().body_owned_by(def.did))
+ } else {
+ inline::print_inlined_const(cx.tcx, def.did)
+ };
+ if let Some(promoted) = promoted {
+ s.push_str(&format!("::{:?}", promoted))
+ }
+ s
+ }
+ _ => {
+ let mut s = n.to_string();
+ // array lengths are obviously usize
+ if s.ends_with("_usize") {
+ let n = s.len() - "_usize".len();
+ s.truncate(n);
+ if s.ends_with(": ") {
+ let n = s.len() - ": ".len();
+ s.truncate(n);
+ }
+ }
+ s
+ }
+ }
+}
+
+pub(crate) fn print_evaluated_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<String> {
+ tcx.const_eval_poly(def_id).ok().and_then(|val| {
+ let ty = tcx.type_of(def_id);
+ match (val, ty.kind()) {
+ (_, &ty::Ref(..)) => None,
+ (ConstValue::Scalar(_), &ty::Adt(_, _)) => None,
+ (ConstValue::Scalar(_), _) => {
+ let const_ = mir::ConstantKind::from_value(val, ty);
+ Some(print_const_with_custom_print_scalar(tcx, const_))
+ }
+ _ => None,
+ }
+ })
+}
+
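+/// Group the digits of an integer literal string with `_` separators.
+///
+/// A few examples, mirroring the unit tests in `utils/tests.rs` (illustrative):
+///
+/// ```text
+/// 12345678    ->  12_345_678
+/// 0xa2e6345b  ->  0xa2e6_345b
+/// 0o123451    ->  0o123451      (short octal literals are left unchanged)
+/// ```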
+fn format_integer_with_underscore_sep(num: &str) -> String {
+ let num_chars: Vec<_> = num.chars().collect();
+ let mut num_start_index = if num_chars.get(0) == Some(&'-') { 1 } else { 0 };
+ let chunk_size = match num[num_start_index..].as_bytes() {
+ [b'0', b'b' | b'x', ..] => {
+ num_start_index += 2;
+ 4
+ }
+ [b'0', b'o', ..] => {
+ num_start_index += 2;
+ let remaining_chars = num_chars.len() - num_start_index;
+ if remaining_chars <= 6 {
+ // don't add underscores to Unix permissions like 0755 or 100755
+ return num.to_string();
+ }
+ 3
+ }
+ _ => 3,
+ };
+
+ num_chars[..num_start_index]
+ .iter()
+ .chain(num_chars[num_start_index..].rchunks(chunk_size).rev().intersperse(&['_']).flatten())
+ .collect()
+}
+
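+/// Pretty-print an evaluated scalar constant as `<value><type>`, e.g. `1_000u32` or `-1i8`
+/// (illustrative); non-integer constants fall through to the default `to_string`.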
+fn print_const_with_custom_print_scalar(tcx: TyCtxt<'_>, ct: mir::ConstantKind<'_>) -> String {
+ // Use a slightly different format for integer types, which always shows the actual value.
+ // For all other types, fall back to the original `pretty_print_const`.
+ match (ct, ct.ty().kind()) {
+ (mir::ConstantKind::Val(ConstValue::Scalar(int), _), ty::Uint(ui)) => {
+ format!("{}{}", format_integer_with_underscore_sep(&int.to_string()), ui.name_str())
+ }
+ (mir::ConstantKind::Val(ConstValue::Scalar(int), _), ty::Int(i)) => {
+ let ty = tcx.lift(ct.ty()).unwrap();
+ let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size;
+ let data = int.assert_bits(size);
+ let sign_extended_data = size.sign_extend(data) as i128;
+ format!(
+ "{}{}",
+ format_integer_with_underscore_sep(&sign_extended_data.to_string()),
+ i.name_str()
+ )
+ }
+ _ => ct.to_string(),
+ }
+}
+
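+/// Whether the HIR node behind `hir_id` is a literal expression, possibly behind a single
+/// leading negation (e.g. `42` or `-42`).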
+pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
+ if let hir::Node::Expr(expr) = tcx.hir().get(hir_id) {
+ if let hir::ExprKind::Lit(_) = &expr.kind {
+ return true;
+ }
+
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind {
+ if let hir::ExprKind::Lit(_) = &expr.kind {
+ return true;
+ }
+ }
+ }
+
+ false
+}
+
+/// Build a textual representation of an unevaluated constant expression.
+///
+/// If the const expression is too complex, an underscore `_` is returned.
+/// For const arguments, it's `{ _ }` to be precise.
+/// This means that the output is not necessarily valid Rust code.
+///
+/// Currently, only
+///
+/// * literals (optionally with a leading `-`)
+/// * unit `()`
+/// * blocks (`{ … }`) around simple expressions and
+/// * paths without arguments
+///
+/// are considered simple enough. Simple blocks are included since they are
+/// necessary to disambiguate unit from the unit type.
+/// This list might get extended in the future.
+///
+/// Without this censoring, in a lot of cases the output would get too large
+/// and verbose. Consider `match` expressions, blocks and deeply nested ADTs.
+/// Further, private and `doc(hidden)` fields of structs would get leaked
+/// since HIR datatypes like the `body` parameter do not contain enough
+/// semantic information for this function to be able to hide them –
+/// at least not without significant performance overhead.
+///
+/// Whenever possible, prefer to evaluate the constant first and try to
+/// use a different method for pretty-printing. Ideally this function
+/// should only ever be used as a fallback.
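+///
+/// A couple of illustrative renderings:
+///
+/// ```text
+/// 0xff_u8  ->  0xff_u8   (literal: the source snippet is kept verbatim)
+/// N + 1    ->  _         (too complex; `{ _ }` if it appears as an anonymous const argument)
+/// ```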
+pub(crate) fn print_const_expr(tcx: TyCtxt<'_>, body: hir::BodyId) -> String {
+ let hir = tcx.hir();
+ let value = &hir.body(body).value;
+
+ #[derive(PartialEq, Eq)]
+ enum Classification {
+ Literal,
+ Simple,
+ Complex,
+ }
+
+ use Classification::*;
+
+ fn classify(expr: &hir::Expr<'_>) -> Classification {
+ match &expr.kind {
+ hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
+ if matches!(expr.kind, hir::ExprKind::Lit(_)) { Literal } else { Complex }
+ }
+ hir::ExprKind::Lit(_) => Literal,
+ hir::ExprKind::Tup([]) => Simple,
+ hir::ExprKind::Block(hir::Block { stmts: [], expr: Some(expr), .. }, _) => {
+ if classify(expr) == Complex { Complex } else { Simple }
+ }
+ // Paths with a self-type or arguments are too “complex” by our measure, since
+ // they may leak private fields of structs (with feature `adt_const_params`).
+ // Consider: `<Self as Trait<{ Struct { private: () } }>>::CONSTANT`.
+ // Paths without arguments are definitely harmless though.
+ hir::ExprKind::Path(hir::QPath::Resolved(_, hir::Path { segments, .. })) => {
+ if segments.iter().all(|segment| segment.args.is_none()) { Simple } else { Complex }
+ }
+ // FIXME: Claiming that those kinds of QPaths are simple is probably not true if the Ty
+ // contains const arguments. Is there a *concise* way to check for this?
+ hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => Simple,
+ // FIXME: Can they contain const arguments and thus leak private struct fields?
+ hir::ExprKind::Path(hir::QPath::LangItem(..)) => Simple,
+ _ => Complex,
+ }
+ }
+
+ let classification = classify(value);
+
+ if classification == Literal
+ && !value.span.from_expansion()
+ && let Ok(snippet) = tcx.sess.source_map().span_to_snippet(value.span) {
+ // For literals, we avoid invoking the pretty-printer and use the source snippet instead to
+ // preserve certain stylistic choices the user likely made for the sake of legibility, like
+ //
+ // * hexadecimal notation
+ // * underscores
+ // * character escapes
+ //
+ // FIXME: This passes through `-/*spacer*/0` verbatim.
+ snippet
+ } else if classification == Simple {
+ // Otherwise we prefer pretty-printing to get rid of extraneous whitespace, comments and
+ // other formatting artifacts.
+ rustc_hir_pretty::id_to_string(&hir, body.hir_id)
+ } else if tcx.def_kind(hir.body_owner_def_id(body).to_def_id()) == DefKind::AnonConst {
+ // FIXME: Omit the curly braces if the enclosing expression is an array literal
+ // with a repeated element (an `ExprKind::Repeat`) as in such case it
+ // would not actually need any disambiguation.
+ "{ _ }".to_owned()
+ } else {
+ "_".to_owned()
+ }
+}
+
+/// Given a type `Path`, resolve it to a `Type` using the `TyCtxt`.
+pub(crate) fn resolve_type(cx: &mut DocContext<'_>, path: Path) -> Type {
+ debug!("resolve_type({:?})", path);
+
+ match path.res {
+ Res::PrimTy(p) => Primitive(PrimitiveType::from(p)),
+ Res::SelfTy { .. } if path.segments.len() == 1 => Generic(kw::SelfUpper),
+ Res::Def(DefKind::TyParam, _) if path.segments.len() == 1 => Generic(path.segments[0].name),
+ _ => {
+ let _ = register_res(cx, path.res);
+ Type::Path { path }
+ }
+ }
+}
+
+pub(crate) fn get_auto_trait_and_blanket_impls(
+ cx: &mut DocContext<'_>,
+ item_def_id: DefId,
+) -> impl Iterator<Item = Item> {
+ let auto_impls = cx
+ .sess()
+ .prof
+ .generic_activity("get_auto_trait_impls")
+ .run(|| AutoTraitFinder::new(cx).get_auto_trait_impls(item_def_id));
+ let blanket_impls = cx
+ .sess()
+ .prof
+ .generic_activity("get_blanket_impls")
+ .run(|| BlanketImplFinder { cx }.get_blanket_impls(item_def_id));
+ auto_impls.into_iter().chain(blanket_impls)
+}
+
+/// If `res` has a documentation page associated, store it in the cache.
+///
+/// This is later used by [`href()`] to determine the HTML link for the item.
+///
+/// [`href()`]: crate::html::format::href
+pub(crate) fn register_res(cx: &mut DocContext<'_>, res: Res) -> DefId {
+ use DefKind::*;
+ debug!("register_res({:?})", res);
+
+ let (did, kind) = match res {
+ // These should be added to the cache using `record_extern_fqn`.
+ Res::Def(
+ kind @ (AssocTy | AssocFn | AssocConst | Variant | Fn | TyAlias | Enum | Trait | Struct
+ | Union | Mod | ForeignTy | Const | Static(_) | Macro(..) | TraitAlias),
+ i,
+ ) => (i, kind.into()),
+ // This is part of a trait definition or trait impl; document the trait.
+ Res::SelfTy { trait_: Some(trait_def_id), alias_to: _ } => (trait_def_id, ItemType::Trait),
+ // This is an inherent impl or a type definition; it doesn't have its own page.
+ Res::SelfTy { trait_: None, alias_to: Some((item_def_id, _)) } => return item_def_id,
+ Res::SelfTy { trait_: None, alias_to: None }
+ | Res::PrimTy(_)
+ | Res::ToolMod
+ | Res::SelfCtor(_)
+ | Res::Local(_)
+ | Res::NonMacroAttr(_)
+ | Res::Err => return res.def_id(),
+ Res::Def(
+ TyParam | ConstParam | Ctor(..) | ExternCrate | Use | ForeignMod | AnonConst
+ | InlineConst | OpaqueTy | Field | LifetimeParam | GlobalAsm | Impl | Closure
+ | Generator,
+ id,
+ ) => return id,
+ };
+ if did.is_local() {
+ return did;
+ }
+ inline::record_extern_fqn(cx, did, kind);
+ if let ItemType::Trait = kind {
+ inline::record_extern_trait(cx, did);
+ }
+ did
+}
+
+pub(crate) fn resolve_use_source(cx: &mut DocContext<'_>, path: Path) -> ImportSource {
+ ImportSource {
+ did: if path.res.opt_def_id().is_none() { None } else { Some(register_res(cx, path.res)) },
+ path,
+ }
+}
+
+pub(crate) fn enter_impl_trait<'tcx, F, R>(cx: &mut DocContext<'tcx>, f: F) -> R
+where
+ F: FnOnce(&mut DocContext<'tcx>) -> R,
+{
+ let old_bounds = mem::take(&mut cx.impl_trait_bounds);
+ let r = f(cx);
+ assert!(cx.impl_trait_bounds.is_empty());
+ cx.impl_trait_bounds = old_bounds;
+ r
+}
+
+/// Find the nearest parent module of a [`DefId`].
+pub(crate) fn find_nearest_parent_module(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> {
+ if def_id.is_top_level_module() {
+ // The crate root has no parent. Use it as the root instead.
+ Some(def_id)
+ } else {
+ let mut current = def_id;
+ // The immediate parent might not always be a module.
+ // Find the first parent which is.
+ while let Some(parent) = tcx.opt_parent(current) {
+ if tcx.def_kind(parent) == DefKind::Mod {
+ return Some(parent);
+ }
+ current = parent;
+ }
+ None
+ }
+}
+
+/// Checks for the existence of `hidden` in the attribute below if `flag` is `sym::hidden`:
+///
+/// ```
+/// #[doc(hidden)]
+/// pub fn foo() {}
+/// ```
+///
+/// This function exists because it runs on `hir::Attributes`, whereas the equivalent
+/// `has_doc_flag` helper is a method on `clean::Attributes`.
+pub(crate) fn has_doc_flag(tcx: TyCtxt<'_>, did: DefId, flag: Symbol) -> bool {
+ tcx.get_attrs(did, sym::doc).any(|attr| {
+ attr.meta_item_list().map_or(false, |l| rustc_attr::list_contains_name(&l, flag))
+ })
+}
+
+/// A link to `doc.rust-lang.org` that includes the channel name. Use this instead of manual links
+/// so that the channel is consistent.
+///
+/// Set by `bootstrap::Builder::doc_rust_lang_org_channel` in order to keep tests passing on beta/stable.
+pub(crate) const DOC_RUST_LANG_ORG_CHANNEL: &str = env!("DOC_RUST_LANG_ORG_CHANNEL");
+
+/// Render a sequence of macro arms in a format suitable for displaying to the user
+/// as part of an item declaration.
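+///
+/// The bodies are elided, so each arm renders roughly as `($x:expr) => { ... }` followed by
+/// `arm_delim` (illustrative).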
+pub(super) fn render_macro_arms<'a>(
+ tcx: TyCtxt<'_>,
+ matchers: impl Iterator<Item = &'a TokenTree>,
+ arm_delim: &str,
+) -> String {
+ let mut out = String::new();
+ for matcher in matchers {
+ writeln!(out, " {} => {{ ... }}{}", render_macro_matcher(tcx, matcher), arm_delim)
+ .unwrap();
+ }
+ out
+}
+
+pub(super) fn display_macro_source(
+ cx: &mut DocContext<'_>,
+ name: Symbol,
+ def: &ast::MacroDef,
+ def_id: DefId,
+ vis: Visibility,
+) -> String {
+ let tts: Vec<_> = def.body.inner_tokens().into_trees().collect();
+ // Extract the spans of all matchers. They represent the "interface" of the macro.
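+ // Each arm is expected to span roughly four token trees (`matcher => transcriber ;`),
+ // hence taking the first token tree of every chunk of four.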
+ let matchers = tts.chunks(4).map(|arm| &arm[0]);
+
+ if def.macro_rules {
+ format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))
+ } else {
+ if matchers.len() <= 1 {
+ format!(
+ "{}macro {}{} {{\n ...\n}}",
+ vis.to_src_with_space(cx.tcx, def_id),
+ name,
+ matchers.map(|matcher| render_macro_matcher(cx.tcx, matcher)).collect::<String>(),
+ )
+ } else {
+ format!(
+ "{}macro {} {{\n{}}}",
+ vis.to_src_with_space(cx.tcx, def_id),
+ name,
+ render_macro_arms(cx.tcx, matchers, ","),
+ )
+ }
+ }
+}
diff --git a/src/librustdoc/clean/utils/tests.rs b/src/librustdoc/clean/utils/tests.rs
new file mode 100644
index 000000000..ebf4b4954
--- /dev/null
+++ b/src/librustdoc/clean/utils/tests.rs
@@ -0,0 +1,41 @@
+use super::*;
+
+#[test]
+fn int_format_decimal() {
+ assert_eq!(format_integer_with_underscore_sep("12345678"), "12_345_678");
+ assert_eq!(format_integer_with_underscore_sep("123"), "123");
+ assert_eq!(format_integer_with_underscore_sep("123459"), "123_459");
+ assert_eq!(format_integer_with_underscore_sep("-12345678"), "-12_345_678");
+ assert_eq!(format_integer_with_underscore_sep("-123"), "-123");
+ assert_eq!(format_integer_with_underscore_sep("-123459"), "-123_459");
+}
+
+#[test]
+fn int_format_hex() {
+ assert_eq!(format_integer_with_underscore_sep("0xab3"), "0xab3");
+ assert_eq!(format_integer_with_underscore_sep("0xa2345b"), "0xa2_345b");
+ assert_eq!(format_integer_with_underscore_sep("0xa2e6345b"), "0xa2e6_345b");
+ assert_eq!(format_integer_with_underscore_sep("-0xab3"), "-0xab3");
+ assert_eq!(format_integer_with_underscore_sep("-0xa2345b"), "-0xa2_345b");
+ assert_eq!(format_integer_with_underscore_sep("-0xa2e6345b"), "-0xa2e6_345b");
+}
+
+#[test]
+fn int_format_octal() {
+ assert_eq!(format_integer_with_underscore_sep("0o12345671"), "0o12_345_671");
+ assert_eq!(format_integer_with_underscore_sep("0o123"), "0o123");
+ assert_eq!(format_integer_with_underscore_sep("0o123451"), "0o123451");
+ assert_eq!(format_integer_with_underscore_sep("-0o12345671"), "-0o12_345_671");
+ assert_eq!(format_integer_with_underscore_sep("-0o123"), "-0o123");
+ assert_eq!(format_integer_with_underscore_sep("-0o123451"), "-0o123451");
+}
+
+#[test]
+fn int_format_binary() {
+ assert_eq!(format_integer_with_underscore_sep("0b101"), "0b101");
+ assert_eq!(format_integer_with_underscore_sep("0b101101011"), "0b1_0110_1011");
+ assert_eq!(format_integer_with_underscore_sep("0b01101011"), "0b0110_1011");
+ assert_eq!(format_integer_with_underscore_sep("-0b101"), "-0b101");
+ assert_eq!(format_integer_with_underscore_sep("-0b101101011"), "-0b1_0110_1011");
+ assert_eq!(format_integer_with_underscore_sep("-0b01101011"), "-0b0110_1011");
+}
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
new file mode 100644
index 000000000..8a8cc272e
--- /dev/null
+++ b/src/librustdoc/config.rs
@@ -0,0 +1,828 @@
+use std::collections::BTreeMap;
+use std::convert::TryFrom;
+use std::ffi::OsStr;
+use std::fmt;
+use std::path::PathBuf;
+use std::str::FromStr;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_driver::print_flag_list;
+use rustc_session::config::{
+ self, parse_crate_types_from_list, parse_externs, parse_target_triple, CrateType,
+};
+use rustc_session::config::{get_cmd_lint_options, nightly_options};
+use rustc_session::config::{
+ CodegenOptions, ErrorOutputType, Externs, JsonUnusedExterns, UnstableOptions,
+};
+use rustc_session::getopts;
+use rustc_session::lint::Level;
+use rustc_session::search_paths::SearchPath;
+use rustc_span::edition::Edition;
+use rustc_target::spec::TargetTriple;
+
+use crate::core::new_handler;
+use crate::externalfiles::ExternalHtml;
+use crate::html;
+use crate::html::markdown::IdMap;
+use crate::html::render::StylePath;
+use crate::html::static_files;
+use crate::opts;
+use crate::passes::{self, Condition};
+use crate::scrape_examples::{AllCallLocations, ScrapeExamplesOptions};
+use crate::theme;
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub(crate) enum OutputFormat {
+ Json,
+ Html,
+}
+
+impl Default for OutputFormat {
+ fn default() -> OutputFormat {
+ OutputFormat::Html
+ }
+}
+
+impl OutputFormat {
+ pub(crate) fn is_json(&self) -> bool {
+ matches!(self, OutputFormat::Json)
+ }
+}
+
+impl TryFrom<&str> for OutputFormat {
+ type Error = String;
+
+ fn try_from(value: &str) -> Result<Self, Self::Error> {
+ match value {
+ "json" => Ok(OutputFormat::Json),
+ "html" => Ok(OutputFormat::Html),
+ _ => Err(format!("unknown output format `{}`", value)),
+ }
+ }
+}
+
+/// Configuration options for rustdoc.
+#[derive(Clone)]
+pub(crate) struct Options {
+ // Basic options / Options passed directly to rustc
+ /// The crate root or Markdown file to load.
+ pub(crate) input: PathBuf,
+ /// The name of the crate being documented.
+ pub(crate) crate_name: Option<String>,
+ /// Whether or not this is a proc-macro crate
+ pub(crate) proc_macro_crate: bool,
+ /// How to format errors and warnings.
+ pub(crate) error_format: ErrorOutputType,
+ /// Width of output buffer to truncate errors appropriately.
+ pub(crate) diagnostic_width: Option<usize>,
+ /// Library search paths to hand to the compiler.
+ pub(crate) libs: Vec<SearchPath>,
+ /// Library search paths, as raw strings, to hand to the compiler.
+ pub(crate) lib_strs: Vec<String>,
+ /// The list of external crates to link against.
+ pub(crate) externs: Externs,
+ /// The list of external crates, as raw strings, to link against.
+ pub(crate) extern_strs: Vec<String>,
+ /// List of `cfg` flags to hand to the compiler. Always includes `rustdoc`.
+ pub(crate) cfgs: Vec<String>,
+ /// List of check cfg flags to hand to the compiler.
+ pub(crate) check_cfgs: Vec<String>,
+ /// Codegen options to hand to the compiler.
+ pub(crate) codegen_options: CodegenOptions,
+ /// Codegen options strings to hand to the compiler.
+ pub(crate) codegen_options_strs: Vec<String>,
+ /// Unstable (`-Z`) options to pass to the compiler.
+ pub(crate) unstable_opts: UnstableOptions,
+ /// Unstable (`-Z`) options strings to pass to the compiler.
+ pub(crate) unstable_opts_strs: Vec<String>,
+ /// The target used to compile the crate against.
+ pub(crate) target: TargetTriple,
+ /// Edition used when reading the crate. Defaults to "2015". Also used by default when
+ /// compiling doctests from the crate.
+ pub(crate) edition: Edition,
+ /// The path to the sysroot. Used during the compilation process.
+ pub(crate) maybe_sysroot: Option<PathBuf>,
+ /// Lint information passed over the command-line.
+ pub(crate) lint_opts: Vec<(String, Level)>,
+ /// Whether to ask rustc to describe the lints it knows.
+ pub(crate) describe_lints: bool,
+ /// What level to cap lints at.
+ pub(crate) lint_cap: Option<Level>,
+
+ // Options specific to running doctests
+ /// Whether we should run doctests instead of generating docs.
+ pub(crate) should_test: bool,
+ /// List of arguments to pass to the test harness, if running tests.
+ pub(crate) test_args: Vec<String>,
+ /// The working directory in which to run tests.
+ pub(crate) test_run_directory: Option<PathBuf>,
+ /// Optional path to persist the doctest executables to; defaults to a
+ /// temporary directory if not set.
+ pub(crate) persist_doctests: Option<PathBuf>,
+ /// Runtool to run doctests with
+ pub(crate) runtool: Option<String>,
+ /// Arguments to pass to the runtool
+ pub(crate) runtool_args: Vec<String>,
+ /// Whether to allow ignoring doctests on a per-target basis.
+ /// For example, `ignore-foo` ignores running the doctest on any target that
+ /// contains "foo" as a substring.
+ pub(crate) enable_per_target_ignores: bool,
+ /// Do not run doctests; only compile them if `should_test` is active.
+ pub(crate) no_run: bool,
+
+ /// The path to a rustc-like binary to build tests with. If not set, we
+ /// default to loading from `$sysroot/bin/rustc`.
+ pub(crate) test_builder: Option<PathBuf>,
+
+ // Options that affect the documentation process
+ /// Whether to run the `calculate-doc-coverage` pass, which counts the number of public items
+ /// with and without documentation.
+ pub(crate) show_coverage: bool,
+
+ // Options that alter generated documentation pages
+ /// Crate version to note on the sidebar of generated docs.
+ pub(crate) crate_version: Option<String>,
+ /// Collected options specific to outputting final pages.
+ pub(crate) render_options: RenderOptions,
+ /// The format that we output when rendering.
+ ///
+ /// Currently used only for the `--show-coverage` option.
+ pub(crate) output_format: OutputFormat,
+ /// If this option is set to `true`, rustdoc will only run checks and not generate
+ /// documentation.
+ pub(crate) run_check: bool,
+ /// Whether doctests should emit unused externs
+ pub(crate) json_unused_externs: JsonUnusedExterns,
+ /// Whether to skip capturing stdout and stderr of tests.
+ pub(crate) nocapture: bool,
+
+ /// Configuration for scraping examples from the current crate. If this option is Some(..) then
+ /// the compiler will scrape examples and not generate documentation.
+ pub(crate) scrape_examples_options: Option<ScrapeExamplesOptions>,
+}
+
+impl fmt::Debug for Options {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ struct FmtExterns<'a>(&'a Externs);
+
+ impl<'a> fmt::Debug for FmtExterns<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.0.iter()).finish()
+ }
+ }
+
+ f.debug_struct("Options")
+ .field("input", &self.input)
+ .field("crate_name", &self.crate_name)
+ .field("proc_macro_crate", &self.proc_macro_crate)
+ .field("error_format", &self.error_format)
+ .field("libs", &self.libs)
+ .field("externs", &FmtExterns(&self.externs))
+ .field("cfgs", &self.cfgs)
+ .field("check-cfgs", &self.check_cfgs)
+ .field("codegen_options", &"...")
+ .field("unstable_options", &"...")
+ .field("target", &self.target)
+ .field("edition", &self.edition)
+ .field("maybe_sysroot", &self.maybe_sysroot)
+ .field("lint_opts", &self.lint_opts)
+ .field("describe_lints", &self.describe_lints)
+ .field("lint_cap", &self.lint_cap)
+ .field("should_test", &self.should_test)
+ .field("test_args", &self.test_args)
+ .field("test_run_directory", &self.test_run_directory)
+ .field("persist_doctests", &self.persist_doctests)
+ .field("show_coverage", &self.show_coverage)
+ .field("crate_version", &self.crate_version)
+ .field("render_options", &self.render_options)
+ .field("runtool", &self.runtool)
+ .field("runtool_args", &self.runtool_args)
+ .field("enable-per-target-ignores", &self.enable_per_target_ignores)
+ .field("run_check", &self.run_check)
+ .field("no_run", &self.no_run)
+ .field("nocapture", &self.nocapture)
+ .field("scrape_examples_options", &self.scrape_examples_options)
+ .finish()
+ }
+}
+
+/// Configuration options for the HTML page-creation process.
+#[derive(Clone, Debug)]
+pub(crate) struct RenderOptions {
+ /// Output directory to generate docs into. Defaults to `doc`.
+ pub(crate) output: PathBuf,
+ /// External files to insert into generated pages.
+ pub(crate) external_html: ExternalHtml,
+ /// A pre-populated `IdMap` with the default headings and any headings added by Markdown files
+ /// processed by `external_html`.
+ pub(crate) id_map: IdMap,
+ /// If present, playground URL to use in the "Run" button added to code samples.
+ ///
+ /// Be aware: This option can come both from the CLI and from crate attributes!
+ pub(crate) playground_url: Option<String>,
+ /// What sorting mode to use for module pages.
+ /// `ModuleSorting::Alphabetical` by default.
+ pub(crate) module_sorting: ModuleSorting,
+ /// List of themes to extend the docs with. Original argument name is included to assist in
+ /// displaying errors if it fails a theme check.
+ pub(crate) themes: Vec<StylePath>,
+ /// If present, CSS file that contains rules to add to the default CSS.
+ pub(crate) extension_css: Option<PathBuf>,
+ /// A map of crate names to the URL to use instead of querying the crate's `html_root_url`.
+ pub(crate) extern_html_root_urls: BTreeMap<String, String>,
+ /// Whether to give precedence to `html_root_url` or `--extern-html-root-url`.
+ pub(crate) extern_html_root_takes_precedence: bool,
+ /// A map of the default settings (values are as for DOM storage API). Keys should lack the
+ /// `rustdoc-` prefix.
+ pub(crate) default_settings: FxHashMap<String, String>,
+ /// If present, suffix added to CSS/JavaScript files when referencing them in generated pages.
+ pub(crate) resource_suffix: String,
+ /// Whether to run the static CSS/JavaScript through a minifier when outputting them. `true` by
+ /// default.
+ pub(crate) enable_minification: bool,
+ /// Whether to create an index page in the root of the output directory. If this is true but
+ /// `index_page` is None, generate a static listing of crates instead.
+ pub(crate) enable_index_page: bool,
+ /// A file to use as the index page at the root of the output directory. Overrides
+ /// `enable_index_page` to be true if set.
+ pub(crate) index_page: Option<PathBuf>,
+ /// An optional path to use as the location of static files. If not set, uses combinations of
+ /// `../` to reach the documentation root.
+ pub(crate) static_root_path: Option<String>,
+
+ // Options specific to reading standalone Markdown files
+ /// Whether to generate a table of contents on the output file when reading a standalone
+ /// Markdown file.
+ pub(crate) markdown_no_toc: bool,
+ /// Additional CSS files to link in pages generated from standalone Markdown files.
+ pub(crate) markdown_css: Vec<String>,
+ /// If present, playground URL to use in the "Run" button added to code samples generated from
+ /// standalone Markdown files. If not present, `playground_url` is used.
+ pub(crate) markdown_playground_url: Option<String>,
+ /// Document items that have lower than `pub` visibility.
+ pub(crate) document_private: bool,
+ /// Document items that have `doc(hidden)`.
+ pub(crate) document_hidden: bool,
+ /// If `true`, generate a JSON file in the crate folder instead of HTML redirection files.
+ pub(crate) generate_redirect_map: bool,
+ /// Show the memory layout of types in the docs.
+ pub(crate) show_type_layout: bool,
+ pub(crate) unstable_features: rustc_feature::UnstableFeatures,
+ pub(crate) emit: Vec<EmitType>,
+ /// If `true`, HTML source pages will generate links for items to their definition.
+ pub(crate) generate_link_to_definition: bool,
+ /// Set of function-call locations to include as examples
+ pub(crate) call_locations: AllCallLocations,
+ /// If `true`, Context::init will not emit shared files.
+ pub(crate) no_emit_shared: bool,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum ModuleSorting {
+ DeclarationOrder,
+ Alphabetical,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum EmitType {
+ Unversioned,
+ Toolchain,
+ InvocationSpecific,
+}
+
+impl FromStr for EmitType {
+ type Err = ();
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ use EmitType::*;
+ match s {
+ "unversioned-shared-resources" => Ok(Unversioned),
+ "toolchain-shared-resources" => Ok(Toolchain),
+ "invocation-specific" => Ok(InvocationSpecific),
+ _ => Err(()),
+ }
+ }
+}
+
+impl RenderOptions {
+ pub(crate) fn should_emit_crate(&self) -> bool {
+ self.emit.is_empty() || self.emit.contains(&EmitType::InvocationSpecific)
+ }
+}
+
+impl Options {
+ /// Parses the given command-line for options. If an error message or other early-return has
+ /// been printed, returns `Err` with the exit code.
+ pub(crate) fn from_matches(
+ matches: &getopts::Matches,
+ args: Vec<String>,
+ ) -> Result<Options, i32> {
+ let args = &args[1..];
+ // Check for unstable options.
+ nightly_options::check_nightly_options(matches, &opts());
+
+ if args.is_empty() || matches.opt_present("h") || matches.opt_present("help") {
+ crate::usage("rustdoc");
+ return Err(0);
+ } else if matches.opt_present("version") {
+ rustc_driver::version("rustdoc", matches);
+ return Err(0);
+ }
+
+ let z_flags = matches.opt_strs("Z");
+ if z_flags.iter().any(|x| *x == "help") {
+ print_flag_list("-Z", config::Z_OPTIONS);
+ return Err(0);
+ }
+ let c_flags = matches.opt_strs("C");
+ if c_flags.iter().any(|x| *x == "help") {
+ print_flag_list("-C", config::CG_OPTIONS);
+ return Err(0);
+ }
+
+ let color = config::parse_color(matches);
+ let config::JsonConfig { json_rendered, json_unused_externs, .. } =
+ config::parse_json(matches);
+ let error_format = config::parse_error_format(matches, color, json_rendered);
+ let diagnostic_width = matches.opt_get("diagnostic-width").unwrap_or_default();
+
+ let codegen_options = CodegenOptions::build(matches, error_format);
+ let unstable_opts = UnstableOptions::build(matches, error_format);
+
+ let diag = new_handler(error_format, None, diagnostic_width, &unstable_opts);
+
+ // check for deprecated options
+ check_deprecated_options(matches, &diag);
+
+ if matches.opt_strs("passes") == ["list"] {
+ println!("Available passes for running rustdoc:");
+ for pass in passes::PASSES {
+ println!("{:>20} - {}", pass.name, pass.description);
+ }
+ println!("\nDefault passes for rustdoc:");
+ for p in passes::DEFAULT_PASSES {
+ print!("{:>20}", p.pass.name);
+ println_condition(p.condition);
+ }
+
+ if nightly_options::match_is_nightly_build(matches) {
+ println!("\nPasses run with `--show-coverage`:");
+ for p in passes::COVERAGE_PASSES {
+ print!("{:>20}", p.pass.name);
+ println_condition(p.condition);
+ }
+ }
+
+ fn println_condition(condition: Condition) {
+ use Condition::*;
+ match condition {
+ Always => println!(),
+ WhenDocumentPrivate => println!(" (when --document-private-items)"),
+ WhenNotDocumentPrivate => println!(" (when not --document-private-items)"),
+ WhenNotDocumentHidden => println!(" (when not --document-hidden-items)"),
+ }
+ }
+
+ return Err(0);
+ }
+
+ let mut emit = Vec::new();
+ for list in matches.opt_strs("emit") {
+ for kind in list.split(',') {
+ match kind.parse() {
+ Ok(kind) => emit.push(kind),
+ Err(()) => {
+ diag.err(&format!("unrecognized emission type: {}", kind));
+ return Err(1);
+ }
+ }
+ }
+ }
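+ // For example, `--emit=toolchain-shared-resources,invocation-specific` results in
+ // `emit == [EmitType::Toolchain, EmitType::InvocationSpecific]`; any other name is
+ // rejected with the "unrecognized emission type" error above.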
+
+ // check for `--output-format=json`
+ if !matches!(matches.opt_str("output-format").as_deref(), None | Some("html"))
+ && !matches.opt_present("show-coverage")
+ && !nightly_options::is_unstable_enabled(matches)
+ {
+ rustc_session::early_error(
+ error_format,
+ "the -Z unstable-options flag must be passed to enable --output-format for documentation generation (see https://github.com/rust-lang/rust/issues/76578)",
+ );
+ }
+
+ let to_check = matches.opt_strs("check-theme");
+ if !to_check.is_empty() {
+ let paths = theme::load_css_paths(static_files::themes::LIGHT.as_bytes());
+ let mut errors = 0;
+
+ println!("rustdoc: [check-theme] Starting tests! (Ignoring all other arguments)");
+ for theme_file in to_check.iter() {
+ print!(" - Checking \"{}\"...", theme_file);
+ let (success, differences) = theme::test_theme_against(theme_file, &paths, &diag);
+ if !differences.is_empty() || !success {
+ println!(" FAILED");
+ errors += 1;
+ if !differences.is_empty() {
+ println!("{}", differences.join("\n"));
+ }
+ } else {
+ println!(" OK");
+ }
+ }
+ if errors != 0 {
+ return Err(1);
+ }
+ return Err(0);
+ }
+
+ let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
+
+ let input = PathBuf::from(if describe_lints {
+ "" // dummy, this won't be used
+ } else if matches.free.is_empty() {
+ diag.struct_err("missing file operand").emit();
+ return Err(1);
+ } else if matches.free.len() > 1 {
+ diag.struct_err("too many file operands").emit();
+ return Err(1);
+ } else {
+ &matches.free[0]
+ });
+
+ let libs = matches
+ .opt_strs("L")
+ .iter()
+ .map(|s| SearchPath::from_cli_opt(s, error_format))
+ .collect();
+ let externs = parse_externs(matches, &unstable_opts, error_format);
+ let extern_html_root_urls = match parse_extern_html_roots(matches) {
+ Ok(ex) => ex,
+ Err(err) => {
+ diag.struct_err(err).emit();
+ return Err(1);
+ }
+ };
+
+ let default_settings: Vec<Vec<(String, String)>> = vec![
+ matches
+ .opt_str("default-theme")
+ .iter()
+ .flat_map(|theme| {
+ vec![
+ ("use-system-theme".to_string(), "false".to_string()),
+ ("theme".to_string(), theme.to_string()),
+ ]
+ })
+ .collect(),
+ matches
+ .opt_strs("default-setting")
+ .iter()
+ .map(|s| match s.split_once('=') {
+ None => (s.clone(), "true".to_string()),
+ Some((k, v)) => (k.to_string(), v.to_string()),
+ })
+ .collect(),
+ ];
+ let default_settings = default_settings
+ .into_iter()
+ .flatten()
+ .map(
+ // The keys here become part of `data-` attribute names in the generated HTML. The
+ // browser does a strange mapping when converting them into attributes on the
+ // `dataset` property on the DOM HTML Node:
+ // https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/dataset
+ //
+ // The original key values we have are the same as the DOM storage API keys and the
+ // command line options, so contain `-`. Our JavaScript needs to be able to look
+ // these values up both in `dataset` and in the storage API, so it needs to be able
+ // to convert the names back and forth. Despite doing this kebab-case to
+ // StudlyCaps transformation automatically, the JS DOM API does not provide a
+ // mechanism for doing just the transformation on a string. So we want to avoid
+ // the StudlyCaps representation in the `dataset` property.
+ //
+ // We solve this by replacing all the `-`s with `_`s. We do that here, when we
+ // generate the `data-` attributes, and in the JS, when we look them up. (See
+ // `getSettingValue` in `storage.js.`) Converting `-` to `_` is simple in JS.
+ //
+ // The values will be HTML-escaped by the default Tera escaping.
+ |(k, v)| (k.replace('-', "_"), v),
+ )
+ .collect();
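+ // For example, `--default-theme=dark` (here `dark` is just an example value) produces the
+ // pairs `("use-system-theme", "false")` and `("theme", "dark")` above, which become
+ // `("use_system_theme", "false")` and `("theme", "dark")` here and end up as
+ // `data-use_system_theme`/`data-theme` attributes; `getSettingValue` in `storage.js` applies
+ // the same `-` -> `_` replacement when looking the key up in `dataset`.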
+
+ let test_args = matches.opt_strs("test-args");
+ let test_args: Vec<String> =
+ test_args.iter().flat_map(|s| s.split_whitespace()).map(|s| s.to_string()).collect();
+
+ let should_test = matches.opt_present("test");
+ let no_run = matches.opt_present("no-run");
+
+ if !should_test && no_run {
+ diag.err("the `--test` flag must be passed to enable `--no-run`");
+ return Err(1);
+ }
+
+ let out_dir = matches.opt_str("out-dir").map(|s| PathBuf::from(&s));
+ let output = matches.opt_str("output").map(|s| PathBuf::from(&s));
+ let output = match (out_dir, output) {
+ (Some(_), Some(_)) => {
+ diag.struct_err("cannot use both 'out-dir' and 'output' at once").emit();
+ return Err(1);
+ }
+ (Some(out_dir), None) => out_dir,
+ (None, Some(output)) => output,
+ (None, None) => PathBuf::from("doc"),
+ };
+
+ let cfgs = matches.opt_strs("cfg");
+ let check_cfgs = matches.opt_strs("check-cfg");
+
+ let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s));
+
+ if let Some(ref p) = extension_css {
+ if !p.is_file() {
+ diag.struct_err("option --extend-css argument must be a file").emit();
+ return Err(1);
+ }
+ }
+
+ let mut themes = Vec::new();
+ if matches.opt_present("theme") {
+ let paths = theme::load_css_paths(static_files::themes::LIGHT.as_bytes());
+
+ for (theme_file, theme_s) in
+ matches.opt_strs("theme").iter().map(|s| (PathBuf::from(&s), s.to_owned()))
+ {
+ if !theme_file.is_file() {
+ diag.struct_err(&format!("invalid argument: \"{}\"", theme_s))
+ .help("arguments to --theme must be files")
+ .emit();
+ return Err(1);
+ }
+ if theme_file.extension() != Some(OsStr::new("css")) {
+ diag.struct_err(&format!("invalid argument: \"{}\"", theme_s))
+ .help("arguments to --theme must have a .css extension")
+ .emit();
+ return Err(1);
+ }
+ let (success, ret) = theme::test_theme_against(&theme_file, &paths, &diag);
+ if !success {
+ diag.struct_err(&format!("error loading theme file: \"{}\"", theme_s)).emit();
+ return Err(1);
+ } else if !ret.is_empty() {
+ diag.struct_warn(&format!(
+ "theme file \"{}\" is missing CSS rules from the default theme",
+ theme_s
+ ))
+ .warn("the theme may appear incorrect when loaded")
+ .help(&format!(
+ "to see what rules are missing, call `rustdoc --check-theme \"{}\"`",
+ theme_s
+ ))
+ .emit();
+ }
+ themes.push(StylePath { path: theme_file });
+ }
+ }
+
+ let edition = config::parse_crate_edition(matches);
+
+ let mut id_map = html::markdown::IdMap::new();
+ let Some(external_html) = ExternalHtml::load(
+ &matches.opt_strs("html-in-header"),
+ &matches.opt_strs("html-before-content"),
+ &matches.opt_strs("html-after-content"),
+ &matches.opt_strs("markdown-before-content"),
+ &matches.opt_strs("markdown-after-content"),
+ nightly_options::match_is_nightly_build(matches),
+ &diag,
+ &mut id_map,
+ edition,
+ &None,
+ ) else {
+ return Err(3);
+ };
+
+ match matches.opt_str("r").as_deref() {
+ Some("rust") | None => {}
+ Some(s) => {
+ diag.struct_err(&format!("unknown input format: {}", s)).emit();
+ return Err(1);
+ }
+ }
+
+ let index_page = matches.opt_str("index-page").map(|s| PathBuf::from(&s));
+ if let Some(ref index_page) = index_page {
+ if !index_page.is_file() {
+ diag.struct_err("option `--index-page` argument must be a file").emit();
+ return Err(1);
+ }
+ }
+
+ let target = parse_target_triple(matches, error_format);
+
+ let show_coverage = matches.opt_present("show-coverage");
+
+ let crate_types = match parse_crate_types_from_list(matches.opt_strs("crate-type")) {
+ Ok(types) => types,
+ Err(e) => {
+ diag.struct_err(&format!("unknown crate type: {}", e)).emit();
+ return Err(1);
+ }
+ };
+
+ let output_format = match matches.opt_str("output-format") {
+ Some(s) => match OutputFormat::try_from(s.as_str()) {
+ Ok(out_fmt) => {
+ if !out_fmt.is_json() && show_coverage {
+ diag.struct_err(
+ "html output format isn't supported for the --show-coverage option",
+ )
+ .emit();
+ return Err(1);
+ }
+ out_fmt
+ }
+ Err(e) => {
+ diag.struct_err(&e).emit();
+ return Err(1);
+ }
+ },
+ None => OutputFormat::default(),
+ };
+ let crate_name = matches.opt_str("crate-name");
+ let proc_macro_crate = crate_types.contains(&CrateType::ProcMacro);
+ let playground_url = matches.opt_str("playground-url");
+ let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
+ let module_sorting = if matches.opt_present("sort-modules-by-appearance") {
+ ModuleSorting::DeclarationOrder
+ } else {
+ ModuleSorting::Alphabetical
+ };
+ let resource_suffix = matches.opt_str("resource-suffix").unwrap_or_default();
+ let enable_minification = !matches.opt_present("disable-minification");
+ let markdown_no_toc = matches.opt_present("markdown-no-toc");
+ let markdown_css = matches.opt_strs("markdown-css");
+ let markdown_playground_url = matches.opt_str("markdown-playground-url");
+ let crate_version = matches.opt_str("crate-version");
+ let enable_index_page = matches.opt_present("enable-index-page") || index_page.is_some();
+ let static_root_path = matches.opt_str("static-root-path");
+ let test_run_directory = matches.opt_str("test-run-directory").map(PathBuf::from);
+ let persist_doctests = matches.opt_str("persist-doctests").map(PathBuf::from);
+ let test_builder = matches.opt_str("test-builder").map(PathBuf::from);
+ let codegen_options_strs = matches.opt_strs("C");
+ let unstable_opts_strs = matches.opt_strs("Z");
+ let lib_strs = matches.opt_strs("L");
+ let extern_strs = matches.opt_strs("extern");
+ let runtool = matches.opt_str("runtool");
+ let runtool_args = matches.opt_strs("runtool-arg");
+ let enable_per_target_ignores = matches.opt_present("enable-per-target-ignores");
+ let document_private = matches.opt_present("document-private-items");
+ let document_hidden = matches.opt_present("document-hidden-items");
+ let run_check = matches.opt_present("check");
+ let generate_redirect_map = matches.opt_present("generate-redirect-map");
+ let show_type_layout = matches.opt_present("show-type-layout");
+ let nocapture = matches.opt_present("nocapture");
+ let generate_link_to_definition = matches.opt_present("generate-link-to-definition");
+ let extern_html_root_takes_precedence =
+ matches.opt_present("extern-html-root-takes-precedence");
+
+ if generate_link_to_definition && (show_coverage || output_format != OutputFormat::Html) {
+ diag.struct_err(
+ "--generate-link-to-definition option can only be used with HTML output format",
+ )
+ .emit();
+ return Err(1);
+ }
+
+ let scrape_examples_options = ScrapeExamplesOptions::new(matches, &diag)?;
+ let with_examples = matches.opt_strs("with-examples");
+ let call_locations = crate::scrape_examples::load_call_locations(with_examples, &diag)?;
+
+ Ok(Options {
+ input,
+ proc_macro_crate,
+ error_format,
+ diagnostic_width,
+ libs,
+ lib_strs,
+ externs,
+ extern_strs,
+ cfgs,
+ check_cfgs,
+ codegen_options,
+ codegen_options_strs,
+ unstable_opts,
+ unstable_opts_strs,
+ target,
+ edition,
+ maybe_sysroot,
+ lint_opts,
+ describe_lints,
+ lint_cap,
+ should_test,
+ test_args,
+ show_coverage,
+ crate_version,
+ test_run_directory,
+ persist_doctests,
+ runtool,
+ runtool_args,
+ enable_per_target_ignores,
+ test_builder,
+ run_check,
+ no_run,
+ nocapture,
+ render_options: RenderOptions {
+ output,
+ external_html,
+ id_map,
+ playground_url,
+ module_sorting,
+ themes,
+ extension_css,
+ extern_html_root_urls,
+ extern_html_root_takes_precedence,
+ default_settings,
+ resource_suffix,
+ enable_minification,
+ enable_index_page,
+ index_page,
+ static_root_path,
+ markdown_no_toc,
+ markdown_css,
+ markdown_playground_url,
+ document_private,
+ document_hidden,
+ generate_redirect_map,
+ show_type_layout,
+ unstable_features: rustc_feature::UnstableFeatures::from_environment(
+ crate_name.as_deref(),
+ ),
+ emit,
+ generate_link_to_definition,
+ call_locations,
+ no_emit_shared: false,
+ },
+ crate_name,
+ output_format,
+ json_unused_externs,
+ scrape_examples_options,
+ })
+ }
+
+ /// Returns `true` if the file given as `self.input` is a Markdown file.
+ pub(crate) fn markdown_input(&self) -> bool {
+ self.input.extension().map_or(false, |e| e == "md" || e == "markdown")
+ }
+}
+
+/// Prints deprecation warnings for deprecated options
+fn check_deprecated_options(matches: &getopts::Matches, diag: &rustc_errors::Handler) {
+ let deprecated_flags = [];
+
+ for &flag in deprecated_flags.iter() {
+ if matches.opt_present(flag) {
+ diag.struct_warn(&format!("the `{}` flag is deprecated", flag))
+ .note(
+ "see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ )
+ .emit();
+ }
+ }
+
+ let removed_flags = ["plugins", "plugin-path", "no-defaults", "passes", "input-format"];
+
+ for &flag in removed_flags.iter() {
+ if matches.opt_present(flag) {
+ let mut err = diag.struct_warn(&format!("the `{}` flag no longer functions", flag));
+ err.note(
+ "see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ );
+
+ if flag == "no-defaults" || flag == "passes" {
+ err.help("you may want to use --document-private-items");
+ } else if flag == "plugins" || flag == "plugin-path" {
+ err.warn("see CVE-2018-1000622");
+ }
+
+ err.emit();
+ }
+ }
+}
+
+/// Extracts `--extern-html-root-url` arguments from `matches` and returns a map of crate names to
+/// the given URLs. If an `--extern-html-root-url` argument was ill-formed, returns an error
+/// describing the issue.
+fn parse_extern_html_roots(
+ matches: &getopts::Matches,
+) -> Result<BTreeMap<String, String>, &'static str> {
+ let mut externs = BTreeMap::new();
+ for arg in &matches.opt_strs("extern-html-root-url") {
+ let (name, url) =
+ arg.split_once('=').ok_or("--extern-html-root-url must be of the form name=url")?;
+ externs.insert(name.to_string(), url.to_string());
+ }
+ Ok(externs)
+}
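+
+// For example, `--extern-html-root-url foo=https://example.com/foo` (with `foo` standing in for
+// a real crate name) maps `"foo"` to `"https://example.com/foo"` in the returned map, while an
+// argument without `=` is rejected with the error above.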
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
new file mode 100644
index 000000000..c48b25aea
--- /dev/null
+++ b/src/librustdoc/core.rs
@@ -0,0 +1,566 @@
+use rustc_ast::NodeId;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::sync::{self, Lrc};
+use rustc_errors::emitter::{Emitter, EmitterWriter};
+use rustc_errors::json::JsonEmitter;
+use rustc_feature::UnstableFeatures;
+use rustc_hir::def::{Namespace, Res};
+use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{HirId, Path, TraitCandidate};
+use rustc_interface::interface;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::middle::privacy::AccessLevels;
+use rustc_middle::ty::{ParamEnv, Ty, TyCtxt};
+use rustc_resolve as resolve;
+use rustc_session::config::{self, CrateType, ErrorOutputType};
+use rustc_session::lint;
+use rustc_session::DiagnosticOutput;
+use rustc_session::Session;
+use rustc_span::symbol::sym;
+use rustc_span::{source_map, Span, Symbol};
+
+use std::cell::RefCell;
+use std::mem;
+use std::rc::Rc;
+use std::sync::LazyLock;
+
+use crate::clean::inline::build_external_trait;
+use crate::clean::{self, ItemId, TraitWithExtraInfo};
+use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions};
+use crate::formats::cache::Cache;
+use crate::passes::collect_intra_doc_links::PreprocessedMarkdownLink;
+use crate::passes::{self, Condition::*};
+
+pub(crate) use rustc_session::config::{Input, Options, UnstableOptions};
+
+pub(crate) struct ResolverCaches {
+ pub(crate) markdown_links: Option<FxHashMap<String, Vec<PreprocessedMarkdownLink>>>,
+ pub(crate) doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option<Res<NodeId>>>,
+ /// Traits in scope for a given module.
+ /// See `collect_intra_doc_links::traits_implemented_by` for more details.
+ pub(crate) traits_in_scope: DefIdMap<Vec<TraitCandidate>>,
+ pub(crate) all_traits: Option<Vec<DefId>>,
+ pub(crate) all_trait_impls: Option<Vec<DefId>>,
+ pub(crate) all_macro_rules: FxHashMap<Symbol, Res<NodeId>>,
+}
+
+pub(crate) struct DocContext<'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+ /// Name resolver. Used for intra-doc links.
+ ///
+ /// The `Rc<RefCell<...>>` wrapping is needed because that is what's returned by
+ /// [`rustc_interface::Queries::expansion()`].
+ // FIXME: see if we can get rid of this RefCell somehow
+ pub(crate) resolver: Rc<RefCell<interface::BoxedResolver>>,
+ pub(crate) resolver_caches: ResolverCaches,
+ /// Used for normalization.
+ ///
+ /// Most of this logic is copied from rustc_lint::late.
+ pub(crate) param_env: ParamEnv<'tcx>,
+ /// Later on moved through `clean::Crate` into `cache`
+ pub(crate) external_traits: Rc<RefCell<FxHashMap<DefId, clean::TraitWithExtraInfo>>>,
+ /// Used while populating `external_traits` to ensure we don't process the same trait twice at
+ /// the same time.
+ pub(crate) active_extern_traits: FxHashSet<DefId>,
+ // The current set of parameter substitutions,
+ // for expanding type aliases at the HIR level:
+ /// Table `DefId` of type, lifetime, or const parameter -> substituted type, lifetime, or const
+ pub(crate) substs: FxHashMap<DefId, clean::SubstParam>,
+ /// Table synthetic type parameter for `impl Trait` in argument position -> bounds
+ pub(crate) impl_trait_bounds: FxHashMap<ImplTraitParam, Vec<clean::GenericBound>>,
+ /// Auto-trait or blanket impls processed so far, as `(self_ty, trait_def_id)`.
+ // FIXME(eddyb) make this a `ty::TraitRef<'tcx>` set.
+ pub(crate) generated_synthetics: FxHashSet<(Ty<'tcx>, DefId)>,
+ pub(crate) auto_traits: Vec<DefId>,
+ /// The options given to rustdoc that could be relevant to a pass.
+ pub(crate) render_options: RenderOptions,
+ /// This same cache is used throughout rustdoc, including in [`crate::html::render`].
+ pub(crate) cache: Cache,
+ /// Used by [`clean::inline`] to tell if an item has already been inlined.
+ pub(crate) inlined: FxHashSet<ItemId>,
+ /// Used by `calculate_doc_coverage`.
+ pub(crate) output_format: OutputFormat,
+ /// Used by `strip_private`.
+ pub(crate) show_coverage: bool,
+}
+
+impl<'tcx> DocContext<'tcx> {
+ pub(crate) fn sess(&self) -> &'tcx Session {
+ self.tcx.sess
+ }
+
+ pub(crate) fn with_param_env<T, F: FnOnce(&mut Self) -> T>(
+ &mut self,
+ def_id: DefId,
+ f: F,
+ ) -> T {
+ let old_param_env = mem::replace(&mut self.param_env, self.tcx.param_env(def_id));
+ let ret = f(self);
+ self.param_env = old_param_env;
+ ret
+ }
+
+ pub(crate) fn enter_resolver<F, R>(&self, f: F) -> R
+ where
+ F: FnOnce(&mut resolve::Resolver<'_>) -> R,
+ {
+ self.resolver.borrow_mut().access(f)
+ }
+
+ /// Call the closure with the given parameters set as
+ /// the substitutions for a type alias' RHS.
+ pub(crate) fn enter_alias<F, R>(
+ &mut self,
+ substs: FxHashMap<DefId, clean::SubstParam>,
+ f: F,
+ ) -> R
+ where
+ F: FnOnce(&mut Self) -> R,
+ {
+ let old_substs = mem::replace(&mut self.substs, substs);
+ let r = f(self);
+ self.substs = old_substs;
+ r
+ }
+
+ /// Like `hir().local_def_id_to_hir_id()`, but skips calling it on fake DefIds.
+ /// (This avoids a slice-index-out-of-bounds panic.)
+ pub(crate) fn as_local_hir_id(tcx: TyCtxt<'_>, item_id: ItemId) -> Option<HirId> {
+ match item_id {
+ ItemId::DefId(real_id) => {
+ real_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
+ }
+ // FIXME: Can this be `Some` for `Auto` or `Blanket`?
+ _ => None,
+ }
+ }
+
+ pub(crate) fn with_all_traits(&mut self, f: impl FnOnce(&mut Self, &[DefId])) {
+ let all_traits = self.resolver_caches.all_traits.take();
+ f(self, all_traits.as_ref().expect("`all_traits` are already borrowed"));
+ self.resolver_caches.all_traits = all_traits;
+ }
+
+ pub(crate) fn with_all_trait_impls(&mut self, f: impl FnOnce(&mut Self, &[DefId])) {
+ let all_trait_impls = self.resolver_caches.all_trait_impls.take();
+ f(self, all_trait_impls.as_ref().expect("`all_trait_impls` are already borrowed"));
+ self.resolver_caches.all_trait_impls = all_trait_impls;
+ }
+}
+
+/// Creates a new diagnostic `Handler` that can be used to emit warnings and errors.
+///
+/// If the given `error_format` is `ErrorOutputType::Json` and no `SourceMap` is given, a new one
+/// will be created for the handler.
+pub(crate) fn new_handler(
+ error_format: ErrorOutputType,
+ source_map: Option<Lrc<source_map::SourceMap>>,
+ diagnostic_width: Option<usize>,
+ unstable_opts: &UnstableOptions,
+) -> rustc_errors::Handler {
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let emitter: Box<dyn Emitter + sync::Send> = match error_format {
+ ErrorOutputType::HumanReadable(kind) => {
+ let (short, color_config) = kind.unzip();
+ Box::new(
+ EmitterWriter::stderr(
+ color_config,
+ source_map.map(|sm| sm as _),
+ None,
+ fallback_bundle,
+ short,
+ unstable_opts.teach,
+ diagnostic_width,
+ false,
+ )
+ .ui_testing(unstable_opts.ui_testing),
+ )
+ }
+ ErrorOutputType::Json { pretty, json_rendered } => {
+ let source_map = source_map.unwrap_or_else(|| {
+ Lrc::new(source_map::SourceMap::new(source_map::FilePathMapping::empty()))
+ });
+ Box::new(
+ JsonEmitter::stderr(
+ None,
+ source_map,
+ None,
+ fallback_bundle,
+ pretty,
+ json_rendered,
+ diagnostic_width,
+ false,
+ )
+ .ui_testing(unstable_opts.ui_testing),
+ )
+ }
+ };
+
+ rustc_errors::Handler::with_emitter_and_flags(
+ emitter,
+ unstable_opts.diagnostic_handler_flags(true),
+ )
+}
+
+/// Parse, resolve, and typecheck the given crate.
+pub(crate) fn create_config(
+ RustdocOptions {
+ input,
+ crate_name,
+ proc_macro_crate,
+ error_format,
+ diagnostic_width,
+ libs,
+ externs,
+ mut cfgs,
+ check_cfgs,
+ codegen_options,
+ unstable_opts,
+ target,
+ edition,
+ maybe_sysroot,
+ lint_opts,
+ describe_lints,
+ lint_cap,
+ scrape_examples_options,
+ ..
+ }: RustdocOptions,
+) -> rustc_interface::Config {
+ // Add the doc cfg into the doc build.
+ cfgs.push("doc".to_string());
+
+ let cpath = Some(input.clone());
+ let input = Input::File(input);
+
+ // By default, rustdoc ignores all lints.
+ // Specifically unblock lints relevant to documentation or the lint machinery itself.
+ let mut lints_to_show = vec![
+ // it's unclear whether these should be part of rustdoc directly (#77364)
+ rustc_lint::builtin::MISSING_DOCS.name.to_string(),
+ rustc_lint::builtin::INVALID_DOC_ATTRIBUTES.name.to_string(),
+ // these are definitely not part of rustdoc, but we want to warn on them anyway.
+ rustc_lint::builtin::RENAMED_AND_REMOVED_LINTS.name.to_string(),
+ rustc_lint::builtin::UNKNOWN_LINTS.name.to_string(),
+ rustc_lint::builtin::UNEXPECTED_CFGS.name.to_string(),
+ // this lint is needed to support `#[expect]` attributes
+ rustc_lint::builtin::UNFULFILLED_LINT_EXPECTATIONS.name.to_string(),
+ ];
+ lints_to_show.extend(crate::lint::RUSTDOC_LINTS.iter().map(|lint| lint.name.to_string()));
+
+ let (lint_opts, lint_caps) = crate::lint::init_lints(lints_to_show, lint_opts, |lint| {
+ Some((lint.name_lower(), lint::Allow))
+ });
+
+ let crate_types =
+ if proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] };
+ let test = scrape_examples_options.map(|opts| opts.scrape_tests).unwrap_or(false);
+ // plays with error output here!
+ let sessopts = config::Options {
+ maybe_sysroot,
+ search_paths: libs,
+ crate_types,
+ lint_opts,
+ lint_cap,
+ cg: codegen_options,
+ externs,
+ target_triple: target,
+ unstable_features: UnstableFeatures::from_environment(crate_name.as_deref()),
+ actually_rustdoc: true,
+ unstable_opts,
+ error_format,
+ diagnostic_width,
+ edition,
+ describe_lints,
+ crate_name,
+ test,
+ ..Options::default()
+ };
+
+ interface::Config {
+ opts: sessopts,
+ crate_cfg: interface::parse_cfgspecs(cfgs),
+ crate_check_cfg: interface::parse_check_cfg(check_cfgs),
+ input,
+ input_path: cpath,
+ output_file: None,
+ output_dir: None,
+ file_loader: None,
+ diagnostic_output: DiagnosticOutput::Default,
+ lint_caps,
+ parse_sess_created: None,
+ register_lints: Some(Box::new(crate::lint::register_lints)),
+ override_queries: Some(|_sess, providers, _external_providers| {
+ // Most lints will require typechecking, so just don't run them.
+ providers.lint_mod = |_, _| {};
+ // Prevent `rustc_typeck::check_crate` from calling `typeck` on all bodies.
+ providers.typeck_item_bodies = |_, _| {};
+ // hack so that `used_trait_imports` won't try to call typeck
+ providers.used_trait_imports = |_, _| {
+ static EMPTY_SET: LazyLock<FxHashSet<LocalDefId>> =
+ LazyLock::new(FxHashSet::default);
+ &EMPTY_SET
+ };
+ // In case typeck does end up being called, don't ICE if there were name resolution errors
+ providers.typeck = move |tcx, def_id| {
+ // Closures' tables come from their outermost function,
+ // as they are part of the same "inference environment".
+ // This avoids emitting errors for the parent twice (see similar code in `typeck_with_fallback`)
+ let typeck_root_def_id = tcx.typeck_root_def_id(def_id.to_def_id()).expect_local();
+ if typeck_root_def_id != def_id {
+ return tcx.typeck(typeck_root_def_id);
+ }
+
+ let hir = tcx.hir();
+ let body = hir.body(hir.body_owned_by(def_id));
+ debug!("visiting body for {:?}", def_id);
+ EmitIgnoredResolutionErrors::new(tcx).visit_body(body);
+ (rustc_interface::DEFAULT_QUERY_PROVIDERS.typeck)(tcx, def_id)
+ };
+ }),
+ make_codegen_backend: None,
+ registry: rustc_driver::diagnostics_registry(),
+ }
+}
+
+pub(crate) fn run_global_ctxt(
+ tcx: TyCtxt<'_>,
+ resolver: Rc<RefCell<interface::BoxedResolver>>,
+ resolver_caches: ResolverCaches,
+ show_coverage: bool,
+ render_options: RenderOptions,
+ output_format: OutputFormat,
+) -> (clean::Crate, RenderOptions, Cache) {
+ // Certain queries assume that some checks were run elsewhere
+ // (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
+ // so type-check everything other than function bodies in this crate before running lints.
+
+ // NOTE: this does not call `tcx.analysis()` so that we won't
+ // typeck function bodies or run the default rustc lints.
+ // (see `override_queries` in the `config`)
+
+ // HACK(jynelson) this calls an _extremely_ limited subset of `typeck`
+ // and might break if queries change their assumptions in the future.
+
+ // NOTE: This is copy/pasted from typeck/lib.rs and should be kept in sync with those changes.
+ tcx.sess.time("item_types_checking", || {
+ tcx.hir().for_each_module(|module| tcx.ensure().check_mod_item_types(module))
+ });
+ tcx.sess.abort_if_errors();
+ tcx.sess.time("missing_docs", || {
+ rustc_lint::check_crate(tcx, rustc_lint::builtin::MissingDoc::new);
+ });
+ tcx.sess.time("check_mod_attrs", || {
+ tcx.hir().for_each_module(|module| tcx.ensure().check_mod_attrs(module))
+ });
+ rustc_passes::stability::check_unused_or_stable_features(tcx);
+
+ let auto_traits = resolver_caches
+ .all_traits
+ .as_ref()
+ .expect("`all_traits` are already borrowed")
+ .iter()
+ .copied()
+ .filter(|&trait_def_id| tcx.trait_is_auto(trait_def_id))
+ .collect();
+ let access_levels = AccessLevels {
+ map: tcx.privacy_access_levels(()).map.iter().map(|(k, v)| (k.to_def_id(), *v)).collect(),
+ };
+
+ let mut ctxt = DocContext {
+ tcx,
+ resolver,
+ resolver_caches,
+ param_env: ParamEnv::empty(),
+ external_traits: Default::default(),
+ active_extern_traits: Default::default(),
+ substs: Default::default(),
+ impl_trait_bounds: Default::default(),
+ generated_synthetics: Default::default(),
+ auto_traits,
+ cache: Cache::new(access_levels, render_options.document_private),
+ inlined: FxHashSet::default(),
+ output_format,
+ render_options,
+ show_coverage,
+ };
+
+ // Small hack to force the Sized trait to be present.
+ //
+ // Note that in case of `#![no_core]`, the trait is not available.
+ if let Some(sized_trait_did) = ctxt.tcx.lang_items().sized_trait() {
+ let sized_trait = build_external_trait(&mut ctxt, sized_trait_did);
+ ctxt.external_traits
+ .borrow_mut()
+ .insert(sized_trait_did, TraitWithExtraInfo { trait_: sized_trait, is_notable: false });
+ }
+
+ debug!("crate: {:?}", tcx.hir().krate());
+
+ let mut krate = tcx.sess.time("clean_crate", || clean::krate(&mut ctxt));
+
+ if krate.module.doc_value().map(|d| d.is_empty()).unwrap_or(true) {
+ let help = format!(
+ "The following guide may be of use:\n\
+ {}/rustdoc/how-to-write-documentation.html",
+ crate::DOC_RUST_LANG_ORG_CHANNEL
+ );
+ tcx.struct_lint_node(
+ crate::lint::MISSING_CRATE_LEVEL_DOCS,
+ DocContext::as_local_hir_id(tcx, krate.module.item_id).unwrap(),
+ |lint| {
+ let mut diag =
+ lint.build("no documentation found for this crate's top-level module");
+ diag.help(&help);
+ diag.emit();
+ },
+ );
+ }
+
+ fn report_deprecated_attr(name: &str, diag: &rustc_errors::Handler, sp: Span) {
+ let mut msg =
+ diag.struct_span_warn(sp, &format!("the `#![doc({})]` attribute is deprecated", name));
+ msg.note(
+ "see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ );
+
+ if name == "no_default_passes" {
+ msg.help("`#![doc(no_default_passes)]` no longer functions; you may want to use `#![doc(document_private_items)]`");
+ } else if name.starts_with("passes") {
+ msg.help("`#![doc(passes = \"...\")]` no longer functions; you may want to use `#![doc(document_private_items)]`");
+ } else if name.starts_with("plugins") {
+ msg.warn("`#![doc(plugins = \"...\")]` no longer functions; see CVE-2018-1000622 <https://nvd.nist.gov/vuln/detail/CVE-2018-1000622>");
+ }
+
+ msg.emit();
+ }
+
+ // Process all of the crate attributes, extracting plugin metadata along
+ // with the passes which we are supposed to run.
+ for attr in krate.module.attrs.lists(sym::doc) {
+ let diag = ctxt.sess().diagnostic();
+
+ let name = attr.name_or_empty();
+ // `plugins = "..."`, `no_default_passes`, and `passes = "..."` have no effect
+ if attr.is_word() && name == sym::no_default_passes {
+ report_deprecated_attr("no_default_passes", diag, attr.span());
+ } else if attr.value_str().is_some() {
+ match name {
+ sym::passes => {
+ report_deprecated_attr("passes = \"...\"", diag, attr.span());
+ }
+ sym::plugins => {
+ report_deprecated_attr("plugins = \"...\"", diag, attr.span());
+ }
+ _ => (),
+ }
+ }
+
+ if attr.is_word() && name == sym::document_private_items {
+ ctxt.render_options.document_private = true;
+ }
+ }
+
+ info!("Executing passes");
+
+ for p in passes::defaults(show_coverage) {
+ let run = match p.condition {
+ Always => true,
+ WhenDocumentPrivate => ctxt.render_options.document_private,
+ WhenNotDocumentPrivate => !ctxt.render_options.document_private,
+ WhenNotDocumentHidden => !ctxt.render_options.document_hidden,
+ };
+ if run {
+ debug!("running pass {}", p.pass.name);
+ krate = tcx.sess.time(p.pass.name, || (p.pass.run)(krate, &mut ctxt));
+ }
+ }
+
+ tcx.sess.time("check_lint_expectations", || tcx.check_expectations(Some(sym::rustdoc)));
+
+ if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ rustc_errors::FatalError.raise();
+ }
+
+ krate = tcx.sess.time("create_format_cache", || Cache::populate(&mut ctxt, krate));
+
+ (krate, ctxt.render_options, ctxt.cache)
+}
+
+/// Due to <https://github.com/rust-lang/rust/pull/73566>,
+/// the name resolution pass may find errors that are never emitted.
+/// If typeck is called after this happens, then we'll get an ICE:
+/// 'Res::Error found but not reported'. To avoid this, emit the errors now.
+struct EmitIgnoredResolutionErrors<'tcx> {
+ tcx: TyCtxt<'tcx>,
+}
+
+impl<'tcx> EmitIgnoredResolutionErrors<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>) -> Self {
+ Self { tcx }
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for EmitIgnoredResolutionErrors<'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ // We need to recurse into nested closures,
+ // since those will fall back to the parent for type checking.
+ self.tcx.hir()
+ }
+
+ fn visit_path(&mut self, path: &'tcx Path<'_>, _id: HirId) {
+ debug!("visiting path {:?}", path);
+ if path.res == Res::Err {
+ // We have less context here than in rustc_resolve,
+ // so we can only emit the name and span.
+ // However we can give a hint that rustc_resolve will have more info.
+ let label = format!(
+ "could not resolve path `{}`",
+ path.segments
+ .iter()
+ .map(|segment| segment.ident.as_str())
+ .intersperse("::")
+ .collect::<String>()
+ );
+ let mut err = rustc_errors::struct_span_err!(
+ self.tcx.sess,
+ path.span,
+ E0433,
+ "failed to resolve: {}",
+ label
+ );
+ err.span_label(path.span, label);
+ err.note("this error was originally ignored because you are running `rustdoc`");
+ err.note("try running again with `rustc` or `cargo check` and you may get a more detailed error");
+ err.emit();
+ }
+ // We could have an outer resolution that succeeded,
+ // but with generic parameters that failed.
+ // Recurse into the segments so we catch those too.
+ intravisit::walk_path(self, path);
+ }
+}
+
+/// `DefId` or parameter index (`ty::ParamTy.index`) of a synthetic type parameter
+/// for `impl Trait` in argument position.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub(crate) enum ImplTraitParam {
+ DefId(DefId),
+ ParamIndex(u32),
+}
+
+impl From<DefId> for ImplTraitParam {
+ fn from(did: DefId) -> Self {
+ ImplTraitParam::DefId(did)
+ }
+}
+
+impl From<u32> for ImplTraitParam {
+ fn from(idx: u32) -> Self {
+ ImplTraitParam::ParamIndex(idx)
+ }
+}
diff --git a/src/librustdoc/docfs.rs b/src/librustdoc/docfs.rs
new file mode 100644
index 000000000..be066bdaf
--- /dev/null
+++ b/src/librustdoc/docfs.rs
@@ -0,0 +1,78 @@
+//! Rustdoc's FileSystem abstraction module.
+//!
+//! On Windows this indirects IO into threads to work around performance issues
+//! with Defender (and other similar virus scanners that do blocking operations).
+//! On other platforms this is a thin shim to fs.
+//!
+//! Only calls needed to permit this workaround have been abstracted: thus
+//! fs::read is still done directly via the fs module; if in the future rustdoc
+//! needs to read-after-write from a file, then it would be added to this
+//! abstraction.
+
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::string::ToString;
+use std::sync::mpsc::Sender;
+
+pub(crate) trait PathError {
+ fn new<S, P: AsRef<Path>>(e: S, path: P) -> Self
+ where
+ S: ToString + Sized;
+}
+
+pub(crate) struct DocFS {
+ sync_only: bool,
+ errors: Option<Sender<String>>,
+}
+
+impl DocFS {
+ pub(crate) fn new(errors: Sender<String>) -> DocFS {
+ DocFS { sync_only: false, errors: Some(errors) }
+ }
+
+ pub(crate) fn set_sync_only(&mut self, sync_only: bool) {
+ self.sync_only = sync_only;
+ }
+
+ pub(crate) fn close(&mut self) {
+ self.errors = None;
+ }
+
+ pub(crate) fn create_dir_all<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
+ // For now, dir creation isn't a huge time consideration, so do it
+ // synchronously, which avoids needing ordering between write() actions
+ // and directory creation.
+ fs::create_dir_all(path)
+ }
+
+ pub(crate) fn write<E>(
+ &self,
+ path: PathBuf,
+ contents: impl 'static + Send + AsRef<[u8]>,
+ ) -> Result<(), E>
+ where
+ E: PathError,
+ {
+ #[cfg(windows)]
+ if !self.sync_only {
+ // A possible future enhancement after more detailed profiling would
+ // be to create the file synchronously so errors are reported eagerly.
+ let sender = self.errors.clone().expect("can't write after closing");
+ rayon::spawn(move || {
+ fs::write(&path, contents).unwrap_or_else(|e| {
+ sender.send(format!("\"{}\": {}", path.display(), e)).unwrap_or_else(|_| {
+ panic!("failed to send error on \"{}\"", path.display())
+ })
+ });
+ });
+ } else {
+ fs::write(&path, contents).map_err(|e| E::new(e, path))?;
+ }
+
+ #[cfg(not(windows))]
+ fs::write(&path, contents).map_err(|e| E::new(e, path))?;
+
+ Ok(())
+ }
+}
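+
+// Rough usage sketch (assuming some `PathError` implementation for the error type): build the
+// handle with `DocFS::new(sender)`, call `create_dir_all` and `write` while rendering, and read
+// any failures from the receiving end of the channel; on Windows the writes run on rayon worker
+// threads unless `set_sync_only(true)` was called.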
diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs
new file mode 100644
index 000000000..35964e3ba
--- /dev/null
+++ b/src/librustdoc/doctest.rs
@@ -0,0 +1,1311 @@
+use rustc_ast as ast;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError};
+use rustc_hir as hir;
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_hir::intravisit;
+use rustc_hir::{HirId, CRATE_HIR_ID};
+use rustc_interface::interface;
+use rustc_middle::hir::map::Map;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::TyCtxt;
+use rustc_parse::maybe_new_parser_from_source_str;
+use rustc_parse::parser::attr::InnerAttrPolicy;
+use rustc_session::config::{self, CrateType, ErrorOutputType};
+use rustc_session::parse::ParseSess;
+use rustc_session::{lint, DiagnosticOutput, Session};
+use rustc_span::edition::Edition;
+use rustc_span::source_map::SourceMap;
+use rustc_span::symbol::sym;
+use rustc_span::Symbol;
+use rustc_span::{BytePos, FileName, Pos, Span, DUMMY_SP};
+use rustc_target::spec::TargetTriple;
+use tempfile::Builder as TempFileBuilder;
+
+use std::env;
+use std::io::{self, Write};
+use std::panic;
+use std::path::PathBuf;
+use std::process::{self, Command, Stdio};
+use std::str;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::{Arc, Mutex};
+
+use crate::clean::{types::AttributesExt, Attributes};
+use crate::config::Options as RustdocOptions;
+use crate::html::markdown::{self, ErrorCodes, Ignore, LangString};
+use crate::lint::init_lints;
+use crate::passes::span_of_attrs;
+
+/// Options that apply to all doctests in a crate or Markdown file (for `rustdoc foo.md`).
+#[derive(Clone, Default)]
+pub(crate) struct GlobalTestOptions {
+ /// Whether to disable the default `extern crate my_crate;` when creating doctests.
+ pub(crate) no_crate_inject: bool,
+ /// Additional crate-level attributes to add to doctests.
+ pub(crate) attrs: Vec<String>,
+}
+
+pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
+ let input = config::Input::File(options.input.clone());
+
+ let invalid_codeblock_attributes_name = crate::lint::INVALID_CODEBLOCK_ATTRIBUTES.name;
+
+ // See core::create_config for what's going on here.
+ let allowed_lints = vec![
+ invalid_codeblock_attributes_name.to_owned(),
+ lint::builtin::UNKNOWN_LINTS.name.to_owned(),
+ lint::builtin::RENAMED_AND_REMOVED_LINTS.name.to_owned(),
+ ];
+
+ let (lint_opts, lint_caps) = init_lints(allowed_lints, options.lint_opts.clone(), |lint| {
+ if lint.name == invalid_codeblock_attributes_name {
+ None
+ } else {
+ Some((lint.name_lower(), lint::Allow))
+ }
+ });
+
+ debug!(?lint_opts);
+
+ let crate_types =
+ if options.proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] };
+
+ let sessopts = config::Options {
+ maybe_sysroot: options.maybe_sysroot.clone(),
+ search_paths: options.libs.clone(),
+ crate_types,
+ lint_opts,
+ lint_cap: Some(options.lint_cap.unwrap_or(lint::Forbid)),
+ cg: options.codegen_options.clone(),
+ externs: options.externs.clone(),
+ unstable_features: options.render_options.unstable_features,
+ actually_rustdoc: true,
+ edition: options.edition,
+ target_triple: options.target.clone(),
+ crate_name: options.crate_name.clone(),
+ ..config::Options::default()
+ };
+
+ let mut cfgs = options.cfgs.clone();
+ cfgs.push("doc".to_owned());
+ cfgs.push("doctest".to_owned());
+ let config = interface::Config {
+ opts: sessopts,
+ crate_cfg: interface::parse_cfgspecs(cfgs),
+ crate_check_cfg: interface::parse_check_cfg(options.check_cfgs.clone()),
+ input,
+ input_path: None,
+ output_file: None,
+ output_dir: None,
+ file_loader: None,
+ diagnostic_output: DiagnosticOutput::Default,
+ lint_caps,
+ parse_sess_created: None,
+ register_lints: Some(Box::new(crate::lint::register_lints)),
+ override_queries: None,
+ make_codegen_backend: None,
+ registry: rustc_driver::diagnostics_registry(),
+ };
+
+ let test_args = options.test_args.clone();
+ let nocapture = options.nocapture;
+ let externs = options.externs.clone();
+ let json_unused_externs = options.json_unused_externs;
+
+ let (tests, unused_extern_reports, compiling_test_count) =
+ interface::run_compiler(config, |compiler| {
+ compiler.enter(|queries| {
+ let mut global_ctxt = queries.global_ctxt()?.take();
+
+ let collector = global_ctxt.enter(|tcx| {
+ let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID);
+
+ let opts = scrape_test_config(crate_attrs);
+ let enable_per_target_ignores = options.enable_per_target_ignores;
+ let mut collector = Collector::new(
+ tcx.crate_name(LOCAL_CRATE),
+ options,
+ false,
+ opts,
+ Some(compiler.session().parse_sess.clone_source_map()),
+ None,
+ enable_per_target_ignores,
+ );
+
+ let mut hir_collector = HirCollector {
+ sess: compiler.session(),
+ collector: &mut collector,
+ map: tcx.hir(),
+ codes: ErrorCodes::from(
+ compiler.session().opts.unstable_features.is_nightly_build(),
+ ),
+ tcx,
+ };
+ hir_collector.visit_testable(
+ "".to_string(),
+ CRATE_HIR_ID,
+ tcx.hir().span(CRATE_HIR_ID),
+ |this| tcx.hir().walk_toplevel_module(this),
+ );
+
+ collector
+ });
+ if compiler.session().diagnostic().has_errors_or_lint_errors().is_some() {
+ FatalError.raise();
+ }
+
+ let unused_extern_reports = collector.unused_extern_reports.clone();
+ let compiling_test_count = collector.compiling_test_count.load(Ordering::SeqCst);
+ let ret: Result<_, ErrorGuaranteed> =
+ Ok((collector.tests, unused_extern_reports, compiling_test_count));
+ ret
+ })
+ })?;
+
+ run_tests(test_args, nocapture, tests);
+
+ // Collect and warn about unused externs, but only if we've gotten
+ // reports for each doctest
+ if json_unused_externs.is_enabled() {
+ let unused_extern_reports: Vec<_> =
+ std::mem::take(&mut unused_extern_reports.lock().unwrap());
+ if unused_extern_reports.len() == compiling_test_count {
+ let extern_names = externs.iter().map(|(name, _)| name).collect::<FxHashSet<&String>>();
+ let mut unused_extern_names = unused_extern_reports
+ .iter()
+ .map(|uexts| uexts.unused_extern_names.iter().collect::<FxHashSet<&String>>())
+ .fold(extern_names, |uextsa, uextsb| {
+ uextsa.intersection(&uextsb).copied().collect::<FxHashSet<&String>>()
+ })
+ .iter()
+ .map(|v| (*v).clone())
+ .collect::<Vec<String>>();
+ unused_extern_names.sort();
+ // Take the most severe lint level
+ let lint_level = unused_extern_reports
+ .iter()
+ .map(|uexts| uexts.lint_level.as_str())
+ .max_by_key(|v| match *v {
+ "warn" => 1,
+ "deny" => 2,
+ "forbid" => 3,
+ // The allow lint level is not expected,
+ // as if allow is specified, no message
+ // is to be emitted.
+ v => unreachable!("Invalid lint level '{}'", v),
+ })
+ .unwrap_or("warn")
+ .to_string();
+ let uext = UnusedExterns { lint_level, unused_extern_names };
+ let unused_extern_json = serde_json::to_string(&uext).unwrap();
+ eprintln!("{unused_extern_json}");
+ }
+ }
+
+ Ok(())
+}
+
+pub(crate) fn run_tests(
+ mut test_args: Vec<String>,
+ nocapture: bool,
+ tests: Vec<test::TestDescAndFn>,
+) {
+ test_args.insert(0, "rustdoctest".to_string());
+ if nocapture {
+ test_args.push("--nocapture".to_string());
+ }
+ test::test_main(&test_args, tests, None);
+}
+
+// Look for `#![doc(test(no_crate_inject))]`, used by crates in the std facade.
+fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions {
+ use rustc_ast_pretty::pprust;
+
+ let mut opts = GlobalTestOptions { no_crate_inject: false, attrs: Vec::new() };
+
+ let test_attrs: Vec<_> = attrs
+ .iter()
+ .filter(|a| a.has_name(sym::doc))
+ .flat_map(|a| a.meta_item_list().unwrap_or_default())
+ .filter(|a| a.has_name(sym::test))
+ .collect();
+ let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[]));
+
+ for attr in attrs {
+ if attr.has_name(sym::no_crate_inject) {
+ opts.no_crate_inject = true;
+ }
+ if attr.has_name(sym::attr) {
+ if let Some(l) = attr.meta_item_list() {
+ for item in l {
+ opts.attrs.push(pprust::meta_list_item_to_string(item));
+ }
+ }
+ }
+ }
+
+ opts
+}
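+
+// For example, a crate root carrying `#![doc(test(no_crate_inject, attr(deny(warnings))))]`
+// yields `GlobalTestOptions { no_crate_inject: true, attrs: vec!["deny(warnings)".to_string()] }`.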
+
+/// Documentation test failure modes.
+enum TestFailure {
+ /// The test failed to compile.
+ CompileError,
+ /// The test is marked `compile_fail` but compiled successfully.
+ UnexpectedCompilePass,
+ /// The test failed to compile (as expected) but the compiler output did not contain all
+ /// expected error codes.
+ MissingErrorCodes(Vec<String>),
+ /// The test binary was unable to be executed.
+ ExecutionError(io::Error),
+ /// The test binary exited with a non-zero exit code.
+ ///
+ /// This typically means an assertion in the test failed or another form of panic occurred.
+ ExecutionFailure(process::Output),
+ /// The test is marked `should_panic` but the test binary executed successfully.
+ UnexpectedRunPass,
+}
+
+enum DirState {
+ Temp(tempfile::TempDir),
+ Perm(PathBuf),
+}
+
+impl DirState {
+ fn path(&self) -> &std::path::Path {
+ match self {
+ DirState::Temp(t) => t.path(),
+ DirState::Perm(p) => p.as_path(),
+ }
+ }
+}
+
+// NOTE: Keep this in sync with the equivalent structs in rustc
+// and cargo.
+// We could unify this struct with the one in rustc, but they have different
+// ownership semantics, so doing so would create wasteful allocations.
+#[derive(serde::Serialize, serde::Deserialize)]
+struct UnusedExterns {
+ /// Lint level of the unused_crate_dependencies lint
+ lint_level: String,
+ /// List of unused externs by their names.
+ unused_extern_names: Vec<String>,
+}
+
+fn run_test(
+ test: &str,
+ crate_name: &str,
+ line: usize,
+ rustdoc_options: RustdocOptions,
+ mut lang_string: LangString,
+ no_run: bool,
+ runtool: Option<String>,
+ runtool_args: Vec<String>,
+ target: TargetTriple,
+ opts: &GlobalTestOptions,
+ edition: Edition,
+ outdir: DirState,
+ path: PathBuf,
+ test_id: &str,
+ report_unused_externs: impl Fn(UnusedExterns),
+) -> Result<(), TestFailure> {
+ let (test, line_offset, supports_color) =
+ make_test(test, Some(crate_name), lang_string.test_harness, opts, edition, Some(test_id));
+
+ let output_file = outdir.path().join("rust_out");
+
+ let rustc_binary = rustdoc_options
+ .test_builder
+ .as_deref()
+ .unwrap_or_else(|| rustc_interface::util::rustc_path().expect("found rustc"));
+ let mut compiler = Command::new(&rustc_binary);
+ compiler.arg("--crate-type").arg("bin");
+ for cfg in &rustdoc_options.cfgs {
+ compiler.arg("--cfg").arg(&cfg);
+ }
+ if !rustdoc_options.check_cfgs.is_empty() {
+ compiler.arg("-Z").arg("unstable-options");
+ for check_cfg in &rustdoc_options.check_cfgs {
+ compiler.arg("--check-cfg").arg(&check_cfg);
+ }
+ }
+ if let Some(sysroot) = rustdoc_options.maybe_sysroot {
+ compiler.arg("--sysroot").arg(sysroot);
+ }
+ compiler.arg("--edition").arg(&edition.to_string());
+ compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", path);
+ compiler.env("UNSTABLE_RUSTDOC_TEST_LINE", format!("{}", line as isize - line_offset as isize));
+ compiler.arg("-o").arg(&output_file);
+ if lang_string.test_harness {
+ compiler.arg("--test");
+ }
+ if rustdoc_options.json_unused_externs.is_enabled() && !lang_string.compile_fail {
+ compiler.arg("--error-format=json");
+ compiler.arg("--json").arg("unused-externs");
+ compiler.arg("-Z").arg("unstable-options");
+ compiler.arg("-W").arg("unused_crate_dependencies");
+ }
+ for lib_str in &rustdoc_options.lib_strs {
+ compiler.arg("-L").arg(&lib_str);
+ }
+ for extern_str in &rustdoc_options.extern_strs {
+ compiler.arg("--extern").arg(&extern_str);
+ }
+ compiler.arg("-Ccodegen-units=1");
+ for codegen_options_str in &rustdoc_options.codegen_options_strs {
+ compiler.arg("-C").arg(&codegen_options_str);
+ }
+ for unstable_option_str in &rustdoc_options.unstable_opts_strs {
+ compiler.arg("-Z").arg(&unstable_option_str);
+ }
+ if no_run && !lang_string.compile_fail && rustdoc_options.persist_doctests.is_none() {
+ compiler.arg("--emit=metadata");
+ }
+ compiler.arg("--target").arg(match target {
+ TargetTriple::TargetTriple(s) => s,
+ TargetTriple::TargetJson { path_for_rustdoc, .. } => {
+ path_for_rustdoc.to_str().expect("target path must be valid unicode").to_string()
+ }
+ });
+ if let ErrorOutputType::HumanReadable(kind) = rustdoc_options.error_format {
+ let (short, color_config) = kind.unzip();
+
+ if short {
+ compiler.arg("--error-format").arg("short");
+ }
+
+ match color_config {
+ ColorConfig::Never => {
+ compiler.arg("--color").arg("never");
+ }
+ ColorConfig::Always => {
+ compiler.arg("--color").arg("always");
+ }
+ ColorConfig::Auto => {
+ compiler.arg("--color").arg(if supports_color { "always" } else { "never" });
+ }
+ }
+ }
+
+ compiler.arg("-");
+ compiler.stdin(Stdio::piped());
+ compiler.stderr(Stdio::piped());
+
+ let mut child = compiler.spawn().expect("Failed to spawn rustc process");
+ {
+ let stdin = child.stdin.as_mut().expect("Failed to open stdin");
+ stdin.write_all(test.as_bytes()).expect("could write out test sources");
+ }
+ let output = child.wait_with_output().expect("Failed to read stdout");
+
+ struct Bomb<'a>(&'a str);
+ impl Drop for Bomb<'_> {
+ fn drop(&mut self) {
+ eprint!("{}", self.0);
+ }
+ }
+ let mut out = str::from_utf8(&output.stderr)
+ .unwrap()
+ .lines()
+ .filter(|l| {
+ if let Ok(uext) = serde_json::from_str::<UnusedExterns>(l) {
+ report_unused_externs(uext);
+ false
+ } else {
+ true
+ }
+ })
+ .intersperse_with(|| "\n")
+ .collect::<String>();
+
+ // Add a \n to the end to properly terminate the last line,
+ // but only if there was output to be printed
+ if !out.is_empty() {
+ out.push('\n');
+ }
+
+ let _bomb = Bomb(&out);
+ match (output.status.success(), lang_string.compile_fail) {
+ (true, true) => {
+ return Err(TestFailure::UnexpectedCompilePass);
+ }
+ (true, false) => {}
+ (false, true) => {
+ if !lang_string.error_codes.is_empty() {
+ // We used to check if the output contained "error[{}]: " but since we added the
+ // colored output, we can't anymore because of the color escape characters before
+ // the ":".
+ lang_string.error_codes.retain(|err| !out.contains(&format!("error[{err}]")));
+
+ if !lang_string.error_codes.is_empty() {
+ return Err(TestFailure::MissingErrorCodes(lang_string.error_codes));
+ }
+ }
+ }
+ (false, false) => {
+ return Err(TestFailure::CompileError);
+ }
+ }
+
+ if no_run {
+ return Ok(());
+ }
+
+ // Run the code!
+ let mut cmd;
+
+ if let Some(tool) = runtool {
+ cmd = Command::new(tool);
+ cmd.args(runtool_args);
+ cmd.arg(output_file);
+ } else {
+ cmd = Command::new(output_file);
+ }
+ if let Some(run_directory) = rustdoc_options.test_run_directory {
+ cmd.current_dir(run_directory);
+ }
+
+ let result = if rustdoc_options.nocapture {
+ cmd.status().map(|status| process::Output {
+ status,
+ stdout: Vec::new(),
+ stderr: Vec::new(),
+ })
+ } else {
+ cmd.output()
+ };
+ match result {
+ Err(e) => return Err(TestFailure::ExecutionError(e)),
+ Ok(out) => {
+ if lang_string.should_panic && out.status.success() {
+ return Err(TestFailure::UnexpectedRunPass);
+ } else if !lang_string.should_panic && !out.status.success() {
+ return Err(TestFailure::ExecutionFailure(out));
+ }
+ }
+ }
+
+ Ok(())
+}
+
+/// Transforms a test into code that can be compiled into a Rust binary, and returns the number of
+/// lines before the test code begins as well as whether the output stream supports colors.
+pub(crate) fn make_test(
+ s: &str,
+ crate_name: Option<&str>,
+ dont_insert_main: bool,
+ opts: &GlobalTestOptions,
+ edition: Edition,
+ test_id: Option<&str>,
+) -> (String, usize, bool) {
+ let (crate_attrs, everything_else, crates) = partition_source(s, edition);
+ let everything_else = everything_else.trim();
+ let mut line_offset = 0;
+ let mut prog = String::new();
+ let mut supports_color = false;
+
+ if opts.attrs.is_empty() {
+ // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some
+ // lints that are commonly triggered in doctests. The crate-level test attributes are
+ // commonly used to make tests fail if they trigger warnings, so injecting `allow(unused)`
+ // when such attributes are present could cause some tests to pass when they shouldn't.
+ prog.push_str("#![allow(unused)]\n");
+ line_offset += 1;
+ }
+
+ // Next, any attributes that came from the crate root via #![doc(test(attr(...)))].
+ for attr in &opts.attrs {
+ prog.push_str(&format!("#![{attr}]\n"));
+ line_offset += 1;
+ }
+
+ // Now push any outer attributes from the example, assuming they
+ // are intended to be crate attributes.
+ prog.push_str(&crate_attrs);
+ prog.push_str(&crates);
+
+ // Uses librustc_ast to parse the doctest and find whether there's a `main` fn and whether
+ // the extern crate is already included.
+ let result = rustc_driver::catch_fatal_errors(|| {
+ rustc_span::create_session_if_not_set_then(edition, |_| {
+ use rustc_errors::emitter::{Emitter, EmitterWriter};
+ use rustc_errors::Handler;
+ use rustc_parse::parser::ForceCollect;
+ use rustc_span::source_map::FilePathMapping;
+
+ let filename = FileName::anon_source_code(s);
+ let source = crates + everything_else;
+
+ // Any errors in parsing should also appear when the doctest is compiled for real, so just
+ // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ supports_color = EmitterWriter::stderr(
+ ColorConfig::Auto,
+ None,
+ None,
+ fallback_bundle.clone(),
+ false,
+ false,
+ Some(80),
+ false,
+ )
+ .supports_color();
+
+ let emitter = EmitterWriter::new(
+ Box::new(io::sink()),
+ None,
+ None,
+ fallback_bundle,
+ false,
+ false,
+ false,
+ None,
+ false,
+ );
+
+ // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
+ let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let sess = ParseSess::with_span_handler(handler, sm);
+
+ let mut found_main = false;
+ let mut found_extern_crate = crate_name.is_none();
+ let mut found_macro = false;
+
+ let mut parser = match maybe_new_parser_from_source_str(&sess, filename, source) {
+ Ok(p) => p,
+ Err(errs) => {
+ drop(errs);
+ return (found_main, found_extern_crate, found_macro);
+ }
+ };
+
+ loop {
+ match parser.parse_item(ForceCollect::No) {
+ Ok(Some(item)) => {
+ if !found_main {
+ if let ast::ItemKind::Fn(..) = item.kind {
+ if item.ident.name == sym::main {
+ found_main = true;
+ }
+ }
+ }
+
+ if !found_extern_crate {
+ if let ast::ItemKind::ExternCrate(original) = item.kind {
+ // This code will never be reached if `crate_name` is none because
+ // `found_extern_crate` is initialized to `true` if it is none.
+ let crate_name = crate_name.unwrap();
+
+ match original {
+ Some(name) => found_extern_crate = name.as_str() == crate_name,
+ None => found_extern_crate = item.ident.as_str() == crate_name,
+ }
+ }
+ }
+
+ if !found_macro {
+ if let ast::ItemKind::MacCall(..) = item.kind {
+ found_macro = true;
+ }
+ }
+
+ if found_main && found_extern_crate {
+ break;
+ }
+ }
+ Ok(None) => break,
+ Err(e) => {
+ e.cancel();
+ break;
+ }
+ }
+
+ // The supplied slice is only used for diagnostics,
+ // which are swallowed here anyway.
+ parser.maybe_consume_incorrect_semicolon(&[]);
+ }
+
+ // Reset errors so that they won't be reported as compiler bugs when dropping the
+ // handler. Any errors in the tests will be reported when the test file is compiled.
+ // Note that we still need to cancel the errors above, otherwise `DiagnosticBuilder`
+ // will panic on drop.
+ sess.span_diagnostic.reset_err_count();
+
+ (found_main, found_extern_crate, found_macro)
+ })
+ });
+ let Ok((already_has_main, already_has_extern_crate, found_macro)) = result
+ else {
+ // If the parser panicked due to a fatal error, pass the test code through unchanged.
+ // The error will be reported during compilation.
+ return (s.to_owned(), 0, false);
+ };
+
+ // If a doctest's `fn main` is being masked by a wrapper macro, the parsing loop above won't
+ // see it. In that case, run the old text-based scan to see if they at least have a main
+ // function written inside a macro invocation. See
+ // https://github.com/rust-lang/rust/issues/56898
+ let already_has_main = if found_macro && !already_has_main {
+ s.lines()
+ .map(|line| {
+ let comment = line.find("//");
+ if let Some(comment_begins) = comment { &line[0..comment_begins] } else { line }
+ })
+ .any(|code| code.contains("fn main"))
+ } else {
+ already_has_main
+ };
+
+ // Don't inject `extern crate std` because it's already injected by the
+ // compiler.
+ if !already_has_extern_crate && !opts.no_crate_inject && crate_name != Some("std") {
+ if let Some(crate_name) = crate_name {
+ // Don't inject `extern crate` if the crate is never used.
+ // NOTE: this is terribly inaccurate because it doesn't actually
+ // parse the source, but only has false positives, not false
+ // negatives.
+ if s.contains(crate_name) {
+ prog.push_str(&format!("extern crate r#{crate_name};\n"));
+ line_offset += 1;
+ }
+ }
+ }
+
+ // FIXME: This code cannot handle no_std test cases yet
+ if dont_insert_main || already_has_main || prog.contains("![no_std]") {
+ prog.push_str(everything_else);
+ } else {
+ let returns_result = everything_else.trim_end().ends_with("(())");
+ // Give each doctest main function a unique name.
+ // This is for example needed for the tooling around `-C instrument-coverage`.
+ let inner_fn_name = if let Some(test_id) = test_id {
+ format!("_doctest_main_{test_id}")
+ } else {
+ "_inner".into()
+ };
+ let inner_attr = if test_id.is_some() { "#[allow(non_snake_case)] " } else { "" };
+ let (main_pre, main_post) = if returns_result {
+ (
+ format!(
+ "fn main() {{ {inner_attr}fn {inner_fn_name}() -> Result<(), impl core::fmt::Debug> {{\n",
+ ),
+ format!("\n}} {inner_fn_name}().unwrap() }}"),
+ )
+ } else if test_id.is_some() {
+ (
+ format!("fn main() {{ {inner_attr}fn {inner_fn_name}() {{\n",),
+ format!("\n}} {inner_fn_name}() }}"),
+ )
+ } else {
+ ("fn main() {\n".into(), "\n}".into())
+ };
+ // Note on newlines: We insert a line/newline *before*, and *after*
+ // the doctest and adjust the `line_offset` accordingly.
+ // In the case of `-C instrument-coverage`, this means that the generated
+ // inner `main` function spans from the doctest opening codeblock to the
+ // closing one. For example
+ // /// ``` <- start of the inner main
+ // /// <- code under doctest
+ // /// ``` <- end of the inner main
+ line_offset += 1;
+
+ prog.extend([&main_pre, everything_else, &main_post].iter().cloned());
+ }
+
+ debug!("final doctest:\n{prog}");
+
+ (prog, line_offset, supports_color)
+}
+
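+/// Returns `true` if `source` parses as a complete attribute, i.e. parsing succeeds and the
+/// opening bracket is not left unclosed. Used to detect `#![...]` attributes that span
+/// multiple lines in a doctest.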
+fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
+ if source.is_empty() {
+ // Empty content so nothing to check in here...
+ return true;
+ }
+ rustc_driver::catch_fatal_errors(|| {
+ rustc_span::create_session_if_not_set_then(edition, |_| {
+ use rustc_errors::emitter::EmitterWriter;
+ use rustc_errors::Handler;
+ use rustc_span::source_map::FilePathMapping;
+
+ let filename = FileName::anon_source_code(source);
+ // Any errors in parsing should also appear when the doctest is compiled for real, so just
+ // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+
+ let emitter = EmitterWriter::new(
+ Box::new(io::sink()),
+ None,
+ None,
+ fallback_bundle,
+ false,
+ false,
+ false,
+ None,
+ false,
+ );
+
+ let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let sess = ParseSess::with_span_handler(handler, sm);
+ let mut parser =
+ match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) {
+ Ok(p) => p,
+ Err(_) => {
+ debug!("Cannot build a parser to check mod attr so skipping...");
+ return true;
+ }
+ };
+ // If a parsing error happened, it's very likely that the attribute is incomplete.
+ if let Err(e) = parser.parse_attribute(InnerAttrPolicy::Permitted) {
+ e.cancel();
+ return false;
+ }
+ // We now check if there is an unclosed delimiter for the attribute. To do so, we look at
+ // the `unclosed_delims` and see if the opening square bracket was closed.
+ parser
+ .unclosed_delims()
+ .get(0)
+ .map(|unclosed| {
+ unclosed.unclosed_span.map(|s| s.lo()).unwrap_or(BytePos(0)) != BytePos(2)
+ })
+ .unwrap_or(true)
+ })
+ })
+ .unwrap_or(false)
+}
+
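+/// Splits a doctest's source into three buffers, returned as `(before, after, crates)`:
+/// `before` holds the leading inner attributes, `crates` holds `extern crate` lines, and
+/// `after` holds everything else.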
+fn partition_source(s: &str, edition: Edition) -> (String, String, String) {
+ #[derive(Copy, Clone, PartialEq)]
+ enum PartitionState {
+ Attrs,
+ Crates,
+ Other,
+ }
+ let mut state = PartitionState::Attrs;
+ let mut before = String::new();
+ let mut crates = String::new();
+ let mut after = String::new();
+
+ let mut mod_attr_pending = String::new();
+
+ for line in s.lines() {
+ let trimline = line.trim();
+
+ // FIXME(misdreavus): if a doc comment is placed on an extern crate statement, it will be
+ // shunted into "everything else"
+ match state {
+ PartitionState::Attrs => {
+ state = if trimline.starts_with("#![") {
+ if !check_if_attr_is_complete(line, edition) {
+ mod_attr_pending = line.to_owned();
+ } else {
+ mod_attr_pending.clear();
+ }
+ PartitionState::Attrs
+ } else if trimline.chars().all(|c| c.is_whitespace())
+ || (trimline.starts_with("//") && !trimline.starts_with("///"))
+ {
+ PartitionState::Attrs
+ } else if trimline.starts_with("extern crate")
+ || trimline.starts_with("#[macro_use] extern crate")
+ {
+ PartitionState::Crates
+ } else {
+ // First we check if the previous attribute was "complete"...
+ if !mod_attr_pending.is_empty() {
+ // If not, then we append the new line into the pending attribute to check
+ // if this time it's complete...
+ mod_attr_pending.push_str(line);
+ if !trimline.is_empty()
+ && check_if_attr_is_complete(&mod_attr_pending, edition)
+ {
+ // If it's complete, then we can clear the pending content.
+ mod_attr_pending.clear();
+ }
+ // In any case, this is considered as `PartitionState::Attrs` so it's
+ // prepended before rustdoc's inserts.
+ PartitionState::Attrs
+ } else {
+ PartitionState::Other
+ }
+ };
+ }
+ PartitionState::Crates => {
+ state = if trimline.starts_with("extern crate")
+ || trimline.starts_with("#[macro_use] extern crate")
+ || trimline.chars().all(|c| c.is_whitespace())
+ || (trimline.starts_with("//") && !trimline.starts_with("///"))
+ {
+ PartitionState::Crates
+ } else {
+ PartitionState::Other
+ };
+ }
+ PartitionState::Other => {}
+ }
+
+ match state {
+ PartitionState::Attrs => {
+ before.push_str(line);
+ before.push('\n');
+ }
+ PartitionState::Crates => {
+ crates.push_str(line);
+ crates.push('\n');
+ }
+ PartitionState::Other => {
+ after.push_str(line);
+ after.push('\n');
+ }
+ }
+ }
+
+ debug!("before:\n{before}");
+ debug!("crates:\n{crates}");
+ debug!("after:\n{after}");
+
+ (before, after, crates)
+}
+
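+/// Interface used to collect the doctests discovered while scanning documentation and
+/// standalone Markdown files.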
+pub(crate) trait Tester {
+ fn add_test(&mut self, test: String, config: LangString, line: usize);
+ fn get_line(&self) -> usize {
+ 0
+ }
+ fn register_header(&mut self, _name: &str, _level: u32) {}
+}
+
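+/// Collects the doctests found in a crate (or in a standalone Markdown file) and turns each
+/// one into a `libtest` test case.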
+pub(crate) struct Collector {
+ pub(crate) tests: Vec<test::TestDescAndFn>,
+
+ // The name of the test displayed to the user, separated by `::`.
+ //
+ // In tests from Rust source, this is the path to the item
+ // e.g., `["std", "vec", "Vec", "push"]`.
+ //
+    // In tests from a markdown file, these are the titles of all headers (h1 through h6)
+ // of the sections that contain the code block, e.g., if the markdown file is
+ // written as:
+ //
+ // ``````markdown
+ // # Title
+ //
+ // ## Subtitle
+ //
+ // ```rust
+ // assert!(true);
+ // ```
+ // ``````
+ //
+ // the `names` vector of that test will be `["Title", "Subtitle"]`.
+ names: Vec<String>,
+
+ rustdoc_options: RustdocOptions,
+ use_headers: bool,
+ enable_per_target_ignores: bool,
+ crate_name: Symbol,
+ opts: GlobalTestOptions,
+ position: Span,
+ source_map: Option<Lrc<SourceMap>>,
+ filename: Option<PathBuf>,
+ visited_tests: FxHashMap<(String, usize), usize>,
+ unused_extern_reports: Arc<Mutex<Vec<UnusedExterns>>>,
+ compiling_test_count: AtomicUsize,
+}
+
+impl Collector {
+ pub(crate) fn new(
+ crate_name: Symbol,
+ rustdoc_options: RustdocOptions,
+ use_headers: bool,
+ opts: GlobalTestOptions,
+ source_map: Option<Lrc<SourceMap>>,
+ filename: Option<PathBuf>,
+ enable_per_target_ignores: bool,
+ ) -> Collector {
+ Collector {
+ tests: Vec::new(),
+ names: Vec::new(),
+ rustdoc_options,
+ use_headers,
+ enable_per_target_ignores,
+ crate_name,
+ opts,
+ position: DUMMY_SP,
+ source_map,
+ filename,
+ visited_tests: FxHashMap::default(),
+ unused_extern_reports: Default::default(),
+ compiling_test_count: AtomicUsize::new(0),
+ }
+ }
+
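+    /// Builds the user-visible doctest name, e.g. `src/lib.rs - Struct::method (line 42)`.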
+ fn generate_name(&self, line: usize, filename: &FileName) -> String {
+ let mut item_path = self.names.join("::");
+ item_path.retain(|c| c != ' ');
+ if !item_path.is_empty() {
+ item_path.push(' ');
+ }
+ format!("{} - {}(line {})", filename.prefer_local(), item_path, line)
+ }
+
+ pub(crate) fn set_position(&mut self, position: Span) {
+ self.position = position;
+ }
+
+ fn get_filename(&self) -> FileName {
+ if let Some(ref source_map) = self.source_map {
+ let filename = source_map.span_to_filename(self.position);
+ if let FileName::Real(ref filename) = filename {
+ if let Ok(cur_dir) = env::current_dir() {
+ if let Some(local_path) = filename.local_path() {
+ if let Ok(path) = local_path.strip_prefix(&cur_dir) {
+ return path.to_owned().into();
+ }
+ }
+ }
+ }
+ filename
+ } else if let Some(ref filename) = self.filename {
+ filename.clone().into()
+ } else {
+ FileName::Custom("input".to_owned())
+ }
+ }
+}
+
+impl Tester for Collector {
+ fn add_test(&mut self, test: String, config: LangString, line: usize) {
+ let filename = self.get_filename();
+ let name = self.generate_name(line, &filename);
+ let crate_name = self.crate_name.to_string();
+ let opts = self.opts.clone();
+ let edition = config.edition.unwrap_or(self.rustdoc_options.edition);
+ let rustdoc_options = self.rustdoc_options.clone();
+ let runtool = self.rustdoc_options.runtool.clone();
+ let runtool_args = self.rustdoc_options.runtool_args.clone();
+ let target = self.rustdoc_options.target.clone();
+ let target_str = target.to_string();
+ let unused_externs = self.unused_extern_reports.clone();
+ let no_run = config.no_run || rustdoc_options.no_run;
+ if !config.compile_fail {
+ self.compiling_test_count.fetch_add(1, Ordering::SeqCst);
+ }
+
+ let path = match &filename {
+ FileName::Real(path) => {
+ if let Some(local_path) = path.local_path() {
+ local_path.to_path_buf()
+ } else {
+                    // Somehow we got the filename from the metadata of another crate; this should never happen.
+ unreachable!("doctest from a different crate");
+ }
+ }
+ _ => PathBuf::from(r"doctest.rs"),
+ };
+
+ // For example `module/file.rs` would become `module_file_rs`
+ let file = filename
+ .prefer_local()
+ .to_string_lossy()
+ .chars()
+ .map(|c| if c.is_ascii_alphanumeric() { c } else { '_' })
+ .collect::<String>();
+ let test_id = format!(
+ "{file}_{line}_{number}",
+ file = file,
+ line = line,
+ number = {
+            // Increments the test number if an entry for this file and line already
+            // exists, or creates a new entry with a test number of 0.
+ self.visited_tests.entry((file.clone(), line)).and_modify(|v| *v += 1).or_insert(0)
+ },
+ );
+ let outdir = if let Some(mut path) = rustdoc_options.persist_doctests.clone() {
+ path.push(&test_id);
+
+ if let Err(err) = std::fs::create_dir_all(&path) {
+ eprintln!("Couldn't create directory for doctest executables: {}", err);
+ panic::resume_unwind(Box::new(()));
+ }
+
+ DirState::Perm(path)
+ } else {
+ DirState::Temp(
+ TempFileBuilder::new()
+ .prefix("rustdoctest")
+ .tempdir()
+ .expect("rustdoc needs a tempdir"),
+ )
+ };
+
+ debug!("creating test {name}: {test}");
+ self.tests.push(test::TestDescAndFn {
+ desc: test::TestDesc {
+ name: test::DynTestName(name),
+ ignore: match config.ignore {
+ Ignore::All => true,
+ Ignore::None => false,
+ Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)),
+ },
+ ignore_message: None,
+ // compiler failures are test failures
+ should_panic: test::ShouldPanic::No,
+ compile_fail: config.compile_fail,
+ no_run,
+ test_type: test::TestType::DocTest,
+ },
+ testfn: test::DynTestFn(Box::new(move || {
+ let report_unused_externs = |uext| {
+ unused_externs.lock().unwrap().push(uext);
+ };
+ let res = run_test(
+ &test,
+ &crate_name,
+ line,
+ rustdoc_options,
+ config,
+ no_run,
+ runtool,
+ runtool_args,
+ target,
+ &opts,
+ edition,
+ outdir,
+ path,
+ &test_id,
+ report_unused_externs,
+ );
+
+ if let Err(err) = res {
+ match err {
+ TestFailure::CompileError => {
+ eprint!("Couldn't compile the test.");
+ }
+ TestFailure::UnexpectedCompilePass => {
+ eprint!("Test compiled successfully, but it's marked `compile_fail`.");
+ }
+ TestFailure::UnexpectedRunPass => {
+ eprint!("Test executable succeeded, but it's marked `should_panic`.");
+ }
+ TestFailure::MissingErrorCodes(codes) => {
+ eprint!("Some expected error codes were not found: {:?}", codes);
+ }
+ TestFailure::ExecutionError(err) => {
+ eprint!("Couldn't run the test: {err}");
+ if err.kind() == io::ErrorKind::PermissionDenied {
+ eprint!(" - maybe your tempdir is mounted with noexec?");
+ }
+ }
+ TestFailure::ExecutionFailure(out) => {
+ eprintln!("Test executable failed ({reason}).", reason = out.status);
+
+ // FIXME(#12309): An unfortunate side-effect of capturing the test
+ // executable's output is that the relative ordering between the test's
+ // stdout and stderr is lost. However, this is better than the
+ // alternative: if the test executable inherited the parent's I/O
+ // handles the output wouldn't be captured at all, even on success.
+ //
+ // The ordering could be preserved if the test process' stderr was
+ // redirected to stdout, but that functionality does not exist in the
+ // standard library, so it may not be portable enough.
+ let stdout = str::from_utf8(&out.stdout).unwrap_or_default();
+ let stderr = str::from_utf8(&out.stderr).unwrap_or_default();
+
+ if !stdout.is_empty() || !stderr.is_empty() {
+ eprintln!();
+
+ if !stdout.is_empty() {
+ eprintln!("stdout:\n{stdout}");
+ }
+
+ if !stderr.is_empty() {
+ eprintln!("stderr:\n{stderr}");
+ }
+ }
+ }
+ }
+
+ panic::resume_unwind(Box::new(()));
+ }
+ })),
+ });
+ }
+
+ fn get_line(&self) -> usize {
+ if let Some(ref source_map) = self.source_map {
+ let line = self.position.lo().to_usize();
+ let line = source_map.lookup_char_pos(BytePos(line as u32)).line;
+ if line > 0 { line - 1 } else { line }
+ } else {
+ 0
+ }
+ }
+
+ fn register_header(&mut self, name: &str, level: u32) {
+ if self.use_headers {
+ // We use these headings as test names, so it's good if
+ // they're valid identifiers.
+ let name = name
+ .chars()
+ .enumerate()
+ .map(|(i, c)| {
+ if (i == 0 && rustc_lexer::is_id_start(c))
+ || (i != 0 && rustc_lexer::is_id_continue(c))
+ {
+ c
+ } else {
+ '_'
+ }
+ })
+ .collect::<String>();
+
+ // Here we try to efficiently assemble the header titles into the
+ // test name in the form of `h1::h2::h3::h4::h5::h6`.
+ //
+ // Suppose that originally `self.names` contains `[h1, h2, h3]`...
+ let level = level as usize;
+ if level <= self.names.len() {
+ // ... Consider `level == 2`. All headers in the lower levels
+ // are irrelevant in this new level. So we should reset
+ // `self.names` to contain headers until <h2>, and replace that
+ // slot with the new name: `[h1, name]`.
+ self.names.truncate(level);
+ self.names[level - 1] = name;
+ } else {
+ // ... On the other hand, consider `level == 5`. This means we
+ // need to extend `self.names` to contain five headers. We fill
+ // in the missing level (<h4>) with `_`. Thus `self.names` will
+ // become `[h1, h2, h3, "_", name]`.
+ if level - 1 > self.names.len() {
+ self.names.resize(level - 1, "_".to_owned());
+ }
+ self.names.push(name);
+ }
+ }
+ }
+}
+
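+/// HIR visitor that walks the crate and feeds the doc comments attached to each item into the
+/// inner `Collector`.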
+struct HirCollector<'a, 'hir, 'tcx> {
+ sess: &'a Session,
+ collector: &'a mut Collector,
+ map: Map<'hir>,
+ codes: ErrorCodes,
+ tcx: TyCtxt<'tcx>,
+}
+
+impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
+ fn visit_testable<F: FnOnce(&mut Self)>(
+ &mut self,
+ name: String,
+ hir_id: HirId,
+ sp: Span,
+ nested: F,
+ ) {
+ let ast_attrs = self.tcx.hir().attrs(hir_id);
+ if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) {
+ if !cfg.matches(&self.sess.parse_sess, Some(self.sess.features_untracked())) {
+ return;
+ }
+ }
+
+ let has_name = !name.is_empty();
+ if has_name {
+ self.collector.names.push(name);
+ }
+
+        // The collapse-docs pass won't combine sugared/raw doc attributes, or included files with
+        // anything else; this will combine them for us.
+ let attrs = Attributes::from_ast(ast_attrs);
+ if let Some(doc) = attrs.collapsed_doc_value() {
+ // Use the outermost invocation, so that doctest names come from where the docs were written.
+ let span = ast_attrs
+ .span()
+ .map(|span| span.ctxt().outer_expn().expansion_cause().unwrap_or(span))
+ .unwrap_or(DUMMY_SP);
+ self.collector.set_position(span);
+ markdown::find_testable_code(
+ &doc,
+ self.collector,
+ self.codes,
+ self.collector.enable_per_target_ignores,
+ Some(&crate::html::markdown::ExtraInfo::new(
+ self.tcx,
+ hir_id,
+ span_of_attrs(&attrs).unwrap_or(sp),
+ )),
+ );
+ }
+
+ nested(self);
+
+ if has_name {
+ self.collector.names.pop();
+ }
+ }
+}
+
+impl<'a, 'hir, 'tcx> intravisit::Visitor<'hir> for HirCollector<'a, 'hir, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.map
+ }
+
+ fn visit_item(&mut self, item: &'hir hir::Item<'_>) {
+ let name = match &item.kind {
+ hir::ItemKind::Impl(impl_) => {
+ rustc_hir_pretty::id_to_string(&self.map, impl_.self_ty.hir_id)
+ }
+ _ => item.ident.to_string(),
+ };
+
+ self.visit_testable(name, item.hir_id(), item.span, |this| {
+ intravisit::walk_item(this, item);
+ });
+ }
+
+ fn visit_trait_item(&mut self, item: &'hir hir::TraitItem<'_>) {
+ self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ intravisit::walk_trait_item(this, item);
+ });
+ }
+
+ fn visit_impl_item(&mut self, item: &'hir hir::ImplItem<'_>) {
+ self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ intravisit::walk_impl_item(this, item);
+ });
+ }
+
+ fn visit_foreign_item(&mut self, item: &'hir hir::ForeignItem<'_>) {
+ self.visit_testable(item.ident.to_string(), item.hir_id(), item.span, |this| {
+ intravisit::walk_foreign_item(this, item);
+ });
+ }
+
+ fn visit_variant(
+ &mut self,
+ v: &'hir hir::Variant<'_>,
+ g: &'hir hir::Generics<'_>,
+ item_id: hir::HirId,
+ ) {
+ self.visit_testable(v.ident.to_string(), v.id, v.span, |this| {
+ intravisit::walk_variant(this, v, g, item_id);
+ });
+ }
+
+ fn visit_field_def(&mut self, f: &'hir hir::FieldDef<'_>) {
+ self.visit_testable(f.ident.to_string(), f.hir_id, f.span, |this| {
+ intravisit::walk_field_def(this, f);
+ });
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/doctest/tests.rs b/src/librustdoc/doctest/tests.rs
new file mode 100644
index 000000000..360d2259e
--- /dev/null
+++ b/src/librustdoc/doctest/tests.rs
@@ -0,0 +1,300 @@
+use super::{make_test, GlobalTestOptions};
+use rustc_span::edition::DEFAULT_EDITION;
+
+#[test]
+fn make_test_basic() {
+    // Basic use: wraps with `fn main`, adds `#![allow(unused)]`
+ let opts = GlobalTestOptions::default();
+ let input = "assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+fn main() {
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_crate_name_no_use() {
+ // If you give a crate name but *don't* use it within the test, it won't bother inserting
+ // the `extern crate` statement.
+ let opts = GlobalTestOptions::default();
+ let input = "assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+fn main() {
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_crate_name() {
+ // If you give a crate name and use it within the test, it will insert an `extern crate`
+ // statement before `fn main`.
+ let opts = GlobalTestOptions::default();
+ let input = "use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+extern crate r#asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 3));
+}
+
+#[test]
+fn make_test_no_crate_inject() {
+ // Even if you do use the crate within the test, setting `opts.no_crate_inject` will skip
+ // adding it anyway.
+ let opts = GlobalTestOptions { no_crate_inject: true, attrs: vec![] };
+ let input = "use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_ignore_std() {
+ // Even if you include a crate name, and use it in the doctest, we still won't include an
+ // `extern crate` statement if the crate is "std" -- that's included already by the
+ // compiler!
+ let opts = GlobalTestOptions::default();
+ let input = "use std::*;
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+fn main() {
+use std::*;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("std"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_manual_extern_crate() {
+ // When you manually include an `extern crate` statement in your doctest, `make_test`
+ // assumes you've included one for your own crate too.
+ let opts = GlobalTestOptions::default();
+ let input = "extern crate asdf;
+use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_manual_extern_crate_with_macro_use() {
+ let opts = GlobalTestOptions::default();
+ let input = "#[macro_use] extern crate asdf;
+use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+#[macro_use] extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_opts_attrs() {
+ // If you supplied some doctest attributes with `#![doc(test(attr(...)))]`, it will use
+ // those instead of the stock `#![allow(unused)]`.
+ let mut opts = GlobalTestOptions::default();
+ opts.attrs.push("feature(sick_rad)".to_string());
+ let input = "use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected = "#![feature(sick_rad)]
+extern crate r#asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 3));
+
+ // Adding more will also bump the returned line offset.
+ opts.attrs.push("feature(hella_dope)".to_string());
+ let expected = "#![feature(sick_rad)]
+#![feature(hella_dope)]
+extern crate r#asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 4));
+}
+
+#[test]
+fn make_test_crate_attrs() {
+ // Including inner attributes in your doctest will apply them to the whole "crate", pasting
+ // them outside the generated main function.
+ let opts = GlobalTestOptions::default();
+ let input = "#![feature(sick_rad)]
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+#![feature(sick_rad)]
+fn main() {
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_with_main() {
+ // Including your own `fn main` wrapper lets the test use it verbatim.
+ let opts = GlobalTestOptions::default();
+ let input = "fn main() {
+ assert_eq!(2+2, 4);
+}";
+ let expected = "#![allow(unused)]
+fn main() {
+ assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 1));
+}
+
+#[test]
+fn make_test_fake_main() {
+ // ... but putting it in a comment will still provide a wrapper.
+ let opts = GlobalTestOptions::default();
+ let input = "//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+//Ceci n'est pas une `fn main`
+fn main() {
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_dont_insert_main() {
+ // Even with that, if you set `dont_insert_main`, it won't create the `fn main` wrapper.
+ let opts = GlobalTestOptions::default();
+ let input = "//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);"
+ .to_string();
+ let (output, len, _) = make_test(input, None, true, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 1));
+}
+
+#[test]
+fn make_test_issues_21299_33731() {
+ let opts = GlobalTestOptions::default();
+
+ let input = "// fn main
+assert_eq!(2+2, 4);";
+
+ let expected = "#![allow(unused)]
+// fn main
+fn main() {
+assert_eq!(2+2, 4);
+}"
+ .to_string();
+
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+
+ let input = "extern crate hella_qwop;
+assert_eq!(asdf::foo, 4);";
+
+ let expected = "#![allow(unused)]
+extern crate hella_qwop;
+extern crate r#asdf;
+fn main() {
+assert_eq!(asdf::foo, 4);
+}"
+ .to_string();
+
+ let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 3));
+}
+
+#[test]
+fn make_test_main_in_macro() {
+ let opts = GlobalTestOptions::default();
+ let input = "#[macro_use] extern crate my_crate;
+test_wrapper! {
+ fn main() {}
+}";
+ let expected = "#![allow(unused)]
+#[macro_use] extern crate my_crate;
+test_wrapper! {
+ fn main() {}
+}"
+ .to_string();
+
+ let (output, len, _) = make_test(input, Some("my_crate"), false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 1));
+}
+
+#[test]
+fn make_test_returns_result() {
+ // creates an inner function and unwraps it
+ let opts = GlobalTestOptions::default();
+ let input = "use std::io;
+let mut input = String::new();
+io::stdin().read_line(&mut input)?;
+Ok::<(), io::Error>(())";
+ let expected = "#![allow(unused)]
+fn main() { fn _inner() -> Result<(), impl core::fmt::Debug> {
+use std::io;
+let mut input = String::new();
+io::stdin().read_line(&mut input)?;
+Ok::<(), io::Error>(())
+} _inner().unwrap() }"
+ .to_string();
+ let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
+ assert_eq!((output, len), (expected, 2));
+}
+
+#[test]
+fn make_test_named_wrapper() {
+ // creates an inner function with a specific name
+ let opts = GlobalTestOptions::default();
+ let input = "assert_eq!(2+2, 4);";
+ let expected = "#![allow(unused)]
+fn main() { #[allow(non_snake_case)] fn _doctest_main__some_unique_name() {
+assert_eq!(2+2, 4);
+} _doctest_main__some_unique_name() }"
+ .to_string();
+ let (output, len, _) =
+ make_test(input, None, false, &opts, DEFAULT_EDITION, Some("_some_unique_name"));
+ assert_eq!((output, len), (expected, 2));
+}
diff --git a/src/librustdoc/error.rs b/src/librustdoc/error.rs
new file mode 100644
index 000000000..6ed7eab1a
--- /dev/null
+++ b/src/librustdoc/error.rs
@@ -0,0 +1,59 @@
+use std::error;
+use std::fmt::{self, Formatter};
+use std::path::{Path, PathBuf};
+
+use crate::docfs::PathError;
+
+#[derive(Debug)]
+pub(crate) struct Error {
+ pub(crate) file: PathBuf,
+ pub(crate) error: String,
+}
+
+impl error::Error for Error {}
+
+impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ let file = self.file.display().to_string();
+ if file.is_empty() {
+ write!(f, "{}", self.error)
+ } else {
+ write!(f, "\"{}\": {}", self.file.display(), self.error)
+ }
+ }
+}
+
+impl PathError for Error {
+ fn new<S, P: AsRef<Path>>(e: S, path: P) -> Error
+ where
+ S: ToString + Sized,
+ {
+ Error { file: path.as_ref().to_path_buf(), error: e.to_string() }
+ }
+}
+
+#[macro_export]
+macro_rules! try_none {
+ ($e:expr, $file:expr) => {{
+ use std::io;
+ match $e {
+ Some(e) => e,
+ None => {
+ return Err(<crate::error::Error as crate::docfs::PathError>::new(
+ io::Error::new(io::ErrorKind::Other, "not found"),
+ $file,
+ ));
+ }
+ }
+ }};
+}
+
+#[macro_export]
+macro_rules! try_err {
+ ($e:expr, $file:expr) => {{
+ match $e {
+ Ok(e) => e,
+ Err(e) => return Err(Error::new(e, $file)),
+ }
+ }};
+}
diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs
new file mode 100644
index 000000000..37fd909c9
--- /dev/null
+++ b/src/librustdoc/externalfiles.rs
@@ -0,0 +1,107 @@
+use crate::html::markdown::{ErrorCodes, HeadingOffset, IdMap, Markdown, Playground};
+use crate::rustc_span::edition::Edition;
+use std::fs;
+use std::path::Path;
+use std::str;
+
+use serde::Serialize;
+
+#[derive(Clone, Debug, Serialize)]
+pub(crate) struct ExternalHtml {
+ /// Content that will be included inline in the `<head>` section of a
+ /// rendered Markdown file or generated documentation
+ pub(crate) in_header: String,
+ /// Content that will be included inline between `<body>` and the content of
+ /// a rendered Markdown file or generated documentation
+ pub(crate) before_content: String,
+ /// Content that will be included inline between the content and `</body>` of
+ /// a rendered Markdown file or generated documentation
+ pub(crate) after_content: String,
+}
+
+impl ExternalHtml {
+ pub(crate) fn load(
+ in_header: &[String],
+ before_content: &[String],
+ after_content: &[String],
+ md_before_content: &[String],
+ md_after_content: &[String],
+ nightly_build: bool,
+ diag: &rustc_errors::Handler,
+ id_map: &mut IdMap,
+ edition: Edition,
+ playground: &Option<Playground>,
+ ) -> Option<ExternalHtml> {
+ let codes = ErrorCodes::from(nightly_build);
+ let ih = load_external_files(in_header, diag)?;
+ let bc = load_external_files(before_content, diag)?;
+ let m_bc = load_external_files(md_before_content, diag)?;
+ let bc = format!(
+ "{}{}",
+ bc,
+ Markdown {
+ content: &m_bc,
+ links: &[],
+ ids: id_map,
+ error_codes: codes,
+ edition,
+ playground,
+ heading_offset: HeadingOffset::H2,
+ }
+ .into_string()
+ );
+ let ac = load_external_files(after_content, diag)?;
+ let m_ac = load_external_files(md_after_content, diag)?;
+ let ac = format!(
+ "{}{}",
+ ac,
+ Markdown {
+ content: &m_ac,
+ links: &[],
+ ids: id_map,
+ error_codes: codes,
+ edition,
+ playground,
+ heading_offset: HeadingOffset::H2,
+ }
+ .into_string()
+ );
+ Some(ExternalHtml { in_header: ih, before_content: bc, after_content: ac })
+ }
+}
+
+pub(crate) enum LoadStringError {
+ ReadFail,
+ BadUtf8,
+}
+
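+/// Reads a file into a UTF-8 string, emitting a diagnostic and returning an error variant on
+/// failure.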
+pub(crate) fn load_string<P: AsRef<Path>>(
+ file_path: P,
+ diag: &rustc_errors::Handler,
+) -> Result<String, LoadStringError> {
+ let file_path = file_path.as_ref();
+ let contents = match fs::read(file_path) {
+ Ok(bytes) => bytes,
+ Err(e) => {
+ diag.struct_err(&format!("error reading `{}`: {}", file_path.display(), e)).emit();
+ return Err(LoadStringError::ReadFail);
+ }
+ };
+ match str::from_utf8(&contents) {
+ Ok(s) => Ok(s.to_string()),
+ Err(_) => {
+ diag.struct_err(&format!("error reading `{}`: not UTF-8", file_path.display())).emit();
+ Err(LoadStringError::BadUtf8)
+ }
+ }
+}
+
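+/// Concatenates the contents of the given files, separated by newlines; returns `None` if any
+/// file fails to load.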
+fn load_external_files(names: &[String], diag: &rustc_errors::Handler) -> Option<String> {
+ let mut out = String::new();
+ for name in names {
+ let Ok(s) = load_string(name, diag) else { return None };
+ out.push_str(&s);
+ out.push('\n');
+ }
+ Some(out)
+}
diff --git a/src/librustdoc/fold.rs b/src/librustdoc/fold.rs
new file mode 100644
index 000000000..6b7e67e2c
--- /dev/null
+++ b/src/librustdoc/fold.rs
@@ -0,0 +1,103 @@
+use crate::clean::*;
+
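+/// Wraps the item's kind in `StrippedItem`, unless it is already stripped.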
+pub(crate) fn strip_item(mut item: Item) -> Item {
+ if !matches!(*item.kind, StrippedItem(..)) {
+ item.kind = Box::new(StrippedItem(item.kind));
+ }
+ item
+}
+
+pub(crate) trait DocFolder: Sized {
+ fn fold_item(&mut self, item: Item) -> Option<Item> {
+ Some(self.fold_item_recur(item))
+ }
+
+ /// don't override!
+ fn fold_inner_recur(&mut self, kind: ItemKind) -> ItemKind {
+ match kind {
+ StrippedItem(..) => unreachable!(),
+ ModuleItem(i) => ModuleItem(self.fold_mod(i)),
+ StructItem(mut i) => {
+ i.fields = i.fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ StructItem(i)
+ }
+ UnionItem(mut i) => {
+ i.fields = i.fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ UnionItem(i)
+ }
+ EnumItem(mut i) => {
+ i.variants = i.variants.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ EnumItem(i)
+ }
+ TraitItem(mut i) => {
+ i.items = i.items.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ TraitItem(i)
+ }
+ ImplItem(mut i) => {
+ i.items = i.items.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ ImplItem(i)
+ }
+ VariantItem(i) => match i {
+ Variant::Struct(mut j) => {
+ j.fields = j.fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ VariantItem(Variant::Struct(j))
+ }
+ Variant::Tuple(fields) => {
+ let fields = fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
+ VariantItem(Variant::Tuple(fields))
+ }
+ Variant::CLike => VariantItem(Variant::CLike),
+ },
+ ExternCrateItem { src: _ }
+ | ImportItem(_)
+ | FunctionItem(_)
+ | TypedefItem(_)
+ | OpaqueTyItem(_)
+ | StaticItem(_)
+ | ConstantItem(_)
+ | TraitAliasItem(_)
+ | TyMethodItem(_)
+ | MethodItem(_, _)
+ | StructFieldItem(_)
+ | ForeignFunctionItem(_)
+ | ForeignStaticItem(_)
+ | ForeignTypeItem
+ | MacroItem(_)
+ | ProcMacroItem(_)
+ | PrimitiveItem(_)
+ | TyAssocConstItem(..)
+ | AssocConstItem(..)
+ | TyAssocTypeItem(..)
+ | AssocTypeItem(..)
+ | KeywordItem => kind,
+ }
+ }
+
+ /// don't override!
+ fn fold_item_recur(&mut self, mut item: Item) -> Item {
+ item.kind = Box::new(match *item.kind {
+ StrippedItem(box i) => StrippedItem(Box::new(self.fold_inner_recur(i))),
+ _ => self.fold_inner_recur(*item.kind),
+ });
+ item
+ }
+
+ fn fold_mod(&mut self, m: Module) -> Module {
+ Module {
+ span: m.span,
+ items: m.items.into_iter().filter_map(|i| self.fold_item(i)).collect(),
+ }
+ }
+
+ fn fold_crate(&mut self, mut c: Crate) -> Crate {
+ c.module = self.fold_item(c.module).unwrap();
+
+ let external_traits = { std::mem::take(&mut *c.external_traits.borrow_mut()) };
+ for (k, mut v) in external_traits {
+ v.trait_.items = v.trait_.items.into_iter().filter_map(|i| self.fold_item(i)).collect();
+ c.external_traits.borrow_mut().insert(k, v);
+ }
+
+ c
+ }
+}
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
new file mode 100644
index 000000000..2b2691e53
--- /dev/null
+++ b/src/librustdoc/formats/cache.rs
@@ -0,0 +1,569 @@
+use std::mem;
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def_id::{CrateNum, DefId};
+use rustc_middle::middle::privacy::AccessLevels;
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_span::{sym, Symbol};
+
+use crate::clean::{self, types::ExternalLocation, ExternalCrate, ItemId, PrimitiveType};
+use crate::core::DocContext;
+use crate::fold::DocFolder;
+use crate::formats::item_type::ItemType;
+use crate::formats::Impl;
+use crate::html::format::join_with_double_colon;
+use crate::html::markdown::short_markdown_summary;
+use crate::html::render::search_index::get_function_type_for_search;
+use crate::html::render::IndexItem;
+
+/// This cache is used to store information about the [`clean::Crate`] being
+/// rendered in order to provide more useful documentation. This contains
+/// information like all implementors of a trait, all traits a type implements,
+/// documentation for all known traits, etc.
+///
+/// This structure purposefully does not implement `Clone` because it's intended
+/// to be a fairly large and expensive structure to clone. Instead this adheres
+/// to `Send` so it may be stored in an `Arc` instance and shared among the various
+/// rendering threads.
+#[derive(Default)]
+pub(crate) struct Cache {
+ /// Maps a type ID to all known implementations for that type. This is only
+ /// recognized for intra-crate [`clean::Type::Path`]s, and is used to print
+ /// out extra documentation on the page of an enum/struct.
+ ///
+ /// The values of the map are a list of implementations and documentation
+ /// found on that implementation.
+ pub(crate) impls: FxHashMap<DefId, Vec<Impl>>,
+
+ /// Maintains a mapping of local crate `DefId`s to the fully qualified name
+ /// and "short type description" of that node. This is used when generating
+ /// URLs when a type is being linked to. External paths are not located in
+ /// this map because the `External` type itself has all the information
+ /// necessary.
+ pub(crate) paths: FxHashMap<DefId, (Vec<Symbol>, ItemType)>,
+
+ /// Similar to `paths`, but only holds external paths. This is only used for
+ /// generating explicit hyperlinks to other crates.
+ pub(crate) external_paths: FxHashMap<DefId, (Vec<Symbol>, ItemType)>,
+
+ /// Maps local `DefId`s of exported types to fully qualified paths.
+ /// Unlike 'paths', this mapping ignores any renames that occur
+ /// due to 'use' statements.
+ ///
+ /// This map is used when writing out the special 'implementors'
+ /// javascript file. By using the exact path that the type
+ /// is declared with, we ensure that each path will be identical
+ /// to the path used if the corresponding type is inlined. By
+ /// doing this, we can detect duplicate impls on a trait page, and only display
+ /// the impl for the inlined type.
+ pub(crate) exact_paths: FxHashMap<DefId, Vec<Symbol>>,
+
+ /// This map contains information about all known traits of this crate.
+ /// Implementations of a crate should inherit the documentation of the
+ /// parent trait if no extra documentation is specified, and default methods
+ /// should show up in documentation about trait implementations.
+ pub(crate) traits: FxHashMap<DefId, clean::TraitWithExtraInfo>,
+
+    /// When rendering traits, it's often useful to be able to list all
+    /// implementors of the trait, and this mapping is exactly that: a mapping
+    /// of trait ids to the list of known implementors of the trait.
+ pub(crate) implementors: FxHashMap<DefId, Vec<Impl>>,
+
+ /// Cache of where external crate documentation can be found.
+ pub(crate) extern_locations: FxHashMap<CrateNum, ExternalLocation>,
+
+ /// Cache of where documentation for primitives can be found.
+ pub(crate) primitive_locations: FxHashMap<clean::PrimitiveType, DefId>,
+
+    // Note that external items to which `doc(hidden)` applies are shown as
+    // non-reachable, while local items aren't. This is because we're reusing
+ // the access levels from the privacy check pass.
+ pub(crate) access_levels: AccessLevels<DefId>,
+
+ /// The version of the crate being documented, if given from the `--crate-version` flag.
+ pub(crate) crate_version: Option<String>,
+
+ /// Whether to document private items.
+ /// This is stored in `Cache` so it doesn't need to be passed through all rustdoc functions.
+ pub(crate) document_private: bool,
+
+ /// Crates marked with [`#[doc(masked)]`][doc_masked].
+ ///
+ /// [doc_masked]: https://doc.rust-lang.org/nightly/unstable-book/language-features/doc-masked.html
+ pub(crate) masked_crates: FxHashSet<CrateNum>,
+
+ // Private fields only used when initially crawling a crate to build a cache
+ stack: Vec<Symbol>,
+ parent_stack: Vec<ParentStackItem>,
+ stripped_mod: bool,
+
+ pub(crate) search_index: Vec<IndexItem>,
+
+    // In the rare case where a structure is defined in one module but implemented
+    // in another, if the implementing module is parsed before the defining module,
+    // then the fully qualified name of the structure isn't present in `paths`
+    // yet when its implementation methods are being indexed. Such methods and
+    // their parent id are cached here and indexed at the end of crate parsing.
+ pub(crate) orphan_impl_items: Vec<OrphanImplItem>,
+
+ // Similarly to `orphan_impl_items`, sometimes trait impls are picked up
+ // even though the trait itself is not exported. This can happen if a trait
+ // was defined in function/expression scope, since the impl will be picked
+ // up by `collect-trait-impls` but the trait won't be scraped out in the HIR
+ // crawl. In order to prevent crashes when looking for notable traits or
+ // when gathering trait documentation on a type, hold impls here while
+ // folding and add them to the cache later on if we find the trait.
+ orphan_trait_impls: Vec<(DefId, FxHashSet<DefId>, Impl)>,
+
+ /// All intra-doc links resolved so far.
+ ///
+ /// Links are indexed by the DefId of the item they document.
+ pub(crate) intra_doc_links: FxHashMap<ItemId, Vec<clean::ItemLink>>,
+    /// Cfgs that have been hidden via `#![doc(cfg_hide(...))]`.
+ pub(crate) hidden_cfg: FxHashSet<clean::cfg::Cfg>,
+}
+
+/// This struct is used to wrap the `cache` and `tcx` in order to run `DocFolder`.
+struct CacheBuilder<'a, 'tcx> {
+ cache: &'a mut Cache,
+ /// This field is used to prevent duplicated impl blocks.
+ impl_ids: FxHashMap<DefId, FxHashSet<DefId>>,
+ tcx: TyCtxt<'tcx>,
+}
+
+impl Cache {
+ pub(crate) fn new(access_levels: AccessLevels<DefId>, document_private: bool) -> Self {
+ Cache { access_levels, document_private, ..Cache::default() }
+ }
+
+ /// Populates the `Cache` with more data. The returned `Crate` will be missing some data that was
+ /// in `krate` due to the data being moved into the `Cache`.
+ pub(crate) fn populate(cx: &mut DocContext<'_>, mut krate: clean::Crate) -> clean::Crate {
+ let tcx = cx.tcx;
+
+ // Crawl the crate to build various caches used for the output
+ debug!(?cx.cache.crate_version);
+ cx.cache.traits = krate.external_traits.take();
+
+ // Cache where all our extern crates are located
+ // FIXME: this part is specific to HTML so it'd be nice to remove it from the common code
+ for &crate_num in cx.tcx.crates(()) {
+ let e = ExternalCrate { crate_num };
+
+ let name = e.name(tcx);
+ let render_options = &cx.render_options;
+ let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u);
+ let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence;
+ let dst = &render_options.output;
+ let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx);
+ cx.cache.extern_locations.insert(e.crate_num, location);
+ cx.cache.external_paths.insert(e.def_id(), (vec![name], ItemType::Module));
+ }
+
+ // FIXME: avoid this clone (requires implementing Default manually)
+ cx.cache.primitive_locations = PrimitiveType::primitive_locations(tcx).clone();
+ for (prim, &def_id) in &cx.cache.primitive_locations {
+ let crate_name = tcx.crate_name(def_id.krate);
+ // Recall that we only allow primitive modules to be at the root-level of the crate.
+ // If that restriction is ever lifted, this will have to include the relative paths instead.
+ cx.cache
+ .external_paths
+ .insert(def_id, (vec![crate_name, prim.as_sym()], ItemType::Primitive));
+ }
+
+ let (krate, mut impl_ids) = {
+ let mut cache_builder =
+ CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: FxHashMap::default() };
+ krate = cache_builder.fold_crate(krate);
+ (krate, cache_builder.impl_ids)
+ };
+
+ for (trait_did, dids, impl_) in cx.cache.orphan_trait_impls.drain(..) {
+ if cx.cache.traits.contains_key(&trait_did) {
+ for did in dids {
+ if impl_ids.entry(did).or_default().insert(impl_.def_id()) {
+ cx.cache.impls.entry(did).or_default().push(impl_.clone());
+ }
+ }
+ }
+ }
+
+ krate
+ }
+}
+
+impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
+ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
+ if item.item_id.is_local() {
+ debug!("folding {} \"{:?}\", id {:?}", item.type_(), item.name, item.item_id);
+ }
+
+ // If this is a stripped module,
+ // we don't want it or its children in the search index.
+ let orig_stripped_mod = match *item.kind {
+ clean::StrippedItem(box clean::ModuleItem(..)) => {
+ mem::replace(&mut self.cache.stripped_mod, true)
+ }
+ _ => self.cache.stripped_mod,
+ };
+
+ // If the impl is from a masked crate or references something from a
+ // masked crate then remove it completely.
+ if let clean::ImplItem(ref i) = *item.kind {
+ if self.cache.masked_crates.contains(&item.item_id.krate())
+ || i.trait_
+ .as_ref()
+ .map_or(false, |t| self.cache.masked_crates.contains(&t.def_id().krate))
+ || i.for_
+ .def_id(self.cache)
+ .map_or(false, |d| self.cache.masked_crates.contains(&d.krate))
+ {
+ return None;
+ }
+ }
+
+ // Propagate a trait method's documentation to all implementors of the
+ // trait.
+ if let clean::TraitItem(ref t) = *item.kind {
+ self.cache.traits.entry(item.item_id.expect_def_id()).or_insert_with(|| {
+ clean::TraitWithExtraInfo {
+ trait_: t.clone(),
+ is_notable: item.attrs.has_doc_flag(sym::notable_trait),
+ }
+ });
+ }
+
+ // Collect all the implementors of traits.
+ if let clean::ImplItem(ref i) = *item.kind {
+ if let Some(trait_) = &i.trait_ {
+ if !i.kind.is_blanket() {
+ self.cache
+ .implementors
+ .entry(trait_.def_id())
+ .or_default()
+ .push(Impl { impl_item: item.clone() });
+ }
+ }
+ }
+
+ // Index this method for searching later on.
+ if let Some(ref s) = item.name.or_else(|| {
+ if item.is_stripped() {
+ None
+ } else if let clean::ImportItem(ref i) = *item.kind &&
+ let clean::ImportKind::Simple(s) = i.kind {
+ Some(s)
+ } else {
+ None
+ }
+ }) {
+ let (parent, is_inherent_impl_item) = match *item.kind {
+ clean::StrippedItem(..) => ((None, None), false),
+ clean::AssocConstItem(..) | clean::AssocTypeItem(..)
+ if self
+ .cache
+ .parent_stack
+ .last()
+ .map_or(false, |parent| parent.is_trait_impl()) =>
+ {
+ // skip associated items in trait impls
+ ((None, None), false)
+ }
+ clean::TyMethodItem(..)
+ | clean::TyAssocConstItem(..)
+ | clean::TyAssocTypeItem(..)
+ | clean::StructFieldItem(..)
+ | clean::VariantItem(..) => (
+ (
+ Some(
+ self.cache
+ .parent_stack
+ .last()
+ .expect("parent_stack is empty")
+ .item_id()
+ .expect_def_id(),
+ ),
+ Some(&self.cache.stack[..self.cache.stack.len() - 1]),
+ ),
+ false,
+ ),
+ clean::MethodItem(..) | clean::AssocConstItem(..) | clean::AssocTypeItem(..) => {
+ if self.cache.parent_stack.is_empty() {
+ ((None, None), false)
+ } else {
+ let last = self.cache.parent_stack.last().expect("parent_stack is empty 2");
+ let did = match &*last {
+ ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache),
+ ParentStackItem::Type(item_id) => item_id.as_def_id(),
+ };
+ let path = match did.and_then(|did| self.cache.paths.get(&did)) {
+                            // The current stack doesn't necessarily correspond
+                            // to where the type was defined. On the other
+                            // hand, `paths` always has the right
+                            // information if present.
+ Some(&(ref fqp, _)) => Some(&fqp[..fqp.len() - 1]),
+ None => None,
+ };
+ ((did, path), true)
+ }
+ }
+ _ => ((None, Some(&*self.cache.stack)), false),
+ };
+
+ match parent {
+ (parent, Some(path)) if is_inherent_impl_item || !self.cache.stripped_mod => {
+ debug_assert!(!item.is_stripped());
+
+ // A crate has a module at its root, containing all items,
+ // which should not be indexed. The crate-item itself is
+ // inserted later on when serializing the search-index.
+ if item.item_id.as_def_id().map_or(false, |idx| !idx.is_crate_root()) {
+ let desc = item.doc_value().map_or_else(String::new, |x| {
+ short_markdown_summary(x.as_str(), &item.link_names(self.cache))
+ });
+ self.cache.search_index.push(IndexItem {
+ ty: item.type_(),
+ name: s.to_string(),
+ path: join_with_double_colon(path),
+ desc,
+ parent,
+ parent_idx: None,
+ search_type: get_function_type_for_search(
+ &item,
+ self.tcx,
+ clean_impl_generics(self.cache.parent_stack.last()).as_ref(),
+ self.cache,
+ ),
+ aliases: item.attrs.get_doc_aliases(),
+ });
+ }
+ }
+ (Some(parent), None) if is_inherent_impl_item => {
+ // We have a parent, but we don't know where they're
+ // defined yet. Wait for later to index this item.
+ let impl_generics = clean_impl_generics(self.cache.parent_stack.last());
+ self.cache.orphan_impl_items.push(OrphanImplItem {
+ parent,
+ item: item.clone(),
+ impl_generics,
+ });
+ }
+ _ => {}
+ }
+ }
+
+ // Keep track of the fully qualified path for this item.
+ let pushed = match item.name {
+ Some(n) if !n.is_empty() => {
+ self.cache.stack.push(n);
+ true
+ }
+ _ => false,
+ };
+
+ match *item.kind {
+ clean::StructItem(..)
+ | clean::EnumItem(..)
+ | clean::TypedefItem(..)
+ | clean::TraitItem(..)
+ | clean::TraitAliasItem(..)
+ | clean::FunctionItem(..)
+ | clean::ModuleItem(..)
+ | clean::ForeignFunctionItem(..)
+ | clean::ForeignStaticItem(..)
+ | clean::ConstantItem(..)
+ | clean::StaticItem(..)
+ | clean::UnionItem(..)
+ | clean::ForeignTypeItem
+ | clean::MacroItem(..)
+ | clean::ProcMacroItem(..)
+ | clean::VariantItem(..) => {
+ if !self.cache.stripped_mod {
+ // Re-exported items mean that the same id can show up twice
+ // in the rustdoc ast that we're looking at. We know,
+ // however, that a re-exported item doesn't show up in the
+ // `public_items` map, so we can skip inserting into the
+ // paths map if there was already an entry present and we're
+ // not a public item.
+ if !self.cache.paths.contains_key(&item.item_id.expect_def_id())
+ || self.cache.access_levels.is_public(item.item_id.expect_def_id())
+ {
+ self.cache.paths.insert(
+ item.item_id.expect_def_id(),
+ (self.cache.stack.clone(), item.type_()),
+ );
+ }
+ }
+ }
+ clean::PrimitiveItem(..) => {
+ self.cache
+ .paths
+ .insert(item.item_id.expect_def_id(), (self.cache.stack.clone(), item.type_()));
+ }
+
+ clean::ExternCrateItem { .. }
+ | clean::ImportItem(..)
+ | clean::OpaqueTyItem(..)
+ | clean::ImplItem(..)
+ | clean::TyMethodItem(..)
+ | clean::MethodItem(..)
+ | clean::StructFieldItem(..)
+ | clean::TyAssocConstItem(..)
+ | clean::AssocConstItem(..)
+ | clean::TyAssocTypeItem(..)
+ | clean::AssocTypeItem(..)
+ | clean::StrippedItem(..)
+ | clean::KeywordItem => {
+ // FIXME: Do these need handling?
+ // The person writing this comment doesn't know.
+ // So would rather leave them to an expert,
+ // as at least the list is better than `_ => {}`.
+ }
+ }
+
+ // Maintain the parent stack.
+ let (item, parent_pushed) = match *item.kind {
+ clean::TraitItem(..)
+ | clean::EnumItem(..)
+ | clean::ForeignTypeItem
+ | clean::StructItem(..)
+ | clean::UnionItem(..)
+ | clean::VariantItem(..)
+ | clean::ImplItem(..) => {
+ self.cache.parent_stack.push(ParentStackItem::new(&item));
+ (self.fold_item_recur(item), true)
+ }
+ _ => (self.fold_item_recur(item), false),
+ };
+
+ // Once we've recursively found all the generics, hoard off all the
+ // implementations elsewhere.
+ let ret = if let clean::Item { kind: box clean::ImplItem(ref i), .. } = item {
+ // Figure out the id of this impl. This may map to a
+ // primitive rather than always to a struct/enum.
+ // Note: matching twice to restrict the lifetime of the `i` borrow.
+ let mut dids = FxHashSet::default();
+ match i.for_ {
+ clean::Type::Path { ref path }
+ | clean::BorrowedRef { type_: box clean::Type::Path { ref path }, .. } => {
+ dids.insert(path.def_id());
+ if let Some(generics) = path.generics() &&
+ let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).kind() &&
+ adt.is_fundamental() {
+ for ty in generics {
+ if let Some(did) = ty.def_id(self.cache) {
+ dids.insert(did);
+ }
+ }
+ }
+ }
+ clean::DynTrait(ref bounds, _)
+ | clean::BorrowedRef { type_: box clean::DynTrait(ref bounds, _), .. } => {
+ dids.insert(bounds[0].trait_.def_id());
+ }
+ ref t => {
+ let did = t
+ .primitive_type()
+ .and_then(|t| self.cache.primitive_locations.get(&t).cloned());
+
+ if let Some(did) = did {
+ dids.insert(did);
+ }
+ }
+ }
+
+ if let Some(generics) = i.trait_.as_ref().and_then(|t| t.generics()) {
+ for bound in generics {
+ if let Some(did) = bound.def_id(self.cache) {
+ dids.insert(did);
+ }
+ }
+ }
+ let impl_item = Impl { impl_item: item };
+ if impl_item.trait_did().map_or(true, |d| self.cache.traits.contains_key(&d)) {
+ for did in dids {
+ if self.impl_ids.entry(did).or_default().insert(impl_item.def_id()) {
+ self.cache
+ .impls
+ .entry(did)
+ .or_insert_with(Vec::new)
+ .push(impl_item.clone());
+ }
+ }
+ } else {
+ let trait_did = impl_item.trait_did().expect("no trait did");
+ self.cache.orphan_trait_impls.push((trait_did, dids, impl_item));
+ }
+ None
+ } else {
+ Some(item)
+ };
+
+ if pushed {
+ self.cache.stack.pop().expect("stack already empty");
+ }
+ if parent_pushed {
+ self.cache.parent_stack.pop().expect("parent stack already empty");
+ }
+ self.cache.stripped_mod = orig_stripped_mod;
+ ret
+ }
+}
+
+pub(crate) struct OrphanImplItem {
+ pub(crate) parent: DefId,
+ pub(crate) item: clean::Item,
+ pub(crate) impl_generics: Option<(clean::Type, clean::Generics)>,
+}
+
+/// Information about trait and type parents is tracked while traversing the item tree to build
+/// the cache.
+///
+/// We don't just store `Item` in there, because `Item` contains the list of children being
+/// traversed and it would be wasteful to clone all that. We also need the item id, so just
+/// storing `ItemKind` won't work, either.
+enum ParentStackItem {
+ Impl {
+ for_: clean::Type,
+ trait_: Option<clean::Path>,
+ generics: clean::Generics,
+ kind: clean::ImplKind,
+ item_id: ItemId,
+ },
+ Type(ItemId),
+}
+
+impl ParentStackItem {
+ fn new(item: &clean::Item) -> Self {
+ match &*item.kind {
+ clean::ItemKind::ImplItem(box clean::Impl { for_, trait_, generics, kind, .. }) => {
+ ParentStackItem::Impl {
+ for_: for_.clone(),
+ trait_: trait_.clone(),
+ generics: generics.clone(),
+ kind: kind.clone(),
+ item_id: item.item_id,
+ }
+ }
+ _ => ParentStackItem::Type(item.item_id),
+ }
+ }
+ fn is_trait_impl(&self) -> bool {
+ matches!(self, ParentStackItem::Impl { trait_: Some(..), .. })
+ }
+ fn item_id(&self) -> ItemId {
+ match self {
+ ParentStackItem::Impl { item_id, .. } => *item_id,
+ ParentStackItem::Type(item_id) => *item_id,
+ }
+ }
+}
+
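+/// If the innermost parent is a normal (non-blanket, non-auto) impl, returns its self type and
+/// generics for use when building the search index entry.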
+fn clean_impl_generics(item: Option<&ParentStackItem>) -> Option<(clean::Type, clean::Generics)> {
+ if let Some(ParentStackItem::Impl { for_, generics, kind: clean::ImplKind::Normal, .. }) = item
+ {
+ Some((for_.clone(), generics.clone()))
+ } else {
+ None
+ }
+}
diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs
new file mode 100644
index 000000000..0a7ee2005
--- /dev/null
+++ b/src/librustdoc/formats/item_type.rs
@@ -0,0 +1,185 @@
+//! Item types.
+
+use std::fmt;
+
+use serde::{Serialize, Serializer};
+
+use rustc_hir::def::DefKind;
+use rustc_span::hygiene::MacroKind;
+
+use crate::clean;
+
+/// Item type. Corresponds to `clean::ItemEnum` variants.
+///
+/// The search index uses item types encoded as small numbers equal to the variant
+/// discriminants. JavaScript is then used to decode them back into the original value.
+/// Consequently, every change to this type should be synchronized with
+/// the `itemTypes` mapping table in `html/static/js/search.js`.
+///
+/// In addition, code in `html::render` uses this enum to generate CSS classes, page prefixes, and
+/// module headings. If you are adding to this enum and want to ensure that the sidebar also prints
+/// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an
+/// ordering based on a helper function inside `item_module`, in the same file.
+#[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)]
+pub(crate) enum ItemType {
+ Module = 0,
+ ExternCrate = 1,
+ Import = 2,
+ Struct = 3,
+ Enum = 4,
+ Function = 5,
+ Typedef = 6,
+ Static = 7,
+ Trait = 8,
+ Impl = 9,
+ TyMethod = 10,
+ Method = 11,
+ StructField = 12,
+ Variant = 13,
+ Macro = 14,
+ Primitive = 15,
+ AssocType = 16,
+ Constant = 17,
+ AssocConst = 18,
+ Union = 19,
+ ForeignType = 20,
+ Keyword = 21,
+ OpaqueTy = 22,
+ ProcAttribute = 23,
+ ProcDerive = 24,
+ TraitAlias = 25,
+}
+
+impl Serialize for ItemType {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ (*self as u8).serialize(serializer)
+ }
+}
+
+impl<'a> From<&'a clean::Item> for ItemType {
+ fn from(item: &'a clean::Item) -> ItemType {
+ let kind = match *item.kind {
+ clean::StrippedItem(box ref item) => item,
+ ref kind => kind,
+ };
+
+ match *kind {
+ clean::ModuleItem(..) => ItemType::Module,
+ clean::ExternCrateItem { .. } => ItemType::ExternCrate,
+ clean::ImportItem(..) => ItemType::Import,
+ clean::StructItem(..) => ItemType::Struct,
+ clean::UnionItem(..) => ItemType::Union,
+ clean::EnumItem(..) => ItemType::Enum,
+ clean::FunctionItem(..) => ItemType::Function,
+ clean::TypedefItem(..) => ItemType::Typedef,
+ clean::OpaqueTyItem(..) => ItemType::OpaqueTy,
+ clean::StaticItem(..) => ItemType::Static,
+ clean::ConstantItem(..) => ItemType::Constant,
+ clean::TraitItem(..) => ItemType::Trait,
+ clean::ImplItem(..) => ItemType::Impl,
+ clean::TyMethodItem(..) => ItemType::TyMethod,
+ clean::MethodItem(..) => ItemType::Method,
+ clean::StructFieldItem(..) => ItemType::StructField,
+ clean::VariantItem(..) => ItemType::Variant,
+ clean::ForeignFunctionItem(..) => ItemType::Function, // no ForeignFunction
+ clean::ForeignStaticItem(..) => ItemType::Static, // no ForeignStatic
+ clean::MacroItem(..) => ItemType::Macro,
+ clean::PrimitiveItem(..) => ItemType::Primitive,
+ clean::TyAssocConstItem(..) | clean::AssocConstItem(..) => ItemType::AssocConst,
+ clean::TyAssocTypeItem(..) | clean::AssocTypeItem(..) => ItemType::AssocType,
+ clean::ForeignTypeItem => ItemType::ForeignType,
+ clean::KeywordItem => ItemType::Keyword,
+ clean::TraitAliasItem(..) => ItemType::TraitAlias,
+ clean::ProcMacroItem(ref mac) => match mac.kind {
+ MacroKind::Bang => ItemType::Macro,
+ MacroKind::Attr => ItemType::ProcAttribute,
+ MacroKind::Derive => ItemType::ProcDerive,
+ },
+ clean::StrippedItem(..) => unreachable!(),
+ }
+ }
+}
+
+impl From<DefKind> for ItemType {
+ fn from(other: DefKind) -> Self {
+ match other {
+ DefKind::Enum => Self::Enum,
+ DefKind::Fn => Self::Function,
+ DefKind::Mod => Self::Module,
+ DefKind::Const => Self::Constant,
+ DefKind::Static(_) => Self::Static,
+ DefKind::Struct => Self::Struct,
+ DefKind::Union => Self::Union,
+ DefKind::Trait => Self::Trait,
+ DefKind::TyAlias => Self::Typedef,
+ DefKind::TraitAlias => Self::TraitAlias,
+ DefKind::Macro(kind) => match kind {
+ MacroKind::Bang => ItemType::Macro,
+ MacroKind::Attr => ItemType::ProcAttribute,
+ MacroKind::Derive => ItemType::ProcDerive,
+ },
+ DefKind::ForeignTy
+ | DefKind::Variant
+ | DefKind::AssocTy
+ | DefKind::TyParam
+ | DefKind::ConstParam
+ | DefKind::Ctor(..)
+ | DefKind::AssocFn
+ | DefKind::AssocConst
+ | DefKind::ExternCrate
+ | DefKind::Use
+ | DefKind::ForeignMod
+ | DefKind::AnonConst
+ | DefKind::InlineConst
+ | DefKind::OpaqueTy
+ | DefKind::Field
+ | DefKind::LifetimeParam
+ | DefKind::GlobalAsm
+ | DefKind::Impl
+ | DefKind::Closure
+ | DefKind::Generator => Self::ForeignType,
+ }
+ }
+}
+
+impl ItemType {
+ pub(crate) fn as_str(&self) -> &'static str {
+ match *self {
+ ItemType::Module => "mod",
+ ItemType::ExternCrate => "externcrate",
+ ItemType::Import => "import",
+ ItemType::Struct => "struct",
+ ItemType::Union => "union",
+ ItemType::Enum => "enum",
+ ItemType::Function => "fn",
+ ItemType::Typedef => "type",
+ ItemType::Static => "static",
+ ItemType::Trait => "trait",
+ ItemType::Impl => "impl",
+ ItemType::TyMethod => "tymethod",
+ ItemType::Method => "method",
+ ItemType::StructField => "structfield",
+ ItemType::Variant => "variant",
+ ItemType::Macro => "macro",
+ ItemType::Primitive => "primitive",
+ ItemType::AssocType => "associatedtype",
+ ItemType::Constant => "constant",
+ ItemType::AssocConst => "associatedconstant",
+ ItemType::ForeignType => "foreigntype",
+ ItemType::Keyword => "keyword",
+ ItemType::OpaqueTy => "opaque",
+ ItemType::ProcAttribute => "attr",
+ ItemType::ProcDerive => "derive",
+ ItemType::TraitAlias => "traitalias",
+ }
+ }
+}
+
+impl fmt::Display for ItemType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
+}
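The `Serialize` impl and `as_str` pair up: the search index stores the numeric discriminant, while generated URLs and CSS classes use the string form. A minimal sketch of the expected round trip, assuming `serde_json` is available:

    // ItemType::Struct serializes as its discriminant and displays as its URL prefix,
    // so a struct `Foo` gets a page named `struct.Foo.html`.
    let ty = ItemType::Struct;
    assert_eq!(serde_json::to_string(&ty).unwrap(), "3");
    assert_eq!(ty.to_string(), "struct");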
diff --git a/src/librustdoc/formats/mod.rs b/src/librustdoc/formats/mod.rs
new file mode 100644
index 000000000..b236bd7be
--- /dev/null
+++ b/src/librustdoc/formats/mod.rs
@@ -0,0 +1,101 @@
+pub(crate) mod cache;
+pub(crate) mod item_type;
+pub(crate) mod renderer;
+
+use rustc_hir::def_id::DefId;
+
+pub(crate) use renderer::{run_format, FormatRenderer};
+
+use crate::clean::{self, ItemId};
+use crate::html::render::Context;
+
+/// Specifies whether we are rendering directly implemented trait items or ones from a certain
+/// Deref impl.
+pub(crate) enum AssocItemRender<'a> {
+ All,
+ DerefFor { trait_: &'a clean::Path, type_: &'a clean::Type, deref_mut_: bool },
+}
+
+/// For different handling of associated items from the Deref target of a type rather than the type
+/// itself.
+#[derive(Copy, Clone, PartialEq)]
+pub(crate) enum RenderMode {
+ Normal,
+ ForDeref { mut_: bool },
+}
+
+/// Metadata about implementations for a type or trait.
+#[derive(Clone, Debug)]
+pub(crate) struct Impl {
+ pub(crate) impl_item: clean::Item,
+}
+
+impl Impl {
+ pub(crate) fn inner_impl(&self) -> &clean::Impl {
+ match *self.impl_item.kind {
+ clean::ImplItem(ref impl_) => impl_,
+ _ => panic!("non-impl item found in impl"),
+ }
+ }
+
+ pub(crate) fn trait_did(&self) -> Option<DefId> {
+ self.inner_impl().trait_.as_ref().map(|t| t.def_id())
+ }
+
+ /// This function is used to extract a `DefId` to be used as a key for the `Cache::impls` field.
+ ///
+ /// It helps prevent duplicated implementations from showing up (the biggest issue was
+ /// with blanket impls).
+ ///
+ /// It panics if `self` is an `ItemId::Primitive`.
+ pub(crate) fn def_id(&self) -> DefId {
+ match self.impl_item.item_id {
+ ItemId::Blanket { impl_id, .. } => impl_id,
+ ItemId::Auto { trait_, .. } => trait_,
+ ItemId::DefId(def_id) => def_id,
+ ItemId::Primitive(_, _) => {
+ panic!(
+ "Unexpected ItemId::Primitive in expect_def_id: {:?}",
+ self.impl_item.item_id
+ )
+ }
+ }
+ }
+
+ // Returns true if this is an implementation on a "local" type, meaning:
+ // the type is in the current crate, or the type and the trait are both
+ // re-exported by the current crate.
+ pub(crate) fn is_on_local_type(&self, cx: &Context<'_>) -> bool {
+ let cache = cx.cache();
+ let for_type = &self.inner_impl().for_;
+ if let Some(for_type_did) = for_type.def_id(cache) {
+ // The "for" type is local if it's in the paths for the current crate.
+ if cache.paths.contains_key(&for_type_did) {
+ return true;
+ }
+ if let Some(trait_did) = self.trait_did() {
+ // The "for" type and the trait are from the same crate. That could
+ // be different from the current crate, for instance when both were
+ // re-exported from some other crate. But they are local with respect to
+ // each other.
+ if for_type_did.krate == trait_did.krate {
+ return true;
+ }
+ // Hack: many traits and types in std are re-exported from
+ // core or alloc. In general, rustdoc is capable of recognizing
+ // these implementations as being on local types. However, in at
+ // least one case (https://github.com/rust-lang/rust/issues/97610),
+ // rustdoc gets confused and labels an implementation as being on
+ // a foreign type. To make sure that confusion doesn't pass on to
+ // the reader, consider all implementations in std, core, and alloc
+ // to be on local types.
+ let crate_name = cx.tcx().crate_name(trait_did.krate);
+ if matches!(crate_name.as_str(), "std" | "core" | "alloc") {
+ return true;
+ }
+ }
+ return false;
+ };
+ true
+ }
+}
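`trait_did()` returning `None` is what distinguishes inherent impls from trait impls, and renderers use it to group a type's impl blocks. A rough sketch of that split, using a hypothetical `impls: Vec<Impl>`:

    // Partition a type's impls into trait impls and inherent impls before rendering.
    let (trait_impls, inherent_impls): (Vec<&Impl>, Vec<&Impl>) =
        impls.iter().partition(|i| i.trait_did().is_some());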
diff --git a/src/librustdoc/formats/renderer.rs b/src/librustdoc/formats/renderer.rs
new file mode 100644
index 000000000..62ba984ac
--- /dev/null
+++ b/src/librustdoc/formats/renderer.rs
@@ -0,0 +1,97 @@
+use rustc_middle::ty::TyCtxt;
+use rustc_span::Symbol;
+
+use crate::clean;
+use crate::config::RenderOptions;
+use crate::error::Error;
+use crate::formats::cache::Cache;
+
+/// Allows different rustdoc backends to be used with the `run_format()` function. Each
+/// backend renderer has hooks for initialization, documenting an item, entering and exiting a
+/// module, and cleanup/finalizing output.
+pub(crate) trait FormatRenderer<'tcx>: Sized {
+ /// Gives a description of the renderer. Used for performance profiling.
+ fn descr() -> &'static str;
+
+ /// Whether to call `item` recursively for modules
+ ///
+ /// This is true for html, and false for json. See #80664
+ const RUN_ON_MODULE: bool;
+
+ /// Sets up any state required for the renderer. When this is called the cache has already been
+ /// populated.
+ fn init(
+ krate: clean::Crate,
+ options: RenderOptions,
+ cache: Cache,
+ tcx: TyCtxt<'tcx>,
+ ) -> Result<(Self, clean::Crate), Error>;
+
+ /// Make a new renderer to render a child of the item currently being rendered.
+ fn make_child_renderer(&self) -> Self;
+
+ /// Renders a single non-module item. This means no recursive sub-item rendering is required.
+ fn item(&mut self, item: clean::Item) -> Result<(), Error>;
+
+ /// Renders a module (should not handle recursing into children).
+ fn mod_item_in(&mut self, item: &clean::Item) -> Result<(), Error>;
+
+ /// Runs after recursively rendering all sub-items of a module.
+ fn mod_item_out(&mut self) -> Result<(), Error> {
+ Ok(())
+ }
+
+ /// Post processing hook for cleanup and dumping output to files.
+ fn after_krate(&mut self) -> Result<(), Error>;
+
+ fn cache(&self) -> &Cache;
+}
+
+/// Main method for rendering a crate.
+pub(crate) fn run_format<'tcx, T: FormatRenderer<'tcx>>(
+ krate: clean::Crate,
+ options: RenderOptions,
+ cache: Cache,
+ tcx: TyCtxt<'tcx>,
+) -> Result<(), Error> {
+ let prof = &tcx.sess.prof;
+
+ let emit_crate = options.should_emit_crate();
+ let (mut format_renderer, krate) = prof
+ .extra_verbose_generic_activity("create_renderer", T::descr())
+ .run(|| T::init(krate, options, cache, tcx))?;
+
+ if !emit_crate {
+ return Ok(());
+ }
+
+ // Render the crate documentation
+ let mut work = vec![(format_renderer.make_child_renderer(), krate.module)];
+
+ let unknown = Symbol::intern("<unknown item>");
+ while let Some((mut cx, item)) = work.pop() {
+ if item.is_mod() && T::RUN_ON_MODULE {
+ // modules are special because they add a namespace. We also need to
+ // recurse into the items of the module.
+ let _timer =
+ prof.generic_activity_with_arg("render_mod_item", item.name.unwrap().to_string());
+
+ cx.mod_item_in(&item)?;
+ let (clean::StrippedItem(box clean::ModuleItem(module)) | clean::ModuleItem(module)) = *item.kind
+ else { unreachable!() };
+ for it in module.items {
+ debug!("Adding {:?} to worklist", it.name);
+ work.push((cx.make_child_renderer(), it));
+ }
+
+ cx.mod_item_out()?;
+ // FIXME: checking `item.name.is_some()` is very implicit and leads to lots of special
+ // cases. Use an explicit match instead.
+ } else if item.name.is_some() && !item.is_extern_crate() {
+ prof.generic_activity_with_arg("render_item", item.name.unwrap_or(unknown).as_str())
+ .run(|| cx.item(item))?;
+ }
+ }
+ prof.extra_verbose_generic_activity("renderer_after_krate", T::descr())
+ .run(|| format_renderer.after_krate())
+}
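Note that `mod_item_out` runs as soon as a module's children have been pushed onto the worklist, not after they have been rendered, so any per-module state has to travel through `make_child_renderer`. For a crate whose root module contains a module `m` with a single function `f`, the hooks fire roughly in this order (a sketch, assuming `RUN_ON_MODULE` is true):

    init(krate, options, cache, tcx)
    mod_item_in(root); mod_item_out()   // children of the root are queued, not yet rendered
    mod_item_in(m);    mod_item_out()   // f is queued
    item(f)
    after_krate()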
diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs
new file mode 100644
index 000000000..4a19d0a44
--- /dev/null
+++ b/src/librustdoc/html/escape.rs
@@ -0,0 +1,40 @@
+//! HTML escaping.
+//!
+//! This module contains one unit struct, which can be used to HTML-escape a
+//! string of text (for use in a format string).
+
+use std::fmt;
+
+/// Wrapper struct which will emit the HTML-escaped version of the contained
+/// string when passed to a format string.
+pub(crate) struct Escape<'a>(pub &'a str);
+
+impl<'a> fmt::Display for Escape<'a> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Because the internet is always right, turns out there's not that many
+ // characters to escape: http://stackoverflow.com/questions/7381974
+ let Escape(s) = *self;
+ let pile_o_bits = s;
+ let mut last = 0;
+ for (i, ch) in s.char_indices() {
+ let s = match ch {
+ '>' => "&gt;",
+ '<' => "&lt;",
+ '&' => "&amp;",
+ '\'' => "&#39;",
+ '"' => "&quot;",
+ _ => continue,
+ };
+ fmt.write_str(&pile_o_bits[last..i])?;
+ fmt.write_str(s)?;
+ // NOTE: we only expect single byte characters here - which is fine as long as we
+ // only match single byte characters
+ last = i + 1;
+ }
+
+ if last < s.len() {
+ fmt.write_str(&pile_o_bits[last..])?;
+ }
+ Ok(())
+ }
+}
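`Escape` does the escaping lazily in `fmt`, so it composes with `write!`/`format!` without building an intermediate escaped `String`. A small usage sketch:

    // Only `<`, `>`, `&`, `'` and `"` are rewritten; everything else passes through.
    let s = format!("{}", Escape(r#"<T as Iterator>::Item & "quotes""#));
    assert_eq!(s, "&lt;T as Iterator&gt;::Item &amp; &quot;quotes&quot;");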
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
new file mode 100644
index 000000000..36a47b05c
--- /dev/null
+++ b/src/librustdoc/html/format.rs
@@ -0,0 +1,1688 @@
+//! HTML formatting module
+//!
+//! This module contains a large number of `fmt::Display` implementations for
+//! various types in `rustdoc::clean`. These implementations all currently
+//! assume that HTML output is desired, although it may be possible to redesign
+//! them in the future to instead emit any format desired.
+
+use std::borrow::Cow;
+use std::cell::Cell;
+use std::fmt;
+use std::iter::{self, once};
+
+use rustc_ast as ast;
+use rustc_attr::{ConstStability, StabilityLevel};
+use rustc_data_structures::captures::Captures;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir as hir;
+use rustc_hir::def::DefKind;
+use rustc_hir::def_id::DefId;
+use rustc_metadata::creader::{CStore, LoadedMacro};
+use rustc_middle::ty;
+use rustc_middle::ty::DefIdTree;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::symbol::kw;
+use rustc_span::{sym, Symbol};
+use rustc_target::spec::abi::Abi;
+
+use itertools::Itertools;
+
+use crate::clean::{
+ self, types::ExternalLocation, utils::find_nearest_parent_module, ExternalCrate, ItemId,
+ PrimitiveType,
+};
+use crate::formats::item_type::ItemType;
+use crate::html::escape::Escape;
+use crate::html::render::Context;
+
+use super::url_parts_builder::estimate_item_path_byte_length;
+use super::url_parts_builder::UrlPartsBuilder;
+
+pub(crate) trait Print {
+ fn print(self, buffer: &mut Buffer);
+}
+
+impl<F> Print for F
+where
+ F: FnOnce(&mut Buffer),
+{
+ fn print(self, buffer: &mut Buffer) {
+ (self)(buffer)
+ }
+}
+
+impl Print for String {
+ fn print(self, buffer: &mut Buffer) {
+ buffer.write_str(&self);
+ }
+}
+
+impl Print for &'_ str {
+ fn print(self, buffer: &mut Buffer) {
+ buffer.write_str(self);
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Buffer {
+ for_html: bool,
+ buffer: String,
+}
+
+impl core::fmt::Write for Buffer {
+ #[inline]
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ self.buffer.write_str(s)
+ }
+
+ #[inline]
+ fn write_char(&mut self, c: char) -> fmt::Result {
+ self.buffer.write_char(c)
+ }
+
+ #[inline]
+ fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
+ self.buffer.write_fmt(args)
+ }
+}
+
+impl Buffer {
+ pub(crate) fn empty_from(v: &Buffer) -> Buffer {
+ Buffer { for_html: v.for_html, buffer: String::new() }
+ }
+
+ pub(crate) fn html() -> Buffer {
+ Buffer { for_html: true, buffer: String::new() }
+ }
+
+ pub(crate) fn new() -> Buffer {
+ Buffer { for_html: false, buffer: String::new() }
+ }
+
+ pub(crate) fn is_empty(&self) -> bool {
+ self.buffer.is_empty()
+ }
+
+ pub(crate) fn into_inner(self) -> String {
+ self.buffer
+ }
+
+ pub(crate) fn insert_str(&mut self, idx: usize, s: &str) {
+ self.buffer.insert_str(idx, s);
+ }
+
+ pub(crate) fn push_str(&mut self, s: &str) {
+ self.buffer.push_str(s);
+ }
+
+ pub(crate) fn push_buffer(&mut self, other: Buffer) {
+ self.buffer.push_str(&other.buffer);
+ }
+
+ // Intended for consumption by write! and writeln! (std::fmt) but without
+ // the fmt::Result return type imposed by fmt::Write (and avoiding the trait
+ // import).
+ pub(crate) fn write_str(&mut self, s: &str) {
+ self.buffer.push_str(s);
+ }
+
+ // Intended for consumption by write! and writeln! (std::fmt) but without
+ // the fmt::Result return type imposed by fmt::Write (and avoiding the trait
+ // import).
+ pub(crate) fn write_fmt(&mut self, v: fmt::Arguments<'_>) {
+ use fmt::Write;
+ self.buffer.write_fmt(v).unwrap();
+ }
+
+ pub(crate) fn to_display<T: Print>(mut self, t: T) -> String {
+ t.print(&mut self);
+ self.into_inner()
+ }
+
+ pub(crate) fn is_for_html(&self) -> bool {
+ self.for_html
+ }
+
+ pub(crate) fn reserve(&mut self, additional: usize) {
+ self.buffer.reserve(additional)
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ self.buffer.len()
+ }
+}
+
+fn comma_sep<T: fmt::Display>(
+ items: impl Iterator<Item = T>,
+ space_after_comma: bool,
+) -> impl fmt::Display {
+ display_fn(move |f| {
+ for (i, item) in items.enumerate() {
+ if i != 0 {
+ write!(f, ",{}", if space_after_comma { " " } else { "" })?;
+ }
+ fmt::Display::fmt(&item, f)?;
+ }
+ Ok(())
+ })
+}
+
+pub(crate) fn print_generic_bounds<'a, 'tcx: 'a>(
+ bounds: &'a [clean::GenericBound],
+ cx: &'a Context<'tcx>,
+) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ let mut bounds_dup = FxHashSet::default();
+
+ for (i, bound) in bounds.iter().filter(|b| bounds_dup.insert(b.clone())).enumerate() {
+ if i > 0 {
+ f.write_str(" + ")?;
+ }
+ fmt::Display::fmt(&bound.print(cx), f)?;
+ }
+ Ok(())
+ })
+}
+
+impl clean::GenericParamDef {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match &self.kind {
+ clean::GenericParamDefKind::Lifetime { outlives } => {
+ write!(f, "{}", self.name)?;
+
+ if !outlives.is_empty() {
+ f.write_str(": ")?;
+ for (i, lt) in outlives.iter().enumerate() {
+ if i != 0 {
+ f.write_str(" + ")?;
+ }
+ write!(f, "{}", lt.print())?;
+ }
+ }
+
+ Ok(())
+ }
+ clean::GenericParamDefKind::Type { bounds, default, .. } => {
+ f.write_str(self.name.as_str())?;
+
+ if !bounds.is_empty() {
+ if f.alternate() {
+ write!(f, ": {:#}", print_generic_bounds(bounds, cx))?;
+ } else {
+ write!(f, ":&nbsp;{}", print_generic_bounds(bounds, cx))?;
+ }
+ }
+
+ if let Some(ref ty) = default {
+ if f.alternate() {
+ write!(f, " = {:#}", ty.print(cx))?;
+ } else {
+ write!(f, "&nbsp;=&nbsp;{}", ty.print(cx))?;
+ }
+ }
+
+ Ok(())
+ }
+ clean::GenericParamDefKind::Const { ty, default, .. } => {
+ if f.alternate() {
+ write!(f, "const {}: {:#}", self.name, ty.print(cx))?;
+ } else {
+ write!(f, "const {}:&nbsp;{}", self.name, ty.print(cx))?;
+ }
+
+ if let Some(default) = default {
+ if f.alternate() {
+ write!(f, " = {:#}", default)?;
+ } else {
+ write!(f, "&nbsp;=&nbsp;{}", default)?;
+ }
+ }
+
+ Ok(())
+ }
+ })
+ }
+}
+
+impl clean::Generics {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ let mut real_params =
+ self.params.iter().filter(|p| !p.is_synthetic_type_param()).peekable();
+ if real_params.peek().is_none() {
+ return Ok(());
+ }
+
+ if f.alternate() {
+ write!(f, "<{:#}>", comma_sep(real_params.map(|g| g.print(cx)), true))
+ } else {
+ write!(f, "&lt;{}&gt;", comma_sep(real_params.map(|g| g.print(cx)), true))
+ }
+ })
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub(crate) enum Ending {
+ Newline,
+ NoNewline,
+}
+
+/// * The Generics from which to emit a where-clause.
+/// * The number of spaces to indent each line with.
+/// * Whether the where-clause needs to add a comma and newline after the last bound.
+pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
+ gens: &'a clean::Generics,
+ cx: &'a Context<'tcx>,
+ indent: usize,
+ ending: Ending,
+) -> impl fmt::Display + 'a + Captures<'tcx> {
+ use fmt::Write;
+
+ display_fn(move |f| {
+ let mut where_predicates = gens.where_predicates.iter().filter(|pred| {
+ !matches!(pred, clean::WherePredicate::BoundPredicate { bounds, .. } if bounds.is_empty())
+ }).map(|pred| {
+ display_fn(move |f| {
+ if f.alternate() {
+ f.write_str(" ")?;
+ } else {
+ f.write_str("<br>")?;
+ }
+
+ match pred {
+ clean::WherePredicate::BoundPredicate { ty, bounds, bound_params } => {
+ let ty_cx = ty.print(cx);
+ let generic_bounds = print_generic_bounds(bounds, cx);
+
+ if bound_params.is_empty() {
+ if f.alternate() {
+ write!(f, "{ty_cx:#}: {generic_bounds:#}")
+ } else {
+ write!(f, "{ty_cx}: {generic_bounds}")
+ }
+ } else {
+ if f.alternate() {
+ write!(
+ f,
+ "for<{:#}> {ty_cx:#}: {generic_bounds:#}",
+ comma_sep(bound_params.iter().map(|lt| lt.print()), true)
+ )
+ } else {
+ write!(
+ f,
+ "for&lt;{}&gt; {ty_cx}: {generic_bounds}",
+ comma_sep(bound_params.iter().map(|lt| lt.print()), true)
+ )
+ }
+ }
+ }
+ clean::WherePredicate::RegionPredicate { lifetime, bounds } => {
+ let mut bounds_display = String::new();
+ for bound in bounds.iter().map(|b| b.print(cx)) {
+ write!(bounds_display, "{bound} + ")?;
+ }
+ bounds_display.truncate(bounds_display.len() - " + ".len());
+ write!(f, "{}: {bounds_display}", lifetime.print())
+ }
+ clean::WherePredicate::EqPredicate { lhs, rhs } => {
+ if f.alternate() {
+ write!(f, "{:#} == {:#}", lhs.print(cx), rhs.print(cx))
+ } else {
+ write!(f, "{} == {}", lhs.print(cx), rhs.print(cx))
+ }
+ }
+ }
+ })
+ }).peekable();
+
+ if where_predicates.peek().is_none() {
+ return Ok(());
+ }
+
+ let where_preds = comma_sep(where_predicates, false);
+ let clause = if f.alternate() {
+ if ending == Ending::Newline {
+ // add a space so stripping <br> tags and breaking spaces still renders properly
+ format!(" where{where_preds}, ")
+ } else {
+ format!(" where{where_preds}")
+ }
+ } else {
+ let mut br_with_padding = String::with_capacity(6 * indent + 28);
+ br_with_padding.push_str("<br>");
+ for _ in 0..indent + 4 {
+ br_with_padding.push_str("&nbsp;");
+ }
+ let where_preds = where_preds.to_string().replace("<br>", &br_with_padding);
+
+ if ending == Ending::Newline {
+ let mut clause = "&nbsp;".repeat(indent.saturating_sub(1));
+ // add a space so stripping <br> tags and breaking spaces still renders properly
+ write!(
+ clause,
+ " <span class=\"where fmt-newline\">where{where_preds},&nbsp;</span>"
+ )?;
+ clause
+ } else {
+ // insert a <br> tag after a single space but before multiple spaces at the start
+ if indent == 0 {
+ format!(" <br><span class=\"where\">where{where_preds}</span>")
+ } else {
+ let mut clause = br_with_padding;
+ clause.truncate(clause.len() - 5 * "&nbsp;".len());
+ write!(clause, " <span class=\"where\">where{where_preds}</span>")?;
+ clause
+ }
+ }
+ };
+ write!(f, "{clause}")
+ })
+}
+
+impl clean::Lifetime {
+ pub(crate) fn print(&self) -> impl fmt::Display + '_ {
+ self.0.as_str()
+ }
+}
+
+impl clean::Constant {
+ pub(crate) fn print(&self, tcx: TyCtxt<'_>) -> impl fmt::Display + '_ {
+ let expr = self.expr(tcx);
+ display_fn(
+ move |f| {
+ if f.alternate() { f.write_str(&expr) } else { write!(f, "{}", Escape(&expr)) }
+ },
+ )
+ }
+}
+
+impl clean::PolyTrait {
+ fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ if !self.generic_params.is_empty() {
+ if f.alternate() {
+ write!(
+ f,
+ "for<{:#}> ",
+ comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true)
+ )?;
+ } else {
+ write!(
+ f,
+ "for&lt;{}&gt; ",
+ comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true)
+ )?;
+ }
+ }
+ if f.alternate() {
+ write!(f, "{:#}", self.trait_.print(cx))
+ } else {
+ write!(f, "{}", self.trait_.print(cx))
+ }
+ })
+ }
+}
+
+impl clean::GenericBound {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match self {
+ clean::GenericBound::Outlives(lt) => write!(f, "{}", lt.print()),
+ clean::GenericBound::TraitBound(ty, modifier) => {
+ let modifier_str = match modifier {
+ hir::TraitBoundModifier::None => "",
+ hir::TraitBoundModifier::Maybe => "?",
+ // ~const is experimental; do not display those bounds in rustdoc
+ hir::TraitBoundModifier::MaybeConst => "",
+ };
+ if f.alternate() {
+ write!(f, "{}{:#}", modifier_str, ty.print(cx))
+ } else {
+ write!(f, "{}{}", modifier_str, ty.print(cx))
+ }
+ }
+ })
+ }
+}
+
+impl clean::GenericArgs {
+ fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ match self {
+ clean::GenericArgs::AngleBracketed { args, bindings } => {
+ if !args.is_empty() || !bindings.is_empty() {
+ if f.alternate() {
+ f.write_str("<")?;
+ } else {
+ f.write_str("&lt;")?;
+ }
+ let mut comma = false;
+ for arg in args.iter() {
+ if comma {
+ f.write_str(", ")?;
+ }
+ comma = true;
+ if f.alternate() {
+ write!(f, "{:#}", arg.print(cx))?;
+ } else {
+ write!(f, "{}", arg.print(cx))?;
+ }
+ }
+ for binding in bindings.iter() {
+ if comma {
+ f.write_str(", ")?;
+ }
+ comma = true;
+ if f.alternate() {
+ write!(f, "{:#}", binding.print(cx))?;
+ } else {
+ write!(f, "{}", binding.print(cx))?;
+ }
+ }
+ if f.alternate() {
+ f.write_str(">")?;
+ } else {
+ f.write_str("&gt;")?;
+ }
+ }
+ }
+ clean::GenericArgs::Parenthesized { inputs, output } => {
+ f.write_str("(")?;
+ let mut comma = false;
+ for ty in inputs.iter() {
+ if comma {
+ f.write_str(", ")?;
+ }
+ comma = true;
+ if f.alternate() {
+ write!(f, "{:#}", ty.print(cx))?;
+ } else {
+ write!(f, "{}", ty.print(cx))?;
+ }
+ }
+ f.write_str(")")?;
+ if let Some(ref ty) = *output {
+ if f.alternate() {
+ write!(f, " -> {:#}", ty.print(cx))?;
+ } else {
+ write!(f, " -&gt; {}", ty.print(cx))?;
+ }
+ }
+ }
+ }
+ Ok(())
+ })
+ }
+}
+
+// Possible errors when computing href link source for a `DefId`
+#[derive(PartialEq, Eq)]
+pub(crate) enum HrefError {
+ /// This item is known to rustdoc, but from a crate that does not have documentation generated.
+ ///
+ /// This can only happen for non-local items.
+ ///
+ /// # Example
+ ///
+ /// Crate `a` defines a public trait and crate `b` – the target crate that depends on `a` –
+ /// implements it for a local type.
+ /// We document `b` but **not** `a` (we only _build_ the latter – with `rustc`):
+ ///
+ /// ```sh
+ /// rustc a.rs --crate-type=lib
+ /// rustdoc b.rs --crate-type=lib --extern=a=liba.rlib
+ /// ```
+ ///
+ /// Now, the associated items in the trait impl want to link to the corresponding item in the
+ /// trait declaration (see `html::render::assoc_href_attr`) but it's not available since their
+ /// *documentation was not built*.
+ DocumentationNotBuilt,
+ /// This can only happen for non-local items when `--document-private-items` is not passed.
+ Private,
+ // Not in external cache, href link should be in same page
+ NotInExternalCache,
+}
+
+// Panics if `syms` is empty.
+pub(crate) fn join_with_double_colon(syms: &[Symbol]) -> String {
+ let mut s = String::with_capacity(estimate_item_path_byte_length(syms.len()));
+ s.push_str(syms[0].as_str());
+ for sym in &syms[1..] {
+ s.push_str("::");
+ s.push_str(sym.as_str());
+ }
+ s
+}
+
+/// This function is used to get the path of an external macro because external macros are not in
+/// the cache used by `href_with_root_path`.
+fn generate_macro_def_id_path(
+ def_id: DefId,
+ cx: &Context<'_>,
+ root_path: Option<&str>,
+) -> Result<(String, ItemType, Vec<Symbol>), HrefError> {
+ let tcx = cx.shared.tcx;
+ let crate_name = tcx.crate_name(def_id.krate).to_string();
+ let cache = cx.cache();
+
+ let fqp: Vec<Symbol> = tcx
+ .def_path(def_id)
+ .data
+ .into_iter()
+ .filter_map(|elem| {
+ // extern blocks (and a few other things) have an empty name.
+ match elem.data.get_opt_name() {
+ Some(s) if !s.is_empty() => Some(s),
+ _ => None,
+ }
+ })
+ .collect();
+ let relative = fqp.iter().map(|elem| elem.to_string());
+ let cstore = CStore::from_tcx(tcx);
+ // We need this to prevent a `panic` when this function is used from intra-doc links...
+ if !cstore.has_crate_data(def_id.krate) {
+ debug!("No data for crate {}", crate_name);
+ return Err(HrefError::NotInExternalCache);
+ }
+ // Check to see if it is a macro 2.0 or built-in macro.
+ // More information in <https://rust-lang.github.io/rfcs/1584-macros.html>.
+ let is_macro_2 = match cstore.load_macro_untracked(def_id, tcx.sess) {
+ LoadedMacro::MacroDef(def, _) => {
+ // If `ast_def.macro_rules` is `true`, then it's not a macro 2.0.
+ matches!(&def.kind, ast::ItemKind::MacroDef(ast_def) if !ast_def.macro_rules)
+ }
+ _ => false,
+ };
+
+ let mut path = if is_macro_2 {
+ once(crate_name.clone()).chain(relative).collect()
+ } else {
+ vec![crate_name.clone(), relative.last().unwrap()]
+ };
+ if path.len() < 2 {
+ // The minimum we can have is the crate name followed by the macro name. If shorter, then
+ // it means that `relative` was empty, which is an error.
+ debug!("macro path cannot be empty!");
+ return Err(HrefError::NotInExternalCache);
+ }
+
+ if let Some(last) = path.last_mut() {
+ *last = format!("macro.{}.html", last);
+ }
+
+ let url = match cache.extern_locations[&def_id.krate] {
+ ExternalLocation::Remote(ref s) => {
+ // `ExternalLocation::Remote` always ends with a `/`.
+ format!("{}{}", s, path.join("/"))
+ }
+ ExternalLocation::Local => {
+ // `root_path` always ends with a `/`.
+ format!("{}{}/{}", root_path.unwrap_or(""), crate_name, path.join("/"))
+ }
+ ExternalLocation::Unknown => {
+ debug!("crate {} not in cache when linkifying macros", crate_name);
+ return Err(HrefError::NotInExternalCache);
+ }
+ };
+ Ok((url, ItemType::Macro, fqp))
+}
+
+pub(crate) fn href_with_root_path(
+ did: DefId,
+ cx: &Context<'_>,
+ root_path: Option<&str>,
+) -> Result<(String, ItemType, Vec<Symbol>), HrefError> {
+ let tcx = cx.tcx();
+ let def_kind = tcx.def_kind(did);
+ let did = match def_kind {
+ DefKind::AssocTy | DefKind::AssocFn | DefKind::AssocConst | DefKind::Variant => {
+ // documented on their parent's page
+ tcx.parent(did)
+ }
+ _ => did,
+ };
+ let cache = cx.cache();
+ let relative_to = &cx.current;
+ fn to_module_fqp(shortty: ItemType, fqp: &[Symbol]) -> &[Symbol] {
+ if shortty == ItemType::Module { fqp } else { &fqp[..fqp.len() - 1] }
+ }
+
+ if !did.is_local()
+ && !cache.access_levels.is_public(did)
+ && !cache.document_private
+ && !cache.primitive_locations.values().any(|&id| id == did)
+ {
+ return Err(HrefError::Private);
+ }
+
+ let mut is_remote = false;
+ let (fqp, shortty, mut url_parts) = match cache.paths.get(&did) {
+ Some(&(ref fqp, shortty)) => (fqp, shortty, {
+ let module_fqp = to_module_fqp(shortty, fqp.as_slice());
+ debug!(?fqp, ?shortty, ?module_fqp);
+ href_relative_parts(module_fqp, relative_to).collect()
+ }),
+ None => {
+ if let Some(&(ref fqp, shortty)) = cache.external_paths.get(&did) {
+ let module_fqp = to_module_fqp(shortty, fqp);
+ (
+ fqp,
+ shortty,
+ match cache.extern_locations[&did.krate] {
+ ExternalLocation::Remote(ref s) => {
+ is_remote = true;
+ let s = s.trim_end_matches('/');
+ let mut builder = UrlPartsBuilder::singleton(s);
+ builder.extend(module_fqp.iter().copied());
+ builder
+ }
+ ExternalLocation::Local => {
+ href_relative_parts(module_fqp, relative_to).collect()
+ }
+ ExternalLocation::Unknown => return Err(HrefError::DocumentationNotBuilt),
+ },
+ )
+ } else if matches!(def_kind, DefKind::Macro(_)) {
+ return generate_macro_def_id_path(did, cx, root_path);
+ } else {
+ return Err(HrefError::NotInExternalCache);
+ }
+ }
+ };
+ if !is_remote {
+ if let Some(root_path) = root_path {
+ let root = root_path.trim_end_matches('/');
+ url_parts.push_front(root);
+ }
+ }
+ debug!(?url_parts);
+ match shortty {
+ ItemType::Module => {
+ url_parts.push("index.html");
+ }
+ _ => {
+ let prefix = shortty.as_str();
+ let last = fqp.last().unwrap();
+ url_parts.push_fmt(format_args!("{}.{}.html", prefix, last));
+ }
+ }
+ Ok((url_parts.finish(), shortty, fqp.to_vec()))
+}
+
+pub(crate) fn href(
+ did: DefId,
+ cx: &Context<'_>,
+) -> Result<(String, ItemType, Vec<Symbol>), HrefError> {
+ href_with_root_path(did, cx, None)
+}
+
+/// Both paths should only be modules.
+/// This is because modules get their own directories; that is, `std::vec` and `std::vec::Vec` will
+/// both need `../iter/trait.Iterator.html` to get at the iterator trait.
+pub(crate) fn href_relative_parts<'fqp>(
+ fqp: &'fqp [Symbol],
+ relative_to_fqp: &[Symbol],
+) -> Box<dyn Iterator<Item = Symbol> + 'fqp> {
+ for (i, (f, r)) in fqp.iter().zip(relative_to_fqp.iter()).enumerate() {
+ // e.g. linking to std::iter from std::vec (`dissimilar_part_count` will be 1)
+ if f != r {
+ let dissimilar_part_count = relative_to_fqp.len() - i;
+ let fqp_module = &fqp[i..fqp.len()];
+ return Box::new(
+ iter::repeat(sym::dotdot)
+ .take(dissimilar_part_count)
+ .chain(fqp_module.iter().copied()),
+ );
+ }
+ }
+ // e.g. linking to std::sync::atomic from std::sync
+ if relative_to_fqp.len() < fqp.len() {
+ Box::new(fqp[relative_to_fqp.len()..fqp.len()].iter().copied())
+ // e.g. linking to std::sync from std::sync::atomic
+ } else if fqp.len() < relative_to_fqp.len() {
+ let dissimilar_part_count = relative_to_fqp.len() - fqp.len();
+ Box::new(iter::repeat(sym::dotdot).take(dissimilar_part_count))
+ // linking to the same module
+ } else {
+ Box::new(iter::empty())
+ }
+}
+
+/// Used to render a [`clean::Path`].
+fn resolved_path<'cx>(
+ w: &mut fmt::Formatter<'_>,
+ did: DefId,
+ path: &clean::Path,
+ print_all: bool,
+ use_absolute: bool,
+ cx: &'cx Context<'_>,
+) -> fmt::Result {
+ let last = path.segments.last().unwrap();
+
+ if print_all {
+ for seg in &path.segments[..path.segments.len() - 1] {
+ write!(w, "{}::", if seg.name == kw::PathRoot { "" } else { seg.name.as_str() })?;
+ }
+ }
+ if w.alternate() {
+ write!(w, "{}{:#}", &last.name, last.args.print(cx))?;
+ } else {
+ let path = if use_absolute {
+ if let Ok((_, _, fqp)) = href(did, cx) {
+ format!(
+ "{}::{}",
+ join_with_double_colon(&fqp[..fqp.len() - 1]),
+ anchor(did, *fqp.last().unwrap(), cx)
+ )
+ } else {
+ last.name.to_string()
+ }
+ } else {
+ anchor(did, last.name, cx).to_string()
+ };
+ write!(w, "{}{}", path, last.args.print(cx))?;
+ }
+ Ok(())
+}
+
+fn primitive_link(
+ f: &mut fmt::Formatter<'_>,
+ prim: clean::PrimitiveType,
+ name: &str,
+ cx: &Context<'_>,
+) -> fmt::Result {
+ primitive_link_fragment(f, prim, name, "", cx)
+}
+
+fn primitive_link_fragment(
+ f: &mut fmt::Formatter<'_>,
+ prim: clean::PrimitiveType,
+ name: &str,
+ fragment: &str,
+ cx: &Context<'_>,
+) -> fmt::Result {
+ let m = &cx.cache();
+ let mut needs_termination = false;
+ if !f.alternate() {
+ match m.primitive_locations.get(&prim) {
+ Some(&def_id) if def_id.is_local() => {
+ let len = cx.current.len();
+ let len = if len == 0 { 0 } else { len - 1 };
+ write!(
+ f,
+ "<a class=\"primitive\" href=\"{}primitive.{}.html{fragment}\">",
+ "../".repeat(len),
+ prim.as_sym()
+ )?;
+ needs_termination = true;
+ }
+ Some(&def_id) => {
+ let loc = match m.extern_locations[&def_id.krate] {
+ ExternalLocation::Remote(ref s) => {
+ let cname_sym = ExternalCrate { crate_num: def_id.krate }.name(cx.tcx());
+ let builder: UrlPartsBuilder =
+ [s.as_str().trim_end_matches('/'), cname_sym.as_str()]
+ .into_iter()
+ .collect();
+ Some(builder)
+ }
+ ExternalLocation::Local => {
+ let cname_sym = ExternalCrate { crate_num: def_id.krate }.name(cx.tcx());
+ Some(if cx.current.first() == Some(&cname_sym) {
+ iter::repeat(sym::dotdot).take(cx.current.len() - 1).collect()
+ } else {
+ iter::repeat(sym::dotdot)
+ .take(cx.current.len())
+ .chain(iter::once(cname_sym))
+ .collect()
+ })
+ }
+ ExternalLocation::Unknown => None,
+ };
+ if let Some(mut loc) = loc {
+ loc.push_fmt(format_args!("primitive.{}.html", prim.as_sym()));
+ write!(f, "<a class=\"primitive\" href=\"{}{fragment}\">", loc.finish())?;
+ needs_termination = true;
+ }
+ }
+ None => {}
+ }
+ }
+ write!(f, "{}", name)?;
+ if needs_termination {
+ write!(f, "</a>")?;
+ }
+ Ok(())
+}
+
+/// Helper to render type parameters
+fn tybounds<'a, 'tcx: 'a>(
+ bounds: &'a [clean::PolyTrait],
+ lt: &'a Option<clean::Lifetime>,
+ cx: &'a Context<'tcx>,
+) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ for (i, bound) in bounds.iter().enumerate() {
+ if i > 0 {
+ write!(f, " + ")?;
+ }
+
+ fmt::Display::fmt(&bound.print(cx), f)?;
+ }
+
+ if let Some(lt) = lt {
+ write!(f, " + ")?;
+ fmt::Display::fmt(&lt.print(), f)?;
+ }
+ Ok(())
+ })
+}
+
+pub(crate) fn anchor<'a, 'cx: 'a>(
+ did: DefId,
+ text: Symbol,
+ cx: &'cx Context<'_>,
+) -> impl fmt::Display + 'a {
+ let parts = href(did, cx);
+ display_fn(move |f| {
+ if let Ok((url, short_ty, fqp)) = parts {
+ write!(
+ f,
+ r#"<a class="{}" href="{}" title="{} {}">{}</a>"#,
+ short_ty,
+ url,
+ short_ty,
+ join_with_double_colon(&fqp),
+ text.as_str()
+ )
+ } else {
+ write!(f, "{}", text)
+ }
+ })
+}
+
+fn fmt_type<'cx>(
+ t: &clean::Type,
+ f: &mut fmt::Formatter<'_>,
+ use_absolute: bool,
+ cx: &'cx Context<'_>,
+) -> fmt::Result {
+ trace!("fmt_type(t = {:?})", t);
+
+ match *t {
+ clean::Generic(name) => write!(f, "{}", name),
+ clean::Type::Path { ref path } => {
+ // Paths like `T::Output` and `Self::Output` should be rendered with all segments.
+ let did = path.def_id();
+ resolved_path(f, did, path, path.is_assoc_ty(), use_absolute, cx)
+ }
+ clean::DynTrait(ref bounds, ref lt) => {
+ f.write_str("dyn ")?;
+ fmt::Display::fmt(&tybounds(bounds, lt, cx), f)
+ }
+ clean::Infer => write!(f, "_"),
+ clean::Primitive(clean::PrimitiveType::Never) => {
+ primitive_link(f, PrimitiveType::Never, "!", cx)
+ }
+ clean::Primitive(prim) => primitive_link(f, prim, prim.as_sym().as_str(), cx),
+ clean::BareFunction(ref decl) => {
+ if f.alternate() {
+ write!(
+ f,
+ "{:#}{}{:#}fn{:#}",
+ decl.print_hrtb_with_space(cx),
+ decl.unsafety.print_with_space(),
+ print_abi_with_space(decl.abi),
+ decl.decl.print(cx),
+ )
+ } else {
+ write!(
+ f,
+ "{}{}{}",
+ decl.print_hrtb_with_space(cx),
+ decl.unsafety.print_with_space(),
+ print_abi_with_space(decl.abi)
+ )?;
+ primitive_link(f, PrimitiveType::Fn, "fn", cx)?;
+ write!(f, "{}", decl.decl.print(cx))
+ }
+ }
+ clean::Tuple(ref typs) => {
+ match &typs[..] {
+ &[] => primitive_link(f, PrimitiveType::Unit, "()", cx),
+ &[ref one] => {
+ if let clean::Generic(name) = one {
+ primitive_link(f, PrimitiveType::Tuple, &format!("({name},)"), cx)
+ } else {
+ write!(f, "(")?;
+ // Carry `f.alternate()` into this display w/o branching manually.
+ fmt::Display::fmt(&one.print(cx), f)?;
+ write!(f, ",)")
+ }
+ }
+ many => {
+ let generic_names: Vec<Symbol> = many
+ .iter()
+ .filter_map(|t| match t {
+ clean::Generic(name) => Some(*name),
+ _ => None,
+ })
+ .collect();
+ let is_generic = generic_names.len() == many.len();
+ if is_generic {
+ primitive_link(
+ f,
+ PrimitiveType::Tuple,
+ &format!("({})", generic_names.iter().map(|s| s.as_str()).join(", ")),
+ cx,
+ )
+ } else {
+ write!(f, "(")?;
+ for (i, item) in many.iter().enumerate() {
+ if i != 0 {
+ write!(f, ", ")?;
+ }
+ // Carry `f.alternate()` into this display w/o branching manually.
+ fmt::Display::fmt(&item.print(cx), f)?;
+ }
+ write!(f, ")")
+ }
+ }
+ }
+ }
+ clean::Slice(ref t) => match **t {
+ clean::Generic(name) => {
+ primitive_link(f, PrimitiveType::Slice, &format!("[{name}]"), cx)
+ }
+ _ => {
+ write!(f, "[")?;
+ fmt::Display::fmt(&t.print(cx), f)?;
+ write!(f, "]")
+ }
+ },
+ clean::Array(ref t, ref n) => {
+ primitive_link(f, PrimitiveType::Array, "[", cx)?;
+ fmt::Display::fmt(&t.print(cx), f)?;
+ if f.alternate() {
+ primitive_link(f, PrimitiveType::Array, &format!("; {}]", n), cx)
+ } else {
+ primitive_link(f, PrimitiveType::Array, &format!("; {}]", Escape(n)), cx)
+ }
+ }
+ clean::RawPointer(m, ref t) => {
+ let m = match m {
+ hir::Mutability::Mut => "mut",
+ hir::Mutability::Not => "const",
+ };
+
+ if matches!(**t, clean::Generic(_)) || t.is_assoc_ty() {
+ let text = if f.alternate() {
+ format!("*{} {:#}", m, t.print(cx))
+ } else {
+ format!("*{} {}", m, t.print(cx))
+ };
+ primitive_link(f, clean::PrimitiveType::RawPointer, &text, cx)
+ } else {
+ primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{} ", m), cx)?;
+ fmt::Display::fmt(&t.print(cx), f)
+ }
+ }
+ clean::BorrowedRef { lifetime: ref l, mutability, type_: ref ty } => {
+ let lt = match l {
+ Some(l) => format!("{} ", l.print()),
+ _ => String::new(),
+ };
+ let m = mutability.print_with_space();
+ let amp = if f.alternate() { "&".to_string() } else { "&amp;".to_string() };
+ match **ty {
+ clean::DynTrait(ref bounds, ref trait_lt)
+ if bounds.len() > 1 || trait_lt.is_some() =>
+ {
+ write!(f, "{}{}{}(", amp, lt, m)?;
+ fmt_type(ty, f, use_absolute, cx)?;
+ write!(f, ")")
+ }
+ clean::Generic(..) => {
+ primitive_link(
+ f,
+ PrimitiveType::Reference,
+ &format!("{}{}{}", amp, lt, m),
+ cx,
+ )?;
+ fmt_type(ty, f, use_absolute, cx)
+ }
+ _ => {
+ write!(f, "{}{}{}", amp, lt, m)?;
+ fmt_type(ty, f, use_absolute, cx)
+ }
+ }
+ }
+ clean::ImplTrait(ref bounds) => {
+ if f.alternate() {
+ write!(f, "impl {:#}", print_generic_bounds(bounds, cx))
+ } else {
+ write!(f, "impl {}", print_generic_bounds(bounds, cx))
+ }
+ }
+ clean::QPath { ref assoc, ref self_type, ref trait_, should_show_cast } => {
+ if f.alternate() {
+ if should_show_cast {
+ write!(f, "<{:#} as {:#}>::", self_type.print(cx), trait_.print(cx))?
+ } else {
+ write!(f, "{:#}::", self_type.print(cx))?
+ }
+ } else {
+ if should_show_cast {
+ write!(f, "&lt;{} as {}&gt;::", self_type.print(cx), trait_.print(cx))?
+ } else {
+ write!(f, "{}::", self_type.print(cx))?
+ }
+ };
+ // It's pretty unsightly to look at `<A as B>::C` in output, and
+ // we've got hyperlinking on our side, so try to avoid longer
+ // notation as much as possible by making `C` a hyperlink to trait
+ // `B` to disambiguate.
+ //
+ // FIXME: this is still a lossy conversion and there should probably
+ // be a better way of representing this in general? Most of
+ // the ugliness comes from inlining across crates where
+ // everything comes in as a fully resolved QPath (hard to
+ // look at).
+ match href(trait_.def_id(), cx) {
+ Ok((ref url, _, ref path)) if !f.alternate() => {
+ write!(
+ f,
+ "<a class=\"associatedtype\" href=\"{url}#{shortty}.{name}\" \
+ title=\"type {path}::{name}\">{name}</a>{args}",
+ url = url,
+ shortty = ItemType::AssocType,
+ name = assoc.name,
+ path = join_with_double_colon(path),
+ args = assoc.args.print(cx),
+ )?;
+ }
+ _ => write!(f, "{}{:#}", assoc.name, assoc.args.print(cx))?,
+ }
+ Ok(())
+ }
+ }
+}
+
+impl clean::Type {
+ pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'b + Captures<'tcx> {
+ display_fn(move |f| fmt_type(self, f, false, cx))
+ }
+}
+
+impl clean::Path {
+ pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'b + Captures<'tcx> {
+ display_fn(move |f| resolved_path(f, self.def_id(), self, false, false, cx))
+ }
+}
+
+impl clean::Impl {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ use_absolute: bool,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ if f.alternate() {
+ write!(f, "impl{:#} ", self.generics.print(cx))?;
+ } else {
+ write!(f, "impl{} ", self.generics.print(cx))?;
+ }
+
+ if let Some(ref ty) = self.trait_ {
+ match self.polarity {
+ ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => {}
+ ty::ImplPolarity::Negative => write!(f, "!")?,
+ }
+ fmt::Display::fmt(&ty.print(cx), f)?;
+ write!(f, " for ")?;
+ }
+
+ if let clean::Type::Tuple(types) = &self.for_ &&
+ let [clean::Type::Generic(name)] = &types[..] &&
+ (self.kind.is_fake_variadic() || self.kind.is_auto())
+ {
+ // Hardcoded anchor library/core/src/primitive_docs.rs
+ // Link should match `# Trait implementations`
+ primitive_link_fragment(f, PrimitiveType::Tuple, &format!("({name}₁, {name}₂, …, {name}ₙ)"), "#trait-implementations-1", cx)?;
+ } else if let clean::BareFunction(bare_fn) = &self.for_ &&
+ let [clean::Argument { type_: clean::Type::Generic(name), .. }] = &bare_fn.decl.inputs.values[..] &&
+ (self.kind.is_fake_variadic() || self.kind.is_auto())
+ {
+ // Hardcoded anchor library/core/src/primitive_docs.rs
+ // Link should match `# Trait implementations`
+
+ let hrtb = bare_fn.print_hrtb_with_space(cx);
+ let unsafety = bare_fn.unsafety.print_with_space();
+ let abi = print_abi_with_space(bare_fn.abi);
+ if f.alternate() {
+ write!(
+ f,
+ "{hrtb:#}{unsafety}{abi:#}",
+ )?;
+ } else {
+ write!(
+ f,
+ "{hrtb}{unsafety}{abi}",
+ )?;
+ }
+ let ellipsis = if bare_fn.decl.c_variadic {
+ ", ..."
+ } else {
+ ""
+ };
+ primitive_link_fragment(f, PrimitiveType::Tuple, &format!("fn ({name}₁, {name}₂, …, {name}ₙ{ellipsis})"), "#trait-implementations-1", cx)?;
+ // Write output.
+ if let clean::FnRetTy::Return(ty) = &bare_fn.decl.output {
+ write!(f, " -> ")?;
+ fmt_type(ty, f, use_absolute, cx)?;
+ }
+ } else if let Some(ty) = self.kind.as_blanket_ty() {
+ fmt_type(ty, f, use_absolute, cx)?;
+ } else {
+ fmt_type(&self.for_, f, use_absolute, cx)?;
+ }
+
+ fmt::Display::fmt(&print_where_clause(&self.generics, cx, 0, Ending::Newline), f)?;
+ Ok(())
+ })
+ }
+}
+
+impl clean::Arguments {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ for (i, input) in self.values.iter().enumerate() {
+ if !input.name.is_empty() {
+ write!(f, "{}: ", input.name)?;
+ }
+ if f.alternate() {
+ write!(f, "{:#}", input.type_.print(cx))?;
+ } else {
+ write!(f, "{}", input.type_.print(cx))?;
+ }
+ if i + 1 < self.values.len() {
+ write!(f, ", ")?;
+ }
+ }
+ Ok(())
+ })
+ }
+}
+
+impl clean::FnRetTy {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match self {
+ clean::Return(clean::Tuple(tys)) if tys.is_empty() => Ok(()),
+ clean::Return(ty) if f.alternate() => {
+ write!(f, " -> {:#}", ty.print(cx))
+ }
+ clean::Return(ty) => write!(f, " -&gt; {}", ty.print(cx)),
+ clean::DefaultReturn => Ok(()),
+ })
+ }
+}
+
+impl clean::BareFunctionDecl {
+ fn print_hrtb_with_space<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ if !self.generic_params.is_empty() {
+ write!(
+ f,
+ "for&lt;{}&gt; ",
+ comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true)
+ )
+ } else {
+ Ok(())
+ }
+ })
+ }
+}
+
+impl clean::FnDecl {
+ pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'b + Captures<'tcx> {
+ display_fn(move |f| {
+ let ellipsis = if self.c_variadic { ", ..." } else { "" };
+ if f.alternate() {
+ write!(
+ f,
+ "({args:#}{ellipsis}){arrow:#}",
+ args = self.inputs.print(cx),
+ ellipsis = ellipsis,
+ arrow = self.output.print(cx)
+ )
+ } else {
+ write!(
+ f,
+ "({args}{ellipsis}){arrow}",
+ args = self.inputs.print(cx),
+ ellipsis = ellipsis,
+ arrow = self.output.print(cx)
+ )
+ }
+ })
+ }
+
+ /// * `header_len`: The length of the function header and name. In other words, the number of
+ /// characters in the function declaration up to but not including the parentheses.
+ /// <br>Used to determine line-wrapping.
+ /// * `indent`: The number of spaces to indent each successive line with, if line-wrapping is
+ /// necessary.
+ /// * `asyncness`: Whether the function is async or not.
+ pub(crate) fn full_print<'a, 'tcx: 'a>(
+ &'a self,
+ header_len: usize,
+ indent: usize,
+ asyncness: hir::IsAsync,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| self.inner_full_print(header_len, indent, asyncness, f, cx))
+ }
+
+ fn inner_full_print(
+ &self,
+ header_len: usize,
+ indent: usize,
+ asyncness: hir::IsAsync,
+ f: &mut fmt::Formatter<'_>,
+ cx: &Context<'_>,
+ ) -> fmt::Result {
+ let amp = if f.alternate() { "&" } else { "&amp;" };
+ let mut args = Buffer::html();
+ let mut args_plain = Buffer::new();
+ for (i, input) in self.inputs.values.iter().enumerate() {
+ if let Some(selfty) = input.to_self() {
+ match selfty {
+ clean::SelfValue => {
+ args.push_str("self");
+ args_plain.push_str("self");
+ }
+ clean::SelfBorrowed(Some(ref lt), mtbl) => {
+ write!(args, "{}{} {}self", amp, lt.print(), mtbl.print_with_space());
+ write!(args_plain, "&{} {}self", lt.print(), mtbl.print_with_space());
+ }
+ clean::SelfBorrowed(None, mtbl) => {
+ write!(args, "{}{}self", amp, mtbl.print_with_space());
+ write!(args_plain, "&{}self", mtbl.print_with_space());
+ }
+ clean::SelfExplicit(ref typ) => {
+ if f.alternate() {
+ write!(args, "self: {:#}", typ.print(cx));
+ } else {
+ write!(args, "self: {}", typ.print(cx));
+ }
+ write!(args_plain, "self: {:#}", typ.print(cx));
+ }
+ }
+ } else {
+ if i > 0 {
+ args.push_str("<br>");
+ }
+ if input.is_const {
+ args.push_str("const ");
+ args_plain.push_str("const ");
+ }
+ if !input.name.is_empty() {
+ write!(args, "{}: ", input.name);
+ write!(args_plain, "{}: ", input.name);
+ }
+
+ if f.alternate() {
+ write!(args, "{:#}", input.type_.print(cx));
+ } else {
+ write!(args, "{}", input.type_.print(cx));
+ }
+ write!(args_plain, "{:#}", input.type_.print(cx));
+ }
+ if i + 1 < self.inputs.values.len() {
+ args.push_str(",");
+ args_plain.push_str(",");
+ }
+ }
+
+ let mut args_plain = format!("({})", args_plain.into_inner());
+ let mut args = args.into_inner();
+
+ if self.c_variadic {
+ args.push_str(",<br> ...");
+ args_plain.push_str(", ...");
+ }
+
+ let arrow_plain;
+ let arrow = if let hir::IsAsync::Async = asyncness {
+ let output = self.sugared_async_return_type();
+ arrow_plain = format!("{:#}", output.print(cx));
+ if f.alternate() { arrow_plain.clone() } else { format!("{}", output.print(cx)) }
+ } else {
+ arrow_plain = format!("{:#}", self.output.print(cx));
+ if f.alternate() { arrow_plain.clone() } else { format!("{}", self.output.print(cx)) }
+ };
+
+ let declaration_len = header_len + args_plain.len() + arrow_plain.len();
+ let output = if declaration_len > 80 {
+ let full_pad = format!("<br>{}", "&nbsp;".repeat(indent + 4));
+ let close_pad = format!("<br>{}", "&nbsp;".repeat(indent));
+ format!(
+ "({pad}{args}{close}){arrow}",
+ pad = if self.inputs.values.is_empty() { "" } else { &full_pad },
+ args = args.replace("<br>", &full_pad),
+ close = close_pad,
+ arrow = arrow
+ )
+ } else {
+ format!("({args}){arrow}", args = args.replace("<br>", " "), arrow = arrow)
+ };
+
+ if f.alternate() {
+ write!(f, "{}", output.replace("<br>", "\n"))
+ } else {
+ write!(f, "{}", output)
+ }
+ }
+}
+
+impl clean::Visibility {
+ pub(crate) fn print_with_space<'a, 'tcx: 'a>(
+ self,
+ item_did: ItemId,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ use std::fmt::Write as _;
+
+ let to_print: Cow<'static, str> = match self {
+ clean::Public => "pub ".into(),
+ clean::Inherited => "".into(),
+ clean::Visibility::Restricted(vis_did) => {
+ // FIXME(camelid): This may not work correctly if `item_did` is a module.
+ // However, rustdoc currently never displays a module's
+ // visibility, so it shouldn't matter.
+ let parent_module = find_nearest_parent_module(cx.tcx(), item_did.expect_def_id());
+
+ if vis_did.is_crate_root() {
+ "pub(crate) ".into()
+ } else if parent_module == Some(vis_did) {
+ // `pub(in foo)` where `foo` is the parent module
+ // is the same as no visibility modifier
+ "".into()
+ } else if parent_module
+ .and_then(|parent| find_nearest_parent_module(cx.tcx(), parent))
+ == Some(vis_did)
+ {
+ "pub(super) ".into()
+ } else {
+ let path = cx.tcx().def_path(vis_did);
+ debug!("path={:?}", path);
+ // modified from `resolved_path()` to work with `DefPathData`
+ let last_name = path.data.last().unwrap().data.get_opt_name().unwrap();
+ let anchor = anchor(vis_did, last_name, cx).to_string();
+
+ let mut s = "pub(in ".to_owned();
+ for seg in &path.data[..path.data.len() - 1] {
+ let _ = write!(s, "{}::", seg.data.get_opt_name().unwrap());
+ }
+ let _ = write!(s, "{}) ", anchor);
+ s.into()
+ }
+ }
+ };
+ display_fn(move |f| write!(f, "{}", to_print))
+ }
+
+ /// This function is the same as print_with_space, except that it renders no links.
+ /// It's used for macros' rendered source view, which is syntax highlighted and cannot have
+ /// any HTML in it.
+ pub(crate) fn to_src_with_space<'a, 'tcx: 'a>(
+ self,
+ tcx: TyCtxt<'tcx>,
+ item_did: DefId,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ let to_print = match self {
+ clean::Public => "pub ".to_owned(),
+ clean::Inherited => String::new(),
+ clean::Visibility::Restricted(vis_did) => {
+ // FIXME(camelid): This may not work correctly if `item_did` is a module.
+ // However, rustdoc currently never displays a module's
+ // visibility, so it shouldn't matter.
+ let parent_module = find_nearest_parent_module(tcx, item_did);
+
+ if vis_did.is_crate_root() {
+ "pub(crate) ".to_owned()
+ } else if parent_module == Some(vis_did) {
+ // `pub(in foo)` where `foo` is the parent module
+ // is the same as no visibility modifier
+ String::new()
+ } else if parent_module.and_then(|parent| find_nearest_parent_module(tcx, parent))
+ == Some(vis_did)
+ {
+ "pub(super) ".to_owned()
+ } else {
+ format!("pub(in {}) ", tcx.def_path_str(vis_did))
+ }
+ }
+ };
+ display_fn(move |f| f.write_str(&to_print))
+ }
+}
+
+pub(crate) trait PrintWithSpace {
+ fn print_with_space(&self) -> &str;
+}
+
+impl PrintWithSpace for hir::Unsafety {
+ fn print_with_space(&self) -> &str {
+ match self {
+ hir::Unsafety::Unsafe => "unsafe ",
+ hir::Unsafety::Normal => "",
+ }
+ }
+}
+
+impl PrintWithSpace for hir::IsAsync {
+ fn print_with_space(&self) -> &str {
+ match self {
+ hir::IsAsync::Async => "async ",
+ hir::IsAsync::NotAsync => "",
+ }
+ }
+}
+
+impl PrintWithSpace for hir::Mutability {
+ fn print_with_space(&self) -> &str {
+ match self {
+ hir::Mutability::Not => "",
+ hir::Mutability::Mut => "mut ",
+ }
+ }
+}
+
+pub(crate) fn print_constness_with_space(
+ c: &hir::Constness,
+ s: Option<ConstStability>,
+) -> &'static str {
+ match (c, s) {
+ // const stable or when feature(staged_api) is not set
+ (
+ hir::Constness::Const,
+ Some(ConstStability { level: StabilityLevel::Stable { .. }, .. }),
+ )
+ | (hir::Constness::Const, None) => "const ",
+ // const unstable or not const
+ _ => "",
+ }
+}
+
+impl clean::Import {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match self.kind {
+ clean::ImportKind::Simple(name) => {
+ if name == self.source.path.last() {
+ write!(f, "use {};", self.source.print(cx))
+ } else {
+ write!(f, "use {} as {};", self.source.print(cx), name)
+ }
+ }
+ clean::ImportKind::Glob => {
+ if self.source.path.segments.is_empty() {
+ write!(f, "use *;")
+ } else {
+ write!(f, "use {}::*;", self.source.print(cx))
+ }
+ }
+ })
+ }
+}
+
+impl clean::ImportSource {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match self.did {
+ Some(did) => resolved_path(f, did, &self.path, true, false, cx),
+ _ => {
+ for seg in &self.path.segments[..self.path.segments.len() - 1] {
+ write!(f, "{}::", seg.name)?;
+ }
+ let name = self.path.last();
+ if let hir::def::Res::PrimTy(p) = self.path.res {
+ primitive_link(f, PrimitiveType::from(p), name.as_str(), cx)?;
+ } else {
+ write!(f, "{}", name)?;
+ }
+ Ok(())
+ }
+ })
+ }
+}
+
+impl clean::TypeBinding {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| {
+ f.write_str(self.assoc.name.as_str())?;
+ if f.alternate() {
+ write!(f, "{:#}", self.assoc.args.print(cx))?;
+ } else {
+ write!(f, "{}", self.assoc.args.print(cx))?;
+ }
+ match self.kind {
+ clean::TypeBindingKind::Equality { ref term } => {
+ if f.alternate() {
+ write!(f, " = {:#}", term.print(cx))?;
+ } else {
+ write!(f, " = {}", term.print(cx))?;
+ }
+ }
+ clean::TypeBindingKind::Constraint { ref bounds } => {
+ if !bounds.is_empty() {
+ if f.alternate() {
+ write!(f, ": {:#}", print_generic_bounds(bounds, cx))?;
+ } else {
+ write!(f, ":&nbsp;{}", print_generic_bounds(bounds, cx))?;
+ }
+ }
+ }
+ }
+ Ok(())
+ })
+ }
+}
+
+pub(crate) fn print_abi_with_space(abi: Abi) -> impl fmt::Display {
+ display_fn(move |f| {
+ let quot = if f.alternate() { "\"" } else { "&quot;" };
+ match abi {
+ Abi::Rust => Ok(()),
+ abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
+ }
+ })
+}
+
+pub(crate) fn print_default_space<'a>(v: bool) -> &'a str {
+ if v { "default " } else { "" }
+}
+
+impl clean::GenericArg {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ display_fn(move |f| match self {
+ clean::GenericArg::Lifetime(lt) => fmt::Display::fmt(&lt.print(), f),
+ clean::GenericArg::Type(ty) => fmt::Display::fmt(&ty.print(cx), f),
+ clean::GenericArg::Const(ct) => fmt::Display::fmt(&ct.print(cx.tcx()), f),
+ clean::GenericArg::Infer => fmt::Display::fmt("_", f),
+ })
+ }
+}
+
+impl clean::types::Term {
+ pub(crate) fn print<'a, 'tcx: 'a>(
+ &'a self,
+ cx: &'a Context<'tcx>,
+ ) -> impl fmt::Display + 'a + Captures<'tcx> {
+ match self {
+ clean::types::Term::Type(ty) => ty.print(cx),
+ _ => todo!(),
+ }
+ }
+}
+
+pub(crate) fn display_fn(
+ f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result,
+) -> impl fmt::Display {
+ struct WithFormatter<F>(Cell<Option<F>>);
+
+ impl<F> fmt::Display for WithFormatter<F>
+ where
+ F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result,
+ {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (self.0.take()).unwrap()(f)
+ }
+ }
+
+ WithFormatter(Cell::new(Some(f)))
+}
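`display_fn` is the workhorse behind the `print` methods above: it wraps a `FnOnce` closure in a `Display` adapter so formatting stays lazy. Because the closure is taken out of the `Cell` on first use, each value it returns can only be formatted once. A tiny sketch:

    // The closure runs only when the value is actually formatted.
    let greeting = display_fn(|f| write!(f, "hello {}", "world"));
    assert_eq!(greeting.to_string(), "hello world");
    // Formatting `greeting` a second time would panic: the FnOnce has already been consumed.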
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
new file mode 100644
index 000000000..05547ea15
--- /dev/null
+++ b/src/librustdoc/html/highlight.rs
@@ -0,0 +1,805 @@
+//! Basic syntax highlighting functionality.
+//!
+//! This module uses librustc_ast's lexer to provide token-based highlighting for
+//! the HTML documentation generated by rustdoc.
+//!
+//! Use the `render_with_highlighting` function to highlight some Rust code.
+
+use crate::clean::PrimitiveType;
+use crate::html::escape::Escape;
+use crate::html::render::Context;
+
+use std::collections::VecDeque;
+use std::fmt::{Display, Write};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_lexer::{LiteralKind, TokenKind};
+use rustc_span::edition::Edition;
+use rustc_span::symbol::Symbol;
+use rustc_span::{BytePos, Span, DUMMY_SP};
+
+use super::format::{self, Buffer};
+use super::render::LinkFromSrc;
+
+/// This type is needed when we want to render links on items so that readers can jump to their definition.
+pub(crate) struct HrefContext<'a, 'b, 'c> {
+ pub(crate) context: &'a Context<'b>,
+ /// This span contains the current file we're going through.
+ pub(crate) file_span: Span,
+ /// This field is used to know "how far" from the top of the directory we are, in order to link
+ /// to either documentation pages or other source pages.
+ pub(crate) root_path: &'c str,
+}
+
+/// Decorations are represented as a map from CSS class to vector of character ranges.
+/// Each range will be wrapped in a span with that class.
+pub(crate) struct DecorationInfo(pub(crate) FxHashMap<&'static str, Vec<(u32, u32)>>);
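
A hedged sketch of building a `DecorationInfo`: wrap bytes 0..10 of the source in a span with the `example` class (the same shape as the decorations unit test added further down in this patch); the class name and range are illustrative.

// Illustrative only: "example" is an arbitrary CSS class, 0..10 an arbitrary byte range.
let mut decorations = FxHashMap::default();
decorations.insert("example", vec![(0u32, 10u32)]);
let info = DecorationInfo(decorations);
// `info` can now be passed as the `decoration_info` argument of `render_with_highlighting`.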
+
+/// Highlights `src`, returning the HTML output.
+pub(crate) fn render_with_highlighting(
+ src: &str,
+ out: &mut Buffer,
+ class: Option<&str>,
+ playground_button: Option<&str>,
+ tooltip: Option<(Option<Edition>, &str)>,
+ edition: Edition,
+ extra_content: Option<Buffer>,
+ href_context: Option<HrefContext<'_, '_, '_>>,
+ decoration_info: Option<DecorationInfo>,
+) {
+ debug!("highlighting: ================\n{}\n==============", src);
+ if let Some((edition_info, class)) = tooltip {
+ write!(
+ out,
+ "<div class='information'><div class='tooltip {}'{}>ⓘ</div></div>",
+ class,
+ if let Some(edition_info) = edition_info {
+ format!(" data-edition=\"{}\"", edition_info)
+ } else {
+ String::new()
+ },
+ );
+ }
+
+ write_header(out, class, extra_content);
+ write_code(out, src, edition, href_context, decoration_info);
+ write_footer(out, playground_button);
+}
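
For an ordinary documentation code block most of the optional arguments are `None`; a hedged call sketch (the source string and CSS class are illustrative, the argument order follows the signature above):

let mut out = Buffer::html();
render_with_highlighting(
    "fn main() { println!(\"hi\"); }", // src
    &mut out,
    Some("rust-example-rendered"), // extra CSS class on the <pre>
    None,                          // no playground "Run" button
    None,                          // no tooltip
    Edition::Edition2018,
    None,                          // no extra content before the <pre>
    None,                          // not a source page, so no item links
    None,                          // no decorations
);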
+
+fn write_header(out: &mut Buffer, class: Option<&str>, extra_content: Option<Buffer>) {
+ write!(out, "<div class=\"example-wrap\">");
+ if let Some(extra) = extra_content {
+ out.push_buffer(extra);
+ }
+ if let Some(class) = class {
+ write!(out, "<pre class=\"rust {}\">", class);
+ } else {
+ write!(out, "<pre class=\"rust\">");
+ }
+ write!(out, "<code>");
+}
+
+/// Convert the given `src` source code into HTML by adding classes for highlighting.
+///
+/// This code is used to render code blocks (in the documentation) as well as the source code pages.
+///
+/// Some explanations on the last arguments:
+///
+/// In case we are rendering a code block and not a source code file, `href_context` will be `None`.
+/// To put it more simply: if `href_context` is `None`, the code won't try to generate links to an
+/// item definition.
+///
+/// More explanations about spans and how we use them here are provided in the
+fn write_code(
+ out: &mut Buffer,
+ src: &str,
+ edition: Edition,
+ href_context: Option<HrefContext<'_, '_, '_>>,
+ decoration_info: Option<DecorationInfo>,
+) {
+ // This replacement fixes how source code with DOS line endings ("\r\n") is displayed.
+ let src = src.replace("\r\n", "\n");
+ let mut closing_tags: Vec<&'static str> = Vec::new();
+ Classifier::new(
+ &src,
+ edition,
+ href_context.as_ref().map(|c| c.file_span).unwrap_or(DUMMY_SP),
+ decoration_info,
+ )
+ .highlight(&mut |highlight| {
+ match highlight {
+ Highlight::Token { text, class } => string(out, Escape(text), class, &href_context),
+ Highlight::EnterSpan { class } => {
+ closing_tags.push(enter_span(out, class, &href_context))
+ }
+ Highlight::ExitSpan => {
+ exit_span(out, closing_tags.pop().expect("ExitSpan without EnterSpan"))
+ }
+ };
+ });
+}
+
+fn write_footer(out: &mut Buffer, playground_button: Option<&str>) {
+ writeln!(out, "</code></pre>{}</div>", playground_button.unwrap_or_default());
+}
+
+/// How a span of text is classified. Mostly corresponds to token kinds.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum Class {
+ Comment,
+ DocComment,
+ Attribute,
+ KeyWord,
+ // Keywords that do pointer/reference stuff.
+ RefKeyWord,
+ Self_(Span),
+ Op,
+ Macro(Span),
+ MacroNonTerminal,
+ String,
+ Number,
+ Bool,
+ Ident(Span),
+ Lifetime,
+ PreludeTy,
+ PreludeVal,
+ QuestionMark,
+ Decoration(&'static str),
+}
+
+impl Class {
+ /// Returns the CSS class expected by rustdoc for each `Class`.
+ fn as_html(self) -> &'static str {
+ match self {
+ Class::Comment => "comment",
+ Class::DocComment => "doccomment",
+ Class::Attribute => "attribute",
+ Class::KeyWord => "kw",
+ Class::RefKeyWord => "kw-2",
+ Class::Self_(_) => "self",
+ Class::Op => "op",
+ Class::Macro(_) => "macro",
+ Class::MacroNonTerminal => "macro-nonterminal",
+ Class::String => "string",
+ Class::Number => "number",
+ Class::Bool => "bool-val",
+ Class::Ident(_) => "ident",
+ Class::Lifetime => "lifetime",
+ Class::PreludeTy => "prelude-ty",
+ Class::PreludeVal => "prelude-val",
+ Class::QuestionMark => "question-mark",
+ Class::Decoration(kind) => kind,
+ }
+ }
+
+ /// If this class refers to an item that can be turned into a link to its definition, this
+ /// returns the item's `Span` (essentially the `(lo, hi)` byte range of the item in the source).
+ fn get_span(self) -> Option<Span> {
+ match self {
+ Self::Ident(sp) | Self::Self_(sp) | Self::Macro(sp) => Some(sp),
+ Self::Comment
+ | Self::DocComment
+ | Self::Attribute
+ | Self::KeyWord
+ | Self::RefKeyWord
+ | Self::Op
+ | Self::MacroNonTerminal
+ | Self::String
+ | Self::Number
+ | Self::Bool
+ | Self::Lifetime
+ | Self::PreludeTy
+ | Self::PreludeVal
+ | Self::QuestionMark
+ | Self::Decoration(_) => None,
+ }
+ }
+}
+
+enum Highlight<'a> {
+ Token { text: &'a str, class: Option<Class> },
+ EnterSpan { class: Class },
+ ExitSpan,
+}
+
+struct TokenIter<'a> {
+ src: &'a str,
+}
+
+impl<'a> Iterator for TokenIter<'a> {
+ type Item = (TokenKind, &'a str);
+ fn next(&mut self) -> Option<(TokenKind, &'a str)> {
+ if self.src.is_empty() {
+ return None;
+ }
+ let token = rustc_lexer::first_token(self.src);
+ let (text, rest) = self.src.split_at(token.len as usize);
+ self.src = rest;
+ Some((token.kind, text))
+ }
+}
+
+/// Classifies into identifier class; returns `None` if this is a non-keyword identifier.
+fn get_real_ident_class(text: &str, edition: Edition, allow_path_keywords: bool) -> Option<Class> {
+ let ignore: &[&str] =
+ if allow_path_keywords { &["self", "Self", "super", "crate"] } else { &["self", "Self"] };
+ if ignore.iter().any(|k| *k == text) {
+ return None;
+ }
+ Some(match text {
+ "ref" | "mut" => Class::RefKeyWord,
+ "false" | "true" => Class::Bool,
+ _ if Symbol::intern(text).is_reserved(|| edition) => Class::KeyWord,
+ _ => return None,
+ })
+}
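
In short: `self`/`Self` (plus `super`/`crate` on paths) deliberately fall through to `None` so the caller can assign their own class, `ref`/`mut` become `RefKeyWord`, booleans become `Bool`, reserved words become `KeyWord`, and anything else is a plain identifier. A few hedged examples of the expected classification (these rely on session globals being set up, as in the unit tests added later in this patch):

let edition = Edition::Edition2018;
assert_eq!(get_real_ident_class("mut", edition, false), Some(Class::RefKeyWord));
assert_eq!(get_real_ident_class("true", edition, false), Some(Class::Bool));
assert_eq!(get_real_ident_class("fn", edition, false), Some(Class::KeyWord));
assert_eq!(get_real_ident_class("self", edition, false), None); // caller decides
assert_eq!(get_real_ident_class("foo", edition, false), None);  // plain identifier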
+
+/// This iterator is based on the same idea as `Peekable`, except that it allows peeking more than
+/// just the next item through `peek_next`. The `peek` method always returns the next item after
+/// the current one, whereas `peek_next` returns the next item after the last one peeked.
+///
+/// You can use both `peek` and `peek_next` at the same time without problem.
+struct PeekIter<'a> {
+ stored: VecDeque<(TokenKind, &'a str)>,
+ /// This position is reinitialized when using `next`. It is used in `peek_next`.
+ peek_pos: usize,
+ iter: TokenIter<'a>,
+}
+
+impl<'a> PeekIter<'a> {
+ fn new(iter: TokenIter<'a>) -> Self {
+ Self { stored: VecDeque::new(), peek_pos: 0, iter }
+ }
+ /// Returns the next item after the current one. It doesn't interfere with the `peek_next` output.
+ fn peek(&mut self) -> Option<&(TokenKind, &'a str)> {
+ if self.stored.is_empty() {
+ if let Some(next) = self.iter.next() {
+ self.stored.push_back(next);
+ }
+ }
+ self.stored.front()
+ }
+ /// Returns the next item after the last one peeked. It doesn't interfere with the `peek` output.
+ fn peek_next(&mut self) -> Option<&(TokenKind, &'a str)> {
+ self.peek_pos += 1;
+ if self.peek_pos - 1 < self.stored.len() {
+ self.stored.get(self.peek_pos - 1)
+ } else if let Some(next) = self.iter.next() {
+ self.stored.push_back(next);
+ self.stored.back()
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a> Iterator for PeekIter<'a> {
+ type Item = (TokenKind, &'a str);
+ fn next(&mut self) -> Option<Self::Item> {
+ self.peek_pos = 0;
+ if let Some(first) = self.stored.pop_front() { Some(first) } else { self.iter.next() }
+ }
+}
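
The difference between the two peeks, as a hedged sketch over the token stream of `a::b` (written as if inside this module, since `TokenIter` and `PeekIter` are private):

// "a::b" lexes as Ident("a"), Colon, Colon, Ident("b").
let mut iter = PeekIter::new(TokenIter { src: "a::b" });
// `peek_next` walks one token further ahead on every call, without consuming anything...
assert_eq!(iter.peek_next().map(|(_, t)| *t), Some("a"));
assert_eq!(iter.peek_next().map(|(_, t)| *t), Some(":"));
assert_eq!(iter.peek_next().map(|(_, t)| *t), Some(":"));
// ...while `peek` still reports the first unconsumed token.
assert_eq!(iter.peek().map(|(_, t)| *t), Some("a"));
// `next` consumes "a" and resets the `peek_next` cursor.
assert_eq!(iter.next().map(|(_, t)| t), Some("a"));
assert_eq!(iter.peek_next().map(|(_, t)| *t), Some(":"));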
+
+/// Custom spans inserted into the source. E.g., `--scrape-examples` uses this to highlight function calls.
+struct Decorations {
+ starts: Vec<(u32, &'static str)>,
+ ends: Vec<u32>,
+}
+
+impl Decorations {
+ fn new(info: DecorationInfo) -> Self {
+ // Extract tuples (start, end, kind) into separate sequences of (start, kind) and (end).
+ let (mut starts, mut ends): (Vec<_>, Vec<_>) = info
+ .0
+ .into_iter()
+ .flat_map(|(kind, ranges)| ranges.into_iter().map(move |(lo, hi)| ((lo, kind), hi)))
+ .unzip();
+
+ // Sort the sequences in document order.
+ starts.sort_by_key(|(lo, _)| *lo);
+ ends.sort();
+
+ Decorations { starts, ends }
+ }
+}
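
A hedged worked example of that flattening (illustrative classes and ranges): the map `{"example" => [(0, 10), (25, 30)], "note" => [(12, 20)]}` becomes two parallel, position-sorted sequences that the highlight loop can drain from the front.

let mut map = FxHashMap::default();
map.insert("example", vec![(0u32, 10u32), (25, 30)]);
map.insert("note", vec![(12u32, 20u32)]);
let decs = Decorations::new(DecorationInfo(map));
// Starts keep their kind and are sorted by position; ends are just sorted positions.
assert_eq!(decs.starts, vec![(0, "example"), (12, "note"), (25, "example")]);
assert_eq!(decs.ends, vec![10, 20, 30]);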
+
+/// Processes program tokens, classifying strings of text by highlighting
+/// category (`Class`).
+struct Classifier<'a> {
+ tokens: PeekIter<'a>,
+ in_attribute: bool,
+ in_macro: bool,
+ in_macro_nonterminal: bool,
+ edition: Edition,
+ byte_pos: u32,
+ file_span: Span,
+ src: &'a str,
+ decorations: Option<Decorations>,
+}
+
+impl<'a> Classifier<'a> {
+ /// Takes as arguments the source code to HTML-ify, the Rust edition to use, and the span of the
+ /// source file, which will be used later on by the `span_correspondance_map`.
+ fn new(
+ src: &str,
+ edition: Edition,
+ file_span: Span,
+ decoration_info: Option<DecorationInfo>,
+ ) -> Classifier<'_> {
+ let tokens = PeekIter::new(TokenIter { src });
+ let decorations = decoration_info.map(Decorations::new);
+ Classifier {
+ tokens,
+ in_attribute: false,
+ in_macro: false,
+ in_macro_nonterminal: false,
+ edition,
+ byte_pos: 0,
+ file_span,
+ src,
+ decorations,
+ }
+ }
+
+ /// Convenient wrapper to create a [`Span`] from a position in the file.
+ fn new_span(&self, lo: u32, text: &str) -> Span {
+ let hi = lo + text.len() as u32;
+ let file_lo = self.file_span.lo();
+ self.file_span.with_lo(file_lo + BytePos(lo)).with_hi(file_lo + BytePos(hi))
+ }
+
+ /// Concatenate colons and idents as one when possible.
+ fn get_full_ident_path(&mut self) -> Vec<(TokenKind, usize, usize)> {
+ let start = self.byte_pos as usize;
+ let mut pos = start;
+ let mut has_ident = false;
+ let edition = self.edition;
+
+ loop {
+ let mut nb = 0;
+ while let Some((TokenKind::Colon, _)) = self.tokens.peek() {
+ self.tokens.next();
+ nb += 1;
+ }
+ // An ident path can start with "::", but once the ident path already has content,
+ // the "::" separator is mandatory between segments.
+ if has_ident && nb == 0 {
+ return vec![(TokenKind::Ident, start, pos)];
+ } else if nb != 0 && nb != 2 {
+ if has_ident {
+ return vec![(TokenKind::Ident, start, pos), (TokenKind::Colon, pos, pos + nb)];
+ } else {
+ return vec![(TokenKind::Colon, start, pos + nb)];
+ }
+ }
+
+ if let Some((None, text)) = self.tokens.peek().map(|(token, text)| {
+ if *token == TokenKind::Ident {
+ let class = get_real_ident_class(text, edition, true);
+ (class, text)
+ } else {
+ // Doesn't matter which Class we put in here...
+ (Some(Class::Comment), text)
+ }
+ }) {
+ // We only "add" the colon if there is an ident behind.
+ pos += text.len() + nb;
+ has_ident = true;
+ self.tokens.next();
+ } else if nb > 0 && has_ident {
+ return vec![(TokenKind::Ident, start, pos), (TokenKind::Colon, pos, pos + nb)];
+ } else if nb > 0 {
+ return vec![(TokenKind::Colon, start, start + nb)];
+ } else if has_ident {
+ return vec![(TokenKind::Ident, start, pos)];
+ } else {
+ return Vec::new();
+ }
+ }
+ }
+
+ /// Wraps the tokens iteration to ensure that the `byte_pos` is always correct.
+ ///
+ /// It returns the token's kind, the token as a string and its byte position in the source
+ /// string.
+ fn next(&mut self) -> Option<(TokenKind, &'a str, u32)> {
+ if let Some((kind, text)) = self.tokens.next() {
+ let before = self.byte_pos;
+ self.byte_pos += text.len() as u32;
+ Some((kind, text, before))
+ } else {
+ None
+ }
+ }
+
+ /// Exhausts the `Classifier` writing the output into `sink`.
+ ///
+ /// The general structure for this method is to iterate over each token,
+ /// possibly giving it an HTML span with a class specifying what flavor of
+ /// token is used.
+ fn highlight(mut self, sink: &mut dyn FnMut(Highlight<'a>)) {
+ loop {
+ if let Some(decs) = self.decorations.as_mut() {
+ let byte_pos = self.byte_pos;
+ let n_starts = decs.starts.iter().filter(|(i, _)| byte_pos >= *i).count();
+ for (_, kind) in decs.starts.drain(0..n_starts) {
+ sink(Highlight::EnterSpan { class: Class::Decoration(kind) });
+ }
+
+ let n_ends = decs.ends.iter().filter(|i| byte_pos >= **i).count();
+ for _ in decs.ends.drain(0..n_ends) {
+ sink(Highlight::ExitSpan);
+ }
+ }
+
+ if self
+ .tokens
+ .peek()
+ .map(|t| matches!(t.0, TokenKind::Colon | TokenKind::Ident))
+ .unwrap_or(false)
+ {
+ let tokens = self.get_full_ident_path();
+ for (token, start, end) in &tokens {
+ let text = &self.src[*start..*end];
+ self.advance(*token, text, sink, *start as u32);
+ self.byte_pos += text.len() as u32;
+ }
+ if !tokens.is_empty() {
+ continue;
+ }
+ }
+ if let Some((token, text, before)) = self.next() {
+ self.advance(token, text, sink, before);
+ } else {
+ break;
+ }
+ }
+ }
+
+ /// Single step of highlighting. This will classify `token`, but maybe also a couple of
+ /// following ones as well.
+ ///
+ /// `before` is the position of the given token in the `source` string and is used as "lo" byte
+ /// in case we want to try to generate a link for this token using the
+ /// `span_correspondance_map`.
+ fn advance(
+ &mut self,
+ token: TokenKind,
+ text: &'a str,
+ sink: &mut dyn FnMut(Highlight<'a>),
+ before: u32,
+ ) {
+ let lookahead = self.peek();
+ let no_highlight = |sink: &mut dyn FnMut(_)| sink(Highlight::Token { text, class: None });
+ let class = match token {
+ TokenKind::Whitespace => return no_highlight(sink),
+ TokenKind::LineComment { doc_style } | TokenKind::BlockComment { doc_style, .. } => {
+ if doc_style.is_some() {
+ Class::DocComment
+ } else {
+ Class::Comment
+ }
+ }
+ // Consider this as part of a macro invocation if there was a
+ // leading identifier.
+ TokenKind::Bang if self.in_macro => {
+ self.in_macro = false;
+ sink(Highlight::Token { text, class: None });
+ sink(Highlight::ExitSpan);
+ return;
+ }
+
+ // Assume that '&' or '*' is the reference or dereference operator
+ // or a reference or pointer type. Unless, of course, it looks like
+ // a logical and or a multiplication operator: `&&` or `* `.
+ TokenKind::Star => match self.tokens.peek() {
+ Some((TokenKind::Whitespace, _)) => Class::Op,
+ Some((TokenKind::Ident, "mut")) => {
+ self.next();
+ sink(Highlight::Token { text: "*mut", class: Some(Class::RefKeyWord) });
+ return;
+ }
+ Some((TokenKind::Ident, "const")) => {
+ self.next();
+ sink(Highlight::Token { text: "*const", class: Some(Class::RefKeyWord) });
+ return;
+ }
+ _ => Class::RefKeyWord,
+ },
+ TokenKind::And => match self.tokens.peek() {
+ Some((TokenKind::And, _)) => {
+ self.next();
+ sink(Highlight::Token { text: "&&", class: Some(Class::Op) });
+ return;
+ }
+ Some((TokenKind::Eq, _)) => {
+ self.next();
+ sink(Highlight::Token { text: "&=", class: Some(Class::Op) });
+ return;
+ }
+ Some((TokenKind::Whitespace, _)) => Class::Op,
+ Some((TokenKind::Ident, "mut")) => {
+ self.next();
+ sink(Highlight::Token { text: "&mut", class: Some(Class::RefKeyWord) });
+ return;
+ }
+ _ => Class::RefKeyWord,
+ },
+
+ // These can either be operators, or arrows.
+ TokenKind::Eq => match lookahead {
+ Some(TokenKind::Eq) => {
+ self.next();
+ sink(Highlight::Token { text: "==", class: Some(Class::Op) });
+ return;
+ }
+ Some(TokenKind::Gt) => {
+ self.next();
+ sink(Highlight::Token { text: "=>", class: None });
+ return;
+ }
+ _ => Class::Op,
+ },
+ TokenKind::Minus if lookahead == Some(TokenKind::Gt) => {
+ self.next();
+ sink(Highlight::Token { text: "->", class: None });
+ return;
+ }
+
+ // Other operators.
+ TokenKind::Minus
+ | TokenKind::Plus
+ | TokenKind::Or
+ | TokenKind::Slash
+ | TokenKind::Caret
+ | TokenKind::Percent
+ | TokenKind::Bang
+ | TokenKind::Lt
+ | TokenKind::Gt => Class::Op,
+
+ // Miscellaneous, no highlighting.
+ TokenKind::Dot
+ | TokenKind::Semi
+ | TokenKind::Comma
+ | TokenKind::OpenParen
+ | TokenKind::CloseParen
+ | TokenKind::OpenBrace
+ | TokenKind::CloseBrace
+ | TokenKind::OpenBracket
+ | TokenKind::At
+ | TokenKind::Tilde
+ | TokenKind::Colon
+ | TokenKind::Unknown => return no_highlight(sink),
+
+ TokenKind::Question => Class::QuestionMark,
+
+ TokenKind::Dollar => match lookahead {
+ Some(TokenKind::Ident) => {
+ self.in_macro_nonterminal = true;
+ Class::MacroNonTerminal
+ }
+ _ => return no_highlight(sink),
+ },
+
+ // This might be the start of an attribute. We're going to want to
+ // continue highlighting it as an attribute until the ending ']' is
+ // seen, so skip out early. Down below we terminate the attribute
+ // span when we see the ']'.
+ TokenKind::Pound => {
+ match lookahead {
+ // Case 1: #![inner_attribute]
+ Some(TokenKind::Bang) => {
+ self.next();
+ if let Some(TokenKind::OpenBracket) = self.peek() {
+ self.in_attribute = true;
+ sink(Highlight::EnterSpan { class: Class::Attribute });
+ }
+ sink(Highlight::Token { text: "#", class: None });
+ sink(Highlight::Token { text: "!", class: None });
+ return;
+ }
+ // Case 2: #[outer_attribute]
+ Some(TokenKind::OpenBracket) => {
+ self.in_attribute = true;
+ sink(Highlight::EnterSpan { class: Class::Attribute });
+ }
+ _ => (),
+ }
+ return no_highlight(sink);
+ }
+ TokenKind::CloseBracket => {
+ if self.in_attribute {
+ self.in_attribute = false;
+ sink(Highlight::Token { text: "]", class: None });
+ sink(Highlight::ExitSpan);
+ return;
+ }
+ return no_highlight(sink);
+ }
+ TokenKind::Literal { kind, .. } => match kind {
+ // Text literals.
+ LiteralKind::Byte { .. }
+ | LiteralKind::Char { .. }
+ | LiteralKind::Str { .. }
+ | LiteralKind::ByteStr { .. }
+ | LiteralKind::RawStr { .. }
+ | LiteralKind::RawByteStr { .. } => Class::String,
+ // Number literals.
+ LiteralKind::Float { .. } | LiteralKind::Int { .. } => Class::Number,
+ },
+ TokenKind::Ident | TokenKind::RawIdent if lookahead == Some(TokenKind::Bang) => {
+ self.in_macro = true;
+ sink(Highlight::EnterSpan { class: Class::Macro(self.new_span(before, text)) });
+ sink(Highlight::Token { text, class: None });
+ return;
+ }
+ TokenKind::Ident => match get_real_ident_class(text, self.edition, false) {
+ None => match text {
+ "Option" | "Result" => Class::PreludeTy,
+ "Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
+ // "union" is a weak keyword and is only considered as a keyword when declaring
+ // a union type.
+ "union" if self.check_if_is_union_keyword() => Class::KeyWord,
+ _ if self.in_macro_nonterminal => {
+ self.in_macro_nonterminal = false;
+ Class::MacroNonTerminal
+ }
+ "self" | "Self" => Class::Self_(self.new_span(before, text)),
+ _ => Class::Ident(self.new_span(before, text)),
+ },
+ Some(c) => c,
+ },
+ TokenKind::RawIdent | TokenKind::UnknownPrefix | TokenKind::InvalidIdent => {
+ Class::Ident(self.new_span(before, text))
+ }
+ TokenKind::Lifetime { .. } => Class::Lifetime,
+ };
+ // Anything that didn't return above is the simple case where the class just spans a single
+ // token, so we can use the `string` method.
+ sink(Highlight::Token { text, class: Some(class) });
+ }
+
+ fn peek(&mut self) -> Option<TokenKind> {
+ self.tokens.peek().map(|(token_kind, _text)| *token_kind)
+ }
+
+ fn check_if_is_union_keyword(&mut self) -> bool {
+ while let Some(kind) = self.tokens.peek_next().map(|(token_kind, _text)| token_kind) {
+ if *kind == TokenKind::Whitespace {
+ continue;
+ }
+ return *kind == TokenKind::Ident;
+ }
+ false
+ }
+}
+
+/// Called when we start processing a span of text that should be highlighted.
+/// The `Class` argument specifies how it should be highlighted.
+fn enter_span(
+ out: &mut Buffer,
+ klass: Class,
+ href_context: &Option<HrefContext<'_, '_, '_>>,
+) -> &'static str {
+ string_without_closing_tag(out, "", Some(klass), href_context).expect(
+ "internal error: enter_span was called with Some(klass) but did not return a \
+ closing HTML tag",
+ )
+}
+
+/// Called at the end of a span of highlighted text.
+fn exit_span(out: &mut Buffer, closing_tag: &str) {
+ out.write_str(closing_tag);
+}
+
+/// Called for a span of text. If the text should be highlighted differently
+/// from the surrounding text, then the `Class` argument will be a value other
+/// than `None`.
+///
+/// The following sequences of callbacks are equivalent:
+/// ```plain
+/// enter_span(Foo), string("text", None), exit_span()
+/// string("text", Foo)
+/// ```
+///
+/// The latter can be thought of as a shorthand for the former, which is more
+/// flexible.
+///
+/// Note that if `href_context` is not `None` and the given `klass` contains a `Span`, the function
+/// will try to find this span in the `span_correspondance_map`. If found, it generates a link for
+/// this element pointing to where its definition is located.
+fn string<T: Display>(
+ out: &mut Buffer,
+ text: T,
+ klass: Option<Class>,
+ href_context: &Option<HrefContext<'_, '_, '_>>,
+) {
+ if let Some(closing_tag) = string_without_closing_tag(out, text, klass, href_context) {
+ out.write_str(closing_tag);
+ }
+}
+
+/// This function writes `text` into `out` with some modifications depending on `klass`:
+///
+/// * If `klass` is `None`, `text` is written into `out` with no modification.
+/// * If `klass` is `Some` but `klass.get_span()` is `None`, it writes the text wrapped in a
+/// `<span>` with the provided `klass`.
+/// * If `klass` is `Some` and has a [`rustc_span::Span`], it then tries to generate a link (`<a>`
+/// element) by retrieving the link information from the `span_correspondance_map` that was filled
+/// in `span_map.rs::collect_spans_and_sources`. If it cannot retrieve the information, then it's
+/// the same as the second point (`klass` is `Some` but doesn't have a [`rustc_span::Span`]).
+fn string_without_closing_tag<T: Display>(
+ out: &mut Buffer,
+ text: T,
+ klass: Option<Class>,
+ href_context: &Option<HrefContext<'_, '_, '_>>,
+) -> Option<&'static str> {
+ let Some(klass) = klass
+ else {
+ write!(out, "{}", text);
+ return None;
+ };
+ let Some(def_span) = klass.get_span()
+ else {
+ write!(out, "<span class=\"{}\">{}", klass.as_html(), text);
+ return Some("</span>");
+ };
+
+ let mut text_s = text.to_string();
+ if text_s.contains("::") {
+ text_s = text_s.split("::").intersperse("::").fold(String::new(), |mut path, t| {
+ match t {
+ "self" | "Self" => write!(
+ &mut path,
+ "<span class=\"{}\">{}</span>",
+ Class::Self_(DUMMY_SP).as_html(),
+ t
+ ),
+ "crate" | "super" => {
+ write!(&mut path, "<span class=\"{}\">{}</span>", Class::KeyWord.as_html(), t)
+ }
+ t => write!(&mut path, "{}", t),
+ }
+ .expect("Failed to build source HTML path");
+ path
+ });
+ }
+ if let Some(href_context) = href_context {
+ if let Some(href) =
+ href_context.context.shared.span_correspondance_map.get(&def_span).and_then(|href| {
+ let context = href_context.context;
+ // FIXME: later on, it'd be nice to provide two links (if possible) for all items:
+ // one to the documentation page and one to the source definition.
+ // FIXME: currently, external items only generate a link to their documentation,
+ // a link to their definition can be generated using this:
+ // https://github.com/rust-lang/rust/blob/60f1a2fc4b535ead9c85ce085fdce49b1b097531/src/librustdoc/html/render/context.rs#L315-L338
+ match href {
+ LinkFromSrc::Local(span) => context
+ .href_from_span(*span, true)
+ .map(|s| format!("{}{}", href_context.root_path, s)),
+ LinkFromSrc::External(def_id) => {
+ format::href_with_root_path(*def_id, context, Some(href_context.root_path))
+ .ok()
+ .map(|(url, _, _)| url)
+ }
+ LinkFromSrc::Primitive(prim) => format::href_with_root_path(
+ PrimitiveType::primitive_locations(context.tcx())[prim],
+ context,
+ Some(href_context.root_path),
+ )
+ .ok()
+ .map(|(url, _, _)| url),
+ }
+ })
+ {
+ write!(out, "<a class=\"{}\" href=\"{}\">{}", klass.as_html(), href, text_s);
+ return Some("</a>");
+ }
+ }
+ write!(out, "<span class=\"{}\">{}", klass.as_html(), text_s);
+ Some("</span>")
+}
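
A hedged illustration of the first two cases above (the third needs a populated `span_correspondance_map`, so it is omitted); the buffer contents after both calls are shown in the final comment:

let mut out = Buffer::new();
// Case 1: no class, the text is written unchanged and there is nothing to close.
let tag = string_without_closing_tag(&mut out, "foo", None, &None);
assert_eq!(tag, None);
// Case 2: a class without a span opens a plain <span>; the caller writes the returned closing tag.
let tag = string_without_closing_tag(&mut out, "let", Some(Class::KeyWord), &None);
assert_eq!(tag, Some("</span>"));
// `out` now holds: foo<span class="kw">let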
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/highlight/fixtures/decorations.html b/src/librustdoc/html/highlight/fixtures/decorations.html
new file mode 100644
index 000000000..45f567880
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/decorations.html
@@ -0,0 +1,2 @@
+<span class="example"><span class="kw">let</span> <span class="ident">x</span> <span class="op">=</span> <span class="number">1</span>;</span>
+<span class="kw">let</span> <span class="ident">y</span> <span class="op">=</span> <span class="number">2</span>; \ No newline at end of file
diff --git a/src/librustdoc/html/highlight/fixtures/dos_line.html b/src/librustdoc/html/highlight/fixtures/dos_line.html
new file mode 100644
index 000000000..1c8dbffe7
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/dos_line.html
@@ -0,0 +1,3 @@
+<span class="kw">pub</span> <span class="kw">fn</span> <span class="ident">foo</span>() {
+<span class="macro">println!</span>(<span class="string">&quot;foo&quot;</span>);
+}
diff --git a/src/librustdoc/html/highlight/fixtures/highlight.html b/src/librustdoc/html/highlight/fixtures/highlight.html
new file mode 100644
index 000000000..abc2db179
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/highlight.html
@@ -0,0 +1,4 @@
+<span class="kw">use</span> <span class="ident"><span class="kw">crate</span>::a::foo</span>;
+<span class="kw">use</span> <span class="ident"><span class="self">self</span>::whatever</span>;
+<span class="kw">let</span> <span class="ident">x</span> <span class="op">=</span> <span class="ident"><span class="kw">super</span>::b::foo</span>;
+<span class="kw">let</span> <span class="ident">y</span> <span class="op">=</span> <span class="ident"><span class="self">Self</span>::whatever</span>; \ No newline at end of file
diff --git a/src/librustdoc/html/highlight/fixtures/sample.html b/src/librustdoc/html/highlight/fixtures/sample.html
new file mode 100644
index 000000000..b117a12e3
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/sample.html
@@ -0,0 +1,37 @@
+
+<style>
+.kw { color: #8959A8; }
+.kw-2, .prelude-ty { color: #4271AE; }
+.number, .string { color: #718C00; }
+.self, .bool-val, .prelude-val, .attribute, .attribute .ident { color: #C82829; }
+.macro, .macro-nonterminal { color: #3E999F; }
+.lifetime { color: #B76514; }
+.question-mark { color: #ff9011; }
+</style>
+<pre><code><span class="attribute">#![<span class="ident">crate_type</span> <span class="op">=</span> <span class="string">&quot;lib&quot;</span>]</span>
+
+<span class="kw">use</span> <span class="ident">std::path</span>::{<span class="ident">Path</span>, <span class="ident">PathBuf</span>};
+
+<span class="attribute">#[<span class="ident">cfg</span>(<span class="ident">target_os</span> <span class="op">=</span> <span class="string">&quot;linux&quot;</span>)]</span>
+<span class="kw">fn</span> <span class="ident">main</span>() -&gt; () {
+ <span class="kw">let</span> <span class="ident">foo</span> <span class="op">=</span> <span class="bool-val">true</span> <span class="op">&amp;&amp;</span> <span class="bool-val">false</span> <span class="op">|</span><span class="op">|</span> <span class="bool-val">true</span>;
+ <span class="kw">let</span> <span class="kw">_</span>: <span class="kw-2">*const</span> () <span class="op">=</span> <span class="number">0</span>;
+ <span class="kw">let</span> <span class="kw">_</span> <span class="op">=</span> <span class="kw-2">&amp;</span><span class="ident">foo</span>;
+ <span class="kw">let</span> <span class="kw">_</span> <span class="op">=</span> <span class="op">&amp;&amp;</span><span class="ident">foo</span>;
+ <span class="kw">let</span> <span class="kw">_</span> <span class="op">=</span> <span class="kw-2">*</span><span class="ident">foo</span>;
+ <span class="macro">mac!</span>(<span class="ident">foo</span>, <span class="kw-2">&amp;mut</span> <span class="ident">bar</span>);
+ <span class="macro">assert!</span>(<span class="self">self</span>.<span class="ident">length</span> <span class="op">&lt;</span> <span class="ident">N</span> <span class="op">&amp;&amp;</span> <span class="ident">index</span> <span class="op">&lt;</span><span class="op">=</span> <span class="self">self</span>.<span class="ident">length</span>);
+ <span class="ident">::std::env::var</span>(<span class="string">&quot;gateau&quot;</span>).<span class="ident">is_ok</span>();
+ <span class="attribute">#[<span class="ident">rustfmt::skip</span>]</span>
+ <span class="kw">let</span> <span class="ident">s</span>:<span class="ident">std::path::PathBuf</span> <span class="op">=</span> <span class="ident">std::path::PathBuf::new</span>();
+ <span class="kw">let</span> <span class="kw-2">mut</span> <span class="ident">s</span> <span class="op">=</span> <span class="ident">String::new</span>();
+
+ <span class="kw">match</span> <span class="kw-2">&amp;</span><span class="ident">s</span> {
+ <span class="kw-2">ref</span> <span class="kw-2">mut</span> <span class="ident">x</span> =&gt; {}
+ }
+}
+
+<span class="macro">macro_rules!</span> <span class="ident">bar</span> {
+ (<span class="macro-nonterminal">$</span><span class="macro-nonterminal">foo</span>:<span class="ident">tt</span>) =&gt; {};
+}
+</code></pre>
diff --git a/src/librustdoc/html/highlight/fixtures/sample.rs b/src/librustdoc/html/highlight/fixtures/sample.rs
new file mode 100644
index 000000000..fbfdc6767
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/sample.rs
@@ -0,0 +1,26 @@
+#![crate_type = "lib"]
+
+use std::path::{Path, PathBuf};
+
+#[cfg(target_os = "linux")]
+fn main() -> () {
+ let foo = true && false || true;
+ let _: *const () = 0;
+ let _ = &foo;
+ let _ = &&foo;
+ let _ = *foo;
+ mac!(foo, &mut bar);
+ assert!(self.length < N && index <= self.length);
+ ::std::env::var("gateau").is_ok();
+ #[rustfmt::skip]
+ let s:std::path::PathBuf = std::path::PathBuf::new();
+ let mut s = String::new();
+
+ match &s {
+ ref mut x => {}
+ }
+}
+
+macro_rules! bar {
+ ($foo:tt) => {};
+}
diff --git a/src/librustdoc/html/highlight/fixtures/union.html b/src/librustdoc/html/highlight/fixtures/union.html
new file mode 100644
index 000000000..c0acf31a0
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/union.html
@@ -0,0 +1,8 @@
+<span class="kw">union</span> <span class="ident">Foo</span> {
+ <span class="ident">i</span>: <span class="ident">i8</span>,
+ <span class="ident">u</span>: <span class="ident">i8</span>,
+}
+
+<span class="kw">fn</span> <span class="ident">main</span>() {
+ <span class="kw">let</span> <span class="ident">union</span> <span class="op">=</span> <span class="number">0</span>;
+}
diff --git a/src/librustdoc/html/highlight/fixtures/union.rs b/src/librustdoc/html/highlight/fixtures/union.rs
new file mode 100644
index 000000000..269ee115d
--- /dev/null
+++ b/src/librustdoc/html/highlight/fixtures/union.rs
@@ -0,0 +1,8 @@
+union Foo {
+ i: i8,
+ u: i8,
+}
+
+fn main() {
+ let union = 0;
+}
diff --git a/src/librustdoc/html/highlight/tests.rs b/src/librustdoc/html/highlight/tests.rs
new file mode 100644
index 000000000..1fea7e983
--- /dev/null
+++ b/src/librustdoc/html/highlight/tests.rs
@@ -0,0 +1,81 @@
+use super::{write_code, DecorationInfo};
+use crate::html::format::Buffer;
+use expect_test::expect_file;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_span::create_default_session_globals_then;
+use rustc_span::edition::Edition;
+
+const STYLE: &str = r#"
+<style>
+.kw { color: #8959A8; }
+.kw-2, .prelude-ty { color: #4271AE; }
+.number, .string { color: #718C00; }
+.self, .bool-val, .prelude-val, .attribute, .attribute .ident { color: #C82829; }
+.macro, .macro-nonterminal { color: #3E999F; }
+.lifetime { color: #B76514; }
+.question-mark { color: #ff9011; }
+</style>
+"#;
+
+#[test]
+fn test_html_highlighting() {
+ create_default_session_globals_then(|| {
+ let src = include_str!("fixtures/sample.rs");
+ let html = {
+ let mut out = Buffer::new();
+ write_code(&mut out, src, Edition::Edition2018, None, None);
+ format!("{}<pre><code>{}</code></pre>\n", STYLE, out.into_inner())
+ };
+ expect_file!["fixtures/sample.html"].assert_eq(&html);
+ });
+}
+
+#[test]
+fn test_dos_backline() {
+ create_default_session_globals_then(|| {
+ let src = "pub fn foo() {\r\n\
+ println!(\"foo\");\r\n\
+}\r\n";
+ let mut html = Buffer::new();
+ write_code(&mut html, src, Edition::Edition2018, None, None);
+ expect_file!["fixtures/dos_line.html"].assert_eq(&html.into_inner());
+ });
+}
+
+#[test]
+fn test_keyword_highlight() {
+ create_default_session_globals_then(|| {
+ let src = "use crate::a::foo;
+use self::whatever;
+let x = super::b::foo;
+let y = Self::whatever;";
+
+ let mut html = Buffer::new();
+ write_code(&mut html, src, Edition::Edition2018, None, None);
+ expect_file!["fixtures/highlight.html"].assert_eq(&html.into_inner());
+ });
+}
+
+#[test]
+fn test_union_highlighting() {
+ create_default_session_globals_then(|| {
+ let src = include_str!("fixtures/union.rs");
+ let mut html = Buffer::new();
+ write_code(&mut html, src, Edition::Edition2018, None, None);
+ expect_file!["fixtures/union.html"].assert_eq(&html.into_inner());
+ });
+}
+
+#[test]
+fn test_decorations() {
+ create_default_session_globals_then(|| {
+ let src = "let x = 1;
+let y = 2;";
+ let mut decorations = FxHashMap::default();
+ decorations.insert("example", vec![(0, 10)]);
+
+ let mut html = Buffer::new();
+ write_code(&mut html, src, Edition::Edition2018, None, Some(DecorationInfo(decorations)));
+ expect_file!["fixtures/decorations.html"].assert_eq(&html.into_inner());
+ });
+}
diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs
new file mode 100644
index 000000000..7d6d4b71e
--- /dev/null
+++ b/src/librustdoc/html/layout.rs
@@ -0,0 +1,103 @@
+use std::path::PathBuf;
+
+use rustc_data_structures::fx::FxHashMap;
+
+use crate::error::Error;
+use crate::externalfiles::ExternalHtml;
+use crate::html::format::{Buffer, Print};
+use crate::html::render::{ensure_trailing_slash, StylePath};
+
+use askama::Template;
+
+#[derive(Clone)]
+pub(crate) struct Layout {
+ pub(crate) logo: String,
+ pub(crate) favicon: String,
+ pub(crate) external_html: ExternalHtml,
+ pub(crate) default_settings: FxHashMap<String, String>,
+ pub(crate) krate: String,
+ /// The given user CSS file, which allows customizing the generated
+ /// documentation theme.
+ pub(crate) css_file_extension: Option<PathBuf>,
+ /// If true, then scrape-examples.js will be included in the output HTML file.
+ pub(crate) scrape_examples_extension: bool,
+}
+
+pub(crate) struct Page<'a> {
+ pub(crate) title: &'a str,
+ pub(crate) css_class: &'a str,
+ pub(crate) root_path: &'a str,
+ pub(crate) static_root_path: Option<&'a str>,
+ pub(crate) description: &'a str,
+ pub(crate) keywords: &'a str,
+ pub(crate) resource_suffix: &'a str,
+}
+
+impl<'a> Page<'a> {
+ pub(crate) fn get_static_root_path(&self) -> &str {
+ self.static_root_path.unwrap_or(self.root_path)
+ }
+}
+
+#[derive(Template)]
+#[template(path = "page.html")]
+struct PageLayout<'a> {
+ static_root_path: &'a str,
+ page: &'a Page<'a>,
+ layout: &'a Layout,
+ themes: Vec<String>,
+ sidebar: String,
+ content: String,
+ krate_with_trailing_slash: String,
+ pub(crate) rustdoc_version: &'a str,
+}
+
+pub(crate) fn render<T: Print, S: Print>(
+ layout: &Layout,
+ page: &Page<'_>,
+ sidebar: S,
+ t: T,
+ style_files: &[StylePath],
+) -> String {
+ let static_root_path = page.get_static_root_path();
+ let krate_with_trailing_slash = ensure_trailing_slash(&layout.krate).to_string();
+ let mut themes: Vec<String> = style_files
+ .iter()
+ .map(StylePath::basename)
+ .collect::<Result<_, Error>>()
+ .unwrap_or_default();
+ themes.sort();
+ let rustdoc_version = rustc_interface::util::version_str().unwrap_or("unknown version");
+ let content = Buffer::html().to_display(t); // Note: This must happen before making the sidebar.
+ let sidebar = Buffer::html().to_display(sidebar);
+ PageLayout {
+ static_root_path,
+ page,
+ layout,
+ themes,
+ sidebar,
+ content,
+ krate_with_trailing_slash,
+ rustdoc_version,
+ }
+ .render()
+ .unwrap()
+}
+
+pub(crate) fn redirect(url: &str) -> String {
+ // <script> triggers a redirect before refresh, so this is fine.
+ format!(
+ r##"<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta http-equiv="refresh" content="0;URL={url}">
+ <title>Redirection</title>
+</head>
+<body>
+ <p>Redirecting to <a href="{url}">{url}</a>...</p>
+ <script>location.replace("{url}" + location.search + location.hash);</script>
+</body>
+</html>"##,
+ url = url,
+ )
+}
diff --git a/src/librustdoc/html/length_limit.rs b/src/librustdoc/html/length_limit.rs
new file mode 100644
index 000000000..bbdc91c8d
--- /dev/null
+++ b/src/librustdoc/html/length_limit.rs
@@ -0,0 +1,119 @@
+//! See [`HtmlWithLimit`].
+
+use std::fmt::Write;
+use std::ops::ControlFlow;
+
+use crate::html::escape::Escape;
+
+/// A buffer that allows generating HTML with a length limit.
+///
+/// This buffer ensures that:
+///
+/// * all tags are closed,
+/// * tags are closed in the reverse order of when they were opened (i.e., the correct HTML order),
+/// * no tags are left empty (e.g., `<em></em>`) due to the length limit being reached,
+/// * all text is escaped.
+#[derive(Debug)]
+pub(super) struct HtmlWithLimit {
+ buf: String,
+ len: usize,
+ limit: usize,
+ /// A list of tags that have been requested to be opened via [`Self::open_tag()`]
+ /// but have not actually been pushed to `buf` yet. This ensures that tags are not
+ /// left empty (e.g., `<em></em>`) due to the length limit being reached.
+ queued_tags: Vec<&'static str>,
+ /// A list of all tags that have been opened but not yet closed.
+ unclosed_tags: Vec<&'static str>,
+}
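
The split between `queued_tags` and `unclosed_tags` is what enforces the "no empty tags" rule: a tag is only written once some following text survives the length check. A hedged usage sketch (limit and strings are illustrative; compare the unit tests added later in this patch):

let mut buf = HtmlWithLimit::new(11);
buf.push("Hello ");        // 6 bytes, fits
buf.open_tag("em");        // only queued, nothing written yet
buf.push("world");         // 11 bytes total: flushes "<em>" and writes the text
buf.close_tag();
buf.push("!");             // would make 12 > 11, so it is dropped
buf.open_tag("strong");    // never flushed, so no empty <strong></strong> appears
assert_eq!(buf.finish(), "Hello <em>world</em>");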
+
+impl HtmlWithLimit {
+ /// Create a new buffer, with a limit of `length_limit`.
+ pub(super) fn new(length_limit: usize) -> Self {
+ let buf = if length_limit > 1000 {
+ // If the length limit is really large, don't preallocate tons of memory.
+ String::new()
+ } else {
+ // The length limit is actually a good heuristic for initial allocation size.
+ // Measurements showed that using it as the initial capacity ended up using less memory
+ // than `String::new`.
+ // See https://github.com/rust-lang/rust/pull/88173#discussion_r692531631 for more.
+ String::with_capacity(length_limit)
+ };
+ Self {
+ buf,
+ len: 0,
+ limit: length_limit,
+ unclosed_tags: Vec::new(),
+ queued_tags: Vec::new(),
+ }
+ }
+
+ /// Finish using the buffer and get the written output.
+ /// This function will close all unclosed tags for you.
+ pub(super) fn finish(mut self) -> String {
+ self.close_all_tags();
+ self.buf
+ }
+
+ /// Write some plain text to the buffer, escaping as needed.
+ ///
+ /// This function skips writing the text if the length limit was reached
+ /// and returns [`ControlFlow::Break`].
+ pub(super) fn push(&mut self, text: &str) -> ControlFlow<(), ()> {
+ if self.len + text.len() > self.limit {
+ return ControlFlow::BREAK;
+ }
+
+ self.flush_queue();
+ write!(self.buf, "{}", Escape(text)).unwrap();
+ self.len += text.len();
+
+ ControlFlow::CONTINUE
+ }
+
+ /// Open an HTML tag.
+ ///
+ /// **Note:** HTML attributes have not yet been implemented.
+ /// This function will panic if called with a non-alphabetic `tag_name`.
+ pub(super) fn open_tag(&mut self, tag_name: &'static str) {
+ assert!(
+ tag_name.chars().all(|c| ('a'..='z').contains(&c)),
+ "tag_name contained non-alphabetic chars: {:?}",
+ tag_name
+ );
+ self.queued_tags.push(tag_name);
+ }
+
+ /// Close the most recently opened HTML tag.
+ pub(super) fn close_tag(&mut self) {
+ match self.unclosed_tags.pop() {
+ // Close the most recently opened tag.
+ Some(tag_name) => write!(self.buf, "</{}>", tag_name).unwrap(),
+ // There are valid cases where `close_tag()` is called without
+ // there being any tags to close. For example, this occurs when
+ // a tag is opened after the length limit is exceeded;
+ // `flush_queue()` will never be called, and thus, the tag will
+ // not end up being added to `unclosed_tags`.
+ None => {}
+ }
+ }
+
+ /// Write all queued tags and add them to the `unclosed_tags` list.
+ fn flush_queue(&mut self) {
+ for tag_name in self.queued_tags.drain(..) {
+ write!(self.buf, "<{}>", tag_name).unwrap();
+
+ self.unclosed_tags.push(tag_name);
+ }
+ }
+
+ /// Close all unclosed tags.
+ fn close_all_tags(&mut self) {
+ while !self.unclosed_tags.is_empty() {
+ self.close_tag();
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/length_limit/tests.rs b/src/librustdoc/html/length_limit/tests.rs
new file mode 100644
index 000000000..2d02b8a16
--- /dev/null
+++ b/src/librustdoc/html/length_limit/tests.rs
@@ -0,0 +1,120 @@
+use super::*;
+
+#[test]
+fn empty() {
+ assert_eq!(HtmlWithLimit::new(0).finish(), "");
+ assert_eq!(HtmlWithLimit::new(60).finish(), "");
+}
+
+#[test]
+fn basic() {
+ let mut buf = HtmlWithLimit::new(60);
+ buf.push("Hello ");
+ buf.open_tag("em");
+ buf.push("world");
+ buf.close_tag();
+ buf.push("!");
+ assert_eq!(buf.finish(), "Hello <em>world</em>!");
+}
+
+#[test]
+fn no_tags() {
+ let mut buf = HtmlWithLimit::new(60);
+ buf.push("Hello");
+ buf.push(" world!");
+ assert_eq!(buf.finish(), "Hello world!");
+}
+
+#[test]
+fn limit_0() {
+ let mut buf = HtmlWithLimit::new(0);
+ buf.push("Hello ");
+ buf.open_tag("em");
+ buf.push("world");
+ buf.close_tag();
+ buf.push("!");
+ assert_eq!(buf.finish(), "");
+}
+
+#[test]
+fn exactly_limit() {
+ let mut buf = HtmlWithLimit::new(12);
+ buf.push("Hello ");
+ buf.open_tag("em");
+ buf.push("world");
+ buf.close_tag();
+ buf.push("!");
+ assert_eq!(buf.finish(), "Hello <em>world</em>!");
+}
+
+#[test]
+fn multiple_nested_tags() {
+ let mut buf = HtmlWithLimit::new(60);
+ buf.open_tag("p");
+ buf.push("This is a ");
+ buf.open_tag("em");
+ buf.push("paragraph");
+ buf.open_tag("strong");
+ buf.push("!");
+ buf.close_tag();
+ buf.close_tag();
+ buf.close_tag();
+ assert_eq!(buf.finish(), "<p>This is a <em>paragraph<strong>!</strong></em></p>");
+}
+
+#[test]
+fn forgot_to_close_tags() {
+ let mut buf = HtmlWithLimit::new(60);
+ buf.open_tag("p");
+ buf.push("This is a ");
+ buf.open_tag("em");
+ buf.push("paragraph");
+ buf.open_tag("strong");
+ buf.push("!");
+ assert_eq!(buf.finish(), "<p>This is a <em>paragraph<strong>!</strong></em></p>");
+}
+
+#[test]
+fn past_the_limit() {
+ let mut buf = HtmlWithLimit::new(20);
+ buf.open_tag("p");
+ (0..10).try_for_each(|n| {
+ buf.open_tag("strong");
+ buf.push("word#")?;
+ buf.push(&n.to_string())?;
+ buf.close_tag();
+ ControlFlow::CONTINUE
+ });
+ buf.close_tag();
+ assert_eq!(
+ buf.finish(),
+ "<p>\
+ <strong>word#0</strong>\
+ <strong>word#1</strong>\
+ <strong>word#2</strong>\
+ </p>"
+ );
+}
+
+#[test]
+fn quickly_past_the_limit() {
+ let mut buf = HtmlWithLimit::new(6);
+ buf.open_tag("p");
+ buf.push("Hello");
+ buf.push(" World");
+ // intentionally not closing <p> before finishing
+ assert_eq!(buf.finish(), "<p>Hello</p>");
+}
+
+#[test]
+fn close_too_many() {
+ let mut buf = HtmlWithLimit::new(60);
+ buf.open_tag("p");
+ buf.push("Hello");
+ buf.close_tag();
+ // This call does not panic because there are valid cases
+ // where `close_tag()` is called with no tags left to close.
+ // So `close_tag()` does nothing in this case.
+ buf.close_tag();
+ assert_eq!(buf.finish(), "<p>Hello</p>");
+}
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
new file mode 100644
index 000000000..52a2effca
--- /dev/null
+++ b/src/librustdoc/html/markdown.rs
@@ -0,0 +1,1510 @@
+//! Markdown formatting for rustdoc.
+//!
+//! This module implements markdown formatting through the pulldown-cmark library.
+//!
+//! ```
+//! #![feature(rustc_private)]
+//!
+//! extern crate rustc_span;
+//!
+//! use rustc_span::edition::Edition;
+//! use rustdoc::html::markdown::{HeadingOffset, IdMap, Markdown, ErrorCodes};
+//!
+//! let s = "My *markdown* _text_";
+//! let mut id_map = IdMap::new();
+//! let md = Markdown {
+//! content: s,
+//! links: &[],
+//! ids: &mut id_map,
+//! error_codes: ErrorCodes::Yes,
+//! edition: Edition::Edition2015,
+//! playground: &None,
+//! heading_offset: HeadingOffset::H2,
+//! };
+//! let html = md.into_string();
+//! // ... something using html
+//! ```
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::DefId;
+use rustc_hir::HirId;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::edition::Edition;
+use rustc_span::Span;
+
+use once_cell::sync::Lazy;
+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::collections::VecDeque;
+use std::default::Default;
+use std::fmt::Write;
+use std::ops::{ControlFlow, Range};
+use std::str;
+
+use crate::clean::RenderedLink;
+use crate::doctest;
+use crate::html::escape::Escape;
+use crate::html::format::Buffer;
+use crate::html::highlight;
+use crate::html::length_limit::HtmlWithLimit;
+use crate::html::toc::TocBuilder;
+
+use pulldown_cmark::{
+ html, BrokenLink, CodeBlockKind, CowStr, Event, LinkType, Options, Parser, Tag,
+};
+
+#[cfg(test)]
+mod tests;
+
+const MAX_HEADER_LEVEL: u32 = 6;
+
+/// Options for rendering Markdown in the main body of documentation.
+pub(crate) fn main_body_opts() -> Options {
+ Options::ENABLE_TABLES
+ | Options::ENABLE_FOOTNOTES
+ | Options::ENABLE_STRIKETHROUGH
+ | Options::ENABLE_TASKLISTS
+ | Options::ENABLE_SMART_PUNCTUATION
+}
+
+/// Options for rendering Markdown in summaries (e.g., in search results).
+pub(crate) fn summary_opts() -> Options {
+ Options::ENABLE_TABLES
+ | Options::ENABLE_FOOTNOTES
+ | Options::ENABLE_STRIKETHROUGH
+ | Options::ENABLE_TASKLISTS
+ | Options::ENABLE_SMART_PUNCTUATION
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum HeadingOffset {
+ H1 = 0,
+ H2,
+ H3,
+ H4,
+ H5,
+ H6,
+}
+
+/// When `to_string` is called, this struct will emit the HTML corresponding to
+/// the rendered version of the contained markdown string.
+pub struct Markdown<'a> {
+ pub content: &'a str,
+ /// A list of link replacements.
+ pub links: &'a [RenderedLink],
+ /// The current list of used header IDs.
+ pub ids: &'a mut IdMap,
+ /// Whether to allow the use of explicit error codes in doctest lang strings.
+ pub error_codes: ErrorCodes,
+ /// Default edition to use when parsing doctests (to add a `fn main`).
+ pub edition: Edition,
+ pub playground: &'a Option<Playground>,
+ /// Offset at which we render headings.
+ /// E.g. if `heading_offset: HeadingOffset::H2`, then `# something` renders an `<h2>`.
+ pub heading_offset: HeadingOffset,
+}
+/// A tuple struct like `Markdown` that renders the markdown with a table of contents.
+pub(crate) struct MarkdownWithToc<'a>(
+ pub(crate) &'a str,
+ pub(crate) &'a mut IdMap,
+ pub(crate) ErrorCodes,
+ pub(crate) Edition,
+ pub(crate) &'a Option<Playground>,
+);
+/// A tuple struct like `Markdown` that renders the markdown escaping HTML tags.
+pub(crate) struct MarkdownHtml<'a>(
+ pub(crate) &'a str,
+ pub(crate) &'a mut IdMap,
+ pub(crate) ErrorCodes,
+ pub(crate) Edition,
+ pub(crate) &'a Option<Playground>,
+);
+/// A tuple struct like `Markdown` that renders only the first paragraph.
+pub(crate) struct MarkdownSummaryLine<'a>(pub &'a str, pub &'a [RenderedLink]);
+
+#[derive(Copy, Clone, PartialEq, Debug)]
+pub enum ErrorCodes {
+ Yes,
+ No,
+}
+
+impl ErrorCodes {
+ pub(crate) fn from(b: bool) -> Self {
+ match b {
+ true => ErrorCodes::Yes,
+ false => ErrorCodes::No,
+ }
+ }
+
+ pub(crate) fn as_bool(self) -> bool {
+ match self {
+ ErrorCodes::Yes => true,
+ ErrorCodes::No => false,
+ }
+ }
+}
+
+/// Controls whether a line will be hidden or shown in HTML output.
+///
+/// All lines are used in documentation tests.
+enum Line<'a> {
+ Hidden(&'a str),
+ Shown(Cow<'a, str>),
+}
+
+impl<'a> Line<'a> {
+ fn for_html(self) -> Option<Cow<'a, str>> {
+ match self {
+ Line::Shown(l) => Some(l),
+ Line::Hidden(_) => None,
+ }
+ }
+
+ fn for_code(self) -> Cow<'a, str> {
+ match self {
+ Line::Shown(l) => l,
+ Line::Hidden(l) => Cow::Borrowed(l),
+ }
+ }
+}
+
+// FIXME: There is a minor inconsistency here. For lines that start with ##, we
+// have no easy way of removing a potential single space after the hashes, which
+// is done in the single # case. This inconsistency seems okay, if non-ideal. In
+// order to fix it we'd have to iterate to find the first non-# character, and
+// then reallocate to remove it; which would make us return a String.
+fn map_line(s: &str) -> Line<'_> {
+ let trimmed = s.trim();
+ if trimmed.starts_with("##") {
+ Line::Shown(Cow::Owned(s.replacen("##", "#", 1)))
+ } else if let Some(stripped) = trimmed.strip_prefix("# ") {
+ // # text
+ Line::Hidden(stripped)
+ } else if trimmed == "#" {
+ // We cannot handle '#text' because it could be #[attr].
+ Line::Hidden("")
+ } else {
+ Line::Shown(Cow::Borrowed(s))
+ }
+}
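
In a doctest this means `# use std::io;` still compiles but is hidden from the rendered block, `## ...` keeps the line visible with one hash removed, and everything else is shown as-is. A few hedged examples of the mapping:

assert!(matches!(map_line("# use std::io;"), Line::Hidden("use std::io;")));
assert!(matches!(map_line("#"), Line::Hidden("")));
assert!(matches!(map_line("## inner line"), Line::Shown(_)));   // rendered as "# inner line"
assert!(matches!(map_line("let x = 1;"), Line::Shown(_)));      // shown unchanged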
+
+/// Converts characters from a title into a form usable in an id.
+///
+/// "Hello, world!" -> "hello-world"
+fn slugify(c: char) -> Option<char> {
+ if c.is_alphanumeric() || c == '-' || c == '_' {
+ if c.is_ascii() { Some(c.to_ascii_lowercase()) } else { Some(c) }
+ } else if c.is_whitespace() && c.is_ascii() {
+ Some('-')
+ } else {
+ None
+ }
+}
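
The heading pipeline later in this file feeds every character of a heading through `slugify` and collects the survivors into the anchor id, which is how the `"Hello, world!"` example above becomes `hello-world`:

let id: String = "Hello, world!".chars().filter_map(slugify).collect();
assert_eq!(id, "hello-world");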
+
+#[derive(Clone, Debug)]
+pub struct Playground {
+ pub crate_name: Option<String>,
+ pub url: String,
+}
+
+/// Adds syntax highlighting and playground Run buttons to Rust code blocks.
+struct CodeBlocks<'p, 'a, I: Iterator<Item = Event<'a>>> {
+ inner: I,
+ check_error_codes: ErrorCodes,
+ edition: Edition,
+ // Information about the playground if a URL has been specified, containing an
+ // optional crate name and the URL.
+ playground: &'p Option<Playground>,
+}
+
+impl<'p, 'a, I: Iterator<Item = Event<'a>>> CodeBlocks<'p, 'a, I> {
+ fn new(
+ iter: I,
+ error_codes: ErrorCodes,
+ edition: Edition,
+ playground: &'p Option<Playground>,
+ ) -> Self {
+ CodeBlocks { inner: iter, check_error_codes: error_codes, edition, playground }
+ }
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
+ type Item = Event<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let event = self.inner.next();
+ let compile_fail;
+ let should_panic;
+ let ignore;
+ let edition;
+ let Some(Event::Start(Tag::CodeBlock(kind))) = event else {
+ return event;
+ };
+
+ let mut origtext = String::new();
+ for event in &mut self.inner {
+ match event {
+ Event::End(Tag::CodeBlock(..)) => break,
+ Event::Text(ref s) => {
+ origtext.push_str(s);
+ }
+ _ => {}
+ }
+ }
+ let lines = origtext.lines().filter_map(|l| map_line(l).for_html());
+ let text = lines.intersperse("\n".into()).collect::<String>();
+
+ let parse_result = match kind {
+ CodeBlockKind::Fenced(ref lang) => {
+ let parse_result =
+ LangString::parse_without_check(lang, self.check_error_codes, false);
+ if !parse_result.rust {
+ return Some(Event::Html(
+ format!(
+ "<div class=\"example-wrap\">\
+ <pre class=\"language-{}\"><code>{}</code></pre>\
+ </div>",
+ lang,
+ Escape(&text),
+ )
+ .into(),
+ ));
+ }
+ parse_result
+ }
+ CodeBlockKind::Indented => Default::default(),
+ };
+
+ compile_fail = parse_result.compile_fail;
+ should_panic = parse_result.should_panic;
+ ignore = parse_result.ignore;
+ edition = parse_result.edition;
+
+ let explicit_edition = edition.is_some();
+ let edition = edition.unwrap_or(self.edition);
+
+ let playground_button = self.playground.as_ref().and_then(|playground| {
+ let krate = &playground.crate_name;
+ let url = &playground.url;
+ if url.is_empty() {
+ return None;
+ }
+ let test = origtext
+ .lines()
+ .map(|l| map_line(l).for_code())
+ .intersperse("\n".into())
+ .collect::<String>();
+ let krate = krate.as_ref().map(|s| &**s);
+ let (test, _, _) =
+ doctest::make_test(&test, krate, false, &Default::default(), edition, None);
+ let channel = if test.contains("#![feature(") { "&amp;version=nightly" } else { "" };
+
+ // These characters don't need to be escaped in a URI.
+ // FIXME: use a library function for percent encoding.
+ fn dont_escape(c: u8) -> bool {
+ (b'a' <= c && c <= b'z')
+ || (b'A' <= c && c <= b'Z')
+ || (b'0' <= c && c <= b'9')
+ || c == b'-'
+ || c == b'_'
+ || c == b'.'
+ || c == b'~'
+ || c == b'!'
+ || c == b'\''
+ || c == b'('
+ || c == b')'
+ || c == b'*'
+ }
+ let mut test_escaped = String::new();
+ for b in test.bytes() {
+ if dont_escape(b) {
+ test_escaped.push(char::from(b));
+ } else {
+ write!(test_escaped, "%{:02X}", b).unwrap();
+ }
+ }
+ Some(format!(
+ r#"<a class="test-arrow" target="_blank" href="{}?code={}{}&amp;edition={}">Run</a>"#,
+ url, test_escaped, channel, edition,
+ ))
+ });
+
+ let tooltip = if ignore != Ignore::None {
+ Some((None, "ignore"))
+ } else if compile_fail {
+ Some((None, "compile_fail"))
+ } else if should_panic {
+ Some((None, "should_panic"))
+ } else if explicit_edition {
+ Some((Some(edition), "edition"))
+ } else {
+ None
+ };
+
+ // Insert a newline to clearly separate this block from the previous one,
+ // so we can keep the HTML output short.
+ let mut s = Buffer::new();
+ s.push_str("\n");
+ highlight::render_with_highlighting(
+ &text,
+ &mut s,
+ Some(&format!(
+ "rust-example-rendered{}",
+ if let Some((_, class)) = tooltip { format!(" {}", class) } else { String::new() }
+ )),
+ playground_button.as_deref(),
+ tooltip,
+ edition,
+ None,
+ None,
+ None,
+ );
+ Some(Event::Html(s.into_inner().into()))
+ }
+}
+
+/// Replaces intra-doc links with their resolved URLs and strips disambiguators from shortcut links (e.g. `[fn@f]`).
+struct LinkReplacer<'a, I: Iterator<Item = Event<'a>>> {
+ inner: I,
+ links: &'a [RenderedLink],
+ shortcut_link: Option<&'a RenderedLink>,
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> LinkReplacer<'a, I> {
+ fn new(iter: I, links: &'a [RenderedLink]) -> Self {
+ LinkReplacer { inner: iter, links, shortcut_link: None }
+ }
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
+ type Item = Event<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let mut event = self.inner.next();
+
+ // Replace intra-doc links and remove disambiguators from shortcut links (`[fn@f]`).
+ match &mut event {
+ // This is a shortcut link that was resolved by the broken_link_callback: `[fn@f]`
+ // Remove any disambiguator.
+ Some(Event::Start(Tag::Link(
+ // [fn@f] or [fn@f][]
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown,
+ dest,
+ title,
+ ))) => {
+ debug!("saw start of shortcut link to {} with title {}", dest, title);
+ // If this is a shortcut link, it was resolved by the broken_link_callback.
+ // So the URL will already be updated properly.
+ let link = self.links.iter().find(|&link| *link.href == **dest);
+ // Since this is an external iterator, we can't replace the inner text just yet.
+ // Store that we saw a link so we know to replace it later.
+ if let Some(link) = link {
+ trace!("it matched");
+ assert!(self.shortcut_link.is_none(), "shortcut links cannot be nested");
+ self.shortcut_link = Some(link);
+ }
+ }
+ // Now that we're done with the shortcut link, don't replace any more text.
+ Some(Event::End(Tag::Link(
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown,
+ dest,
+ _,
+ ))) => {
+ debug!("saw end of shortcut link to {}", dest);
+ if self.links.iter().any(|link| *link.href == **dest) {
+ assert!(self.shortcut_link.is_some(), "saw closing link without opening tag");
+ self.shortcut_link = None;
+ }
+ }
+ // Handle backticks in inline code blocks, but only if we're in the middle of a shortcut link.
+ // [`fn@f`]
+ Some(Event::Code(text)) => {
+ trace!("saw code {}", text);
+ if let Some(link) = self.shortcut_link {
+ trace!("original text was {}", link.original_text);
+ // NOTE: this only replaces if the code block is the *entire* text.
+ // If only part of the link has code highlighting, the disambiguator will not be removed.
+ // e.g. [fn@`f`]
+ // This is a limitation from `collect_intra_doc_links`: it passes a full link,
+ // and does not distinguish at all between code blocks.
+ // So we could never be sure we weren't replacing too much:
+ // [fn@my_`f`unc] is treated the same as [my_func()] in that pass.
+ //
+ // NOTE: &[1..len() - 1] is to strip the backticks
+ if **text == link.original_text[1..link.original_text.len() - 1] {
+ debug!("replacing {} with {}", text, link.new_text);
+ *text = CowStr::Borrowed(&link.new_text);
+ }
+ }
+ }
+ // Replace plain text in links, but only in the middle of a shortcut link.
+ // [fn@f]
+ Some(Event::Text(text)) => {
+ trace!("saw text {}", text);
+ if let Some(link) = self.shortcut_link {
+ trace!("original text was {}", link.original_text);
+ // NOTE: same limitations as `Event::Code`
+ if **text == *link.original_text {
+ debug!("replacing {} with {}", text, link.new_text);
+ *text = CowStr::Borrowed(&link.new_text);
+ }
+ }
+ }
+ // If this is a link, but not a shortcut link,
+ // replace the URL, since the broken_link_callback was not called.
+ Some(Event::Start(Tag::Link(_, dest, _))) => {
+ if let Some(link) = self.links.iter().find(|&link| *link.original_text == **dest) {
+ *dest = CowStr::Borrowed(link.href.as_ref());
+ }
+ }
+ // Anything else couldn't have been a valid Rust path, so no need to replace the text.
+ _ => {}
+ }
+
+ // Yield the modified event
+ event
+ }
+}
+
+/// Wraps HTML tables in a `<div>` to prevent the doc block's width from growing too large.
+struct TableWrapper<'a, I: Iterator<Item = Event<'a>>> {
+ inner: I,
+ stored_events: VecDeque<Event<'a>>,
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> TableWrapper<'a, I> {
+ fn new(iter: I) -> Self {
+ Self { inner: iter, stored_events: VecDeque::new() }
+ }
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> Iterator for TableWrapper<'a, I> {
+ type Item = Event<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(first) = self.stored_events.pop_front() {
+ return Some(first);
+ }
+
+ let event = self.inner.next()?;
+
+ Some(match event {
+ Event::Start(Tag::Table(t)) => {
+ self.stored_events.push_back(Event::Start(Tag::Table(t)));
+ Event::Html(CowStr::Borrowed("<div>"))
+ }
+ Event::End(Tag::Table(t)) => {
+ self.stored_events.push_back(Event::Html(CowStr::Borrowed("</div>")));
+ Event::End(Tag::Table(t))
+ }
+ e => e,
+ })
+ }
+}
+
+type SpannedEvent<'a> = (Event<'a>, Range<usize>);
+
+/// Make headings links with anchor IDs and build up TOC.
+struct HeadingLinks<'a, 'b, 'ids, I> {
+ inner: I,
+ toc: Option<&'b mut TocBuilder>,
+ buf: VecDeque<SpannedEvent<'a>>,
+ id_map: &'ids mut IdMap,
+ heading_offset: HeadingOffset,
+}
+
+impl<'a, 'b, 'ids, I> HeadingLinks<'a, 'b, 'ids, I> {
+ fn new(
+ iter: I,
+ toc: Option<&'b mut TocBuilder>,
+ ids: &'ids mut IdMap,
+ heading_offset: HeadingOffset,
+ ) -> Self {
+ HeadingLinks { inner: iter, toc, buf: VecDeque::new(), id_map: ids, heading_offset }
+ }
+}
+
+impl<'a, 'b, 'ids, I: Iterator<Item = SpannedEvent<'a>>> Iterator
+ for HeadingLinks<'a, 'b, 'ids, I>
+{
+ type Item = SpannedEvent<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(e) = self.buf.pop_front() {
+ return Some(e);
+ }
+
+ let event = self.inner.next();
+ if let Some((Event::Start(Tag::Heading(level, _, _)), _)) = event {
+ let mut id = String::new();
+ for event in &mut self.inner {
+ match &event.0 {
+ Event::End(Tag::Heading(..)) => break,
+ Event::Start(Tag::Link(_, _, _)) | Event::End(Tag::Link(..)) => {}
+ Event::Text(text) | Event::Code(text) => {
+ id.extend(text.chars().filter_map(slugify));
+ self.buf.push_back(event);
+ }
+ _ => self.buf.push_back(event),
+ }
+ }
+ let id = self.id_map.derive(id);
+
+ if let Some(ref mut builder) = self.toc {
+ let mut html_header = String::new();
+ html::push_html(&mut html_header, self.buf.iter().map(|(ev, _)| ev.clone()));
+ let sec = builder.push(level as u32, html_header, id.clone());
+ self.buf.push_front((Event::Html(format!("{} ", sec).into()), 0..0));
+ }
+
+ let level =
+ std::cmp::min(level as u32 + (self.heading_offset as u32), MAX_HEADER_LEVEL);
+ self.buf.push_back((Event::Html(format!("</a></h{}>", level).into()), 0..0));
+
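+ // For example, `# Foo bar` is emitted as
+ // `<h2 id="foo-bar"><a href="#foo-bar">Foo bar</a></h2>` when the heading offset is `H2`.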
+ let start_tags = format!(
+ "<h{level} id=\"{id}\">\
+ <a href=\"#{id}\">",
+ id = id,
+ level = level
+ );
+ return Some((Event::Html(start_tags.into()), 0..0));
+ }
+ event
+ }
+}
+
+/// Extracts just the first paragraph.
+struct SummaryLine<'a, I: Iterator<Item = Event<'a>>> {
+ inner: I,
+ started: bool,
+ depth: u32,
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> SummaryLine<'a, I> {
+ fn new(iter: I) -> Self {
+ SummaryLine { inner: iter, started: false, depth: 0 }
+ }
+}
+
+fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
+ matches!(
+ t,
+ Tag::Paragraph | Tag::Item | Tag::Emphasis | Tag::Strong | Tag::Link(..) | Tag::BlockQuote
+ )
+}
+
+fn is_forbidden_tag(t: &Tag<'_>) -> bool {
+ matches!(t, Tag::CodeBlock(_) | Tag::Table(_) | Tag::TableHead | Tag::TableRow | Tag::TableCell)
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
+ type Item = Event<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.started && self.depth == 0 {
+ return None;
+ }
+ if !self.started {
+ self.started = true;
+ }
+ if let Some(event) = self.inner.next() {
+ let mut is_start = true;
+ let is_allowed_tag = match event {
+ Event::Start(ref c) => {
+ if is_forbidden_tag(c) {
+ return None;
+ }
+ self.depth += 1;
+ check_if_allowed_tag(c)
+ }
+ Event::End(ref c) => {
+ if is_forbidden_tag(c) {
+ return None;
+ }
+ self.depth -= 1;
+ is_start = false;
+ check_if_allowed_tag(c)
+ }
+ _ => true,
+ };
+ return if !is_allowed_tag {
+ if is_start {
+ Some(Event::Start(Tag::Paragraph))
+ } else {
+ Some(Event::End(Tag::Paragraph))
+ }
+ } else {
+ Some(event)
+ };
+ }
+ None
+ }
+}
+
+/// Moves all footnote definitions to the end and adds backlinks from each
+/// footnote to its reference.
+struct Footnotes<'a, I> {
+ inner: I,
+ footnotes: FxHashMap<String, (Vec<Event<'a>>, u16)>,
+}
+
+impl<'a, I> Footnotes<'a, I> {
+ fn new(iter: I) -> Self {
+ Footnotes { inner: iter, footnotes: FxHashMap::default() }
+ }
+
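+ /// Returns the entry for `key`, assigning the next 1-based footnote number if the key has
+ /// not been seen before.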
+ fn get_entry(&mut self, key: &str) -> &mut (Vec<Event<'a>>, u16) {
+ let new_id = self.footnotes.len() + 1;
+ let key = key.to_owned();
+ self.footnotes.entry(key).or_insert((Vec::new(), new_id as u16))
+ }
+}
+
+impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, I> {
+ type Item = SpannedEvent<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.inner.next() {
+ Some((Event::FootnoteReference(ref reference), range)) => {
+ let entry = self.get_entry(reference);
+ let reference = format!(
+ "<sup id=\"fnref{0}\"><a href=\"#fn{0}\">{0}</a></sup>",
+ (*entry).1
+ );
+ return Some((Event::Html(reference.into()), range));
+ }
+ Some((Event::Start(Tag::FootnoteDefinition(def)), _)) => {
+ let mut content = Vec::new();
+ for (event, _) in &mut self.inner {
+ if let Event::End(Tag::FootnoteDefinition(..)) = event {
+ break;
+ }
+ content.push(event);
+ }
+ let entry = self.get_entry(&def);
+ (*entry).0 = content;
+ }
+ Some(e) => return Some(e),
+ None => {
+ if !self.footnotes.is_empty() {
+ let mut v: Vec<_> = self.footnotes.drain().map(|(_, x)| x).collect();
+ v.sort_by(|a, b| a.1.cmp(&b.1));
+ let mut ret = String::from("<div class=\"footnotes\"><hr><ol>");
+ for (mut content, id) in v {
+ write!(ret, "<li id=\"fn{}\">", id).unwrap();
+ let mut is_paragraph = false;
+ if let Some(&Event::End(Tag::Paragraph)) = content.last() {
+ content.pop();
+ is_paragraph = true;
+ }
+ html::push_html(&mut ret, content.into_iter());
+ write!(ret, "&nbsp;<a href=\"#fnref{}\">↩</a>", id).unwrap();
+ if is_paragraph {
+ ret.push_str("</p>");
+ }
+ ret.push_str("</li>");
+ }
+ ret.push_str("</ol></div>");
+ return Some((Event::Html(ret.into()), 0..0));
+ } else {
+ return None;
+ }
+ }
+ }
+ }
+ }
+}
+
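+/// Scans `doc` for code blocks and registers every Rust block with `tests`, together with the
+/// line number (relative to `tests.get_line()`) on which the block starts. Headings encountered
+/// along the way are reported through `Tester::register_header`.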
+pub(crate) fn find_testable_code<T: doctest::Tester>(
+ doc: &str,
+ tests: &mut T,
+ error_codes: ErrorCodes,
+ enable_per_target_ignores: bool,
+ extra_info: Option<&ExtraInfo<'_>>,
+) {
+ let mut parser = Parser::new(doc).into_offset_iter();
+ let mut prev_offset = 0;
+ let mut nb_lines = 0;
+ let mut register_header = None;
+ while let Some((event, offset)) = parser.next() {
+ match event {
+ Event::Start(Tag::CodeBlock(kind)) => {
+ let block_info = match kind {
+ CodeBlockKind::Fenced(ref lang) => {
+ if lang.is_empty() {
+ Default::default()
+ } else {
+ LangString::parse(
+ lang,
+ error_codes,
+ enable_per_target_ignores,
+ extra_info,
+ )
+ }
+ }
+ CodeBlockKind::Indented => Default::default(),
+ };
+ if !block_info.rust {
+ continue;
+ }
+
+ let mut test_s = String::new();
+
+ while let Some((Event::Text(s), _)) = parser.next() {
+ test_s.push_str(&s);
+ }
+ let text = test_s
+ .lines()
+ .map(|l| map_line(l).for_code())
+ .collect::<Vec<Cow<'_, str>>>()
+ .join("\n");
+
+ nb_lines += doc[prev_offset..offset.start].lines().count();
+ // If there are characters between the preceding line ending and
+ // this code block, `str::lines` will return an additional line,
+ // which we subtract here.
+ if nb_lines != 0 && !&doc[prev_offset..offset.start].ends_with('\n') {
+ nb_lines -= 1;
+ }
+ let line = tests.get_line() + nb_lines + 1;
+ tests.add_test(text, block_info, line);
+ prev_offset = offset.start;
+ }
+ Event::Start(Tag::Heading(level, _, _)) => {
+ register_header = Some(level as u32);
+ }
+ Event::Text(ref s) if register_header.is_some() => {
+ let level = register_header.unwrap();
+ if s.is_empty() {
+ tests.register_header("", level);
+ } else {
+ tests.register_header(s, level);
+ }
+ register_header = None;
+ }
+ _ => {}
+ }
+ }
+}
+
+pub(crate) struct ExtraInfo<'tcx> {
+ id: ExtraInfoId,
+ sp: Span,
+ tcx: TyCtxt<'tcx>,
+}
+
+enum ExtraInfoId {
+ Hir(HirId),
+ Def(DefId),
+}
+
+impl<'tcx> ExtraInfo<'tcx> {
+ pub(crate) fn new(tcx: TyCtxt<'tcx>, hir_id: HirId, sp: Span) -> ExtraInfo<'tcx> {
+ ExtraInfo { id: ExtraInfoId::Hir(hir_id), sp, tcx }
+ }
+
+ pub(crate) fn new_did(tcx: TyCtxt<'tcx>, did: DefId, sp: Span) -> ExtraInfo<'tcx> {
+ ExtraInfo { id: ExtraInfoId::Def(did), sp, tcx }
+ }
+
+ fn error_invalid_codeblock_attr(&self, msg: &str, help: &str) {
+ let hir_id = match self.id {
+ ExtraInfoId::Hir(hir_id) => hir_id,
+ ExtraInfoId::Def(item_did) => {
+ match item_did.as_local() {
+ Some(item_did) => self.tcx.hir().local_def_id_to_hir_id(item_did),
+ None => {
+ // If non-local, no need to check anything.
+ return;
+ }
+ }
+ }
+ };
+ self.tcx.struct_span_lint_hir(
+ crate::lint::INVALID_CODEBLOCK_ATTRIBUTES,
+ hir_id,
+ self.sp,
+ |lint| {
+ let mut diag = lint.build(msg);
+ diag.help(help);
+ diag.emit();
+ },
+ );
+ }
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub(crate) struct LangString {
+ original: String,
+ pub(crate) should_panic: bool,
+ pub(crate) no_run: bool,
+ pub(crate) ignore: Ignore,
+ pub(crate) rust: bool,
+ pub(crate) test_harness: bool,
+ pub(crate) compile_fail: bool,
+ pub(crate) error_codes: Vec<String>,
+ pub(crate) edition: Option<Edition>,
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub(crate) enum Ignore {
+ All,
+ None,
+ Some(Vec<String>),
+}
+
+impl Default for LangString {
+ fn default() -> Self {
+ Self {
+ original: String::new(),
+ should_panic: false,
+ no_run: false,
+ ignore: Ignore::None,
+ rust: true,
+ test_harness: false,
+ compile_fail: false,
+ error_codes: Vec::new(),
+ edition: None,
+ }
+ }
+}
+
+impl LangString {
+ fn parse_without_check(
+ string: &str,
+ allow_error_code_check: ErrorCodes,
+ enable_per_target_ignores: bool,
+ ) -> LangString {
+ Self::parse(string, allow_error_code_check, enable_per_target_ignores, None)
+ }
+
+ fn tokens(string: &str) -> impl Iterator<Item = &str> {
+ // Pandoc, which Rust once used for generating documentation,
+ // expects lang strings to be surrounded by `{}` and for each token
+ // to be preceded by a `.`. Since some of these lang strings are still
+ // loose in the wild, we strip a pair of surrounding `{}` from the lang
+ // string and a leading `.` from each token.
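+ // For example, `{.rust,.ignore}` is tokenized the same as `rust,ignore`.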
+
+ let string = string.trim();
+
+ let first = string.chars().next();
+ let last = string.chars().last();
+
+ let string = if first == Some('{') && last == Some('}') {
+ &string[1..string.len() - 1]
+ } else {
+ string
+ };
+
+ string
+ .split(|c| c == ',' || c == ' ' || c == '\t')
+ .map(str::trim)
+ .map(|token| token.strip_prefix('.').unwrap_or(token))
+ .filter(|token| !token.is_empty())
+ }
+
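+ /// Parses a code block's language string (e.g. `rust,no_run,edition2018`) into a `LangString`,
+ /// warning about likely-misspelled attributes through `extra` when it is provided.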
+ fn parse(
+ string: &str,
+ allow_error_code_check: ErrorCodes,
+ enable_per_target_ignores: bool,
+ extra: Option<&ExtraInfo<'_>>,
+ ) -> LangString {
+ let allow_error_code_check = allow_error_code_check.as_bool();
+ let mut seen_rust_tags = false;
+ let mut seen_other_tags = false;
+ let mut data = LangString::default();
+ let mut ignores = vec![];
+
+ data.original = string.to_owned();
+
+ for token in Self::tokens(string) {
+ match token {
+ "should_panic" => {
+ data.should_panic = true;
+ seen_rust_tags = !seen_other_tags;
+ }
+ "no_run" => {
+ data.no_run = true;
+ seen_rust_tags = !seen_other_tags;
+ }
+ "ignore" => {
+ data.ignore = Ignore::All;
+ seen_rust_tags = !seen_other_tags;
+ }
+ x if x.starts_with("ignore-") => {
+ if enable_per_target_ignores {
+ ignores.push(x.trim_start_matches("ignore-").to_owned());
+ seen_rust_tags = !seen_other_tags;
+ }
+ }
+ "rust" => {
+ data.rust = true;
+ seen_rust_tags = true;
+ }
+ "test_harness" => {
+ data.test_harness = true;
+ seen_rust_tags = !seen_other_tags || seen_rust_tags;
+ }
+ "compile_fail" => {
+ data.compile_fail = true;
+ seen_rust_tags = !seen_other_tags || seen_rust_tags;
+ data.no_run = true;
+ }
+ x if x.starts_with("edition") => {
+ data.edition = x[7..].parse::<Edition>().ok();
+ }
+ x if allow_error_code_check && x.starts_with('E') && x.len() == 5 => {
+ if x[1..].parse::<u32>().is_ok() {
+ data.error_codes.push(x.to_owned());
+ seen_rust_tags = !seen_other_tags || seen_rust_tags;
+ } else {
+ seen_other_tags = true;
+ }
+ }
+ x if extra.is_some() => {
+ let s = x.to_lowercase();
+ if let Some((flag, help)) = if s == "compile-fail"
+ || s == "compile_fail"
+ || s == "compilefail"
+ {
+ Some((
+ "compile_fail",
+ "the code block will either not be tested if not marked as a rust one \
+ or won't fail if it compiles successfully",
+ ))
+ } else if s == "should-panic" || s == "should_panic" || s == "shouldpanic" {
+ Some((
+ "should_panic",
+ "the code block will either not be tested if not marked as a rust one \
+ or won't fail if it doesn't panic when running",
+ ))
+ } else if s == "no-run" || s == "no_run" || s == "norun" {
+ Some((
+ "no_run",
+ "the code block will either not be tested if not marked as a rust one \
+ or will be run (which you might not want)",
+ ))
+ } else if s == "test-harness" || s == "test_harness" || s == "testharness" {
+ Some((
+ "test_harness",
+ "the code block will either not be tested if not marked as a rust one \
+ or the code will be wrapped inside a main function",
+ ))
+ } else {
+ None
+ } {
+ if let Some(extra) = extra {
+ extra.error_invalid_codeblock_attr(
+ &format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
+ help,
+ );
+ }
+ }
+ seen_other_tags = true;
+ }
+ _ => seen_other_tags = true,
+ }
+ }
+
+ // ignore-foo overrides ignore
+ if !ignores.is_empty() {
+ data.ignore = Ignore::Some(ignores);
+ }
+
+ data.rust &= !seen_other_tags || seen_rust_tags;
+
+ data
+ }
+}
+
+impl Markdown<'_> {
+ pub fn into_string(self) -> String {
+ let Markdown {
+ content: md,
+ links,
+ ids,
+ error_codes: codes,
+ edition,
+ playground,
+ heading_offset,
+ } = self;
+
+ // This is actually common enough to special-case
+ if md.is_empty() {
+ return String::new();
+ }
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ links
+ .iter()
+ .find(|link| link.original_text.as_str() == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ };
+
+ let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer));
+ let p = p.into_offset_iter();
+
+ let mut s = String::with_capacity(md.len() * 3 / 2);
+
+ let p = HeadingLinks::new(p, None, ids, heading_offset);
+ let p = Footnotes::new(p);
+ let p = LinkReplacer::new(p.map(|(ev, _)| ev), links);
+ let p = TableWrapper::new(p);
+ let p = CodeBlocks::new(p, codes, edition, playground);
+ html::push_html(&mut s, p);
+
+ s
+ }
+}
+
+impl MarkdownWithToc<'_> {
+ pub(crate) fn into_string(self) -> String {
+ let MarkdownWithToc(md, ids, codes, edition, playground) = self;
+
+ let p = Parser::new_ext(md, main_body_opts()).into_offset_iter();
+
+ let mut s = String::with_capacity(md.len() * 3 / 2);
+
+ let mut toc = TocBuilder::new();
+
+ {
+ let p = HeadingLinks::new(p, Some(&mut toc), ids, HeadingOffset::H1);
+ let p = Footnotes::new(p);
+ let p = TableWrapper::new(p.map(|(ev, _)| ev));
+ let p = CodeBlocks::new(p, codes, edition, playground);
+ html::push_html(&mut s, p);
+ }
+
+ format!("<nav id=\"TOC\">{}</nav>{}", toc.into_toc().print(), s)
+ }
+}
+
+impl MarkdownHtml<'_> {
+ pub(crate) fn into_string(self) -> String {
+ let MarkdownHtml(md, ids, codes, edition, playground) = self;
+
+ // This is actually common enough to special-case
+ if md.is_empty() {
+ return String::new();
+ }
+ let p = Parser::new_ext(md, main_body_opts()).into_offset_iter();
+
+ // Treat inline HTML as plain text.
+ let p = p.map(|event| match event.0 {
+ Event::Html(text) => (Event::Text(text), event.1),
+ _ => event,
+ });
+
+ let mut s = String::with_capacity(md.len() * 3 / 2);
+
+ let p = HeadingLinks::new(p, None, ids, HeadingOffset::H1);
+ let p = Footnotes::new(p);
+ let p = TableWrapper::new(p.map(|(ev, _)| ev));
+ let p = CodeBlocks::new(p, codes, edition, playground);
+ html::push_html(&mut s, p);
+
+ s
+ }
+}
+
+impl MarkdownSummaryLine<'_> {
+ pub(crate) fn into_string(self) -> String {
+ let MarkdownSummaryLine(md, links) = self;
+ // This is actually common enough to special-case
+ if md.is_empty() {
+ return String::new();
+ }
+
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ links
+ .iter()
+ .find(|link| link.original_text.as_str() == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ };
+
+ let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
+
+ let mut s = String::new();
+
+ html::push_html(&mut s, LinkReplacer::new(SummaryLine::new(p), links));
+
+ s
+ }
+}
+
+/// Renders the first paragraph of the provided Markdown, supporting only a subset of Markdown syntax.
+///
+/// - *Italics*, **bold**, and `inline code` styles **are** rendered.
+/// - Headings and links are stripped (though the text *is* rendered).
+/// - HTML, code blocks, and everything else are ignored.
+///
+/// Returns a tuple of the rendered HTML string and whether the output was shortened
+/// due to the provided `length_limit`.
+fn markdown_summary_with_limit(
+ md: &str,
+ link_names: &[RenderedLink],
+ length_limit: usize,
+) -> (String, bool) {
+ if md.is_empty() {
+ return (String::new(), false);
+ }
+
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ link_names
+ .iter()
+ .find(|link| link.original_text.as_str() == &*broken_link.reference)
+ .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
+ };
+
+ let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
+ let mut p = LinkReplacer::new(p, link_names);
+
+ let mut buf = HtmlWithLimit::new(length_limit);
+ let mut stopped_early = false;
+ p.try_for_each(|event| {
+ match &event {
+ Event::Text(text) => {
+ let r =
+ text.split_inclusive(char::is_whitespace).try_for_each(|word| buf.push(word));
+ if r.is_break() {
+ stopped_early = true;
+ }
+ return r;
+ }
+ Event::Code(code) => {
+ buf.open_tag("code");
+ let r = buf.push(code);
+ if r.is_break() {
+ stopped_early = true;
+ } else {
+ buf.close_tag();
+ }
+ return r;
+ }
+ Event::Start(tag) => match tag {
+ Tag::Emphasis => buf.open_tag("em"),
+ Tag::Strong => buf.open_tag("strong"),
+ Tag::CodeBlock(..) => return ControlFlow::BREAK,
+ _ => {}
+ },
+ Event::End(tag) => match tag {
+ Tag::Emphasis | Tag::Strong => buf.close_tag(),
+ Tag::Paragraph | Tag::Heading(..) => return ControlFlow::BREAK,
+ _ => {}
+ },
+ Event::HardBreak | Event::SoftBreak => buf.push(" ")?,
+ _ => {}
+ };
+ ControlFlow::CONTINUE
+ });
+
+ (buf.finish(), stopped_early)
+}
+
+/// Renders a shortened first paragraph of the given Markdown as a subset of Markdown,
+/// making it suitable for contexts like the search index.
+///
+/// Will shorten to 59 or 60 characters, including an ellipsis (…) if it was shortened.
+///
+/// See [`markdown_summary_with_limit`] for details about what is rendered and what is not.
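+///
+/// For example, `hello [Rust](https://www.rust-lang.org) :)` is summarized as `hello Rust :)`.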
+pub(crate) fn short_markdown_summary(markdown: &str, link_names: &[RenderedLink]) -> String {
+ let (mut s, was_shortened) = markdown_summary_with_limit(markdown, link_names, 59);
+
+ if was_shortened {
+ s.push('…');
+ }
+
+ s
+}
+
+/// Renders the first paragraph of the provided markdown as plain text.
+/// Useful for alt-text.
+///
+/// - Headings, links, and formatting are stripped.
+/// - Inline code is rendered as-is, surrounded by backticks.
+/// - HTML and code blocks are ignored.
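+///
+/// For example, `**bold**` becomes `bold` and `hello [Rust](https://www.rust-lang.org) :)`
+/// becomes `hello Rust :)`.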
+pub(crate) fn plain_text_summary(md: &str) -> String {
+ if md.is_empty() {
+ return String::new();
+ }
+
+ let mut s = String::with_capacity(md.len() * 3 / 2);
+
+ for event in Parser::new_ext(md, summary_opts()) {
+ match &event {
+ Event::Text(text) => s.push_str(text),
+ Event::Code(code) => {
+ s.push('`');
+ s.push_str(code);
+ s.push('`');
+ }
+ Event::HardBreak | Event::SoftBreak => s.push(' '),
+ Event::Start(Tag::CodeBlock(..)) => break,
+ Event::End(Tag::Paragraph) => break,
+ Event::End(Tag::Heading(..)) => break,
+ _ => (),
+ }
+ }
+
+ s
+}
+
+#[derive(Debug)]
+pub(crate) struct MarkdownLink {
+ pub kind: LinkType,
+ pub link: String,
+ pub range: Range<usize>,
+}
+
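+/// Collects the links in `md` that could be intra-doc links, passing each one (together with its
+/// byte range in `md`) through `filter_map` and returning the mapped results.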
+pub(crate) fn markdown_links<R>(
+ md: &str,
+ filter_map: impl Fn(MarkdownLink) -> Option<R>,
+) -> Vec<R> {
+ if md.is_empty() {
+ return vec![];
+ }
+
+ let links = RefCell::new(vec![]);
+
+ // FIXME: remove this function once pulldown_cmark can provide spans for link definitions.
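+ // If `s` is a subslice of `md`, compute its byte range within `md` via pointer arithmetic;
+ // otherwise return the `fallback` range.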
+ let locate = |s: &str, fallback: Range<usize>| unsafe {
+ let s_start = s.as_ptr();
+ let s_end = s_start.add(s.len());
+ let md_start = md.as_ptr();
+ let md_end = md_start.add(md.len());
+ if md_start <= s_start && s_end <= md_end {
+ let start = s_start.offset_from(md_start) as usize;
+ let end = s_end.offset_from(md_start) as usize;
+ start..end
+ } else {
+ fallback
+ }
+ };
+
+ let span_for_link = |link: &CowStr<'_>, span: Range<usize>| {
+ // For diagnostics, we want to underline the link's definition but `span` will point at
+ // where the link is used. This is a problem for reference-style links, where the definition
+ // is separate from the usage.
+ match link {
+ // `Borrowed` variant means the string (the link's destination) may come directly from
+ // the markdown text and we can locate the original link destination.
+ // NOTE: LinkReplacer also provides `Borrowed` but possibly from other sources,
+ // so `locate()` can fall back to using `span`.
+ CowStr::Borrowed(s) => locate(s, span),
+
+ // For anything else, we can only use the provided range.
+ CowStr::Boxed(_) | CowStr::Inlined(_) => span,
+ }
+ };
+
+ let mut push = |link: BrokenLink<'_>| {
+ let span = span_for_link(&link.reference, link.span);
+ filter_map(MarkdownLink {
+ kind: LinkType::ShortcutUnknown,
+ link: link.reference.to_string(),
+ range: span,
+ })
+ .map(|link| links.borrow_mut().push(link));
+ None
+ };
+ let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut push))
+ .into_offset_iter();
+
+ // There's no need to thread an IdMap through to here because
+ // the IDs generated aren't going to be emitted anywhere.
+ let mut ids = IdMap::new();
+ let iter = Footnotes::new(HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1));
+
+ for ev in iter {
+ if let Event::Start(Tag::Link(
+ // `<>` links cannot be intra-doc links so we skip them.
+ kind @ (LinkType::Inline
+ | LinkType::Reference
+ | LinkType::ReferenceUnknown
+ | LinkType::Collapsed
+ | LinkType::CollapsedUnknown
+ | LinkType::Shortcut
+ | LinkType::ShortcutUnknown),
+ dest,
+ _,
+ )) = ev.0
+ {
+ debug!("found link: {dest}");
+ let span = span_for_link(&dest, ev.1);
+ filter_map(MarkdownLink { kind, link: dest.into_string(), range: span })
+ .map(|link| links.borrow_mut().push(link));
+ }
+ }
+
+ links.into_inner()
+}
+
+#[derive(Debug)]
+pub(crate) struct RustCodeBlock {
+ /// The range in the markdown that the code block occupies. Note that this includes the fences
+ /// for fenced code blocks.
+ pub(crate) range: Range<usize>,
+ /// The range in the markdown that the code within the code block occupies.
+ pub(crate) code: Range<usize>,
+ pub(crate) is_fenced: bool,
+ pub(crate) lang_string: LangString,
+}
+
+/// Returns a range of bytes for each code block in the markdown that is tagged as `rust` or
+/// untagged (and assumed to be rust).
+pub(crate) fn rust_code_blocks(md: &str, extra_info: &ExtraInfo<'_>) -> Vec<RustCodeBlock> {
+ let mut code_blocks = vec![];
+
+ if md.is_empty() {
+ return code_blocks;
+ }
+
+ let mut p = Parser::new_ext(md, main_body_opts()).into_offset_iter();
+
+ while let Some((event, offset)) = p.next() {
+ if let Event::Start(Tag::CodeBlock(syntax)) = event {
+ let (lang_string, code_start, code_end, range, is_fenced) = match syntax {
+ CodeBlockKind::Fenced(syntax) => {
+ let syntax = syntax.as_ref();
+ let lang_string = if syntax.is_empty() {
+ Default::default()
+ } else {
+ LangString::parse(&*syntax, ErrorCodes::Yes, false, Some(extra_info))
+ };
+ if !lang_string.rust {
+ continue;
+ }
+ let (code_start, mut code_end) = match p.next() {
+ Some((Event::Text(_), offset)) => (offset.start, offset.end),
+ Some((_, sub_offset)) => {
+ let code = Range { start: sub_offset.start, end: sub_offset.start };
+ code_blocks.push(RustCodeBlock {
+ is_fenced: true,
+ range: offset,
+ code,
+ lang_string,
+ });
+ continue;
+ }
+ None => {
+ let code = Range { start: offset.end, end: offset.end };
+ code_blocks.push(RustCodeBlock {
+ is_fenced: true,
+ range: offset,
+ code,
+ lang_string,
+ });
+ continue;
+ }
+ };
+ while let Some((Event::Text(_), offset)) = p.next() {
+ code_end = offset.end;
+ }
+ (lang_string, code_start, code_end, offset, true)
+ }
+ CodeBlockKind::Indented => {
+ // The end of the offset sometimes goes one byte too far, so we reduce it by one in
+ // those cases.
+ if offset.end > offset.start && md.get(offset.end..=offset.end) == Some("\n") {
+ (
+ LangString::default(),
+ offset.start,
+ offset.end,
+ Range { start: offset.start, end: offset.end - 1 },
+ false,
+ )
+ } else {
+ (LangString::default(), offset.start, offset.end, offset, false)
+ }
+ }
+ };
+
+ code_blocks.push(RustCodeBlock {
+ is_fenced,
+ range,
+ code: Range { start: code_start, end: code_end },
+ lang_string,
+ });
+ }
+ }
+
+ code_blocks
+}
+
+#[derive(Clone, Default, Debug)]
+pub struct IdMap {
+ map: FxHashMap<Cow<'static, str>, usize>,
+}
+
+// The map is pre-initialized and cloned each time to avoid reinitializing it repeatedly.
+static DEFAULT_ID_MAP: Lazy<FxHashMap<Cow<'static, str>, usize>> = Lazy::new(|| init_id_map());
+
+fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
+ let mut map = FxHashMap::default();
+ // This is the list of IDs used in JavaScript.
+ map.insert("settings".into(), 1);
+ map.insert("not-displayed".into(), 1);
+ map.insert("alternative-display".into(), 1);
+ map.insert("search".into(), 1);
+ // This is the list of IDs used in HTML generated in Rust (including the ones
+ // used in tera template files).
+ map.insert("mainThemeStyle".into(), 1);
+ map.insert("themeStyle".into(), 1);
+ map.insert("settings-menu".into(), 1);
+ map.insert("help-button".into(), 1);
+ map.insert("main-content".into(), 1);
+ map.insert("crate-search".into(), 1);
+ map.insert("toggle-all-docs".into(), 1);
+ map.insert("all-types".into(), 1);
+ map.insert("default-settings".into(), 1);
+ map.insert("rustdoc-vars".into(), 1);
+ map.insert("sidebar-vars".into(), 1);
+ map.insert("copy-path".into(), 1);
+ map.insert("TOC".into(), 1);
+ // This is the list of IDs used by rustdoc sections (but still generated by
+ // rustdoc).
+ map.insert("fields".into(), 1);
+ map.insert("variants".into(), 1);
+ map.insert("implementors-list".into(), 1);
+ map.insert("synthetic-implementors-list".into(), 1);
+ map.insert("foreign-impls".into(), 1);
+ map.insert("implementations".into(), 1);
+ map.insert("trait-implementations".into(), 1);
+ map.insert("synthetic-implementations".into(), 1);
+ map.insert("blanket-implementations".into(), 1);
+ map.insert("required-associated-types".into(), 1);
+ map.insert("provided-associated-types".into(), 1);
+ map.insert("provided-associated-consts".into(), 1);
+ map.insert("required-associated-consts".into(), 1);
+ map.insert("required-methods".into(), 1);
+ map.insert("provided-methods".into(), 1);
+ map.insert("implementors".into(), 1);
+ map.insert("synthetic-implementors".into(), 1);
+ map.insert("implementations-list".into(), 1);
+ map.insert("trait-implementations-list".into(), 1);
+ map.insert("synthetic-implementations-list".into(), 1);
+ map.insert("blanket-implementations-list".into(), 1);
+ map.insert("deref-methods".into(), 1);
+ map.insert("layout".into(), 1);
+ map
+}
+
+impl IdMap {
+ pub fn new() -> Self {
+ IdMap { map: DEFAULT_ID_MAP.clone() }
+ }
+
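+ /// Derives an ID that is unique within this map: `candidate` itself the first time it is
+ /// seen, then `candidate-1`, `candidate-2`, and so on.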
+ pub(crate) fn derive<S: AsRef<str> + ToString>(&mut self, candidate: S) -> String {
+ let id = match self.map.get_mut(candidate.as_ref()) {
+ None => candidate.to_string(),
+ Some(a) => {
+ let id = format!("{}-{}", candidate.as_ref(), *a);
+ *a += 1;
+ id
+ }
+ };
+
+ self.map.insert(id.clone().into(), 1);
+ id
+ }
+}
diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs
new file mode 100644
index 000000000..5c0bf0ed9
--- /dev/null
+++ b/src/librustdoc/html/markdown/tests.rs
@@ -0,0 +1,312 @@
+use super::{find_testable_code, plain_text_summary, short_markdown_summary};
+use super::{ErrorCodes, HeadingOffset, IdMap, Ignore, LangString, Markdown, MarkdownHtml};
+use rustc_span::edition::{Edition, DEFAULT_EDITION};
+
+#[test]
+fn test_unique_id() {
+ let input = [
+ "foo",
+ "examples",
+ "examples",
+ "method.into_iter",
+ "examples",
+ "method.into_iter",
+ "foo",
+ "main-content",
+ "search",
+ "methods",
+ "examples",
+ "method.into_iter",
+ "assoc_type.Item",
+ "assoc_type.Item",
+ ];
+ let expected = [
+ "foo",
+ "examples",
+ "examples-1",
+ "method.into_iter",
+ "examples-2",
+ "method.into_iter-1",
+ "foo-1",
+ "main-content-1",
+ "search-1",
+ "methods",
+ "examples-3",
+ "method.into_iter-2",
+ "assoc_type.Item",
+ "assoc_type.Item-1",
+ ];
+
+ let mut map = IdMap::new();
+ let actual: Vec<String> = input.iter().map(|s| map.derive(s.to_string())).collect();
+ assert_eq!(&actual[..], expected);
+}
+
+#[test]
+fn test_lang_string_parse() {
+ fn t(lg: LangString) {
+ let s = &lg.original;
+ assert_eq!(LangString::parse(s, ErrorCodes::Yes, true, None), lg)
+ }
+
+ t(Default::default());
+ t(LangString { original: "rust".into(), ..Default::default() });
+ t(LangString { original: ".rust".into(), ..Default::default() });
+ t(LangString { original: "{rust}".into(), ..Default::default() });
+ t(LangString { original: "{.rust}".into(), ..Default::default() });
+ t(LangString { original: "sh".into(), rust: false, ..Default::default() });
+ t(LangString { original: "ignore".into(), ignore: Ignore::All, ..Default::default() });
+ t(LangString {
+ original: "ignore-foo".into(),
+ ignore: Ignore::Some(vec!["foo".to_string()]),
+ ..Default::default()
+ });
+ t(LangString { original: "should_panic".into(), should_panic: true, ..Default::default() });
+ t(LangString { original: "no_run".into(), no_run: true, ..Default::default() });
+ t(LangString { original: "test_harness".into(), test_harness: true, ..Default::default() });
+ t(LangString {
+ original: "compile_fail".into(),
+ no_run: true,
+ compile_fail: true,
+ ..Default::default()
+ });
+ t(LangString { original: "no_run,example".into(), no_run: true, ..Default::default() });
+ t(LangString {
+ original: "sh,should_panic".into(),
+ should_panic: true,
+ rust: false,
+ ..Default::default()
+ });
+ t(LangString { original: "example,rust".into(), ..Default::default() });
+ t(LangString {
+ original: "test_harness,.rust".into(),
+ test_harness: true,
+ ..Default::default()
+ });
+ t(LangString {
+ original: "text, no_run".into(),
+ no_run: true,
+ rust: false,
+ ..Default::default()
+ });
+ t(LangString {
+ original: "text,no_run".into(),
+ no_run: true,
+ rust: false,
+ ..Default::default()
+ });
+ t(LangString {
+ original: "text,no_run, ".into(),
+ no_run: true,
+ rust: false,
+ ..Default::default()
+ });
+ t(LangString {
+ original: "text,no_run,".into(),
+ no_run: true,
+ rust: false,
+ ..Default::default()
+ });
+ t(LangString {
+ original: "edition2015".into(),
+ edition: Some(Edition::Edition2015),
+ ..Default::default()
+ });
+ t(LangString {
+ original: "edition2018".into(),
+ edition: Some(Edition::Edition2018),
+ ..Default::default()
+ });
+}
+
+#[test]
+fn test_lang_string_tokenizer() {
+ fn case(lang_string: &str, want: &[&str]) {
+ let have = LangString::tokens(lang_string).collect::<Vec<&str>>();
+ assert_eq!(have, want, "Unexpected lang string split for `{}`", lang_string);
+ }
+
+ case("", &[]);
+ case("foo", &["foo"]);
+ case("foo,bar", &["foo", "bar"]);
+ case(".foo,.bar", &["foo", "bar"]);
+ case("{.foo,.bar}", &["foo", "bar"]);
+ case(" {.foo,.bar} ", &["foo", "bar"]);
+ case("foo bar", &["foo", "bar"]);
+ case("foo\tbar", &["foo", "bar"]);
+ case("foo\t, bar", &["foo", "bar"]);
+ case(" foo , bar ", &["foo", "bar"]);
+ case(",,foo,,bar,,", &["foo", "bar"]);
+ case("foo=bar", &["foo=bar"]);
+ case("a-b-c", &["a-b-c"]);
+ case("a_b_c", &["a_b_c"]);
+}
+
+#[test]
+fn test_header() {
+ fn t(input: &str, expect: &str) {
+ let mut map = IdMap::new();
+ let output = Markdown {
+ content: input,
+ links: &[],
+ ids: &mut map,
+ error_codes: ErrorCodes::Yes,
+ edition: DEFAULT_EDITION,
+ playground: &None,
+ heading_offset: HeadingOffset::H2,
+ }
+ .into_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("# Foo bar", "<h2 id=\"foo-bar\"><a href=\"#foo-bar\">Foo bar</a></h2>");
+ t(
+ "## Foo-bar_baz qux",
+ "<h3 id=\"foo-bar_baz-qux\">\
+ <a href=\"#foo-bar_baz-qux\">Foo-bar_baz qux</a></h3>",
+ );
+ t(
+ "### **Foo** *bar* baz!?!& -_qux_-%",
+ "<h4 id=\"foo-bar-baz--qux-\">\
+ <a href=\"#foo-bar-baz--qux-\"><strong>Foo</strong> \
+ <em>bar</em> baz!?!&amp; -<em>qux</em>-%</a>\
+ </h4>",
+ );
+ t(
+ "#### **Foo?** & \\*bar?!* _`baz`_ ❤ #qux",
+ "<h5 id=\"foo--bar--baz--qux\">\
+ <a href=\"#foo--bar--baz--qux\"><strong>Foo?</strong> &amp; *bar?!* \
+ <em><code>baz</code></em> ❤ #qux</a>\
+ </h5>",
+ );
+}
+
+#[test]
+fn test_header_ids_multiple_blocks() {
+ let mut map = IdMap::new();
+ fn t(map: &mut IdMap, input: &str, expect: &str) {
+ let output = Markdown {
+ content: input,
+ links: &[],
+ ids: map,
+ error_codes: ErrorCodes::Yes,
+ edition: DEFAULT_EDITION,
+ playground: &None,
+ heading_offset: HeadingOffset::H2,
+ }
+ .into_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t(&mut map, "# Example", "<h2 id=\"example\"><a href=\"#example\">Example</a></h2>");
+ t(&mut map, "# Panics", "<h2 id=\"panics\"><a href=\"#panics\">Panics</a></h2>");
+ t(&mut map, "# Example", "<h2 id=\"example-1\"><a href=\"#example-1\">Example</a></h2>");
+ t(&mut map, "# Search", "<h2 id=\"search-1\"><a href=\"#search-1\">Search</a></h2>");
+ t(&mut map, "# Example", "<h2 id=\"example-2\"><a href=\"#example-2\">Example</a></h2>");
+ t(&mut map, "# Panics", "<h2 id=\"panics-1\"><a href=\"#panics-1\">Panics</a></h2>");
+}
+
+#[test]
+fn test_short_markdown_summary() {
+ fn t(input: &str, expect: &str) {
+ let output = short_markdown_summary(input, &[][..]);
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("", "");
+ t("hello [Rust](https://www.rust-lang.org) :)", "hello Rust :)");
+ t("*italic*", "<em>italic</em>");
+ t("**bold**", "<strong>bold</strong>");
+ t("Multi-line\nsummary", "Multi-line summary");
+ t("Hard-break \nsummary", "Hard-break summary");
+ t("hello [Rust] :)\n\n[Rust]: https://www.rust-lang.org", "hello Rust :)");
+ t("hello [Rust](https://www.rust-lang.org \"Rust\") :)", "hello Rust :)");
+ t("dud [link]", "dud [link]");
+ t("code `let x = i32;` ...", "code <code>let x = i32;</code> …");
+ t("type `Type<'static>` ...", "type <code>Type&lt;&#39;static&gt;</code> …");
+ // Test to ensure escaping and length-limiting work well together.
+ // The output should be limited based on the input length,
+ // rather than the output, because escaped versions of characters
+ // are usually longer than how the character is actually displayed.
+ t(
+ "& & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & & &",
+ "&amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; \
+ &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; &amp; \
+ &amp; &amp; &amp; &amp; &amp; …",
+ );
+ t("# top header", "top header");
+ t("# top header\n\nfollowed by a paragraph", "top header");
+ t("## header", "header");
+ t("first paragraph\n\nsecond paragraph", "first paragraph");
+ t("```\nfn main() {}\n```", "");
+ t("<div>hello</div>", "");
+ t(
+ "a *very*, **very** long first paragraph. it has lots of `inline code: Vec<T>`. and it has a [link](https://www.rust-lang.org).\nthat was a soft line break! \nthat was a hard one\n\nsecond paragraph.",
+ "a <em>very</em>, <strong>very</strong> long first paragraph. it has lots of …",
+ );
+}
+
+#[test]
+fn test_plain_text_summary() {
+ fn t(input: &str, expect: &str) {
+ let output = plain_text_summary(input);
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("", "");
+ t("hello [Rust](https://www.rust-lang.org) :)", "hello Rust :)");
+ t("**bold**", "bold");
+ t("Multi-line\nsummary", "Multi-line summary");
+ t("Hard-break \nsummary", "Hard-break summary");
+ t("hello [Rust] :)\n\n[Rust]: https://www.rust-lang.org", "hello Rust :)");
+ t("hello [Rust](https://www.rust-lang.org \"Rust\") :)", "hello Rust :)");
+ t("dud [link]", "dud [link]");
+ t("code `let x = i32;` ...", "code `let x = i32;` …");
+ t("type `Type<'static>` ...", "type `Type<'static>` …");
+ t("# top header", "top header");
+ t("# top header\n\nfollowed by some text", "top header");
+ t("## header", "header");
+ t("first paragraph\n\nsecond paragraph", "first paragraph");
+ t("```\nfn main() {}\n```", "");
+ t("<div>hello</div>", "");
+ t(
+ "a *very*, **very** long first paragraph. it has lots of `inline code: Vec<T>`. and it has a [link](https://www.rust-lang.org).\nthat was a soft line break! \nthat was a hard one\n\nsecond paragraph.",
+ "a very, very long first paragraph. it has lots of `inline code: Vec<T>`. and it has a link. that was a soft line break! that was a hard one",
+ );
+}
+
+#[test]
+fn test_markdown_html_escape() {
+ fn t(input: &str, expect: &str) {
+ let mut idmap = IdMap::new();
+ let output =
+ MarkdownHtml(input, &mut idmap, ErrorCodes::Yes, DEFAULT_EDITION, &None).into_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("`Struct<'a, T>`", "<p><code>Struct&lt;'a, T&gt;</code></p>\n");
+ t("Struct<'a, T>", "<p>Struct&lt;’a, T&gt;</p>\n");
+ t("Struct<br>", "<p>Struct&lt;br&gt;</p>\n");
+}
+
+#[test]
+fn test_find_testable_code_line() {
+ fn t(input: &str, expect: &[usize]) {
+ impl crate::doctest::Tester for Vec<usize> {
+ fn add_test(&mut self, _test: String, _config: LangString, line: usize) {
+ self.push(line);
+ }
+ }
+ let mut lines = Vec::<usize>::new();
+ find_testable_code(input, &mut lines, ErrorCodes::No, false, None);
+ assert_eq!(lines, expect);
+ }
+
+ t("", &[]);
+ t("```rust\n```", &[1]);
+ t(" ```rust\n```", &[1]);
+ t("\n```rust\n```", &[2]);
+ t("\n ```rust\n```", &[2]);
+ t("```rust\n```\n```rust\n```", &[1, 3]);
+ t("```rust\n```\n ```rust\n```", &[1, 3]);
+}
diff --git a/src/librustdoc/html/mod.rs b/src/librustdoc/html/mod.rs
new file mode 100644
index 000000000..481ed16c0
--- /dev/null
+++ b/src/librustdoc/html/mod.rs
@@ -0,0 +1,15 @@
+pub(crate) mod escape;
+pub(crate) mod format;
+pub(crate) mod highlight;
+pub(crate) mod layout;
+mod length_limit;
+// used by the error-index generator, so it needs to be public
+pub mod markdown;
+pub(crate) mod render;
+pub(crate) mod sources;
+pub(crate) mod static_files;
+pub(crate) mod toc;
+mod url_parts_builder;
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
new file mode 100644
index 000000000..2ed7a6f1b
--- /dev/null
+++ b/src/librustdoc/html/render/context.rs
@@ -0,0 +1,762 @@
+use std::cell::RefCell;
+use std::collections::BTreeMap;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::sync::mpsc::{channel, Receiver};
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_middle::ty::TyCtxt;
+use rustc_session::Session;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::FileName;
+use rustc_span::{sym, Symbol};
+
+use super::print_item::{full_path, item_path, print_item};
+use super::search_index::build_index;
+use super::write_shared::write_shared;
+use super::{
+ collect_spans_and_sources, print_sidebar, scrape_examples_help, AllTypes, LinkFromSrc, NameDoc,
+ StylePath, BASIC_KEYWORDS,
+};
+
+use crate::clean::{self, types::ExternalLocation, ExternalCrate};
+use crate::config::{ModuleSorting, RenderOptions};
+use crate::docfs::{DocFS, PathError};
+use crate::error::Error;
+use crate::formats::cache::Cache;
+use crate::formats::item_type::ItemType;
+use crate::formats::FormatRenderer;
+use crate::html::escape::Escape;
+use crate::html::format::{join_with_double_colon, Buffer};
+use crate::html::markdown::{self, plain_text_summary, ErrorCodes, IdMap};
+use crate::html::{layout, sources};
+use crate::scrape_examples::AllCallLocations;
+use crate::try_err;
+
+/// Major driving force in all rustdoc rendering. This contains information
+/// about where in the tree-like hierarchy rendering is occurring and controls
+/// how the current page is being rendered.
+///
+/// It is intended that this context is a lightweight object which can be fairly
+/// easily cloned because it is cloned per work-job (about once per item in the
+/// rustdoc tree).
+pub(crate) struct Context<'tcx> {
+ /// Current hierarchy of components leading down to what's currently being
+ /// rendered
+ pub(crate) current: Vec<Symbol>,
+ /// The current destination folder of where HTML artifacts should be placed.
+ /// This changes as the context descends into the module hierarchy.
+ pub(crate) dst: PathBuf,
+ /// A flag which, when `true`, will render pages that redirect to the
+ /// real location of an item. This is used so that external links to
+ /// publicly reused items redirect to the right location.
+ pub(super) render_redirect_pages: bool,
+ /// Tracks section IDs for `Deref` targets so they match in both the main
+ /// body and the sidebar.
+ pub(super) deref_id_map: FxHashMap<DefId, String>,
+ /// The map used to ensure all generated 'id=' attributes are unique.
+ pub(super) id_map: IdMap,
+ /// Shared mutable state.
+ ///
+ /// Issue for improving the situation: [#82381][]
+ ///
+ /// [#82381]: https://github.com/rust-lang/rust/issues/82381
+ pub(crate) shared: Rc<SharedContext<'tcx>>,
+ /// This flag indicates whether source links should be generated or not. If
+ /// the source files are present in the html rendering, then this will be
+ /// `true`.
+ pub(crate) include_sources: bool,
+}
+
+// `Context` is cloned a lot, so we don't want the size to grow unexpectedly.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+rustc_data_structures::static_assert_size!(Context<'_>, 128);
+
+/// Shared mutable state used in [`Context`] and elsewhere.
+pub(crate) struct SharedContext<'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+ /// The path to the crate root source minus the file name.
+ /// Used for simplifying paths to the highlighted source code files.
+ pub(crate) src_root: PathBuf,
+ /// This describes the layout of each page, and is not modified after
+ /// creation of the context (contains info like the favicon and added html).
+ pub(crate) layout: layout::Layout,
+ /// The local file sources we've emitted and their respective url-paths.
+ pub(crate) local_sources: FxHashMap<PathBuf, String>,
+ /// Show the memory layout of types in the docs.
+ pub(super) show_type_layout: bool,
+ /// The base-URL of the issue tracker for when an item has been tagged with
+ /// an issue number.
+ pub(super) issue_tracker_base_url: Option<String>,
+ /// The directories that have already been created in this doc run. Used to reduce the number
+ /// of spurious `create_dir_all` calls.
+ created_dirs: RefCell<FxHashSet<PathBuf>>,
+ /// This flag indicates whether listings of modules (in the side bar and documentation itself)
+ /// should be ordered alphabetically or in order of appearance (in the source code).
+ pub(super) module_sorting: ModuleSorting,
+ /// Additional CSS files to be added to the generated docs.
+ pub(crate) style_files: Vec<StylePath>,
+ /// Suffix to be added on resource files (if suffix is "-v2" then "light.css" becomes
+ /// "light-v2.css").
+ pub(crate) resource_suffix: String,
+ /// Optional path string to be used to load static files on output pages. If not set, uses
+ /// combinations of `../` to reach the documentation root.
+ pub(crate) static_root_path: Option<String>,
+ /// The fs handle we are working with.
+ pub(crate) fs: DocFS,
+ pub(super) codes: ErrorCodes,
+ pub(super) playground: Option<markdown::Playground>,
+ all: RefCell<AllTypes>,
+ /// Storage for the errors produced while generating documentation so they
+ /// can be printed together at the end.
+ errors: Receiver<String>,
+ /// `None` by default, depends on the `generate-redirect-map` option flag. If this field is set
+ /// to `Some(...)`, it'll store redirections and then generate a JSON file at the top level of
+ /// the crate.
+ redirections: Option<RefCell<FxHashMap<String, String>>>,
+
+ /// Correspondence map used to link types used in the source code pages, so that clicking a
+ /// link jumps to the type's definition.
+ pub(crate) span_correspondance_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
+ /// The [`Cache`] used during rendering.
+ pub(crate) cache: Cache,
+
+ pub(crate) call_locations: AllCallLocations,
+}
+
+impl SharedContext<'_> {
+ pub(crate) fn ensure_dir(&self, dst: &Path) -> Result<(), Error> {
+ let mut dirs = self.created_dirs.borrow_mut();
+ if !dirs.contains(dst) {
+ try_err!(self.fs.create_dir_all(dst), dst);
+ dirs.insert(dst.to_path_buf());
+ }
+
+ Ok(())
+ }
+
+ pub(crate) fn edition(&self) -> Edition {
+ self.tcx.sess.edition()
+ }
+}
+
+impl<'tcx> Context<'tcx> {
+ pub(crate) fn tcx(&self) -> TyCtxt<'tcx> {
+ self.shared.tcx
+ }
+
+ pub(crate) fn cache(&self) -> &Cache {
+ &self.shared.cache
+ }
+
+ pub(super) fn sess(&self) -> &'tcx Session {
+ self.shared.tcx.sess
+ }
+
+ pub(super) fn derive_id(&mut self, id: String) -> String {
+ self.id_map.derive(id)
+ }
+
+ /// String representation of how to get back to the root path of the 'doc/'
+ /// folder in terms of a relative URL.
+ pub(super) fn root_path(&self) -> String {
+ "../".repeat(self.current.len())
+ }
+
+ fn render_item(&mut self, it: &clean::Item, is_module: bool) -> String {
+ let mut title = String::new();
+ if !is_module {
+ title.push_str(it.name.unwrap().as_str());
+ }
+ if !it.is_primitive() && !it.is_keyword() {
+ if !is_module {
+ title.push_str(" in ");
+ }
+ // No need to include the namespace for primitive types and keywords
+ title.push_str(&join_with_double_colon(&self.current));
+ };
+ title.push_str(" - Rust");
+ let tyname = it.type_();
+ let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc));
+ let desc = if let Some(desc) = desc {
+ desc
+ } else if it.is_crate() {
+ format!("API documentation for the Rust `{}` crate.", self.shared.layout.krate)
+ } else {
+ format!(
+ "API documentation for the Rust `{}` {} in crate `{}`.",
+ it.name.as_ref().unwrap(),
+ tyname,
+ self.shared.layout.krate
+ )
+ };
+ let keywords = make_item_keywords(it);
+ let name;
+ let tyname_s = if it.is_crate() {
+ name = format!("{} crate", tyname);
+ name.as_str()
+ } else {
+ tyname.as_str()
+ };
+
+ if !self.render_redirect_pages {
+ let clone_shared = Rc::clone(&self.shared);
+ let page = layout::Page {
+ css_class: tyname_s,
+ root_path: &self.root_path(),
+ static_root_path: clone_shared.static_root_path.as_deref(),
+ title: &title,
+ description: &desc,
+ keywords: &keywords,
+ resource_suffix: &clone_shared.resource_suffix,
+ };
+ let mut page_buffer = Buffer::html();
+ print_item(self, it, &mut page_buffer, &page);
+ layout::render(
+ &clone_shared.layout,
+ &page,
+ |buf: &mut _| print_sidebar(self, it, buf),
+ move |buf: &mut Buffer| buf.push_buffer(page_buffer),
+ &clone_shared.style_files,
+ )
+ } else {
+ if let Some(&(ref names, ty)) = self.cache().paths.get(&it.item_id.expect_def_id()) {
+ if self.current.len() + 1 != names.len()
+ || self.current.iter().zip(names.iter()).any(|(a, b)| a != b)
+ {
+ // We checked that the redirection isn't pointing to the current file,
+ // preventing an infinite redirection loop in the generated
+ // documentation.
+
+ let mut path = String::new();
+ for name in &names[..names.len() - 1] {
+ path.push_str(name.as_str());
+ path.push('/');
+ }
+ path.push_str(&item_path(ty, names.last().unwrap().as_str()));
+ match self.shared.redirections {
+ Some(ref redirections) => {
+ let mut current_path = String::new();
+ for name in &self.current {
+ current_path.push_str(name.as_str());
+ current_path.push('/');
+ }
+ current_path.push_str(&item_path(ty, names.last().unwrap().as_str()));
+ redirections.borrow_mut().insert(current_path, path);
+ }
+ None => return layout::redirect(&format!("{}{}", self.root_path(), path)),
+ }
+ }
+ }
+ String::new()
+ }
+ }
+
+ /// Construct a map of items shown in the sidebar to a plain-text summary of their docs.
+ fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
+ // BTreeMap instead of HashMap to get a sorted output
+ let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new();
+ let mut inserted: FxHashMap<ItemType, FxHashSet<Symbol>> = FxHashMap::default();
+
+ for item in &m.items {
+ if item.is_stripped() {
+ continue;
+ }
+
+ let short = item.type_();
+ let myname = match item.name {
+ None => continue,
+ Some(s) => s,
+ };
+ if inserted.entry(short).or_default().insert(myname) {
+ let short = short.to_string();
+ let myname = myname.to_string();
+ map.entry(short).or_default().push((
+ myname,
+ Some(item.doc_value().map_or_else(String::new, |s| plain_text_summary(&s))),
+ ));
+ }
+ }
+
+ match self.shared.module_sorting {
+ ModuleSorting::Alphabetical => {
+ for items in map.values_mut() {
+ items.sort();
+ }
+ }
+ ModuleSorting::DeclarationOrder => {}
+ }
+ map
+ }
+
+ /// Generates a url appropriate for an `href` attribute back to the source of
+ /// this item.
+ ///
+ /// The url generated, when clicked, will redirect the browser back to the
+ /// original source code.
+ ///
+ /// If `None` is returned, then a source link couldn't be generated. This
+ /// may happen, for example, with externally inlined items where the source
+ /// of their crate documentation isn't known.
+ pub(super) fn src_href(&self, item: &clean::Item) -> Option<String> {
+ self.href_from_span(item.span(self.tcx()), true)
+ }
+
+ pub(crate) fn href_from_span(&self, span: clean::Span, with_lines: bool) -> Option<String> {
+ if span.is_dummy() {
+ return None;
+ }
+ let mut root = self.root_path();
+ let mut path = String::new();
+ let cnum = span.cnum(self.sess());
+
+ // We can safely ignore synthetic `SourceFile`s.
+ let file = match span.filename(self.sess()) {
+ FileName::Real(ref path) => path.local_path_if_available().to_path_buf(),
+ _ => return None,
+ };
+ let file = &file;
+
+ let krate_sym;
+ let (krate, path) = if cnum == LOCAL_CRATE {
+ if let Some(path) = self.shared.local_sources.get(file) {
+ (self.shared.layout.krate.as_str(), path)
+ } else {
+ return None;
+ }
+ } else {
+ let (krate, src_root) = match *self.cache().extern_locations.get(&cnum)? {
+ ExternalLocation::Local => {
+ let e = ExternalCrate { crate_num: cnum };
+ (e.name(self.tcx()), e.src_root(self.tcx()))
+ }
+ ExternalLocation::Remote(ref s) => {
+ root = s.to_string();
+ let e = ExternalCrate { crate_num: cnum };
+ (e.name(self.tcx()), e.src_root(self.tcx()))
+ }
+ ExternalLocation::Unknown => return None,
+ };
+
+ sources::clean_path(&src_root, file, false, |component| {
+ path.push_str(&component.to_string_lossy());
+ path.push('/');
+ });
+ let mut fname = file.file_name().expect("source has no filename").to_os_string();
+ fname.push(".html");
+ path.push_str(&fname.to_string_lossy());
+ krate_sym = krate;
+ (krate_sym.as_str(), &path)
+ };
+
+ let anchor = if with_lines {
+ let loline = span.lo(self.sess()).line;
+ let hiline = span.hi(self.sess()).line;
+ format!(
+ "#{}",
+ if loline == hiline {
+ loline.to_string()
+ } else {
+ format!("{}-{}", loline, hiline)
+ }
+ )
+ } else {
+ "".to_string()
+ };
+ Some(format!(
+ "{root}src/{krate}/{path}{anchor}",
+ root = Escape(&root),
+ krate = krate,
+ path = path,
+ anchor = anchor
+ ))
+ }
+}
+
+/// Generates the documentation for `crate` into the directory `dst`
+impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
+ fn descr() -> &'static str {
+ "html"
+ }
+
+ const RUN_ON_MODULE: bool = true;
+
+ fn init(
+ krate: clean::Crate,
+ options: RenderOptions,
+ cache: Cache,
+ tcx: TyCtxt<'tcx>,
+ ) -> Result<(Self, clean::Crate), Error> {
+ // need to save a copy of the options for rendering the index page
+ let md_opts = options.clone();
+ let emit_crate = options.should_emit_crate();
+ let RenderOptions {
+ output,
+ external_html,
+ id_map,
+ playground_url,
+ module_sorting,
+ themes: style_files,
+ default_settings,
+ extension_css,
+ resource_suffix,
+ static_root_path,
+ unstable_features,
+ generate_redirect_map,
+ show_type_layout,
+ generate_link_to_definition,
+ call_locations,
+ no_emit_shared,
+ ..
+ } = options;
+
+ let src_root = match krate.src(tcx) {
+ FileName::Real(ref p) => match p.local_path_if_available().parent() {
+ Some(p) => p.to_path_buf(),
+ None => PathBuf::new(),
+ },
+ _ => PathBuf::new(),
+ };
+ // If user passed in `--playground-url` arg, we fill in crate name here
+ let mut playground = None;
+ if let Some(url) = playground_url {
+ playground =
+ Some(markdown::Playground { crate_name: Some(krate.name(tcx).to_string()), url });
+ }
+ let mut layout = layout::Layout {
+ logo: String::new(),
+ favicon: String::new(),
+ external_html,
+ default_settings,
+ krate: krate.name(tcx).to_string(),
+ css_file_extension: extension_css,
+ scrape_examples_extension: !call_locations.is_empty(),
+ };
+ let mut issue_tracker_base_url = None;
+ let mut include_sources = true;
+
+ // Crawl the crate attributes looking for attributes which control how we're
+ // going to emit HTML
+ for attr in krate.module.attrs.lists(sym::doc) {
+ match (attr.name_or_empty(), attr.value_str()) {
+ (sym::html_favicon_url, Some(s)) => {
+ layout.favicon = s.to_string();
+ }
+ (sym::html_logo_url, Some(s)) => {
+ layout.logo = s.to_string();
+ }
+ (sym::html_playground_url, Some(s)) => {
+ playground = Some(markdown::Playground {
+ crate_name: Some(krate.name(tcx).to_string()),
+ url: s.to_string(),
+ });
+ }
+ (sym::issue_tracker_base_url, Some(s)) => {
+ issue_tracker_base_url = Some(s.to_string());
+ }
+ (sym::html_no_source, None) if attr.is_word() => {
+ include_sources = false;
+ }
+ _ => {}
+ }
+ }
+
+ let (local_sources, matches) = collect_spans_and_sources(
+ tcx,
+ &krate,
+ &src_root,
+ include_sources,
+ generate_link_to_definition,
+ );
+
+ let (sender, receiver) = channel();
+ let mut scx = SharedContext {
+ tcx,
+ src_root,
+ local_sources,
+ issue_tracker_base_url,
+ layout,
+ created_dirs: Default::default(),
+ module_sorting,
+ style_files,
+ resource_suffix,
+ static_root_path,
+ fs: DocFS::new(sender),
+ codes: ErrorCodes::from(unstable_features.is_nightly_build()),
+ playground,
+ all: RefCell::new(AllTypes::new()),
+ errors: receiver,
+ redirections: if generate_redirect_map { Some(Default::default()) } else { None },
+ show_type_layout,
+ span_correspondance_map: matches,
+ cache,
+ call_locations,
+ };
+
+ // Add the default themes to the `Vec` of stylepaths
+ //
+ // Note that these must be added before `sources::render` is called
+ // so that the resulting source pages are styled
+ //
+ // `light.css` is not disabled because it is the stylesheet that stays loaded
+ // by the browser as the theme stylesheet. The theme system (hackily) works by
+ // changing the href to this stylesheet. All other themes are disabled to
+ // prevent rule conflicts
+ scx.style_files.push(StylePath { path: PathBuf::from("light.css") });
+ scx.style_files.push(StylePath { path: PathBuf::from("dark.css") });
+ scx.style_files.push(StylePath { path: PathBuf::from("ayu.css") });
+
+ let dst = output;
+ scx.ensure_dir(&dst)?;
+
+ let mut cx = Context {
+ current: Vec::new(),
+ dst,
+ render_redirect_pages: false,
+ id_map,
+ deref_id_map: FxHashMap::default(),
+ shared: Rc::new(scx),
+ include_sources,
+ };
+
+ if emit_crate {
+ sources::render(&mut cx, &krate)?;
+ }
+
+ if !no_emit_shared {
+ // Build our search index
+ let index = build_index(&krate, &mut Rc::get_mut(&mut cx.shared).unwrap().cache, tcx);
+
+ // `write_shared` runs within a flock; temporarily disable thread dispatching of IO.
+ Rc::get_mut(&mut cx.shared).unwrap().fs.set_sync_only(true);
+ write_shared(&mut cx, &krate, index, &md_opts)?;
+ Rc::get_mut(&mut cx.shared).unwrap().fs.set_sync_only(false);
+ }
+
+ Ok((cx, krate))
+ }
+
+ fn make_child_renderer(&self) -> Self {
+ Self {
+ current: self.current.clone(),
+ dst: self.dst.clone(),
+ render_redirect_pages: self.render_redirect_pages,
+ deref_id_map: FxHashMap::default(),
+ id_map: IdMap::new(),
+ shared: Rc::clone(&self.shared),
+ include_sources: self.include_sources,
+ }
+ }
+
+ fn after_krate(&mut self) -> Result<(), Error> {
+ let crate_name = self.tcx().crate_name(LOCAL_CRATE);
+ let final_file = self.dst.join(crate_name.as_str()).join("all.html");
+ let settings_file = self.dst.join("settings.html");
+ let scrape_examples_help_file = self.dst.join("scrape-examples-help.html");
+
+ let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
+ if !root_path.ends_with('/') {
+ root_path.push('/');
+ }
+ let shared = Rc::clone(&self.shared);
+ let mut page = layout::Page {
+ title: "List of all items in this crate",
+ css_class: "mod",
+ root_path: "../",
+ static_root_path: shared.static_root_path.as_deref(),
+ description: "List of all items in this crate",
+ keywords: BASIC_KEYWORDS,
+ resource_suffix: &shared.resource_suffix,
+ };
+ let sidebar = if shared.cache.crate_version.is_some() {
+ format!("<h2 class=\"location\">Crate {}</h2>", crate_name)
+ } else {
+ String::new()
+ };
+ let all = shared.all.replace(AllTypes::new());
+ let v = layout::render(
+ &shared.layout,
+ &page,
+ sidebar,
+ |buf: &mut Buffer| all.print(buf),
+ &shared.style_files,
+ );
+ shared.fs.write(final_file, v)?;
+
+ // Generating settings page.
+ page.title = "Rustdoc settings";
+ page.description = "Settings of Rustdoc";
+ page.root_path = "./";
+
+ let sidebar = "<h2 class=\"location\">Settings</h2><div class=\"sidebar-elems\"></div>";
+ let v = layout::render(
+ &shared.layout,
+ &page,
+ sidebar,
+ |buf: &mut Buffer| {
+ write!(
+ buf,
+ "<div class=\"main-heading\">\
+ <h1 class=\"fqn\">\
+ <span class=\"in-band\">Rustdoc settings</span>\
+ </h1>\
+ <span class=\"out-of-band\">\
+ <a id=\"back\" href=\"javascript:void(0)\" onclick=\"history.back();\">\
+ Back\
+ </a>\
+ </span>\
+ </div>\
+ <noscript>\
+ <section>\
+                        You need to enable JavaScript to be able to update your settings.\
+ </section>\
+ </noscript>\
+ <link rel=\"stylesheet\" type=\"text/css\" \
+ href=\"{root_path}settings{suffix}.css\">\
+ <script defer src=\"{root_path}settings{suffix}.js\"></script>",
+ root_path = page.static_root_path.unwrap_or(""),
+ suffix = page.resource_suffix,
+ )
+ },
+ &shared.style_files,
+ );
+ shared.fs.write(settings_file, v)?;
+
+ if shared.layout.scrape_examples_extension {
+ page.title = "About scraped examples";
+ page.description = "How the scraped examples feature works in Rustdoc";
+ let v = layout::render(
+ &shared.layout,
+ &page,
+ "",
+ scrape_examples_help(&*shared),
+ &shared.style_files,
+ );
+ shared.fs.write(scrape_examples_help_file, v)?;
+ }
+
+ if let Some(ref redirections) = shared.redirections {
+ if !redirections.borrow().is_empty() {
+ let redirect_map_path =
+ self.dst.join(crate_name.as_str()).join("redirect-map.json");
+ let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
+ shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
+ shared.fs.write(redirect_map_path, paths)?;
+ }
+ }
+
+ // No need for it anymore.
+ drop(shared);
+
+ // Flush pending errors.
+ Rc::get_mut(&mut self.shared).unwrap().fs.close();
+ let nb_errors =
+ self.shared.errors.iter().map(|err| self.tcx().sess.struct_err(&err).emit()).count();
+ if nb_errors > 0 {
+ Err(Error::new(io::Error::new(io::ErrorKind::Other, "I/O error"), ""))
+ } else {
+ Ok(())
+ }
+ }
+
+ fn mod_item_in(&mut self, item: &clean::Item) -> Result<(), Error> {
+ // Stripped modules survive the rustdoc passes (i.e., `strip-private`)
+ // if they contain impls for public types. These modules can also
+ // contain items such as publicly re-exported structures.
+ //
+ // External crates will provide links to these structures, so
+ // these modules are recursed into, but not rendered normally
+ // (a flag on the context).
+ if !self.render_redirect_pages {
+ self.render_redirect_pages = item.is_stripped();
+ }
+ let item_name = item.name.unwrap();
+ self.dst.push(&*item_name.as_str());
+ self.current.push(item_name);
+
+ info!("Recursing into {}", self.dst.display());
+
+ let buf = self.render_item(item, true);
+ // buf will be empty if the module is stripped and there is no redirect for it
+ if !buf.is_empty() {
+ self.shared.ensure_dir(&self.dst)?;
+ let joint_dst = self.dst.join("index.html");
+ self.shared.fs.write(joint_dst, buf)?;
+ }
+
+ // Render sidebar-items.js used throughout this module.
+ if !self.render_redirect_pages {
+ let (clean::StrippedItem(box clean::ModuleItem(ref module)) | clean::ModuleItem(ref module)) = *item.kind
+ else { unreachable!() };
+ let items = self.build_sidebar_items(module);
+ let js_dst = self.dst.join(&format!("sidebar-items{}.js", self.shared.resource_suffix));
+ let v = format!("window.SIDEBAR_ITEMS = {};", serde_json::to_string(&items).unwrap());
+ self.shared.fs.write(js_dst, v)?;
+ }
+ Ok(())
+ }
+
+ fn mod_item_out(&mut self) -> Result<(), Error> {
+ info!("Recursed; leaving {}", self.dst.display());
+
+        // Go back to where we were
+ self.dst.pop();
+ self.current.pop();
+ Ok(())
+ }
+
+ fn item(&mut self, item: clean::Item) -> Result<(), Error> {
+ // Stripped modules survive the rustdoc passes (i.e., `strip-private`)
+ // if they contain impls for public types. These modules can also
+ // contain items such as publicly re-exported structures.
+ //
+ // External crates will provide links to these structures, so
+ // these modules are recursed into, but not rendered normally
+ // (a flag on the context).
+ if !self.render_redirect_pages {
+ self.render_redirect_pages = item.is_stripped();
+ }
+
+ let buf = self.render_item(&item, false);
+ // buf will be empty if the item is stripped and there is no redirect for it
+ if !buf.is_empty() {
+ let name = item.name.as_ref().unwrap();
+ let item_type = item.type_();
+ let file_name = &item_path(item_type, name.as_str());
+ self.shared.ensure_dir(&self.dst)?;
+ let joint_dst = self.dst.join(file_name);
+ self.shared.fs.write(joint_dst, buf)?;
+
+ if !self.render_redirect_pages {
+ self.shared.all.borrow_mut().append(full_path(self, &item), &item_type);
+ }
+ // If the item is a macro, redirect from the old macro URL (with !)
+ // to the new one (without).
+ if item_type == ItemType::Macro {
+ let redir_name = format!("{}.{}!.html", item_type, name);
+ if let Some(ref redirections) = self.shared.redirections {
+ let crate_name = &self.shared.layout.krate;
+ redirections.borrow_mut().insert(
+ format!("{}/{}", crate_name, redir_name),
+ format!("{}/{}", crate_name, file_name),
+ );
+ } else {
+ let v = layout::redirect(file_name);
+ let redir_dst = self.dst.join(redir_name);
+ self.shared.fs.write(redir_dst, v)?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn cache(&self) -> &Cache {
+ &self.shared.cache
+ }
+}
+
+fn make_item_keywords(it: &clean::Item) -> String {
+ format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())
+}
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
new file mode 100644
index 000000000..a262c8f7d
--- /dev/null
+++ b/src/librustdoc/html/render/mod.rs
@@ -0,0 +1,2849 @@
+//! Rustdoc's HTML rendering module.
+//!
+//! This module contains the bulk of the logic necessary for rendering a
+//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
+//! rendering process is largely driven by the `format!` syntax extension to
+//! perform all I/O into files and streams.
+//!
+//! The rendering process is largely driven by the `Context` and `Cache`
+//! structures. The cache is pre-populated by crawling the crate in question,
+//! and then it is shared among the various rendering threads. The cache is meant
+//! to be a fairly large structure not implementing `Clone` (because it's shared
+//! among threads). The context, however, should be a lightweight structure. This
+//! is cloned per-thread and contains information about what is currently being
+//! rendered.
+//!
+//! In order to speed up rendering (mostly because of markdown rendering), the
+//! rendering process has been parallelized. This parallelization is only
+//! exposed through the `crate` method on the context, and by the fact that the
+//! shared cache is stored in TLS (and must be accessed as such).
+//!
+//! In addition to rendering the crate itself, this module is also responsible
+//! for creating the corresponding search index and source file renderings.
+//! These threads are not parallelized (they haven't been a bottleneck yet), and
+//! both occur before the crate is rendered.
+
+pub(crate) mod search_index;
+
+#[cfg(test)]
+mod tests;
+
+mod context;
+mod print_item;
+mod span_map;
+mod write_shared;
+
+pub(crate) use self::context::*;
+pub(crate) use self::span_map::{collect_spans_and_sources, LinkFromSrc};
+
+use std::collections::VecDeque;
+use std::default::Default;
+use std::fmt;
+use std::fs;
+use std::iter::Peekable;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::str;
+use std::string::ToString;
+
+use rustc_ast_pretty::pprust;
+use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def::CtorKind;
+use rustc_hir::def_id::DefId;
+use rustc_hir::Mutability;
+use rustc_middle::middle::stability;
+use rustc_middle::ty;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::{
+ symbol::{sym, Symbol},
+ BytePos, FileName, RealFileName,
+};
+use serde::ser::SerializeSeq;
+use serde::{Serialize, Serializer};
+
+use crate::clean::{self, ItemId, RenderedLink, SelfTy};
+use crate::error::Error;
+use crate::formats::cache::Cache;
+use crate::formats::item_type::ItemType;
+use crate::formats::{AssocItemRender, Impl, RenderMode};
+use crate::html::escape::Escape;
+use crate::html::format::{
+ href, join_with_double_colon, print_abi_with_space, print_constness_with_space,
+ print_default_space, print_generic_bounds, print_where_clause, Buffer, Ending, HrefError,
+ PrintWithSpace,
+};
+use crate::html::highlight;
+use crate::html::markdown::{HeadingOffset, IdMap, Markdown, MarkdownHtml, MarkdownSummaryLine};
+use crate::html::sources;
+use crate::html::static_files::SCRAPE_EXAMPLES_HELP_MD;
+use crate::scrape_examples::{CallData, CallLocation};
+use crate::try_none;
+use crate::DOC_RUST_LANG_ORG_CHANNEL;
+
+/// A pair of a name and its optional documentation.
+pub(crate) type NameDoc = (String, Option<String>);
+
+pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
+ crate::html::format::display_fn(move |f| {
+ if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) }
+ })
+}
+
+// Helper structs for rendering items/sidebars and carrying along contextual
+// information
+
+/// Struct representing one entry in the JS search index. These are all emitted
+/// by hand to a large JS file at the end of cache-creation.
+#[derive(Debug)]
+pub(crate) struct IndexItem {
+ pub(crate) ty: ItemType,
+ pub(crate) name: String,
+ pub(crate) path: String,
+ pub(crate) desc: String,
+ pub(crate) parent: Option<DefId>,
+ pub(crate) parent_idx: Option<usize>,
+ pub(crate) search_type: Option<IndexItemFunctionType>,
+ pub(crate) aliases: Box<[Symbol]>,
+}
+
+/// A type used for the search index.
+#[derive(Debug)]
+pub(crate) struct RenderType {
+ id: Option<RenderTypeId>,
+ generics: Option<Vec<RenderType>>,
+}
+
+impl Serialize for RenderType {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let id = match &self.id {
+ // 0 is a sentinel, everything else is one-indexed
+ None => 0,
+ Some(RenderTypeId::Index(idx)) => idx + 1,
+ _ => panic!("must convert render types to indexes before serializing"),
+ };
+ if let Some(generics) = &self.generics {
+ let mut seq = serializer.serialize_seq(None)?;
+ seq.serialize_element(&id)?;
+ seq.serialize_element(generics)?;
+ seq.end()
+ } else {
+ id.serialize(serializer)
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum RenderTypeId {
+ DefId(DefId),
+ Primitive(clean::PrimitiveType),
+ Index(usize),
+}
+
+/// Full type of functions/methods in the search index.
+#[derive(Debug)]
+pub(crate) struct IndexItemFunctionType {
+ inputs: Vec<RenderType>,
+ output: Vec<RenderType>,
+}
+
+impl Serialize for IndexItemFunctionType {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ // If we couldn't figure out a type, just write `0`.
+ let has_missing = self
+ .inputs
+ .iter()
+ .chain(self.output.iter())
+ .any(|i| i.id.is_none() && i.generics.is_none());
+ if has_missing {
+ 0.serialize(serializer)
+ } else {
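+            // Editorial note: emit a compact sequence: a single non-generic input is flattened
+            // to one element, and an empty output list is omitted entirely.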
+ let mut seq = serializer.serialize_seq(None)?;
+ match &self.inputs[..] {
+ [one] if one.generics.is_none() => seq.serialize_element(one)?,
+ _ => seq.serialize_element(&self.inputs)?,
+ }
+ match &self.output[..] {
+ [] => {}
+ [one] if one.generics.is_none() => seq.serialize_element(one)?,
+ _ => seq.serialize_element(&self.output)?,
+ }
+ seq.end()
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct StylePath {
+ /// The path to the theme
+ pub(crate) path: PathBuf,
+}
+
+impl StylePath {
+ pub(crate) fn basename(&self) -> Result<String, Error> {
+ Ok(try_none!(try_none!(self.path.file_stem(), &self.path).to_str(), &self.path).to_string())
+ }
+}
+
+fn write_srclink(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer) {
+ if let Some(l) = cx.src_href(item) {
+ write!(buf, "<a class=\"srclink\" href=\"{}\">source</a>", l)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Hash)]
+struct ItemEntry {
+ url: String,
+ name: String,
+}
+
+impl ItemEntry {
+ fn new(mut url: String, name: String) -> ItemEntry {
+ while url.starts_with('/') {
+ url.remove(0);
+ }
+ ItemEntry { url, name }
+ }
+}
+
+impl ItemEntry {
+ pub(crate) fn print(&self) -> impl fmt::Display + '_ {
+ crate::html::format::display_fn(move |f| {
+ write!(f, "<a href=\"{}\">{}</a>", self.url, Escape(&self.name))
+ })
+ }
+}
+
+impl PartialOrd for ItemEntry {
+ fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for ItemEntry {
+ fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering {
+ self.name.cmp(&other.name)
+ }
+}
+
+#[derive(Debug)]
+struct AllTypes {
+ structs: FxHashSet<ItemEntry>,
+ enums: FxHashSet<ItemEntry>,
+ unions: FxHashSet<ItemEntry>,
+ primitives: FxHashSet<ItemEntry>,
+ traits: FxHashSet<ItemEntry>,
+ macros: FxHashSet<ItemEntry>,
+ functions: FxHashSet<ItemEntry>,
+ typedefs: FxHashSet<ItemEntry>,
+ opaque_tys: FxHashSet<ItemEntry>,
+ statics: FxHashSet<ItemEntry>,
+ constants: FxHashSet<ItemEntry>,
+ attributes: FxHashSet<ItemEntry>,
+ derives: FxHashSet<ItemEntry>,
+ trait_aliases: FxHashSet<ItemEntry>,
+}
+
+impl AllTypes {
+ fn new() -> AllTypes {
+ let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default());
+ AllTypes {
+ structs: new_set(100),
+ enums: new_set(100),
+ unions: new_set(100),
+ primitives: new_set(26),
+ traits: new_set(100),
+ macros: new_set(100),
+ functions: new_set(100),
+ typedefs: new_set(100),
+ opaque_tys: new_set(100),
+ statics: new_set(100),
+ constants: new_set(100),
+ attributes: new_set(100),
+ derives: new_set(100),
+ trait_aliases: new_set(100),
+ }
+ }
+
+ fn append(&mut self, item_name: String, item_type: &ItemType) {
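+        // Editorial note: `item_name` is a fully qualified path (e.g. `crate::module::Item`);
+        // the code below drops the crate name, builds a relative URL of the form
+        // `module/{item_type}.Item.html`, and keeps the remaining path as the displayed name.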
+ let mut url: Vec<_> = item_name.split("::").skip(1).collect();
+ if let Some(name) = url.pop() {
+ let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name);
+ url.push(name);
+ let name = url.join("::");
+ match *item_type {
+ ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)),
+ ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)),
+ ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)),
+ ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)),
+ ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)),
+ ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)),
+ ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)),
+ ItemType::Typedef => self.typedefs.insert(ItemEntry::new(new_url, name)),
+ ItemType::OpaqueTy => self.opaque_tys.insert(ItemEntry::new(new_url, name)),
+ ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)),
+ ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
+ ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
+ ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
+ ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
+ _ => true,
+ };
+ }
+ }
+}
+
+impl AllTypes {
+ fn print(self, f: &mut Buffer) {
+ fn print_entries(f: &mut Buffer, e: &FxHashSet<ItemEntry>, title: &str, class: &str) {
+ if !e.is_empty() {
+ let mut e: Vec<&ItemEntry> = e.iter().collect();
+ e.sort();
+ write!(
+ f,
+ "<h3 id=\"{}\">{}</h3><ul class=\"{} docblock\">",
+                    title.replace(' ', "-"), // IDs cannot contain whitespace.
+ title,
+ class
+ );
+
+ for s in e.iter() {
+ write!(f, "<li>{}</li>", s.print());
+ }
+
+ f.write_str("</ul>");
+ }
+ }
+
+ f.write_str(
+ "<h1 class=\"fqn\">\
+ <span class=\"in-band\">List of all items</span>\
+ </h1>",
+ );
+ // Note: print_entries does not escape the title, because we know the current set of titles
+ // doesn't require escaping.
+ print_entries(f, &self.structs, "Structs", "structs");
+ print_entries(f, &self.enums, "Enums", "enums");
+ print_entries(f, &self.unions, "Unions", "unions");
+ print_entries(f, &self.primitives, "Primitives", "primitives");
+ print_entries(f, &self.traits, "Traits", "traits");
+ print_entries(f, &self.macros, "Macros", "macros");
+ print_entries(f, &self.attributes, "Attribute Macros", "attributes");
+ print_entries(f, &self.derives, "Derive Macros", "derives");
+ print_entries(f, &self.functions, "Functions", "functions");
+ print_entries(f, &self.typedefs, "Typedefs", "typedefs");
+ print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases");
+ print_entries(f, &self.opaque_tys, "Opaque Types", "opaque-types");
+ print_entries(f, &self.statics, "Statics", "statics");
+ print_entries(f, &self.constants, "Constants", "constants")
+ }
+}
+
+fn scrape_examples_help(shared: &SharedContext<'_>) -> String {
+ let mut content = SCRAPE_EXAMPLES_HELP_MD.to_owned();
+ content.push_str(&format!(
+ "## More information\n\n\
+ If you want more information about this feature, please read the [corresponding chapter in the Rustdoc book]({}/rustdoc/scraped-examples.html).",
+ DOC_RUST_LANG_ORG_CHANNEL));
+
+ let mut ids = IdMap::default();
+ format!(
+ "<div class=\"main-heading\">\
+ <h1 class=\"fqn\">\
+ <span class=\"in-band\">About scraped examples</span>\
+ </h1>\
+ </div>\
+ <div>{}</div>",
+ Markdown {
+ content: &content,
+ links: &[],
+ ids: &mut ids,
+ error_codes: shared.codes,
+ edition: shared.edition(),
+ playground: &shared.playground,
+ heading_offset: HeadingOffset::H1
+ }
+ .into_string()
+ )
+}
+
+fn document(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ item: &clean::Item,
+ parent: Option<&clean::Item>,
+ heading_offset: HeadingOffset,
+) {
+ if let Some(ref name) = item.name {
+ info!("Documenting {}", name);
+ }
+ document_item_info(w, cx, item, parent);
+ if parent.is_none() {
+ document_full_collapsible(w, item, cx, heading_offset);
+ } else {
+ document_full(w, item, cx, heading_offset);
+ }
+}
+
+/// Render `md_text` as Markdown.
+fn render_markdown(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ md_text: &str,
+ links: Vec<RenderedLink>,
+ heading_offset: HeadingOffset,
+) {
+ write!(
+ w,
+ "<div class=\"docblock\">{}</div>",
+ Markdown {
+ content: md_text,
+ links: &links,
+ ids: &mut cx.id_map,
+ error_codes: cx.shared.codes,
+ edition: cx.shared.edition(),
+ playground: &cx.shared.playground,
+ heading_offset,
+ }
+ .into_string()
+ )
+}
+
+/// Writes a documentation block containing only the first paragraph of the documentation. If the
+/// docs are longer, a "Read more" link is appended to the end.
+fn document_short(
+ w: &mut Buffer,
+ item: &clean::Item,
+ cx: &mut Context<'_>,
+ link: AssocItemLink<'_>,
+ parent: &clean::Item,
+ show_def_docs: bool,
+) {
+ document_item_info(w, cx, item, Some(parent));
+ if !show_def_docs {
+ return;
+ }
+ if let Some(s) = item.doc_value() {
+ let mut summary_html = MarkdownSummaryLine(&s, &item.links(cx)).into_string();
+
+ if s.contains('\n') {
+ let link = format!(r#" <a{}>Read more</a>"#, assoc_href_attr(item, link, cx));
+
+ if let Some(idx) = summary_html.rfind("</p>") {
+ summary_html.insert_str(idx, &link);
+ } else {
+ summary_html.push_str(&link);
+ }
+ }
+
+ write!(w, "<div class='docblock'>{}</div>", summary_html,);
+ }
+}
+
+fn document_full_collapsible(
+ w: &mut Buffer,
+ item: &clean::Item,
+ cx: &mut Context<'_>,
+ heading_offset: HeadingOffset,
+) {
+ document_full_inner(w, item, cx, true, heading_offset);
+}
+
+fn document_full(
+ w: &mut Buffer,
+ item: &clean::Item,
+ cx: &mut Context<'_>,
+ heading_offset: HeadingOffset,
+) {
+ document_full_inner(w, item, cx, false, heading_offset);
+}
+
+fn document_full_inner(
+ w: &mut Buffer,
+ item: &clean::Item,
+ cx: &mut Context<'_>,
+ is_collapsible: bool,
+ heading_offset: HeadingOffset,
+) {
+ if let Some(s) = item.collapsed_doc_value() {
+ debug!("Doc block: =====\n{}\n=====", s);
+ if is_collapsible {
+ w.write_str(
+ "<details class=\"rustdoc-toggle top-doc\" open>\
+ <summary class=\"hideme\">\
+ <span>Expand description</span>\
+ </summary>",
+ );
+ render_markdown(w, cx, &s, item.links(cx), heading_offset);
+ w.write_str("</details>");
+ } else {
+ render_markdown(w, cx, &s, item.links(cx), heading_offset);
+ }
+ }
+
+ let kind = match &*item.kind {
+ clean::ItemKind::StrippedItem(box kind) | kind => kind,
+ };
+
+ if let clean::ItemKind::FunctionItem(..) | clean::ItemKind::MethodItem(..) = kind {
+ render_call_locations(w, cx, item);
+ }
+}
+
+/// Add extra information about an item such as:
+///
+/// * Stability
+/// * Deprecated
+/// * Required features (through the `doc_cfg` feature)
+fn document_item_info(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ item: &clean::Item,
+ parent: Option<&clean::Item>,
+) {
+ let item_infos = short_item_info(item, cx, parent);
+ if !item_infos.is_empty() {
+ w.write_str("<span class=\"item-info\">");
+ for info in item_infos {
+ w.write_str(&info);
+ }
+ w.write_str("</span>");
+ }
+}
+
+fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<String> {
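+    // Editorial note: the item's `cfg` is simplified against the parent's so that conditions
+    // already implied by the enclosing item are not repeated.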
+ let cfg = match (&item.cfg, parent.and_then(|p| p.cfg.as_ref())) {
+ (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
+ (cfg, _) => cfg.as_deref().cloned(),
+ };
+
+ debug!("Portability {:?} - {:?} = {:?}", item.cfg, parent.and_then(|p| p.cfg.as_ref()), cfg);
+
+ Some(format!("<div class=\"stab portability\">{}</div>", cfg?.render_long_html()))
+}
+
+/// Render the stability, deprecation and portability information that is displayed at the top of
+/// the item's documentation.
+fn short_item_info(
+ item: &clean::Item,
+ cx: &mut Context<'_>,
+ parent: Option<&clean::Item>,
+) -> Vec<String> {
+ let mut extra_info = vec![];
+ let error_codes = cx.shared.codes;
+
+ if let Some(depr @ Deprecation { note, since, is_since_rustc_version: _, suggestion: _ }) =
+ item.deprecation(cx.tcx())
+ {
+ // We display deprecation messages for #[deprecated], but only display
+ // the future-deprecation messages for rustc versions.
+ let mut message = if let Some(since) = since {
+ let since = since.as_str();
+ if !stability::deprecation_in_effect(&depr) {
+ if since == "TBD" {
+ String::from("Deprecating in a future Rust version")
+ } else {
+ format!("Deprecating in {}", Escape(since))
+ }
+ } else {
+ format!("Deprecated since {}", Escape(since))
+ }
+ } else {
+ String::from("Deprecated")
+ };
+
+ if let Some(note) = note {
+ let note = note.as_str();
+ let html = MarkdownHtml(
+ note,
+ &mut cx.id_map,
+ error_codes,
+ cx.shared.edition(),
+ &cx.shared.playground,
+ );
+ message.push_str(&format!(": {}", html.into_string()));
+ }
+ extra_info.push(format!(
+ "<div class=\"stab deprecated\"><span class=\"emoji\">👎</span> {}</div>",
+ message,
+ ));
+ }
+
+ // Render unstable items. But don't render "rustc_private" crates (internal compiler crates).
+ // Those crates are permanently unstable so it makes no sense to render "unstable" everywhere.
+ if let Some((StabilityLevel::Unstable { reason: _, issue, .. }, feature)) = item
+ .stability(cx.tcx())
+ .as_ref()
+ .filter(|stab| stab.feature != sym::rustc_private)
+ .map(|stab| (stab.level, stab.feature))
+ {
+ let mut message =
+ "<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned();
+
+ let mut feature = format!("<code>{}</code>", Escape(feature.as_str()));
+ if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
+ feature.push_str(&format!(
+ "&nbsp;<a href=\"{url}{issue}\">#{issue}</a>",
+ url = url,
+ issue = issue
+ ));
+ }
+
+ message.push_str(&format!(" ({})", feature));
+
+ extra_info.push(format!("<div class=\"stab unstable\">{}</div>", message));
+ }
+
+ if let Some(portability) = portability(item, parent) {
+ extra_info.push(portability);
+ }
+
+ extra_info
+}
+
+// Render the list of items inside one of the sections "Trait Implementations",
+// "Auto Trait Implementations," "Blanket Trait Implementations" (on struct/enum pages).
+fn render_impls(
+ cx: &mut Context<'_>,
+ w: &mut Buffer,
+ impls: &[&&Impl],
+ containing_item: &clean::Item,
+ toggle_open_by_default: bool,
+) {
+ let tcx = cx.tcx();
+ let mut rendered_impls = impls
+ .iter()
+ .map(|i| {
+ let did = i.trait_did().unwrap();
+ let provided_trait_methods = i.inner_impl().provided_trait_methods(tcx);
+ let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_trait_methods);
+ let mut buffer = if w.is_for_html() { Buffer::html() } else { Buffer::new() };
+ render_impl(
+ &mut buffer,
+ cx,
+ i,
+ containing_item,
+ assoc_link,
+ RenderMode::Normal,
+ None,
+ &[],
+ ImplRenderingParameters {
+ show_def_docs: true,
+ show_default_items: true,
+ show_non_assoc_items: true,
+ toggle_open_by_default,
+ },
+ );
+ buffer.into_inner()
+ })
+ .collect::<Vec<_>>();
+ rendered_impls.sort();
+ w.write_str(&rendered_impls.join(""));
+}
+
+/// Build a (possibly empty) `href` attribute (a key-value pair) for the given associated item.
+fn assoc_href_attr(it: &clean::Item, link: AssocItemLink<'_>, cx: &Context<'_>) -> String {
+ let name = it.name.unwrap();
+ let item_type = it.type_();
+
+ let href = match link {
+ AssocItemLink::Anchor(Some(ref id)) => Some(format!("#{}", id)),
+ AssocItemLink::Anchor(None) => Some(format!("#{}.{}", item_type, name)),
+ AssocItemLink::GotoSource(did, provided_methods) => {
+ // We're creating a link from the implementation of an associated item to its
+            // declaration in the trait.
+ let item_type = match item_type {
+ // For historical but not technical reasons, the item type of methods in
+ // trait declarations depends on whether the method is required (`TyMethod`) or
+ // provided (`Method`).
+ ItemType::Method | ItemType::TyMethod => {
+ if provided_methods.contains(&name) {
+ ItemType::Method
+ } else {
+ ItemType::TyMethod
+ }
+ }
+ // For associated types and constants, no such distinction exists.
+ item_type => item_type,
+ };
+
+ match href(did.expect_def_id(), cx) {
+ Ok((url, ..)) => Some(format!("{}#{}.{}", url, item_type, name)),
+ // The link is broken since it points to an external crate that wasn't documented.
+ // Do not create any link in such case. This is better than falling back to a
+ // dummy anchor like `#{item_type}.{name}` representing the `id` of *this* impl item
+ // (that used to happen in older versions). Indeed, in most cases this dummy would
+ // coincide with the `id`. However, it would not always do so.
+ // In general, this dummy would be incorrect:
+ // If the type with the trait impl also had an inherent impl with an assoc. item of
+ // the *same* name as this impl item, the dummy would link to that one even though
+ // those two items are distinct!
+ // In this scenario, the actual `id` of this impl item would be
+ // `#{item_type}.{name}-{n}` for some number `n` (a disambiguator).
+ Err(HrefError::DocumentationNotBuilt) => None,
+ Err(_) => Some(format!("#{}.{}", item_type, name)),
+ }
+ }
+ };
+
+ // If there is no `href` for the reason explained above, simply do not render it which is valid:
+ // https://html.spec.whatwg.org/multipage/links.html#links-created-by-a-and-area-elements
+ href.map(|href| format!(" href=\"{}\"", href)).unwrap_or_default()
+}
+
+fn assoc_const(
+ w: &mut Buffer,
+ it: &clean::Item,
+ ty: &clean::Type,
+ default: Option<&clean::ConstantKind>,
+ link: AssocItemLink<'_>,
+ extra: &str,
+ cx: &Context<'_>,
+) {
+ write!(
+ w,
+ "{extra}{vis}const <a{href} class=\"constant\">{name}</a>: {ty}",
+ extra = extra,
+ vis = it.visibility.print_with_space(it.item_id, cx),
+ href = assoc_href_attr(it, link, cx),
+ name = it.name.as_ref().unwrap(),
+ ty = ty.print(cx),
+ );
+ if let Some(default) = default {
+ write!(w, " = ");
+
+ // FIXME: `.value()` uses `clean::utils::format_integer_with_underscore_sep` under the
+ // hood which adds noisy underscores and a type suffix to number literals.
+ // This hurts readability in this context especially when more complex expressions
+        // are involved, and it doesn't add much value.
+ // Find a way to print constants here without all that jazz.
+ write!(w, "{}", Escape(&default.value(cx.tcx()).unwrap_or_else(|| default.expr(cx.tcx()))));
+ }
+}
+
+fn assoc_type(
+ w: &mut Buffer,
+ it: &clean::Item,
+ generics: &clean::Generics,
+ bounds: &[clean::GenericBound],
+ default: Option<&clean::Type>,
+ link: AssocItemLink<'_>,
+ indent: usize,
+ cx: &Context<'_>,
+) {
+ write!(
+ w,
+ "{indent}type <a{href} class=\"associatedtype\">{name}</a>{generics}",
+ indent = " ".repeat(indent),
+ href = assoc_href_attr(it, link, cx),
+ name = it.name.as_ref().unwrap(),
+ generics = generics.print(cx),
+ );
+ if !bounds.is_empty() {
+ write!(w, ": {}", print_generic_bounds(bounds, cx))
+ }
+ write!(w, "{}", print_where_clause(generics, cx, indent, Ending::NoNewline));
+ if let Some(default) = default {
+ write!(w, " = {}", default.print(cx))
+ }
+}
+
+fn assoc_method(
+ w: &mut Buffer,
+ meth: &clean::Item,
+ g: &clean::Generics,
+ d: &clean::FnDecl,
+ link: AssocItemLink<'_>,
+ parent: ItemType,
+ cx: &Context<'_>,
+ render_mode: RenderMode,
+) {
+ let header = meth.fn_header(cx.tcx()).expect("Trying to get header from a non-function item");
+ let name = meth.name.as_ref().unwrap();
+ let vis = meth.visibility.print_with_space(meth.item_id, cx).to_string();
+ // FIXME: Once https://github.com/rust-lang/rust/issues/67792 is implemented, we can remove
+ // this condition.
+ let constness = match render_mode {
+ RenderMode::Normal => {
+ print_constness_with_space(&header.constness, meth.const_stability(cx.tcx()))
+ }
+ RenderMode::ForDeref { .. } => "",
+ };
+ let asyncness = header.asyncness.print_with_space();
+ let unsafety = header.unsafety.print_with_space();
+ let defaultness = print_default_space(meth.is_default());
+ let abi = print_abi_with_space(header.abi).to_string();
+ let href = assoc_href_attr(meth, link, cx);
+
+ // NOTE: `{:#}` does not print HTML formatting, `{}` does. So `g.print` can't be reused between the length calculation and `write!`.
+ let generics_len = format!("{:#}", g.print(cx)).len();
+ let mut header_len = "fn ".len()
+ + vis.len()
+ + constness.len()
+ + asyncness.len()
+ + unsafety.len()
+ + defaultness.len()
+ + abi.len()
+ + name.as_str().len()
+ + generics_len;
+
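+    // Editorial note: trait methods are rendered indented by four spaces inside the trait
+    // block, so the extra width is added when computing the rendered header length below.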
+ let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
+ header_len += 4;
+ let indent_str = " ";
+ render_attributes_in_pre(w, meth, indent_str);
+ (4, indent_str, Ending::NoNewline)
+ } else {
+ render_attributes_in_code(w, meth);
+ (0, "", Ending::Newline)
+ };
+ w.reserve(header_len + "<a href=\"\" class=\"fnname\">{".len() + "</a>".len());
+ write!(
+ w,
+ "{indent}{vis}{constness}{asyncness}{unsafety}{defaultness}{abi}fn <a{href} class=\"fnname\">{name}</a>\
+ {generics}{decl}{notable_traits}{where_clause}",
+ indent = indent_str,
+ vis = vis,
+ constness = constness,
+ asyncness = asyncness,
+ unsafety = unsafety,
+ defaultness = defaultness,
+ abi = abi,
+ href = href,
+ name = name,
+ generics = g.print(cx),
+ decl = d.full_print(header_len, indent, header.asyncness, cx),
+ notable_traits = notable_traits_decl(d, cx),
+ where_clause = print_where_clause(g, cx, indent, end_newline),
+ )
+}
+
+/// Writes a span containing the versions at which an item became stable and/or const-stable. For
+/// example, if the item became stable at 1.0.0, and const-stable at 1.45.0, this function would
+/// write a span containing "1.0.0 (const: 1.45.0)".
+///
+/// Returns `true` if a stability annotation was rendered.
+///
+/// Stability and const-stability are considered separately. If the item is unstable, no version
+/// will be written. If the item is const-unstable, "const: unstable" will be appended to the
+/// span, with a link to the tracking issue if present. If an item's stability or const-stability
+/// version matches the version of its enclosing item, that version will be omitted.
+///
+/// Note that it is possible for an unstable function to be const-stable. In that case, the span
+/// will include the const-stable version, but no stable version will be emitted, as a natural
+/// consequence of the above rules.
+fn render_stability_since_raw(
+ w: &mut Buffer,
+ ver: Option<Symbol>,
+ const_stability: Option<ConstStability>,
+ containing_ver: Option<Symbol>,
+ containing_const_ver: Option<Symbol>,
+) -> bool {
+ let stable_version = ver.filter(|inner| !inner.is_empty() && Some(*inner) != containing_ver);
+
+ let mut title = String::new();
+ let mut stability = String::new();
+
+ if let Some(ver) = stable_version {
+ stability.push_str(ver.as_str());
+ title.push_str(&format!("Stable since Rust version {}", ver));
+ }
+
+ let const_title_and_stability = match const_stability {
+ Some(ConstStability { level: StabilityLevel::Stable { since, .. }, .. })
+ if Some(since) != containing_const_ver =>
+ {
+ Some((format!("const since {}", since), format!("const: {}", since)))
+ }
+ Some(ConstStability { level: StabilityLevel::Unstable { issue, .. }, feature, .. }) => {
+ let unstable = if let Some(n) = issue {
+ format!(
+ r#"<a href="https://github.com/rust-lang/rust/issues/{}" title="Tracking issue for {}">unstable</a>"#,
+ n, feature
+ )
+ } else {
+ String::from("unstable")
+ };
+
+ Some((String::from("const unstable"), format!("const: {}", unstable)))
+ }
+ _ => None,
+ };
+
+ if let Some((const_title, const_stability)) = const_title_and_stability {
+ if !title.is_empty() {
+ title.push_str(&format!(", {}", const_title));
+ } else {
+ title.push_str(&const_title);
+ }
+
+ if !stability.is_empty() {
+ stability.push_str(&format!(" ({})", const_stability));
+ } else {
+ stability.push_str(&const_stability);
+ }
+ }
+
+ if !stability.is_empty() {
+ write!(w, r#"<span class="since" title="{}">{}</span>"#, title, stability);
+ }
+
+ !stability.is_empty()
+}
+
+fn render_assoc_item(
+ w: &mut Buffer,
+ item: &clean::Item,
+ link: AssocItemLink<'_>,
+ parent: ItemType,
+ cx: &Context<'_>,
+ render_mode: RenderMode,
+) {
+ match &*item.kind {
+ clean::StrippedItem(..) => {}
+ clean::TyMethodItem(m) => {
+ assoc_method(w, item, &m.generics, &m.decl, link, parent, cx, render_mode)
+ }
+ clean::MethodItem(m, _) => {
+ assoc_method(w, item, &m.generics, &m.decl, link, parent, cx, render_mode)
+ }
+ kind @ (clean::TyAssocConstItem(ty) | clean::AssocConstItem(ty, _)) => assoc_const(
+ w,
+ item,
+ ty,
+ match kind {
+ clean::TyAssocConstItem(_) => None,
+ clean::AssocConstItem(_, default) => Some(default),
+ _ => unreachable!(),
+ },
+ link,
+ if parent == ItemType::Trait { " " } else { "" },
+ cx,
+ ),
+ clean::TyAssocTypeItem(ref generics, ref bounds) => assoc_type(
+ w,
+ item,
+ generics,
+ bounds,
+ None,
+ link,
+ if parent == ItemType::Trait { 4 } else { 0 },
+ cx,
+ ),
+ clean::AssocTypeItem(ref ty, ref bounds) => assoc_type(
+ w,
+ item,
+ &ty.generics,
+ bounds,
+ Some(ty.item_type.as_ref().unwrap_or(&ty.type_)),
+ link,
+ if parent == ItemType::Trait { 4 } else { 0 },
+ cx,
+ ),
+ _ => panic!("render_assoc_item called on non-associated-item"),
+ }
+}
+
+const ALLOWED_ATTRIBUTES: &[Symbol] =
+ &[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive];
+
+fn attributes(it: &clean::Item) -> Vec<String> {
+ it.attrs
+ .other_attrs
+ .iter()
+ .filter_map(|attr| {
+ if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
+ Some(
+ pprust::attribute_to_string(attr)
+ .replace("\\\n", "")
+ .replace('\n', "")
+ .replace(" ", " "),
+ )
+ } else {
+ None
+ }
+ })
+ .collect()
+}
+
+// When an attribute is rendered inside a `<pre>` tag, it is formatted using
+// a whitespace prefix and newline.
+fn render_attributes_in_pre(w: &mut Buffer, it: &clean::Item, prefix: &str) {
+ for a in attributes(it) {
+ writeln!(w, "{}{}", prefix, a);
+ }
+}
+
+// When an attribute is rendered inside a `<code>` tag, it is formatted using
+// a div to produce a newline after it.
+fn render_attributes_in_code(w: &mut Buffer, it: &clean::Item) {
+ for a in attributes(it) {
+ write!(w, "<div class=\"code-attribute\">{}</div>", a);
+ }
+}
+
+#[derive(Copy, Clone)]
+enum AssocItemLink<'a> {
+ Anchor(Option<&'a str>),
+ GotoSource(ItemId, &'a FxHashSet<Symbol>),
+}
+
+impl<'a> AssocItemLink<'a> {
+ fn anchor(&self, id: &'a str) -> Self {
+ match *self {
+ AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(id)),
+ ref other => *other,
+ }
+ }
+}
+
+fn render_assoc_items(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ containing_item: &clean::Item,
+ it: DefId,
+ what: AssocItemRender<'_>,
+) {
+ let mut derefs = FxHashSet::default();
+ derefs.insert(it);
+ render_assoc_items_inner(w, cx, containing_item, it, what, &mut derefs)
+}
+
+fn render_assoc_items_inner(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ containing_item: &clean::Item,
+ it: DefId,
+ what: AssocItemRender<'_>,
+ derefs: &mut FxHashSet<DefId>,
+) {
+ info!("Documenting associated items of {:?}", containing_item.name);
+ let shared = Rc::clone(&cx.shared);
+ let cache = &shared.cache;
+ let Some(v) = cache.impls.get(&it) else { return };
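+    // Editorial note: the impls are split into inherent impls and trait impls; the inherent
+    // ones are rendered first, under the "Implementations" section.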
+ let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none());
+ if !non_trait.is_empty() {
+ let mut tmp_buf = Buffer::empty_from(w);
+ let (render_mode, id) = match what {
+ AssocItemRender::All => {
+ tmp_buf.write_str(
+ "<h2 id=\"implementations\" class=\"small-section-header\">\
+ Implementations\
+ <a href=\"#implementations\" class=\"anchor\"></a>\
+ </h2>",
+ );
+ (RenderMode::Normal, "implementations-list".to_owned())
+ }
+ AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
+ let id =
+ cx.derive_id(small_url_encode(format!("deref-methods-{:#}", type_.print(cx))));
+ if let Some(def_id) = type_.def_id(cx.cache()) {
+ cx.deref_id_map.insert(def_id, id.clone());
+ }
+ write!(
+ tmp_buf,
+ "<h2 id=\"{id}\" class=\"small-section-header\">\
+ <span>Methods from {trait_}&lt;Target = {type_}&gt;</span>\
+ <a href=\"#{id}\" class=\"anchor\"></a>\
+ </h2>",
+ id = id,
+ trait_ = trait_.print(cx),
+ type_ = type_.print(cx),
+ );
+ (RenderMode::ForDeref { mut_: deref_mut_ }, cx.derive_id(id))
+ }
+ };
+ let mut impls_buf = Buffer::empty_from(w);
+ for i in &non_trait {
+ render_impl(
+ &mut impls_buf,
+ cx,
+ i,
+ containing_item,
+ AssocItemLink::Anchor(None),
+ render_mode,
+ None,
+ &[],
+ ImplRenderingParameters {
+ show_def_docs: true,
+ show_default_items: true,
+ show_non_assoc_items: true,
+ toggle_open_by_default: true,
+ },
+ );
+ }
+ if !impls_buf.is_empty() {
+ w.push_buffer(tmp_buf);
+ write!(w, "<div id=\"{}\">", id);
+ w.push_buffer(impls_buf);
+ w.write_str("</div>");
+ }
+ }
+
+ if !traits.is_empty() {
+ let deref_impl =
+ traits.iter().find(|t| t.trait_did() == cx.tcx().lang_items().deref_trait());
+ if let Some(impl_) = deref_impl {
+ let has_deref_mut =
+ traits.iter().any(|t| t.trait_did() == cx.tcx().lang_items().deref_mut_trait());
+ render_deref_methods(w, cx, impl_, containing_item, has_deref_mut, derefs);
+ }
+
+ // If we were already one level into rendering deref methods, we don't want to render
+ // anything after recursing into any further deref methods above.
+ if let AssocItemRender::DerefFor { .. } = what {
+ return;
+ }
+
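+        // Editorial note: trait impls are split into three sections: regular trait impls,
+        // auto trait ("synthetic") impls, and blanket impls.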
+ let (synthetic, concrete): (Vec<&&Impl>, Vec<&&Impl>) =
+ traits.iter().partition(|t| t.inner_impl().kind.is_auto());
+ let (blanket_impl, concrete): (Vec<&&Impl>, _) =
+ concrete.into_iter().partition(|t| t.inner_impl().kind.is_blanket());
+
+ let mut impls = Buffer::empty_from(w);
+ render_impls(cx, &mut impls, &concrete, containing_item, true);
+ let impls = impls.into_inner();
+ if !impls.is_empty() {
+ write!(
+ w,
+ "<h2 id=\"trait-implementations\" class=\"small-section-header\">\
+ Trait Implementations\
+ <a href=\"#trait-implementations\" class=\"anchor\"></a>\
+ </h2>\
+ <div id=\"trait-implementations-list\">{}</div>",
+ impls
+ );
+ }
+
+ if !synthetic.is_empty() {
+ w.write_str(
+ "<h2 id=\"synthetic-implementations\" class=\"small-section-header\">\
+ Auto Trait Implementations\
+ <a href=\"#synthetic-implementations\" class=\"anchor\"></a>\
+ </h2>\
+ <div id=\"synthetic-implementations-list\">",
+ );
+ render_impls(cx, w, &synthetic, containing_item, false);
+ w.write_str("</div>");
+ }
+
+ if !blanket_impl.is_empty() {
+ w.write_str(
+ "<h2 id=\"blanket-implementations\" class=\"small-section-header\">\
+ Blanket Implementations\
+ <a href=\"#blanket-implementations\" class=\"anchor\"></a>\
+ </h2>\
+ <div id=\"blanket-implementations-list\">",
+ );
+ render_impls(cx, w, &blanket_impl, containing_item, false);
+ w.write_str("</div>");
+ }
+ }
+}
+
+fn render_deref_methods(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ impl_: &Impl,
+ container_item: &clean::Item,
+ deref_mut: bool,
+ derefs: &mut FxHashSet<DefId>,
+) {
+ let cache = cx.cache();
+ let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
+ let (target, real_target) = impl_
+ .inner_impl()
+ .items
+ .iter()
+ .find_map(|item| match *item.kind {
+ clean::AssocTypeItem(box ref t, _) => Some(match *t {
+ clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
+ _ => (&t.type_, &t.type_),
+ }),
+ _ => None,
+ })
+ .expect("Expected associated type binding");
+ debug!("Render deref methods for {:#?}, target {:#?}", impl_.inner_impl().for_, target);
+ let what =
+ AssocItemRender::DerefFor { trait_: deref_type, type_: real_target, deref_mut_: deref_mut };
+ if let Some(did) = target.def_id(cache) {
+ if let Some(type_did) = impl_.inner_impl().for_.def_id(cache) {
+ // `impl Deref<Target = S> for S`
+ if did == type_did || !derefs.insert(did) {
+ // Avoid infinite cycles
+ return;
+ }
+ }
+ render_assoc_items_inner(w, cx, container_item, did, what, derefs);
+ } else if let Some(prim) = target.primitive_type() {
+ if let Some(&did) = cache.primitive_locations.get(&prim) {
+ render_assoc_items_inner(w, cx, container_item, did, what, derefs);
+ }
+ }
+}
+
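+// Editorial note: this decides whether a method is shown when listing methods reachable
+// through a `Deref` target: receivers taken by value or by `Box` are skipped, and
+// `&mut self` receivers are only shown when mutable access (`DerefMut`) is available.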
+fn should_render_item(item: &clean::Item, deref_mut_: bool, tcx: TyCtxt<'_>) -> bool {
+ let self_type_opt = match *item.kind {
+ clean::MethodItem(ref method, _) => method.decl.self_type(),
+ clean::TyMethodItem(ref method) => method.decl.self_type(),
+ _ => None,
+ };
+
+ if let Some(self_ty) = self_type_opt {
+ let (by_mut_ref, by_box, by_value) = match self_ty {
+ SelfTy::SelfBorrowed(_, mutability)
+ | SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. }) => {
+ (mutability == Mutability::Mut, false, false)
+ }
+ SelfTy::SelfExplicit(clean::Type::Path { path }) => {
+ (false, Some(path.def_id()) == tcx.lang_items().owned_box(), false)
+ }
+ SelfTy::SelfValue => (false, false, true),
+ _ => (false, false, false),
+ };
+
+ (deref_mut_ || !by_mut_ref) && !by_box && !by_value
+ } else {
+ false
+ }
+}
+
+fn notable_traits_decl(decl: &clean::FnDecl, cx: &Context<'_>) -> String {
+ let mut out = Buffer::html();
+
+ if let Some((did, ty)) = decl.output.as_return().and_then(|t| Some((t.def_id(cx.cache())?, t)))
+ {
+ if let Some(impls) = cx.cache().impls.get(&did) {
+ for i in impls {
+ let impl_ = i.inner_impl();
+ if !impl_.for_.without_borrowed_ref().is_same(ty.without_borrowed_ref(), cx.cache())
+ {
+ // Two different types might have the same did,
+ // without actually being the same.
+ continue;
+ }
+ if let Some(trait_) = &impl_.trait_ {
+ let trait_did = trait_.def_id();
+
+ if cx.cache().traits.get(&trait_did).map_or(false, |t| t.is_notable) {
+ if out.is_empty() {
+ write!(
+ &mut out,
+ "<span class=\"notable\">Notable traits for {}</span>\
+ <code class=\"content\">",
+ impl_.for_.print(cx)
+ );
+ }
+
+                    // Use the "where" class here to make it small.
+ write!(
+ &mut out,
+ "<span class=\"where fmt-newline\">{}</span>",
+ impl_.print(false, cx)
+ );
+ for it in &impl_.items {
+ if let clean::AssocTypeItem(ref tydef, ref _bounds) = *it.kind {
+ out.push_str("<span class=\"where fmt-newline\"> ");
+ let empty_set = FxHashSet::default();
+ let src_link =
+ AssocItemLink::GotoSource(trait_did.into(), &empty_set);
+ assoc_type(
+ &mut out,
+ it,
+ &tydef.generics,
+ &[], // intentionally leaving out bounds
+ Some(&tydef.type_),
+ src_link,
+ 0,
+ cx,
+ );
+ out.push_str(";</span>");
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if !out.is_empty() {
+ out.insert_str(
+ 0,
+ "<span class=\"notable-traits\"><span class=\"notable-traits-tooltip\">ⓘ\
+ <span class=\"notable-traits-tooltiptext\"><span class=\"docblock\">",
+ );
+ out.push_str("</code></span></span></span></span>");
+ }
+
+ out.into_inner()
+}
+
+#[derive(Clone, Copy, Debug)]
+struct ImplRenderingParameters {
+ show_def_docs: bool,
+ show_default_items: bool,
+ /// Whether or not to show methods.
+ show_non_assoc_items: bool,
+ toggle_open_by_default: bool,
+}
+
+fn render_impl(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ i: &Impl,
+ parent: &clean::Item,
+ link: AssocItemLink<'_>,
+ render_mode: RenderMode,
+ use_absolute: Option<bool>,
+ aliases: &[String],
+ rendering_params: ImplRenderingParameters,
+) {
+ let shared = Rc::clone(&cx.shared);
+ let cache = &shared.cache;
+ let traits = &cache.traits;
+ let trait_ = i.trait_did().map(|did| &traits[&did]);
+ let mut close_tags = String::new();
+
+ // For trait implementations, the `interesting` output contains all methods that have doc
+ // comments, and the `boring` output contains all methods that do not. The distinction is
+ // used to allow hiding the boring methods.
+ // `containing_item` is used for rendering stability info. If the parent is a trait impl,
+    // `containing_item` will be the grandparent, since trait impls can't have stability attached.
+ fn doc_impl_item(
+ boring: &mut Buffer,
+ interesting: &mut Buffer,
+ cx: &mut Context<'_>,
+ item: &clean::Item,
+ parent: &clean::Item,
+ containing_item: &clean::Item,
+ link: AssocItemLink<'_>,
+ render_mode: RenderMode,
+ is_default_item: bool,
+ trait_: Option<&clean::Trait>,
+ rendering_params: ImplRenderingParameters,
+ ) {
+ let item_type = item.type_();
+ let name = item.name.as_ref().unwrap();
+
+ let render_method_item = rendering_params.show_non_assoc_items
+ && match render_mode {
+ RenderMode::Normal => true,
+ RenderMode::ForDeref { mut_: deref_mut_ } => {
+ should_render_item(item, deref_mut_, cx.tcx())
+ }
+ };
+
+ let in_trait_class = if trait_.is_some() { " trait-impl" } else { "" };
+
+ let mut doc_buffer = Buffer::empty_from(boring);
+ let mut info_buffer = Buffer::empty_from(boring);
+ let mut short_documented = true;
+
+ if render_method_item {
+ if !is_default_item {
+ if let Some(t) = trait_ {
+ // The trait item may have been stripped so we might not
+ // find any documentation or stability for it.
+ if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
+ // We need the stability of the item from the trait
+ // because impls can't have a stability.
+ if item.doc_value().is_some() {
+ document_item_info(&mut info_buffer, cx, it, Some(parent));
+ document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
+ short_documented = false;
+ } else {
+ // In case the item isn't documented,
+ // provide short documentation from the trait.
+ document_short(
+ &mut doc_buffer,
+ it,
+ cx,
+ link,
+ parent,
+ rendering_params.show_def_docs,
+ );
+ }
+ }
+ } else {
+ document_item_info(&mut info_buffer, cx, item, Some(parent));
+ if rendering_params.show_def_docs {
+ document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
+ short_documented = false;
+ }
+ }
+ } else {
+ document_short(
+ &mut doc_buffer,
+ item,
+ cx,
+ link,
+ parent,
+ rendering_params.show_def_docs,
+ );
+ }
+ }
+ let w = if short_documented && trait_.is_some() { interesting } else { boring };
+
+ let toggled = !doc_buffer.is_empty();
+ if toggled {
+ let method_toggle_class =
+ if item_type == ItemType::Method { " method-toggle" } else { "" };
+ write!(w, "<details class=\"rustdoc-toggle{}\" open><summary>", method_toggle_class);
+ }
+ match &*item.kind {
+ clean::MethodItem(..) | clean::TyMethodItem(_) => {
+ // Only render when the method is not static or we allow static methods
+ if render_method_item {
+ let id = cx.derive_id(format!("{}.{}", item_type, name));
+ let source_id = trait_
+ .and_then(|trait_| {
+ trait_.items.iter().find(|item| {
+ item.name.map(|n| n.as_str().eq(name.as_str())).unwrap_or(false)
+ })
+ })
+ .map(|item| format!("{}.{}", item.type_(), name));
+ write!(
+ w,
+ "<section id=\"{}\" class=\"{}{} has-srclink\">",
+ id, item_type, in_trait_class,
+ );
+ render_rightside(w, cx, item, containing_item, render_mode);
+ if trait_.is_some() {
+ // Anchors are only used on trait impls.
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ }
+ w.write_str("<h4 class=\"code-header\">");
+ render_assoc_item(
+ w,
+ item,
+ link.anchor(source_id.as_ref().unwrap_or(&id)),
+ ItemType::Impl,
+ cx,
+ render_mode,
+ );
+ w.write_str("</h4>");
+ w.write_str("</section>");
+ }
+ }
+ kind @ (clean::TyAssocConstItem(ty) | clean::AssocConstItem(ty, _)) => {
+ let source_id = format!("{}.{}", item_type, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(
+ w,
+ "<section id=\"{}\" class=\"{}{} has-srclink\">",
+ id, item_type, in_trait_class
+ );
+ render_rightside(w, cx, item, containing_item, render_mode);
+ if trait_.is_some() {
+ // Anchors are only used on trait impls.
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ }
+ w.write_str("<h4 class=\"code-header\">");
+ assoc_const(
+ w,
+ item,
+ ty,
+ match kind {
+ clean::TyAssocConstItem(_) => None,
+ clean::AssocConstItem(_, default) => Some(default),
+ _ => unreachable!(),
+ },
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
+ "",
+ cx,
+ );
+ w.write_str("</h4>");
+ w.write_str("</section>");
+ }
+ clean::TyAssocTypeItem(generics, bounds) => {
+ let source_id = format!("{}.{}", item_type, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(w, "<section id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class);
+ if trait_.is_some() {
+ // Anchors are only used on trait impls.
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ }
+ w.write_str("<h4 class=\"code-header\">");
+ assoc_type(
+ w,
+ item,
+ generics,
+ bounds,
+ None,
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
+ 0,
+ cx,
+ );
+ w.write_str("</h4>");
+ w.write_str("</section>");
+ }
+ clean::AssocTypeItem(tydef, _bounds) => {
+ let source_id = format!("{}.{}", item_type, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(
+ w,
+ "<section id=\"{}\" class=\"{}{} has-srclink\">",
+ id, item_type, in_trait_class
+ );
+ if trait_.is_some() {
+ // Anchors are only used on trait impls.
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ }
+ w.write_str("<h4 class=\"code-header\">");
+ assoc_type(
+ w,
+ item,
+ &tydef.generics,
+ &[], // intentionally leaving out bounds
+ Some(tydef.item_type.as_ref().unwrap_or(&tydef.type_)),
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
+ 0,
+ cx,
+ );
+ w.write_str("</h4>");
+ w.write_str("</section>");
+ }
+ clean::StrippedItem(..) => return,
+ _ => panic!("can't make docs for trait item with name {:?}", item.name),
+ }
+
+ w.push_buffer(info_buffer);
+ if toggled {
+ w.write_str("</summary>");
+ w.push_buffer(doc_buffer);
+ w.push_str("</details>");
+ }
+ }
+
+ let mut impl_items = Buffer::empty_from(w);
+ let mut default_impl_items = Buffer::empty_from(w);
+
+ for trait_item in &i.inner_impl().items {
+ doc_impl_item(
+ &mut default_impl_items,
+ &mut impl_items,
+ cx,
+ trait_item,
+ if trait_.is_some() { &i.impl_item } else { parent },
+ parent,
+ link,
+ render_mode,
+ false,
+ trait_.map(|t| &t.trait_),
+ rendering_params,
+ );
+ }
+
+ fn render_default_items(
+ boring: &mut Buffer,
+ interesting: &mut Buffer,
+ cx: &mut Context<'_>,
+ t: &clean::Trait,
+ i: &clean::Impl,
+ parent: &clean::Item,
+ containing_item: &clean::Item,
+ render_mode: RenderMode,
+ rendering_params: ImplRenderingParameters,
+ ) {
+ for trait_item in &t.items {
+ let n = trait_item.name;
+ if i.items.iter().any(|m| m.name == n) {
+ continue;
+ }
+ let did = i.trait_.as_ref().unwrap().def_id();
+ let provided_methods = i.provided_trait_methods(cx.tcx());
+ let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_methods);
+
+ doc_impl_item(
+ boring,
+ interesting,
+ cx,
+ trait_item,
+ parent,
+ containing_item,
+ assoc_link,
+ render_mode,
+ true,
+ Some(t),
+ rendering_params,
+ );
+ }
+ }
+
+ // If we've implemented a trait, then also emit documentation for all
+ // default items which weren't overridden in the implementation block.
+ // We don't emit documentation for default items if they appear in the
+ // Implementations on Foreign Types or Implementors sections.
+ if rendering_params.show_default_items {
+ if let Some(t) = trait_ {
+ render_default_items(
+ &mut default_impl_items,
+ &mut impl_items,
+ cx,
+ &t.trait_,
+ i.inner_impl(),
+ &i.impl_item,
+ parent,
+ render_mode,
+ rendering_params,
+ );
+ }
+ }
+ if render_mode == RenderMode::Normal {
+ let toggled = !(impl_items.is_empty() && default_impl_items.is_empty());
+ if toggled {
+ close_tags.insert_str(0, "</details>");
+ write!(
+ w,
+ "<details class=\"rustdoc-toggle implementors-toggle\"{}>",
+ if rendering_params.toggle_open_by_default { " open" } else { "" }
+ );
+ write!(w, "<summary>")
+ }
+ render_impl_summary(
+ w,
+ cx,
+ i,
+ parent,
+ parent,
+ rendering_params.show_def_docs,
+ use_absolute,
+ aliases,
+ );
+ if toggled {
+ write!(w, "</summary>")
+ }
+
+ if let Some(ref dox) = i.impl_item.collapsed_doc_value() {
+ if trait_.is_none() && i.inner_impl().items.is_empty() {
+ w.write_str(
+ "<div class=\"item-info\">\
+ <div class=\"stab empty-impl\">This impl block contains no items.</div>
+ </div>",
+ );
+ }
+ write!(
+ w,
+ "<div class=\"docblock\">{}</div>",
+ Markdown {
+ content: &*dox,
+ links: &i.impl_item.links(cx),
+ ids: &mut cx.id_map,
+ error_codes: cx.shared.codes,
+ edition: cx.shared.edition(),
+ playground: &cx.shared.playground,
+ heading_offset: HeadingOffset::H4
+ }
+ .into_string()
+ );
+ }
+ }
+ if !default_impl_items.is_empty() || !impl_items.is_empty() {
+ w.write_str("<div class=\"impl-items\">");
+ w.push_buffer(default_impl_items);
+ w.push_buffer(impl_items);
+ close_tags.insert_str(0, "</div>");
+ }
+ w.write_str(&close_tags);
+}
+
+// Render the items that appear on the right side of methods, impls, and
+// associated types. For example "1.0.0 (const: 1.39.0) · source".
+fn render_rightside(
+ w: &mut Buffer,
+ cx: &Context<'_>,
+ item: &clean::Item,
+ containing_item: &clean::Item,
+ render_mode: RenderMode,
+) {
+ let tcx = cx.tcx();
+
+ // FIXME: Once https://github.com/rust-lang/rust/issues/67792 is implemented, we can remove
+ // this condition.
+ let (const_stability, const_stable_since) = match render_mode {
+ RenderMode::Normal => (item.const_stability(tcx), containing_item.const_stable_since(tcx)),
+ RenderMode::ForDeref { .. } => (None, None),
+ };
+
+ let mut rightside = Buffer::new();
+ let has_stability = render_stability_since_raw(
+ &mut rightside,
+ item.stable_since(tcx),
+ const_stability,
+ containing_item.stable_since(tcx),
+ const_stable_since,
+ );
+ let mut srclink = Buffer::empty_from(w);
+ write_srclink(cx, item, &mut srclink);
+ if has_stability && !srclink.is_empty() {
+ rightside.write_str(" · ");
+ }
+ rightside.push_buffer(srclink);
+ if !rightside.is_empty() {
+ write!(w, "<span class=\"rightside\">{}</span>", rightside.into_inner());
+ }
+}
+
+pub(crate) fn render_impl_summary(
+ w: &mut Buffer,
+ cx: &mut Context<'_>,
+ i: &Impl,
+ parent: &clean::Item,
+ containing_item: &clean::Item,
+ show_def_docs: bool,
+ use_absolute: Option<bool>,
+ // This argument is used to reference the same type via different paths, to avoid duplication
+ // in documentation pages for traits with automatic implementations such as "Send" and "Sync".
+ aliases: &[String],
+) {
+ let id =
+ cx.derive_id(get_id_for_impl(&i.inner_impl().for_, i.inner_impl().trait_.as_ref(), cx));
+ let aliases = if aliases.is_empty() {
+ String::new()
+ } else {
+ format!(" data-aliases=\"{}\"", aliases.join(","))
+ };
+ write!(w, "<section id=\"{}\" class=\"impl has-srclink\"{}>", id, aliases);
+ render_rightside(w, cx, &i.impl_item, containing_item, RenderMode::Normal);
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ write!(w, "<h3 class=\"code-header in-band\">");
+
+ if let Some(use_absolute) = use_absolute {
+ write!(w, "{}", i.inner_impl().print(use_absolute, cx));
+ if show_def_docs {
+ for it in &i.inner_impl().items {
+ if let clean::AssocTypeItem(ref tydef, ref _bounds) = *it.kind {
+ w.write_str("<span class=\"where fmt-newline\"> ");
+ assoc_type(
+ w,
+ it,
+ &tydef.generics,
+ &[], // intentionally leaving out bounds
+ Some(&tydef.type_),
+ AssocItemLink::Anchor(None),
+ 0,
+ cx,
+ );
+ w.write_str(";</span>");
+ }
+ }
+ }
+ } else {
+ write!(w, "{}", i.inner_impl().print(false, cx));
+ }
+ write!(w, "</h3>");
+
+ let is_trait = i.inner_impl().trait_.is_some();
+ if is_trait {
+ if let Some(portability) = portability(&i.impl_item, Some(parent)) {
+ write!(w, "<span class=\"item-info\">{}</span>", portability);
+ }
+ }
+
+ w.write_str("</section>");
+}
+
+fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) {
+ if it.is_struct()
+ || it.is_trait()
+ || it.is_primitive()
+ || it.is_union()
+ || it.is_enum()
+ || it.is_mod()
+ || it.is_typedef()
+ {
+ write!(
+ buffer,
+ "<h2 class=\"location\"><a href=\"#\">{}{}</a></h2>",
+ match *it.kind {
+ clean::ModuleItem(..) =>
+ if it.is_crate() {
+ "Crate "
+ } else {
+ "Module "
+ },
+ _ => "",
+ },
+ it.name.as_ref().unwrap()
+ );
+ }
+
+ buffer.write_str("<div class=\"sidebar-elems\">");
+ if it.is_crate() {
+ write!(buffer, "<div class=\"block\"><ul>");
+ if let Some(ref version) = cx.cache().crate_version {
+ write!(buffer, "<li class=\"version\">Version {}</li>", Escape(version));
+ }
+ write!(buffer, "<li><a id=\"all-types\" href=\"all.html\">All Items</a></li>");
+ buffer.write_str("</ul></div>");
+ }
+
+ match *it.kind {
+ clean::StructItem(ref s) => sidebar_struct(cx, buffer, it, s),
+ clean::TraitItem(ref t) => sidebar_trait(cx, buffer, it, t),
+ clean::PrimitiveItem(_) => sidebar_primitive(cx, buffer, it),
+ clean::UnionItem(ref u) => sidebar_union(cx, buffer, it, u),
+ clean::EnumItem(ref e) => sidebar_enum(cx, buffer, it, e),
+ clean::TypedefItem(_) => sidebar_typedef(cx, buffer, it),
+ clean::ModuleItem(ref m) => sidebar_module(buffer, &m.items),
+ clean::ForeignTypeItem => sidebar_foreign_type(cx, buffer, it),
+ _ => {}
+ }
+
+ // The sidebar is designed to display sibling functions, modules, and
+ // other miscellaneous information. Since there are lots of sibling
+ // items (and that causes quadratic growth in large modules),
+ // we refactor common parts into a shared JavaScript file per module.
+ // Still, we don't move everything into JS, because we want to preserve
+ // as much HTML as possible so that non-JS-enabled browsers
+ // can still navigate the documentation (though slightly inefficiently).
+
+ if !it.is_mod() {
+ let path: String = cx.current.iter().map(|s| s.as_str()).intersperse("::").collect();
+
+ write!(buffer, "<h2 class=\"location\"><a href=\"index.html\">In {}</a></h2>", path);
+ }
+
+ // Closes sidebar-elems div.
+ buffer.write_str("</div>");
+}
+
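+// Illustrative behaviour (hypothetical call sequence): the first request for "method.foo"
+// returns "method.foo"; a second request returns "method.foo-1", a third "method.foo-2",
+// and so on, so repeated sidebar entries never share an anchor.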
+fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
+ if used_links.insert(url.clone()) {
+ return url;
+ }
+ let mut add = 1;
+ while !used_links.insert(format!("{}-{}", url, add)) {
+ add += 1;
+ }
+ format!("{}-{}", url, add)
+}
+
+struct SidebarLink {
+ name: Symbol,
+ url: String,
+}
+
+impl fmt::Display for SidebarLink {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "<a href=\"#{}\">{}</a>", self.url, self.name)
+ }
+}
+
+impl PartialEq for SidebarLink {
+ fn eq(&self, other: &Self) -> bool {
+ self.url == other.url
+ }
+}
+
+impl Eq for SidebarLink {}
+
+impl PartialOrd for SidebarLink {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for SidebarLink {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.url.cmp(&other.url)
+ }
+}
+
+fn get_methods(
+ i: &clean::Impl,
+ for_deref: bool,
+ used_links: &mut FxHashSet<String>,
+ deref_mut: bool,
+ tcx: TyCtxt<'_>,
+) -> Vec<SidebarLink> {
+ i.items
+ .iter()
+ .filter_map(|item| match item.name {
+ Some(name) if !name.is_empty() && item.is_method() => {
+ if !for_deref || should_render_item(item, deref_mut, tcx) {
+ Some(SidebarLink {
+ name,
+ url: get_next_url(used_links, format!("{}.{}", ItemType::Method, name)),
+ })
+ } else {
+ None
+ }
+ }
+ _ => None,
+ })
+ .collect::<Vec<_>>()
+}
+
+fn get_associated_constants(
+ i: &clean::Impl,
+ used_links: &mut FxHashSet<String>,
+) -> Vec<SidebarLink> {
+ i.items
+ .iter()
+ .filter_map(|item| match item.name {
+ Some(name) if !name.is_empty() && item.is_associated_const() => Some(SidebarLink {
+ name,
+ url: get_next_url(used_links, format!("{}.{}", ItemType::AssocConst, name)),
+ }),
+ _ => None,
+ })
+ .collect::<Vec<_>>()
+}
+
+// URL-encode any character that can appear in a (possibly generic) type name, so that the
+// result can safely be used as an HTML id / href fragment.
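+// For instance (illustrative), "impl-From<u8>-for-MyType" becomes
+// "impl-From%3Cu8%3E-for-MyType": only the characters matched below are escaped, and
+// everything else is copied through unchanged.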
+fn small_url_encode(s: String) -> String {
+ let mut st = String::new();
+ let mut last_match = 0;
+ for (idx, c) in s.char_indices() {
+ let escaped = match c {
+ '<' => "%3C",
+ '>' => "%3E",
+ ' ' => "%20",
+ '?' => "%3F",
+ '\'' => "%27",
+ '&' => "%26",
+ ',' => "%2C",
+ ':' => "%3A",
+ ';' => "%3B",
+ '[' => "%5B",
+ ']' => "%5D",
+ '"' => "%22",
+ _ => continue,
+ };
+
+ st += &s[last_match..idx];
+ st += escaped;
+ // NOTE: we only expect single byte characters here - which is fine as long as we
+ // only match single byte characters
+ last_match = idx + 1;
+ }
+
+ if last_match != 0 {
+ st += &s[last_match..];
+ st
+ } else {
+ s
+ }
+}
+
+fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
+ let did = it.item_id.expect_def_id();
+ let cache = cx.cache();
+
+ if let Some(v) = cache.impls.get(&did) {
+ let mut used_links = FxHashSet::default();
+ let mut id_map = IdMap::new();
+
+ {
+ let used_links_bor = &mut used_links;
+ let mut assoc_consts = v
+ .iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| get_associated_constants(i.inner_impl(), used_links_bor))
+ .collect::<Vec<_>>();
+ if !assoc_consts.is_empty() {
+ // We want links' order to be reproducible so we don't use unstable sort.
+ assoc_consts.sort();
+
+ print_sidebar_block(
+ out,
+ "implementations",
+ "Associated Constants",
+ assoc_consts.iter(),
+ );
+ }
+ let mut methods = v
+ .iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| get_methods(i.inner_impl(), false, used_links_bor, false, cx.tcx()))
+ .collect::<Vec<_>>();
+ if !methods.is_empty() {
+ // We want links' order to be reproducible so we don't use unstable sort.
+ methods.sort();
+
+ print_sidebar_block(out, "implementations", "Methods", methods.iter());
+ }
+ }
+
+ if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
+ if let Some(impl_) =
+ v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
+ {
+ let mut derefs = FxHashSet::default();
+ derefs.insert(did);
+ sidebar_deref_methods(cx, out, impl_, v, &mut derefs);
+ }
+
+ let format_impls = |impls: Vec<&Impl>, id_map: &mut IdMap| {
+ let mut links = FxHashSet::default();
+
+ let mut ret = impls
+ .iter()
+ .filter_map(|it| {
+ let trait_ = it.inner_impl().trait_.as_ref()?;
+ let encoded =
+ id_map.derive(get_id_for_impl(&it.inner_impl().for_, Some(trait_), cx));
+
+ let i_display = format!("{:#}", trait_.print(cx));
+ let out = Escape(&i_display);
+ let prefix = match it.inner_impl().polarity {
+ ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
+ ty::ImplPolarity::Negative => "!",
+ };
+ let generated = format!("<a href=\"#{}\">{}{}</a>", encoded, prefix, out);
+ if links.insert(generated.clone()) { Some(generated) } else { None }
+ })
+ .collect::<Vec<String>>();
+ ret.sort();
+ ret
+ };
+
+ let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) =
+ v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_auto());
+ let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) =
+ concrete.into_iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_blanket());
+
+ let concrete_format = format_impls(concrete, &mut id_map);
+ let synthetic_format = format_impls(synthetic, &mut id_map);
+ let blanket_format = format_impls(blanket_impl, &mut id_map);
+
+ if !concrete_format.is_empty() {
+ print_sidebar_block(
+ out,
+ "trait-implementations",
+ "Trait Implementations",
+ concrete_format.iter(),
+ );
+ }
+
+ if !synthetic_format.is_empty() {
+ print_sidebar_block(
+ out,
+ "synthetic-implementations",
+ "Auto Trait Implementations",
+ synthetic_format.iter(),
+ );
+ }
+
+ if !blanket_format.is_empty() {
+ print_sidebar_block(
+ out,
+ "blanket-implementations",
+ "Blanket Implementations",
+ blanket_format.iter(),
+ );
+ }
+ }
+ }
+}
+
+fn sidebar_deref_methods(
+ cx: &Context<'_>,
+ out: &mut Buffer,
+ impl_: &Impl,
+ v: &[Impl],
+ derefs: &mut FxHashSet<DefId>,
+) {
+ let c = cx.cache();
+
+ debug!("found Deref: {:?}", impl_);
+ if let Some((target, real_target)) =
+ impl_.inner_impl().items.iter().find_map(|item| match *item.kind {
+ clean::AssocTypeItem(box ref t, _) => Some(match *t {
+ clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
+ _ => (&t.type_, &t.type_),
+ }),
+ _ => None,
+ })
+ {
+ debug!("found target, real_target: {:?} {:?}", target, real_target);
+ if let Some(did) = target.def_id(c) {
+ if let Some(type_did) = impl_.inner_impl().for_.def_id(c) {
+ // `impl Deref<Target = S> for S`
+ if did == type_did || !derefs.insert(did) {
+ // Avoid infinite cycles
+ return;
+ }
+ }
+ }
+ let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
+ let inner_impl = target
+ .def_id(c)
+ .or_else(|| {
+ target.primitive_type().and_then(|prim| c.primitive_locations.get(&prim).cloned())
+ })
+ .and_then(|did| c.impls.get(&did));
+ if let Some(impls) = inner_impl {
+ debug!("found inner_impl: {:?}", impls);
+ let mut used_links = FxHashSet::default();
+ let mut ret = impls
+ .iter()
+ .filter(|i| i.inner_impl().trait_.is_none())
+ .flat_map(|i| {
+ get_methods(i.inner_impl(), true, &mut used_links, deref_mut, cx.tcx())
+ })
+ .collect::<Vec<_>>();
+ if !ret.is_empty() {
+ let id = if let Some(target_def_id) = real_target.def_id(c) {
+ cx.deref_id_map.get(&target_def_id).expect("Deref section without derived id")
+ } else {
+ "deref-methods"
+ };
+ let title = format!(
+ "Methods from {}&lt;Target={}&gt;",
+ Escape(&format!("{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print(cx))),
+ Escape(&format!("{:#}", real_target.print(cx))),
+ );
+ // We want links' order to be reproducible so we don't use unstable sort.
+ ret.sort();
+ print_sidebar_block(out, id, &title, ret.iter());
+ }
+ }
+
+ // Recurse into any further impls that might exist for `target`
+ if let Some(target_did) = target.def_id(c) {
+ if let Some(target_impls) = c.impls.get(&target_did) {
+ if let Some(target_deref_impl) = target_impls.iter().find(|i| {
+ i.inner_impl()
+ .trait_
+ .as_ref()
+ .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
+ .unwrap_or(false)
+ }) {
+ sidebar_deref_methods(cx, out, target_deref_impl, target_impls, derefs);
+ }
+ }
+ }
+ }
+}
+
+fn sidebar_struct(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) {
+ let mut sidebar = Buffer::new();
+ let fields = get_struct_fields_name(&s.fields);
+
+ if !fields.is_empty() {
+ match s.struct_type {
+ CtorKind::Fictive => {
+ print_sidebar_block(&mut sidebar, "fields", "Fields", fields.iter());
+ }
+ CtorKind::Fn => print_sidebar_title(&mut sidebar, "fields", "Tuple Fields"),
+ CtorKind::Const => {}
+ }
+ }
+
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+fn get_id_for_impl(for_: &clean::Type, trait_: Option<&clean::Path>, cx: &Context<'_>) -> String {
+ match trait_ {
+ Some(t) => small_url_encode(format!("impl-{:#}-for-{:#}", t.print(cx), for_.print(cx))),
+ None => small_url_encode(format!("impl-{:#}", for_.print(cx))),
+ }
+}
+
+fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> {
+ match *item.kind {
+ clean::ItemKind::ImplItem(ref i) => {
+ i.trait_.as_ref().map(|trait_| {
+ // Alternative format produces no URLs,
+ // so this parameter does nothing.
+ (format!("{:#}", i.for_.print(cx)), get_id_for_impl(&i.for_, Some(trait_), cx))
+ })
+ }
+ _ => None,
+ }
+}
+
+/// Don't call this function directly!!! Use `print_sidebar_title` or `print_sidebar_block` instead!
+fn print_sidebar_title_inner(buf: &mut Buffer, id: &str, title: &str) {
+ write!(
+ buf,
+ "<h3 class=\"sidebar-title\">\
+ <a href=\"#{}\">{}</a>\
+ </h3>",
+ id, title
+ );
+}
+
+fn print_sidebar_title(buf: &mut Buffer, id: &str, title: &str) {
+ buf.push_str("<div class=\"block\">");
+ print_sidebar_title_inner(buf, id, title);
+ buf.push_str("</div>");
+}
+
+fn print_sidebar_block(
+ buf: &mut Buffer,
+ id: &str,
+ title: &str,
+ items: impl Iterator<Item = impl fmt::Display>,
+) {
+ buf.push_str("<div class=\"block\">");
+ print_sidebar_title_inner(buf, id, title);
+ buf.push_str("<ul>");
+ for item in items {
+ write!(buf, "<li>{}</li>", item);
+ }
+ buf.push_str("</ul></div>");
+}
+
+fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) {
+ buf.write_str("<section>");
+
+ fn print_sidebar_section(
+ out: &mut Buffer,
+ items: &[clean::Item],
+ id: &str,
+ title: &str,
+ filter: impl Fn(&clean::Item) -> bool,
+ mapper: impl Fn(&str) -> String,
+ ) {
+ let mut items: Vec<&str> = items
+ .iter()
+ .filter_map(|m| match m.name {
+ Some(ref name) if filter(m) => Some(name.as_str()),
+ _ => None,
+ })
+ .collect::<Vec<_>>();
+
+ if !items.is_empty() {
+ items.sort_unstable();
+ print_sidebar_block(out, id, title, items.into_iter().map(mapper));
+ }
+ }
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "required-associated-types",
+ "Required Associated Types",
+ |m| m.is_ty_associated_type(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocType),
+ );
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "provided-associated-types",
+ "Provided Associated Types",
+ |m| m.is_associated_type(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocType),
+ );
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "required-associated-consts",
+ "Required Associated Constants",
+ |m| m.is_ty_associated_const(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocConst),
+ );
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "provided-associated-consts",
+ "Provided Associated Constants",
+ |m| m.is_associated_const(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::AssocConst),
+ );
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "required-methods",
+ "Required Methods",
+ |m| m.is_ty_method(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::TyMethod),
+ );
+
+ print_sidebar_section(
+ buf,
+ &t.items,
+ "provided-methods",
+ "Provided Methods",
+ |m| m.is_method(),
+ |sym| format!("<a href=\"#{1}.{0}\">{0}</a>", sym, ItemType::Method),
+ );
+
+ if let Some(implementors) = cx.cache().implementors.get(&it.item_id.expect_def_id()) {
+ let mut res = implementors
+ .iter()
+ .filter(|i| !i.is_on_local_type(cx))
+ .filter_map(|i| extract_for_impl_name(&i.impl_item, cx))
+ .collect::<Vec<_>>();
+
+ if !res.is_empty() {
+ res.sort();
+ print_sidebar_block(
+ buf,
+ "foreign-impls",
+ "Implementations on Foreign Types",
+ res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(name))),
+ );
+ }
+ }
+
+ sidebar_assoc_items(cx, buf, it);
+
+ print_sidebar_title(buf, "implementors", "Implementors");
+ if t.is_auto(cx.tcx()) {
+ print_sidebar_title(buf, "synthetic-implementors", "Auto Implementors");
+ }
+
+ buf.push_str("</section>")
+}
+
+fn sidebar_primitive(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
+ let mut sidebar = Buffer::new();
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+fn sidebar_typedef(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
+ let mut sidebar = Buffer::new();
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+fn get_struct_fields_name(fields: &[clean::Item]) -> Vec<String> {
+ let mut fields = fields
+ .iter()
+ .filter(|f| matches!(*f.kind, clean::StructFieldItem(..)))
+ .filter_map(|f| {
+ f.name.map(|name| format!("<a href=\"#structfield.{name}\">{name}</a>", name = name))
+ })
+ .collect::<Vec<_>>();
+ fields.sort();
+ fields
+}
+
+fn sidebar_union(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, u: &clean::Union) {
+ let mut sidebar = Buffer::new();
+ let fields = get_struct_fields_name(&u.fields);
+
+ if !fields.is_empty() {
+ print_sidebar_block(&mut sidebar, "fields", "Fields", fields.iter());
+ }
+
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean::Enum) {
+ let mut sidebar = Buffer::new();
+
+ let mut variants = e
+ .variants()
+ .filter_map(|v| {
+ v.name
+ .as_ref()
+ .map(|name| format!("<a href=\"#variant.{name}\">{name}</a>", name = name))
+ })
+ .collect::<Vec<_>>();
+ if !variants.is_empty() {
+ variants.sort_unstable();
+ print_sidebar_block(&mut sidebar, "variants", "Variants", variants.iter());
+ }
+
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+enum ItemSection {
+ Reexports,
+ PrimitiveTypes,
+ Modules,
+ Macros,
+ Structs,
+ Enums,
+ Constants,
+ Statics,
+ Traits,
+ Functions,
+ TypeDefinitions,
+ Unions,
+ Implementations,
+ TypeMethods,
+ Methods,
+ StructFields,
+ Variants,
+ AssociatedTypes,
+ AssociatedConstants,
+ ForeignTypes,
+ Keywords,
+ OpaqueTypes,
+ AttributeMacros,
+ DeriveMacros,
+ TraitAliases,
+}
+
+impl ItemSection {
+ const ALL: &'static [Self] = {
+ use ItemSection::*;
+ // NOTE: The order here affects the order in the UI.
+ &[
+ Reexports,
+ PrimitiveTypes,
+ Modules,
+ Macros,
+ Structs,
+ Enums,
+ Constants,
+ Statics,
+ Traits,
+ Functions,
+ TypeDefinitions,
+ Unions,
+ Implementations,
+ TypeMethods,
+ Methods,
+ StructFields,
+ Variants,
+ AssociatedTypes,
+ AssociatedConstants,
+ ForeignTypes,
+ Keywords,
+ OpaqueTypes,
+ AttributeMacros,
+ DeriveMacros,
+ TraitAliases,
+ ]
+ };
+
+ fn id(self) -> &'static str {
+ match self {
+ Self::Reexports => "reexports",
+ Self::Modules => "modules",
+ Self::Structs => "structs",
+ Self::Unions => "unions",
+ Self::Enums => "enums",
+ Self::Functions => "functions",
+ Self::TypeDefinitions => "types",
+ Self::Statics => "statics",
+ Self::Constants => "constants",
+ Self::Traits => "traits",
+ Self::Implementations => "impls",
+ Self::TypeMethods => "tymethods",
+ Self::Methods => "methods",
+ Self::StructFields => "fields",
+ Self::Variants => "variants",
+ Self::Macros => "macros",
+ Self::PrimitiveTypes => "primitives",
+ Self::AssociatedTypes => "associated-types",
+ Self::AssociatedConstants => "associated-consts",
+ Self::ForeignTypes => "foreign-types",
+ Self::Keywords => "keywords",
+ Self::OpaqueTypes => "opaque-types",
+ Self::AttributeMacros => "attributes",
+ Self::DeriveMacros => "derives",
+ Self::TraitAliases => "trait-aliases",
+ }
+ }
+
+ fn name(self) -> &'static str {
+ match self {
+ Self::Reexports => "Re-exports",
+ Self::Modules => "Modules",
+ Self::Structs => "Structs",
+ Self::Unions => "Unions",
+ Self::Enums => "Enums",
+ Self::Functions => "Functions",
+ Self::TypeDefinitions => "Type Definitions",
+ Self::Statics => "Statics",
+ Self::Constants => "Constants",
+ Self::Traits => "Traits",
+ Self::Implementations => "Implementations",
+ Self::TypeMethods => "Type Methods",
+ Self::Methods => "Methods",
+ Self::StructFields => "Struct Fields",
+ Self::Variants => "Variants",
+ Self::Macros => "Macros",
+ Self::PrimitiveTypes => "Primitive Types",
+ Self::AssociatedTypes => "Associated Types",
+ Self::AssociatedConstants => "Associated Constants",
+ Self::ForeignTypes => "Foreign Types",
+ Self::Keywords => "Keywords",
+ Self::OpaqueTypes => "Opaque Types",
+ Self::AttributeMacros => "Attribute Macros",
+ Self::DeriveMacros => "Derive Macros",
+ Self::TraitAliases => "Trait Aliases",
+ }
+ }
+}
+
+fn item_ty_to_section(ty: ItemType) -> ItemSection {
+ match ty {
+ ItemType::ExternCrate | ItemType::Import => ItemSection::Reexports,
+ ItemType::Module => ItemSection::Modules,
+ ItemType::Struct => ItemSection::Structs,
+ ItemType::Union => ItemSection::Unions,
+ ItemType::Enum => ItemSection::Enums,
+ ItemType::Function => ItemSection::Functions,
+ ItemType::Typedef => ItemSection::TypeDefinitions,
+ ItemType::Static => ItemSection::Statics,
+ ItemType::Constant => ItemSection::Constants,
+ ItemType::Trait => ItemSection::Traits,
+ ItemType::Impl => ItemSection::Implementations,
+ ItemType::TyMethod => ItemSection::TypeMethods,
+ ItemType::Method => ItemSection::Methods,
+ ItemType::StructField => ItemSection::StructFields,
+ ItemType::Variant => ItemSection::Variants,
+ ItemType::Macro => ItemSection::Macros,
+ ItemType::Primitive => ItemSection::PrimitiveTypes,
+ ItemType::AssocType => ItemSection::AssociatedTypes,
+ ItemType::AssocConst => ItemSection::AssociatedConstants,
+ ItemType::ForeignType => ItemSection::ForeignTypes,
+ ItemType::Keyword => ItemSection::Keywords,
+ ItemType::OpaqueTy => ItemSection::OpaqueTypes,
+ ItemType::ProcAttribute => ItemSection::AttributeMacros,
+ ItemType::ProcDerive => ItemSection::DeriveMacros,
+ ItemType::TraitAlias => ItemSection::TraitAliases,
+ }
+}
+
+fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
+ use std::fmt::Write as _;
+
+ let mut sidebar = String::new();
+
+ let item_sections_in_use: FxHashSet<_> = items
+ .iter()
+ .filter(|it| {
+ !it.is_stripped()
+ && it
+ .name
+ .or_else(|| {
+ if let clean::ImportItem(ref i) = *it.kind &&
+ let clean::ImportKind::Simple(s) = i.kind { Some(s) } else { None }
+ })
+ .is_some()
+ })
+ .map(|it| item_ty_to_section(it.type_()))
+ .collect();
+ for &sec in ItemSection::ALL.iter().filter(|sec| item_sections_in_use.contains(sec)) {
+ let _ = write!(sidebar, "<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name());
+ }
+
+ if !sidebar.is_empty() {
+ write!(
+ buf,
+ "<section>\
+ <div class=\"block\">\
+ <ul>{}</ul>\
+ </div>\
+ </section>",
+ sidebar
+ );
+ }
+}
+
+fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
+ let mut sidebar = Buffer::new();
+ sidebar_assoc_items(cx, &mut sidebar, it);
+
+ if !sidebar.is_empty() {
+ write!(buf, "<section>{}</section>", sidebar.into_inner());
+ }
+}
+
+pub(crate) const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
+
+/// Returns a list of all paths used in the type.
+/// This is used to help deduplicate imported impls
+/// for reexported types. If any of the contained
+/// types are re-exported, we don't use the corresponding
+/// entry from the JS file, as inlining will have already
+/// picked up the impl.
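+/// (Illustrative: for a type such as `&[(Foo, *const Bar)]` the breadth-first walk below
+/// descends through the reference, slice, tuple, and raw pointer and records the
+/// fully-qualified paths of `Foo` and `Bar`; note that generic arguments of a bare `Path`
+/// are not visited.)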
+fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
+ let mut out = Vec::new();
+ let mut visited = FxHashSet::default();
+ let mut work = VecDeque::new();
+
+ let mut process_path = |did: DefId| {
+ let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone());
+ let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern);
+
+ if let Some(path) = fqp {
+ out.push(join_with_double_colon(&path));
+ }
+ };
+
+ work.push_back(first_ty);
+
+ while let Some(ty) = work.pop_front() {
+ if !visited.insert(ty.clone()) {
+ continue;
+ }
+
+ match ty {
+ clean::Type::Path { path } => process_path(path.def_id()),
+ clean::Type::Tuple(tys) => {
+ work.extend(tys.into_iter());
+ }
+ clean::Type::Slice(ty) => {
+ work.push_back(*ty);
+ }
+ clean::Type::Array(ty, _) => {
+ work.push_back(*ty);
+ }
+ clean::Type::RawPointer(_, ty) => {
+ work.push_back(*ty);
+ }
+ clean::Type::BorrowedRef { type_, .. } => {
+ work.push_back(*type_);
+ }
+ clean::Type::QPath { self_type, trait_, .. } => {
+ work.push_back(*self_type);
+ process_path(trait_.def_id());
+ }
+ _ => {}
+ }
+ }
+ out
+}
+
+const MAX_FULL_EXAMPLES: usize = 5;
+const NUM_VISIBLE_LINES: usize = 10;
+
+/// Generates the HTML for example call locations generated via the --scrape-examples flag.
+fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item) {
+ let tcx = cx.tcx();
+ let def_id = item.item_id.expect_def_id();
+ let key = tcx.def_path_hash(def_id);
+ let Some(call_locations) = cx.shared.call_locations.get(&key) else { return };
+
+ // Generate a unique ID so users can link to this section for a given method
+ let id = cx.id_map.derive("scraped-examples");
+ write!(
+ w,
+ "<div class=\"docblock scraped-example-list\">\
+ <span></span>\
+ <h5 id=\"{id}\">\
+ <a href=\"#{id}\">Examples found in repository</a>\
+ <a class=\"scrape-help\" href=\"{root_path}scrape-examples-help.html\">?</a>\
+ </h5>",
+ root_path = cx.root_path(),
+ id = id
+ );
+
+ // Create a URL to a particular location in a reverse-dependency's source file
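+ // (Illustrative: a call whose `line_span` is (3, 3) yields the anchor "4" and the title
+ // "line 4", while (3, 7) yields "4-8" / "lines 4-8"; spans are zero-based but the rendered
+ // source is one-based.)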
+ let link_to_loc = |call_data: &CallData, loc: &CallLocation| -> (String, String) {
+ let (line_lo, line_hi) = loc.call_expr.line_span;
+ let (anchor, title) = if line_lo == line_hi {
+ ((line_lo + 1).to_string(), format!("line {}", line_lo + 1))
+ } else {
+ (
+ format!("{}-{}", line_lo + 1, line_hi + 1),
+ format!("lines {}-{}", line_lo + 1, line_hi + 1),
+ )
+ };
+ let url = format!("{}{}#{}", cx.root_path(), call_data.url, anchor);
+ (url, title)
+ };
+
+ // Generate the HTML for a single example, being the title and code block
+ let write_example = |w: &mut Buffer, (path, call_data): (&PathBuf, &CallData)| -> bool {
+ let contents = match fs::read_to_string(&path) {
+ Ok(contents) => contents,
+ Err(err) => {
+ let span = item.span(tcx).inner();
+ tcx.sess
+ .span_err(span, &format!("failed to read file {}: {}", path.display(), err));
+ return false;
+ }
+ };
+
+ // To reduce file sizes, we only want to embed the source code needed to understand the example, not
+ // the entire file. So we find the smallest byte range that covers all items enclosing examples.
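+ // (Illustrative: if one enclosing item spans bytes 120..480 and another spans 300..900,
+ // the embedded snippet covers bytes 120..900 and every highlighted range is later shifted
+ // down by 120.)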
+ assert!(!call_data.locations.is_empty());
+ let min_loc =
+ call_data.locations.iter().min_by_key(|loc| loc.enclosing_item.byte_span.0).unwrap();
+ let byte_min = min_loc.enclosing_item.byte_span.0;
+ let line_min = min_loc.enclosing_item.line_span.0;
+ let max_loc =
+ call_data.locations.iter().max_by_key(|loc| loc.enclosing_item.byte_span.1).unwrap();
+ let byte_max = max_loc.enclosing_item.byte_span.1;
+ let line_max = max_loc.enclosing_item.line_span.1;
+
+ // The output code is limited to that byte range.
+ let contents_subset = &contents[(byte_min as usize)..(byte_max as usize)];
+
+ // The call locations need to be updated to reflect the truncated source. Specifically,
+ // `byte_min` is subtracted from every range, since that offset is the new zero point.
+ let (mut byte_ranges, line_ranges): (Vec<_>, Vec<_>) = call_data
+ .locations
+ .iter()
+ .map(|loc| {
+ let (byte_lo, byte_hi) = loc.call_ident.byte_span;
+ let (line_lo, line_hi) = loc.call_expr.line_span;
+ let byte_range = (byte_lo - byte_min, byte_hi - byte_min);
+
+ let line_range = (line_lo - line_min, line_hi - line_min);
+ let (line_url, line_title) = link_to_loc(call_data, loc);
+
+ (byte_range, (line_range, line_url, line_title))
+ })
+ .unzip();
+
+ let (_, init_url, init_title) = &line_ranges[0];
+ let needs_expansion = line_max - line_min > NUM_VISIBLE_LINES;
+ let locations_encoded = serde_json::to_string(&line_ranges).unwrap();
+
+ write!(
+ w,
+ "<div class=\"scraped-example {expanded_cls}\" data-locs=\"{locations}\">\
+ <div class=\"scraped-example-title\">\
+ {name} (<a href=\"{url}\">{title}</a>)\
+ </div>\
+ <div class=\"code-wrapper\">",
+ expanded_cls = if needs_expansion { "" } else { "expanded" },
+ name = call_data.display_name,
+ url = init_url,
+ title = init_title,
+ // The locations are encoded as a data attribute, so they can be read
+ // later by the JS for interactions.
+ locations = Escape(&locations_encoded)
+ );
+
+ if line_ranges.len() > 1 {
+ write!(w, r#"<span class="prev">&pr;</span> <span class="next">&sc;</span>"#);
+ }
+
+ if needs_expansion {
+ write!(w, r#"<span class="expand">&varr;</span>"#);
+ }
+
+ // Look for the example file in the source map if it exists, otherwise return a dummy span
+ let file_span = (|| {
+ let source_map = tcx.sess.source_map();
+ let crate_src = tcx.sess.local_crate_source_file.as_ref()?;
+ let abs_crate_src = crate_src.canonicalize().ok()?;
+ let crate_root = abs_crate_src.parent()?.parent()?;
+ let rel_path = path.strip_prefix(crate_root).ok()?;
+ let files = source_map.files();
+ let file = files.iter().find(|file| match &file.name {
+ FileName::Real(RealFileName::LocalPath(other_path)) => rel_path == other_path,
+ _ => false,
+ })?;
+ Some(rustc_span::Span::with_root_ctxt(
+ file.start_pos + BytePos(byte_min),
+ file.start_pos + BytePos(byte_max),
+ ))
+ })()
+ .unwrap_or(rustc_span::DUMMY_SP);
+
+ // The root path is the inverse of Context::current
+ let root_path = vec!["../"; cx.current.len() - 1].join("");
+
+ let mut decoration_info = FxHashMap::default();
+ decoration_info.insert("highlight focus", vec![byte_ranges.remove(0)]);
+ decoration_info.insert("highlight", byte_ranges);
+
+ sources::print_src(
+ w,
+ contents_subset,
+ call_data.edition,
+ file_span,
+ cx,
+ &root_path,
+ Some(highlight::DecorationInfo(decoration_info)),
+ sources::SourceContext::Embedded { offset: line_min },
+ );
+ write!(w, "</div></div>");
+
+ true
+ };
+
+ // The call locations are output in sequence, so that sequence needs to be determined.
+ // Ideally the most "relevant" examples would be shown first, but there's no general algorithm
+ // for determining relevance. Instead, we show the smallest examples first, as they are likely
+ // the easiest to understand at a glance.
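+ // (Illustrative: examples whose enclosing items span 250, 40, and 12 bytes are emitted in
+ // the order 12, 40, 250.)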
+ let ordered_locations = {
+ let sort_criterion = |(_, call_data): &(_, &CallData)| {
+ // Use the first location because that's what the user will see initially
+ let (lo, hi) = call_data.locations[0].enclosing_item.byte_span;
+ hi - lo
+ };
+
+ let mut locs = call_locations.iter().collect::<Vec<_>>();
+ locs.sort_by_key(sort_criterion);
+ locs
+ };
+
+ let mut it = ordered_locations.into_iter().peekable();
+
+ // An example may fail to write if its source can't be read for some reason, so this helper
+ // keeps iterating until a write succeeds.
+ let write_and_skip_failure = |w: &mut Buffer, it: &mut Peekable<_>| {
+ while let Some(example) = it.next() {
+ if write_example(&mut *w, example) {
+ break;
+ }
+ }
+ };
+
+ // Write just one example that's visible by default in the method's description.
+ write_and_skip_failure(w, &mut it);
+
+ // Then add the remaining examples in a hidden section.
+ if it.peek().is_some() {
+ write!(
+ w,
+ "<details class=\"rustdoc-toggle more-examples-toggle\">\
+ <summary class=\"hideme\">\
+ <span>More examples</span>\
+ </summary>\
+ <div class=\"hide-more\">Hide additional examples</div>\
+ <div class=\"more-scraped-examples\">\
+ <div class=\"toggle-line\"><div class=\"toggle-line-inner\"></div></div>\
+ <div class=\"more-scraped-examples-inner\">"
+ );
+
+ // Only generate inline code for at most MAX_FULL_EXAMPLES examples. Otherwise we could
+ // make the page arbitrarily huge!
+ for _ in 0..MAX_FULL_EXAMPLES {
+ write_and_skip_failure(w, &mut it);
+ }
+
+ // For the remaining examples, generate a <ul> containing links to the source files.
+ if it.peek().is_some() {
+ write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#);
+ it.for_each(|(_, call_data)| {
+ let (url, _) = link_to_loc(call_data, &call_data.locations[0]);
+ write!(
+ w,
+ r#"<li><a href="{url}">{name}</a></li>"#,
+ url = url,
+ name = call_data.display_name
+ );
+ });
+ write!(w, "</ul></div>");
+ }
+
+ write!(w, "</div></div></details>");
+ }
+
+ write!(w, "</div>");
+}
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
new file mode 100644
index 000000000..99cf42919
--- /dev/null
+++ b/src/librustdoc/html/render/print_item.rs
@@ -0,0 +1,1974 @@
+use clean::AttributesExt;
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir as hir;
+use rustc_hir::def::CtorKind;
+use rustc_hir::def_id::DefId;
+use rustc_middle::middle::stability;
+use rustc_middle::span_bug;
+use rustc_middle::ty::layout::LayoutError;
+use rustc_middle::ty::{Adt, TyCtxt};
+use rustc_span::hygiene::MacroKind;
+use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_target::abi::{Layout, Primitive, TagEncoding, Variants};
+use std::cmp::Ordering;
+use std::fmt;
+use std::rc::Rc;
+
+use super::{
+ collect_paths_for_type, document, ensure_trailing_slash, item_ty_to_section,
+ notable_traits_decl, render_assoc_item, render_assoc_items, render_attributes_in_code,
+ render_attributes_in_pre, render_impl, render_stability_since_raw, write_srclink,
+ AssocItemLink, Context, ImplRenderingParameters,
+};
+use crate::clean;
+use crate::config::ModuleSorting;
+use crate::formats::item_type::ItemType;
+use crate::formats::{AssocItemRender, Impl, RenderMode};
+use crate::html::escape::Escape;
+use crate::html::format::{
+ join_with_double_colon, print_abi_with_space, print_constness_with_space, print_where_clause,
+ Buffer, Ending, PrintWithSpace,
+};
+use crate::html::highlight;
+use crate::html::layout::Page;
+use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
+use crate::html::url_parts_builder::UrlPartsBuilder;
+
+use askama::Template;
+use itertools::Itertools;
+
+const ITEM_TABLE_OPEN: &str = "<div class=\"item-table\">";
+const ITEM_TABLE_CLOSE: &str = "</div>";
+const ITEM_TABLE_ROW_OPEN: &str = "<div class=\"item-row\">";
+const ITEM_TABLE_ROW_CLOSE: &str = "</div>";
+
+// A component in a `use` path, like `string` in `std::string::ToString`.
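+// (Illustrative: for an item rendered at `std::string::ToString`, `cx.current` is
+// ["std", "string"], so `print_item` below builds the components { path: "../", name: std }
+// and { path: "", name: string }, which feed the `path_components` of `ItemVars` for the
+// page header.)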
+struct PathComponent {
+ path: String,
+ name: Symbol,
+}
+
+#[derive(Template)]
+#[template(path = "print_item.html")]
+struct ItemVars<'a> {
+ page: &'a Page<'a>,
+ static_root_path: &'a str,
+ typ: &'a str,
+ name: &'a str,
+ item_type: &'a str,
+ path_components: Vec<PathComponent>,
+ stability_since_raw: &'a str,
+ src_href: Option<&'a str>,
+}
+
+/// Calls `print_where_clause` and returns `true` if a `where` clause was generated.
+fn print_where_clause_and_check<'a, 'tcx: 'a>(
+ buffer: &mut Buffer,
+ gens: &'a clean::Generics,
+ cx: &'a Context<'tcx>,
+) -> bool {
+ let len_before = buffer.len();
+ write!(buffer, "{}", print_where_clause(gens, cx, 0, Ending::Newline));
+ len_before != buffer.len()
+}
+
+pub(super) fn print_item(
+ cx: &mut Context<'_>,
+ item: &clean::Item,
+ buf: &mut Buffer,
+ page: &Page<'_>,
+) {
+ debug_assert!(!item.is_stripped());
+ let typ = match *item.kind {
+ clean::ModuleItem(_) => {
+ if item.is_crate() {
+ "Crate "
+ } else {
+ "Module "
+ }
+ }
+ clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => "Function ",
+ clean::TraitItem(..) => "Trait ",
+ clean::StructItem(..) => "Struct ",
+ clean::UnionItem(..) => "Union ",
+ clean::EnumItem(..) => "Enum ",
+ clean::TypedefItem(..) => "Type Definition ",
+ clean::MacroItem(..) => "Macro ",
+ clean::ProcMacroItem(ref mac) => match mac.kind {
+ MacroKind::Bang => "Macro ",
+ MacroKind::Attr => "Attribute Macro ",
+ MacroKind::Derive => "Derive Macro ",
+ },
+ clean::PrimitiveItem(..) => "Primitive Type ",
+ clean::StaticItem(..) | clean::ForeignStaticItem(..) => "Static ",
+ clean::ConstantItem(..) => "Constant ",
+ clean::ForeignTypeItem => "Foreign Type ",
+ clean::KeywordItem => "Keyword ",
+ clean::OpaqueTyItem(..) => "Opaque Type ",
+ clean::TraitAliasItem(..) => "Trait Alias ",
+ _ => {
+ // We don't generate pages for any other type.
+ unreachable!();
+ }
+ };
+ let mut stability_since_raw = Buffer::new();
+ render_stability_since_raw(
+ &mut stability_since_raw,
+ item.stable_since(cx.tcx()),
+ item.const_stability(cx.tcx()),
+ None,
+ None,
+ );
+ let stability_since_raw: String = stability_since_raw.into_inner();
+
+ // Write source tag
+ //
+ // When this item is part of a `crate use` in a downstream crate, the
+ // source link in the downstream documentation will actually come back to
+ // this page, and this link will be auto-clicked. The `id` attribute is
+ // used to find the link to auto-click.
+ let src_href =
+ if cx.include_sources && !item.is_primitive() { cx.src_href(item) } else { None };
+
+ let path_components = if item.is_primitive() || item.is_keyword() {
+ vec![]
+ } else {
+ let cur = &cx.current;
+ let amt = if item.is_mod() { cur.len() - 1 } else { cur.len() };
+ cur.iter()
+ .enumerate()
+ .take(amt)
+ .map(|(i, component)| PathComponent {
+ path: "../".repeat(cur.len() - i - 1),
+ name: *component,
+ })
+ .collect()
+ };
+
+ let item_vars = ItemVars {
+ page,
+ static_root_path: page.get_static_root_path(),
+ typ,
+ name: item.name.as_ref().unwrap().as_str(),
+ item_type: &item.type_().to_string(),
+ path_components,
+ stability_since_raw: &stability_since_raw,
+ src_href: src_href.as_deref(),
+ };
+
+ item_vars.render_into(buf).unwrap();
+
+ match &*item.kind {
+ clean::ModuleItem(ref m) => item_module(buf, cx, item, &m.items),
+ clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) => {
+ item_function(buf, cx, item, f)
+ }
+ clean::TraitItem(ref t) => item_trait(buf, cx, item, t),
+ clean::StructItem(ref s) => item_struct(buf, cx, item, s),
+ clean::UnionItem(ref s) => item_union(buf, cx, item, s),
+ clean::EnumItem(ref e) => item_enum(buf, cx, item, e),
+ clean::TypedefItem(ref t) => item_typedef(buf, cx, item, t),
+ clean::MacroItem(ref m) => item_macro(buf, cx, item, m),
+ clean::ProcMacroItem(ref m) => item_proc_macro(buf, cx, item, m),
+ clean::PrimitiveItem(_) => item_primitive(buf, cx, item),
+ clean::StaticItem(ref i) | clean::ForeignStaticItem(ref i) => item_static(buf, cx, item, i),
+ clean::ConstantItem(ref c) => item_constant(buf, cx, item, c),
+ clean::ForeignTypeItem => item_foreign_type(buf, cx, item),
+ clean::KeywordItem => item_keyword(buf, cx, item),
+ clean::OpaqueTyItem(ref e) => item_opaque_ty(buf, cx, item, e),
+ clean::TraitAliasItem(ref ta) => item_trait_alias(buf, cx, item, ta),
+ _ => {
+ // We don't generate pages for any other type.
+ unreachable!();
+ }
+ }
+}
+
+/// For large structs, enums, unions, etc., determine whether to hide their fields
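+/// (Illustrative: a struct with 12 fields is shown expanded, while one with 13 or more is
+/// collapsed behind a "Show ..." toggle via `toggle_open`.)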
+fn should_hide_fields(n_fields: usize) -> bool {
+ n_fields > 12
+}
+
+fn toggle_open(w: &mut Buffer, text: impl fmt::Display) {
+ write!(
+ w,
+ "<details class=\"rustdoc-toggle type-contents-toggle\">\
+ <summary class=\"hideme\">\
+ <span>Show {}</span>\
+ </summary>",
+ text
+ );
+}
+
+fn toggle_close(w: &mut Buffer) {
+ w.write_str("</details>");
+}
+
+fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: &[clean::Item]) {
+ document(w, cx, item, None, HeadingOffset::H2);
+
+ let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>();
+
+ // the order of item types in the listing
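+ // (e.g. `extern crate` entries (0) sort before re-exports (1), which sort before
+ // primitives (2), modules (3), macros (4), and so on; any kind not listed falls after
+ // everything else via `14 + ty as u8`.)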
+ fn reorder(ty: ItemType) -> u8 {
+ match ty {
+ ItemType::ExternCrate => 0,
+ ItemType::Import => 1,
+ ItemType::Primitive => 2,
+ ItemType::Module => 3,
+ ItemType::Macro => 4,
+ ItemType::Struct => 5,
+ ItemType::Enum => 6,
+ ItemType::Constant => 7,
+ ItemType::Static => 8,
+ ItemType::Trait => 9,
+ ItemType::Function => 10,
+ ItemType::Typedef => 12,
+ ItemType::Union => 13,
+ _ => 14 + ty as u8,
+ }
+ }
+
+ fn cmp(
+ i1: &clean::Item,
+ i2: &clean::Item,
+ idx1: usize,
+ idx2: usize,
+ tcx: TyCtxt<'_>,
+ ) -> Ordering {
+ let ty1 = i1.type_();
+ let ty2 = i2.type_();
+ if item_ty_to_section(ty1) != item_ty_to_section(ty2)
+ || (ty1 != ty2 && (ty1 == ItemType::ExternCrate || ty2 == ItemType::ExternCrate))
+ {
+ return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2));
+ }
+ let s1 = i1.stability(tcx).as_ref().map(|s| s.level);
+ let s2 = i2.stability(tcx).as_ref().map(|s| s.level);
+ if let (Some(a), Some(b)) = (s1, s2) {
+ match (a.is_stable(), b.is_stable()) {
+ (true, true) | (false, false) => {}
+ (false, true) => return Ordering::Less,
+ (true, false) => return Ordering::Greater,
+ }
+ }
+ let lhs = i1.name.unwrap_or(kw::Empty);
+ let rhs = i2.name.unwrap_or(kw::Empty);
+ compare_names(lhs.as_str(), rhs.as_str())
+ }
+
+ match cx.shared.module_sorting {
+ ModuleSorting::Alphabetical => {
+ indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2, cx.tcx()));
+ }
+ ModuleSorting::DeclarationOrder => {}
+ }
+ // This call is to remove re-export duplicates in cases such as:
+ //
+ // ```
+ // pub(crate) mod foo {
+ // pub(crate) mod bar {
+ // pub(crate) trait Double { fn foo(); }
+ // }
+ // }
+ //
+ // pub(crate) use foo::bar::*;
+ // pub(crate) use foo::*;
+ // ```
+ //
+ // `Double` will appear twice in the generated docs.
+ //
+ // FIXME: This code is quite ugly and could be improved. Small issue: DefId
+ // can be identical even if the elements are different (mostly in imports).
+ // So in case this is an import, we keep everything by adding a "unique id"
+ // (which is the position in the vector).
+ indices.dedup_by_key(|i| {
+ (
+ items[*i].item_id,
+ if items[*i].name.is_some() { Some(full_path(cx, &items[*i])) } else { None },
+ items[*i].type_(),
+ if items[*i].is_import() { *i } else { 0 },
+ )
+ });
+
+ debug!("{:?}", indices);
+ let mut last_section = None;
+
+ for &idx in &indices {
+ let myitem = &items[idx];
+ if myitem.is_stripped() {
+ continue;
+ }
+
+ let my_section = item_ty_to_section(myitem.type_());
+ if Some(my_section) != last_section {
+ if last_section.is_some() {
+ w.write_str(ITEM_TABLE_CLOSE);
+ }
+ last_section = Some(my_section);
+ write!(
+ w,
+ "<h2 id=\"{id}\" class=\"small-section-header\">\
+ <a href=\"#{id}\">{name}</a>\
+ </h2>{}",
+ ITEM_TABLE_OPEN,
+ id = cx.derive_id(my_section.id().to_owned()),
+ name = my_section.name(),
+ );
+ }
+
+ match *myitem.kind {
+ clean::ExternCrateItem { ref src } => {
+ use crate::html::format::anchor;
+
+ w.write_str(ITEM_TABLE_ROW_OPEN);
+ match *src {
+ Some(src) => write!(
+ w,
+ "<div class=\"item-left\"><code>{}extern crate {} as {};",
+ myitem.visibility.print_with_space(myitem.item_id, cx),
+ anchor(myitem.item_id.expect_def_id(), src, cx),
+ myitem.name.unwrap(),
+ ),
+ None => write!(
+ w,
+ "<div class=\"item-left\"><code>{}extern crate {};",
+ myitem.visibility.print_with_space(myitem.item_id, cx),
+ anchor(myitem.item_id.expect_def_id(), myitem.name.unwrap(), cx),
+ ),
+ }
+ w.write_str("</code></div>");
+ w.write_str(ITEM_TABLE_ROW_CLOSE);
+ }
+
+ clean::ImportItem(ref import) => {
+ let (stab, stab_tags) = if let Some(import_def_id) = import.source.did {
+ let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id);
+ let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));
+
+ // Just need an item with the correct def_id and attrs
+ let import_item = clean::Item {
+ item_id: import_def_id.into(),
+ attrs: import_attrs,
+ cfg: ast_attrs.cfg(cx.tcx(), &cx.cache().hidden_cfg),
+ ..myitem.clone()
+ };
+
+ let stab = import_item.stability_class(cx.tcx());
+ let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()));
+ (stab, stab_tags)
+ } else {
+ (None, None)
+ };
+
+ let add = if stab.is_some() { " " } else { "" };
+
+ w.write_str(ITEM_TABLE_ROW_OPEN);
+ let id = match import.kind {
+ clean::ImportKind::Simple(s) => {
+ format!(" id=\"{}\"", cx.derive_id(format!("reexport.{}", s)))
+ }
+ clean::ImportKind::Glob => String::new(),
+ };
+ write!(
+ w,
+ "<div class=\"item-left {stab}{add}import-item\"{id}>\
+ <code>{vis}{imp}</code>\
+ </div>\
+ <div class=\"item-right docblock-short\">{stab_tags}</div>",
+ stab = stab.unwrap_or_default(),
+ vis = myitem.visibility.print_with_space(myitem.item_id, cx),
+ imp = import.print(cx),
+ stab_tags = stab_tags.unwrap_or_default(),
+ );
+ w.write_str(ITEM_TABLE_ROW_CLOSE);
+ }
+
+ _ => {
+ if myitem.name.is_none() {
+ continue;
+ }
+
+ let unsafety_flag = match *myitem.kind {
+ clean::FunctionItem(_) | clean::ForeignFunctionItem(_)
+ if myitem.fn_header(cx.tcx()).unwrap().unsafety
+ == hir::Unsafety::Unsafe =>
+ {
+ "<a title=\"unsafe function\" href=\"#\"><sup>⚠</sup></a>"
+ }
+ _ => "",
+ };
+
+ let stab = myitem.stability_class(cx.tcx());
+ let add = if stab.is_some() { " " } else { "" };
+
+ let visibility_emoji = match myitem.visibility {
+ clean::Visibility::Restricted(_) => {
+ "<span title=\"Restricted Visibility\">&nbsp;🔒</span> "
+ }
+ _ => "",
+ };
+
+ let doc_value = myitem.doc_value().unwrap_or_default();
+ w.write_str(ITEM_TABLE_ROW_OPEN);
+ write!(
+ w,
+ "<div class=\"item-left {stab}{add}module-item\">\
+ <a class=\"{class}\" href=\"{href}\" title=\"{title}\">{name}</a>\
+ {visibility_emoji}\
+ {unsafety_flag}\
+ {stab_tags}\
+ </div>\
+ <div class=\"item-right docblock-short\">{docs}</div>",
+ name = myitem.name.unwrap(),
+ visibility_emoji = visibility_emoji,
+ stab_tags = extra_info_tags(myitem, item, cx.tcx()),
+ docs = MarkdownSummaryLine(&doc_value, &myitem.links(cx)).into_string(),
+ class = myitem.type_(),
+ add = add,
+ stab = stab.unwrap_or_default(),
+ unsafety_flag = unsafety_flag,
+ href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
+ title = [full_path(cx, myitem), myitem.type_().to_string()]
+ .iter()
+ .filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None })
+ .collect::<Vec<_>>()
+ .join(" "),
+ );
+ w.write_str(ITEM_TABLE_ROW_CLOSE);
+ }
+ }
+ }
+
+ if last_section.is_some() {
+ w.write_str(ITEM_TABLE_CLOSE);
+ }
+}
+
+/// Render the stability, deprecation and portability tags that are displayed in the item's summary
+/// at the module level.
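+/// (Illustrative: a deprecated, `cfg(unix)`-only item would get a "Deprecated" tag, an
+/// "Experimental" tag if it is unstable outside `rustc_private`, and a portability tag
+/// rendered from the simplified `cfg`, each emitted as a `<span class="stab ...">`.)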
+fn extra_info_tags(item: &clean::Item, parent: &clean::Item, tcx: TyCtxt<'_>) -> String {
+ let mut tags = String::new();
+
+ fn tag_html(class: &str, title: &str, contents: &str) -> String {
+ format!(r#"<span class="stab {}" title="{}">{}</span>"#, class, Escape(title), contents)
+ }
+
+ // The trailing space after each tag is to space it properly against the rest of the docs.
+ if let Some(depr) = &item.deprecation(tcx) {
+ let mut message = "Deprecated";
+ if !stability::deprecation_in_effect(depr) {
+ message = "Deprecation planned";
+ }
+ tags += &tag_html("deprecated", "", message);
+ }
+
+ // The "rustc_private" crates are permanently unstable so it makes no sense
+ // to render "unstable" everywhere.
+ if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
+ == Some(true)
+ {
+ tags += &tag_html("unstable", "", "Experimental");
+ }
+
+ let cfg = match (&item.cfg, parent.cfg.as_ref()) {
+ (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
+ (cfg, _) => cfg.as_deref().cloned(),
+ };
+
+ debug!("Portability {:?} - {:?} = {:?}", item.cfg, parent.cfg, cfg);
+ if let Some(ref cfg) = cfg {
+ tags += &tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html());
+ }
+
+ tags
+}
+
+fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &clean::Function) {
+ let header = it.fn_header(cx.tcx()).expect("printing a function which isn't a function");
+ let constness = print_constness_with_space(&header.constness, it.const_stability(cx.tcx()));
+ let unsafety = header.unsafety.print_with_space();
+ let abi = print_abi_with_space(header.abi).to_string();
+ let asyncness = header.asyncness.print_with_space();
+ let visibility = it.visibility.print_with_space(it.item_id, cx).to_string();
+ let name = it.name.unwrap();
+
+ let generics_len = format!("{:#}", f.generics.print(cx)).len();
+ let header_len = "fn ".len()
+ + visibility.len()
+ + constness.len()
+ + asyncness.len()
+ + unsafety.len()
+ + abi.len()
+ + name.as_str().len()
+ + generics_len;
+
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "fn", |w| {
+ render_attributes_in_pre(w, it, "");
+ w.reserve(header_len);
+ write!(
+ w,
+ "{vis}{constness}{asyncness}{unsafety}{abi}fn \
+ {name}{generics}{decl}{notable_traits}{where_clause}",
+ vis = visibility,
+ constness = constness,
+ asyncness = asyncness,
+ unsafety = unsafety,
+ abi = abi,
+ name = name,
+ generics = f.generics.print(cx),
+ where_clause = print_where_clause(&f.generics, cx, 0, Ending::Newline),
+ decl = f.decl.full_print(header_len, 0, header.asyncness, cx),
+ notable_traits = notable_traits_decl(&f.decl, cx),
+ );
+ });
+ });
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Trait) {
+ let bounds = bounds(&t.bounds, false, cx);
+ let required_types = t.items.iter().filter(|m| m.is_ty_associated_type()).collect::<Vec<_>>();
+ let provided_types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>();
+ let required_consts = t.items.iter().filter(|m| m.is_ty_associated_const()).collect::<Vec<_>>();
+ let provided_consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>();
+ let required_methods = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>();
+ let provided_methods = t.items.iter().filter(|m| m.is_method()).collect::<Vec<_>>();
+ let count_types = required_types.len() + provided_types.len();
+ let count_consts = required_consts.len() + provided_consts.len();
+ let count_methods = required_methods.len() + provided_methods.len();
+ let must_implement_one_of_functions =
+ cx.tcx().trait_def(t.def_id).must_implement_one_of.clone();
+
+ // Output the trait definition
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "trait", |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "{}{}{}trait {}{}{}",
+ it.visibility.print_with_space(it.item_id, cx),
+ t.unsafety(cx.tcx()).print_with_space(),
+ if t.is_auto(cx.tcx()) { "auto " } else { "" },
+ it.name.unwrap(),
+ t.generics.print(cx),
+ bounds
+ );
+
+ if !t.generics.where_predicates.is_empty() {
+ write!(w, "{}", print_where_clause(&t.generics, cx, 0, Ending::Newline));
+ } else {
+ w.write_str(" ");
+ }
+
+ if t.items.is_empty() {
+ w.write_str("{ }");
+ } else {
+ // FIXME: we should be using a derived_id for the Anchors here
+ w.write_str("{\n");
+ let mut toggle = false;
+
+ // If there are too many associated types, hide _everything_
+ if should_hide_fields(count_types) {
+ toggle = true;
+ toggle_open(
+ w,
+ format_args!(
+ "{} associated items",
+ count_types + count_consts + count_methods
+ ),
+ );
+ }
+ for types in [&required_types, &provided_types] {
+ for t in types {
+ render_assoc_item(
+ w,
+ t,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+ w.write_str(";\n");
+ }
+ }
+ // If there are too many associated constants, hide everything after them
+ // We also do this if the types + consts is large because otherwise we could
+ // render a bunch of types and _then_ a bunch of consts just because both were
+ // _just_ under the limit
+ if !toggle && should_hide_fields(count_types + count_consts) {
+ toggle = true;
+ toggle_open(
+ w,
+ format_args!(
+ "{} associated constant{} and {} method{}",
+ count_consts,
+ pluralize(count_consts),
+ count_methods,
+ pluralize(count_methods),
+ ),
+ );
+ }
+ if count_types != 0 && (count_consts != 0 || count_methods != 0) {
+ w.write_str("\n");
+ }
+ for consts in [&required_consts, &provided_consts] {
+ for c in consts {
+ render_assoc_item(
+ w,
+ c,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+ w.write_str(";\n");
+ }
+ }
+ if !toggle && should_hide_fields(count_methods) {
+ toggle = true;
+ toggle_open(w, format_args!("{} methods", count_methods));
+ }
+ if count_consts != 0 && count_methods != 0 {
+ w.write_str("\n");
+ }
+ for (pos, m) in required_methods.iter().enumerate() {
+ render_assoc_item(
+ w,
+ m,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+ w.write_str(";\n");
+
+ if pos < required_methods.len() - 1 {
+ w.write_str("<span class=\"item-spacer\"></span>");
+ }
+ }
+ if !required_methods.is_empty() && !provided_methods.is_empty() {
+ w.write_str("\n");
+ }
+ for (pos, m) in provided_methods.iter().enumerate() {
+ render_assoc_item(
+ w,
+ m,
+ AssocItemLink::Anchor(None),
+ ItemType::Trait,
+ cx,
+ RenderMode::Normal,
+ );
+ match *m.kind {
+ clean::MethodItem(ref inner, _)
+ if !inner.generics.where_predicates.is_empty() =>
+ {
+ w.write_str(",\n { ... }\n");
+ }
+ _ => {
+ w.write_str(" { ... }\n");
+ }
+ }
+
+ if pos < provided_methods.len() - 1 {
+ w.write_str("<span class=\"item-spacer\"></span>");
+ }
+ }
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+ }
+ });
+ });
+
+ // Trait documentation
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ fn write_small_section_header(w: &mut Buffer, id: &str, title: &str, extra_content: &str) {
+ write!(
+ w,
+ "<h2 id=\"{0}\" class=\"small-section-header\">\
+ {1}<a href=\"#{0}\" class=\"anchor\"></a>\
+ </h2>{2}",
+ id, title, extra_content
+ )
+ }
+
+ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::Item) {
+ let name = m.name.unwrap();
+ info!("Documenting {} on {:?}", name, t.name);
+ let item_type = m.type_();
+ let id = cx.derive_id(format!("{}.{}", item_type, name));
+ let mut content = Buffer::empty_from(w);
+ document(&mut content, cx, m, Some(t), HeadingOffset::H5);
+ let toggled = !content.is_empty();
+ if toggled {
+ write!(w, "<details class=\"rustdoc-toggle\" open><summary>");
+ }
+ write!(w, "<div id=\"{}\" class=\"method has-srclink\">", id);
+ write!(w, "<div class=\"rightside\">");
+
+ let has_stability = render_stability_since(w, m, t, cx.tcx());
+ if has_stability {
+ w.write_str(" · ");
+ }
+ write_srclink(cx, m, w);
+ write!(w, "</div>");
+ write!(w, "<h4 class=\"code-header\">");
+ render_assoc_item(
+ w,
+ m,
+ AssocItemLink::Anchor(Some(&id)),
+ ItemType::Impl,
+ cx,
+ RenderMode::Normal,
+ );
+ w.write_str("</h4>");
+ w.write_str("</div>");
+ if toggled {
+ write!(w, "</summary>");
+ w.push_buffer(content);
+ write!(w, "</details>");
+ }
+ }
+
+ if !required_types.is_empty() {
+ write_small_section_header(
+ w,
+ "required-associated-types",
+ "Required Associated Types",
+ "<div class=\"methods\">",
+ );
+ for t in required_types {
+ trait_item(w, cx, t, it);
+ }
+ w.write_str("</div>");
+ }
+ if !provided_types.is_empty() {
+ write_small_section_header(
+ w,
+ "provided-associated-types",
+ "Provided Associated Types",
+ "<div class=\"methods\">",
+ );
+ for t in provided_types {
+ trait_item(w, cx, t, it);
+ }
+ w.write_str("</div>");
+ }
+
+ if !required_consts.is_empty() {
+ write_small_section_header(
+ w,
+ "required-associated-consts",
+ "Required Associated Constants",
+ "<div class=\"methods\">",
+ );
+ for t in required_consts {
+ trait_item(w, cx, t, it);
+ }
+ w.write_str("</div>");
+ }
+ if !provided_consts.is_empty() {
+ write_small_section_header(
+ w,
+ "provided-associated-consts",
+ "Provided Associated Constants",
+ "<div class=\"methods\">",
+ );
+ for t in provided_consts {
+ trait_item(w, cx, t, it);
+ }
+ w.write_str("</div>");
+ }
+
+ // Output the documentation for each function individually
+ if !required_methods.is_empty() || must_implement_one_of_functions.is_some() {
+ write_small_section_header(
+ w,
+ "required-methods",
+ "Required Methods",
+ "<div class=\"methods\">",
+ );
+
+ if let Some(list) = must_implement_one_of_functions.as_deref() {
+ write!(
+ w,
+ "<div class=\"stab must_implement\">At least one of the `{}` methods is required.</div>",
+ list.iter().join("`, `")
+ );
+ }
+
+ for m in required_methods {
+ trait_item(w, cx, m, it);
+ }
+ w.write_str("</div>");
+ }
+ if !provided_methods.is_empty() {
+ write_small_section_header(
+ w,
+ "provided-methods",
+ "Provided Methods",
+ "<div class=\"methods\">",
+ );
+ for m in provided_methods {
+ trait_item(w, cx, m, it);
+ }
+ w.write_str("</div>");
+ }
+
+ // If there are methods directly on this trait object, render them here.
+ render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All);
+
+ let cloned_shared = Rc::clone(&cx.shared);
+ let cache = &cloned_shared.cache;
+ let mut extern_crates = FxHashSet::default();
+ if let Some(implementors) = cache.implementors.get(&it.item_id.expect_def_id()) {
+        // The DefId is for the first Type found with that name. The bool is
+        // whether any Types with the same name but a different DefId have been found.
+ let mut implementor_dups: FxHashMap<Symbol, (DefId, bool)> = FxHashMap::default();
+ for implementor in implementors {
+ if let Some(did) = implementor.inner_impl().for_.without_borrowed_ref().def_id(cache) &&
+ !did.is_local() {
+ extern_crates.insert(did.krate);
+ }
+ match implementor.inner_impl().for_.without_borrowed_ref() {
+ clean::Type::Path { ref path } if !path.is_assoc_ty() => {
+ let did = path.def_id();
+ let &mut (prev_did, ref mut has_duplicates) =
+ implementor_dups.entry(path.last()).or_insert((did, false));
+ if prev_did != did {
+ *has_duplicates = true;
+ }
+ }
+ _ => {}
+ }
+ }
+
+ let (local, foreign) =
+ implementors.iter().partition::<Vec<_>, _>(|i| i.is_on_local_type(cx));
+
+ let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) =
+ local.iter().partition(|i| i.inner_impl().kind.is_auto());
+
+ synthetic.sort_by(|a, b| compare_impl(a, b, cx));
+ concrete.sort_by(|a, b| compare_impl(a, b, cx));
+
+ if !foreign.is_empty() {
+ write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", "");
+
+ for implementor in foreign {
+ let provided_methods = implementor.inner_impl().provided_trait_methods(cx.tcx());
+ let assoc_link =
+ AssocItemLink::GotoSource(implementor.impl_item.item_id, &provided_methods);
+ render_impl(
+ w,
+ cx,
+ implementor,
+ it,
+ assoc_link,
+ RenderMode::Normal,
+ None,
+ &[],
+ ImplRenderingParameters {
+ show_def_docs: false,
+ show_default_items: false,
+ show_non_assoc_items: true,
+ toggle_open_by_default: false,
+ },
+ );
+ }
+ }
+
+ write_small_section_header(
+ w,
+ "implementors",
+ "Implementors",
+ "<div class=\"item-list\" id=\"implementors-list\">",
+ );
+ for implementor in concrete {
+ render_implementor(cx, implementor, it, w, &implementor_dups, &[]);
+ }
+ w.write_str("</div>");
+
+ if t.is_auto(cx.tcx()) {
+ write_small_section_header(
+ w,
+ "synthetic-implementors",
+ "Auto implementors",
+ "<div class=\"item-list\" id=\"synthetic-implementors-list\">",
+ );
+ for implementor in synthetic {
+ render_implementor(
+ cx,
+ implementor,
+ it,
+ w,
+ &implementor_dups,
+ &collect_paths_for_type(implementor.inner_impl().for_.clone(), cache),
+ );
+ }
+ w.write_str("</div>");
+ }
+ } else {
+        // Even without any implementations to write, we still want the heading and list, so the
+        // implementors JavaScript file pulled in below has somewhere to write the impls into.
+ write_small_section_header(
+ w,
+ "implementors",
+ "Implementors",
+ "<div class=\"item-list\" id=\"implementors-list\"></div>",
+ );
+
+ if t.is_auto(cx.tcx()) {
+ write_small_section_header(
+ w,
+ "synthetic-implementors",
+ "Auto implementors",
+ "<div class=\"item-list\" id=\"synthetic-implementors-list\"></div>",
+ );
+ }
+ }
+
+ // Include implementors in crates that depend on the current crate.
+ //
+ // This is complicated by the way rustdoc is invoked, which is basically
+ // the same way rustc is invoked: it gets called, one at a time, for each
+ // crate. When building the rustdocs for the current crate, rustdoc can
+ // see crate metadata for its dependencies, but cannot see metadata for its
+ // dependents.
+ //
+ // To make this work, we generate a "hook" at this stage, and our
+ // dependents can "plug in" to it when they build. For simplicity's sake,
+ // it's [JSONP]: a JavaScript file with the data we need (and can parse),
+ // surrounded by a tiny wrapper that the Rust side ignores, but allows the
+ // JavaScript side to include without having to worry about Same Origin
+ // Policy. The code for *that* is in `write_shared.rs`.
+ //
+ // This is further complicated by `#[doc(inline)]`. We want all copies
+ // of an inlined trait to reference the same JS file, to address complex
+ // dependency graphs like this one (lower crates depend on higher crates):
+ //
+ // ```text
+ // --------------------------------------------
+ // | crate A: trait Foo |
+ // --------------------------------------------
+ // | |
+ // -------------------------------- |
+ // | crate B: impl A::Foo for Bar | |
+ // -------------------------------- |
+ // | |
+ // ---------------------------------------------
+ // | crate C: #[doc(inline)] use A::Foo as Baz |
+ // | impl Baz for Quux |
+ // ---------------------------------------------
+ // ```
+ //
+ // Basically, we want `C::Baz` and `A::Foo` to show the same set of
+ // impls, which is easier if they both treat `/implementors/A/trait.Foo.js`
+ // as the Single Source of Truth.
+ //
+ // We also want the `impl Baz for Quux` to be written to
+ // `trait.Foo.js`. However, when we generate plain HTML for `C::Baz`,
+ // we're going to want to generate plain HTML for `impl Baz for Quux` too,
+ // because that'll load faster, and it's better for SEO. And we don't want
+ // the same impl to show up twice on the same page.
+ //
+ // To make this work, the implementors JS file has a structure kinda
+ // like this:
+ //
+ // ```js
+ // JSONP({
+ // "B": {"impl A::Foo for Bar"},
+ // "C": {"impl Baz for Quux"},
+ // });
+ // ```
+ //
+ // First of all, this means we can rebuild a crate, and it'll replace its own
+ // data if something changes. That is, `rustdoc` is idempotent. The other
+ // advantage is that we can list the crates that get included in the HTML,
+ // and ignore them when doing the JavaScript-based part of rendering.
+ // So C's HTML will have something like this:
+ //
+ // ```html
+ // <script type="text/javascript" src="/implementors/A/trait.Foo.js"
+ // data-ignore-extern-crates="A,B" async></script>
+ // ```
+ //
+ // And, when the JS runs, anything in data-ignore-extern-crates is known
+ // to already be in the HTML, and will be ignored.
+ //
+ // [JSONP]: https://en.wikipedia.org/wiki/JSONP
+ let mut js_src_path: UrlPartsBuilder = std::iter::repeat("..")
+ .take(cx.current.len())
+ .chain(std::iter::once("implementors"))
+ .collect();
+ if let Some(did) = it.item_id.as_def_id() &&
+ let get_extern = { || cache.external_paths.get(&did).map(|s| s.0.clone()) } &&
+ let Some(fqp) = cache.exact_paths.get(&did).cloned().or_else(get_extern) {
+ js_src_path.extend(fqp[..fqp.len() - 1].iter().copied());
+ js_src_path.push_fmt(format_args!("{}.{}.js", it.type_(), fqp.last().unwrap()));
+ } else {
+ js_src_path.extend(cx.current.iter().copied());
+ js_src_path.push_fmt(format_args!("{}.{}.js", it.type_(), it.name.unwrap()));
+ }
+ let extern_crates = extern_crates
+ .into_iter()
+ .map(|cnum| cx.shared.tcx.crate_name(cnum).to_string())
+ .collect::<Vec<_>>()
+ .join(",");
+ write!(
+ w,
+ "<script type=\"text/javascript\" src=\"{src}\" data-ignore-extern-crates=\"{extern_crates}\" async></script>",
+ src = js_src_path.finish(),
+ );
+}
+
+fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::TraitAlias) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "trait-alias", |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "trait {}{}{} = {};",
+ it.name.unwrap(),
+ t.generics.print(cx),
+ print_where_clause(&t.generics, cx, 0, Ending::Newline),
+ bounds(&t.bounds, true, cx)
+ );
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ // Render any items associated directly to this alias, as otherwise they
+ // won't be visible anywhere in the docs. It would be nice to also show
+ // associated items from the aliased type (see discussion in #32077), but
+ // we need #14072 to make sense of the generics.
+ render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+}
+
+fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "opaque", |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "type {}{}{where_clause} = impl {bounds};",
+ it.name.unwrap(),
+ t.generics.print(cx),
+ where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
+ bounds = bounds(&t.bounds, false, cx),
+ );
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ // Render any items associated directly to this alias, as otherwise they
+ // won't be visible anywhere in the docs. It would be nice to also show
+ // associated items from the aliased type (see discussion in #32077), but
+ // we need #14072 to make sense of the generics.
+ render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+}
+
+fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Typedef) {
+ fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) {
+ wrap_item(w, "typedef", |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(w, "{}", it.visibility.print_with_space(it.item_id, cx));
+ write!(
+ w,
+ "type {}{}{where_clause} = {type_};",
+ it.name.unwrap(),
+ t.generics.print(cx),
+ where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline),
+ type_ = t.type_.print(cx),
+ );
+ });
+ }
+
+ wrap_into_docblock(w, |w| write_content(w, cx, it, t));
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ let def_id = it.item_id.expect_def_id();
+ // Render any items associated directly to this alias, as otherwise they
+ // won't be visible anywhere in the docs. It would be nice to also show
+ // associated items from the aliased type (see discussion in #32077), but
+ // we need #14072 to make sense of the generics.
+ render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
+ document_type_layout(w, cx, def_id);
+}
+
+fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "union", |w| {
+ render_attributes_in_pre(w, it, "");
+ render_union(w, it, Some(&s.generics), &s.fields, "", cx);
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ let mut fields = s
+ .fields
+ .iter()
+ .filter_map(|f| match *f.kind {
+ clean::StructFieldItem(ref ty) => Some((f, ty)),
+ _ => None,
+ })
+ .peekable();
+ if fields.peek().is_some() {
+ write!(
+ w,
+ "<h2 id=\"fields\" class=\"fields small-section-header\">\
+ Fields<a href=\"#fields\" class=\"anchor\"></a>\
+ </h2>"
+ );
+ for (field, ty) in fields {
+ let name = field.name.expect("union field name");
+ let id = format!("{}.{}", ItemType::StructField, name);
+ write!(
+ w,
+ "<span id=\"{id}\" class=\"{shortty} small-section-header\">\
+ <a href=\"#{id}\" class=\"anchor field\"></a>\
+ <code>{name}: {ty}</code>\
+ </span>",
+ id = id,
+ name = name,
+ shortty = ItemType::StructField,
+ ty = ty.print(cx),
+ );
+ if let Some(stability_class) = field.stability_class(cx.tcx()) {
+ write!(w, "<span class=\"stab {stab}\"></span>", stab = stability_class);
+ }
+ document(w, cx, field, Some(it), HeadingOffset::H3);
+ }
+ }
+ let def_id = it.item_id.expect_def_id();
+ render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
+ document_type_layout(w, cx, def_id);
+}
+
+fn print_tuple_struct_fields(w: &mut Buffer, cx: &Context<'_>, s: &[clean::Item]) {
+ for (i, ty) in s.iter().enumerate() {
+ if i > 0 {
+ w.write_str(",&nbsp;");
+ }
+ match *ty.kind {
+ clean::StrippedItem(box clean::StructFieldItem(_)) => w.write_str("_"),
+ clean::StructFieldItem(ref ty) => write!(w, "{}", ty.print(cx)),
+ _ => unreachable!(),
+ }
+ }
+}
+
+fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::Enum) {
+ let count_variants = e.variants().count();
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "enum", |w| {
+ render_attributes_in_pre(w, it, "");
+ write!(
+ w,
+ "{}enum {}{}",
+ it.visibility.print_with_space(it.item_id, cx),
+ it.name.unwrap(),
+ e.generics.print(cx),
+ );
+ if !print_where_clause_and_check(w, &e.generics, cx) {
+ // If there wasn't a `where` clause, we add a whitespace.
+ w.write_str(" ");
+ }
+
+ let variants_stripped = e.has_stripped_entries();
+ if count_variants == 0 && !variants_stripped {
+ w.write_str("{}");
+ } else {
+ w.write_str("{\n");
+ let toggle = should_hide_fields(count_variants);
+ if toggle {
+ toggle_open(w, format_args!("{} variants", count_variants));
+ }
+ for v in e.variants() {
+ w.write_str(" ");
+ let name = v.name.unwrap();
+ match *v.kind {
+ clean::VariantItem(ref var) => match var {
+ clean::Variant::CLike => write!(w, "{}", name),
+ clean::Variant::Tuple(ref s) => {
+ write!(w, "{}(", name);
+ print_tuple_struct_fields(w, cx, s);
+ w.write_str(")");
+ }
+ clean::Variant::Struct(ref s) => {
+ render_struct(
+ w,
+ v,
+ None,
+ s.struct_type,
+ &s.fields,
+ " ",
+ false,
+ cx,
+ );
+ }
+ },
+ _ => unreachable!(),
+ }
+ w.write_str(",\n");
+ }
+
+ if variants_stripped {
+ w.write_str(" // some variants omitted\n");
+ }
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+ }
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ if count_variants != 0 {
+ write!(
+ w,
+ "<h2 id=\"variants\" class=\"variants small-section-header\">\
+ Variants{}<a href=\"#variants\" class=\"anchor\"></a>\
+ </h2>",
+ document_non_exhaustive_header(it)
+ );
+ document_non_exhaustive(w, it);
+ for variant in e.variants() {
+ let id = cx.derive_id(format!("{}.{}", ItemType::Variant, variant.name.unwrap()));
+ write!(
+ w,
+ "<h3 id=\"{id}\" class=\"variant small-section-header\">\
+ <a href=\"#{id}\" class=\"anchor field\"></a>\
+ <code>{name}",
+ id = id,
+ name = variant.name.unwrap()
+ );
+ if let clean::VariantItem(clean::Variant::Tuple(ref s)) = *variant.kind {
+ w.write_str("(");
+ print_tuple_struct_fields(w, cx, s);
+ w.write_str(")");
+ }
+ w.write_str("</code>");
+ render_stability_since(w, variant, it, cx.tcx());
+ w.write_str("</h3>");
+
+ use crate::clean::Variant;
+
+ let heading_and_fields = match &*variant.kind {
+ clean::VariantItem(Variant::Struct(s)) => Some(("Fields", &s.fields)),
+ // Documentation on tuple variant fields is rare, so to reduce noise we only emit
+ // the section if at least one field is documented.
+ clean::VariantItem(Variant::Tuple(fields))
+ if fields.iter().any(|f| f.doc_value().is_some()) =>
+ {
+ Some(("Tuple Fields", fields))
+ }
+ _ => None,
+ };
+
+ if let Some((heading, fields)) = heading_and_fields {
+ let variant_id =
+ cx.derive_id(format!("{}.{}.fields", ItemType::Variant, variant.name.unwrap()));
+ write!(w, "<div class=\"sub-variant\" id=\"{id}\">", id = variant_id);
+ write!(w, "<h4>{heading}</h4>", heading = heading);
+ document_non_exhaustive(w, variant);
+ for field in fields {
+ match *field.kind {
+ clean::StrippedItem(box clean::StructFieldItem(_)) => {}
+ clean::StructFieldItem(ref ty) => {
+ let id = cx.derive_id(format!(
+ "variant.{}.field.{}",
+ variant.name.unwrap(),
+ field.name.unwrap()
+ ));
+ write!(
+ w,
+ "<div class=\"sub-variant-field\">\
+ <span id=\"{id}\" class=\"variant small-section-header\">\
+ <a href=\"#{id}\" class=\"anchor field\"></a>\
+ <code>{f}:&nbsp;{t}</code>\
+ </span>",
+ id = id,
+ f = field.name.unwrap(),
+ t = ty.print(cx)
+ );
+ document(w, cx, field, Some(variant), HeadingOffset::H5);
+ write!(w, "</div>");
+ }
+ _ => unreachable!(),
+ }
+ }
+ w.write_str("</div>");
+ }
+
+ document(w, cx, variant, Some(it), HeadingOffset::H4);
+ }
+ }
+ let def_id = it.item_id.expect_def_id();
+ render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
+ document_type_layout(w, cx, def_id);
+}
+
+fn item_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Macro) {
+ wrap_into_docblock(w, |w| {
+ highlight::render_with_highlighting(
+ &t.source,
+ w,
+ Some("macro"),
+ None,
+ None,
+ it.span(cx.tcx()).inner().edition(),
+ None,
+ None,
+ None,
+ );
+ });
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &clean::ProcMacro) {
+ wrap_into_docblock(w, |w| {
+ let name = it.name.expect("proc-macros always have names");
+ match m.kind {
+ MacroKind::Bang => {
+ wrap_item(w, "macro", |w| {
+ write!(w, "{}!() {{ /* proc-macro */ }}", name);
+ });
+ }
+ MacroKind::Attr => {
+ wrap_item(w, "attr", |w| {
+ write!(w, "#[{}]", name);
+ });
+ }
+ MacroKind::Derive => {
+ wrap_item(w, "derive", |w| {
+ write!(w, "#[derive({})]", name);
+ if !m.helpers.is_empty() {
+ w.push_str("\n{\n");
+ w.push_str(" // Attributes available to this derive:\n");
+ for attr in &m.helpers {
+ writeln!(w, " #[{}]", attr);
+ }
+ w.push_str("}\n");
+ }
+ });
+ }
+ }
+ });
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
+ document(w, cx, it, None, HeadingOffset::H2);
+ render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+}
+
+fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "const", |w| {
+ render_attributes_in_code(w, it);
+
+ write!(
+ w,
+ "{vis}const {name}: {typ}",
+ vis = it.visibility.print_with_space(it.item_id, cx),
+ name = it.name.unwrap(),
+ typ = c.type_.print(cx),
+ );
+
+ // FIXME: The code below now prints
+ // ` = _; // 100i32`
+ // if the expression is
+ // `50 + 50`
+ // which looks just wrong.
+ // Should we print
+ // ` = 100i32;`
+ // instead?
+
+ let value = c.value(cx.tcx());
+ let is_literal = c.is_literal(cx.tcx());
+ let expr = c.expr(cx.tcx());
+ if value.is_some() || is_literal {
+ write!(w, " = {expr};", expr = Escape(&expr));
+ } else {
+ w.write_str(";");
+ }
+
+ if !is_literal {
+ if let Some(value) = &value {
+ let value_lowercase = value.to_lowercase();
+ let expr_lowercase = expr.to_lowercase();
+
+ if value_lowercase != expr_lowercase
+ && value_lowercase.trim_end_matches("i32") != expr_lowercase
+ {
+ write!(w, " // {value}", value = Escape(value));
+ }
+ }
+ }
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "struct", |w| {
+ render_attributes_in_code(w, it);
+ render_struct(w, it, Some(&s.generics), s.struct_type, &s.fields, "", true, cx);
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ let mut fields = s
+ .fields
+ .iter()
+ .filter_map(|f| match *f.kind {
+ clean::StructFieldItem(ref ty) => Some((f, ty)),
+ _ => None,
+ })
+ .peekable();
+ if let CtorKind::Fictive | CtorKind::Fn = s.struct_type {
+ if fields.peek().is_some() {
+ write!(
+ w,
+ "<h2 id=\"fields\" class=\"fields small-section-header\">\
+ {}{}<a href=\"#fields\" class=\"anchor\"></a>\
+ </h2>",
+ if let CtorKind::Fictive = s.struct_type { "Fields" } else { "Tuple Fields" },
+ document_non_exhaustive_header(it)
+ );
+ document_non_exhaustive(w, it);
+ for (index, (field, ty)) in fields.enumerate() {
+ let field_name =
+ field.name.map_or_else(|| index.to_string(), |sym| sym.as_str().to_string());
+ let id = cx.derive_id(format!("{}.{}", ItemType::StructField, field_name));
+ write!(
+ w,
+ "<span id=\"{id}\" class=\"{item_type} small-section-header\">\
+ <a href=\"#{id}\" class=\"anchor field\"></a>\
+ <code>{name}: {ty}</code>\
+ </span>",
+ item_type = ItemType::StructField,
+ id = id,
+ name = field_name,
+ ty = ty.print(cx)
+ );
+ document(w, cx, field, Some(it), HeadingOffset::H3);
+ }
+ }
+ }
+ let def_id = it.item_id.expect_def_id();
+ render_assoc_items(w, cx, it, def_id, AssocItemRender::All);
+ document_type_layout(w, cx, def_id);
+}
+
+fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "static", |w| {
+ render_attributes_in_code(w, it);
+ write!(
+ w,
+ "{vis}static {mutability}{name}: {typ}",
+ vis = it.visibility.print_with_space(it.item_id, cx),
+ mutability = s.mutability.print_with_space(),
+ name = it.name.unwrap(),
+ typ = s.type_.print(cx)
+ );
+ });
+ });
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
+ wrap_into_docblock(w, |w| {
+ wrap_item(w, "foreigntype", |w| {
+ w.write_str("extern {\n");
+ render_attributes_in_code(w, it);
+ write!(
+ w,
+ " {}type {};\n}}",
+ it.visibility.print_with_space(it.item_id, cx),
+ it.name.unwrap(),
+ );
+ });
+ });
+
+ document(w, cx, it, None, HeadingOffset::H2);
+
+ render_assoc_items(w, cx, it, it.item_id.expect_def_id(), AssocItemRender::All)
+}
+
+fn item_keyword(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
+ document(w, cx, it, None, HeadingOffset::H2)
+}
+
+/// Compare two strings treating multi-digit numbers as single units (i.e. natural sort order).
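+/// For example, `"u8"` sorts before `"u16"`, which sorts before `"u32"`, whereas a plain
+/// lexicographic comparison would place `"u8"` after both.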
+pub(crate) fn compare_names(mut lhs: &str, mut rhs: &str) -> Ordering {
+ /// Takes a non-numeric and a numeric part from the given &str.
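+    /// For example, for `"abc12def"` this returns `("abc", "12")` and leaves `"def"` in `s`.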
+ fn take_parts<'a>(s: &mut &'a str) -> (&'a str, &'a str) {
+ let i = s.find(|c: char| c.is_ascii_digit());
+ let (a, b) = s.split_at(i.unwrap_or(s.len()));
+ let i = b.find(|c: char| !c.is_ascii_digit());
+ let (b, c) = b.split_at(i.unwrap_or(b.len()));
+ *s = c;
+ (a, b)
+ }
+
+ while !lhs.is_empty() || !rhs.is_empty() {
+ let (la, lb) = take_parts(&mut lhs);
+ let (ra, rb) = take_parts(&mut rhs);
+ // First process the non-numeric part.
+ match la.cmp(ra) {
+ Ordering::Equal => (),
+ x => return x,
+ }
+ // Then process the numeric part, if both sides have one (and they fit in a u64).
+ if let (Ok(ln), Ok(rn)) = (lb.parse::<u64>(), rb.parse::<u64>()) {
+ match ln.cmp(&rn) {
+ Ordering::Equal => (),
+ x => return x,
+ }
+ }
+ // Then process the numeric part again, but this time as strings.
+ match lb.cmp(rb) {
+ Ordering::Equal => (),
+ x => return x,
+ }
+ }
+
+ Ordering::Equal
+}
+
+pub(super) fn full_path(cx: &Context<'_>, item: &clean::Item) -> String {
+ let mut s = join_with_double_colon(&cx.current);
+ s.push_str("::");
+ s.push_str(item.name.unwrap().as_str());
+ s
+}
+
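+/// Returns the file name of an item's page: for example, a struct `Foo` maps to
+/// `struct.Foo.html`, while a module maps to `<name>/index.html`.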
+pub(super) fn item_path(ty: ItemType, name: &str) -> String {
+ match ty {
+ ItemType::Module => format!("{}index.html", ensure_trailing_slash(name)),
+ _ => format!("{}.{}.html", ty, name),
+ }
+}
+
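+/// Renders the `+`-separated bound list, prefixed with `": "` unless `trait_alias` is set.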
+fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool, cx: &Context<'_>) -> String {
+ let mut bounds = String::new();
+ if !t_bounds.is_empty() {
+ if !trait_alias {
+ bounds.push_str(": ");
+ }
+ for (i, p) in t_bounds.iter().enumerate() {
+ if i > 0 {
+ bounds.push_str(" + ");
+ }
+ bounds.push_str(&p.print(cx).to_string());
+ }
+ }
+ bounds
+}
+
+fn wrap_into_docblock<F>(w: &mut Buffer, f: F)
+where
+ F: FnOnce(&mut Buffer),
+{
+ w.write_str("<div class=\"docblock item-decl\">");
+ f(w);
+ w.write_str("</div>")
+}
+
+fn wrap_item<F>(w: &mut Buffer, item_name: &str, f: F)
+where
+ F: FnOnce(&mut Buffer),
+{
+ w.write_fmt(format_args!("<pre class=\"rust {}\"><code>", item_name));
+ f(w);
+ w.write_str("</code></pre>");
+}
+
+fn render_stability_since(
+ w: &mut Buffer,
+ item: &clean::Item,
+ containing_item: &clean::Item,
+ tcx: TyCtxt<'_>,
+) -> bool {
+ render_stability_since_raw(
+ w,
+ item.stable_since(tcx),
+ item.const_stability(tcx),
+ containing_item.stable_since(tcx),
+ containing_item.const_stable_since(tcx),
+ )
+}
+
+fn compare_impl<'a, 'b>(lhs: &'a &&Impl, rhs: &'b &&Impl, cx: &Context<'_>) -> Ordering {
+ let lhss = format!("{}", lhs.inner_impl().print(false, cx));
+ let rhss = format!("{}", rhs.inner_impl().print(false, cx));
+
+ // lhs and rhs are formatted as HTML, which may be unnecessary
+ compare_names(&lhss, &rhss)
+}
+
+fn render_implementor(
+ cx: &mut Context<'_>,
+ implementor: &Impl,
+ trait_: &clean::Item,
+ w: &mut Buffer,
+ implementor_dups: &FxHashMap<Symbol, (DefId, bool)>,
+ aliases: &[String],
+) {
+ // If there's already another implementor that has the same abridged name, use the
+ // full path, for example in `std::iter::ExactSizeIterator`
+ let use_absolute = match implementor.inner_impl().for_ {
+ clean::Type::Path { ref path, .. }
+ | clean::BorrowedRef { type_: box clean::Type::Path { ref path, .. }, .. }
+ if !path.is_assoc_ty() =>
+ {
+ implementor_dups[&path.last()].1
+ }
+ _ => false,
+ };
+ render_impl(
+ w,
+ cx,
+ implementor,
+ trait_,
+ AssocItemLink::Anchor(None),
+ RenderMode::Normal,
+ Some(use_absolute),
+ aliases,
+ ImplRenderingParameters {
+ show_def_docs: false,
+ show_default_items: false,
+ show_non_assoc_items: false,
+ toggle_open_by_default: false,
+ },
+ );
+}
+
+fn render_union(
+ w: &mut Buffer,
+ it: &clean::Item,
+ g: Option<&clean::Generics>,
+ fields: &[clean::Item],
+ tab: &str,
+ cx: &Context<'_>,
+) {
+ write!(w, "{}union {}", it.visibility.print_with_space(it.item_id, cx), it.name.unwrap(),);
+
+ let where_displayed = g
+ .map(|g| {
+ write!(w, "{}", g.print(cx));
+ print_where_clause_and_check(w, g, cx)
+ })
+ .unwrap_or(false);
+
+ // If there wasn't a `where` clause, we add a whitespace.
+ if !where_displayed {
+ w.write_str(" ");
+ }
+
+ write!(w, "{{\n{}", tab);
+ let count_fields =
+ fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count();
+ let toggle = should_hide_fields(count_fields);
+ if toggle {
+ toggle_open(w, format_args!("{} fields", count_fields));
+ }
+
+ for field in fields {
+ if let clean::StructFieldItem(ref ty) = *field.kind {
+ write!(
+ w,
+ " {}{}: {},\n{}",
+ field.visibility.print_with_space(field.item_id, cx),
+ field.name.unwrap(),
+ ty.print(cx),
+ tab
+ );
+ }
+ }
+
+ if it.has_stripped_entries().unwrap() {
+ write!(w, " /* private fields */\n{}", tab);
+ }
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+}
+
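+/// Renders a struct (or struct-like enum variant) declaration. `structhead` controls whether the
+/// `struct ` keyword and the trailing `;` of tuple structs are emitted; it is `false` when
+/// rendering a variant inside an enum.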
+fn render_struct(
+ w: &mut Buffer,
+ it: &clean::Item,
+ g: Option<&clean::Generics>,
+ ty: CtorKind,
+ fields: &[clean::Item],
+ tab: &str,
+ structhead: bool,
+ cx: &Context<'_>,
+) {
+ write!(
+ w,
+ "{}{}{}",
+ it.visibility.print_with_space(it.item_id, cx),
+ if structhead { "struct " } else { "" },
+ it.name.unwrap()
+ );
+ if let Some(g) = g {
+ write!(w, "{}", g.print(cx))
+ }
+ match ty {
+ CtorKind::Fictive => {
+            let where_displayed = g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
+
+ // If there wasn't a `where` clause, we add a whitespace.
+            if !where_displayed {
+ w.write_str(" {");
+ } else {
+ w.write_str("{");
+ }
+ let count_fields =
+ fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count();
+ let has_visible_fields = count_fields > 0;
+ let toggle = should_hide_fields(count_fields);
+ if toggle {
+ toggle_open(w, format_args!("{} fields", count_fields));
+ }
+ for field in fields {
+ if let clean::StructFieldItem(ref ty) = *field.kind {
+ write!(
+ w,
+ "\n{} {}{}: {},",
+ tab,
+ field.visibility.print_with_space(field.item_id, cx),
+ field.name.unwrap(),
+ ty.print(cx),
+ );
+ }
+ }
+
+ if has_visible_fields {
+ if it.has_stripped_entries().unwrap() {
+ write!(w, "\n{} /* private fields */", tab);
+ }
+ write!(w, "\n{}", tab);
+ } else if it.has_stripped_entries().unwrap() {
+ write!(w, " /* private fields */ ");
+ }
+ if toggle {
+ toggle_close(w);
+ }
+ w.write_str("}");
+ }
+ CtorKind::Fn => {
+ w.write_str("(");
+ for (i, field) in fields.iter().enumerate() {
+ if i > 0 {
+ w.write_str(", ");
+ }
+ match *field.kind {
+ clean::StrippedItem(box clean::StructFieldItem(..)) => write!(w, "_"),
+ clean::StructFieldItem(ref ty) => {
+ write!(
+ w,
+ "{}{}",
+ field.visibility.print_with_space(field.item_id, cx),
+ ty.print(cx),
+ )
+ }
+ _ => unreachable!(),
+ }
+ }
+ w.write_str(")");
+ if let Some(g) = g {
+ write!(w, "{}", print_where_clause(g, cx, 0, Ending::NoNewline));
+ }
+ // We only want a ";" when we are displaying a tuple struct, not a variant tuple struct.
+ if structhead {
+ w.write_str(";");
+ }
+ }
+ CtorKind::Const => {
+ // Needed for PhantomData.
+ if let Some(g) = g {
+ write!(w, "{}", print_where_clause(g, cx, 0, Ending::NoNewline));
+ }
+ w.write_str(";");
+ }
+ }
+}
+
+fn document_non_exhaustive_header(item: &clean::Item) -> &str {
+ if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" }
+}
+
+fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) {
+ if item.is_non_exhaustive() {
+ write!(
+ w,
+ "<details class=\"rustdoc-toggle non-exhaustive\">\
+ <summary class=\"hideme\"><span>{}</span></summary>\
+ <div class=\"docblock\">",
+ {
+ if item.is_struct() {
+ "This struct is marked as non-exhaustive"
+ } else if item.is_enum() {
+ "This enum is marked as non-exhaustive"
+ } else if item.is_variant() {
+ "This variant is marked as non-exhaustive"
+ } else {
+ "This type is marked as non-exhaustive"
+ }
+ }
+ );
+
+ if item.is_struct() {
+ w.write_str(
+ "Non-exhaustive structs could have additional fields added in future. \
+ Therefore, non-exhaustive structs cannot be constructed in external crates \
+ using the traditional <code>Struct { .. }</code> syntax; cannot be \
+ matched against without a wildcard <code>..</code>; and \
+ struct update syntax will not work.",
+ );
+ } else if item.is_enum() {
+ w.write_str(
+ "Non-exhaustive enums could have additional variants added in future. \
+ Therefore, when matching against variants of non-exhaustive enums, an \
+ extra wildcard arm must be added to account for any future variants.",
+ );
+ } else if item.is_variant() {
+ w.write_str(
+ "Non-exhaustive enum variants could have additional fields added in future. \
+ Therefore, non-exhaustive enum variants cannot be constructed in external \
+ crates and cannot be matched against.",
+ );
+ } else {
+ w.write_str(
+ "This type will require a wildcard arm in any match statements or constructors.",
+ );
+ }
+
+ w.write_str("</div></details>");
+ }
+}
+
+fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
+ fn write_size_of_layout(w: &mut Buffer, layout: Layout<'_>, tag_size: u64) {
+ if layout.abi().is_unsized() {
+ write!(w, "(unsized)");
+ } else {
+ let bytes = layout.size().bytes() - tag_size;
+ write!(w, "{size} byte{pl}", size = bytes, pl = if bytes == 1 { "" } else { "s" },);
+ }
+ }
+
+ if !cx.shared.show_type_layout {
+ return;
+ }
+
+ writeln!(
+ w,
+ "<h2 id=\"layout\" class=\"small-section-header\"> \
+ Layout<a href=\"#layout\" class=\"anchor\"></a></h2>"
+ );
+ writeln!(w, "<div class=\"docblock\">");
+
+ let tcx = cx.tcx();
+ let param_env = tcx.param_env(ty_def_id);
+ let ty = tcx.type_of(ty_def_id);
+ match tcx.layout_of(param_env.and(ty)) {
+ Ok(ty_layout) => {
+ writeln!(
+ w,
+ "<div class=\"warning\"><p><strong>Note:</strong> Most layout information is \
+ <strong>completely unstable</strong> and may even differ between compilations. \
+ The only exception is types with certain <code>repr(...)</code> attributes. \
+ Please see the Rust Reference’s \
+ <a href=\"https://doc.rust-lang.org/reference/type-layout.html\">“Type Layout”</a> \
+ chapter for details on type layout guarantees.</p></div>"
+ );
+ w.write_str("<p><strong>Size:</strong> ");
+ write_size_of_layout(w, ty_layout.layout, 0);
+ writeln!(w, "</p>");
+ if let Variants::Multiple { variants, tag, tag_encoding, .. } =
+ &ty_layout.layout.variants()
+ {
+ if !variants.is_empty() {
+ w.write_str(
+ "<p><strong>Size for each variant:</strong></p>\
+ <ul>",
+ );
+
+ let Adt(adt, _) = ty_layout.ty.kind() else {
+ span_bug!(tcx.def_span(ty_def_id), "not an adt")
+ };
+
+ let tag_size = if let TagEncoding::Niche { .. } = tag_encoding {
+ 0
+ } else if let Primitive::Int(i, _) = tag.primitive() {
+ i.size().bytes()
+ } else {
+ span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
+ };
+
+ for (index, layout) in variants.iter_enumerated() {
+ let name = adt.variant(index).name;
+ write!(w, "<li><code>{name}</code>: ", name = name);
+ write_size_of_layout(w, *layout, tag_size);
+ writeln!(w, "</li>");
+ }
+ w.write_str("</ul>");
+ }
+ }
+ }
+ // This kind of layout error can occur with valid code, e.g. if you try to
+ // get the layout of a generic type such as `Vec<T>`.
+ Err(LayoutError::Unknown(_)) => {
+ writeln!(
+ w,
+ "<p><strong>Note:</strong> Unable to compute type layout, \
+ possibly due to this type having generic parameters. \
+ Layout can only be computed for concrete, fully-instantiated types.</p>"
+ );
+ }
+ // This kind of error probably can't happen with valid code, but we don't
+ // want to panic and prevent the docs from building, so we just let the
+ // user know that we couldn't compute the layout.
+ Err(LayoutError::SizeOverflow(_)) => {
+ writeln!(
+ w,
+ "<p><strong>Note:</strong> Encountered an error during type layout; \
+ the type was too big.</p>"
+ );
+ }
+ Err(LayoutError::NormalizationFailure(_, _)) => {
+ writeln!(
+ w,
+ "<p><strong>Note:</strong> Encountered an error during type layout; \
+ the type failed to be normalized.</p>"
+ )
+ }
+ }
+
+ writeln!(w, "</div>");
+}
+
+fn pluralize(count: usize) -> &'static str {
+ if count > 1 { "s" } else { "" }
+}
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
new file mode 100644
index 000000000..d672f0bb5
--- /dev/null
+++ b/src/librustdoc/html/render/search_index.rs
@@ -0,0 +1,589 @@
+use std::collections::hash_map::Entry;
+use std::collections::BTreeMap;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::def_id::LOCAL_CRATE;
+use rustc_span::symbol::Symbol;
+use serde::ser::{Serialize, SerializeStruct, Serializer};
+
+use crate::clean;
+use crate::clean::types::{
+ FnRetTy, Function, GenericBound, Generics, ItemId, Type, WherePredicate,
+};
+use crate::formats::cache::{Cache, OrphanImplItem};
+use crate::formats::item_type::ItemType;
+use crate::html::format::join_with_double_colon;
+use crate::html::markdown::short_markdown_summary;
+use crate::html::render::{IndexItem, IndexItemFunctionType, RenderType, RenderTypeId};
+
+/// Builds the search index from the collected metadata
+pub(crate) fn build_index<'tcx>(
+ krate: &clean::Crate,
+ cache: &mut Cache,
+ tcx: TyCtxt<'tcx>,
+) -> String {
+ let mut itemid_to_pathid = FxHashMap::default();
+ let mut crate_paths = vec![];
+
+ // Attach all orphan items to the type's definition if the type
+ // has since been learned.
+ for &OrphanImplItem { parent, ref item, ref impl_generics } in &cache.orphan_impl_items {
+ if let Some(&(ref fqp, _)) = cache.paths.get(&parent) {
+ let desc = item
+ .doc_value()
+ .map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(cache)));
+ cache.search_index.push(IndexItem {
+ ty: item.type_(),
+ name: item.name.unwrap().to_string(),
+ path: join_with_double_colon(&fqp[..fqp.len() - 1]),
+ desc,
+ parent: Some(parent),
+ parent_idx: None,
+ search_type: get_function_type_for_search(item, tcx, impl_generics.as_ref(), cache),
+ aliases: item.attrs.get_doc_aliases(),
+ });
+ }
+ }
+
+ let crate_doc = krate
+ .module
+ .doc_value()
+ .map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(cache)));
+
+    // Aliases added through `#[doc(alias = "...")]`. Since multiple items can share the same
+    // alias, the alias element needs to hold an array of items.
+ let mut aliases: BTreeMap<String, Vec<usize>> = BTreeMap::new();
+
+ // Sort search index items. This improves the compressibility of the search index.
+ cache.search_index.sort_unstable_by(|k1, k2| {
+ // `sort_unstable_by_key` produces lifetime errors
+ let k1 = (&k1.path, &k1.name, &k1.ty, &k1.parent);
+ let k2 = (&k2.path, &k2.name, &k2.ty, &k2.parent);
+ std::cmp::Ord::cmp(&k1, &k2)
+ });
+
+ // Set up alias indexes.
+ for (i, item) in cache.search_index.iter().enumerate() {
+ for alias in &item.aliases[..] {
+ aliases.entry(alias.as_str().to_lowercase()).or_default().push(i);
+ }
+ }
+
+ // Reduce `DefId` in paths into smaller sequential numbers,
+ // and prune the paths that do not appear in the index.
+ let mut lastpath = "";
+ let mut lastpathid = 0usize;
+
+ // First, on function signatures
+ let mut search_index = std::mem::replace(&mut cache.search_index, Vec::new());
+ for item in search_index.iter_mut() {
+ fn convert_render_type(
+ ty: &mut RenderType,
+ cache: &mut Cache,
+ itemid_to_pathid: &mut FxHashMap<ItemId, usize>,
+ lastpathid: &mut usize,
+ crate_paths: &mut Vec<(ItemType, Symbol)>,
+ ) {
+ if let Some(generics) = &mut ty.generics {
+ for item in generics {
+ convert_render_type(item, cache, itemid_to_pathid, lastpathid, crate_paths);
+ }
+ }
+ let Cache { ref paths, ref external_paths, .. } = *cache;
+ let Some(id) = ty.id.clone() else {
+ assert!(ty.generics.is_some());
+ return;
+ };
+ let (itemid, path, item_type) = match id {
+ RenderTypeId::DefId(defid) => {
+ if let Some(&(ref fqp, item_type)) =
+ paths.get(&defid).or_else(|| external_paths.get(&defid))
+ {
+ (ItemId::DefId(defid), *fqp.last().unwrap(), item_type)
+ } else {
+ ty.id = None;
+ return;
+ }
+ }
+ RenderTypeId::Primitive(primitive) => (
+ ItemId::Primitive(primitive, LOCAL_CRATE),
+ primitive.as_sym(),
+ ItemType::Primitive,
+ ),
+ RenderTypeId::Index(_) => return,
+ };
+ match itemid_to_pathid.entry(itemid) {
+ Entry::Occupied(entry) => ty.id = Some(RenderTypeId::Index(*entry.get())),
+ Entry::Vacant(entry) => {
+ let pathid = *lastpathid;
+ entry.insert(pathid);
+ *lastpathid += 1;
+ crate_paths.push((item_type, path));
+ ty.id = Some(RenderTypeId::Index(pathid));
+ }
+ }
+ }
+ if let Some(search_type) = &mut item.search_type {
+ for item in &mut search_type.inputs {
+ convert_render_type(
+ item,
+ cache,
+ &mut itemid_to_pathid,
+ &mut lastpathid,
+ &mut crate_paths,
+ );
+ }
+ for item in &mut search_type.output {
+ convert_render_type(
+ item,
+ cache,
+ &mut itemid_to_pathid,
+ &mut lastpathid,
+ &mut crate_paths,
+ );
+ }
+ }
+ }
+
+ let Cache { ref paths, .. } = *cache;
+
+ // Then, on parent modules
+ let crate_items: Vec<&IndexItem> = search_index
+ .iter_mut()
+ .map(|item| {
+ item.parent_idx =
+ item.parent.and_then(|defid| match itemid_to_pathid.entry(ItemId::DefId(defid)) {
+ Entry::Occupied(entry) => Some(*entry.get()),
+ Entry::Vacant(entry) => {
+ let pathid = lastpathid;
+ entry.insert(pathid);
+ lastpathid += 1;
+
+ if let Some(&(ref fqp, short)) = paths.get(&defid) {
+ crate_paths.push((short, *fqp.last().unwrap()));
+ Some(pathid)
+ } else {
+ None
+ }
+ }
+ });
+
+            // Omit the parent path if it is the same as that of the prior item.
+ if lastpath == &item.path {
+ item.path.clear();
+ } else {
+ lastpath = &item.path;
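+/// Prints the comma-separated field types of a tuple struct or tuple variant, writing `_` for
+/// fields that were stripped from the documentation.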
+ }
+
+ &*item
+ })
+ .collect();
+
+ struct CrateData<'a> {
+ doc: String,
+ items: Vec<&'a IndexItem>,
+ paths: Vec<(ItemType, Symbol)>,
+        // The String is the alias name and the vec is the list of the elements with this alias.
+        //
+        // Note: the `usize` elements are indexes into `items`.
+ aliases: &'a BTreeMap<String, Vec<usize>>,
+ }
+
+ impl<'a> Serialize for CrateData<'a> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let has_aliases = !self.aliases.is_empty();
+ let mut crate_data =
+ serializer.serialize_struct("CrateData", if has_aliases { 9 } else { 8 })?;
+ crate_data.serialize_field("doc", &self.doc)?;
+ crate_data.serialize_field(
+ "t",
+ &self.items.iter().map(|item| &item.ty).collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "n",
+ &self.items.iter().map(|item| &item.name).collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "q",
+ &self.items.iter().map(|item| &item.path).collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "d",
+ &self.items.iter().map(|item| &item.desc).collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "i",
+ &self
+ .items
+ .iter()
+ .map(|item| {
+ assert_eq!(
+ item.parent.is_some(),
+ item.parent_idx.is_some(),
+ "`{}` is missing idx",
+ item.name
+ );
+ // 0 is a sentinel, everything else is one-indexed
+ item.parent_idx.map(|x| x + 1).unwrap_or(0)
+ })
+ .collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "f",
+ &self
+ .items
+ .iter()
+ .map(|item| {
+ // Fake option to get `0` out as a sentinel instead of `null`.
+                        // We want to use `0` because it's three fewer bytes.
+ enum FunctionOption<'a> {
+ Function(&'a IndexItemFunctionType),
+ None,
+ }
+ impl<'a> Serialize for FunctionOption<'a> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ match self {
+ FunctionOption::None => 0.serialize(serializer),
+ FunctionOption::Function(ty) => ty.serialize(serializer),
+ }
+ }
+ }
+ match &item.search_type {
+ Some(ty) => FunctionOption::Function(ty),
+ None => FunctionOption::None,
+ }
+ })
+ .collect::<Vec<_>>(),
+ )?;
+ crate_data.serialize_field(
+ "p",
+ &self.paths.iter().map(|(it, s)| (it, s.to_string())).collect::<Vec<_>>(),
+ )?;
+ if has_aliases {
+ crate_data.serialize_field("a", &self.aliases)?;
+ }
+ crate_data.end()
+ }
+ }
+
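+    // When serialized, `CrateData` becomes a single JSON object with the short keys read back by
+    // the search front end: "doc", "t" (item types), "n" (names), "q" (module paths, emptied when
+    // equal to the previous item's), "d" (descriptions), "i" (one-based parent indexes, `0`
+    // meaning "no parent"), "f" (function signatures, `0` meaning "none"), "p" (paths) and, only
+    // when there are aliases, "a".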
+ // Collect the index into a string
+ format!(
+ r#""{}":{}"#,
+ krate.name(tcx),
+ serde_json::to_string(&CrateData {
+ doc: crate_doc,
+ items: crate_items,
+ paths: crate_paths,
+ aliases: &aliases,
+ })
+ .expect("failed serde conversion")
+ // All these `replace` calls are because we have to go through JS string for JSON content.
+ .replace('\\', r"\\")
+ .replace('\'', r"\'")
+ // We need to escape double quotes for the JSON.
+ .replace("\\\"", "\\\\\"")
+ )
+}
+
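+/// Extracts the input and output types of a function-like item for the search index, returning
+/// `None` for items that are not functions or methods.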
+pub(crate) fn get_function_type_for_search<'tcx>(
+ item: &clean::Item,
+ tcx: TyCtxt<'tcx>,
+ impl_generics: Option<&(clean::Type, clean::Generics)>,
+ cache: &Cache,
+) -> Option<IndexItemFunctionType> {
+ let (mut inputs, mut output) = match *item.kind {
+ clean::FunctionItem(ref f) => get_fn_inputs_and_outputs(f, tcx, impl_generics, cache),
+ clean::MethodItem(ref m, _) => get_fn_inputs_and_outputs(m, tcx, impl_generics, cache),
+ clean::TyMethodItem(ref m) => get_fn_inputs_and_outputs(m, tcx, impl_generics, cache),
+ _ => return None,
+ };
+
+ inputs.retain(|a| a.id.is_some() || a.generics.is_some());
+ output.retain(|a| a.id.is_some() || a.generics.is_some());
+
+ Some(IndexItemFunctionType { inputs, output })
+}
+
+fn get_index_type(clean_type: &clean::Type, generics: Vec<RenderType>) -> RenderType {
+ RenderType {
+ id: get_index_type_id(clean_type),
+ generics: if generics.is_empty() { None } else { Some(generics) },
+ }
+}
+
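+/// Maps a clean type to the id stored in the search index: the `DefId` of its path (or of the
+/// first bound for `dyn Trait`), the primitive kind for primitives, the pointee's id for
+/// references and raw pointers, and `None` for types the index cannot name.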
+fn get_index_type_id(clean_type: &clean::Type) -> Option<RenderTypeId> {
+ match *clean_type {
+ clean::Type::Path { ref path, .. } => Some(RenderTypeId::DefId(path.def_id())),
+ clean::DynTrait(ref bounds, _) => {
+ let path = &bounds[0].trait_;
+ Some(RenderTypeId::DefId(path.def_id()))
+ }
+ clean::Primitive(p) => Some(RenderTypeId::Primitive(p)),
+ clean::BorrowedRef { ref type_, .. } | clean::RawPointer(_, ref type_) => {
+ get_index_type_id(type_)
+ }
+ clean::BareFunction(_)
+ | clean::Generic(_)
+ | clean::ImplTrait(_)
+ | clean::Tuple(_)
+ | clean::Slice(_)
+ | clean::Array(_, _)
+ | clean::QPath { .. }
+ | clean::Infer => None,
+ }
+}
+
+/// The point of this function is to replace bounds with types.
+///
+/// i.e. for `[T, U]` with the bounds `T: Display, U: Option<T>`, this will return
+/// `[Display, Option]`. If a type parameter has no trait bound, it is discarded.
+///
+/// Important note: It goes through generics recursively. So if you have
+/// `T: Option<Result<(), ()>>`, it'll go into `Option` and then into `Result`.
+#[instrument(level = "trace", skip(tcx, res, cache))]
+fn add_generics_and_bounds_as_types<'tcx, 'a>(
+ self_: Option<&'a Type>,
+ generics: &Generics,
+ arg: &'a Type,
+ tcx: TyCtxt<'tcx>,
+ recurse: usize,
+ res: &mut Vec<RenderType>,
+ cache: &Cache,
+) {
+ fn insert_ty(res: &mut Vec<RenderType>, ty: Type, mut generics: Vec<RenderType>) {
+ // generics and impl trait are both identified by their generics,
+ // rather than a type name itself
+ let anonymous = ty.is_full_generic() || ty.is_impl_trait();
+ let generics_empty = generics.is_empty();
+
+ if anonymous {
+ if generics_empty {
+                // This is a type parameter with no trait bounds (for example: `T` in
+                // `fn f<T>(p: T)`), so it is not useful for the rustdoc search because we would
+                // end up with an empty type with an empty name. Let's just discard it.
+ return;
+ } else if generics.len() == 1 {
+ // In this case, no need to go through an intermediate state if the type parameter
+ // contains only one trait bound.
+ //
+ // For example:
+ //
+ // `fn foo<T: Display>(r: Option<T>) {}`
+ //
+ // In this case, it would contain:
+ //
+ // ```
+ // [{
+ // name: "option",
+ // generics: [{
+ // name: "",
+ // generics: [
+ // name: "Display",
+ // generics: []
+ // }]
+ // }]
+ // }]
+ // ```
+ //
+ // After removing the intermediate (unnecessary) type parameter, it'll become:
+ //
+ // ```
+ // [{
+ // name: "option",
+ // generics: [{
+ // name: "Display",
+ // generics: []
+ // }]
+ // }]
+ // ```
+ //
+                // Note that this only works if there is ONLY ONE trait bound; otherwise we still
+                // need to keep it as is!
+ res.push(generics.pop().unwrap());
+ return;
+ }
+ }
+ let index_ty = get_index_type(&ty, generics);
+ if index_ty.id.is_none() && generics_empty {
+ return;
+ }
+ res.push(index_ty);
+ }
+
+ if recurse >= 10 {
+ // FIXME: remove this whole recurse thing when the recursion bug is fixed
+ // See #59502 for the original issue.
+ return;
+ }
+
+ // First, check if it's "Self".
+ let arg = if let Some(self_) = self_ {
+ match &*arg {
+ Type::BorrowedRef { type_, .. } if type_.is_self_type() => self_,
+ type_ if type_.is_self_type() => self_,
+ arg => arg,
+ }
+ } else {
+ arg
+ };
+
+ // If this argument is a type parameter and not a trait bound or a type, we need to look
+ // for its bounds.
+ if let Type::Generic(arg_s) = *arg {
+ // First we check if the bounds are in a `where` predicate...
+ if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g {
+ WherePredicate::BoundPredicate { ty, .. } => ty.def_id(cache) == arg.def_id(cache),
+ _ => false,
+ }) {
+ let mut ty_generics = Vec::new();
+ let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
+ for bound in bounds.iter() {
+ if let GenericBound::TraitBound(poly_trait, _) = bound {
+ for param_def in poly_trait.generic_params.iter() {
+ match &param_def.kind {
+ clean::GenericParamDefKind::Type { default: Some(ty), .. } => {
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ )
+ }
+ _ => {}
+ }
+ }
+ }
+ }
+ insert_ty(res, arg.clone(), ty_generics);
+ }
+ // Otherwise we check if the trait bounds are "inlined" like `T: Option<u32>`...
+ if let Some(bound) = generics.params.iter().find(|g| g.is_type() && g.name == arg_s) {
+ let mut ty_generics = Vec::new();
+ for bound in bound.get_bounds().unwrap_or(&[]) {
+ if let Some(path) = bound.get_trait_path() {
+ let ty = Type::Path { path };
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
+ }
+ }
+ insert_ty(res, arg.clone(), ty_generics);
+ }
+ } else if let Type::ImplTrait(ref bounds) = *arg {
+ let mut ty_generics = Vec::new();
+ for bound in bounds {
+ if let Some(path) = bound.get_trait_path() {
+ let ty = Type::Path { path };
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ &ty,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
+ }
+ }
+ insert_ty(res, arg.clone(), ty_generics);
+ } else {
+ // This is not a type parameter. So for example if we have `T, U: Option<T>`, and we're
+ // looking at `Option`, we enter this "else" condition, otherwise if it's `T`, we don't.
+ //
+ // So in here, we can add it directly and look for its own type parameters (so for `Option`,
+ // we will look for them but not for `T`).
+ let mut ty_generics = Vec::new();
+ if let Some(arg_generics) = arg.generics() {
+ for gen in arg_generics.iter() {
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ gen,
+ tcx,
+ recurse + 1,
+ &mut ty_generics,
+ cache,
+ );
+ }
+ }
+ insert_ty(res, arg.clone(), ty_generics);
+ }
+}
+
+/// Return the full list of types when bounds have been resolved.
+///
+/// i.e. `fn foo<A: Display, B: Option<A>>(x: u32, y: B)` will return
+/// `[u32, Display, Option]`.
+fn get_fn_inputs_and_outputs<'tcx>(
+ func: &Function,
+ tcx: TyCtxt<'tcx>,
+ impl_generics: Option<&(clean::Type, clean::Generics)>,
+ cache: &Cache,
+) -> (Vec<RenderType>, Vec<RenderType>) {
+ let decl = &func.decl;
+
+ let combined_generics;
+ let (self_, generics) = if let Some(&(ref impl_self, ref impl_generics)) = impl_generics {
+ match (impl_generics.is_empty(), func.generics.is_empty()) {
+ (true, _) => (Some(impl_self), &func.generics),
+ (_, true) => (Some(impl_self), impl_generics),
+ (false, false) => {
+ let mut params = func.generics.params.clone();
+ params.extend(impl_generics.params.clone());
+ let mut where_predicates = func.generics.where_predicates.clone();
+ where_predicates.extend(impl_generics.where_predicates.clone());
+ combined_generics = clean::Generics { params, where_predicates };
+ (Some(impl_self), &combined_generics)
+ }
+ }
+ } else {
+ (None, &func.generics)
+ };
+
+ let mut all_types = Vec::new();
+ for arg in decl.inputs.values.iter() {
+ let mut args = Vec::new();
+ add_generics_and_bounds_as_types(self_, generics, &arg.type_, tcx, 0, &mut args, cache);
+ if !args.is_empty() {
+ all_types.extend(args);
+ } else {
+ all_types.push(get_index_type(&arg.type_, vec![]));
+ }
+ }
+
+ let mut ret_types = Vec::new();
+ match decl.output {
+ FnRetTy::Return(ref return_type) => {
+ add_generics_and_bounds_as_types(
+ self_,
+ generics,
+ return_type,
+ tcx,
+ 0,
+ &mut ret_types,
+ cache,
+ );
+ if ret_types.is_empty() {
+ ret_types.push(get_index_type(return_type, vec![]));
+ }
+ }
+ _ => {}
+ };
+ (all_types, ret_types)
+}
diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs
new file mode 100644
index 000000000..34d590fb2
--- /dev/null
+++ b/src/librustdoc/html/render/span_map.rs
@@ -0,0 +1,203 @@
+use crate::clean::{self, PrimitiveType};
+use crate::html::sources;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{ExprKind, HirId, Mod, Node};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::hygiene::MacroKind;
+use rustc_span::{BytePos, ExpnKind, Span};
+
+use std::path::{Path, PathBuf};
+
+/// This enum allows us to store two different kinds of information:
+///
+/// In case the `span` definition comes from the same crate, we can simply get the `span` and use
+/// it as is.
+///
+/// Otherwise, we store the definition `DefId` and will generate a link to the documentation page
+/// instead of the source code directly.
+#[derive(Debug)]
+pub(crate) enum LinkFromSrc {
+ Local(clean::Span),
+ External(DefId),
+ Primitive(PrimitiveType),
+}
+
+/// This function will do at most two things:
+///
+/// 1. Generate a `span` correspondence map which links an item `span` to its definition `span`.
+/// 2. Collect the source code files.
+///
+/// It returns the source code files and the `span` correspondence map.
+///
+/// Note about the `span` correspondence map: the keys are actually `(lo, hi)` of `span`s. We don't
+/// need the `span` context later on, only their position, so instead of keeping a whole `Span`, we
+/// only keep the `lo` and `hi`.
+pub(crate) fn collect_spans_and_sources(
+ tcx: TyCtxt<'_>,
+ krate: &clean::Crate,
+ src_root: &Path,
+ include_sources: bool,
+ generate_link_to_definition: bool,
+) -> (FxHashMap<PathBuf, String>, FxHashMap<Span, LinkFromSrc>) {
+ let mut visitor = SpanMapVisitor { tcx, matches: FxHashMap::default() };
+
+ if include_sources {
+ if generate_link_to_definition {
+ tcx.hir().walk_toplevel_module(&mut visitor);
+ }
+ let sources = sources::collect_local_sources(tcx, src_root, krate);
+ (sources, visitor.matches)
+ } else {
+ (Default::default(), Default::default())
+ }
+}
+
+struct SpanMapVisitor<'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+ pub(crate) matches: FxHashMap<Span, LinkFromSrc>,
+}
+
+impl<'tcx> SpanMapVisitor<'tcx> {
+ /// This function is where we handle `hir::Path` elements and add them into the "span map".
+ fn handle_path(&mut self, path: &rustc_hir::Path<'_>) {
+ let info = match path.res {
+ // FIXME: For now, we handle `DefKind` if it's not a `DefKind::TyParam`.
+ // Would be nice to support them too alongside the other `DefKind`
+ // (such as primitive types!).
+ Res::Def(kind, def_id) if kind != DefKind::TyParam => Some(def_id),
+ Res::Local(_) => None,
+ Res::PrimTy(p) => {
+ // FIXME: Doesn't handle "path-like" primitives like arrays or tuples.
+ self.matches.insert(path.span, LinkFromSrc::Primitive(PrimitiveType::from(p)));
+ return;
+ }
+ Res::Err => return,
+ _ => return,
+ };
+ if let Some(span) = self.tcx.hir().res_span(path.res) {
+ self.matches.insert(path.span, LinkFromSrc::Local(clean::Span::new(span)));
+ } else if let Some(def_id) = info {
+ self.matches.insert(path.span, LinkFromSrc::External(def_id));
+ }
+ }
+
+ /// Adds the macro call into the span map. Returns `true` if the `span` was inside a macro
+ /// expansion, whether or not it was added to the span map.
+ ///
+    /// The idea for the macro support is to check whether the current `Span` comes from an
+    /// expansion. If so, we keep calling `outer_expn_data` until we find the macro definition.
+ /// Finally, we get the information about the macro itself (`span` if "local", `DefId`
+ /// otherwise) and store it inside the span map.
+ fn handle_macro(&mut self, span: Span) -> bool {
+ if !span.from_expansion() {
+ return false;
+ }
+ // So if the `span` comes from a macro expansion, we need to get the original
+ // macro's `DefId`.
+ let mut data = span.ctxt().outer_expn_data();
+ let mut call_site = data.call_site;
+ // Macros can expand to code containing macros, which will in turn be expanded, etc.
+        // So the idea here is to "go up" until we reach a call site that does not itself come
+        // from a macro expansion, so that we can get the `DefId` of the original macro that was
+        // at the origin of this expansion.
+ while call_site.from_expansion() {
+ data = call_site.ctxt().outer_expn_data();
+ call_site = data.call_site;
+ }
+
+ let macro_name = match data.kind {
+ ExpnKind::Macro(MacroKind::Bang, macro_name) => macro_name,
+ // Even though we don't handle this kind of macro, this `data` still comes from
+ // expansion so we return `true` so we don't go any deeper in this code.
+ _ => return true,
+ };
+ let link_from_src = match data.macro_def_id {
+ Some(macro_def_id) if macro_def_id.is_local() => {
+ LinkFromSrc::Local(clean::Span::new(data.def_site))
+ }
+ Some(macro_def_id) => LinkFromSrc::External(macro_def_id),
+ None => return true,
+ };
+ let new_span = data.call_site;
+ let macro_name = macro_name.as_str();
+ // The "call_site" includes the whole macro with its "arguments". We only want
+ // the macro name.
+ let new_span = new_span.with_hi(new_span.lo() + BytePos(macro_name.len() as u32));
+ self.matches.insert(new_span, link_from_src);
+ true
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for SpanMapVisitor<'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.tcx.hir()
+ }
+
+ fn visit_path(&mut self, path: &'tcx rustc_hir::Path<'tcx>, _id: HirId) {
+ if self.handle_macro(path.span) {
+ return;
+ }
+ self.handle_path(path);
+ intravisit::walk_path(self, path);
+ }
+
+ fn visit_mod(&mut self, m: &'tcx Mod<'tcx>, span: Span, id: HirId) {
+        // This distinguishes between "mod foo {}" and "mod foo;". When we "import" another
+        // file, we want to link to it. Otherwise there is no need to create a link.
+ if !span.overlaps(m.spans.inner_span) {
+ // Now that we confirmed it's a file import, we want to get the span for the module
+ // name only and not all the "mod foo;".
+ if let Some(Node::Item(item)) = self.tcx.hir().find(id) {
+ self.matches.insert(
+ item.ident.span,
+ LinkFromSrc::Local(clean::Span::new(m.spans.inner_span)),
+ );
+ }
+ }
+ intravisit::walk_mod(self, m, id);
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx rustc_hir::Expr<'tcx>) {
+ if let ExprKind::MethodCall(segment, ..) = expr.kind {
+ if let Some(hir_id) = segment.hir_id {
+ let hir = self.tcx.hir();
+ let body_id = hir.enclosing_body_owner(hir_id);
+ // FIXME: this is showing error messages for parts of the code that are not
+ // compiled (because of cfg)!
+ //
+ // See discussion in https://github.com/rust-lang/rust/issues/69426#issuecomment-1019412352
+ let typeck_results = self.tcx.typeck_body(
+ hir.maybe_body_owned_by(body_id).expect("a body which isn't a body"),
+ );
+ if let Some(def_id) = typeck_results.type_dependent_def_id(expr.hir_id) {
+ self.matches.insert(
+ segment.ident.span,
+ match hir.span_if_local(def_id) {
+ Some(span) => LinkFromSrc::Local(clean::Span::new(span)),
+ None => LinkFromSrc::External(def_id),
+ },
+ );
+ }
+ }
+ } else if self.handle_macro(expr.span) {
+ // We don't want to go deeper into the macro.
+ return;
+ }
+ intravisit::walk_expr(self, expr);
+ }
+
+ fn visit_use(&mut self, path: &'tcx rustc_hir::Path<'tcx>, id: HirId) {
+ if self.handle_macro(path.span) {
+ return;
+ }
+ self.handle_path(path);
+ intravisit::walk_use(self, path, id);
+ }
+}
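
For readers unfamiliar with how nested expansions behave, here is a minimal, standalone sketch (not part of this diff, and independent of rustc's internals) of the situation the "go up" loop in `handle_macro` deals with: a span produced by an inner macro has a call site that is itself the result of another expansion, and only the outermost call site corresponds to code the user actually wrote.

    // Illustrative example only: two nested declarative macros.
    macro_rules! inner {
        () => {
            1 + 1
        };
    }

    macro_rules! outer {
        () => {
            inner!()
        };
    }

    fn main() {
        // The `1 + 1` expression originates from the expansion of `inner!`,
        // which itself comes from the expansion of `outer!`. The only span
        // present in user-written source is the `outer!()` call on this line,
        // which is what walking up the chain of call sites ends on.
        let two = outer!();
        assert_eq!(two, 2);
    }
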
diff --git a/src/librustdoc/html/render/tests.rs b/src/librustdoc/html/render/tests.rs
new file mode 100644
index 000000000..3175fbe56
--- /dev/null
+++ b/src/librustdoc/html/render/tests.rs
@@ -0,0 +1,54 @@
+use std::cmp::Ordering;
+
+use super::print_item::compare_names;
+use super::{AllTypes, Buffer};
+
+#[test]
+fn test_compare_names() {
+ for &(a, b) in &[
+ ("hello", "world"),
+ ("", "world"),
+ ("123", "hello"),
+ ("123", ""),
+ ("123test", "123"),
+ ("hello", ""),
+ ("hello", "hello"),
+ ("hello123", "hello123"),
+ ("hello123", "hello12"),
+ ("hello12", "hello123"),
+ ("hello01abc", "hello01xyz"),
+ ("hello0abc", "hello0"),
+ ("hello0", "hello0abc"),
+ ("01", "1"),
+ ] {
+ assert_eq!(compare_names(a, b), a.cmp(b), "{:?} - {:?}", a, b);
+ }
+ assert_eq!(compare_names("u8", "u16"), Ordering::Less);
+ assert_eq!(compare_names("u32", "u16"), Ordering::Greater);
+ assert_eq!(compare_names("u8_to_f64", "u16_to_f64"), Ordering::Less);
+ assert_eq!(compare_names("u32_to_f64", "u16_to_f64"), Ordering::Greater);
+ assert_eq!(compare_names("u16_to_f64", "u16_to_f64"), Ordering::Equal);
+ assert_eq!(compare_names("u16_to_f32", "u16_to_f64"), Ordering::Less);
+}
+
+#[test]
+fn test_name_sorting() {
+ let names = [
+ "Apple", "Banana", "Fruit", "Fruit0", "Fruit00", "Fruit01", "Fruit1", "Fruit02", "Fruit2",
+ "Fruit20", "Fruit30x", "Fruit100", "Pear",
+ ];
+ let mut sorted = names.to_owned();
+ sorted.sort_by(|&l, r| compare_names(l, r));
+ assert_eq!(names, sorted);
+}
+
+#[test]
+fn test_all_types_prints_header_once() {
+ // Regression test for #82477
+ let all_types = AllTypes::new();
+
+ let mut buffer = Buffer::new();
+ all_types.print(&mut buffer);
+
+ assert_eq!(1, buffer.into_inner().matches("List of all items").count());
+}
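
For context, the ordering these tests exercise is a "natural" one in which runs of digits compare by numeric value rather than byte by byte (so `u8 < u16` but `u32 > u16`). The sketch below only illustrates that idea under that assumption; it is not the actual `compare_names` implementation, and it glosses over edge cases such as leading zeros (`"01"` vs `"1"`), which the real function handles.

    use std::cmp::Ordering;

    // Rough natural-order comparison for illustration: split each name into
    // maximal digit / non-digit chunks, compare digit chunks numerically and
    // everything else lexicographically.
    fn natural_cmp(a: &str, b: &str) -> Ordering {
        fn chunks(s: &str) -> Vec<String> {
            let mut out: Vec<String> = Vec::new();
            let mut prev_is_digit = None;
            for c in s.chars() {
                let is_digit = c.is_ascii_digit();
                if prev_is_digit == Some(is_digit) {
                    // Same class as the previous char: extend the current chunk.
                    out.last_mut().unwrap().push(c);
                } else {
                    out.push(c.to_string());
                }
                prev_is_digit = Some(is_digit);
            }
            out
        }
        for (x, y) in chunks(a).iter().zip(chunks(b).iter()) {
            let ord = match (x.parse::<u64>(), y.parse::<u64>()) {
                (Ok(nx), Ok(ny)) => nx.cmp(&ny),
                _ => x.cmp(y),
            };
            if ord != Ordering::Equal {
                return ord;
            }
        }
        a.len().cmp(&b.len())
    }

    fn main() {
        assert_eq!(natural_cmp("u8", "u16"), Ordering::Less);
        assert_eq!(natural_cmp("Fruit2", "Fruit20"), Ordering::Less);
        assert_eq!(natural_cmp("hello", "world"), Ordering::Less);
    }
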
diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs
new file mode 100644
index 000000000..6fb41ff32
--- /dev/null
+++ b/src/librustdoc/html/render/write_shared.rs
@@ -0,0 +1,600 @@
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::{self, BufReader};
+use std::path::{Component, Path, PathBuf};
+use std::rc::Rc;
+use std::sync::LazyLock as Lazy;
+
+use itertools::Itertools;
+use rustc_data_structures::flock;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use serde::Serialize;
+
+use super::{collect_paths_for_type, ensure_trailing_slash, Context, BASIC_KEYWORDS};
+use crate::clean::Crate;
+use crate::config::{EmitType, RenderOptions};
+use crate::docfs::PathError;
+use crate::error::Error;
+use crate::html::{layout, static_files};
+use crate::{try_err, try_none};
+
+static FILES_UNVERSIONED: Lazy<FxHashMap<&str, &[u8]>> = Lazy::new(|| {
+ map! {
+ "FiraSans-Regular.woff2" => static_files::fira_sans::REGULAR,
+ "FiraSans-Medium.woff2" => static_files::fira_sans::MEDIUM,
+ "FiraSans-LICENSE.txt" => static_files::fira_sans::LICENSE,
+ "SourceSerif4-Regular.ttf.woff2" => static_files::source_serif_4::REGULAR,
+ "SourceSerif4-Bold.ttf.woff2" => static_files::source_serif_4::BOLD,
+ "SourceSerif4-It.ttf.woff2" => static_files::source_serif_4::ITALIC,
+ "SourceSerif4-LICENSE.md" => static_files::source_serif_4::LICENSE,
+ "SourceCodePro-Regular.ttf.woff2" => static_files::source_code_pro::REGULAR,
+ "SourceCodePro-Semibold.ttf.woff2" => static_files::source_code_pro::SEMIBOLD,
+ "SourceCodePro-It.ttf.woff2" => static_files::source_code_pro::ITALIC,
+ "SourceCodePro-LICENSE.txt" => static_files::source_code_pro::LICENSE,
+ "NanumBarunGothic.ttf.woff2" => static_files::nanum_barun_gothic::REGULAR,
+ "NanumBarunGothic-LICENSE.txt" => static_files::nanum_barun_gothic::LICENSE,
+ "LICENSE-MIT.txt" => static_files::LICENSE_MIT,
+ "LICENSE-APACHE.txt" => static_files::LICENSE_APACHE,
+ "COPYRIGHT.txt" => static_files::COPYRIGHT,
+ }
+});
+
+enum SharedResource<'a> {
+ /// This file will never change, no matter what toolchain is used to build it.
+ ///
+ /// It does not have a resource suffix.
+ Unversioned { name: &'static str },
+ /// This file may change depending on the toolchain.
+ ///
+ /// It has a resource suffix.
+ ToolchainSpecific { basename: &'static str },
+ /// This file may change for any crate within a build, or based on the CLI arguments.
+ ///
+ /// This differs from normal invocation-specific files because it has a resource suffix.
+ InvocationSpecific { basename: &'a str },
+}
+
+impl SharedResource<'_> {
+ fn extension(&self) -> Option<&OsStr> {
+ use SharedResource::*;
+ match self {
+ Unversioned { name }
+ | ToolchainSpecific { basename: name }
+ | InvocationSpecific { basename: name } => Path::new(name).extension(),
+ }
+ }
+
+ fn path(&self, cx: &Context<'_>) -> PathBuf {
+ match self {
+ SharedResource::Unversioned { name } => cx.dst.join(name),
+ SharedResource::ToolchainSpecific { basename } => cx.suffix_path(basename),
+ SharedResource::InvocationSpecific { basename } => cx.suffix_path(basename),
+ }
+ }
+
+ fn should_emit(&self, emit: &[EmitType]) -> bool {
+ if emit.is_empty() {
+ return true;
+ }
+ let kind = match self {
+ SharedResource::Unversioned { .. } => EmitType::Unversioned,
+ SharedResource::ToolchainSpecific { .. } => EmitType::Toolchain,
+ SharedResource::InvocationSpecific { .. } => EmitType::InvocationSpecific,
+ };
+ emit.contains(&kind)
+ }
+}
+
+impl Context<'_> {
+ fn suffix_path(&self, filename: &str) -> PathBuf {
+ // We use `split_once` instead of `Path::extension` here because we might get a
+ // filename like `style.min.css` and we want to process that into
+ // `style-suffix.min.css`. `Path::extension` would just return `css`,
+ // which would result in `style.min-suffix.css`, which isn't what we
+ // want.
+ let (base, ext) = filename.split_once('.').unwrap();
+ let filename = format!("{}{}.{}", base, self.shared.resource_suffix, ext);
+ self.dst.join(&filename)
+ }
+
+ fn write_shared(
+ &self,
+ resource: SharedResource<'_>,
+ contents: impl 'static + Send + AsRef<[u8]>,
+ emit: &[EmitType],
+ ) -> Result<(), Error> {
+ if resource.should_emit(emit) {
+ self.shared.fs.write(resource.path(self), contents)
+ } else {
+ Ok(())
+ }
+ }
+
+ fn write_minify(
+ &self,
+ resource: SharedResource<'_>,
+ contents: impl 'static + Send + AsRef<str> + AsRef<[u8]>,
+ minify: bool,
+ emit: &[EmitType],
+ ) -> Result<(), Error> {
+ if minify {
+ let contents = contents.as_ref();
+ let contents = if resource.extension() == Some(OsStr::new("css")) {
+ minifier::css::minify(contents)
+ .map_err(|e| {
+ Error::new(format!("failed to minify CSS file: {}", e), resource.path(self))
+ })?
+ .to_string()
+ } else {
+ minifier::js::minify(contents).to_string()
+ };
+ self.write_shared(resource, contents, emit)
+ } else {
+ self.write_shared(resource, contents, emit)
+ }
+ }
+}
+
+pub(super) fn write_shared(
+ cx: &mut Context<'_>,
+ krate: &Crate,
+ search_index: String,
+ options: &RenderOptions,
+) -> Result<(), Error> {
+ // Write out the shared files. Note that these are shared among all rustdoc
+ // docs placed in the output directory, so this needs to be a synchronized
+ // operation with respect to any other rustdoc processes running concurrently.
+ let lock_file = cx.dst.join(".lock");
+ let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file);
+
+ // Minified resources are usually toolchain resources. If they're not, they should use `cx.write_minify` directly.
+ fn write_minify(
+ basename: &'static str,
+ contents: impl 'static + Send + AsRef<str> + AsRef<[u8]>,
+ cx: &Context<'_>,
+ options: &RenderOptions,
+ ) -> Result<(), Error> {
+ cx.write_minify(
+ SharedResource::ToolchainSpecific { basename },
+ contents,
+ options.enable_minification,
+ &options.emit,
+ )
+ }
+
+ // Toolchain resources should never be dynamic.
+ let write_toolchain = |p: &'static _, c: &'static _| {
+ cx.write_shared(SharedResource::ToolchainSpecific { basename: p }, c, &options.emit)
+ };
+
+ // Crate resources should always be dynamic.
+ let write_crate = |p: &_, make_content: &dyn Fn() -> Result<Vec<u8>, Error>| {
+ let content = make_content()?;
+ cx.write_shared(SharedResource::InvocationSpecific { basename: p }, content, &options.emit)
+ };
+
+ // Given "foo.svg", return e.g. "url(\"foo1.58.0.svg\")"
+ fn ver_url(cx: &Context<'_>, basename: &'static str) -> String {
+ format!(
+ "url(\"{}\")",
+ SharedResource::ToolchainSpecific { basename }
+ .path(cx)
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ )
+ }
+
+ // We use the AUTOREPLACE mechanism to inject into our static JS and CSS certain
+ // values that are only known at doc build time. Since this mechanism is somewhat
+ // surprising when reading the code, please limit it to rustdoc.css.
+ write_minify(
+ "rustdoc.css",
+ static_files::RUSTDOC_CSS
+ .replace(
+ "/* AUTOREPLACE: */url(\"toggle-minus.svg\")",
+ &ver_url(cx, "toggle-minus.svg"),
+ )
+ .replace("/* AUTOREPLACE: */url(\"toggle-plus.svg\")", &ver_url(cx, "toggle-plus.svg"))
+ .replace("/* AUTOREPLACE: */url(\"down-arrow.svg\")", &ver_url(cx, "down-arrow.svg")),
+ cx,
+ options,
+ )?;
+
+ // Add all the static files. These may already exist, but we just
+ // overwrite them anyway to make sure that they're fresh and up-to-date.
+ write_minify("settings.css", static_files::SETTINGS_CSS, cx, options)?;
+ write_minify("noscript.css", static_files::NOSCRIPT_CSS, cx, options)?;
+
+ // To avoid "light.css" to be overwritten, we'll first run over the received themes and only
+ // then we'll run over the "official" styles.
+ let mut themes: FxHashSet<String> = FxHashSet::default();
+
+ for entry in &cx.shared.style_files {
+ let theme = entry.basename()?;
+ let extension =
+ try_none!(try_none!(entry.path.extension(), &entry.path).to_str(), &entry.path);
+
+ // Handle the official themes
+ match theme.as_str() {
+ "light" => write_minify("light.css", static_files::themes::LIGHT, cx, options)?,
+ "dark" => write_minify("dark.css", static_files::themes::DARK, cx, options)?,
+ "ayu" => write_minify("ayu.css", static_files::themes::AYU, cx, options)?,
+ _ => {
+ // Handle added third-party themes
+ let filename = format!("{}.{}", theme, extension);
+ write_crate(&filename, &|| Ok(try_err!(fs::read(&entry.path), &entry.path)))?;
+ }
+ };
+
+ themes.insert(theme.to_owned());
+ }
+
+ if (*cx.shared).layout.logo.is_empty() {
+ write_toolchain("rust-logo.svg", static_files::RUST_LOGO_SVG)?;
+ }
+ if (*cx.shared).layout.favicon.is_empty() {
+ write_toolchain("favicon.svg", static_files::RUST_FAVICON_SVG)?;
+ write_toolchain("favicon-16x16.png", static_files::RUST_FAVICON_PNG_16)?;
+ write_toolchain("favicon-32x32.png", static_files::RUST_FAVICON_PNG_32)?;
+ }
+ write_toolchain("wheel.svg", static_files::WHEEL_SVG)?;
+ write_toolchain("clipboard.svg", static_files::CLIPBOARD_SVG)?;
+ write_toolchain("down-arrow.svg", static_files::DOWN_ARROW_SVG)?;
+ write_toolchain("toggle-minus.svg", static_files::TOGGLE_MINUS_PNG)?;
+ write_toolchain("toggle-plus.svg", static_files::TOGGLE_PLUS_PNG)?;
+
+ let mut themes: Vec<&String> = themes.iter().collect();
+ themes.sort();
+
+ write_minify("main.js", static_files::MAIN_JS, cx, options)?;
+ write_minify("search.js", static_files::SEARCH_JS, cx, options)?;
+ write_minify("settings.js", static_files::SETTINGS_JS, cx, options)?;
+
+ if cx.include_sources {
+ write_minify("source-script.js", static_files::sidebar::SOURCE_SCRIPT, cx, options)?;
+ }
+
+ write_minify("storage.js", static_files::STORAGE_JS, cx, options)?;
+
+ if cx.shared.layout.scrape_examples_extension {
+ cx.write_minify(
+ SharedResource::InvocationSpecific { basename: "scrape-examples.js" },
+ static_files::SCRAPE_EXAMPLES_JS,
+ options.enable_minification,
+ &options.emit,
+ )?;
+ }
+
+ if let Some(ref css) = cx.shared.layout.css_file_extension {
+ let buffer = try_err!(fs::read_to_string(css), css);
+ // This varies based on the invocation, so it can't go through the write_minify wrapper.
+ cx.write_minify(
+ SharedResource::InvocationSpecific { basename: "theme.css" },
+ buffer,
+ options.enable_minification,
+ &options.emit,
+ )?;
+ }
+ write_minify("normalize.css", static_files::NORMALIZE_CSS, cx, options)?;
+ for (name, contents) in &*FILES_UNVERSIONED {
+ cx.write_shared(SharedResource::Unversioned { name }, contents, &options.emit)?;
+ }
+
+ fn collect(path: &Path, krate: &str, key: &str) -> io::Result<(Vec<String>, Vec<String>)> {
+ let mut ret = Vec::new();
+ let mut krates = Vec::new();
+
+ if path.exists() {
+ let prefix = format!(r#"{}["{}"]"#, key, krate);
+ for line in BufReader::new(File::open(path)?).lines() {
+ let line = line?;
+ if !line.starts_with(key) {
+ continue;
+ }
+ if line.starts_with(&prefix) {
+ continue;
+ }
+ ret.push(line.to_string());
+ krates.push(
+ line[key.len() + 2..]
+ .split('"')
+ .next()
+ .map(|s| s.to_owned())
+ .unwrap_or_else(String::new),
+ );
+ }
+ }
+ Ok((ret, krates))
+ }
+
+ fn collect_json(path: &Path, krate: &str) -> io::Result<(Vec<String>, Vec<String>)> {
+ let mut ret = Vec::new();
+ let mut krates = Vec::new();
+
+ if path.exists() {
+ let prefix = format!("\"{}\"", krate);
+ for line in BufReader::new(File::open(path)?).lines() {
+ let line = line?;
+ if !line.starts_with('"') {
+ continue;
+ }
+ if line.starts_with(&prefix) {
+ continue;
+ }
+ if line.ends_with(",\\") {
+ ret.push(line[..line.len() - 2].to_string());
+ } else {
+ // Ends with "\\" (it's the case for the last added crate line)
+ ret.push(line[..line.len() - 1].to_string());
+ }
+ krates.push(
+ line.split('"')
+ .find(|s| !s.is_empty())
+ .map(|s| s.to_owned())
+ .unwrap_or_else(String::new),
+ );
+ }
+ }
+ Ok((ret, krates))
+ }
+
+ use std::ffi::OsString;
+
+ #[derive(Debug)]
+ struct Hierarchy {
+ elem: OsString,
+ children: FxHashMap<OsString, Hierarchy>,
+ elems: FxHashSet<OsString>,
+ }
+
+ impl Hierarchy {
+ fn new(elem: OsString) -> Hierarchy {
+ Hierarchy { elem, children: FxHashMap::default(), elems: FxHashSet::default() }
+ }
+
+ fn to_json_string(&self) -> String {
+ let mut subs: Vec<&Hierarchy> = self.children.values().collect();
+ subs.sort_unstable_by(|a, b| a.elem.cmp(&b.elem));
+ let mut files = self
+ .elems
+ .iter()
+ .map(|s| format!("\"{}\"", s.to_str().expect("invalid osstring conversion")))
+ .collect::<Vec<_>>();
+ files.sort_unstable();
+ let subs = subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(",");
+ let dirs = if subs.is_empty() && files.is_empty() {
+ String::new()
+ } else {
+ format!(",[{}]", subs)
+ };
+ let files = files.join(",");
+ let files = if files.is_empty() { String::new() } else { format!(",[{}]", files) };
+ format!(
+ "[\"{name}\"{dirs}{files}]",
+ name = self.elem.to_str().expect("invalid osstring conversion"),
+ dirs = dirs,
+ files = files
+ )
+ }
+ }
+
+ if cx.include_sources {
+ let mut hierarchy = Hierarchy::new(OsString::new());
+ for source in cx
+ .shared
+ .local_sources
+ .iter()
+ .filter_map(|p| p.0.strip_prefix(&cx.shared.src_root).ok())
+ {
+ let mut h = &mut hierarchy;
+ let mut elems = source
+ .components()
+ .filter_map(|s| match s {
+ Component::Normal(s) => Some(s.to_owned()),
+ _ => None,
+ })
+ .peekable();
+ loop {
+ let cur_elem = elems.next().expect("empty file path");
+ if elems.peek().is_none() {
+ h.elems.insert(cur_elem);
+ break;
+ } else {
+ let e = cur_elem.clone();
+ h = h.children.entry(cur_elem.clone()).or_insert_with(|| Hierarchy::new(e));
+ }
+ }
+ }
+
+ let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix));
+ let make_sources = || {
+ let (mut all_sources, _krates) =
+ try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
+ all_sources.push(format!(
+ r#""{}":{}"#,
+ &krate.name(cx.tcx()),
+ hierarchy
+ .to_json_string()
+ // All these `replace` calls are needed because the JSON content has to go
+ // through a JS string literal.
+ .replace('\\', r"\\")
+ .replace('\'', r"\'")
+ // We need to escape double quotes for the JSON.
+ .replace("\\\"", "\\\\\"")
+ ));
+ all_sources.sort();
+ let mut v = String::from("var sourcesIndex = JSON.parse('{\\\n");
+ v.push_str(&all_sources.join(",\\\n"));
+ v.push_str("\\\n}');\ncreateSourceSidebar();\n");
+ Ok(v.into_bytes())
+ };
+ write_crate("source-files.js", &make_sources)?;
+ }
+
+ // Update the search index and crate list.
+ let dst = cx.dst.join(&format!("search-index{}.js", cx.shared.resource_suffix));
+ let (mut all_indexes, mut krates) =
+ try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
+ all_indexes.push(search_index);
+ krates.push(krate.name(cx.tcx()).to_string());
+ krates.sort();
+
+ // Sort the indexes by crate so the file will be generated identically even
+ // with rustdoc running in parallel.
+ all_indexes.sort();
+ write_crate("search-index.js", &|| {
+ let mut v = String::from("var searchIndex = JSON.parse('{\\\n");
+ v.push_str(&all_indexes.join(",\\\n"));
+ v.push_str(
+ r#"\
+}');
+if (typeof window !== 'undefined' && window.initSearch) {window.initSearch(searchIndex)};
+if (typeof exports !== 'undefined') {exports.searchIndex = searchIndex};
+"#,
+ );
+ Ok(v.into_bytes())
+ })?;
+
+ write_crate("crates.js", &|| {
+ let krates = krates.iter().map(|k| format!("\"{}\"", k)).join(",");
+ Ok(format!("window.ALL_CRATES = [{}];", krates).into_bytes())
+ })?;
+
+ if options.enable_index_page {
+ if let Some(index_page) = options.index_page.clone() {
+ let mut md_opts = options.clone();
+ md_opts.output = cx.dst.clone();
+ md_opts.external_html = (*cx.shared).layout.external_html.clone();
+
+ crate::markdown::render(&index_page, md_opts, cx.shared.edition())
+ .map_err(|e| Error::new(e, &index_page))?;
+ } else {
+ let shared = Rc::clone(&cx.shared);
+ let dst = cx.dst.join("index.html");
+ let page = layout::Page {
+ title: "Index of crates",
+ css_class: "mod",
+ root_path: "./",
+ static_root_path: shared.static_root_path.as_deref(),
+ description: "List of crates",
+ keywords: BASIC_KEYWORDS,
+ resource_suffix: &shared.resource_suffix,
+ };
+
+ let content = format!(
+ "<h1 class=\"fqn\">\
+ <span class=\"in-band\">List of all crates</span>\
+ </h1><ul class=\"crate mod\">{}</ul>",
+ krates
+ .iter()
+ .map(|s| {
+ format!(
+ "<li><a class=\"crate mod\" href=\"{}index.html\">{}</a></li>",
+ ensure_trailing_slash(s),
+ s
+ )
+ })
+ .collect::<String>()
+ );
+ let v = layout::render(&shared.layout, &page, "", content, &shared.style_files);
+ shared.fs.write(dst, v)?;
+ }
+ }
+
+ // Update the list of all implementors for traits
+ let dst = cx.dst.join("implementors");
+ let cache = cx.cache();
+ for (&did, imps) in &cache.implementors {
+ // Private modules can leak through to this phase of rustdoc, which
+ // could contain implementations for otherwise private types. In some
+ // rare cases we could find an implementation for an item which wasn't
+ // indexed, so we just skip this step in that case.
+ //
+ // FIXME: this is a vague explanation for why this can't be a `get`, in
+ // theory it should be...
+ let (remote_path, remote_item_type) = match cache.exact_paths.get(&did) {
+ Some(p) => match cache.paths.get(&did).or_else(|| cache.external_paths.get(&did)) {
+ Some((_, t)) => (p, t),
+ None => continue,
+ },
+ None => match cache.external_paths.get(&did) {
+ Some((p, t)) => (p, t),
+ None => continue,
+ },
+ };
+
+ #[derive(Serialize)]
+ struct Implementor {
+ text: String,
+ synthetic: bool,
+ types: Vec<String>,
+ }
+
+ let implementors = imps
+ .iter()
+ .filter_map(|imp| {
+ // If the trait and implementation are in the same crate, then
+ // there's no need to emit information about it (there's inlining
+ // going on). If they're in different crates then the crate defining
+ // the trait will be interested in our implementation.
+ //
+ // If the implementation is from another crate then that crate
+ // should add it.
+ if imp.impl_item.item_id.krate() == did.krate || !imp.impl_item.item_id.is_local() {
+ None
+ } else {
+ Some(Implementor {
+ text: imp.inner_impl().print(false, cx).to_string(),
+ synthetic: imp.inner_impl().kind.is_auto(),
+ types: collect_paths_for_type(imp.inner_impl().for_.clone(), cache),
+ })
+ }
+ })
+ .collect::<Vec<_>>();
+
+ // Only create a JS file if we have impls to add to it. If the trait is
+ // documented locally, though, we always create the file to avoid dead
+ // links.
+ if implementors.is_empty() && !cache.paths.contains_key(&did) {
+ continue;
+ }
+
+ let implementors = format!(
+ r#"implementors["{}"] = {};"#,
+ krate.name(cx.tcx()),
+ serde_json::to_string(&implementors).unwrap()
+ );
+
+ let mut mydst = dst.clone();
+ for part in &remote_path[..remote_path.len() - 1] {
+ mydst.push(part.to_string());
+ }
+ cx.shared.ensure_dir(&mydst)?;
+ mydst.push(&format!("{}.{}.js", remote_item_type, remote_path[remote_path.len() - 1]));
+
+ let (mut all_implementors, _) =
+ try_err!(collect(&mydst, krate.name(cx.tcx()).as_str(), "implementors"), &mydst);
+ all_implementors.push(implementors);
+ // Sort the implementors by crate so the file will be generated
+ // identically even with rustdoc running in parallel.
+ all_implementors.sort();
+
+ let mut v = String::from("(function() {var implementors = {};\n");
+ for implementor in &all_implementors {
+ writeln!(v, "{}", *implementor).unwrap();
+ }
+ v.push_str(
+ "if (window.register_implementors) {\
+ window.register_implementors(implementors);\
+ } else {\
+ window.pending_implementors = implementors;\
+ }",
+ );
+ v.push_str("})()");
+ cx.shared.fs.write(mydst, v)?;
+ }
+ Ok(())
+}
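
As a side note on the resource-suffix handling above: `suffix_path` splices the suffix in before the first dot rather than before the extension, as its comment explains. Below is a small standalone sketch of that renaming; the suffix value is invented for illustration (the real value comes from the configured `--resource-suffix`).

    // Illustration of the renaming scheme used for versioned static files.
    fn suffixed(filename: &str, resource_suffix: &str) -> String {
        // Split on the *first* dot so multi-part extensions like `.min.css`
        // stay intact, mirroring the `split_once` call in `suffix_path`.
        let (base, ext) = filename.split_once('.').unwrap();
        format!("{}{}.{}", base, resource_suffix, ext)
    }

    fn main() {
        // Hypothetical suffix value.
        assert_eq!(suffixed("style.min.css", "-1.65.0"), "style-1.65.0.min.css");
        // Splitting on the extension instead would have produced
        // "style.min-1.65.0.css", which is not what we want.
    }
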
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
new file mode 100644
index 000000000..d0fd637ba
--- /dev/null
+++ b/src/librustdoc/html/sources.rs
@@ -0,0 +1,303 @@
+use crate::clean;
+use crate::docfs::PathError;
+use crate::error::Error;
+use crate::html::format::Buffer;
+use crate::html::highlight;
+use crate::html::layout;
+use crate::html::render::{Context, BASIC_KEYWORDS};
+use crate::visit::DocVisitor;
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::Session;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::FileName;
+
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Component, Path, PathBuf};
+use std::rc::Rc;
+
+pub(crate) fn render(cx: &mut Context<'_>, krate: &clean::Crate) -> Result<(), Error> {
+ info!("emitting source files");
+
+ let dst = cx.dst.join("src").join(krate.name(cx.tcx()).as_str());
+ cx.shared.ensure_dir(&dst)?;
+
+ let mut collector = SourceCollector { dst, cx, emitted_local_sources: FxHashSet::default() };
+ collector.visit_crate(krate);
+ Ok(())
+}
+
+pub(crate) fn collect_local_sources<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ src_root: &Path,
+ krate: &clean::Crate,
+) -> FxHashMap<PathBuf, String> {
+ let mut lsc = LocalSourcesCollector { tcx, local_sources: FxHashMap::default(), src_root };
+ lsc.visit_crate(krate);
+ lsc.local_sources
+}
+
+struct LocalSourcesCollector<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ local_sources: FxHashMap<PathBuf, String>,
+ src_root: &'a Path,
+}
+
+fn is_real_and_local(span: clean::Span, sess: &Session) -> bool {
+ span.cnum(sess) == LOCAL_CRATE && span.filename(sess).is_real()
+}
+
+impl LocalSourcesCollector<'_, '_> {
+ fn add_local_source(&mut self, item: &clean::Item) {
+ let sess = self.tcx.sess;
+ let span = item.span(self.tcx);
+ // skip all synthetic "files"
+ if !is_real_and_local(span, sess) {
+ return;
+ }
+ let filename = span.filename(sess);
+ let p = if let FileName::Real(file) = filename {
+ match file.into_local_path() {
+ Some(p) => p,
+ None => return,
+ }
+ } else {
+ return;
+ };
+ if self.local_sources.contains_key(&*p) {
+ // We've already emitted this source
+ return;
+ }
+
+ let mut href = String::new();
+ clean_path(self.src_root, &p, false, |component| {
+ href.push_str(&component.to_string_lossy());
+ href.push('/');
+ });
+
+ let mut src_fname = p.file_name().expect("source has no filename").to_os_string();
+ src_fname.push(".html");
+ href.push_str(&src_fname.to_string_lossy());
+ self.local_sources.insert(p, href);
+ }
+}
+
+impl DocVisitor for LocalSourcesCollector<'_, '_> {
+ fn visit_item(&mut self, item: &clean::Item) {
+ self.add_local_source(item);
+
+ self.visit_item_recur(item)
+ }
+}
+
+/// Helper struct to render all source code to HTML pages
+struct SourceCollector<'a, 'tcx> {
+ cx: &'a mut Context<'tcx>,
+
+ /// Root destination to place all HTML output into
+ dst: PathBuf,
+ emitted_local_sources: FxHashSet<PathBuf>,
+}
+
+impl DocVisitor for SourceCollector<'_, '_> {
+ fn visit_item(&mut self, item: &clean::Item) {
+ if !self.cx.include_sources {
+ return;
+ }
+
+ let tcx = self.cx.tcx();
+ let span = item.span(tcx);
+ let sess = tcx.sess;
+
+ // If we're not rendering sources, there's nothing to do.
+ // If we're including source files, and we haven't seen this file yet,
+ // then we need to render it out to the filesystem.
+ if is_real_and_local(span, sess) {
+ let filename = span.filename(sess);
+ let span = span.inner();
+ let pos = sess.source_map().lookup_source_file(span.lo());
+ let file_span = span.with_lo(pos.start_pos).with_hi(pos.end_pos);
+ // If it turns out that we couldn't read this file, then we probably
+ // can't read any of the files (generating html output from json or
+ // something like that), so just don't include sources for the
+ // entire crate. The other option is maintaining this mapping on a
+ // per-file basis, but that's probably not worth it...
+ self.cx.include_sources = match self.emit_source(&filename, file_span) {
+ Ok(()) => true,
+ Err(e) => {
+ self.cx.shared.tcx.sess.span_err(
+ span,
+ &format!(
+ "failed to render source code for `{}`: {}",
+ filename.prefer_local(),
+ e,
+ ),
+ );
+ false
+ }
+ };
+ }
+
+ self.visit_item_recur(item)
+ }
+}
+
+impl SourceCollector<'_, '_> {
+ /// Renders the given filename into its corresponding HTML source file.
+ fn emit_source(
+ &mut self,
+ filename: &FileName,
+ file_span: rustc_span::Span,
+ ) -> Result<(), Error> {
+ let p = match *filename {
+ FileName::Real(ref file) => {
+ if let Some(local_path) = file.local_path() {
+ local_path.to_path_buf()
+ } else {
+ unreachable!("only the current crate should have sources emitted");
+ }
+ }
+ _ => return Ok(()),
+ };
+ if self.emitted_local_sources.contains(&*p) {
+ // We've already emitted this source
+ return Ok(());
+ }
+
+ let contents = match fs::read_to_string(&p) {
+ Ok(contents) => contents,
+ Err(e) => {
+ return Err(Error::new(e, &p));
+ }
+ };
+
+ // Remove the UTF-8 BOM, if any
+ let contents = contents.strip_prefix('\u{feff}').unwrap_or(&contents);
+
+ let shared = Rc::clone(&self.cx.shared);
+ // Create the intermediate directories
+ let mut cur = self.dst.clone();
+ let mut root_path = String::from("../../");
+ clean_path(&shared.src_root, &p, false, |component| {
+ cur.push(component);
+ root_path.push_str("../");
+ });
+
+ shared.ensure_dir(&cur)?;
+
+ let src_fname = p.file_name().expect("source has no filename").to_os_string();
+ let mut fname = src_fname.clone();
+ fname.push(".html");
+ cur.push(&fname);
+
+ let title = format!("{} - source", src_fname.to_string_lossy());
+ let desc = format!("Source of the Rust file `{}`.", filename.prefer_remapped());
+ let page = layout::Page {
+ title: &title,
+ css_class: "source",
+ root_path: &root_path,
+ static_root_path: shared.static_root_path.as_deref(),
+ description: &desc,
+ keywords: BASIC_KEYWORDS,
+ resource_suffix: &shared.resource_suffix,
+ };
+ let v = layout::render(
+ &shared.layout,
+ &page,
+ "",
+ |buf: &mut _| {
+ let cx = &mut self.cx;
+ print_src(
+ buf,
+ contents,
+ cx.shared.edition(),
+ file_span,
+ cx,
+ &root_path,
+ None,
+ SourceContext::Standalone,
+ )
+ },
+ &shared.style_files,
+ );
+ shared.fs.write(cur, v)?;
+ self.emitted_local_sources.insert(p);
+ Ok(())
+ }
+}
+
+/// Takes a path to a source file and cleans it. This canonicalizes
+/// things like ".." to components which preserve the "top down" hierarchy of a
+/// static HTML tree. Each component in the cleaned path will be passed as an
+/// argument to `f`. The very last component of the path (i.e. the file name) will
+/// be passed to `f` if `keep_filename` is true, and ignored otherwise.
+pub(crate) fn clean_path<F>(src_root: &Path, p: &Path, keep_filename: bool, mut f: F)
+where
+ F: FnMut(&OsStr),
+{
+ // make it relative, if possible
+ let p = p.strip_prefix(src_root).unwrap_or(p);
+
+ let mut iter = p.components().peekable();
+
+ while let Some(c) = iter.next() {
+ if !keep_filename && iter.peek().is_none() {
+ break;
+ }
+
+ match c {
+ Component::ParentDir => f("up".as_ref()),
+ Component::Normal(c) => f(c),
+ _ => continue,
+ }
+ }
+}
+
+pub(crate) enum SourceContext {
+ Standalone,
+ Embedded { offset: usize },
+}
+
+/// Renders the source code of a file. This will do things like
+/// adding line numbers to the left-hand side.
+pub(crate) fn print_src(
+ buf: &mut Buffer,
+ s: &str,
+ edition: Edition,
+ file_span: rustc_span::Span,
+ context: &Context<'_>,
+ root_path: &str,
+ decoration_info: Option<highlight::DecorationInfo>,
+ source_context: SourceContext,
+) {
+ let lines = s.lines().count();
+ let mut line_numbers = Buffer::empty_from(buf);
+ line_numbers.write_str("<pre class=\"line-numbers\">");
+ match source_context {
+ SourceContext::Standalone => {
+ for line in 1..=lines {
+ writeln!(line_numbers, "<span id=\"{0}\">{0}</span>", line)
+ }
+ }
+ SourceContext::Embedded { offset } => {
+ for line in 1..=lines {
+ writeln!(line_numbers, "<span>{0}</span>", line + offset)
+ }
+ }
+ }
+ line_numbers.write_str("</pre>");
+ highlight::render_with_highlighting(
+ s,
+ buf,
+ None,
+ None,
+ None,
+ edition,
+ Some(line_numbers),
+ Some(highlight::HrefContext { context, file_span, root_path }),
+ decoration_info,
+ );
+}
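
A standalone sketch of the path cleaning described above (the paths are invented for illustration): parent-directory components become a literal `up` directory, and the trailing file name is dropped when `keep_filename` is false.

    use std::ffi::OsStr;
    use std::path::{Component, Path};

    // Simplified restatement of the `clean_path` idea; the real function also
    // strips `src_root` as a prefix before iterating over the components.
    fn clean_components(p: &Path, keep_filename: bool, f: &mut dyn FnMut(&OsStr)) {
        let mut iter = p.components().peekable();
        while let Some(c) = iter.next() {
            if !keep_filename && iter.peek().is_none() {
                break;
            }
            match c {
                Component::ParentDir => f("up".as_ref()),
                Component::Normal(c) => f(c),
                _ => continue,
            }
        }
    }

    fn main() {
        let mut parts = Vec::new();
        clean_components(Path::new("../library/core/src/lib.rs"), false, &mut |c| {
            parts.push(c.to_string_lossy().into_owned());
        });
        // "../" became "up" and the file name "lib.rs" was dropped.
        assert_eq!(parts, ["up", "library", "core", "src"]);
    }
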
diff --git a/src/librustdoc/html/static/.eslintrc.js b/src/librustdoc/html/static/.eslintrc.js
new file mode 100644
index 000000000..fcd925bb3
--- /dev/null
+++ b/src/librustdoc/html/static/.eslintrc.js
@@ -0,0 +1,96 @@
+module.exports = {
+ "env": {
+ "browser": true,
+ "es6": true
+ },
+ "extends": "eslint:recommended",
+ "parserOptions": {
+ "ecmaVersion": 2015,
+ "sourceType": "module"
+ },
+ "rules": {
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "semi": [
+ "error",
+ "always"
+ ],
+ "quotes": [
+ "error",
+ "double"
+ ],
+ "linebreak-style": [
+ "error",
+ "unix"
+ ],
+ "no-trailing-spaces": "error",
+ "no-var": ["error"],
+ "prefer-const": ["error"],
+ "prefer-arrow-callback": ["error"],
+ "brace-style": [
+ "error",
+ "1tbs",
+ { "allowSingleLine": false }
+ ],
+ "keyword-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "arrow-spacing": [
+ "error",
+ { "before": true, "after": true }
+ ],
+ "key-spacing": [
+ "error",
+ { "beforeColon": false, "afterColon": true, "mode": "strict" }
+ ],
+ "func-call-spacing": ["error", "never"],
+ "space-infix-ops": "error",
+ "space-before-function-paren": ["error", "never"],
+ "space-before-blocks": "error",
+ "comma-dangle": ["error", "always-multiline"],
+ "comma-style": ["error", "last"],
+ "max-len": ["error", { "code": 100, "tabWidth": 4 }],
+ "eol-last": ["error", "always"],
+ "arrow-parens": ["error", "as-needed"],
+ "no-unused-vars": [
+ "error",
+ {
+ "argsIgnorePattern": "^_",
+ "varsIgnorePattern": "^_"
+ }
+ ],
+ "eqeqeq": "error",
+ "no-const-assign": "error",
+ "no-debugger": "error",
+ "no-dupe-args": "error",
+ "no-dupe-else-if": "error",
+ "no-dupe-keys": "error",
+ "no-duplicate-case": "error",
+ "no-ex-assign": "error",
+ "no-fallthrough": "error",
+ "no-invalid-regexp": "error",
+ "no-import-assign": "error",
+ "no-self-compare": "error",
+ "no-template-curly-in-string": "error",
+ "block-scoped-var": "error",
+ "guard-for-in": "error",
+ "no-alert": "error",
+ "no-confusing-arrow": "error",
+ "no-div-regex": "error",
+ "no-floating-decimal": "error",
+ "no-implicit-globals": "error",
+ "no-implied-eval": "error",
+ "no-label-var": "error",
+ "no-lonely-if": "error",
+ "no-mixed-operators": "error",
+ "no-multi-assign": "error",
+ "no-return-assign": "error",
+ "no-script-url": "error",
+ "no-sequences": "error",
+ "no-throw-literal": "error",
+ "no-div-regex": "error",
+ }
+};
diff --git a/src/librustdoc/html/static/COPYRIGHT.txt b/src/librustdoc/html/static/COPYRIGHT.txt
new file mode 100644
index 000000000..34e48134c
--- /dev/null
+++ b/src/librustdoc/html/static/COPYRIGHT.txt
@@ -0,0 +1,46 @@
+These documentation pages include resources by third parties. This copyright
+file applies only to those resources. The following third party resources are
+included, and carry their own copyright notices and license terms:
+
+* Fira Sans (FiraSans-Regular.woff2, FiraSans-Medium.woff2):
+
+ Copyright (c) 2014, Mozilla Foundation https://mozilla.org/
+ with Reserved Font Name Fira Sans.
+
+ Copyright (c) 2014, Telefonica S.A.
+
+ Licensed under the SIL Open Font License, Version 1.1.
+ See FiraSans-LICENSE.txt.
+
+* rustdoc.css, main.js, and playpen.js:
+
+ Copyright 2015 The Rust Developers.
+ Licensed under the Apache License, Version 2.0 (see LICENSE-APACHE.txt) or
+ the MIT license (LICENSE-MIT.txt) at your option.
+
+* normalize.css:
+
+ Copyright (c) Nicolas Gallagher and Jonathan Neal.
+ Licensed under the MIT license (see LICENSE-MIT.txt).
+
+* Source Code Pro (SourceCodePro-Regular.ttf.woff2,
+ SourceCodePro-Semibold.ttf.woff2, SourceCodePro-It.ttf.woff2):
+
+ Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/),
+ with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark
+ of Adobe Systems Incorporated in the United States and/or other countries.
+
+ Licensed under the SIL Open Font License, Version 1.1.
+ See SourceCodePro-LICENSE.txt.
+
+* Source Serif 4 (SourceSerif4-Regular.ttf.woff2, SourceSerif4-Bold.ttf.woff2,
+ SourceSerif4-It.ttf.woff2):
+
+ Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name
+ 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United
+ States and/or other countries.
+
+ Licensed under the SIL Open Font License, Version 1.1.
+ See SourceSerif4-LICENSE.md.
+
+This copyright file is intended to be distributed with rustdoc output.
diff --git a/src/librustdoc/html/static/LICENSE-APACHE.txt b/src/librustdoc/html/static/LICENSE-APACHE.txt
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/src/librustdoc/html/static/LICENSE-APACHE.txt
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/librustdoc/html/static/LICENSE-MIT.txt b/src/librustdoc/html/static/LICENSE-MIT.txt
new file mode 100644
index 000000000..31aa79387
--- /dev/null
+++ b/src/librustdoc/html/static/LICENSE-MIT.txt
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/librustdoc/html/static/css/normalize.css b/src/librustdoc/html/static/css/normalize.css
new file mode 100644
index 000000000..fdb8a8c65
--- /dev/null
+++ b/src/librustdoc/html/static/css/normalize.css
@@ -0,0 +1,2 @@
+/* ignore-tidy-linelength */
+/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:0.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace, monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace, monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-0.25em}sup{top:-0.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type="button"],[type="reset"],[type="submit"],button{-webkit-appearance:button}[type="button"]::-moz-focus-inner,[type="reset"]::-moz-focus-inner,[type="submit"]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type="button"]:-moz-focusring,[type="reset"]:-moz-focusring,[type="submit"]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:0.35em 0.75em 0.625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type="checkbox"],[type="radio"]{box-sizing:border-box;padding:0}[type="number"]::-webkit-inner-spin-button,[type="number"]::-webkit-outer-spin-button{height:auto}[type="search"]{-webkit-appearance:textfield;outline-offset:-2px}[type="search"]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}template{display:none}[hidden]{display:none}
diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css
new file mode 100644
index 000000000..0a19a99ab
--- /dev/null
+++ b/src/librustdoc/html/static/css/noscript.css
@@ -0,0 +1,20 @@
+/*
+This whole CSS file is only used when rustdoc is rendered with JavaScript disabled. Since a lot
+of content is hidden by default (depending on the settings too), we have to override some of the
+rules.
+*/
+
+#main-content .attributes {
+ /* Since there is no toggle (the "[-]") when JS is disabled, no need for this margin either. */
+ margin-left: 0 !important;
+}
+
+#copy-path {
+ /* It requires JS to work so no need to display it in this case. */
+ display: none;
+}
+
+.sub {
+ /* The search bar and related controls don't work without JS */
+ display: none;
+}
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
new file mode 100644
index 000000000..83fe14550
--- /dev/null
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -0,0 +1,2335 @@
+/* See FiraSans-LICENSE.txt for the Fira Sans license. */
+@font-face {
+ font-family: 'Fira Sans';
+ font-style: normal;
+ font-weight: 400;
+ src: local('Fira Sans'),
+ url("FiraSans-Regular.woff2") format("woff2");
+ font-display: swap;
+}
+@font-face {
+ font-family: 'Fira Sans';
+ font-style: normal;
+ font-weight: 500;
+ src: local('Fira Sans Medium'),
+ url("FiraSans-Medium.woff2") format("woff2");
+ font-display: swap;
+}
+
+/* See SourceSerif4-LICENSE.md for the Source Serif 4 license. */
+@font-face {
+ font-family: 'Source Serif 4';
+ font-style: normal;
+ font-weight: 400;
+ src: local('Source Serif 4'),
+ url("SourceSerif4-Regular.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+@font-face {
+ font-family: 'Source Serif 4';
+ font-style: italic;
+ font-weight: 400;
+ src: local('Source Serif 4 Italic'),
+ url("SourceSerif4-It.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+@font-face {
+ font-family: 'Source Serif 4';
+ font-style: normal;
+ font-weight: 700;
+ src: local('Source Serif 4 Bold'),
+ url("SourceSerif4-Bold.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+
+/* See SourceCodePro-LICENSE.txt for the Source Code Pro license. */
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 400;
+ /* Avoid using locally installed font because bad versions are in circulation:
+ * see https://github.com/rust-lang/rust/issues/24355 */
+ src: url("SourceCodePro-Regular.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: italic;
+ font-weight: 400;
+ src: url("SourceCodePro-It.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 600;
+ src: url("SourceCodePro-Semibold.ttf.woff2") format("woff2");
+ font-display: swap;
+}
+
+/* Avoid using legacy CJK serif fonts in Windows like Batang. */
+@font-face {
+ font-family: 'NanumBarunGothic';
+ src: url("NanumBarunGothic.ttf.woff2") format("woff2");
+ font-display: swap;
+ unicode-range: U+AC00-D7AF, U+1100-11FF, U+3130-318F, U+A960-A97F, U+D7B0-D7FF;
+}
+
+* {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+}
+
+/* This part handles choosing the "default" theme based on the system preference. */
+html {
+ content: "";
+}
+@media (prefers-color-scheme: light) {
+ html {
+ content: "light";
+ }
+}
+@media (prefers-color-scheme: dark) {
+ html {
+ content: "dark";
+ }
+}
+
+/* General structure and fonts */
+
+body {
+ /* Line spacing at least 1.5 per Web Content Accessibility Guidelines
+ https://www.w3.org/WAI/WCAG21/Understanding/visual-presentation.html */
+ font: 1rem/1.5 "Source Serif 4", NanumBarunGothic, serif;
+ margin: 0;
+ position: relative;
+ /* We use overflow-wrap: break-word for Safari, which doesn't recognize
+ `anywhere`: https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-wrap */
+ overflow-wrap: break-word;
+ /* Then override it with `anywhere`, which is required to make non-Safari browsers break
+ more aggressively when we want them to. */
+ overflow-wrap: anywhere;
+
+ -webkit-font-feature-settings: "kern", "liga";
+ -moz-font-feature-settings: "kern", "liga";
+ font-feature-settings: "kern", "liga";
+
+ background-color: var(--main-background-color);
+ color: var(--main-color);
+}
+
+h1 {
+ font-size: 1.5rem; /* 24px */
+}
+h2 {
+ font-size: 1.375rem; /* 22px */
+}
+h3 {
+ font-size: 1.25rem; /* 20px */
+}
+h1, h2, h3, h4, h5, h6 {
+ font-weight: 500;
+}
+h1, h2, h3, h4 {
+ margin: 20px 0 15px 0;
+ padding-bottom: 6px;
+}
+.docblock h3, .docblock h4, h5, h6 {
+ margin: 15px 0 5px 0;
+}
+h1.fqn {
+ margin: 0;
+ padding: 0;
+ border-bottom-color: var(--headings-border-bottom-color);
+}
+h2, h3, h4 {
+ border-bottom-color: var(--headings-border-bottom-color);
+}
+.main-heading {
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: space-between;
+ padding-bottom: 6px;
+ margin-bottom: 15px;
+}
+.main-heading a:hover {
+ text-decoration: underline;
+}
+#toggle-all-docs {
+ text-decoration: none;
+}
+/* The only headings that get underlines are:
+ Markdown-generated headings within the top-doc
+ Rustdoc-generated h2 section headings (e.g. "Implementations", "Required Methods", etc)
+ Underlines elsewhere in the documentation break up visual flow and tend to invert
+ section hierarchies. */
+h2,
+.top-doc .docblock > h3,
+.top-doc .docblock > h4 {
+ border-bottom: 1px solid var(--headings-border-bottom-color);
+}
+h3.code-header {
+ font-size: 1.125rem; /* 18px */
+}
+h4.code-header {
+ font-size: 1rem;
+}
+.code-header {
+ font-weight: 600;
+ border-bottom-style: none;
+ margin: 0;
+ padding: 0;
+ margin-top: 0.6em;
+ margin-bottom: 0.4em;
+}
+.impl,
+.impl-items .method,
+.methods .method,
+.impl-items .type,
+.methods .type,
+.impl-items .associatedconstant,
+.methods .associatedconstant,
+.impl-items .associatedtype,
+.methods .associatedtype {
+ flex-basis: 100%;
+ font-weight: 600;
+ position: relative;
+}
+
+div.impl-items > div {
+ padding-left: 0;
+}
+
+h1, h2, h3, h4, h5, h6,
+.sidebar,
+.mobile-topbar,
+a.source,
+.search-input,
+.search-results .result-name,
+.content table td:first-child > a,
+.item-left > a,
+.out-of-band,
+span.since,
+#source-sidebar, #sidebar-toggle,
+details.rustdoc-toggle > summary::before,
+div.impl-items > div:not(.docblock):not(.item-info),
+.content ul.crate a.crate,
+a.srclink,
+#main-content > .since,
+#help-button > button,
+details.rustdoc-toggle.top-doc > summary,
+details.rustdoc-toggle.top-doc > summary::before,
+details.rustdoc-toggle.non-exhaustive > summary,
+details.rustdoc-toggle.non-exhaustive > summary::before,
+.scraped-example-title,
+.more-examples-toggle summary, .more-examples-toggle .hide-more,
+.example-links a,
+/* This selector is for the items listed in the "all items" page. */
+#main-content > ul.docblock > li > a {
+ font-family: "Fira Sans", Arial, NanumBarunGothic, sans-serif;
+}
+
+h1, h2, h3, h4,
+a#toggle-all-docs,
+a.anchor,
+.small-section-header a,
+#source-sidebar a,
+pre.rust a,
+.sidebar h2 a,
+.sidebar h3 a,
+.mobile-topbar h2 a,
+.in-band a,
+.search-results a,
+.module-item .stab,
+.import-item .stab,
+.result-name .primitive > i, .result-name .keyword > i,
+.content .method .where,
+.content .fn .where,
+.content .where.fmt-newline {
+ color: var(--main-color);
+}
+
+ol, ul {
+ padding-left: 24px;
+}
+ul ul, ol ul, ul ol, ol ol {
+ margin-bottom: .625em;
+}
+
+p {
+ /* Paragraph spacing at least 1.5 times line spacing per Web Content Accessibility Guidelines.
+ Line-height is 1.5rem, so line spacing is .5rem; .75em is 1.5 times that.
+ https://www.w3.org/WAI/WCAG21/Understanding/visual-presentation.html */
+ margin: 0 0 .75em 0;
+}
+
+summary {
+ outline: none;
+}
+
+/* Fix some style changes due to normalize.css 8 */
+
+td,
+th {
+ padding: 0;
+}
+
+table {
+ border-collapse: collapse;
+}
+
+button,
+input,
+optgroup,
+select,
+textarea {
+ color: inherit;
+ font: inherit;
+ margin: 0;
+}
+
+button {
+ /* Buttons on Safari have different default padding than other platforms. Make them the same. */
+ padding: 1px 6px;
+}
+
+/* end tweaks for normalize.css 8 */
+
+.rustdoc {
+ display: flex;
+ flex-direction: row;
+ flex-wrap: nowrap;
+}
+
+main {
+ position: relative;
+ flex-grow: 1;
+ padding: 10px 15px 40px 45px;
+ min-width: 0;
+}
+
+.source main {
+ padding: 15px;
+}
+
+.width-limiter {
+ max-width: 960px;
+ margin-right: auto;
+}
+
+.source .width-limiter {
+ max-width: unset;
+}
+
+details:not(.rustdoc-toggle) summary {
+ margin-bottom: .6em;
+}
+
+code, pre, a.test-arrow, .code-header {
+ font-family: "Source Code Pro", monospace;
+}
+.docblock code, .docblock-short code {
+ border-radius: 3px;
+ padding: 0 0.125em;
+}
+.docblock pre code, .docblock-short pre code {
+ padding: 0;
+}
+pre {
+ padding: 14px;
+}
+.docblock.item-decl {
+ margin-left: 0;
+}
+.item-decl pre {
+ overflow-x: auto;
+}
+
+.source .content pre {
+ padding: 20px;
+}
+
+img {
+ max-width: 100%;
+}
+
+li {
+ position: relative;
+}
+
+.source .content {
+ max-width: none;
+ overflow: visible;
+ margin-left: 0px;
+}
+
+nav.sub {
+ position: relative;
+ font-size: 1rem;
+}
+
+.sub-container {
+ display: flex;
+ flex-direction: row;
+ flex-wrap: nowrap;
+}
+
+.sub-logo-container {
+ display: none;
+ margin-right: 20px;
+}
+
+.source .sub-logo-container {
+ display: block;
+}
+
+.source .sub-logo-container > img {
+ height: 60px;
+ width: 60px;
+ object-fit: contain;
+}
+
+.sidebar, .mobile-topbar, .sidebar-menu-toggle {
+ background-color: var(--sidebar-background-color);
+}
+
+.sidebar {
+ font-size: 0.875rem;
+ width: 250px;
+ min-width: 200px;
+ overflow-y: scroll;
+ position: sticky;
+ height: 100vh;
+ top: 0;
+ left: 0;
+}
+
+.sidebar-elems,
+.sidebar > .location {
+ padding-left: 24px;
+}
+
+.sidebar .location {
+ overflow-wrap: anywhere;
+}
+
+.rustdoc.source .sidebar {
+ width: 50px;
+ min-width: 0px;
+ max-width: 300px;
+ flex-grow: 0;
+ flex-shrink: 0;
+ flex-basis: auto;
+ border-right: 1px solid;
+ overflow-x: hidden;
+ /* The sidebar is hidden by default */
+ overflow-y: hidden;
+}
+
+.rustdoc.source .sidebar .sidebar-logo {
+ display: none;
+}
+
+.source .sidebar, #sidebar-toggle, #source-sidebar {
+ background-color: var(--sidebar-background-color);
+}
+
+#sidebar-toggle > button:hover, #sidebar-toggle > button:focus {
+ background-color: var(--sidebar-background-color-hover);
+}
+
+.source .sidebar > *:not(#sidebar-toggle) {
+ opacity: 0;
+ visibility: hidden;
+}
+
+.source-sidebar-expanded .source .sidebar {
+ overflow-y: auto;
+}
+
+.source-sidebar-expanded .source .sidebar > *:not(#sidebar-toggle) {
+ opacity: 1;
+ visibility: visible;
+}
+
+#all-types {
+ margin-top: 1em;
+}
+
+/* Improve the scrollbar display on Firefox */
+* {
+ scrollbar-width: initial;
+ scrollbar-color: var(--scrollbar-color);
+}
+.sidebar {
+ scrollbar-width: thin;
+ scrollbar-color: var(--scrollbar-color);
+}
+
+/* Improve the scrollbar display on WebKit-based browsers */
+::-webkit-scrollbar {
+ width: 12px;
+}
+.sidebar::-webkit-scrollbar {
+ width: 8px;
+}
+::-webkit-scrollbar-track {
+ -webkit-box-shadow: inset 0;
+ background-color: var(--scrollbar-track-background-color);
+}
+.sidebar::-webkit-scrollbar-track {
+ background-color: var(--scrollbar-track-background-color);
+}
+::-webkit-scrollbar-thumb, .sidebar::-webkit-scrollbar-thumb {
+ background-color: var(--scrollbar-thumb-background-color);
+}
+
+/* Everything else */
+
+.hidden {
+ display: none !important;
+}
+
+.sidebar .logo-container {
+ display: flex;
+ margin-top: 10px;
+ margin-bottom: 10px;
+ justify-content: center;
+}
+
+.version {
+ overflow-wrap: break-word;
+}
+
+.logo-container > img {
+ height: 100px;
+ width: 100px;
+}
+
+.location:empty {
+ border: none;
+}
+
+.location a:first-of-type {
+ font-weight: 500;
+}
+
+.block {
+ padding: 0;
+}
+.block ul, .block li {
+ padding: 0;
+ margin: 0;
+ list-style: none;
+}
+
+.block a,
+h2.location a {
+ display: block;
+ padding: 0.25rem;
+ margin-left: -0.25rem;
+
+ text-overflow: ellipsis;
+ overflow: hidden;
+}
+
+.sidebar h2 {
+ border-bottom: none;
+ font-weight: 500;
+ padding: 0;
+ margin: 0;
+ margin-top: 0.7rem;
+ margin-bottom: 0.7rem;
+}
+
+.sidebar h3 {
+ font-size: 1.125rem; /* 18px */
+ font-weight: 500;
+ padding: 0;
+ margin: 0;
+}
+
+.sidebar-elems .block {
+ margin-bottom: 2em;
+}
+
+.sidebar-elems .block li a {
+ white-space: nowrap;
+}
+
+.mobile-topbar {
+ display: none;
+}
+
+.source .content pre.rust {
+ white-space: pre;
+ overflow: auto;
+ padding-left: 0;
+}
+
+.rustdoc .example-wrap {
+ display: inline-flex;
+ margin-bottom: 10px;
+}
+
+.example-wrap {
+ position: relative;
+ width: 100%;
+}
+
+.example-wrap > pre.line-number {
+ overflow: initial;
+ border: 1px solid;
+ padding: 13px 8px;
+ text-align: right;
+ border-top-left-radius: 5px;
+ border-bottom-left-radius: 5px;
+}
+
+.example-wrap > pre.rust a:hover {
+ text-decoration: underline;
+}
+
+.line-numbers {
+ text-align: right;
+}
+.rustdoc:not(.source) .example-wrap > pre:not(.line-number) {
+ width: 100%;
+ overflow-x: auto;
+}
+
+.rustdoc:not(.source) .example-wrap > pre.line-numbers {
+ width: auto;
+ overflow-x: visible;
+}
+
+.rustdoc .example-wrap > pre {
+ margin: 0;
+}
+
+#search {
+ position: relative;
+}
+
+.search-loading {
+ text-align: center;
+}
+
+#results > table {
+ width: 100%;
+ table-layout: fixed;
+}
+
+.content > .example-wrap pre.line-numbers {
+ position: relative;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+.line-numbers span {
+ cursor: pointer;
+}
+
+.docblock-short {
+ overflow-wrap: break-word;
+ overflow-wrap: anywhere;
+}
+.docblock-short p {
+ display: inline;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ margin: 0;
+}
+/* Wrap non-pre code blocks (`text`) but not (```text```). */
+.docblock > :not(pre) > code,
+.docblock-short > :not(pre) > code {
+ white-space: pre-wrap;
+}
+
+.top-doc .docblock h2 { font-size: 1.375rem; }
+.top-doc .docblock h3 { font-size: 1.25rem; }
+.top-doc .docblock h4,
+.top-doc .docblock h5 {
+ font-size: 1.125rem;
+}
+.top-doc .docblock h6 {
+ font-size: 1rem;
+}
+
+.docblock h5 { font-size: 1rem; }
+.docblock h6 { font-size: 0.875rem; }
+.docblock h1, .docblock h2, .docblock h3, .docblock h4, .docblock h5, .docblock h6 {
+ border-bottom-color: var(--headings-border-bottom-color);
+}
+
+.docblock {
+ margin-left: 24px;
+ position: relative;
+}
+
+.docblock > :not(.information):not(.more-examples-toggle) {
+ max-width: 100%;
+ overflow-x: auto;
+}
+
+.content .out-of-band {
+ flex-grow: 0;
+ font-size: 1.125rem;
+ font-weight: normal;
+ float: right;
+}
+
+.method > .code-header, .trait-impl > .code-header {
+ max-width: calc(100% - 41px);
+ display: block;
+}
+
+.content .in-band {
+ flex-grow: 1;
+ margin: 0px;
+ padding: 0px;
+ overflow-wrap: break-word;
+ overflow-wrap: anywhere;
+}
+
+.in-band > code, .in-band > .code-header {
+ display: inline-block;
+}
+
+.docblock code, .docblock-short code,
+pre, .rustdoc.source .example-wrap {
+ background-color: var(--code-block-background-color);
+}
+
+#main-content {
+ position: relative;
+}
+#main-content > .since {
+ top: inherit;
+}
+
+.content table:not(.table-display) {
+ border-spacing: 0 5px;
+}
+.content td { vertical-align: top; }
+.content td:first-child { padding-right: 20px; }
+.content td p:first-child { margin-top: 0; }
+.content td h1, .content td h2 { margin-left: 0; font-size: 1.125rem; }
+.content tr:first-child td { border-top: 0; }
+
+.docblock table {
+ margin: .5em 0;
+ width: calc(100% - 2px);
+ overflow-x: auto;
+ display: block;
+}
+
+.docblock table td {
+ padding: .5em;
+ border: 1px dashed;
+}
+
+.docblock table th {
+ padding: .5em;
+ text-align: left;
+ border: 1px solid;
+}
+
+.fields + table {
+ margin-bottom: 1em;
+}
+
+.content .item-list {
+ list-style-type: none;
+ padding: 0;
+}
+
+.content .multi-column {
+ -moz-column-count: 5;
+ -moz-column-gap: 2.5em;
+ -webkit-column-count: 5;
+ -webkit-column-gap: 2.5em;
+ column-count: 5;
+ column-gap: 2.5em;
+}
+.content .multi-column li { width: 100%; display: inline-block; }
+
+.content > .methods > .method {
+ font-size: 1rem;
+ position: relative;
+}
+/* Shift "where ..." part of method or fn definition down a line */
+.content .method .where,
+.content .fn .where,
+.content .where.fmt-newline {
+ display: block;
+ font-size: 0.875rem;
+}
+
+.content .methods > div:not(.notable-traits):not(.method) {
+ margin-left: 40px;
+ margin-bottom: 15px;
+}
+
+.content .docblock > .impl-items {
+ margin-left: 20px;
+ margin-top: -34px;
+}
+.content .docblock >.impl-items .table-display {
+ margin: 0;
+}
+.content .docblock >.impl-items table td {
+ padding: 0;
+}
+.content .docblock > .impl-items .table-display, .impl-items table td {
+ border: none;
+}
+
+.item-info {
+ display: block;
+}
+
+.content .item-info code {
+ font-size: 0.875rem;
+}
+
+.content .item-info {
+ position: relative;
+ margin-left: 24px;
+}
+
+.sub-variant > div > .item-info {
+ margin-top: initial;
+}
+
+.content .impl-items .docblock, .content .impl-items .item-info {
+ margin-bottom: .6em;
+}
+
+.content .impl-items > .item-info {
+ margin-left: 40px;
+}
+
+.methods > .item-info, .content .impl-items > .item-info {
+ margin-top: -8px;
+}
+
+.impl-items {
+ flex-basis: 100%;
+}
+
+#main-content > .item-info {
+ margin-top: 0;
+ margin-left: 0;
+}
+
+nav.sub {
+ flex-grow: 1;
+ margin-bottom: 25px;
+}
+.source nav.sub {
+ margin-left: 32px;
+}
+nav.main {
+ padding: 20px 0;
+ text-align: center;
+}
+nav.main .current {
+ border-top: 1px solid;
+ border-bottom: 1px solid;
+}
+nav.main .separator {
+ border: 1px solid;
+ display: inline-block;
+ height: 23px;
+ margin: 0 20px;
+}
+nav.sum { text-align: right; }
+nav.sub form { display: inline; }
+
+a {
+ text-decoration: none;
+ background: transparent;
+}
+
+.small-section-header {
+ display: flex;
+ justify-content: space-between;
+ position: relative;
+}
+
+.small-section-header:hover > .anchor {
+ display: initial;
+}
+
+.in-band:hover > .anchor, .impl:hover > .anchor, .method.trait-impl:hover > .anchor,
+.type.trait-impl:hover > .anchor, .associatedconstant.trait-impl:hover > .anchor,
+.associatedtype.trait-impl:hover > .anchor {
+ display: inline-block;
+ position: absolute;
+}
+.anchor {
+ display: none;
+ position: absolute;
+ left: -0.5em;
+ background: none !important;
+}
+.anchor.field {
+ left: -5px;
+}
+.small-section-header > .anchor {
+ left: -15px;
+ padding-right: 8px;
+}
+h2.small-section-header > .anchor {
+ padding-right: 6px;
+}
+.anchor::before {
+ content: '§';
+}
+
+.docblock a:not(.srclink):not(.test-arrow):not(.scrape-help):hover,
+.docblock-short a:not(.srclink):not(.test-arrow):not(.scrape-help):hover, .item-info a {
+ text-decoration: underline;
+}
+
+.block a.current.crate { font-weight: 500; }
+
+/* In most contexts we use `overflow-wrap: anywhere` to ensure that we can wrap
+ as much as needed on mobile (see
+ src/test/rustdoc-gui/type-declaration-overflow.goml for an example of why
+ this matters). The `anywhere` value means:
+
+ "Soft wrap opportunities introduced by the word break are considered when
+ calculating min-content intrinsic sizes."
+
+ https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-wrap#values
+
+ For table layouts, that becomes a problem: the browser tries to make each
+ column as narrow as possible, and `overflow-wrap: anywhere` means it can do
+ so by breaking words - even if some other column could be shrunk without
+ breaking words! This shows up, for instance, in the `Structs` / `Modules` /
+ `Functions` (etcetera) sections of a module page, and when a docblock
+ contains a table.
+
+ So, for table layouts, override the default with break-word, which does
+ _not_ affect min-content intrinsic sizes.
+*/
+table,
+.item-table {
+ overflow-wrap: break-word;
+}
+
+.item-table {
+ display: table;
+}
+.item-row {
+ display: table-row;
+}
+.item-left, .item-right {
+ display: table-cell;
+}
+.item-left {
+ padding-right: 1.25rem;
+}
+
+.search-container {
+ position: relative;
+ display: flex;
+ height: 34px;
+}
+.search-container > * {
+ height: 100%;
+}
+.search-results-title {
+ display: inline;
+}
+#search-settings {
+ font-size: 1.5rem;
+ font-weight: 500;
+ margin-bottom: 20px;
+}
+#crate-search {
+ min-width: 115px;
+ margin-top: 5px;
+ padding-left: 0.15em;
+ padding-right: 23px;
+ border: 1px solid;
+ border-radius: 4px;
+ outline: none;
+ cursor: pointer;
+ -moz-appearance: none;
+ -webkit-appearance: none;
+ /* Removes the default arrow in Firefox */
+ background-repeat: no-repeat;
+ background-color: transparent;
+ background-size: 20px;
+ background-position: calc(100% - 1px) 56%;
+ background-image: /* AUTOREPLACE: */url("down-arrow.svg");
+ max-width: 100%;
+ text-overflow: ellipsis;
+}
+.search-container {
+ margin-top: 4px;
+}
+.search-input {
+ /* Override Normalize.css: it has a rule that sets
+ -webkit-appearance: textfield for search inputs. That
+ causes rounded corners and no border on iOS Safari. */
+ -webkit-appearance: none;
+ /* Override Normalize.css: we have margins and do
+ not want to overflow - the `-moz-` prefixed property is
+ needed until Firefox 29, which is too early to drop at this point */
+ -moz-box-sizing: border-box !important;
+ box-sizing: border-box !important;
+ outline: none;
+ border: 1px solid;
+ border-radius: 2px;
+ padding: 8px;
+ font-size: 1rem;
+ width: 100%;
+}
+
+.search-results {
+ display: none;
+ padding-bottom: 2em;
+}
+
+.search-results.active {
+ display: block;
+ /* prevent overhanging tabs from moving the first result */
+ clear: both;
+}
+
+.search-results .desc > span {
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ display: block;
+}
+
+.search-results > a {
+ display: block;
+ width: 100%;
+ /* A little margin ensures the browser's outlining of focused links has room to display. */
+ margin-left: 2px;
+ margin-right: 2px;
+ border-bottom: 1px solid #aaa3;
+}
+
+.search-results > a > div {
+ display: flex;
+ flex-flow: row wrap;
+}
+
+.search-results .result-name, .search-results div.desc, .search-results .result-description {
+ width: 50%;
+}
+.search-results .result-name {
+ padding-right: 1em;
+}
+
+.search-results .result-name > span {
+ display: inline-block;
+ margin: 0;
+ font-weight: normal;
+}
+
+.popover {
+ font-size: 1rem;
+ position: absolute;
+ right: 0;
+ z-index: 2;
+ display: block;
+ margin-top: 7px;
+ border-radius: 3px;
+ border: 1px solid;
+}
+
+/* This rule is to draw the little arrow connecting the settings menu to the gear icon. */
+.popover::before {
+ content: '';
+ position: absolute;
+ right: 11px;
+ border: solid;
+ border-width: 1px 1px 0 0;
+ display: inline-block;
+ padding: 4px;
+ transform: rotate(-45deg);
+ top: -5px;
+}
+
+.popover, .popover::before {
+ background-color: var(--main-background-color);
+ color: var(--main-color);
+}
+
+#help-button .popover {
+ max-width: 600px;
+}
+
+#help-button .popover::before {
+ right: 48px;
+}
+
+#help-button dt {
+ float: left;
+ clear: left;
+ display: block;
+ margin-right: 0.5rem;
+}
+#help-button span.top, #help-button span.bottom {
+ text-align: center;
+ display: block;
+ font-size: 1.125rem;
+}
+#help-button span.top {
+ margin: 10px 0;
+ border-bottom: 1px solid;
+ padding-bottom: 4px;
+ margin-bottom: 6px;
+}
+#help-button span.bottom {
+ clear: both;
+ border-top: 1px solid;
+}
+.side-by-side {
+ text-align: initial;
+}
+.side-by-side > div {
+ width: 50%;
+ float: left;
+ padding: 0 20px 20px 17px;
+}
+
+.item-info .stab {
+ width: fit-content;
+ /* This min-height is needed to unify the height of the stab elements because some of them
+ have emojis.
+ */
+ min-height: 36px;
+ display: flex;
+ align-items: center;
+ white-space: pre-wrap;
+}
+.stab {
+ padding: 3px;
+ margin-bottom: 5px;
+ font-size: 0.875rem;
+ font-weight: normal;
+}
+.stab p {
+ display: inline;
+ margin: 0;
+}
+
+.stab .emoji {
+ font-size: 1.25rem;
+}
+
+/* Black one-pixel outline around emoji shapes */
+.emoji {
+ text-shadow:
+ 1px 0 0 black,
+ -1px 0 0 black,
+ 0 1px 0 black,
+ 0 -1px 0 black;
+}
+
+.module-item .stab,
+.import-item .stab {
+ border-radius: 3px;
+ display: inline-block;
+ font-size: 0.875rem;
+ line-height: 1.2;
+ margin-bottom: 0;
+ margin-left: 0.3125em;
+ padding: 2px;
+ vertical-align: text-bottom;
+}
+
+.module-item.unstable,
+.import-item.unstable {
+ opacity: 0.65;
+}
+
+.since {
+ font-weight: normal;
+ font-size: initial;
+}
+
+.rightside {
+ padding-left: 12px;
+ padding-right: 2px;
+ position: initial;
+ float: right;
+}
+
+.impl-items .srclink, .impl .srclink, .methods .srclink {
+ /* Override header settings, otherwise it's too bold */
+ font-weight: normal;
+ font-size: 1rem;
+}
+
+.variants_table {
+ width: 100%;
+}
+
+.variants_table tbody tr td:first-child {
+ width: 1%; /* make the variant name as small as possible */
+}
+
+td.summary-column {
+ width: 100%;
+}
+
+.summary {
+ padding-right: 0px;
+}
+
+pre.rust .question-mark {
+ font-weight: bold;
+}
+
+a.test-arrow {
+ display: inline-block;
+ visibility: hidden;
+ position: absolute;
+ padding: 5px 10px 5px 10px;
+ border-radius: 5px;
+ font-size: 1.375rem;
+ top: 5px;
+ right: 5px;
+ z-index: 1;
+}
+.example-wrap:hover .test-arrow {
+ visibility: visible;
+}
+a.test-arrow:hover {
+ text-decoration: none;
+}
+
+.code-attribute {
+ font-weight: 300;
+}
+
+.item-spacer {
+ width: 100%;
+ height: 12px;
+}
+
+.out-of-band > span.since {
+ position: initial;
+ font-size: 1.25rem;
+}
+
+h3.variant {
+ font-weight: 600;
+ font-size: 1.125rem;
+ margin-bottom: 10px;
+ border-bottom: none;
+}
+
+.sub-variant h4 {
+ font-size: 1rem;
+ font-weight: 400;
+ border-bottom: none;
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+.sub-variant {
+ margin-left: 24px;
+ margin-bottom: 40px;
+}
+
+.sub-variant > .sub-variant-field {
+ margin-left: 24px;
+}
+
+.toggle-label {
+ display: inline-block;
+ margin-left: 4px;
+ margin-top: 3px;
+}
+
+:target > code, :target > .code-header {
+ opacity: 1;
+}
+
+:target {
+ padding-right: 3px;
+}
+
+.information {
+ position: absolute;
+ left: -25px;
+ margin-top: 7px;
+ z-index: 1;
+}
+
+.tooltip {
+ position: relative;
+ display: inline-block;
+ cursor: pointer;
+}
+
+.tooltip::after {
+ display: none;
+ text-align: center;
+ padding: 5px 3px 3px 3px;
+ border-radius: 6px;
+ margin-left: 5px;
+ font-size: 1rem;
+}
+
+.tooltip.ignore::after {
+ content: "This example is not tested";
+}
+.tooltip.compile_fail::after {
+ content: "This example deliberately fails to compile";
+}
+.tooltip.should_panic::after {
+ content: "This example panics";
+}
+.tooltip.edition::after {
+ content: "This code runs with edition " attr(data-edition);
+}
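+
+/* Rough sketch of the markup these tooltip rules assume, based only on the
+   selectors and the attr(data-edition) use above (the real markup is emitted
+   by the HTML renderer and may differ; "2018" is just an illustration):
+   <div class="information">
+       <div class="tooltip edition" data-edition="2018">…</div>
+   </div>
+*/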
+
+.tooltip::before {
+ content: " ";
+ position: absolute;
+ top: 50%;
+ left: 16px;
+ margin-top: -5px;
+ border-width: 5px;
+ border-style: solid;
+ display: none;
+}
+
+.tooltip:hover::before, .tooltip:hover::after {
+ display: inline;
+}
+
+.tooltip.compile_fail, .tooltip.should_panic, .tooltip.ignore {
+ font-weight: bold;
+ font-size: 1.25rem;
+}
+
+.notable-traits-tooltip {
+ display: inline-block;
+ cursor: pointer;
+}
+
+.notable-traits:hover .notable-traits-tooltiptext,
+.notable-traits .notable-traits-tooltiptext.force-tooltip {
+ display: inline-block;
+}
+
+.notable-traits .notable-traits-tooltiptext {
+ display: none;
+ padding: 5px 3px 3px 3px;
+ border-radius: 6px;
+ margin-left: 5px;
+ z-index: 10;
+ font-size: 1rem;
+ cursor: default;
+ position: absolute;
+ border: 1px solid;
+}
+
+.notable-traits-tooltip::after {
+ /* The margin on the tooltip does not capture hover events;
+ this extends the hover area enough that hover is not
+ lost when moving the mouse to the tooltip */
+ content: "\00a0\00a0\00a0";
+}
+
+.notable-traits .notable, .notable-traits .docblock {
+ margin: 0;
+}
+
+.notable-traits .notable {
+ margin: 0;
+ margin-bottom: 13px;
+ font-size: 1.1875rem;
+ font-weight: 600;
+ display: block;
+}
+
+.notable-traits .docblock code.content {
+ margin: 0;
+ padding: 0;
+ font-size: 1.25rem;
+}
+
+/* Example code has the "Run" button that needs to be positioned relative to the pre */
+pre.rust.rust-example-rendered {
+ position: relative;
+}
+
+pre.rust {
+ tab-size: 4;
+ -moz-tab-size: 4;
+}
+
+.search-failed {
+ text-align: center;
+ margin-top: 20px;
+ display: none;
+}
+
+.search-failed.active {
+ display: block;
+}
+
+.search-failed > ul {
+ text-align: left;
+ max-width: 570px;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+#titles {
+ height: 35px;
+}
+
+#titles > button {
+ float: left;
+ width: 33.3%;
+ text-align: center;
+ font-size: 1.125rem;
+ cursor: pointer;
+ border: 0;
+ border-top: 2px solid;
+}
+
+#titles > button:first-child:last-child {
+ margin-right: 1px;
+ width: calc(100% - 1px);
+}
+
+#titles > button:not(:last-child) {
+ margin-right: 1px;
+ width: calc(33.3% - 1px);
+}
+
+#titles > button > div.count {
+ display: inline-block;
+ font-size: 1rem;
+}
+
+.notable-traits {
+ cursor: pointer;
+ z-index: 2;
+ margin-left: 5px;
+}
+
+#sidebar-toggle {
+ position: sticky;
+ top: 0;
+ left: 0;
+ font-weight: bold;
+ font-size: 1.25rem;
+ border-bottom: 1px solid;
+ display: flex;
+ height: 40px;
+ justify-content: center;
+ align-items: center;
+ z-index: 10;
+}
+#source-sidebar {
+ width: 100%;
+ z-index: 1;
+ overflow: auto;
+}
+#source-sidebar > .title {
+ font-size: 1.5rem;
+ text-align: center;
+ border-bottom: 1px solid;
+ margin-bottom: 6px;
+}
+#sidebar-toggle > button {
+ background: none;
+ color: inherit;
+ cursor: pointer;
+ text-align: center;
+ border: none;
+ outline: none;
+ position: absolute;
+ top: 0;
+ bottom: 0;
+ left: 0;
+ right: 0;
+ /* work around button layout strangeness: https://stackoverflow.com/q/7271561 */
+ width: 100%;
+ /* iOS button gradient: https://stackoverflow.com/q/5438567 */
+ -webkit-appearance: none;
+ opacity: 1;
+}
+#settings-menu, #help-button {
+ margin-left: 4px;
+ outline: none;
+}
+
+#copy-path {
+ height: 34px;
+}
+#settings-menu > a, #help-button > button, #copy-path {
+ padding: 5px;
+ width: 33px;
+ border: 1px solid;
+ border-radius: 2px;
+ cursor: pointer;
+}
+#settings-menu {
+ padding: 0;
+}
+#settings-menu > a, #help-button > button {
+ padding: 5px;
+ height: 100%;
+ display: block;
+}
+
+@keyframes rotating {
+ from {
+ transform: rotate(0deg);
+ }
+ to {
+ transform: rotate(360deg);
+ }
+}
+#settings-menu.rotate > a img {
+ animation: rotating 2s linear infinite;
+}
+
+.setting-line .radio-line input:checked {
+ box-shadow: inset 0 0 0 3px var(--main-background-color);
+ background-color: var(--settings-input-color);
+}
+.setting-line .radio-line input:focus {
+ box-shadow: 0 0 1px 1px var(--settings-input-color);
+}
+/* In here we combine both `:focus` and `:checked` properties. */
+.setting-line .radio-line input:checked:focus {
+ box-shadow: inset 0 0 0 3px var(--main-background-color),
+ 0 0 2px 2px var(--settings-input-color);
+}
+.setting-line .radio-line input:hover {
+ border-color: var(--settings-input-color) !important;
+}
+input:checked + .slider {
+ background-color: var(--settings-input-color);
+}
+
+#help-button > button {
+ text-align: center;
+ /* Rare exception to specifying font sizes in rem. Since this is acting
+ as an icon, it's okay to specify its size in pixels. */
+ font-size: 20px;
+ padding-top: 2px;
+}
+
+#copy-path {
+ background: initial;
+ margin-left: 10px;
+ padding: 0;
+ padding-left: 2px;
+ border: 0;
+}
+
+#theme-choices {
+ display: none;
+ position: absolute;
+ left: 0;
+ top: 28px;
+ border: 1px solid;
+ border-radius: 3px;
+ z-index: 1;
+ cursor: pointer;
+}
+
+#theme-choices > button {
+ border: none;
+ width: 100%;
+ padding: 4px 8px;
+ text-align: center;
+ background: rgba(0,0,0,0);
+ overflow-wrap: normal;
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top: 1px solid;
+}
+
+kbd {
+ display: inline-block;
+ padding: 3px 5px;
+ font: 15px monospace;
+ line-height: 10px;
+ vertical-align: middle;
+ border: solid 1px;
+ border-radius: 3px;
+ cursor: default;
+}
+
+.hidden-by-impl-hider,
+.hidden-by-usual-hider {
+ /* important because of conflicting rule for small screens */
+ display: none !important;
+}
+
+#implementations-list > h3 > span.in-band {
+ width: 100%;
+}
+
+.table-display {
+ width: 100%;
+ border: 0;
+ border-collapse: collapse;
+ border-spacing: 0;
+ font-size: 1rem;
+}
+
+.table-display tr td:first-child {
+ padding-right: 0;
+}
+
+.table-display tr td:last-child {
+ float: right;
+}
+.table-display .out-of-band {
+ position: relative;
+ font-size: 1.125rem;
+ display: block;
+}
+
+.table-display td:hover .anchor {
+ display: block;
+ top: 2px;
+ left: -5px;
+}
+
+#main-content > ul {
+ padding-left: 10px;
+}
+#main-content > ul > li {
+ list-style: none;
+}
+
+.non-exhaustive {
+ margin-bottom: 1em;
+}
+
+details.dir-entry {
+ padding-left: 4px;
+}
+
+details.dir-entry > summary {
+ margin: 0 0 0 13px;
+ list-style-position: outside;
+ cursor: pointer;
+}
+
+details.dir-entry div.folders, details.dir-entry div.files {
+ padding-left: 23px;
+}
+
+details.dir-entry a {
+ display: block;
+}
+
+/* The hideme class is used on summary tags that contain a span with
+ placeholder text shown only when the toggle is closed. For instance,
+ "Expand description" or "Show methods". */
+details.rustdoc-toggle > summary.hideme {
+ cursor: pointer;
+}
+
+details.rustdoc-toggle > summary {
+ list-style: none;
+}
+details.rustdoc-toggle > summary::-webkit-details-marker,
+details.rustdoc-toggle > summary::marker {
+ display: none;
+}
+
+details.rustdoc-toggle > summary.hideme > span {
+ margin-left: 9px;
+}
+
+details.rustdoc-toggle > summary::before {
+ content: "";
+ cursor: pointer;
+ width: 16px;
+ height: 16px;
+ background-repeat: no-repeat;
+ background-position: top left;
+ display: inline-block;
+ vertical-align: middle;
+ opacity: .5;
+}
+
+/* Screen readers see the text version at the end of the line.
+ Visual readers see the icon at the start of the line, but small and transparent. */
+details.rustdoc-toggle > summary::after {
+ content: "Expand";
+ overflow: hidden;
+ width: 0;
+ height: 0;
+ position: absolute;
+}
+
+details.rustdoc-toggle > summary.hideme::after {
+ /* "hideme" toggles already have a description when they're contracted */
+ content: "";
+}
+
+details.rustdoc-toggle > summary:focus::before,
+details.rustdoc-toggle > summary:hover::before {
+ opacity: 1;
+}
+
+details.rustdoc-toggle.top-doc > summary,
+details.rustdoc-toggle.top-doc > summary::before,
+details.rustdoc-toggle.non-exhaustive > summary,
+details.rustdoc-toggle.non-exhaustive > summary::before {
+ font-size: 1rem;
+}
+
+details.non-exhaustive {
+ margin-bottom: 8px;
+}
+
+details.rustdoc-toggle > summary.hideme::before {
+ position: relative;
+}
+
+details.rustdoc-toggle > summary:not(.hideme)::before {
+ position: absolute;
+ left: -24px;
+ top: 4px;
+}
+
+.impl-items > details.rustdoc-toggle > summary:not(.hideme)::before {
+ position: absolute;
+ left: -24px;
+}
+
+/* When a "hideme" summary is open and the "Expand description" or "Show
+ methods" text is hidden, we want the [-] toggle that remains to not
+ affect the layout of the items to its right. To do that, we use
+ absolute positioning. Note that we also set position: relative
+ on the parent <details> to make this work properly. */
+details.rustdoc-toggle[open] > summary.hideme {
+ position: absolute;
+}
+
+details.rustdoc-toggle {
+ position: relative;
+}
+
+details.rustdoc-toggle[open] > summary.hideme > span {
+ display: none;
+}
+
+details.rustdoc-toggle[open] > summary::before,
+details.rustdoc-toggle[open] > summary.hideme::before {
+ background-image: /* AUTOREPLACE: */url("toggle-minus.svg");
+}
+
+details.rustdoc-toggle > summary::before {
+ background-image: /* AUTOREPLACE: */url("toggle-plus.svg");
+}
+
+details.rustdoc-toggle[open] > summary::before,
+details.rustdoc-toggle[open] > summary.hideme::before {
+ width: 16px;
+ height: 16px;
+ background-repeat: no-repeat;
+ background-position: top left;
+ display: inline-block;
+ content: "";
+}
+
+details.rustdoc-toggle[open] > summary::after,
+details.rustdoc-toggle[open] > summary.hideme::after {
+ content: "Collapse";
+}
+
+/* This is needed in docblocks to have the "▶" element to be on the same line. */
+.docblock summary > * {
+ display: inline-block;
+}
+
+/* Media Queries */
+
+/*
+WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY;
+If you update this line, then you also need to update the line with the same warning
+in storage.js plus the media query with (max-width: 700px)
+*/
+@media (min-width: 701px) {
+ /* In case there is no documentation before a code block, we need to add some margin at the top
+ to prevent an overlay between the "collapse toggle" and the information tooltip.
+ However, it's not needed at smaller screen widths because the doc/code block is always placed
+ "one line" below. */
+ .docblock > .information:first-child > .tooltip {
+ margin-top: 16px;
+ }
+
+ /* When we expand the sidebar on the source code page, we hide the logo on the left of the
+ search bar to have more space. */
+ .source-sidebar-expanded .source .sidebar + main .width-limiter .sub-logo-container.rust-logo {
+ display: none;
+ }
+
+ .source-sidebar-expanded .source .sidebar {
+ width: 300px;
+ }
+}
+
+/*
+WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
+If you update this line, then you also need to update the line with the same warning
+in storage.js plus the media query with (min-width: 701px)
+*/
+@media (max-width: 700px) {
+ /* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
+ or visiting a URL with a fragment like `#method.new`), we don't want the item to be obscured
+ by the topbar. Anything with an `id` gets scroll-margin-top equal to .mobile-topbar's size.
+ */
+ *[id] {
+ scroll-margin-top: 45px;
+ }
+
+ .rustdoc {
+ padding-top: 0px;
+ /* Sidebar should overlay main content, rather than pushing main content to the right.
+ Turn off `display: flex` on the body element. */
+ display: block;
+ }
+
+ main {
+ padding-left: 15px;
+ padding-top: 0px;
+ }
+
+ .rustdoc,
+ .main-heading {
+ flex-direction: column;
+ }
+
+ .content .out-of-band {
+ text-align: left;
+ margin-left: initial;
+ padding: initial;
+ }
+
+ .content .out-of-band .since::before {
+ content: "Since ";
+ }
+
+ #copy-path {
+ display: none;
+ }
+
+ /* Hide the logo and item name from the sidebar. Those are displayed
+ in the mobile-topbar instead. */
+ .sidebar .sidebar-logo,
+ .sidebar .location {
+ display: none;
+ }
+
+ .sidebar-elems {
+ margin-top: 1em;
+ }
+
+ .sidebar {
+ position: fixed;
+ top: 45px;
+ /* Hide the sidebar offscreen while not in use. Doing this instead of display: none means
+ the sidebar stays visible for screen readers, which is useful for navigation. */
+ left: -1000px;
+ margin-left: 0;
+ margin: 0;
+ padding: 0;
+ z-index: 11;
+ /* Reduce height slightly to account for mobile topbar. */
+ height: calc(100vh - 45px);
+ }
+
+ /* The source view uses a different design for the sidebar toggle, and doesn't have a topbar,
+ so don't bump down the main content or the sidebar. */
+ .source main,
+ .rustdoc.source .sidebar {
+ top: 0;
+ padding: 0;
+ height: 100vh;
+ border: 0;
+ }
+
+ .sidebar.shown,
+ .source-sidebar-expanded .source .sidebar,
+ .sidebar:focus-within {
+ left: 0;
+ }
+
+ .rustdoc.source > .sidebar {
+ position: fixed;
+ margin: 0;
+ z-index: 11;
+ width: 0;
+ }
+
+ .mobile-topbar .location a {
+ padding: 0;
+ margin: 0;
+ }
+
+ .mobile-topbar .location {
+ border: none;
+ padding: 0;
+ margin: auto 0.5em auto auto;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ white-space: nowrap;
+ /* Rare exception to specifying font sizes in rem. Since the topbar
+ height is specified in pixels, this also has to be specified in
+ pixels to avoid overflowing the topbar when the user sets a bigger
+ font size. */
+ font-size: 24px;
+ }
+
+ .mobile-topbar .logo-container {
+ max-height: 45px;
+ }
+
+ .mobile-topbar .logo-container > img {
+ max-width: 35px;
+ max-height: 35px;
+ margin-left: 20px;
+ margin-top: 5px;
+ margin-bottom: 5px;
+ }
+
+ .mobile-topbar {
+ display: flex;
+ flex-direction: row;
+ position: sticky;
+ z-index: 10;
+ font-size: 2rem;
+ height: 45px;
+ width: 100%;
+ left: 0;
+ top: 0;
+ }
+
+ .source .mobile-topbar {
+ display: none;
+ }
+
+ .sidebar-menu-toggle {
+ width: 45px;
+ /* Rare exception to specifying font sizes in rem. Since this is acting
+ as an icon, it's okay to specify its size in pixels. */
+ font-size: 32px;
+ border: none;
+ }
+
+ .sidebar-elems {
+ background-color: var(--sidebar-background-color);
+ }
+
+ .source nav:not(.sidebar).sub {
+ margin-left: 32px;
+ }
+
+ .content {
+ margin-left: 0px;
+ }
+
+ .source .content {
+ margin-top: 10px;
+ }
+
+ #search {
+ margin-left: 0;
+ padding: 0;
+ }
+
+ .anchor {
+ display: none !important;
+ }
+
+ .notable-traits {
+ position: absolute;
+ left: -22px;
+ top: 24px;
+ }
+
+ #titles > button > div.count {
+ float: left;
+ width: 100%;
+ }
+
+ #titles {
+ height: 50px;
+ }
+
+ /* Because of iOS, we need an actual full-height sidebar title so the real
+ * sidebar can show up. But then we need to make it transparent so we don't
+ * hide content. The filler just creates the background for the sidebar
+ * title; because of the absolute positioning, its z-index had to be lowered.
+ */
+ #sidebar-filler {
+ position: fixed;
+ left: 45px;
+ width: calc(100% - 45px);
+ top: 0;
+ height: 45px;
+ z-index: -1;
+ border-bottom: 1px solid;
+ }
+
+ #main-content > details.rustdoc-toggle > summary::before,
+ #main-content > div > details.rustdoc-toggle > summary::before {
+ left: -11px;
+ }
+
+ #sidebar-toggle {
+ position: fixed;
+ left: 1px;
+ top: 100px;
+ width: 30px;
+ font-size: 1.5rem;
+ text-align: center;
+ padding: 0;
+ z-index: 10;
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ cursor: pointer;
+ font-weight: bold;
+ border: 1px solid;
+ border-left: 0;
+ }
+
+ .source-sidebar-expanded #sidebar-toggle {
+ left: unset;
+ top: unset;
+ width: unset;
+ border-top-right-radius: unset;
+ border-bottom-right-radius: unset;
+ position: sticky;
+ border: 0;
+ border-bottom: 1px solid;
+ }
+
+ #source-sidebar {
+ z-index: 11;
+ }
+
+ #main-content > .line-numbers {
+ margin-top: 0;
+ }
+
+ .notable-traits .notable-traits-tooltiptext {
+ left: 0;
+ top: 100%;
+ }
+
+ /* We don't display the help button on mobile devices. */
+ #help-button {
+ display: none;
+ }
+
+ /* Display an alternating layout on tablets and phones */
+ .item-table {
+ display: block;
+ }
+ .item-row {
+ display: flex;
+ flex-flow: column wrap;
+ }
+ .item-left, .item-right {
+ width: 100%;
+ }
+
+ /* Display an alternating layout on tablets and phones */
+ .search-results > a {
+ border-bottom: 1px solid #aaa9;
+ padding: 5px 0px;
+ }
+ .search-results .result-name, .search-results div.desc, .search-results .result-description {
+ width: 100%;
+ }
+ .search-results div.desc, .search-results .result-description, .item-right {
+ padding-left: 2em;
+ }
+
+ .source-sidebar-expanded .source .sidebar {
+ max-width: 100vw;
+ width: 100vw;
+ }
+
+ /* Position of the "[-]" element. */
+ details.rustdoc-toggle:not(.top-doc) > summary {
+ margin-left: 10px;
+ }
+ .impl-items > details.rustdoc-toggle > summary:not(.hideme)::before,
+ #main-content > details.rustdoc-toggle:not(.top-doc) > summary::before,
+ #main-content > div > details.rustdoc-toggle > summary::before {
+ left: -11px;
+ }
+}
+
+@media print {
+ nav.sidebar, nav.sub, .content .out-of-band, a.srclink, #copy-path,
+ details.rustdoc-toggle[open] > summary::before, details.rustdoc-toggle > summary::before,
+ details.rustdoc-toggle.top-doc > summary {
+ display: none;
+ }
+
+ .docblock {
+ margin-left: 0;
+ }
+
+ main {
+ padding: 10px;
+ }
+}
+
+@media (max-width: 464px) {
+ #titles, #titles > button {
+ height: 73px;
+ }
+
+ #main-content > table:not(.table-display) td {
+ word-break: break-word;
+ width: 50%;
+ }
+
+ #crate-search {
+ border-radius: 4px;
+ }
+
+ .docblock {
+ margin-left: 12px;
+ }
+
+ .docblock code {
+ overflow-wrap: break-word;
+ overflow-wrap: anywhere;
+ }
+
+ .sub-container {
+ flex-direction: column;
+ }
+
+ .sub-logo-container {
+ align-self: center;
+ }
+
+ .source .sub-logo-container > img {
+ height: 35px;
+ width: 35px;
+ }
+
+ #sidebar-toggle {
+ top: 10px;
+ }
+ .source-sidebar-expanded #sidebar-toggle {
+ top: unset;
+ }
+}
+
+.method-toggle summary,
+.implementors-toggle summary,
+.impl {
+ margin-bottom: 0.75em;
+}
+
+.method-toggle[open] {
+ margin-bottom: 2em;
+}
+
+.implementors-toggle[open] {
+ margin-bottom: 2em;
+}
+
+#trait-implementations-list .method-toggle,
+#synthetic-implementations-list .method-toggle,
+#blanket-implementations-list .method-toggle {
+ margin-bottom: 1em;
+}
+
+/* Begin: styles for --scrape-examples feature */
+
+.scraped-example-list .scrape-help {
+ margin-left: 10px;
+ padding: 0 4px;
+ font-weight: normal;
+ font-size: 12px;
+ position: relative;
+ bottom: 1px;
+ background: transparent;
+ border-width: 1px;
+ border-style: solid;
+ border-radius: 50px;
+}
+
+.scraped-example .code-wrapper {
+ position: relative;
+ display: flex;
+ flex-direction: row;
+ flex-wrap: wrap;
+ width: 100%;
+}
+
+.scraped-example:not(.expanded) .code-wrapper {
+ max-height: 240px;
+}
+
+.scraped-example:not(.expanded) .code-wrapper pre {
+ overflow-y: hidden;
+ max-height: 240px;
+ padding-bottom: 0;
+}
+
+.scraped-example:not(.expanded) .code-wrapper pre.line-numbers {
+ overflow-x: hidden;
+}
+
+.scraped-example .code-wrapper .prev {
+ position: absolute;
+ top: 0.25em;
+ right: 2.25em;
+ z-index: 100;
+ cursor: pointer;
+}
+
+.scraped-example .code-wrapper .next {
+ position: absolute;
+ top: 0.25em;
+ right: 1.25em;
+ z-index: 100;
+ cursor: pointer;
+}
+
+.scraped-example .code-wrapper .expand {
+ position: absolute;
+ top: 0.25em;
+ right: 0.25em;
+ z-index: 100;
+ cursor: pointer;
+}
+
+.scraped-example:not(.expanded) .code-wrapper:before {
+ content: " ";
+ width: 100%;
+ height: 5px;
+ position: absolute;
+ z-index: 100;
+ top: 0;
+}
+
+.scraped-example:not(.expanded) .code-wrapper:after {
+ content: " ";
+ width: 100%;
+ height: 5px;
+ position: absolute;
+ z-index: 100;
+ bottom: 0;
+}
+
+.scraped-example .code-wrapper .line-numbers {
+ margin: 0;
+ padding: 14px 0;
+}
+
+.scraped-example .code-wrapper .line-numbers span {
+ padding: 0 14px;
+}
+
+.scraped-example .code-wrapper .example-wrap {
+ flex: 1;
+ overflow-x: auto;
+ overflow-y: hidden;
+ margin-bottom: 0;
+}
+
+.scraped-example:not(.expanded) .code-wrapper .example-wrap {
+ overflow-x: hidden;
+}
+
+.scraped-example .code-wrapper .example-wrap pre.rust {
+ overflow-x: inherit;
+ width: inherit;
+ overflow-y: hidden;
+}
+
+.more-examples-toggle {
+ max-width: calc(100% + 25px);
+ margin-top: 10px;
+ margin-left: -25px;
+}
+
+.more-examples-toggle .hide-more {
+ margin-left: 25px;
+ margin-bottom: 5px;
+ cursor: pointer;
+}
+
+.more-scraped-examples {
+ margin-left: 5px;
+ display: flex;
+ flex-direction: row;
+}
+
+.more-scraped-examples-inner {
+ /* 20px is width of toggle-line + toggle-line-inner */
+ width: calc(100% - 20px);
+}
+
+.toggle-line {
+ align-self: stretch;
+ margin-right: 10px;
+ margin-top: 5px;
+ padding: 0 4px;
+ cursor: pointer;
+}
+
+.toggle-line-inner {
+ min-width: 2px;
+ height: 100%;
+}
+
+.more-scraped-examples .scraped-example {
+ margin-bottom: 20px;
+}
+
+.more-scraped-examples .scraped-example:last-child {
+ margin-bottom: 0;
+}
+
+.example-links a {
+ margin-top: 20px;
+}
+
+.example-links ul {
+ margin-bottom: 0;
+}
+
+/* End: styles for --scrape-examples feature */
diff --git a/src/librustdoc/html/static/css/settings.css b/src/librustdoc/html/static/css/settings.css
new file mode 100644
index 000000000..e82ec0426
--- /dev/null
+++ b/src/librustdoc/html/static/css/settings.css
@@ -0,0 +1,90 @@
+.setting-line {
+ margin: 0.6em 0 0.6em 0.3em;
+ position: relative;
+}
+
+.setting-line .choices {
+ display: flex;
+ flex-wrap: wrap;
+}
+
+.setting-line .radio-line input {
+ margin-right: 0.3em;
+ height: 1.2rem;
+ width: 1.2rem;
+ border: 1px solid;
+ outline: none;
+ -webkit-appearance: none;
+ cursor: pointer;
+ border-radius: 50%;
+}
+.setting-line .radio-line input + span {
+ padding-bottom: 1px;
+}
+
+.radio-line .setting-name {
+ width: 100%;
+}
+
+.radio-line .choice {
+ margin-top: 0.1em;
+ margin-bottom: 0.1em;
+ min-width: 3.8em;
+ padding: 0.3em;
+ display: flex;
+ align-items: center;
+ cursor: pointer;
+}
+.radio-line .choice + .choice {
+ margin-left: 0.5em;
+}
+
+.toggle {
+ position: relative;
+ width: 100%;
+ margin-right: 20px;
+ display: flex;
+ align-items: center;
+ cursor: pointer;
+}
+
+.toggle input {
+ opacity: 0;
+ position: absolute;
+}
+
+.slider {
+ position: relative;
+ width: 45px;
+ min-width: 45px;
+ display: block;
+ height: 28px;
+ margin-right: 20px;
+ cursor: pointer;
+ background-color: #ccc;
+ transition: .3s;
+}
+
+.slider:before {
+ position: absolute;
+ content: "";
+ height: 19px;
+ width: 19px;
+ left: 4px;
+ bottom: 4px;
+ transition: .3s;
+}
+
+input:checked + .slider:before {
+ transform: translateX(19px);
+}
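+
+/* Rough sketch of the markup the .toggle/.slider rules above assume, based
+   only on the selectors here (the real markup is generated elsewhere, and the
+   <label> wrapper and <span> slider are assumptions):
+   <label class="toggle">
+       <input type="checkbox">
+       <span class="slider"></span>
+   </label>
+*/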
+
+.setting-line > .sub-settings {
+ padding-left: 42px;
+ width: 100%;
+ display: block;
+}
+
+#settings .setting-line {
+ margin: 1.2em 0.6em;
+}
diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css
new file mode 100644
index 000000000..c42cac59b
--- /dev/null
+++ b/src/librustdoc/html/static/css/themes/ayu.css
@@ -0,0 +1,563 @@
+/*
+Based off of the Ayu theme
+Original by Dempfi (https://github.com/dempfi/ayu)
+*/
+
+:root {
+ --main-background-color: #0f1419;
+ --main-color: #c5c5c5;
+ --settings-input-color: #ffb454;
+ --sidebar-background-color: #14191f;
+ --sidebar-background-color-hover: rgba(70, 70, 70, 0.33);
+ --code-block-background-color: #191f26;
+ --scrollbar-track-background-color: transparent;
+ --scrollbar-thumb-background-color: #5c6773;
+ --scrollbar-color: #5c6773 #24292f;
+ --headings-border-bottom-color: #5c6773;
+}
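+
+/* These custom properties are read by rustdoc.css through var(), so a theme
+   only needs to supply the values. For example, rustdoc.css (earlier in this
+   patch) contains:
+   .sidebar, .mobile-topbar, .sidebar-menu-toggle {
+       background-color: var(--sidebar-background-color);
+   }
+*/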
+
+.slider {
+ background-color: #ccc;
+}
+.slider:before {
+ background-color: white;
+}
+input:focus + .slider {
+ box-shadow: 0 0 0 2px #0a84ff, 0 0 0 6px rgba(10, 132, 255, 0.3);
+}
+
+h1, h2, h3, h4 {
+ color: white;
+}
+h1.fqn a {
+ color: #fff;
+}
+h4 {
+ border: none;
+}
+
+.in-band {
+ background-color: #0f1419;
+}
+
+.docblock code {
+ color: #ffb454;
+}
+.code-header {
+ color: #e6e1cf;
+}
+.docblock pre > code, pre > code {
+ color: #e6e1cf;
+}
+span code {
+ color: #e6e1cf;
+}
+.docblock a > code {
+ color: #39AFD7 !important;
+}
+pre, .rustdoc.source .example-wrap {
+ color: #e6e1cf;
+}
+
+.rust-logo {
+ filter: drop-shadow(1px 0 0px #fff)
+ drop-shadow(0 1px 0 #fff)
+ drop-shadow(-1px 0 0 #fff)
+ drop-shadow(0 -1px 0 #fff);
+}
+
+.sidebar .current,
+.sidebar a:hover {
+ background-color: transparent;
+ color: #ffb44c;
+}
+
+.sidebar-elems .location {
+ color: #ff7733;
+}
+
+.line-numbers span { color: #5c6773; }
+.line-numbers .line-highlighted {
+ color: #708090;
+ background-color: rgba(255, 236, 164, 0.06);
+ padding-right: 4px;
+ border-right: 1px solid #ffb44c;
+}
+
+.docblock table td, .docblock table th {
+ border-color: #5c6773;
+}
+
+.search-results a:hover {
+ background-color: #777;
+}
+
+.search-results a:focus {
+ color: #000 !important;
+ background-color: #c6afb3;
+}
+.search-results a {
+ color: #0096cf;
+}
+.search-results a div.desc {
+ color: #c5c5c5;
+}
+
+.content .item-info::before { color: #ccc; }
+
+.content span.foreigntype, .content a.foreigntype { color: #ffa0a5; }
+.content span.union, .content a.union { color: #ffa0a5; }
+.content span.constant, .content a.constant,
+.content span.static, .content a.static { color: #39AFD7; }
+.content span.primitive, .content a.primitive { color: #ffa0a5; }
+.content span.traitalias, .content a.traitalias { color: #39AFD7; }
+.content span.keyword, .content a.keyword { color: #39AFD7; }
+
+.content span.externcrate, .content span.mod, .content a.mod {
+ color: #39AFD7;
+}
+.content span.struct, .content a.struct {
+ color: #ffa0a5;
+}
+.content span.enum, .content a.enum {
+ color: #ffa0a5;
+}
+.content span.trait, .content a.trait {
+ color: #39AFD7;
+}
+.content span.type, .content a.type {
+ color: #39AFD7;
+}
+.content span.type,
+.content a.type,
+.block a.current.type { color: #39AFD7; }
+.content span.associatedtype,
+.content a.associatedtype,
+.block a.current.associatedtype { color: #39AFD7; }
+.content span.fn, .content a.fn, .content span.method,
+.content a.method, .content span.tymethod,
+.content a.tymethod, .content .fnname {
+ color: #fdd687;
+}
+.content span.attr, .content a.attr, .content span.derive,
+.content a.derive, .content span.macro, .content a.macro {
+ color: #a37acc;
+}
+
+.sidebar a { color: #53b1db; }
+.sidebar a.current.type { color: #53b1db; }
+.sidebar a.current.associatedtype { color: #53b1db; }
+
+pre.rust .comment { color: #788797; }
+pre.rust .doccomment { color: #a1ac88; }
+
+nav.main .current {
+ border-top-color: #5c6773;
+ border-bottom-color: #5c6773;
+}
+nav.main .separator {
+ border: 1px solid #5c6773;
+}
+a {
+ color: #39AFD7;
+}
+
+.sidebar h2 a,
+.sidebar h3 a {
+ color: white;
+}
+.search-results a {
+ color: #0096cf;
+}
+body.source .example-wrap pre.rust a {
+ background: #333;
+}
+
+details.rustdoc-toggle > summary.hideme > span,
+details.rustdoc-toggle > summary::before {
+ color: #999;
+}
+
+details.rustdoc-toggle > summary::before {
+ filter: invert(100%);
+}
+
+#crate-search, .search-input {
+ background-color: #141920;
+ border-color: #424c57;
+}
+
+#crate-search {
+ /* Without the `!important`, the border-color is ignored for `<select>`...
+ It cannot be in the group above because `.search-input` has a different border color on
+ hover. */
+ border-color: #424c57 !important;
+}
+
+.search-input {
+ color: #ffffff;
+}
+
+.module-item .stab,
+.import-item .stab {
+ color: #000;
+}
+
+/* Created these empty rules to satisfy the theme checks. */
+.stab.empty-impl {}
+.stab.must_implement {}
+
+.stab.unstable,
+.stab.deprecated,
+.stab.portability,
+.stab.empty-impl,
+.stab.must_implement {
+ color: #c5c5c5;
+ background: #314559 !important;
+ border-style: none !important;
+ border-radius: 4px;
+ padding: 3px 6px 3px 6px;
+}
+
+.stab.portability > code {
+ color: #e6e1cf;
+ background: none;
+}
+
+.rightside,
+.out-of-band {
+ color: grey;
+}
+
+.result-name .primitive > i, .result-name .keyword > i {
+ color: #788797;
+}
+
+.line-numbers :target { background-color: transparent; }
+
+/* Code highlighting */
+pre.rust .number, pre.rust .string { color: #b8cc52; }
+pre.rust .kw, pre.rust .kw-2, pre.rust .prelude-ty,
+pre.rust .bool-val, pre.rust .prelude-val,
+pre.rust .op, pre.rust .lifetime { color: #ff7733; }
+pre.rust .macro, pre.rust .macro-nonterminal { color: #a37acc; }
+pre.rust .question-mark {
+ color: #ff9011;
+}
+pre.rust .self {
+ color: #36a3d9;
+ font-style: italic;
+}
+pre.rust .attribute {
+ color: #e6e1cf;
+}
+pre.rust .attribute .ident, pre.rust .attribute .op {
+ color: #e6e1cf;
+}
+
+.example-wrap > pre.line-number {
+ color: #5c67736e;
+ border: none;
+}
+
+a.test-arrow {
+ font-size: 100%;
+ color: #788797;
+ border-radius: 4px;
+ background-color: rgba(57, 175, 215, 0.09);
+}
+
+a.test-arrow:hover {
+ background-color: rgba(57, 175, 215, 0.368);
+ color: #c5c5c5;
+}
+
+.toggle-label,
+.code-attribute {
+ color: #999;
+}
+
+:target {
+ background: rgba(255, 236, 164, 0.06);
+ border-right: 3px solid rgba(255, 180, 76, 0.85);
+}
+
+pre.compile_fail {
+ border-left: 2px solid rgba(255,0,0,.4);
+}
+
+pre.compile_fail:hover, .information:hover + pre.compile_fail {
+ border-left: 2px solid #f00;
+}
+
+pre.should_panic {
+ border-left: 2px solid rgba(255,0,0,.4);
+}
+
+pre.should_panic:hover, .information:hover + pre.should_panic {
+ border-left: 2px solid #f00;
+}
+
+pre.ignore {
+ border-left: 2px solid rgba(255,142,0,.6);
+}
+
+pre.ignore:hover, .information:hover + pre.ignore {
+ border-left: 2px solid #ff9200;
+}
+
+.tooltip.compile_fail {
+ color: rgba(255,0,0,.5);
+}
+
+.information > .compile_fail:hover {
+ color: #f00;
+}
+
+.tooltip.should_panic {
+ color: rgba(255,0,0,.5);
+}
+
+.information > .should_panic:hover {
+ color: #f00;
+}
+
+.tooltip.ignore {
+ color: rgba(255,142,0,.6);
+}
+
+.information > .ignore:hover {
+ color: #ff9200;
+}
+
+.search-failed a {
+ color: #39AFD7;
+}
+
+.tooltip::after {
+ background-color: #314559;
+ color: #c5c5c5;
+ border: 1px solid #5c6773;
+}
+
+.tooltip::before {
+ border-color: transparent #314559 transparent transparent;
+}
+
+.notable-traits-tooltiptext {
+ background-color: #314559;
+ border-color: #5c6773;
+}
+
+.notable-traits-tooltiptext .notable {
+ border-bottom-color: #5c6773;
+}
+
+#titles > button.selected {
+ background-color: #141920 !important;
+ border-bottom: 1px solid #ffb44c !important;
+ border-top: none;
+}
+
+#titles > button:not(.selected) {
+ background-color: transparent !important;
+ border: none;
+}
+
+#titles > button:hover {
+ border-bottom: 1px solid rgba(242, 151, 24, 0.3);
+}
+
+#titles > button > div.count {
+ color: #888;
+}
+
+/* rules that this theme does not need to set, here to satisfy the rule checker */
+/* note that a lot of these are partially set in some way (meaning they are set
+individually rather than as a group) */
+/* FIXME: these rules should be at the bottom of the file but currently must be
+above the `@media (max-width: 700px)` rules due to a bug in the css checker */
+/* see https://github.com/rust-lang/rust/pull/71237#issuecomment-618170143 */
+.search-input:focus {}
+.content span.attr,.content a.attr,.block a.current.attr,.content span.derive,.content a.derive,
+.block a.current.derive,.content span.macro,.content a.macro,.block a.current.macro {}
+.content span.struct,.content a.struct,.block a.current.struct {}
+#titles>button:hover,#titles>button.selected {}
+.content span.typedef,.content a.typedef,.block a.current.typedef {}
+.content span.union,.content a.union,.block a.current.union {}
+pre.rust .lifetime {}
+.stab.unstable {}
+h2,
+h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {}
+.content span.enum,.content a.enum,.block a.current.enum {}
+.content span.constant,.content a.constant,.block a.current.constant,.content span.static,
+.content a.static, .block a.current.static {}
+.content span.keyword,.content a.keyword,.block a.current.keyword {}
+pre.rust .comment {}
+.content span.traitalias,.content a.traitalias,.block a.current.traitalias {}
+.content span.fn,.content a.fn,.block a.current.fn,.content span.method,.content a.method,
+.block a.current.method,.content span.tymethod,.content a.tymethod,.block a.current.tymethod,
+.content .fnname {}
+pre.rust .kw {}
+pre.rust .self,pre.rust .bool-val,pre.rust .prelude-val,pre.rust .attribute,
+pre.rust .attribute .ident {}
+.content span.foreigntype,.content a.foreigntype,.block a.current.foreigntype {}
+pre.rust .doccomment {}
+.stab.deprecated {}
+.content a.attr,.content a.derive,.content a.macro {}
+.stab.portability {}
+.content span.primitive,.content a.primitive,.block a.current.primitive {}
+.content span.externcrate,.content span.mod,.content a.mod,.block a.current.mod {}
+pre.rust .kw-2,pre.rust .prelude-ty {}
+.content span.trait,.content a.trait,.block a.current.trait {}
+
+.search-results a:focus span {}
+a.result-trait:focus {}
+a.result-traitalias:focus {}
+a.result-mod:focus,
+a.result-externcrate:focus {}
+a.result-mod:focus {}
+a.result-externcrate:focus {}
+a.result-enum:focus {}
+a.result-struct:focus {}
+a.result-union:focus {}
+a.result-fn:focus,
+a.result-method:focus,
+a.result-tymethod:focus {}
+a.result-type:focus {}
+a.result-associatedtype:focus {}
+a.result-foreigntype:focus {}
+a.result-attr:focus,
+a.result-derive:focus,
+a.result-macro:focus {}
+a.result-constant:focus,
+a.result-static:focus {}
+a.result-primitive:focus {}
+a.result-keyword:focus {}
+
+.sidebar a.current.enum {}
+.sidebar a.current.struct {}
+.sidebar a.current.foreigntype {}
+.sidebar a.current.attr,
+.sidebar a.current.derive,
+.sidebar a.current.macro {}
+.sidebar a.current.union {}
+.sidebar a.current.constant
+.sidebar a.current.static {}
+.sidebar a.current.primitive {}
+.sidebar a.current.externcrate
+.sidebar a.current.mod {}
+.sidebar a.current.trait {}
+.sidebar a.current.traitalias {}
+.sidebar a.current.fn,
+.sidebar a.current.method,
+.sidebar a.current.tymethod {}
+.sidebar a.current.keyword {}
+
+@media (max-width: 700px) {
+ .sidebar-elems {
+ border-right-color: #5c6773;
+ }
+}
+
+kbd {
+ color: #c5c5c5;
+ background-color: #314559;
+ border-color: #5c6773;
+ border-bottom-color: #5c6773;
+ box-shadow: inset 0 -1px 0 #5c6773;
+}
+
+#settings-menu > a, #help-button > button {
+ border-color: #5c6773;
+ background-color: #0f1419;
+ color: #fff;
+}
+
+#settings-menu > a img {
+ filter: invert(100);
+}
+
+.popover, .popover::before,
+#help-button span.top, #help-button span.bottom {
+ border-color: #5c6773;
+}
+
+#copy-path {
+ color: #fff;
+}
+#copy-path > img {
+ filter: invert(70%);
+}
+#copy-path:hover > img {
+ filter: invert(100%);
+}
+
+#settings-menu > a:hover, #settings-menu > a:focus,
+#help-button > button:hover, #help-button > button:focus {
+ border-color: #e0e0e0;
+}
+
+#theme-choices {
+ border-color: #5c6773;
+ background-color: #0f1419;
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top-color: #5c6773;
+}
+
+#theme-choices > button:hover, #theme-choices > button:focus {
+ background-color: rgba(110, 110, 110, 0.33);
+}
+
+.search-results .result-name span.alias {
+ color: #c5c5c5;
+}
+.search-results .result-name span.grey {
+ color: #999;
+}
+
+#source-sidebar > .title {
+ color: #fff;
+ border-bottom-color: #5c6773;
+}
+#source-sidebar div.files > a:hover, details.dir-entry summary:hover,
+#source-sidebar div.files > a:focus, details.dir-entry summary:focus {
+ background-color: #14191f;
+ color: #ffb44c;
+}
+#source-sidebar div.files > a.selected {
+ background-color: #14191f;
+ color: #ffb44c;
+}
+
+.scraped-example-list .scrape-help {
+ border-color: #aaa;
+ color: #eee;
+}
+.scraped-example-list .scrape-help:hover {
+ border-color: white;
+ color: white;
+}
+.more-examples-toggle summary, .more-examples-toggle .hide-more {
+ color: #999;
+}
+.scraped-example .example-wrap .rust span.highlight {
+ background: rgb(91, 59, 1);
+}
+.scraped-example .example-wrap .rust span.highlight.focus {
+ background: rgb(124, 75, 15);
+}
+.scraped-example:not(.expanded) .code-wrapper:before {
+ background: linear-gradient(to bottom, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
+}
+.scraped-example:not(.expanded) .code-wrapper:after {
+ background: linear-gradient(to top, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
+}
+.toggle-line-inner {
+ background: #999;
+}
+.toggle-line:hover .toggle-line-inner {
+ background: #c5c5c5;
+}
diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css
new file mode 100644
index 000000000..a550eb1c1
--- /dev/null
+++ b/src/librustdoc/html/static/css/themes/dark.css
@@ -0,0 +1,409 @@
+:root {
+ --main-background-color: #353535;
+ --main-color: #ddd;
+ --settings-input-color: #2196f3;
+ --sidebar-background-color: #505050;
+ --sidebar-background-color-hover: #676767;
+ --code-block-background-color: #2A2A2A;
+ --scrollbar-track-background-color: #717171;
+ --scrollbar-thumb-background-color: rgba(32, 34, 37, .6);
+ --scrollbar-color: rgba(32,34,37,.6) #5a5a5a;
+ --headings-border-bottom-color: #d2d2d2;
+}
+
+.slider {
+ background-color: #ccc;
+}
+.slider:before {
+ background-color: white;
+}
+input:focus + .slider {
+ box-shadow: 0 0 0 2px #0a84ff, 0 0 0 6px rgba(10, 132, 255, 0.3);
+}
+
+.in-band {
+ background-color: #353535;
+}
+
+.rust-logo {
+ filter: drop-shadow(1px 0 0px #fff)
+ drop-shadow(0 1px 0 #fff)
+ drop-shadow(-1px 0 0 #fff)
+ drop-shadow(0 -1px 0 #fff)
+}
+
+.sidebar .current,
+.sidebar a:hover {
+ background: #444;
+}
+
+.line-numbers span { color: #3B91E2; }
+.line-numbers .line-highlighted {
+ background-color: #0a042f !important;
+}
+
+.docblock table td, .docblock table th {
+ border-color: #ddd;
+}
+
+.search-results a:hover {
+ background-color: #777;
+}
+
+.search-results a:focus {
+ color: #eee !important;
+ background-color: #616161;
+}
+.search-results a:focus span { color: #eee !important; }
+a.result-trait:focus { background-color: #013191; }
+a.result-traitalias:focus { background-color: #013191; }
+a.result-mod:focus,
+a.result-externcrate:focus { background-color: #884719; }
+a.result-enum:focus { background-color: #194e9f; }
+a.result-struct:focus { background-color: #194e9f; }
+a.result-union:focus { background-color: #194e9f; }
+a.result-fn:focus,
+a.result-method:focus,
+a.result-tymethod:focus { background-color: #4950ed; }
+a.result-type:focus { background-color: #194e9f; }
+a.result-associatedtype:focus { background-color: #884719; }
+a.result-foreigntype:focus { background-color: #194e9f; }
+a.result-attr:focus,
+a.result-derive:focus,
+a.result-macro:focus { background-color: #217d1c; }
+a.result-constant:focus,
+a.result-static:focus { background-color: #884719; }
+a.result-primitive:focus { background-color: #194e9f; }
+a.result-keyword:focus { background-color: #884719; }
+
+.content .item-info::before { color: #ccc; }
+
+.content span.enum, .content a.enum, .block a.current.enum { color: #2dbfb8; }
+.content span.struct, .content a.struct, .block a.current.struct { color: #2dbfb8; }
+.content span.type, .content a.type, .block a.current.type { color: #2dbfb8; }
+.content span.associatedtype,
+.content a.associatedtype,
+.block a.current.associatedtype { color: #D2991D; }
+.content span.foreigntype, .content a.foreigntype, .block a.current.foreigntype { color: #2dbfb8; }
+.content span.attr, .content a.attr, .block a.current.attr,
+.content span.derive, .content a.derive, .block a.current.derive,
+.content span.macro, .content a.macro, .block a.current.macro { color: #09bd00; }
+.content span.union, .content a.union, .block a.current.union { color: #2dbfb8; }
+.content span.constant, .content a.constant, .block a.current.constant,
+.content span.static, .content a.static, .block a.current.static { color: #D2991D; }
+.content span.primitive, .content a.primitive, .block a.current.primitive { color: #2dbfb8; }
+.content span.externcrate,
+.content span.mod, .content a.mod, .block a.current.mod { color: #D2991D; }
+.content span.trait, .content a.trait, .block a.current.trait { color: #b78cf2; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #b78cf2; }
+.content span.fn, .content a.fn, .block a.current.fn,
+.content span.method, .content a.method, .block a.current.method,
+.content span.tymethod, .content a.tymethod, .block a.current.tymethod,
+.content .fnname{ color: #2BAB63; }
+.content span.keyword, .content a.keyword, .block a.current.keyword { color: #D2991D; }
+
+.sidebar a { color: #fdbf35; }
+.sidebar a.current.enum { color: #12ece2; }
+.sidebar a.current.struct { color: #12ece2; }
+.sidebar a.current.type { color: #12ece2; }
+.sidebar a.current.associatedtype { color: #fdbf35; }
+.sidebar a.current.foreigntype { color: #12ece2; }
+.sidebar a.current.attr,
+.sidebar a.current.derive,
+.sidebar a.current.macro { color: #0be900; }
+.sidebar a.current.union { color: #12ece2; }
+.sidebar a.current.constant,
+.sidebar a.current.static { color: #fdbf35; }
+.sidebar a.current.primitive { color: #12ece2; }
+.sidebar a.current.externcrate,
+.sidebar a.current.mod { color: #fdbf35; }
+.sidebar a.current.trait { color: #cca7ff; }
+.sidebar a.current.traitalias { color: #cca7ff; }
+.sidebar a.current.fn,
+.sidebar a.current.method,
+.sidebar a.current.tymethod { color: #32d479; }
+.sidebar a.current.keyword { color: #fdbf35; }
+
+pre.rust .comment { color: #8d8d8b; }
+pre.rust .doccomment { color: #8ca375; }
+
+nav.main .current {
+ border-top-color: #eee;
+ border-bottom-color: #eee;
+}
+nav.main .separator {
+ border-color: #eee;
+}
+
+a {
+ color: #D2991D;
+}
+
+body.source .example-wrap pre.rust a {
+ background: #333;
+}
+
+details.rustdoc-toggle > summary.hideme > span,
+details.rustdoc-toggle > summary::before {
+ color: #999;
+}
+
+details.rustdoc-toggle > summary::before {
+ filter: invert(100%);
+}
+
+#crate-search, .search-input {
+ color: #111;
+ background-color: #f0f0f0;
+ border-color: #f0f0f0;
+}
+
+#crate-search {
+ /* Without the `!important`, the border-color is ignored for `<select>`...
+ It cannot be in the group above because `.search-input` has a different border color on
+ hover. */
+ border-color: #f0f0f0 !important;
+}
+
+.search-input {
+ border-color: #e0e0e0;
+}
+
+.search-input:focus {
+ border-color: #008dfd;
+}
+
+.stab.empty-impl { background: #FFF5D6; border-color: #FFC600; color: #2f2f2f; }
+.stab.unstable { background: #FFF5D6; border-color: #FFC600; color: #2f2f2f; }
+.stab.deprecated { background: #ffc4c4; border-color: #db7b7b; color: #2f2f2f; }
+.stab.must_implement { background: #F3DFFF; border-color: #b07bdb; color: #2f2f2f; }
+.stab.portability { background: #F3DFFF; border-color: #b07bdb; color: #2f2f2f; }
+.stab.portability > code { background: none; }
+
+.rightside,
+.out-of-band {
+ color: grey;
+}
+
+.line-numbers :target { background-color: transparent; }
+
+/* Code highlighting */
+pre.rust .kw { color: #ab8ac1; }
+pre.rust .kw-2, pre.rust .prelude-ty { color: #769acb; }
+pre.rust .number, pre.rust .string { color: #83a300; }
+pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
+pre.rust .attribute, pre.rust .attribute .ident { color: #ee6868; }
+pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
+pre.rust .lifetime { color: #d97f26; }
+pre.rust .question-mark {
+ color: #ff9011;
+}
+
+.example-wrap > pre.line-number {
+ border-color: #4a4949;
+}
+
+a.test-arrow {
+ color: #dedede;
+ background-color: rgba(78, 139, 202, 0.2);
+}
+
+a.test-arrow:hover{
+ background-color: #4e8bca;
+}
+
+.toggle-label,
+.code-attribute {
+ color: #999;
+}
+
+:target {
+ background-color: #494a3d;
+ border-right: 3px solid #bb7410;
+}
+
+pre.compile_fail {
+ border-left: 2px solid rgba(255,0,0,.8);
+}
+
+pre.compile_fail:hover, .information:hover + pre.compile_fail {
+ border-left: 2px solid #f00;
+}
+
+pre.should_panic {
+ border-left: 2px solid rgba(255,0,0,.8);
+}
+
+pre.should_panic:hover, .information:hover + pre.should_panic {
+ border-left: 2px solid #f00;
+}
+
+pre.ignore {
+ border-left: 2px solid rgba(255,142,0,.6);
+}
+
+pre.ignore:hover, .information:hover + pre.ignore {
+ border-left: 2px solid #ff9200;
+}
+
+.tooltip.compile_fail {
+ color: rgba(255,0,0,.8);
+}
+
+.information > .compile_fail:hover {
+ color: #f00;
+}
+
+.tooltip.should_panic {
+ color: rgba(255,0,0,.8);
+}
+
+.information > .should_panic:hover {
+ color: #f00;
+}
+
+.tooltip.ignore {
+ color: rgba(255,142,0,.6);
+}
+
+.information > .ignore:hover {
+ color: #ff9200;
+}
+
+.search-failed a {
+ color: #0089ff;
+}
+
+.tooltip::after {
+ background-color: #000;
+ color: #fff;
+ border-color: #000;
+}
+
+.tooltip::before {
+ border-color: transparent black transparent transparent;
+}
+
+.notable-traits-tooltiptext {
+ background-color: #111;
+ border-color: #777;
+}
+
+.notable-traits-tooltiptext .notable {
+ border-bottom-color: #d2d2d2;
+}
+
+#titles > button:not(.selected) {
+ background-color: #252525;
+ border-top-color: #252525;
+}
+
+#titles > button:hover, #titles > button.selected {
+ border-top-color: #0089ff;
+ background-color: #353535;
+}
+
+#titles > button > div.count {
+ color: #888;
+}
+
+@media (max-width: 700px) {
+ .sidebar-elems {
+ border-right-color: #000;
+ }
+}
+
+kbd {
+ color: #000;
+ background-color: #fafbfc;
+ border-color: #d1d5da;
+ border-bottom-color: #c6cbd1;
+ box-shadow: inset 0 -1px 0 #c6cbd1;
+}
+
+#settings-menu > a, #help-button > button {
+ border-color: #e0e0e0;
+ background: #f0f0f0;
+ color: #000;
+}
+
+#settings-menu > a:hover, #settings-menu > a:focus,
+#help-button > button:hover, #help-button > button:focus {
+ border-color: #ffb900;
+}
+
+.popover, .popover::before,
+#help-button span.top, #help-button span.bottom {
+ border-color: #d2d2d2;
+}
+
+#copy-path {
+ color: #999;
+}
+#copy-path > img {
+ filter: invert(50%);
+}
+#copy-path:hover > img {
+ filter: invert(65%);
+}
+
+#theme-choices {
+ border-color: #e0e0e0;
+ background-color: #353535;
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
+}
+
+#theme-choices > button:hover, #theme-choices > button:focus {
+ background-color: #4e4e4e;
+}
+
+.search-results .result-name span.alias {
+ color: #fff;
+}
+.search-results .result-name span.grey {
+ color: #ccc;
+}
+
+#source-sidebar > .title {
+ border-bottom-color: #ccc;
+}
+#source-sidebar div.files > a:hover, details.dir-entry summary:hover,
+#source-sidebar div.files > a:focus, details.dir-entry summary:focus {
+ background-color: #444;
+}
+#source-sidebar div.files > a.selected {
+ background-color: #333;
+}
+
+.scraped-example-list .scrape-help {
+ border-color: #aaa;
+ color: #eee;
+}
+.scraped-example-list .scrape-help:hover {
+ border-color: white;
+ color: white;
+}
+.more-examples-toggle summary, .more-examples-toggle .hide-more {
+ color: #999;
+}
+.scraped-example .example-wrap .rust span.highlight {
+ background: rgb(91, 59, 1);
+}
+.scraped-example .example-wrap .rust span.highlight.focus {
+ background: rgb(124, 75, 15);
+}
+.scraped-example:not(.expanded) .code-wrapper:before {
+ background: linear-gradient(to bottom, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
+}
+.scraped-example:not(.expanded) .code-wrapper:after {
+ background: linear-gradient(to top, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
+}
+.toggle-line-inner {
+ background: #999;
+}
+.toggle-line:hover .toggle-line-inner {
+ background: #c5c5c5;
+}
diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css
new file mode 100644
index 000000000..b751acff1
--- /dev/null
+++ b/src/librustdoc/html/static/css/themes/light.css
@@ -0,0 +1,395 @@
+:root {
+ --main-background-color: white;
+ --main-color: black;
+ --settings-input-color: #2196f3;
+ --sidebar-background-color: #F5F5F5;
+ --sidebar-background-color-hover: #E0E0E0;
+ --code-block-background-color: #F5F5F5;
+ --scrollbar-track-background-color: #dcdcdc;
+ --scrollbar-thumb-background-color: rgba(36, 37, 39, 0.6);
+ --scrollbar-color: rgba(36, 37, 39, 0.6) #d9d9d9;
+ --headings-border-bottom-color: #ddd;
+}
+
+.slider {
+ background-color: #ccc;
+}
+.slider:before {
+ background-color: white;
+}
+input:focus + .slider {
+ box-shadow: 0 0 0 2px #0a84ff, 0 0 0 6px rgba(10, 132, 255, 0.3);
+}
+
+.in-band {
+ background-color: white;
+}
+
+.rust-logo {
+ /* This rule exists to force other themes to explicitly style the logo.
+ * Rustdoc has a custom linter for this purpose.
+ */
+}
+
+.sidebar .current,
+.sidebar a:hover {
+ background-color: #fff;
+}
+
+.line-numbers span { color: #c67e2d; }
+.line-numbers .line-highlighted {
+ background-color: #FDFFD3 !important;
+}
+
+.docblock table td, .docblock table th {
+ border-color: #ddd;
+}
+
+.search-results a:hover {
+ background-color: #ddd;
+}
+
+.search-results a:focus {
+ color: #000 !important;
+ background-color: #ccc;
+}
+.search-results a:focus span { color: #000 !important; }
+a.result-trait:focus { background-color: #c7b6ff; }
+a.result-traitalias:focus { background-color: #c7b6ff; }
+a.result-mod:focus,
+a.result-externcrate:focus { background-color: #afc6e4; }
+a.result-enum:focus { background-color: #e7b1a0; }
+a.result-struct:focus { background-color: #e7b1a0; }
+a.result-union:focus { background-color: #e7b1a0; }
+a.result-fn:focus,
+a.result-method:focus,
+a.result-tymethod:focus { background-color: #c6afb3; }
+a.result-type:focus { background-color: #e7b1a0; }
+a.result-associatedtype:focus { background-color: #afc6e4; }
+a.result-foreigntype:focus { background-color: #e7b1a0; }
+a.result-attr:focus,
+a.result-derive:focus,
+a.result-macro:focus { background-color: #8ce488; }
+a.result-constant:focus,
+a.result-static:focus { background-color: #afc6e4; }
+a.result-primitive:focus { background-color: #e7b1a0; }
+a.result-keyword:focus { background-color: #afc6e4; }
+
+.content .item-info::before { color: #ccc; }
+
+.content span.enum, .content a.enum, .block a.current.enum { color: #AD378A; }
+.content span.struct, .content a.struct, .block a.current.struct { color: #AD378A; }
+.content span.type, .content a.type, .block a.current.type { color: #AD378A; }
+.content span.foreigntype, .content a.foreigntype, .block a.current.foreigntype { color: #3873AD; }
+.content span.associatedtype,
+.content a.associatedtype,
+.block a.current.associatedtype { color: #3873AD; }
+.content span.attr, .content a.attr, .block a.current.attr,
+.content span.derive, .content a.derive, .block a.current.derive,
+.content span.macro, .content a.macro, .block a.current.macro { color: #068000; }
+.content span.union, .content a.union, .block a.current.union { color: #AD378A; }
+.content span.constant, .content a.constant, .block a.current.constant,
+.content span.static, .content a.static, .block a.current.static { color: #3873AD; }
+.content span.primitive, .content a.primitive, .block a.current.primitive { color: #AD378A; }
+.content span.externcrate,
+.content span.mod, .content a.mod, .block a.current.mod { color: #3873AD; }
+.content span.trait, .content a.trait, .block a.current.trait { color: #6E4FC9; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #5137AD; }
+.content span.fn, .content a.fn, .block a.current.fn,
+.content span.method, .content a.method, .block a.current.method,
+.content span.tymethod, .content a.tymethod, .block a.current.tymethod,
+.content .fnname { color: #AD7C37; }
+.content span.keyword, .content a.keyword, .block a.current.keyword { color: #3873AD; }
+
+.sidebar a { color: #356da4; }
+.sidebar a.current.enum { color: #a63283; }
+.sidebar a.current.struct { color: #a63283; }
+.sidebar a.current.type { color: #a63283; }
+.sidebar a.current.associatedtype { color: #356da4; }
+.sidebar a.current.foreigntype { color: #356da4; }
+.sidebar a.current.attr,
+.sidebar a.current.derive,
+.sidebar a.current.macro { color: #067901; }
+.sidebar a.current.union { color: #a63283; }
+.sidebar a.current.constant,
+.sidebar a.current.static { color: #356da4; }
+.sidebar a.current.primitive { color: #a63283; }
+.sidebar a.current.externcrate,
+.sidebar a.current.mod { color: #356da4; }
+.sidebar a.current.trait { color: #6849c3; }
+.sidebar a.current.traitalias { color: #4b349e; }
+.sidebar a.current.fn,
+.sidebar a.current.method,
+.sidebar a.current.tymethod { color: #a67736; }
+.sidebar a.current.keyword { color: #356da4; }
+
+nav.main .current {
+ border-top-color: #000;
+ border-bottom-color: #000;
+}
+nav.main .separator {
+ border: 1px solid #000;
+}
+
+a {
+ color: #3873AD;
+}
+
+body.source .example-wrap pre.rust a {
+ background: #eee;
+}
+
+details.rustdoc-toggle > summary.hideme > span,
+details.rustdoc-toggle > summary::before {
+ color: #999;
+}
+
+#crate-search, .search-input {
+ background-color: white;
+ border-color: #e0e0e0;
+}
+
+#crate-search {
+ /* Without the `!important`, the border-color is ignored for `<select>`...
+ It cannot be in the group above because `.search-input` has a different border color on
+ hover. */
+ border-color: #e0e0e0 !important;
+}
+
+.search-input:focus {
+ border-color: #66afe9;
+}
+
+.stab.empty-impl { background: #FFF5D6; border-color: #FFC600; }
+.stab.unstable { background: #FFF5D6; border-color: #FFC600; }
+.stab.deprecated { background: #ffc4c4; border-color: #db7b7b; }
+.stab.must_implement { background: #F3DFFF; border-color: #b07bdb; }
+.stab.portability { background: #F3DFFF; border-color: #b07bdb; }
+.stab.portability > code { background: none; }
+
+.rightside,
+.out-of-band {
+ color: grey;
+}
+
+.line-numbers :target { background-color: transparent; }
+
+/* Code highlighting */
+pre.rust .kw { color: #8959A8; }
+pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; }
+pre.rust .number, pre.rust .string { color: #718C00; }
+pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
+pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; }
+pre.rust .comment { color: #8E908C; }
+pre.rust .doccomment { color: #4D4D4C; }
+pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
+pre.rust .lifetime { color: #B76514; }
+pre.rust .question-mark {
+ color: #ff9011;
+}
+
+.example-wrap > pre.line-number {
+ border-color: #c7c7c7;
+}
+
+a.test-arrow {
+ color: #f5f5f5;
+ background-color: rgba(78, 139, 202, 0.2);
+}
+
+a.test-arrow:hover{
+ background-color: #4e8bca;
+}
+
+.toggle-label,
+.code-attribute {
+ color: #999;
+}
+
+:target {
+ background: #FDFFD3;
+ border-right: 3px solid #AD7C37;
+}
+
+pre.compile_fail {
+ border-left: 2px solid rgba(255,0,0,.5);
+}
+
+pre.compile_fail:hover, .information:hover + pre.compile_fail {
+ border-left: 2px solid #f00;
+}
+
+pre.should_panic {
+ border-left: 2px solid rgba(255,0,0,.5);
+}
+
+pre.should_panic:hover, .information:hover + pre.should_panic {
+ border-left: 2px solid #f00;
+}
+
+pre.ignore {
+ border-left: 2px solid rgba(255,142,0,.6);
+}
+
+pre.ignore:hover, .information:hover + pre.ignore {
+ border-left: 2px solid #ff9200;
+}
+
+.tooltip.compile_fail {
+ color: rgba(255,0,0,.5);
+}
+
+.information > .compile_fail:hover {
+ color: #f00;
+}
+
+.tooltip.should_panic {
+ color: rgba(255,0,0,.5);
+}
+
+.information > .should_panic:hover {
+ color: #f00;
+}
+
+.tooltip.ignore {
+ color: rgba(255,142,0,.6);
+}
+
+.information > .ignore:hover {
+ color: #ff9200;
+}
+
+.search-failed a {
+ color: #3873AD;
+}
+
+.tooltip::after {
+ background-color: #000;
+ color: #fff;
+}
+
+.tooltip::before {
+ border-color: transparent black transparent transparent;
+}
+
+.notable-traits-tooltiptext {
+ background-color: #eee;
+ border-color: #999;
+}
+
+.notable-traits-tooltiptext .notable {
+ border-bottom-color: #DDDDDD;
+}
+
+#titles > button:not(.selected) {
+ background-color: #e6e6e6;
+ border-top-color: #e6e6e6;
+}
+
+#titles > button:hover, #titles > button.selected {
+ background-color: #ffffff;
+ border-top-color: #0089ff;
+}
+
+#titles > button > div.count {
+ color: #888;
+}
+
+@media (max-width: 700px) {
+ .sidebar-elems {
+ border-right-color: #000;
+ }
+}
+
+kbd {
+ color: #000;
+ background-color: #fafbfc;
+ border-color: #d1d5da;
+ border-bottom-color: #c6cbd1;
+ box-shadow: inset 0 -1px 0 #c6cbd1;
+}
+
+#settings-menu > a, #help-button > button {
+ border-color: #e0e0e0;
+ background-color: #fff;
+}
+
+#settings-menu > a:hover, #settings-menu > a:focus,
+#help-button > button:hover, #help-button > button:focus {
+ border-color: #717171;
+}
+
+.popover, .popover::before,
+#help-button span.top, #help-button span.bottom {
+ border-color: #DDDDDD;
+}
+
+#copy-path {
+ color: #999;
+}
+#copy-path > img {
+ filter: invert(50%);
+}
+#copy-path:hover > img {
+ filter: invert(35%);
+}
+
+#theme-choices {
+ border-color: #ccc;
+ background-color: #fff;
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
+}
+
+#theme-choices > button:hover, #theme-choices > button:focus {
+ background-color: #eee;
+}
+
+.search-results .result-name span.alias {
+ color: #000;
+}
+.search-results .result-name span.grey {
+ color: #999;
+}
+
+#source-sidebar > .title {
+ border-bottom-color: #ccc;
+}
+#source-sidebar div.files > a:hover, details.dir-entry summary:hover,
+#source-sidebar div.files > a:focus, details.dir-entry summary:focus {
+ background-color: #E0E0E0;
+}
+#source-sidebar div.files > a.selected {
+ background-color: #fff;
+}
+.scraped-example-list .scrape-help {
+ border-color: #555;
+ color: #333;
+}
+.scraped-example-list .scrape-help:hover {
+ border-color: black;
+ color: black;
+}
+.more-examples-toggle summary, .more-examples-toggle .hide-more {
+ color: #999;
+}
+.scraped-example .example-wrap .rust span.highlight {
+ background: #fcffd6;
+}
+.scraped-example .example-wrap .rust span.highlight.focus {
+ background: #f6fdb0;
+}
+.scraped-example:not(.expanded) .code-wrapper:before {
+ background: linear-gradient(to bottom, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
+}
+.scraped-example:not(.expanded) .code-wrapper:after {
+ background: linear-gradient(to top, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
+}
+.toggle-line-inner {
+ background: #ccc;
+}
+.toggle-line:hover .toggle-line-inner {
+ background: #999;
+}
diff --git a/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt b/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt
new file mode 100644
index 000000000..ff9afab06
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/FiraSans-LICENSE.txt
@@ -0,0 +1,94 @@
+Digitized data copyright (c) 2012-2015, The Mozilla Foundation and Telefonica S.A.
+with Reserved Font Name < Fira >,
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/librustdoc/html/static/fonts/FiraSans-Medium.woff2 b/src/librustdoc/html/static/fonts/FiraSans-Medium.woff2
new file mode 100644
index 000000000..7a1e5fc54
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/FiraSans-Medium.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/FiraSans-Regular.woff2 b/src/librustdoc/html/static/fonts/FiraSans-Regular.woff2
new file mode 100644
index 000000000..e766e06cc
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/FiraSans-Regular.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt b/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt
new file mode 100644
index 000000000..0bf46682b
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/NanumBarunGothic-LICENSE.txt
@@ -0,0 +1,99 @@
+Copyright (c) 2010, NAVER Corporation (https://www.navercorp.com/),
+
+with Reserved Font Name Nanum, Naver Nanum, NanumGothic, Naver NanumGothic,
+NanumMyeongjo, Naver NanumMyeongjo, NanumBrush, Naver NanumBrush, NanumPen,
+Naver NanumPen, Naver NanumGothicEco, NanumGothicEco, Naver NanumMyeongjoEco,
+NanumMyeongjoEco, Naver NanumGothicLight, NanumGothicLight, NanumBarunGothic,
+Naver NanumBarunGothic, NanumSquareRound, NanumBarunPen, MaruBuri
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/librustdoc/html/static/fonts/NanumBarunGothic.ttf.woff2 b/src/librustdoc/html/static/fonts/NanumBarunGothic.ttf.woff2
new file mode 100644
index 000000000..1866ad4bc
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/NanumBarunGothic.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceCodePro-It.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceCodePro-It.ttf.woff2
new file mode 100644
index 000000000..462c34efc
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceCodePro-It.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt b/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt
new file mode 100644
index 000000000..07542572e
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceCodePro-LICENSE.txt
@@ -0,0 +1,93 @@
+Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe Systems Incorporated in the United States and/or other countries.
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+
+This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/librustdoc/html/static/fonts/SourceCodePro-Regular.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceCodePro-Regular.ttf.woff2
new file mode 100644
index 000000000..10b558e0b
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceCodePro-Regular.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceCodePro-Semibold.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceCodePro-Semibold.ttf.woff2
new file mode 100644
index 000000000..5ec64eef0
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceCodePro-Semibold.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2
new file mode 100644
index 000000000..db57d2145
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-Bold.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2
new file mode 100644
index 000000000..1cbc021a3
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-It.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
new file mode 100644
index 000000000..68ea18924
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-LICENSE.md
@@ -0,0 +1,93 @@
+Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+
+This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2 b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2
new file mode 100644
index 000000000..2db73fe2b
--- /dev/null
+++ b/src/librustdoc/html/static/fonts/SourceSerif4-Regular.ttf.woff2
Binary files differ
diff --git a/src/librustdoc/html/static/images/clipboard.svg b/src/librustdoc/html/static/images/clipboard.svg
new file mode 100644
index 000000000..8adbd9963
--- /dev/null
+++ b/src/librustdoc/html/static/images/clipboard.svg
@@ -0,0 +1 @@
+<svg width="24" height="25" viewBox="0 0 24 25" xmlns="http://www.w3.org/2000/svg" aria-label="Copy to clipboard"><path d="M18 20h2v3c0 1-1 2-2 2H2c-.998 0-2-1-2-2V5c0-.911.755-1.667 1.667-1.667h5A3.323 3.323 0 0110 0a3.323 3.323 0 013.333 3.333h5C19.245 3.333 20 4.09 20 5v8.333h-2V9H2v14h16v-3zM3 7h14c0-.911-.793-1.667-1.75-1.667H13.5c-.957 0-1.75-.755-1.75-1.666C11.75 2.755 10.957 2 10 2s-1.75.755-1.75 1.667c0 .911-.793 1.666-1.75 1.666H4.75C3.793 5.333 3 6.09 3 7z"/><path d="M4 19h6v2H4zM12 11H4v2h8zM4 17h4v-2H4zM15 15v-3l-4.5 4.5L15 21v-3l8.027-.032L23 15z"/></svg>
diff --git a/src/librustdoc/html/static/images/down-arrow.svg b/src/librustdoc/html/static/images/down-arrow.svg
new file mode 100644
index 000000000..35437e77a
--- /dev/null
+++ b/src/librustdoc/html/static/images/down-arrow.svg
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" id="Layer_1" width="128" height="128" enable-background="new 0 0 128 128" version="1.1" viewBox="-30 -20 176 176" xml:space="preserve"><g><line x1="111" x2="64" y1="40.5" y2="87.499" fill="none" stroke="#2F3435" stroke-linecap="square" stroke-miterlimit="10" stroke-width="12"/><line x1="64" x2="17" y1="87.499" y2="40.5" fill="none" stroke="#2F3435" stroke-linecap="square" stroke-miterlimit="10" stroke-width="12"/></g></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/favicon-16x16.png b/src/librustdoc/html/static/images/favicon-16x16.png
new file mode 100644
index 000000000..ea4b45cae
--- /dev/null
+++ b/src/librustdoc/html/static/images/favicon-16x16.png
Binary files differ
diff --git a/src/librustdoc/html/static/images/favicon-32x32.png b/src/librustdoc/html/static/images/favicon-32x32.png
new file mode 100644
index 000000000..69b8613ce
--- /dev/null
+++ b/src/librustdoc/html/static/images/favicon-32x32.png
Binary files differ
diff --git a/src/librustdoc/html/static/images/favicon.svg b/src/librustdoc/html/static/images/favicon.svg
new file mode 100644
index 000000000..8b34b5119
--- /dev/null
+++ b/src/librustdoc/html/static/images/favicon.svg
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg width="100%" height="100%" viewBox="0 0 32 32" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;">
+<defs>
+ <style type="text/css"><![CDATA[
+ #logo {
+ fill-rule: nonzero;
+ }
+ #logo-teeth {
+ stroke: #000000;
+ stroke-width: 0.92px;
+ }
+ @media (prefers-color-scheme: dark) {
+ #logo {
+ fill: #FFFFFF;
+ fill-rule: nonzero;
+ }
+ #logo-teeth {
+ fill: #FFFFFF;
+ stroke: #FFFFFF;
+ stroke-width: 0.92px;
+ }
+ }
+ ]]></style>
+</defs>
+<path id="logo" d="M15.993,1.54c-7.972,0 -14.461,6.492 -14.461,14.462c0,7.969 6.492,14.461 14.461,14.461c7.97,0 14.462,-6.492 14.462,-14.461c0,-7.97 -6.492,-14.462 -14.462,-14.462Zm-0.021,1.285c0.511,0.013 0.924,0.439 0.924,0.951c0,0.522 -0.43,0.952 -0.952,0.952c-0.522,0 -0.951,-0.43 -0.951,-0.952c0,0 0,0 0,0c0,-0.522 0.429,-0.952 0.951,-0.952c0.01,0 0.019,0.001 0.028,0.001Zm2.178,1.566c3.379,0.633 6.313,2.723 8.016,5.709l-1.123,2.533c-0.193,0.438 0.006,0.952 0.44,1.147l2.16,0.958c0.067,0.675 0.076,1.355 0.025,2.031l-1.202,0c-0.12,0 -0.169,0.08 -0.169,0.196l0,0.551c0,1.297 -0.731,1.582 -1.373,1.652c-0.612,0.07 -1.288,-0.257 -1.374,-0.63c-0.361,-2.029 -0.961,-2.46 -1.909,-3.21c1.178,-0.746 2.401,-1.85 2.401,-3.325c0,-1.594 -1.092,-2.597 -1.835,-3.09c-1.046,-0.688 -2.203,-0.826 -2.515,-0.826l-12.421,0c1.717,-1.918 4.02,-3.218 6.55,-3.696l1.466,1.536c0.33,0.346 0.878,0.361 1.223,0.028l1.64,-1.564Zm-13.522,7.043c0.511,0.015 0.924,0.44 0.924,0.951c0,0.522 -0.43,0.952 -0.952,0.952c-0.522,0 -0.951,-0.43 -0.951,-0.952c0,0 0,0 0,0c0,-0.522 0.429,-0.951 0.951,-0.951c0.009,0 0.019,0 0.028,0Zm22.685,0.043c0.511,0.015 0.924,0.44 0.924,0.951c0,0.522 -0.43,0.952 -0.952,0.952c-0.522,0 -0.951,-0.43 -0.951,-0.952c0,0 0,0 0,0c0,-0.522 0.429,-0.952 0.951,-0.952c0.01,0 0.019,0 0.028,0.001Zm-20.892,0.153l1.658,0l0,7.477l-3.347,0c-0.414,-1.452 -0.542,-2.97 -0.38,-4.47l2.05,-0.912c0.438,-0.195 0.637,-0.706 0.441,-1.144l-0.422,-0.951Zm6.92,0.079l3.949,0c0.205,0 1.441,0.236 1.441,1.163c0,0.768 -0.948,1.043 -1.728,1.043l-3.665,0l0.003,-2.206Zm0,5.373l3.026,0c0.275,0 1.477,0.079 1.86,1.615c0.119,0.471 0.385,2.007 0.566,2.499c0.18,0.551 0.911,1.652 1.691,1.652l4.938,0c-0.331,0.444 -0.693,0.863 -1.083,1.255l-2.01,-0.432c-0.468,-0.101 -0.93,0.199 -1.031,0.667l-0.477,2.228c-3.104,1.406 -6.672,1.389 -9.762,-0.046l-0.478,-2.228c-0.101,-0.468 -0.56,-0.767 -1.028,-0.667l-1.967,0.423c-0.365,-0.377 -0.704,-0.778 -1.016,-1.2l9.567,0c0.107,0 0.181,-0.018 0.181,-0.119l0,-3.384c0,-0.097 -0.074,-0.119 -0.181,-0.119l-2.799,0l0.003,-2.144Zm-4.415,7.749c0.512,0.015 0.924,0.44 0.924,0.951c0,0.522 -0.429,0.952 -0.951,0.952c-0.522,0 -0.952,-0.43 -0.952,-0.952c0,0 0,0 0,0c0,-0.522 0.43,-0.952 0.952,-0.952c0.009,0 0.018,0.001 0.027,0.001Zm14.089,0.043c0.511,0.015 0.924,0.439 0.923,0.951c0,0.522 -0.429,0.952 -0.951,0.952c-0.522,0 -0.951,-0.43 -0.951,-0.952c0,0 0,0 0,0c0,-0.522 0.429,-0.952 0.951,-0.952c0.009,0 0.018,0 0.028,0.001Z"/><path id="logo-teeth" d="M29.647,16.002c0,7.49 -6.163,13.653 -13.654,13.653c-7.49,0 -13.654,-6.163 -13.654,-13.653c0,-7.491 6.164,-13.654 13.654,-13.654c7.491,0 13.654,6.163 13.654,13.654Zm-0.257,-1.319l2.13,1.319l-2.13,1.318l1.83,1.71l-2.344,0.878l1.463,2.035l-2.475,0.404l1.04,2.282l-2.506,-0.089l0.575,2.442l-2.441,-0.576l0.089,2.506l-2.283,-1.04l-0.403,2.475l-2.035,-1.462l-0.878,2.343l-1.71,-1.829l-1.319,2.129l-1.318,-2.129l-1.71,1.829l-0.878,-2.343l-2.035,1.462l-0.404,-2.475l-2.282,1.04l0.089,-2.506l-2.442,0.576l0.575,-2.442l-2.505,0.089l1.04,-2.282l-2.475,-0.404l1.462,-2.035l-2.343,-0.878l1.829,-1.71l-2.129,-1.318l2.129,-1.319l-1.829,-1.71l2.343,-0.878l-1.462,-2.035l2.475,-0.404l-1.04,-2.282l2.505,0.089l-0.575,-2.441l2.442,0.575l-0.089,-2.506l2.282,1.04l0.404,-2.475l2.035,1.463l0.878,-2.344l1.71,1.83l1.318,-2.13l1.319,2.13l1.71,-1.83l0.878,2.344l2.035,-1.463l0.403,2.475l2.283,-1.04l-0.089,2.506l2.441,-0.575l-0.575,2.441l2.506,-0.089l-1.04,2.282l2.475,0.404l-1.463,2.035l2.344,0.878l-1.83,1.71Z"/></svg>
diff --git a/src/librustdoc/html/static/images/rust-logo.svg b/src/librustdoc/html/static/images/rust-logo.svg
new file mode 100644
index 000000000..62424d8ff
--- /dev/null
+++ b/src/librustdoc/html/static/images/rust-logo.svg
@@ -0,0 +1,61 @@
+<svg version="1.1" height="106" width="106" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="logo" transform="translate(53, 53)">
+ <path id="r" transform="translate(0.5, 0.5)" stroke="black" stroke-width="1" stroke-linejoin="round" d="
+ M -9,-15 H 4 C 12,-15 12,-7 4,-7 H -9 Z
+ M -40,22 H 0 V 11 H -9 V 3 H 1 C 12,3 6,22 15,22 H 40
+ V 3 H 34 V 5 C 34,13 25,12 24,7 C 23,2 19,-2 18,-2 C 33,-10 24,-26 12,-26 H -35
+ V -15 H -25 V 11 H -40 Z" />
+ <g id="gear" mask="url(#holes)">
+ <circle r="43" fill="none" stroke="black" stroke-width="9" />
+ <g id="cogs">
+ <polygon id="cog" stroke="black" stroke-width="3" stroke-linejoin="round" points="46,3 51,0 46,-3" />
+ <use xlink:href="#cog" transform="rotate(11.25)" />
+ <use xlink:href="#cog" transform="rotate(22.50)" />
+ <use xlink:href="#cog" transform="rotate(33.75)" />
+ <use xlink:href="#cog" transform="rotate(45.00)" />
+ <use xlink:href="#cog" transform="rotate(56.25)" />
+ <use xlink:href="#cog" transform="rotate(67.50)" />
+ <use xlink:href="#cog" transform="rotate(78.75)" />
+ <use xlink:href="#cog" transform="rotate(90.00)" />
+ <use xlink:href="#cog" transform="rotate(101.25)" />
+ <use xlink:href="#cog" transform="rotate(112.50)" />
+ <use xlink:href="#cog" transform="rotate(123.75)" />
+ <use xlink:href="#cog" transform="rotate(135.00)" />
+ <use xlink:href="#cog" transform="rotate(146.25)" />
+ <use xlink:href="#cog" transform="rotate(157.50)" />
+ <use xlink:href="#cog" transform="rotate(168.75)" />
+ <use xlink:href="#cog" transform="rotate(180.00)" />
+ <use xlink:href="#cog" transform="rotate(191.25)" />
+ <use xlink:href="#cog" transform="rotate(202.50)" />
+ <use xlink:href="#cog" transform="rotate(213.75)" />
+ <use xlink:href="#cog" transform="rotate(225.00)" />
+ <use xlink:href="#cog" transform="rotate(236.25)" />
+ <use xlink:href="#cog" transform="rotate(247.50)" />
+ <use xlink:href="#cog" transform="rotate(258.75)" />
+ <use xlink:href="#cog" transform="rotate(270.00)" />
+ <use xlink:href="#cog" transform="rotate(281.25)" />
+ <use xlink:href="#cog" transform="rotate(292.50)" />
+ <use xlink:href="#cog" transform="rotate(303.75)" />
+ <use xlink:href="#cog" transform="rotate(315.00)" />
+ <use xlink:href="#cog" transform="rotate(326.25)" />
+ <use xlink:href="#cog" transform="rotate(337.50)" />
+ <use xlink:href="#cog" transform="rotate(348.75)" />
+ </g>
+ <g id="mounts">
+ <polygon id="mount" stroke="black" stroke-width="6" stroke-linejoin="round" points="-7,-42 0,-35 7,-42" />
+ <use xlink:href="#mount" transform="rotate(72)" />
+ <use xlink:href="#mount" transform="rotate(144)" />
+ <use xlink:href="#mount" transform="rotate(216)" />
+ <use xlink:href="#mount" transform="rotate(288)" />
+ </g>
+ </g>
+ <mask id="holes">
+ <rect x="-60" y="-60" width="120" height="120" fill="white"/>
+ <circle id="hole" cy="-40" r="3" />
+ <use xlink:href="#hole" transform="rotate(72)" />
+ <use xlink:href="#hole" transform="rotate(144)" />
+ <use xlink:href="#hole" transform="rotate(216)" />
+ <use xlink:href="#hole" transform="rotate(288)" />
+ </mask>
+</g>
+</svg>
diff --git a/src/librustdoc/html/static/images/toggle-minus.svg b/src/librustdoc/html/static/images/toggle-minus.svg
new file mode 100644
index 000000000..73154788a
--- /dev/null
+++ b/src/librustdoc/html/static/images/toggle-minus.svg
@@ -0,0 +1 @@
+<svg width="17" height="17" shape-rendering="crispEdges" stroke="#000" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7"/></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/toggle-plus.svg b/src/librustdoc/html/static/images/toggle-plus.svg
new file mode 100644
index 000000000..08b17033e
--- /dev/null
+++ b/src/librustdoc/html/static/images/toggle-plus.svg
@@ -0,0 +1 @@
+<svg width="17" height="17" shape-rendering="crispEdges" stroke="#000" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M5 2.5H2.5v12H5m7-12h2.5v12H12M5 8.5h7M8.5 12V8.625v0V5"/></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/images/wheel.svg b/src/librustdoc/html/static/images/wheel.svg
new file mode 100644
index 000000000..01da3b24c
--- /dev/null
+++ b/src/librustdoc/html/static/images/wheel.svg
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" id="Capa_1" width="27.434" height="29.5" enable-background="new 0 0 27.434 29.5" version="1.1" viewBox="0 0 27.434 29.5" xml:space="preserve"><g><path d="M27.315,18.389c-0.165-0.604-0.509-1.113-0.981-1.459c-0.042-0.144-0.083-0.429-0.015-0.761l0.037-0.177v-0.182V14.8 c0-1.247-0.006-1.277-0.048-1.472c-0.076-0.354-0.035-0.653,0.007-0.803c0.477-0.346,0.828-0.861,0.996-1.476 c0.261-0.956,0.076-2.091-0.508-3.114l-0.591-1.032c-0.746-1.307-1.965-2.119-3.182-2.119c-0.378,0-0.75,0.081-1.085,0.235 c-0.198-0.025-0.554-0.15-0.855-0.389l-0.103-0.082l-0.114-0.065l-1.857-1.067L18.92,3.36l-0.105-0.044 c-0.376-0.154-0.658-0.41-0.768-0.556C17.918,1.172,16.349,0,14.296,0H13.14c-2.043,0-3.608,1.154-3.749,2.721 C9.277,2.862,8.999,3.104,8.633,3.25l-0.1,0.039L8.439,3.341L6.495,4.406L6.363,4.479L6.245,4.573 C5.936,4.82,5.596,4.944,5.416,4.977c-0.314-0.139-0.66-0.21-1.011-0.21c-1.198,0-2.411,0.819-3.165,2.139L0.65,7.938 c-0.412,0.72-0.642,1.521-0.644,2.258c-0.003,0.952,0.362,1.756,1.013,2.256c0.034,0.155,0.061,0.448-0.016,0.786 c-0.038,0.168-0.062,0.28-0.062,1.563c0,1.148,0,1.148,0.015,1.262l0.009,0.073l0.017,0.073c0.073,0.346,0.045,0.643,0.011,0.802 C0.348,17.512-0.01,18.314,0,19.268c0.008,0.729,0.238,1.523,0.648,2.242l0.589,1.031c0.761,1.331,1.967,2.159,3.15,2.159 c0.324,0,0.645-0.064,0.938-0.187c0.167,0.038,0.492,0.156,0.813,0.416l0.11,0.088l0.124,0.07l2.045,1.156l0.102,0.057l0.107,0.043 c0.364,0.147,0.646,0.381,0.766,0.521c0.164,1.52,1.719,2.634,3.745,2.634h1.155c2.037,0,3.598-1.134,3.747-2.675 c0.117-0.145,0.401-0.393,0.774-0.549l0.111-0.047l0.105-0.062l1.96-1.159l0.105-0.062l0.097-0.075 c0.309-0.246,0.651-0.371,0.832-0.402c0.313,0.138,0.662,0.212,1.016,0.212c1.199,0,2.412-0.82,3.166-2.139l0.59-1.032 C27.387,20.48,27.575,19.342,27.315,18.389z M25.274,20.635l-0.59,1.032c-0.438,0.765-1.104,1.251-1.639,1.251 c-0.133,0-0.258-0.029-0.369-0.094c-0.15-0.086-0.346-0.127-0.566-0.127c-0.596,0-1.383,0.295-2.01,0.796l-1.96,1.157 c-1.016,0.425-1.846,1.291-1.846,1.929s-0.898,1.159-1.998,1.159H13.14c-1.1,0-1.998-0.514-1.998-1.141s-0.834-1.477-1.854-1.888 l-2.046-1.157c-0.636-0.511-1.425-0.814-2.006-0.814c-0.202,0-0.379,0.037-0.516,0.115c-0.101,0.057-0.214,0.084-0.333,0.084 c-0.518,0-1.179-0.498-1.62-1.271l-0.591-1.032c-0.545-0.954-0.556-1.983-0.024-2.286c0.532-0.305,0.78-1.432,0.551-2.506 c0,0,0-0.003,0-1.042c0-1.088,0.021-1.18,0.021-1.18c0.238-1.072-0.01-2.203-0.552-2.513C1.631,10.8,1.634,9.765,2.18,8.812 L2.769,7.78c0.438-0.766,1.103-1.251,1.636-1.251c0.131,0,0.255,0.029,0.365,0.092C4.92,6.707,5.114,6.747,5.334,6.747 c0.596,0,1.38-0.296,2.007-0.795l1.944-1.065c1.021-0.407,1.856-1.277,1.856-1.933c0-0.656,0.898-1.192,1.998-1.192h1.156V1.761 c1.1,0,1.998,0.545,1.998,1.211c0,0.667,0.832,1.554,1.849,1.973L20,6.013c0.618,0.489,1.401,0.775,2.012,0.775 c0.24,0,0.454-0.045,0.62-0.139c0.122-0.069,0.259-0.102,0.403-0.102c0.551,0,1.221,0.476,1.653,1.231l0.59,1.032 c0.544,0.953,0.518,2.004-0.062,2.334c-0.577,0.331-0.859,1.48-0.627,2.554c0,0,0.01,0.042,0.01,1.103c0,1.012,0,1.012,0,1.012 c-0.218,1.049,0.068,2.174,0.636,2.498C25.802,18.635,25.819,19.68,25.274,20.635z"/><path d="M13.61,7.611c-3.913,0-7.084,3.173-7.084,7.085c0,3.914,3.171,7.085,7.084,7.085s7.085-3.172,7.085-7.085 C20.695,10.784,17.523,7.611,13.61,7.611z M13.61,20.02c-2.936,0-5.323-2.388-5.323-5.323c0-2.935,2.388-5.323,5.323-5.323 s5.324,2.388,5.324,5.323C18.934,17.632,16.546,20.02,13.61,20.02z"/><path 
d="M13.682,9.908c-2.602,0-4.718,2.116-4.718,4.718c0,2.601,2.116,4.716,4.718,4.716c2.601,0,4.717-2.115,4.717-4.716 C18.399,12.024,16.283,9.908,13.682,9.908z M13.682,17.581c-1.633,0-2.956-1.323-2.956-2.955s1.323-2.956,2.956-2.956 c1.632,0,2.956,1.324,2.956,2.956S15.314,17.581,13.682,17.581z"/></g></svg> \ No newline at end of file
diff --git a/src/librustdoc/html/static/js/README.md b/src/librustdoc/html/static/js/README.md
new file mode 100644
index 000000000..1fd859ad7
--- /dev/null
+++ b/src/librustdoc/html/static/js/README.md
@@ -0,0 +1,15 @@
+# Rustdoc JS
+
+These JavaScript files are incorporated into the rustdoc binary at build time,
+and are minified and written to the filesystem as part of the doc build process.
+
+We use the [Closure Compiler](https://github.com/google/closure-compiler/wiki/Annotating-JavaScript-for-the-Closure-Compiler)
+dialect of JSDoc to comment our code and annotate params and return types.
+To run a check:
+
+ ./x.py doc library/std
+ npm i -g google-closure-compiler
+ google-closure-compiler -W VERBOSE \
+ build/<YOUR PLATFORM>/doc/{search-index*.js,crates*.js} \
+ src/librustdoc/html/static/js/{search.js,main.js,storage.js} \
+ --externs src/librustdoc/html/static/js/externs.js >/dev/null
diff --git a/src/librustdoc/html/static/js/externs.js b/src/librustdoc/html/static/js/externs.js
new file mode 100644
index 000000000..ecbe15a59
--- /dev/null
+++ b/src/librustdoc/html/static/js/externs.js
@@ -0,0 +1,142 @@
+// This file contains type definitions that are processed by the Closure Compiler but are
+// not put into the JavaScript we include as part of the documentation. It is used for
+// type checking. See README.md in this directory for more info.
+
+/* eslint-disable */
+let searchState;
+function initSearch(searchIndex){}
+
+/**
+ * @typedef {{
+ * name: string,
+ * fullPath: Array<string>,
+ * pathWithoutLast: Array<string>,
+ * pathLast: string,
+ * generics: Array<QueryElement>,
+ * }}
+ */
+let QueryElement;
+
+/**
+ * @typedef {{
+ * pos: number,
+ * totalElems: number,
+ * typeFilter: (null|string),
+ * userQuery: string,
+ * }}
+ */
+let ParserState;
+
+/**
+ * @typedef {{
+ * original: string,
+ * userQuery: string,
+ * typeFilter: number,
+ * elems: Array<QueryElement>,
+ * args: Array<QueryElement>,
+ * returned: Array<QueryElement>,
+ * foundElems: number,
+ * }}
+ */
+let ParsedQuery;
+
+/**
+ * @typedef {{
+ * crate: string,
+ * desc: string,
+ * id: number,
+ * name: string,
+ * normalizedName: string,
+ * parent: (Object|null|undefined),
+ * path: string,
+ * ty: (number|null),
+ * type: (Array<?>|null)
+ * }}
+ */
+let Row;
+
+/**
+ * @typedef {{
+ * in_args: Array<Object>,
+ * returned: Array<Object>,
+ * others: Array<Object>,
+ * query: ParsedQuery,
+ * }}
+ */
+let ResultsTable;
+
+/**
+ * @typedef {{
+ * desc: string,
+ * displayPath: string,
+ * fullPath: string,
+ * href: string,
+ * id: number,
+ * lev: number,
+ * name: string,
+ * normalizedName: string,
+ * parent: (Object|undefined),
+ * path: string,
+ * ty: number,
+ * }}
+ */
+let Results;
+
+/**
+ * A pair of [inputs, outputs], or 0 for null. This is stored in the search index.
+ * The JavaScript deserializes this into FunctionSearchType.
+ *
+ * Numeric IDs are *ONE-indexed* into the paths array (`p`). Zero is used as a sentinel for `null`
+ * because `null` is four bytes while `0` is one byte.
+ *
+ * An input or output can be encoded as just a number if there is only one of them, AND
+ * it has no generics. The "no generics" rule exists to avoid ambiguity: imagine if you had
+ * a function with a single output, and that output had generics:
+ *
+ * fn something() -> Result<usize, usize>
+ *
+ * If output was allowed to be any RawFunctionType, it would look like this
+ *
+ * [[], [50, [3, 3]]]
+ *
+ * The problem is that the above output could be interpreted as either a type with ID 50 and two
+ * generics, or it could be interpreted as a pair of types, the first one with ID 50 and the second
+ * with ID 3 and a single generic parameter that is also ID 3. We avoid this ambiguity by choosing
+ * in favor of the pair of types interpretation. This is why the `(number|Array<RawFunctionType>)`
+ * is used instead of `(RawFunctionType|Array<RawFunctionType>)`.
+ *
+ * @typedef {(
+ * 0 |
+ * [(number|Array<RawFunctionType>)] |
+ * [(number|Array<RawFunctionType>), (number|Array<RawFunctionType>)]
+ * )}
+ */
+let RawFunctionSearchType;
+
+/**
+ * A single function input or output type. This is either a single path ID, or a pair of
+ * [path ID, generics].
+ *
+ * Numeric IDs are *ONE-indexed* into the paths array (`p`). Zero is used as a sentinel for `null`
+ * because `null` is four bytes while `0` is one byte.
+ *
+ * @typedef {number | [number, Array<RawFunctionType>]}
+ */
+let RawFunctionType;
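+
+/*
+ * Illustration only (an assumption, not part of these externs): with a hypothetical
+ * paths array `p` where `p[0]` is `usize` and `p[1]` is `Vec`, the encodings above
+ * would read as follows (IDs are one-indexed into `p`, `0` is the "null" sentinel):
+ *
+ * 1 -- RawFunctionType: `usize` (a bare ID, so no generics)
+ * [2, [1]] -- RawFunctionType: `Vec<usize>` (ID 2 with one generic, `usize`)
+ * [1, 1] -- RawFunctionSearchType: one `usize` input and a `usize` output
+ * 0 -- RawFunctionSearchType: no signature information at all
+ */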
+
+/**
+ * @typedef {{
+ * inputs: Array<FunctionType>,
+ * outputs: Array<FunctionType>,
+ * }}
+ */
+let FunctionSearchType;
+
+/**
+ * @typedef {{
+ * name: (null|string),
+ * ty: (null|number),
+ * generics: Array<FunctionType>,
+ * }}
+ */
+let FunctionType;
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
new file mode 100644
index 000000000..0702b2b0b
--- /dev/null
+++ b/src/librustdoc/html/static/js/main.js
@@ -0,0 +1,974 @@
+// Local js definitions:
+/* global addClass, getSettingValue, hasClass, searchState */
+/* global onEach, onEachLazy, removeClass */
+
+"use strict";
+
+// Get a value from the rustdoc-vars div, which is used to convey data from
+// Rust to the JS. If there is no such element, return null.
+function getVar(name) {
+ const el = document.getElementById("rustdoc-vars");
+ if (el) {
+ return el.attributes["data-" + name].value;
+ } else {
+ return null;
+ }
+}
+
+// Given a basename (e.g. "storage") and an extension (e.g. ".js"), return a URL
+// for a resource under the root-path, with the resource-suffix.
+function resourcePath(basename, extension) {
+ return getVar("root-path") + basename + getVar("resource-suffix") + extension;
+}
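+
+// For illustration only (hypothetical values; the real ones come from the rustdoc-vars
+// div): with a root-path of "../../" and a resource-suffix of "-xyz",
+// resourcePath("settings", ".js") would return "../../settings-xyz.js".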
+
+function hideMain() {
+ addClass(document.getElementById(MAIN_ID), "hidden");
+}
+
+function showMain() {
+ removeClass(document.getElementById(MAIN_ID), "hidden");
+}
+
+function elemIsInParent(elem, parent) {
+ while (elem && elem !== document.body) {
+ if (elem === parent) {
+ return true;
+ }
+ elem = elem.parentElement;
+ }
+ return false;
+}
+
+function blurHandler(event, parentElem, hideCallback) {
+ if (!elemIsInParent(document.activeElement, parentElem) &&
+ !elemIsInParent(event.relatedTarget, parentElem)
+ ) {
+ hideCallback();
+ }
+}
+
+(function() {
+ window.rootPath = getVar("root-path");
+ window.currentCrate = getVar("current-crate");
+}());
+
+function setMobileTopbar() {
+ // FIXME: It would be nicer to generate this text content directly in HTML,
+ // but with the current code it's hard to get the right information in the right place.
+ const mobileLocationTitle = document.querySelector(".mobile-topbar h2.location");
+ const locationTitle = document.querySelector(".sidebar h2.location");
+ if (mobileLocationTitle && locationTitle) {
+ mobileLocationTitle.innerHTML = locationTitle.innerHTML;
+ }
+}
+
+// Gets the human-readable string for the virtual-key code of the
+// given KeyboardEvent, ev.
+//
+// This function is meant as a polyfill for KeyboardEvent#key,
+// since it is not supported in IE 11 or Chrome for Android. We also test for
+// KeyboardEvent#keyCode because the handleShortcut handler is
+// also registered for the keydown event, because Blink doesn't fire
+// keypress on hitting the Escape key.
+//
+// So I guess you could say things are getting pretty interoperable.
+function getVirtualKey(ev) {
+ if ("key" in ev && typeof ev.key !== "undefined") {
+ return ev.key;
+ }
+
+ const c = ev.charCode || ev.keyCode;
+ if (c === 27) {
+ return "Escape";
+ }
+ return String.fromCharCode(c);
+}
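+
+// For example, hitting Escape returns "Escape" whether the browser supports `ev.key`
+// or only reports `ev.keyCode === 27`.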
+
+const MAIN_ID = "main-content";
+const SETTINGS_BUTTON_ID = "settings-menu";
+const ALTERNATIVE_DISPLAY_ID = "alternative-display";
+const NOT_DISPLAYED_ID = "not-displayed";
+const HELP_BUTTON_ID = "help-button";
+
+function getSettingsButton() {
+ return document.getElementById(SETTINGS_BUTTON_ID);
+}
+
+function getHelpButton() {
+ return document.getElementById(HELP_BUTTON_ID);
+}
+
+// Returns the current URL without any query parameter or hash.
+function getNakedUrl() {
+ return window.location.href.split("?")[0].split("#")[0];
+}
+
+/**
+ * This function inserts `newNode` after `referenceNode`. It doesn't work if `referenceNode`
+ * doesn't have a parent node.
+ *
+ * @param {HTMLElement} newNode
+ * @param {HTMLElement} referenceNode
+ */
+function insertAfter(newNode, referenceNode) {
+ referenceNode.parentNode.insertBefore(newNode, referenceNode.nextSibling);
+}
+
+/**
+ * This function creates a new `<section>` with the given `id` and `classes` if it doesn't already
+ * exist.
+ *
+ * More information about this in `switchDisplayedElement` documentation.
+ *
+ * @param {string} id
+ * @param {string} classes
+ */
+function getOrCreateSection(id, classes) {
+ let el = document.getElementById(id);
+
+ if (!el) {
+ el = document.createElement("section");
+ el.id = id;
+ el.className = classes;
+ insertAfter(el, document.getElementById(MAIN_ID));
+ }
+ return el;
+}
+
+/**
+ * Returns the `<section>` element which contains the displayed element.
+ *
+ * @return {HTMLElement}
+ */
+function getAlternativeDisplayElem() {
+ return getOrCreateSection(ALTERNATIVE_DISPLAY_ID, "content hidden");
+}
+
+/**
+ * Returns the `<section>` element which contains the not-displayed elements.
+ *
+ * @return {HTMLElement}
+ */
+function getNotDisplayedElem() {
+ return getOrCreateSection(NOT_DISPLAYED_ID, "hidden");
+}
+
+/**
+ * To nicely switch between displayed "extra" elements (such as search results or settings menu)
+ * and to alternate between the displayed and not displayed elements, we hold them in two different
+ * `<section>` elements. They work as a pair: one holds the hidden elements while the other
+ * contains the displayed element (there can only be one displayed at a time!). So basically,
+ * we move elements between the two `<section>` elements.
+ *
+ * @param {HTMLElement} elemToDisplay
+ */
+function switchDisplayedElement(elemToDisplay) {
+ const el = getAlternativeDisplayElem();
+
+ if (el.children.length > 0) {
+ getNotDisplayedElem().appendChild(el.firstElementChild);
+ }
+ if (elemToDisplay === null) {
+ addClass(el, "hidden");
+ showMain();
+ return;
+ }
+ el.appendChild(elemToDisplay);
+ hideMain();
+ removeClass(el, "hidden");
+}
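+
+// A usage sketch (see `searchState.showResults`/`hideResults` below, which do exactly this):
+//
+// switchDisplayedElement(searchState.outputElement()); // show the search results
+// switchDisplayedElement(null); // hide the extra element and show the main content again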
+
+function browserSupportsHistoryApi() {
+ return window.history && typeof window.history.pushState === "function";
+}
+
+// eslint-disable-next-line no-unused-vars
+function loadCss(cssFileName) {
+ const link = document.createElement("link");
+ link.href = resourcePath(cssFileName, ".css");
+ link.type = "text/css";
+ link.rel = "stylesheet";
+ document.getElementsByTagName("head")[0].appendChild(link);
+}
+
+(function() {
+ function loadScript(url) {
+ const script = document.createElement("script");
+ script.src = url;
+ document.head.append(script);
+ }
+
+ getSettingsButton().onclick = event => {
+ addClass(getSettingsButton(), "rotate");
+ event.preventDefault();
+ // Request the CSS and the JS files at the same time so the CSS will hopefully
+ // be loaded by the time the JS generates the settings content.
+ loadCss("settings");
+ loadScript(resourcePath("settings", ".js"));
+ };
+
+ window.searchState = {
+ loadingText: "Loading search results...",
+ input: document.getElementsByClassName("search-input")[0],
+ outputElement: () => {
+ let el = document.getElementById("search");
+ if (!el) {
+ el = document.createElement("section");
+ el.id = "search";
+ getNotDisplayedElem().appendChild(el);
+ }
+ return el;
+ },
+ title: document.title,
+ titleBeforeSearch: document.title,
+ timeout: null,
+ // The currently displayed tab on the search screen, so you remain on the last tab you opened.
+ //
+ // 0 for "In Names"
+ // 1 for "In Parameters"
+ // 2 for "In Return Types"
+ currentTab: 0,
+ // Tabbing away and back preserves the element that was focused.
+ focusedByTab: [null, null, null],
+ clearInputTimeout: () => {
+ if (searchState.timeout !== null) {
+ clearTimeout(searchState.timeout);
+ searchState.timeout = null;
+ }
+ },
+ isDisplayed: () => searchState.outputElement().parentElement.id === ALTERNATIVE_DISPLAY_ID,
+ // Sets the focus on the search bar at the top of the page
+ focus: () => {
+ searchState.input.focus();
+ },
+ // Removes the focus from the search bar.
+ defocus: () => {
+ searchState.input.blur();
+ },
+ showResults: search => {
+ if (search === null || typeof search === "undefined") {
+ search = searchState.outputElement();
+ }
+ switchDisplayedElement(search);
+ searchState.mouseMovedAfterSearch = false;
+ document.title = searchState.title;
+ },
+ hideResults: () => {
+ switchDisplayedElement(null);
+ document.title = searchState.titleBeforeSearch;
+ // We also remove the query parameter from the URL.
+ if (browserSupportsHistoryApi()) {
+ history.replaceState(null, window.currentCrate + " - Rust",
+ getNakedUrl() + window.location.hash);
+ }
+ },
+ getQueryStringParams: () => {
+ const params = {};
+ window.location.search.substring(1).split("&").
+ map(s => {
+ const pair = s.split("=");
+ params[decodeURIComponent(pair[0])] =
+ typeof pair[1] === "undefined" ? null : decodeURIComponent(pair[1]);
+ });
+ return params;
+ },
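+ // For illustration of `getQueryStringParams` above: a location search of
+ // "?search=foo&filter-crate=std" yields {"search": "foo", "filter-crate": "std"}.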
+ setup: () => {
+ const search_input = searchState.input;
+ if (!searchState.input) {
+ return;
+ }
+ let searchLoaded = false;
+ function loadSearch() {
+ if (!searchLoaded) {
+ searchLoaded = true;
+ loadScript(resourcePath("search", ".js"));
+ loadScript(resourcePath("search-index", ".js"));
+ }
+ }
+
+ search_input.addEventListener("focus", () => {
+ search_input.origPlaceholder = search_input.placeholder;
+ search_input.placeholder = "Type your search here.";
+ loadSearch();
+ });
+
+ if (search_input.value !== "") {
+ loadSearch();
+ }
+
+ const params = searchState.getQueryStringParams();
+ if (params.search !== undefined) {
+ const search = searchState.outputElement();
+ search.innerHTML = "<h3 class=\"search-loading\">" +
+ searchState.loadingText + "</h3>";
+ searchState.showResults(search);
+ loadSearch();
+ }
+ },
+ };
+
+ function getPageId() {
+ if (window.location.hash) {
+ const tmp = window.location.hash.replace(/^#/, "");
+ if (tmp.length > 0) {
+ return tmp;
+ }
+ }
+ return null;
+ }
+
+ const toggleAllDocsId = "toggle-all-docs";
+ let savedHash = "";
+
+ function handleHashes(ev) {
+ if (ev !== null && searchState.isDisplayed() && ev.newURL) {
+ // This block occurs when clicking on an element in the navbar while
+ // in a search.
+ switchDisplayedElement(null);
+ const hash = ev.newURL.slice(ev.newURL.indexOf("#") + 1);
+ if (browserSupportsHistoryApi()) {
+ // `window.location.search` contains all the query parameters, not just `search`.
+ history.replaceState(null, "",
+ getNakedUrl() + window.location.search + "#" + hash);
+ }
+ const elem = document.getElementById(hash);
+ if (elem) {
+ elem.scrollIntoView();
+ }
+ }
+ // This part is used in case an element is not visible.
+ if (savedHash !== window.location.hash) {
+ savedHash = window.location.hash;
+ if (savedHash.length === 0) {
+ return;
+ }
+ expandSection(savedHash.slice(1)); // we remove the '#'
+ }
+ }
+
+ function onHashChange(ev) {
+ // If we're in mobile mode, we should hide the sidebar in any case.
+ const sidebar = document.getElementsByClassName("sidebar")[0];
+ removeClass(sidebar, "shown");
+ handleHashes(ev);
+ }
+
+ function openParentDetails(elem) {
+ while (elem) {
+ if (elem.tagName === "DETAILS") {
+ elem.open = true;
+ }
+ elem = elem.parentNode;
+ }
+ }
+
+ function expandSection(id) {
+ openParentDetails(document.getElementById(id));
+ }
+
+ function handleEscape(ev) {
+ searchState.clearInputTimeout();
+ switchDisplayedElement(null);
+ if (browserSupportsHistoryApi()) {
+ history.replaceState(null, window.currentCrate + " - Rust",
+ getNakedUrl() + window.location.hash);
+ }
+ ev.preventDefault();
+ searchState.defocus();
+ window.hidePopoverMenus();
+ }
+
+ function handleShortcut(ev) {
+ // Don't interfere with browser shortcuts
+ const disableShortcuts = getSettingValue("disable-shortcuts") === "true";
+ if (ev.ctrlKey || ev.altKey || ev.metaKey || disableShortcuts) {
+ return;
+ }
+
+ if (document.activeElement.tagName === "INPUT" &&
+ document.activeElement.type !== "checkbox") {
+ switch (getVirtualKey(ev)) {
+ case "Escape":
+ handleEscape(ev);
+ break;
+ }
+ } else {
+ switch (getVirtualKey(ev)) {
+ case "Escape":
+ handleEscape(ev);
+ break;
+
+ case "s":
+ case "S":
+ ev.preventDefault();
+ searchState.focus();
+ break;
+
+ case "+":
+ case "-":
+ ev.preventDefault();
+ toggleAllDocs();
+ break;
+
+ case "?":
+ showHelp();
+ break;
+
+ default:
+ break;
+ }
+ }
+ }
+
+ document.addEventListener("keypress", handleShortcut);
+ document.addEventListener("keydown", handleShortcut);
+
+ function addSidebarItems() {
+ if (!window.SIDEBAR_ITEMS) {
+ return;
+ }
+ const sidebar = document.getElementsByClassName("sidebar-elems")[0];
+
+ /**
+ * Append to the sidebar a "block" of links - a heading along with a list (`<ul>`) of items.
+ *
+ * @param {string} shortty - A short type name, like "primitive", "mod", or "macro"
+ * @param {string} id - The HTML id of the corresponding section on the module page.
+ * @param {string} longty - A long, capitalized, plural name, like "Primitive Types",
+ * "Modules", or "Macros".
+ */
+ function block(shortty, id, longty) {
+ const filtered = window.SIDEBAR_ITEMS[shortty];
+ if (!filtered) {
+ return;
+ }
+
+ const div = document.createElement("div");
+ div.className = "block " + shortty;
+ const h3 = document.createElement("h3");
+ h3.innerHTML = `<a href="index.html#${id}">${longty}</a>`;
+ div.appendChild(h3);
+ const ul = document.createElement("ul");
+
+ for (const item of filtered) {
+ const name = item[0];
+ const desc = item[1]; // can be null
+
+ let klass = shortty;
+ let path;
+ if (shortty === "mod") {
+ path = name + "/index.html";
+ } else {
+ path = shortty + "." + name + ".html";
+ }
+ const current_page = document.location.href.split("/").pop();
+ if (path === current_page) {
+ klass += " current";
+ }
+ const link = document.createElement("a");
+ link.href = path;
+ link.title = desc;
+ link.className = klass;
+ link.textContent = name;
+ const li = document.createElement("li");
+ li.appendChild(link);
+ ul.appendChild(li);
+ }
+ div.appendChild(ul);
+ sidebar.appendChild(div);
+ }
+
+ if (sidebar) {
+ block("primitive", "primitives", "Primitive Types");
+ block("mod", "modules", "Modules");
+ block("macro", "macros", "Macros");
+ block("struct", "structs", "Structs");
+ block("enum", "enums", "Enums");
+ block("union", "unions", "Unions");
+ block("constant", "constants", "Constants");
+ block("static", "static", "Statics");
+ block("trait", "traits", "Traits");
+ block("fn", "functions", "Functions");
+ block("type", "types", "Type Definitions");
+ block("foreigntype", "foreign-types", "Foreign Types");
+ block("keyword", "keywords", "Keywords");
+ block("traitalias", "trait-aliases", "Trait Aliases");
+ }
+ }
+
+ window.register_implementors = imp => {
+ const implementors = document.getElementById("implementors-list");
+ const synthetic_implementors = document.getElementById("synthetic-implementors-list");
+ const inlined_types = new Set();
+
+ if (synthetic_implementors) {
+ // This `inlined_types` variable is used to avoid having the same implementation
+ // showing up twice. For example "String" in the "Sync" doc page.
+ //
+ // By the way, this is only used by and useful for traits implemented automatically
+ // (like "Send" and "Sync").
+ onEachLazy(synthetic_implementors.getElementsByClassName("impl"), el => {
+ const aliases = el.getAttribute("data-aliases");
+ if (!aliases) {
+ return;
+ }
+ aliases.split(",").forEach(alias => {
+ inlined_types.add(alias);
+ });
+ });
+ }
+
+ let currentNbImpls = implementors.getElementsByClassName("impl").length;
+ const traitName = document.querySelector("h1.fqn > .in-band > .trait").textContent;
+ const baseIdName = "impl-" + traitName + "-";
+ const libs = Object.getOwnPropertyNames(imp);
+ // We don't want to include impls from this JS file when the HTML already has them.
+ // The current crate should always be ignored. Other crates that should also be
+ // ignored are included in the attribute `data-ignore-extern-crates`.
+ const ignoreExternCrates = document
+ .querySelector("script[data-ignore-extern-crates]")
+ .getAttribute("data-ignore-extern-crates");
+ for (const lib of libs) {
+ if (lib === window.currentCrate || ignoreExternCrates.indexOf(lib) !== -1) {
+ continue;
+ }
+ const structs = imp[lib];
+
+ struct_loop:
+ for (const struct of structs) {
+ const list = struct.synthetic ? synthetic_implementors : implementors;
+
+ if (struct.synthetic) {
+ for (const struct_type of struct.types) {
+ if (inlined_types.has(struct_type)) {
+ continue struct_loop;
+ }
+ inlined_types.add(struct_type);
+ }
+ }
+
+ const code = document.createElement("h3");
+ code.innerHTML = struct.text;
+ addClass(code, "code-header");
+ addClass(code, "in-band");
+
+ onEachLazy(code.getElementsByTagName("a"), elem => {
+ const href = elem.getAttribute("href");
+
+ if (href && href.indexOf("http") !== 0) {
+ elem.setAttribute("href", window.rootPath + href);
+ }
+ });
+
+ const currentId = baseIdName + currentNbImpls;
+ const anchor = document.createElement("a");
+ anchor.href = "#" + currentId;
+ addClass(anchor, "anchor");
+
+ const display = document.createElement("div");
+ display.id = currentId;
+ addClass(display, "impl");
+ display.appendChild(anchor);
+ display.appendChild(code);
+ list.appendChild(display);
+ currentNbImpls += 1;
+ }
+ }
+ };
+ if (window.pending_implementors) {
+ window.register_implementors(window.pending_implementors);
+ }
+
+ function addSidebarCrates() {
+ if (!window.ALL_CRATES) {
+ return;
+ }
+ const sidebarElems = document.getElementsByClassName("sidebar-elems")[0];
+ if (!sidebarElems) {
+ return;
+ }
+ // Draw a convenient sidebar of known crates if we have a listing
+ const div = document.createElement("div");
+ div.className = "block crate";
+ div.innerHTML = "<h3>Crates</h3>";
+ const ul = document.createElement("ul");
+ div.appendChild(ul);
+
+ for (const crate of window.ALL_CRATES) {
+ let klass = "crate";
+ if (window.rootPath !== "./" && crate === window.currentCrate) {
+ klass += " current";
+ }
+ const link = document.createElement("a");
+ link.href = window.rootPath + crate + "/index.html";
+ link.className = klass;
+ link.textContent = crate;
+
+ const li = document.createElement("li");
+ li.appendChild(link);
+ ul.appendChild(li);
+ }
+ sidebarElems.appendChild(div);
+ }
+
+
+ function labelForToggleButton(sectionIsCollapsed) {
+ if (sectionIsCollapsed) {
+ // button will expand the section
+ return "+";
+ }
+ // button will collapse the section
+ // note that this text is also set in the HTML template in ../render/mod.rs
+ return "\u2212"; // "\u2212" is "−" minus sign
+ }
+
+ function toggleAllDocs() {
+ const innerToggle = document.getElementById(toggleAllDocsId);
+ if (!innerToggle) {
+ return;
+ }
+ let sectionIsCollapsed = false;
+ if (hasClass(innerToggle, "will-expand")) {
+ removeClass(innerToggle, "will-expand");
+ onEachLazy(document.getElementsByClassName("rustdoc-toggle"), e => {
+ if (!hasClass(e, "type-contents-toggle")) {
+ e.open = true;
+ }
+ });
+ innerToggle.title = "collapse all docs";
+ } else {
+ addClass(innerToggle, "will-expand");
+ onEachLazy(document.getElementsByClassName("rustdoc-toggle"), e => {
+ if (e.parentNode.id !== "implementations-list" ||
+ (!hasClass(e, "implementors-toggle") &&
+ !hasClass(e, "type-contents-toggle"))
+ ) {
+ e.open = false;
+ }
+ });
+ sectionIsCollapsed = true;
+ innerToggle.title = "expand all docs";
+ }
+ innerToggle.children[0].innerText = labelForToggleButton(sectionIsCollapsed);
+ }
+
+ (function() {
+ const toggles = document.getElementById(toggleAllDocsId);
+ if (toggles) {
+ toggles.onclick = toggleAllDocs;
+ }
+
+ const hideMethodDocs = getSettingValue("auto-hide-method-docs") === "true";
+ const hideImplementations = getSettingValue("auto-hide-trait-implementations") === "true";
+ const hideLargeItemContents = getSettingValue("auto-hide-large-items") !== "false";
+
+ function setImplementorsTogglesOpen(id, open) {
+ const list = document.getElementById(id);
+ if (list !== null) {
+ onEachLazy(list.getElementsByClassName("implementors-toggle"), e => {
+ e.open = open;
+ });
+ }
+ }
+
+ if (hideImplementations) {
+ setImplementorsTogglesOpen("trait-implementations-list", false);
+ setImplementorsTogglesOpen("blanket-implementations-list", false);
+ }
+
+ onEachLazy(document.getElementsByClassName("rustdoc-toggle"), e => {
+ if (!hideLargeItemContents && hasClass(e, "type-contents-toggle")) {
+ e.open = true;
+ }
+ if (hideMethodDocs && hasClass(e, "method-toggle")) {
+ e.open = false;
+ }
+
+ });
+
+ const pageId = getPageId();
+ if (pageId !== null) {
+ expandSection(pageId);
+ }
+ }());
+
+ (function() {
+ // To avoid checking on "rustdoc-line-numbers" value on every loop...
+ let lineNumbersFunc = () => {};
+ if (getSettingValue("line-numbers") === "true") {
+ lineNumbersFunc = x => {
+ const count = x.textContent.split("\n").length;
+ const elems = [];
+ for (let i = 0; i < count; ++i) {
+ elems.push(i + 1);
+ }
+ const node = document.createElement("pre");
+ addClass(node, "line-number");
+ node.innerHTML = elems.join("\n");
+ x.parentNode.insertBefore(node, x);
+ };
+ }
+ onEachLazy(document.getElementsByClassName("rust-example-rendered"), e => {
+ if (hasClass(e, "compile_fail")) {
+ e.addEventListener("mouseover", function() {
+ this.parentElement.previousElementSibling.childNodes[0].style.color = "#f00";
+ });
+ e.addEventListener("mouseout", function() {
+ this.parentElement.previousElementSibling.childNodes[0].style.color = "";
+ });
+ } else if (hasClass(e, "ignore")) {
+ e.addEventListener("mouseover", function() {
+ this.parentElement.previousElementSibling.childNodes[0].style.color = "#ff9200";
+ });
+ e.addEventListener("mouseout", function() {
+ this.parentElement.previousElementSibling.childNodes[0].style.color = "";
+ });
+ }
+ lineNumbersFunc(e);
+ });
+ }());
+
+ function hideSidebar() {
+ const sidebar = document.getElementsByClassName("sidebar")[0];
+ removeClass(sidebar, "shown");
+ }
+
+ function handleClick(id, f) {
+ const elem = document.getElementById(id);
+ if (elem) {
+ elem.addEventListener("click", f);
+ }
+ }
+ handleClick(MAIN_ID, () => {
+ hideSidebar();
+ });
+
+ onEachLazy(document.getElementsByTagName("a"), el => {
+ // For clicks on internal links (<A> tags with a hash property), we expand the section we're
+ // jumping to *before* jumping there. We can't do this in onHashChange, because it changes
+ // the height of the document so we wind up scrolled to the wrong place.
+ if (el.hash) {
+ el.addEventListener("click", () => {
+ expandSection(el.hash.slice(1));
+ hideSidebar();
+ });
+ }
+ });
+
+ onEachLazy(document.querySelectorAll(".rustdoc-toggle > summary:not(.hideme)"), el => {
+ el.addEventListener("click", e => {
+ if (e.target.tagName !== "SUMMARY" && e.target.tagName !== "A") {
+ e.preventDefault();
+ }
+ });
+ });
+
+ onEachLazy(document.getElementsByClassName("notable-traits"), e => {
+ e.onclick = function() {
+ this.getElementsByClassName("notable-traits-tooltiptext")[0]
+ .classList.toggle("force-tooltip");
+ };
+ });
+
+ const sidebar_menu_toggle = document.getElementsByClassName("sidebar-menu-toggle")[0];
+ if (sidebar_menu_toggle) {
+ sidebar_menu_toggle.addEventListener("click", () => {
+ const sidebar = document.getElementsByClassName("sidebar")[0];
+ if (!hasClass(sidebar, "shown")) {
+ addClass(sidebar, "shown");
+ } else {
+ removeClass(sidebar, "shown");
+ }
+ });
+ }
+
+ function helpBlurHandler(event) {
+ blurHandler(event, getHelpButton(), window.hidePopoverMenus);
+ }
+
+ function buildHelpMenu() {
+ const book_info = document.createElement("span");
+ book_info.className = "top";
+ book_info.innerHTML = "You can find more information in \
+ <a href=\"https://doc.rust-lang.org/rustdoc/\">the rustdoc book</a>.";
+
+ const shortcuts = [
+ ["?", "Show this help dialog"],
+ ["S", "Focus the search field"],
+ ["↑", "Move up in search results"],
+ ["↓", "Move down in search results"],
+ ["← / →", "Switch result tab (when results focused)"],
+ ["&#9166;", "Go to active search result"],
+ ["+", "Expand all sections"],
+ ["-", "Collapse all sections"],
+ ].map(x => "<dt>" +
+ x[0].split(" ")
+ .map((y, index) => ((index & 1) === 0 ? "<kbd>" + y + "</kbd>" : " " + y + " "))
+ .join("") + "</dt><dd>" + x[1] + "</dd>").join("");
+ const div_shortcuts = document.createElement("div");
+ addClass(div_shortcuts, "shortcuts");
+ div_shortcuts.innerHTML = "<h2>Keyboard Shortcuts</h2><dl>" + shortcuts + "</dl>";
+
+ const infos = [
+ "Prefix searches with a type followed by a colon (e.g., <code>fn:</code>) to \
+ restrict the search to a given item kind.",
+ "Accepted kinds are: <code>fn</code>, <code>mod</code>, <code>struct</code>, \
+ <code>enum</code>, <code>trait</code>, <code>type</code>, <code>macro</code>, \
+ and <code>const</code>.",
+ "Search functions by type signature (e.g., <code>vec -&gt; usize</code> or \
+ <code>-&gt; vec</code>)",
+ "Search multiple things at once by splitting your query with comma (e.g., \
+ <code>str,u8</code> or <code>String,struct:Vec,test</code>)",
+ "You can look for items with an exact name by putting double quotes around \
+ your request: <code>\"string\"</code>",
+ "Look for items inside another one by searching for a path: <code>vec::Vec</code>",
+ ].map(x => "<p>" + x + "</p>").join("");
+ const div_infos = document.createElement("div");
+ addClass(div_infos, "infos");
+ div_infos.innerHTML = "<h2>Search Tricks</h2>" + infos;
+
+ const rustdoc_version = document.createElement("span");
+ rustdoc_version.className = "bottom";
+ const rustdoc_version_code = document.createElement("code");
+ rustdoc_version_code.innerText = "rustdoc " + getVar("rustdoc-version");
+ rustdoc_version.appendChild(rustdoc_version_code);
+
+ const container = document.createElement("div");
+ container.className = "popover";
+ container.style.display = "none";
+
+ const side_by_side = document.createElement("div");
+ side_by_side.className = "side-by-side";
+ side_by_side.appendChild(div_shortcuts);
+ side_by_side.appendChild(div_infos);
+
+ container.appendChild(book_info);
+ container.appendChild(side_by_side);
+ container.appendChild(rustdoc_version);
+
+ const help_button = getHelpButton();
+ help_button.appendChild(container);
+
+ container.onblur = helpBlurHandler;
+ container.onclick = event => {
+ event.preventDefault();
+ };
+ help_button.onblur = helpBlurHandler;
+ help_button.children[0].onblur = helpBlurHandler;
+
+ return container;
+ }
+
+ /**
+ * Hide all the popover menus.
+ */
+ window.hidePopoverMenus = function() {
+ onEachLazy(document.querySelectorAll(".search-container .popover"), elem => {
+ elem.style.display = "none";
+ });
+ };
+
+ /**
+ * Returns the help menu element (not the button).
+ *
+ * @param {boolean} buildNeeded - If this argument is `false`, the help menu element won't be
+ * built if it doesn't exist.
+ *
+ * @return {HTMLElement}
+ */
+ function getHelpMenu(buildNeeded) {
+ let menu = getHelpButton().querySelector(".popover");
+ if (!menu && buildNeeded) {
+ menu = buildHelpMenu();
+ }
+ return menu;
+ }
+
+ /**
+ * Show the help popup menu.
+ */
+ function showHelp() {
+ const menu = getHelpMenu(true);
+ if (menu.style.display === "none") {
+ window.hidePopoverMenus();
+ menu.style.display = "";
+ }
+ }
+
+ document.querySelector(`#${HELP_BUTTON_ID} > button`).addEventListener("click", event => {
+ const target = event.target;
+ if (target.tagName !== "BUTTON" || target.parentElement.id !== HELP_BUTTON_ID) {
+ return;
+ }
+ const menu = getHelpMenu(true);
+ const shouldShowHelp = menu.style.display === "none";
+ if (shouldShowHelp) {
+ showHelp();
+ } else {
+ window.hidePopoverMenus();
+ }
+ });
+
+ setMobileTopbar();
+ addSidebarItems();
+ addSidebarCrates();
+ onHashChange(null);
+ window.addEventListener("hashchange", onHashChange);
+ searchState.setup();
+}());
+
+(function() {
+ let reset_button_timeout = null;
+
+ window.copy_path = but => {
+ const parent = but.parentElement;
+ const path = [];
+
+ onEach(parent.childNodes, child => {
+ if (child.tagName === "A") {
+ path.push(child.textContent);
+ }
+ });
+
+ const el = document.createElement("textarea");
+ el.value = path.join("::");
+ el.setAttribute("readonly", "");
+ // To not make it appear on the screen.
+ el.style.position = "absolute";
+ el.style.left = "-9999px";
+
+ document.body.appendChild(el);
+ el.select();
+ document.execCommand("copy");
+ document.body.removeChild(el);
+
+ // There is always one child element, but there may be multiple childNodes.
+ but.children[0].style.display = "none";
+
+ let tmp;
+ if (but.childNodes.length < 2) {
+ tmp = document.createTextNode("✓");
+ but.appendChild(tmp);
+ } else {
+ onEachLazy(but.childNodes, e => {
+ if (e.nodeType === Node.TEXT_NODE) {
+ tmp = e;
+ return true;
+ }
+ });
+ tmp.textContent = "✓";
+ }
+
+ if (reset_button_timeout !== null) {
+ window.clearTimeout(reset_button_timeout);
+ }
+
+ function reset_button() {
+ tmp.textContent = "";
+ reset_button_timeout = null;
+ but.children[0].style.display = "";
+ }
+
+ reset_button_timeout = window.setTimeout(reset_button, 1000);
+ };
+}());
diff --git a/src/librustdoc/html/static/js/scrape-examples.js b/src/librustdoc/html/static/js/scrape-examples.js
new file mode 100644
index 000000000..fd7a14497
--- /dev/null
+++ b/src/librustdoc/html/static/js/scrape-examples.js
@@ -0,0 +1,106 @@
+/* global addClass, hasClass, removeClass, onEachLazy */
+
+"use strict";
+
+(function() {
+ // Number of lines shown when code viewer is not expanded
+ const MAX_LINES = 10;
+
+ // Scroll code block to the given code location
+ function scrollToLoc(elt, loc) {
+ const lines = elt.querySelector(".line-numbers");
+ let scrollOffset;
+
+ // If the block is greater than the size of the viewer,
+ // then scroll to the top of the block. Otherwise scroll
+ // to the middle of the block.
+ if (loc[1] - loc[0] > MAX_LINES) {
+ const line = Math.max(0, loc[0] - 1);
+ scrollOffset = lines.children[line].offsetTop;
+ } else {
+ const wrapper = elt.querySelector(".code-wrapper");
+ const halfHeight = wrapper.offsetHeight / 2;
+ const offsetMid = (lines.children[loc[0]].offsetTop
+ + lines.children[loc[1]].offsetTop) / 2;
+ scrollOffset = offsetMid - halfHeight;
+ }
+
+ lines.scrollTo(0, scrollOffset);
+ elt.querySelector(".rust").scrollTo(0, scrollOffset);
+ }
+
+ function updateScrapedExample(example) {
+ const locs = JSON.parse(example.attributes.getNamedItem("data-locs").textContent);
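+ // `locs` is expected to be a list of triples [[lineStart, lineEnd], url, title]
+ // (an assumption based on how it is indexed below).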
+ let locIndex = 0;
+ const highlights = Array.prototype.slice.call(example.querySelectorAll(".highlight"));
+ const link = example.querySelector(".scraped-example-title a");
+
+ if (locs.length > 1) {
+ // Toggle through list of examples in a given file
+ const onChangeLoc = changeIndex => {
+ removeClass(highlights[locIndex], "focus");
+ changeIndex();
+ scrollToLoc(example, locs[locIndex][0]);
+ addClass(highlights[locIndex], "focus");
+
+ const url = locs[locIndex][1];
+ const title = locs[locIndex][2];
+
+ link.href = url;
+ link.innerHTML = title;
+ };
+
+ example.querySelector(".prev")
+ .addEventListener("click", () => {
+ onChangeLoc(() => {
+ locIndex = (locIndex - 1 + locs.length) % locs.length;
+ });
+ });
+
+ example.querySelector(".next")
+ .addEventListener("click", () => {
+ onChangeLoc(() => {
+ locIndex = (locIndex + 1) % locs.length;
+ });
+ });
+ }
+
+ const expandButton = example.querySelector(".expand");
+ if (expandButton) {
+ expandButton.addEventListener("click", () => {
+ if (hasClass(example, "expanded")) {
+ removeClass(example, "expanded");
+ scrollToLoc(example, locs[0][0]);
+ } else {
+ addClass(example, "expanded");
+ }
+ });
+ }
+
+ // Start with the first example in view
+ scrollToLoc(example, locs[0][0]);
+ }
+
+ const firstExamples = document.querySelectorAll(".scraped-example-list > .scraped-example");
+ onEachLazy(firstExamples, updateScrapedExample);
+ onEachLazy(document.querySelectorAll(".more-examples-toggle"), toggle => {
+ // Allow users to click the left border of the <details> section to close it,
+ // since the section can be large and finding the [+] button is annoying.
+ onEachLazy(toggle.querySelectorAll(".toggle-line, .hide-more"), button => {
+ button.addEventListener("click", () => {
+ toggle.open = false;
+ });
+ });
+
+ const moreExamples = toggle.querySelectorAll(".scraped-example");
+ toggle.querySelector("summary").addEventListener("click", () => {
+ // Wrapping in setTimeout ensures the update happens after the elements are actually
+ // visible. This is necessary since updateScrapedExample calls scrollToLoc which
+ // depends on offsetHeight, a property that requires an element to be visible to
+ // compute correctly.
+ setTimeout(() => {
+ onEachLazy(moreExamples, updateScrapedExample);
+ });
+ }, {once: true});
+ });
+})();
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
new file mode 100644
index 000000000..75c7bd45a
--- /dev/null
+++ b/src/librustdoc/html/static/js/search.js
@@ -0,0 +1,2297 @@
+/* global addClass, getNakedUrl, getSettingValue */
+/* global onEachLazy, removeClass, searchState, browserSupportsHistoryApi, exports */
+
+"use strict";
+
+(function() {
+// This mapping table should match the discriminants of
+// `rustdoc::formats::item_type::ItemType` type in Rust.
+const itemTypes = [
+ "mod",
+ "externcrate",
+ "import",
+ "struct",
+ "enum",
+ "fn",
+ "type",
+ "static",
+ "trait",
+ "impl",
+ "tymethod",
+ "method",
+ "structfield",
+ "variant",
+ "macro",
+ "primitive",
+ "associatedtype",
+ "constant",
+ "associatedconstant",
+ "union",
+ "foreigntype",
+ "keyword",
+ "existential",
+ "attr",
+ "derive",
+ "traitalias",
+];
+
+// used for special search precedence
+const TY_PRIMITIVE = itemTypes.indexOf("primitive");
+const TY_KEYWORD = itemTypes.indexOf("keyword");
+const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../";
+
+function hasOwnPropertyRustdoc(obj, property) {
+ return Object.prototype.hasOwnProperty.call(obj, property);
+}
+
+// In the search display, this function switches between tabs.
+function printTab(nb) {
+ let iter = 0;
+ let foundCurrentTab = false;
+ let foundCurrentResultSet = false;
+ onEachLazy(document.getElementById("titles").childNodes, elem => {
+ if (nb === iter) {
+ addClass(elem, "selected");
+ foundCurrentTab = true;
+ } else {
+ removeClass(elem, "selected");
+ }
+ iter += 1;
+ });
+ iter = 0;
+ onEachLazy(document.getElementById("results").childNodes, elem => {
+ if (nb === iter) {
+ addClass(elem, "active");
+ foundCurrentResultSet = true;
+ } else {
+ removeClass(elem, "active");
+ }
+ iter += 1;
+ });
+ if (foundCurrentTab && foundCurrentResultSet) {
+ searchState.currentTab = nb;
+ } else if (nb !== 0) {
+ printTab(0);
+ }
+}
+
+/**
+ * A function to compute the Levenshtein distance between two strings
+ * Licensed under the Creative Commons Attribution-ShareAlike 3.0 Unported
+ * Full License can be found at http://creativecommons.org/licenses/by-sa/3.0/legalcode
+ * This code is an unmodified version of the code written by Marco de Wit
+ * and was found at https://stackoverflow.com/a/18514751/745719
+ */
+const levenshtein_row2 = [];
+function levenshtein(s1, s2) {
+ if (s1 === s2) {
+ return 0;
+ }
+ const s1_len = s1.length, s2_len = s2.length;
+ if (s1_len && s2_len) {
+ let i1 = 0, i2 = 0, a, b, c, c2;
+ const row = levenshtein_row2;
+ while (i1 < s1_len) {
+ row[i1] = ++i1;
+ }
+ while (i2 < s2_len) {
+ c2 = s2.charCodeAt(i2);
+ a = i2;
+ ++i2;
+ b = i2;
+ for (i1 = 0; i1 < s1_len; ++i1) {
+ c = a + (s1.charCodeAt(i1) !== c2 ? 1 : 0);
+ a = row[i1];
+ b = b < a ? (b < c ? b + 1 : c) : (a < c ? a + 1 : c);
+ row[i1] = b;
+ }
+ }
+ return b;
+ }
+ return s1_len + s2_len;
+}
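+
+// For example, levenshtein("foo", "foobar") === 3 (three insertions) and
+// levenshtein("vec", "ve") === 1 (one deletion); identical strings return 0.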
+
+function initSearch(rawSearchIndex) {
+ const MAX_LEV_DISTANCE = 3;
+ const MAX_RESULTS = 200;
+ const NO_TYPE_FILTER = -1;
+ /**
+ * @type {Array<Row>}
+ */
+ let searchIndex;
+ let currentResults;
+ const ALIASES = Object.create(null);
+
+ function isWhitespace(c) {
+ return " \t\n\r".indexOf(c) !== -1;
+ }
+
+ function isSpecialStartCharacter(c) {
+ return "<\"".indexOf(c) !== -1;
+ }
+
+ function isEndCharacter(c) {
+ return ",>-".indexOf(c) !== -1;
+ }
+
+ function isStopCharacter(c) {
+ return isWhitespace(c) || isEndCharacter(c);
+ }
+
+ function isErrorCharacter(c) {
+ return "()".indexOf(c) !== -1;
+ }
+
+ function itemTypeFromName(typename) {
+ for (let i = 0, len = itemTypes.length; i < len; ++i) {
+ if (itemTypes[i] === typename) {
+ return i;
+ }
+ }
+
+ throw new Error("Unknown type filter `" + typename + "`");
+ }
+
+ /**
+ * If we encounter a `"`, then we try to extract the string from it until we find another `"`.
+ *
+ * This function will throw an error in the following cases:
+ * * There is already another string element.
+ * * We are parsing a generic argument.
+ * * There is more than one element.
+ * * There is no closing `"`.
+ *
+ * @param {ParsedQuery} query
+ * @param {ParserState} parserState
+ * @param {boolean} isInGenerics
+ */
+ function getStringElem(query, parserState, isInGenerics) {
+ if (isInGenerics) {
+ throw new Error("`\"` cannot be used in generics");
+ } else if (query.literalSearch) {
+ throw new Error("Cannot have more than one literal search element");
+ } else if (parserState.totalElems - parserState.genericsElems > 0) {
+ throw new Error("Cannot use literal search when there is more than one element");
+ }
+ parserState.pos += 1;
+ const start = parserState.pos;
+ const end = getIdentEndPosition(parserState);
+ if (parserState.pos >= parserState.length) {
+ throw new Error("Unclosed `\"`");
+ } else if (parserState.userQuery[end] !== "\"") {
+ throw new Error(`Unexpected \`${parserState.userQuery[end]}\` in a string element`);
+ } else if (start === end) {
+ throw new Error("Cannot have empty string element");
+ }
+ // To skip the quote at the end.
+ parserState.pos += 1;
+ query.literalSearch = true;
+ }
+
+ /**
+ * Returns `true` if the current parser position is starting with "::".
+ *
+ * @param {ParserState} parserState
+ *
+ * @return {boolean}
+ */
+ function isPathStart(parserState) {
+ return parserState.userQuery.slice(parserState.pos, parserState.pos + 2) === "::";
+ }
+
+ /**
+ * Returns `true` if the current parser position is starting with "->".
+ *
+ * @param {ParserState} parserState
+ *
+ * @return {boolean}
+ */
+ function isReturnArrow(parserState) {
+ return parserState.userQuery.slice(parserState.pos, parserState.pos + 2) === "->";
+ }
+
+ /**
+ * Returns `true` if the given `c` character is valid for an ident.
+ *
+ * @param {string} c
+ *
+ * @return {boolean}
+ */
+ function isIdentCharacter(c) {
+ return (
+ c === "_" ||
+ (c >= "0" && c <= "9") ||
+ (c >= "a" && c <= "z") ||
+ (c >= "A" && c <= "Z"));
+ }
+
+ /**
+ * Returns `true` if the given `c` character is a separator.
+ *
+ * @param {string} c
+ *
+ * @return {boolean}
+ */
+ function isSeparatorCharacter(c) {
+ return c === "," || isWhitespaceCharacter(c);
+ }
+
+ /**
+ * Returns `true` if the given `c` character is a whitespace.
+ *
+ * @param {string} c
+ *
+ * @return {boolean}
+ */
+ function isWhitespaceCharacter(c) {
+ return c === " " || c === "\t";
+ }
+
+ /**
+ * @param {ParsedQuery} query
+ * @param {ParserState} parserState
+ * @param {string} name - Name of the query element.
+ * @param {Array<QueryElement>} generics - List of generics of this query element.
+ *
+ * @return {QueryElement} - The newly created `QueryElement`.
+ */
+ function createQueryElement(query, parserState, name, generics, isInGenerics) {
+ if (name === "*" || (name.length === 0 && generics.length === 0)) {
+ return;
+ }
+ if (query.literalSearch && parserState.totalElems - parserState.genericsElems > 0) {
+ throw new Error("You cannot have more than one element if you use quotes");
+ }
+ const pathSegments = name.split("::");
+ if (pathSegments.length > 1) {
+ for (let i = 0, len = pathSegments.length; i < len; ++i) {
+ const pathSegment = pathSegments[i];
+
+ if (pathSegment.length === 0) {
+ if (i === 0) {
+ throw new Error("Paths cannot start with `::`");
+ } else if (i + 1 === len) {
+ throw new Error("Paths cannot end with `::`");
+ }
+ throw new Error("Unexpected `::::`");
+ }
+ }
+ }
+ // In case we only have something like `<p>`, there is no name.
+ if (pathSegments.length === 0 || (pathSegments.length === 1 && pathSegments[0] === "")) {
+ throw new Error("Found generics without a path");
+ }
+ parserState.totalElems += 1;
+ if (isInGenerics) {
+ parserState.genericsElems += 1;
+ }
+ return {
+ name: name,
+ fullPath: pathSegments,
+ pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
+ pathLast: pathSegments[pathSegments.length - 1],
+ generics: generics,
+ };
+ }
+
+ /**
+ * This function goes through all characters until it reaches an invalid ident character or the
+ * end of the query. It returns the position of the last character of the ident.
+ *
+ * @param {ParserState} parserState
+ *
+ * @return {integer}
+ */
+ function getIdentEndPosition(parserState) {
+ let end = parserState.pos;
+ let foundExclamation = false;
+ while (parserState.pos < parserState.length) {
+ const c = parserState.userQuery[parserState.pos];
+ if (!isIdentCharacter(c)) {
+ if (c === "!") {
+ if (foundExclamation) {
+ throw new Error("Cannot have more than one `!` in an ident");
+ } else if (parserState.pos + 1 < parserState.length &&
+ isIdentCharacter(parserState.userQuery[parserState.pos + 1])
+ ) {
+ throw new Error("`!` can only be at the end of an ident");
+ }
+ foundExclamation = true;
+ } else if (isErrorCharacter(c)) {
+ throw new Error(`Unexpected \`${c}\``);
+ } else if (
+ isStopCharacter(c) ||
+ isSpecialStartCharacter(c) ||
+ isSeparatorCharacter(c)
+ ) {
+ break;
+ } else if (c === ":") { // If we allow paths ("str::string" for example).
+ if (!isPathStart(parserState)) {
+ break;
+ }
+ // Skip current ":".
+ parserState.pos += 1;
+ foundExclamation = false;
+ } else {
+ throw new Error(`Unexpected \`${c}\``);
+ }
+ }
+ parserState.pos += 1;
+ end = parserState.pos;
+ }
+ return end;
+ }
+
+ /**
+ * @param {ParsedQuery} query
+ * @param {ParserState} parserState
+ * @param {Array<QueryElement>} elems - This is where the new {QueryElement} will be added.
+ * @param {boolean} isInGenerics
+ */
+ function getNextElem(query, parserState, elems, isInGenerics) {
+ const generics = [];
+
+ let start = parserState.pos;
+ let end;
+ // We handle the strings on their own mostly to make code easier to follow.
+ if (parserState.userQuery[parserState.pos] === "\"") {
+ start += 1;
+ getStringElem(query, parserState, isInGenerics);
+ end = parserState.pos - 1;
+ } else {
+ end = getIdentEndPosition(parserState);
+ }
+ if (parserState.pos < parserState.length &&
+ parserState.userQuery[parserState.pos] === "<"
+ ) {
+ if (isInGenerics) {
+ throw new Error("Unexpected `<` after `<`");
+ } else if (start >= end) {
+ throw new Error("Found generics without a path");
+ }
+ parserState.pos += 1;
+ getItemsBefore(query, parserState, generics, ">");
+ }
+ if (start >= end && generics.length === 0) {
+ return;
+ }
+ elems.push(
+ createQueryElement(
+ query,
+ parserState,
+ parserState.userQuery.slice(start, end),
+ generics,
+ isInGenerics
+ )
+ );
+ }
+
+ /**
+ * This function parses the next query element until it finds `endChar`, calling `getNextElem`
+ * to collect each element.
+ *
+ * If there is no `endChar`, this function will implicitly stop at the end without raising an
+ * error.
+ *
+ * @param {ParsedQuery} query
+ * @param {ParserState} parserState
+ * @param {Array<QueryElement>} elems - This is where the new {QueryElement} will be added.
+ * @param {string} endChar - This function will stop when it'll encounter this
+ * character.
+ */
+ function getItemsBefore(query, parserState, elems, endChar) {
+ let foundStopChar = true;
+
+ while (parserState.pos < parserState.length) {
+ const c = parserState.userQuery[parserState.pos];
+ if (c === endChar) {
+ break;
+ } else if (isSeparatorCharacter(c)) {
+ parserState.pos += 1;
+ foundStopChar = true;
+ continue;
+ } else if (c === ":" && isPathStart(parserState)) {
+ throw new Error("Unexpected `::`: paths cannot start with `::`");
+ } else if (c === ":" || isEndCharacter(c)) {
+ let extra = "";
+ if (endChar === ">") {
+ extra = "`<`";
+ } else if (endChar === "") {
+ extra = "`->`";
+ }
+ throw new Error("Unexpected `" + c + "` after " + extra);
+ }
+ if (!foundStopChar) {
+ if (endChar !== "") {
+ throw new Error(`Expected \`,\`, \` \` or \`${endChar}\`, found \`${c}\``);
+ }
+ throw new Error(`Expected \`,\` or \` \`, found \`${c}\``);
+ }
+ const posBefore = parserState.pos;
+ getNextElem(query, parserState, elems, endChar === ">");
+ // This case can be encountered if `getNextElem` encountered a "stop character" right from
+ // the start, for example if you have `,,` or `<>`. In this case, we simply move the
+ // current position forward to continue the parsing.
+ if (posBefore === parserState.pos) {
+ parserState.pos += 1;
+ }
+ foundStopChar = false;
+ }
+ // We are either at the end of the string or on the `endChar` character; let's move forward
+ // in any case.
+ parserState.pos += 1;
+ }
+
+ /**
+ * Checks that the type filter doesn't have unwanted characters like `<>` (which are ignored
+ * if empty).
+ *
+ * @param {ParserState} parserState
+ */
+ function checkExtraTypeFilterCharacters(parserState) {
+ const query = parserState.userQuery;
+
+ for (let pos = 0; pos < parserState.pos; ++pos) {
+ if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) {
+ throw new Error(`Unexpected \`${query[pos]}\` in type filter`);
+ }
+ }
+ }
+
+ /**
+ * Parses the provided user query (tracked in `parserState`) to fill the `query` object. If
+ * it encounters an error while parsing, it'll throw an error.
+ *
+ * @param {ParsedQuery} query
+ * @param {ParserState} parserState
+ */
+ function parseInput(query, parserState) {
+ let c, before;
+ let foundStopChar = true;
+
+ while (parserState.pos < parserState.length) {
+ c = parserState.userQuery[parserState.pos];
+ if (isStopCharacter(c)) {
+ foundStopChar = true;
+ if (isSeparatorCharacter(c)) {
+ parserState.pos += 1;
+ continue;
+ } else if (c === "-" || c === ">") {
+ if (isReturnArrow(parserState)) {
+ break;
+ }
+ throw new Error(`Unexpected \`${c}\` (did you mean \`->\`?)`);
+ }
+ throw new Error(`Unexpected \`${c}\``);
+ } else if (c === ":" && !isPathStart(parserState)) {
+ if (parserState.typeFilter !== null) {
+ throw new Error("Unexpected `:`");
+ }
+ if (query.elems.length === 0) {
+ throw new Error("Expected type filter before `:`");
+ } else if (query.elems.length !== 1 || parserState.totalElems !== 1) {
+ throw new Error("Unexpected `:`");
+ } else if (query.literalSearch) {
+ throw new Error("You cannot use quotes on type filter");
+ }
+ checkExtraTypeFilterCharacters(parserState);
+ // The type filter doesn't count as an element since it's a modifier.
+ parserState.typeFilter = query.elems.pop().name;
+ parserState.pos += 1;
+ parserState.totalElems = 0;
+ query.literalSearch = false;
+ foundStopChar = true;
+ continue;
+ }
+ if (!foundStopChar) {
+ if (parserState.typeFilter !== null) {
+ throw new Error(`Expected \`,\`, \` \` or \`->\`, found \`${c}\``);
+ }
+ throw new Error(`Expected \`,\`, \` \`, \`:\` or \`->\`, found \`${c}\``);
+ }
+ before = query.elems.length;
+ getNextElem(query, parserState, query.elems, false);
+ if (query.elems.length === before) {
+ // Nothing was added, weird... Let's increase the position to not remain stuck.
+ parserState.pos += 1;
+ }
+ foundStopChar = false;
+ }
+ while (parserState.pos < parserState.length) {
+ c = parserState.userQuery[parserState.pos];
+ if (isReturnArrow(parserState)) {
+ parserState.pos += 2;
+ // Get returned elements.
+ getItemsBefore(query, parserState, query.returned, "");
+ // Nothing can come afterward!
+ if (query.returned.length === 0) {
+ throw new Error("Expected at least one item after `->`");
+ }
+ break;
+ } else {
+ parserState.pos += 1;
+ }
+ }
+ }
+
+ /**
+ * Takes the user search input and returns an empty `ParsedQuery`.
+ *
+ * @param {string} userQuery
+ *
+ * @return {ParsedQuery}
+ */
+ function newParsedQuery(userQuery) {
+ return {
+ original: userQuery,
+ userQuery: userQuery.toLowerCase(),
+ typeFilter: NO_TYPE_FILTER,
+ elems: [],
+ returned: [],
+ // Total number of "top" elements (does not include generics).
+ foundElems: 0,
+ literalSearch: false,
+ error: null,
+ };
+ }
+
+ /**
+ * Build an URL with search parameters.
+ *
+ * @param {string} search - The current search being performed.
+ * @param {string|null} filterCrates - The current filtering crate (if any).
+ *
+ * @return {string}
+ */
+ function buildUrl(search, filterCrates) {
+ let extra = "?search=" + encodeURIComponent(search);
+
+ if (filterCrates !== null) {
+ extra += "&filter-crate=" + encodeURIComponent(filterCrates);
+ }
+ return getNakedUrl() + extra + window.location.hash;
+ }
+
+ /**
+ * Return the filtering crate or `null` if there is none.
+ *
+ * @return {string|null}
+ */
+ function getFilterCrates() {
+ const elem = document.getElementById("crate-search");
+
+ if (elem &&
+ elem.value !== "All crates" &&
+ hasOwnPropertyRustdoc(rawSearchIndex, elem.value)
+ ) {
+ return elem.value;
+ }
+ return null;
+ }
+
+ /**
+ * Parses the query.
+ *
+ * The syntax supported by this parser is as follows:
+ *
+ * ident = *(ALPHA / DIGIT / "_") [!]
+ * path = ident *(DOUBLE-COLON ident)
+ * arg = path [generics]
+ * arg-without-generic = path
+ * type-sep = COMMA/WS *(COMMA/WS)
+ * nonempty-arg-list = *(type-sep) arg *(type-sep arg) *(type-sep)
+ * nonempty-arg-list-without-generics = *(type-sep) arg-without-generic
+ * *(type-sep arg-without-generic) *(type-sep)
+ * generics = OPEN-ANGLE-BRACKET [ nonempty-arg-list-without-generics ] *(type-sep)
+ * CLOSE-ANGLE-BRACKET/EOF
+ * return-args = RETURN-ARROW *(type-sep) nonempty-arg-list
+ *
+ * exact-search = [type-filter *WS COLON] [ RETURN-ARROW ] *WS QUOTE ident QUOTE [ generics ]
+ * type-search = [type-filter *WS COLON] [ nonempty-arg-list ] [ return-args ]
+ *
+ * query = *WS (exact-search / type-search) *WS
+ *
+ * type-filter = (
+ * "mod" /
+ * "externcrate" /
+ * "import" /
+ * "struct" /
+ * "enum" /
+ * "fn" /
+ * "type" /
+ * "static" /
+ * "trait" /
+ * "impl" /
+ * "tymethod" /
+ * "method" /
+ * "structfield" /
+ * "variant" /
+ * "macro" /
+ * "primitive" /
+ * "associatedtype" /
+ * "constant" /
+ * "associatedconstant" /
+ * "union" /
+ * "foreigntype" /
+ * "keyword" /
+ * "existential" /
+ * "attr" /
+ * "derive" /
+ * "traitalias")
+ *
+ * OPEN-ANGLE-BRACKET = "<"
+ * CLOSE-ANGLE-BRACKET = ">"
+ * COLON = ":"
+ * DOUBLE-COLON = "::"
+ * QUOTE = %x22
+ * COMMA = ","
+ * RETURN-ARROW = "->"
+ *
+ * ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
+ * DIGIT = %x30-39
+ * WS = %x09 / " "
+ *
+ * @param {string} userQuery - The user query
+ *
+ * @return {ParsedQuery} - The parsed query
+ */
+ function parseQuery(userQuery) {
+ userQuery = userQuery.trim();
+ const parserState = {
+ length: userQuery.length,
+ pos: 0,
+ // Total number of elements (includes generics).
+ totalElems: 0,
+ genericsElems: 0,
+ typeFilter: null,
+ userQuery: userQuery.toLowerCase(),
+ };
+ let query = newParsedQuery(userQuery);
+
+ try {
+ parseInput(query, parserState);
+ if (parserState.typeFilter !== null) {
+ let typeFilter = parserState.typeFilter;
+ if (typeFilter === "const") {
+ typeFilter = "constant";
+ }
+ query.typeFilter = itemTypeFromName(typeFilter);
+ }
+ } catch (err) {
+ query = newParsedQuery(userQuery);
+ query.error = err.message;
+ query.typeFilter = -1;
+ return query;
+ }
+
+ if (!query.literalSearch) {
+ // If there is more than one element in the query, we switch to literalSearch in any
+ // case.
+ query.literalSearch = parserState.totalElems > 1;
+ }
+ query.foundElems = query.elems.length + query.returned.length;
+ return query;
+ }
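+
+ // A rough illustration of the result shape (not an exact dump): parsing "fn:vec -> usize"
+ // yields a ParsedQuery whose `typeFilter` is the index of "fn" in `itemTypes`, whose
+ // `elems` hold one QueryElement named "vec", whose `returned` hold one QueryElement named
+ // "usize", and whose `foundElems` is 2.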
+
+ /**
+ * Creates the query results.
+ *
+ * @param {Array<Result>} results_in_args
+ * @param {Array<Result>} results_returned
+ * @param {Array<Result>} results_others
+ * @param {ParsedQuery} parsedQuery
+ *
+ * @return {ResultsTable}
+ */
+ function createQueryResults(results_in_args, results_returned, results_others, parsedQuery) {
+ return {
+ "in_args": results_in_args,
+ "returned": results_returned,
+ "others": results_others,
+ "query": parsedQuery,
+ };
+ }
+
+ /**
+ * Executes the parsed query and builds a {ResultsTable}.
+ *
+ * @param {ParsedQuery} parsedQuery - The parsed user query
+ * @param {Object} searchWords - The list of search words to query against
+ * @param {Object} [filterCrates] - Crate to search in if defined
+ * @param {Object} [currentCrate] - Current crate, to rank results from this crate higher
+ *
+ * @return {ResultsTable}
+ */
+ function execQuery(parsedQuery, searchWords, filterCrates, currentCrate) {
+ const results_others = {}, results_in_args = {}, results_returned = {};
+
+ function transformResults(results) {
+ const duplicates = {};
+ const out = [];
+
+ for (const result of results) {
+ if (result.id > -1) {
+ const obj = searchIndex[result.id];
+ obj.lev = result.lev;
+ const res = buildHrefAndPath(obj);
+ obj.displayPath = pathSplitter(res[0]);
+ obj.fullPath = obj.displayPath + obj.name;
+                    // To make sure that some items aren't considered duplicates.
+ obj.fullPath += "|" + obj.ty;
+
+ if (duplicates[obj.fullPath]) {
+ continue;
+ }
+ duplicates[obj.fullPath] = true;
+
+ obj.href = res[1];
+ out.push(obj);
+ if (out.length >= MAX_RESULTS) {
+ break;
+ }
+ }
+ }
+ return out;
+ }
+
+ function sortResults(results, isType, preferredCrate) {
+ const userQuery = parsedQuery.userQuery;
+ const ar = [];
+ for (const entry in results) {
+ if (hasOwnPropertyRustdoc(results, entry)) {
+ const result = results[entry];
+ result.word = searchWords[result.id];
+ result.item = searchIndex[result.id] || {};
+ ar.push(result);
+ }
+ }
+ results = ar;
+ // if there are no results then return to default and fail
+ if (results.length === 0) {
+ return [];
+ }
+
+ results.sort((aaa, bbb) => {
+ let a, b;
+
+ // sort by exact match with regard to the last word (mismatch goes later)
+ a = (aaa.word !== userQuery);
+ b = (bbb.word !== userQuery);
+ if (a !== b) {
+ return a - b;
+ }
+
+                // Sort by non-levenshtein results and then levenshtein results by the distance
+                // (fewer changes required to match means a higher ranking)
+ a = (aaa.lev);
+ b = (bbb.lev);
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort by crate (current crate comes first)
+ a = (aaa.item.crate !== preferredCrate);
+ b = (bbb.item.crate !== preferredCrate);
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort by item name length (longer goes later)
+ a = aaa.word.length;
+ b = bbb.word.length;
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort by item name (lexicographically larger goes later)
+ a = aaa.word;
+ b = bbb.word;
+ if (a !== b) {
+ return (a > b ? +1 : -1);
+ }
+
+ // sort by index of keyword in item name (no literal occurrence goes later)
+ a = (aaa.index < 0);
+ b = (bbb.index < 0);
+ if (a !== b) {
+ return a - b;
+ }
+ // (later literal occurrence, if any, goes later)
+ a = aaa.index;
+ b = bbb.index;
+ if (a !== b) {
+ return a - b;
+ }
+
+ // special precedence for primitive and keyword pages
+ if ((aaa.item.ty === TY_PRIMITIVE && bbb.item.ty !== TY_KEYWORD) ||
+ (aaa.item.ty === TY_KEYWORD && bbb.item.ty !== TY_PRIMITIVE)) {
+ return -1;
+ }
+ if ((bbb.item.ty === TY_PRIMITIVE && aaa.item.ty !== TY_PRIMITIVE) ||
+ (bbb.item.ty === TY_KEYWORD && aaa.item.ty !== TY_KEYWORD)) {
+ return 1;
+ }
+
+ // sort by description (no description goes later)
+ a = (aaa.item.desc === "");
+ b = (bbb.item.desc === "");
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort by type (later occurrence in `itemTypes` goes later)
+ a = aaa.item.ty;
+ b = bbb.item.ty;
+ if (a !== b) {
+ return a - b;
+ }
+
+ // sort by path (lexicographically larger goes later)
+ a = aaa.item.path;
+ b = bbb.item.path;
+ if (a !== b) {
+ return (a > b ? +1 : -1);
+ }
+
+ // que sera, sera
+ return 0;
+ });
+
+ let nameSplit = null;
+ if (parsedQuery.elems.length === 1) {
+                const noPath = typeof parsedQuery.elems[0].path === "undefined";
+                nameSplit = noPath ? null : parsedQuery.elems[0].path;
+ }
+
+ for (const result of results) {
+ // this validation does not make sense when searching by types
+ if (result.dontValidate) {
+ continue;
+ }
+ const name = result.item.name.toLowerCase(),
+ path = result.item.path.toLowerCase(),
+ parent = result.item.parent;
+
+ if (!isType && !validateResult(name, path, nameSplit, parent)) {
+ result.id = -1;
+ }
+ }
+ return transformResults(results);
+ }
+
+ /**
+ * This function checks if the object (`row`) generics match the given type (`elem`)
+ * generics. If there are no generics on `row`, `defaultLev` is returned.
+ *
+ * @param {Row} row - The object to check.
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {integer} defaultLev - This is the value to return in case there are no generics.
+ *
+ * @return {integer} - Returns the best match (if any) or `MAX_LEV_DISTANCE + 1`.
+ */
+ function checkGenerics(row, elem, defaultLev) {
+ if (row.generics.length === 0) {
+ return elem.generics.length === 0 ? defaultLev : MAX_LEV_DISTANCE + 1;
+ } else if (row.generics.length > 0 && row.generics[0].name === null) {
+ return checkGenerics(row.generics[0], elem, defaultLev);
+ }
+ // The names match, but we need to be sure that all generics kinda
+ // match as well.
+ let elem_name;
+ if (elem.generics.length > 0 && row.generics.length >= elem.generics.length) {
+ const elems = Object.create(null);
+ for (const entry of row.generics) {
+ elem_name = entry.name;
+ if (elem_name === "") {
+                        // Pure generic, we need to check inside it.
+ if (checkGenerics(entry, elem, MAX_LEV_DISTANCE + 1) !== 0) {
+ return MAX_LEV_DISTANCE + 1;
+ }
+ continue;
+ }
+ if (elems[elem_name] === undefined) {
+ elems[elem_name] = 0;
+ }
+ elems[elem_name] += 1;
+ }
+                // We need to find the type that matches best and remove it so we can
+                // move forward.
+ for (const generic of elem.generics) {
+ let match = null;
+ if (elems[generic.name]) {
+ match = generic.name;
+ } else {
+ for (elem_name in elems) {
+ if (!hasOwnPropertyRustdoc(elems, elem_name)) {
+ continue;
+ }
+                            if (elem_name === generic.name) {
+ match = elem_name;
+ break;
+ }
+ }
+ }
+ if (match === null) {
+ return MAX_LEV_DISTANCE + 1;
+ }
+ elems[match] -= 1;
+ if (elems[match] === 0) {
+ delete elems[match];
+ }
+ }
+ return 0;
+ }
+ return MAX_LEV_DISTANCE + 1;
+ }
+
+ /**
+ * This function checks if the object (`row`) matches the given type (`elem`) and its
+ * generics (if any).
+ *
+ * @param {Row} row
+ * @param {QueryElement} elem - The element from the parsed query.
+ *
+ * @return {integer} - Returns a Levenshtein distance to the best match.
+ */
+ function checkIfInGenerics(row, elem) {
+ let lev = MAX_LEV_DISTANCE + 1;
+ for (const entry of row.generics) {
+ lev = Math.min(checkType(entry, elem, true), lev);
+ if (lev === 0) {
+ break;
+ }
+ }
+ return lev;
+ }
+
+ /**
+ * This function checks if the object (`row`) matches the given type (`elem`) and its
+ * generics (if any).
+ *
+ * @param {Row} row
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {boolean} literalSearch
+ *
+ * @return {integer} - Returns a Levenshtein distance to the best match. If there is
+ * no match, returns `MAX_LEV_DISTANCE + 1`.
+ */
+ function checkType(row, elem, literalSearch) {
+ if (row.name === null) {
+ // This is a pure "generic" search, no need to run other checks.
+ if (row.generics.length > 0) {
+ return checkIfInGenerics(row, elem);
+ }
+ return MAX_LEV_DISTANCE + 1;
+ }
+
+ let lev = levenshtein(row.name, elem.name);
+ if (literalSearch) {
+ if (lev !== 0) {
+ // The name didn't match, let's try to check if the generics do.
+ if (elem.generics.length === 0) {
+ const checkGeneric = row.generics.length > 0;
+ if (checkGeneric && row.generics
+ .findIndex(tmp_elem => tmp_elem.name === elem.name) !== -1) {
+ return 0;
+ }
+ }
+ return MAX_LEV_DISTANCE + 1;
+ } else if (elem.generics.length > 0) {
+ return checkGenerics(row, elem, MAX_LEV_DISTANCE + 1);
+ }
+ return 0;
+ } else if (row.generics.length > 0) {
+ if (elem.generics.length === 0) {
+ if (lev === 0) {
+ return 0;
+ }
+ // The name didn't match so we now check if the type we're looking for is inside
+ // the generics!
+ lev = checkIfInGenerics(row, elem);
+                    // Whatever happens now, the returned distance is "less good", so we mark it
+                    // as such by adding 0.5 to it.
+ return lev + 0.5;
+ } else if (lev > MAX_LEV_DISTANCE) {
+ // So our item's name doesn't match at all and has generics.
+ //
+ // Maybe it's present in a sub generic? For example "f<A<B<C>>>()", if we're
+ // looking for "B<C>", we'll need to go down.
+ return checkIfInGenerics(row, elem);
+ } else {
+                    // At this point, the name roughly matches and we have generics to check, so
+                    // let's go!
+ const tmp_lev = checkGenerics(row, elem, lev);
+ if (tmp_lev > MAX_LEV_DISTANCE) {
+ return MAX_LEV_DISTANCE + 1;
+ }
+ // We compute the median value of both checks and return it.
+ return (tmp_lev + lev) / 2;
+ }
+ } else if (elem.generics.length > 0) {
+                // In this case, we were expecting generics but there aren't any, so we simply
+                // reject this one.
+ return MAX_LEV_DISTANCE + 1;
+ }
+ // No generics on our query or on the target type so we can return without doing
+ // anything else.
+ return lev;
+ }
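+
+        // Illustrative sketch with hypothetical names: searching for an element "u8" against a
+        // row "Vec<u8>" fails the plain name comparison, but "u8" is found among the row's
+        // generics, so the branch above returns the generics distance plus the 0.5 penalty
+        // instead of a perfect 0.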
+
+ /**
+ * This function checks if the object (`row`) has an argument with the given type (`elem`).
+ *
+ * @param {Row} row
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {integer} typeFilter
+ *
+ * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
+ * match, returns `MAX_LEV_DISTANCE + 1`.
+ */
+ function findArg(row, elem, typeFilter) {
+ let lev = MAX_LEV_DISTANCE + 1;
+
+ if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
+ for (const input of row.type.inputs) {
+ if (!typePassesFilter(typeFilter, input.ty)) {
+ continue;
+ }
+ lev = Math.min(lev, checkType(input, elem, parsedQuery.literalSearch));
+ if (lev === 0) {
+ return 0;
+ }
+ }
+ }
+ return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev;
+ }
+
+ /**
+ * This function checks if the object (`row`) returns the given type (`elem`).
+ *
+ * @param {Row} row
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {integer} typeFilter
+ *
+ * @return {integer} - Returns a Levenshtein distance to the best match. If there is no
+ * match, returns `MAX_LEV_DISTANCE + 1`.
+ */
+ function checkReturned(row, elem, typeFilter) {
+ let lev = MAX_LEV_DISTANCE + 1;
+
+ if (row && row.type && row.type.output.length > 0) {
+ const ret = row.type.output;
+ for (const ret_ty of ret) {
+ if (!typePassesFilter(typeFilter, ret_ty.ty)) {
+ continue;
+ }
+ lev = Math.min(lev, checkType(ret_ty, elem, parsedQuery.literalSearch));
+ if (lev === 0) {
+ return 0;
+ }
+ }
+ }
+ return parsedQuery.literalSearch ? MAX_LEV_DISTANCE + 1 : lev;
+ }
+
+ function checkPath(contains, ty) {
+ if (contains.length === 0) {
+ return 0;
+ }
+ let ret_lev = MAX_LEV_DISTANCE + 1;
+ const path = ty.path.split("::");
+
+ if (ty.parent && ty.parent.name) {
+ path.push(ty.parent.name.toLowerCase());
+ }
+
+ const length = path.length;
+ const clength = contains.length;
+ if (clength > length) {
+ return MAX_LEV_DISTANCE + 1;
+ }
+ for (let i = 0; i < length; ++i) {
+ if (i + clength > length) {
+ break;
+ }
+ let lev_total = 0;
+ let aborted = false;
+ for (let x = 0; x < clength; ++x) {
+ const lev = levenshtein(path[i + x], contains[x]);
+ if (lev > MAX_LEV_DISTANCE) {
+ aborted = true;
+ break;
+ }
+ lev_total += lev;
+ }
+ if (!aborted) {
+ ret_lev = Math.min(ret_lev, Math.round(lev_total / clength));
+ }
+ }
+ return ret_lev;
+ }
+
+ function typePassesFilter(filter, type) {
+            // No filter or exact match
+ if (filter <= NO_TYPE_FILTER || filter === type) return true;
+
+ // Match related items
+ const name = itemTypes[type];
+ switch (itemTypes[filter]) {
+ case "constant":
+ return name === "associatedconstant";
+ case "fn":
+ return name === "method" || name === "tymethod";
+ case "type":
+ return name === "primitive" || name === "associatedtype";
+ case "trait":
+ return name === "traitalias";
+ }
+
+ // No match
+ return false;
+ }
+
+ function createAliasFromItem(item) {
+ return {
+ crate: item.crate,
+ name: item.name,
+ path: item.path,
+ desc: item.desc,
+ ty: item.ty,
+ parent: item.parent,
+ type: item.type,
+ is_alias: true,
+ };
+ }
+
+ function handleAliases(ret, query, filterCrates, currentCrate) {
+ const lowerQuery = query.toLowerCase();
+ // We separate aliases and crate aliases because we want to have current crate
+ // aliases to be before the others in the displayed results.
+ const aliases = [];
+ const crateAliases = [];
+ if (filterCrates !== null) {
+ if (ALIASES[filterCrates] && ALIASES[filterCrates][lowerQuery]) {
+ const query_aliases = ALIASES[filterCrates][lowerQuery];
+ for (const alias of query_aliases) {
+ aliases.push(createAliasFromItem(searchIndex[alias]));
+ }
+ }
+ } else {
+ Object.keys(ALIASES).forEach(crate => {
+ if (ALIASES[crate][lowerQuery]) {
+ const pushTo = crate === currentCrate ? crateAliases : aliases;
+ const query_aliases = ALIASES[crate][lowerQuery];
+ for (const alias of query_aliases) {
+ pushTo.push(createAliasFromItem(searchIndex[alias]));
+ }
+ }
+ });
+ }
+
+ const sortFunc = (aaa, bbb) => {
+ if (aaa.path < bbb.path) {
+ return 1;
+ } else if (aaa.path === bbb.path) {
+ return 0;
+ }
+ return -1;
+ };
+ crateAliases.sort(sortFunc);
+ aliases.sort(sortFunc);
+
+ const pushFunc = alias => {
+ alias.alias = query;
+ const res = buildHrefAndPath(alias);
+ alias.displayPath = pathSplitter(res[0]);
+ alias.fullPath = alias.displayPath + alias.name;
+ alias.href = res[1];
+
+ ret.others.unshift(alias);
+ if (ret.others.length > MAX_RESULTS) {
+ ret.others.pop();
+ }
+ };
+
+ aliases.forEach(pushFunc);
+ crateAliases.forEach(pushFunc);
+ }
+
+ /**
+ * This function adds the given result into the provided `results` map if it matches the
+ * following condition:
+ *
+ * * If it is a "literal search" (`parsedQuery.literalSearch`), then `lev` must be 0.
+ * * If it is not a "literal search", `lev` must be <= `MAX_LEV_DISTANCE`.
+ *
+ * The `results` map contains information which will be used to sort the search results:
+ *
+         * * `fullId` is a `string` used as the key of the object we use for the `results` map.
+ * * `id` is the index in both `searchWords` and `searchIndex` arrays for this element.
+         * * `index` is an `integer` used to sort by the position of the word in the item's name.
+ * * `lev` is the main metric used to sort the search results.
+ *
+ * @param {Results} results
+ * @param {string} fullId
+ * @param {integer} id
+ * @param {integer} index
+ * @param {integer} lev
+ */
+ function addIntoResults(results, fullId, id, index, lev) {
+ if (lev === 0 || (!parsedQuery.literalSearch && lev <= MAX_LEV_DISTANCE)) {
+ if (results[fullId] !== undefined) {
+ const result = results[fullId];
+ if (result.dontValidate || result.lev <= lev) {
+ return;
+ }
+ }
+ results[fullId] = {
+ id: id,
+ index: index,
+ dontValidate: parsedQuery.literalSearch,
+ lev: lev,
+ };
+ }
+ }
+
+ /**
+         * This function is called when the query contains only one element (with or without
+         * generics). This element is compared against item names as well as their arguments
+         * and returned values.
+         *
+         * Another important thing to note: since there is only one element, we use levenshtein
+         * distance for name comparisons.
+ *
+ * @param {Row} row
+ * @param {integer} pos - Position in the `searchIndex`.
+ * @param {QueryElement} elem - The element from the parsed query.
+ * @param {Results} results_others - Unqualified results (not in arguments nor in
+ * returned values).
+ * @param {Results} results_in_args - Matching arguments results.
+ * @param {Results} results_returned - Matching returned arguments results.
+ */
+ function handleSingleArg(
+ row,
+ pos,
+ elem,
+ results_others,
+ results_in_args,
+ results_returned
+ ) {
+ if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
+ return;
+ }
+ let lev, lev_add = 0, index = -1;
+ const fullId = row.id;
+
+ const in_args = findArg(row, elem, parsedQuery.typeFilter);
+ const returned = checkReturned(row, elem, parsedQuery.typeFilter);
+
+ addIntoResults(results_in_args, fullId, pos, index, in_args);
+ addIntoResults(results_returned, fullId, pos, index, returned);
+
+ if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
+ return;
+ }
+ const searchWord = searchWords[pos];
+
+ if (parsedQuery.literalSearch) {
+ if (searchWord === elem.name) {
+ addIntoResults(results_others, fullId, pos, -1, 0);
+ }
+ return;
+ }
+
+ // No need to check anything else if it's a "pure" generics search.
+ if (elem.name.length === 0) {
+ if (row.type !== null) {
+ lev = checkGenerics(row.type, elem, MAX_LEV_DISTANCE + 1);
+ addIntoResults(results_others, fullId, pos, index, lev);
+ }
+ return;
+ }
+
+ if (elem.fullPath.length > 1) {
+ lev = checkPath(elem.pathWithoutLast, row);
+ if (lev > MAX_LEV_DISTANCE || (parsedQuery.literalSearch && lev !== 0)) {
+ return;
+ } else if (lev > 0) {
+ lev_add = lev / 10;
+ }
+ }
+
+ if (searchWord.indexOf(elem.pathLast) > -1 ||
+ row.normalizedName.indexOf(elem.pathLast) > -1
+ ) {
+ index = row.normalizedName.indexOf(elem.pathLast);
+ }
+ lev = levenshtein(searchWord, elem.pathLast);
+ if (lev > 0 && elem.pathLast.length > 2 && searchWord.indexOf(elem.pathLast) > -1) {
+ if (elem.pathLast.length < 6) {
+ lev = 1;
+ } else {
+ lev = 0;
+ }
+ }
+ lev += lev_add;
+ if (lev > MAX_LEV_DISTANCE) {
+ return;
+ } else if (index !== -1 && elem.fullPath.length < 2) {
+ lev -= 1;
+ }
+ if (lev < 0) {
+ lev = 0;
+ }
+ addIntoResults(results_others, fullId, pos, index, lev);
+ }
+
+ /**
+         * This function is called when the query has more than one element. In this case, it'll
+         * try to match the items which validate all the elements. For `aa -> bb`, it will look for
+         * functions which have a parameter `aa` and have `bb` in their returned values.
+ *
+ * @param {Row} row
+ * @param {integer} pos - Position in the `searchIndex`.
+ * @param {Object} results
+ */
+ function handleArgs(row, pos, results) {
+ if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
+ return;
+ }
+
+ let totalLev = 0;
+ let nbLev = 0;
+
+ // If the result is too "bad", we return false and it ends this search.
+ function checkArgs(elems, callback) {
+ for (const elem of elems) {
+ // There is more than one parameter to the query so all checks should be "exact"
+ const lev = callback(row, elem, NO_TYPE_FILTER);
+ if (lev <= 1) {
+ nbLev += 1;
+ totalLev += lev;
+ } else {
+ return false;
+ }
+ }
+ return true;
+ }
+ if (!checkArgs(parsedQuery.elems, findArg)) {
+ return;
+ }
+ if (!checkArgs(parsedQuery.returned, checkReturned)) {
+ return;
+ }
+
+ if (nbLev === 0) {
+ return;
+ }
+ const lev = Math.round(totalLev / nbLev);
+ addIntoResults(results, row.id, pos, 0, lev);
+ }
+
+ function innerRunQuery() {
+ let elem, i, nSearchWords, in_returned, row;
+
+ if (parsedQuery.foundElems === 1) {
+ if (parsedQuery.elems.length === 1) {
+ elem = parsedQuery.elems[0];
+ for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
+ // It means we want to check for this element everywhere (in names, args and
+ // returned).
+ handleSingleArg(
+ searchIndex[i],
+ i,
+ elem,
+ results_others,
+ results_in_args,
+ results_returned
+ );
+ }
+ } else if (parsedQuery.returned.length === 1) {
+                    // We received one returned argument to check, so we look into returned values.
+ elem = parsedQuery.returned[0];
+ for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
+ row = searchIndex[i];
+ in_returned = checkReturned(row, elem, parsedQuery.typeFilter);
+ addIntoResults(results_others, row.id, i, -1, in_returned);
+ }
+ }
+ } else if (parsedQuery.foundElems > 0) {
+ for (i = 0, nSearchWords = searchWords.length; i < nSearchWords; ++i) {
+ handleArgs(searchIndex[i], i, results_others);
+ }
+ }
+ }
+
+ if (parsedQuery.error === null) {
+ innerRunQuery();
+ }
+
+ const ret = createQueryResults(
+ sortResults(results_in_args, true, currentCrate),
+ sortResults(results_returned, true, currentCrate),
+ sortResults(results_others, false, currentCrate),
+ parsedQuery);
+ handleAliases(ret, parsedQuery.original.replace(/"/g, ""), filterCrates, currentCrate);
+ if (parsedQuery.error !== null && ret.others.length !== 0) {
+ // It means some doc aliases were found so let's "remove" the error!
+ ret.query.error = null;
+ }
+ return ret;
+ }
+
+ /**
+     * Validate performs the following boolean logic. For example, for the query "File::open":
+     *
+     *   IF A PARENT EXISTS => ("file" && "open") exists in (name || path || parent)
+     *   OR                 => ("file" && "open") exists in (name || path)
+ *
+ * This could be written functionally, but I wanted to minimise
+ * functions on stack.
+ *
+ * @param {string} name - The name of the result
+ * @param {string} path - The path of the result
+ * @param {string} keys - The keys to be used (["file", "open"])
+ * @param {Object} parent - The parent of the result
+ *
+ * @return {boolean} - Whether the result is valid or not
+ */
+ function validateResult(name, path, keys, parent) {
+ if (!keys || !keys.length) {
+ return true;
+ }
+ for (const key of keys) {
+ // each check is for validation so we negate the conditions and invalidate
+ if (!(
+ // check for an exact name match
+ name.indexOf(key) > -1 ||
+ // then an exact path match
+ path.indexOf(key) > -1 ||
+ // next if there is a parent, check for exact parent match
+ (parent !== undefined && parent.name !== undefined &&
+ parent.name.toLowerCase().indexOf(key) > -1) ||
+ // lastly check to see if the name was a levenshtein match
+ levenshtein(name, key) <= MAX_LEV_DISTANCE)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ function nextTab(direction) {
+ const next = (searchState.currentTab + direction + 3) % searchState.focusedByTab.length;
+ searchState.focusedByTab[searchState.currentTab] = document.activeElement;
+ printTab(next);
+ focusSearchResult();
+ }
+
+ // Focus the first search result on the active tab, or the result that
+ // was focused last time this tab was active.
+ function focusSearchResult() {
+ const target = searchState.focusedByTab[searchState.currentTab] ||
+ document.querySelectorAll(".search-results.active a").item(0) ||
+ document.querySelectorAll("#titles > button").item(searchState.currentTab);
+ if (target) {
+ target.focus();
+ }
+ }
+
+ function buildHrefAndPath(item) {
+ let displayPath;
+ let href;
+ const type = itemTypes[item.ty];
+ const name = item.name;
+ let path = item.path;
+
+ if (type === "mod") {
+ displayPath = path + "::";
+ href = ROOT_PATH + path.replace(/::/g, "/") + "/" +
+ name + "/index.html";
+ } else if (type === "import") {
+ displayPath = item.path + "::";
+ href = ROOT_PATH + item.path.replace(/::/g, "/") + "/index.html#reexport." + name;
+ } else if (type === "primitive" || type === "keyword") {
+ displayPath = "";
+ href = ROOT_PATH + path.replace(/::/g, "/") +
+ "/" + type + "." + name + ".html";
+ } else if (type === "externcrate") {
+ displayPath = "";
+ href = ROOT_PATH + name + "/index.html";
+ } else if (item.parent !== undefined) {
+ const myparent = item.parent;
+ let anchor = "#" + type + "." + name;
+ const parentType = itemTypes[myparent.ty];
+ let pageType = parentType;
+ let pageName = myparent.name;
+
+ if (parentType === "primitive") {
+ displayPath = myparent.name + "::";
+ } else if (type === "structfield" && parentType === "variant") {
+ // Structfields belonging to variants are special: the
+ // final path element is the enum name.
+ const enumNameIdx = item.path.lastIndexOf("::");
+ const enumName = item.path.substr(enumNameIdx + 2);
+ path = item.path.substr(0, enumNameIdx);
+ displayPath = path + "::" + enumName + "::" + myparent.name + "::";
+ anchor = "#variant." + myparent.name + ".field." + name;
+ pageType = "enum";
+ pageName = enumName;
+ } else {
+ displayPath = path + "::" + myparent.name + "::";
+ }
+ href = ROOT_PATH + path.replace(/::/g, "/") +
+ "/" + pageType +
+ "." + pageName +
+ ".html" + anchor;
+ } else {
+ displayPath = item.path + "::";
+ href = ROOT_PATH + item.path.replace(/::/g, "/") +
+ "/" + type + "." + name + ".html";
+ }
+ return [displayPath, href];
+ }
+
+ function escape(content) {
+ const h1 = document.createElement("h1");
+ h1.textContent = content;
+ return h1.innerHTML;
+ }
+
+ function pathSplitter(path) {
+ const tmp = "<span>" + path.replace(/::/g, "::</span><span>");
+ if (tmp.endsWith("<span>")) {
+ return tmp.slice(0, tmp.length - 6);
+ }
+ return tmp;
+ }
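+
+    // For example, pathSplitter("std::vec") returns "<span>std::</span><span>vec" (the last
+    // span is closed implicitly once the string is inserted as HTML), while the trailing empty
+    // "<span>" that a path ending in "::" would produce is stripped by the check above.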
+
+ /**
+ * Render a set of search results for a single tab.
+ * @param {Array<?>} array - The search results for this tab
+ * @param {ParsedQuery} query
+ * @param {boolean} display - True if this is the active tab
+ */
+ function addTab(array, query, display) {
+ let extraClass = "";
+ if (display === true) {
+ extraClass = " active";
+ }
+
+ const output = document.createElement("div");
+ let length = 0;
+ if (array.length > 0) {
+ output.className = "search-results " + extraClass;
+
+ array.forEach(item => {
+ const name = item.name;
+ const type = itemTypes[item.ty];
+
+ length += 1;
+
+ let extra = "";
+ if (type === "primitive") {
+ extra = " <i>(primitive type)</i>";
+ } else if (type === "keyword") {
+ extra = " <i>(keyword)</i>";
+ }
+
+ const link = document.createElement("a");
+ link.className = "result-" + type;
+ link.href = item.href;
+
+ const wrapper = document.createElement("div");
+ const resultName = document.createElement("div");
+ resultName.className = "result-name";
+
+ if (item.is_alias) {
+ const alias = document.createElement("span");
+ alias.className = "alias";
+
+ const bold = document.createElement("b");
+ bold.innerText = item.alias;
+ alias.appendChild(bold);
+
+ alias.insertAdjacentHTML(
+ "beforeend",
+ "<span class=\"grey\"><i>&nbsp;- see&nbsp;</i></span>");
+
+ resultName.appendChild(alias);
+ }
+ resultName.insertAdjacentHTML(
+ "beforeend",
+ item.displayPath + "<span class=\"" + type + "\">" + name + extra + "</span>");
+ wrapper.appendChild(resultName);
+
+ const description = document.createElement("div");
+ description.className = "desc";
+ const spanDesc = document.createElement("span");
+ spanDesc.insertAdjacentHTML("beforeend", item.desc);
+
+ description.appendChild(spanDesc);
+ wrapper.appendChild(description);
+ link.appendChild(wrapper);
+ output.appendChild(link);
+ });
+ } else if (query.error === null) {
+ output.className = "search-failed" + extraClass;
+ output.innerHTML = "No results :(<br/>" +
+ "Try on <a href=\"https://duckduckgo.com/?q=" +
+ encodeURIComponent("rust " + query.userQuery) +
+ "\">DuckDuckGo</a>?<br/><br/>" +
+ "Or try looking in one of these:<ul><li>The <a " +
+ "href=\"https://doc.rust-lang.org/reference/index.html\">Rust Reference</a> " +
+ " for technical details about the language.</li><li><a " +
+ "href=\"https://doc.rust-lang.org/rust-by-example/index.html\">Rust By " +
+ "Example</a> for expository code examples.</a></li><li>The <a " +
+ "href=\"https://doc.rust-lang.org/book/index.html\">Rust Book</a> for " +
+ "introductions to language features and the language itself.</li><li><a " +
+ "href=\"https://docs.rs\">Docs.rs</a> for documentation of crates released on" +
+ " <a href=\"https://crates.io/\">crates.io</a>.</li></ul>";
+ }
+ return [output, length];
+ }
+
+ function makeTabHeader(tabNb, text, nbElems) {
+ if (searchState.currentTab === tabNb) {
+ return "<button class=\"selected\">" + text +
+ " <div class=\"count\">(" + nbElems + ")</div></button>";
+ }
+ return "<button>" + text + " <div class=\"count\">(" + nbElems + ")</div></button>";
+ }
+
+ /**
+ * @param {ResultsTable} results
+ * @param {boolean} go_to_first
+ * @param {string} filterCrates
+ */
+ function showResults(results, go_to_first, filterCrates) {
+ const search = searchState.outputElement();
+ if (go_to_first || (results.others.length === 1
+ && getSettingValue("go-to-only-result") === "true"
+            // By default, the search DOM element is "empty" (meaning it has no children nor
+ // text content). Once a search has been run, it won't be empty, even if you press
+ // ESC or empty the search input (which also "cancels" the search).
+ && (!search.firstChild || search.firstChild.innerText !== searchState.loadingText))
+ ) {
+ const elem = document.createElement("a");
+ elem.href = results.others[0].href;
+ removeClass(elem, "active");
+ // For firefox, we need the element to be in the DOM so it can be clicked.
+ document.body.appendChild(elem);
+ elem.click();
+ return;
+ }
+ if (results.query === undefined) {
+ results.query = parseQuery(searchState.input.value);
+ }
+
+ currentResults = results.query.userQuery;
+
+ const ret_others = addTab(results.others, results.query, true);
+ const ret_in_args = addTab(results.in_args, results.query, false);
+ const ret_returned = addTab(results.returned, results.query, false);
+
+ // Navigate to the relevant tab if the current tab is empty, like in case users search
+ // for "-> String". If they had selected another tab previously, they have to click on
+ // it again.
+ let currentTab = searchState.currentTab;
+ if ((currentTab === 0 && ret_others[1] === 0) ||
+ (currentTab === 1 && ret_in_args[1] === 0) ||
+ (currentTab === 2 && ret_returned[1] === 0)) {
+ if (ret_others[1] !== 0) {
+ currentTab = 0;
+ } else if (ret_in_args[1] !== 0) {
+ currentTab = 1;
+ } else if (ret_returned[1] !== 0) {
+ currentTab = 2;
+ }
+ }
+
+ let crates = "";
+ const crates_list = Object.keys(rawSearchIndex);
+ if (crates_list.length > 1) {
+ crates = " in <select id=\"crate-search\"><option value=\"All crates\">" +
+ "All crates</option>";
+ for (const c of crates_list) {
+ crates += `<option value="${c}" ${c === filterCrates && "selected"}>${c}</option>`;
+ }
+ crates += "</select>";
+ }
+
+ let typeFilter = "";
+ if (results.query.typeFilter !== NO_TYPE_FILTER) {
+ typeFilter = " (type: " + escape(itemTypes[results.query.typeFilter]) + ")";
+ }
+
+ let output = "<div id=\"search-settings\">" +
+ `<h1 class="search-results-title">Results for ${escape(results.query.userQuery)}` +
+ `${typeFilter}</h1>${crates}</div>`;
+ if (results.query.error !== null) {
+ output += `<h3>Query parser error: "${results.query.error}".</h3>`;
+ output += "<div id=\"titles\">" +
+ makeTabHeader(0, "In Names", ret_others[1]) +
+ "</div>";
+ currentTab = 0;
+ } else if (results.query.foundElems <= 1 && results.query.returned.length === 0) {
+ output += "<div id=\"titles\">" +
+ makeTabHeader(0, "In Names", ret_others[1]) +
+ makeTabHeader(1, "In Parameters", ret_in_args[1]) +
+ makeTabHeader(2, "In Return Types", ret_returned[1]) +
+ "</div>";
+ } else {
+ const signatureTabTitle =
+ results.query.elems.length === 0 ? "In Function Return Types" :
+ results.query.returned.length === 0 ? "In Function Parameters" :
+ "In Function Signatures";
+ output += "<div id=\"titles\">" +
+ makeTabHeader(0, signatureTabTitle, ret_others[1]) +
+ "</div>";
+ currentTab = 0;
+ }
+
+ const resultsElem = document.createElement("div");
+ resultsElem.id = "results";
+ resultsElem.appendChild(ret_others[0]);
+ resultsElem.appendChild(ret_in_args[0]);
+ resultsElem.appendChild(ret_returned[0]);
+
+ search.innerHTML = output;
+ const crateSearch = document.getElementById("crate-search");
+ if (crateSearch) {
+ crateSearch.addEventListener("input", updateCrate);
+ }
+ search.appendChild(resultsElem);
+ // Reset focused elements.
+ searchState.showResults(search);
+ const elems = document.getElementById("titles").childNodes;
+ searchState.focusedByTab = [];
+ let i = 0;
+ for (const elem of elems) {
+ const j = i;
+ elem.onclick = () => printTab(j);
+ searchState.focusedByTab.push(null);
+ i += 1;
+ }
+ printTab(currentTab);
+ }
+
+ /**
+ * Perform a search based on the current state of the search input element
+ * and display the results.
+ * @param {Event} [e] - The event that triggered this search, if any
+ * @param {boolean} [forced]
+ */
+ function search(e, forced) {
+ const params = searchState.getQueryStringParams();
+ const query = parseQuery(searchState.input.value.trim());
+
+ if (e) {
+ e.preventDefault();
+ }
+
+ if (!forced && query.userQuery === currentResults) {
+ if (query.userQuery.length > 0) {
+ putBackSearch();
+ }
+ return;
+ }
+
+ let filterCrates = getFilterCrates();
+
+ // In case we have no information about the saved crate and there is a URL query parameter,
+ // we override it with the URL query parameter.
+ if (filterCrates === null && params["filter-crate"] !== undefined) {
+ filterCrates = params["filter-crate"];
+ }
+
+ // Update document title to maintain a meaningful browser history
+ searchState.title = "Results for " + query.original + " - Rust";
+
+ // Because searching is incremental by character, only the most
+ // recent search query is added to the browser history.
+ if (browserSupportsHistoryApi()) {
+ const newURL = buildUrl(query.original, filterCrates);
+
+ if (!history.state && !params.search) {
+ history.pushState(null, "", newURL);
+ } else {
+ history.replaceState(null, "", newURL);
+ }
+ }
+
+ showResults(
+ execQuery(query, searchWords, filterCrates, window.currentCrate),
+ params.go_to_first,
+ filterCrates);
+ }
+
+ /**
+ * Convert a list of RawFunctionType / ID to object-based FunctionType.
+ *
+ * Crates often have lots of functions in them, and it's common to have a large number of
+ * functions that operate on a small set of data types, so the search index compresses them
+ * by encoding function parameter and return types as indexes into an array of names.
+ *
+ * Even when a general-purpose compression algorithm is used, this is still a win. I checked.
+ * https://github.com/rust-lang/rust/pull/98475#issue-1284395985
+ *
+ * The format for individual function types is encoded in
+ * librustdoc/html/render/mod.rs: impl Serialize for RenderType
+ *
+ * @param {null|Array<RawFunctionType>} types
+ * @param {Array<{name: string, ty: number}>} lowercasePaths
+ *
+ * @return {Array<FunctionSearchType>}
+ */
+ function buildItemSearchTypeAll(types, lowercasePaths) {
+ const PATH_INDEX_DATA = 0;
+ const GENERICS_DATA = 1;
+ return types.map(type => {
+ let pathIndex, generics;
+ if (typeof type === "number") {
+ pathIndex = type;
+ generics = [];
+ } else {
+ pathIndex = type[PATH_INDEX_DATA];
+ generics = buildItemSearchTypeAll(type[GENERICS_DATA], lowercasePaths);
+ }
+ return {
+ // `0` is used as a sentinel because it's fewer bytes than `null`
+ name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
+ generics: generics,
+ };
+ });
+ }
+
+ /**
+ * Convert from RawFunctionSearchType to FunctionSearchType.
+ *
+ * Crates often have lots of functions in them, and function signatures are sometimes complex,
+ * so rustdoc uses a pretty tight encoding for them. This function converts it to a simpler,
+ * object-based encoding so that the actual search code is more readable and easier to debug.
+ *
+ * The raw function search type format is generated using serde in
+ * librustdoc/html/render/mod.rs: impl Serialize for IndexItemFunctionType
+ *
+ * @param {RawFunctionSearchType} functionSearchType
+ * @param {Array<{name: string, ty: number}>} lowercasePaths
+ *
+ * @return {null|FunctionSearchType}
+ */
+ function buildFunctionSearchType(functionSearchType, lowercasePaths) {
+ const INPUTS_DATA = 0;
+ const OUTPUT_DATA = 1;
+ // `0` is used as a sentinel because it's fewer bytes than `null`
+ if (functionSearchType === 0) {
+ return null;
+ }
+ let inputs, output;
+ if (typeof functionSearchType[INPUTS_DATA] === "number") {
+ const pathIndex = functionSearchType[INPUTS_DATA];
+ inputs = [{
+ name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
+ generics: [],
+ }];
+ } else {
+ inputs = buildItemSearchTypeAll(functionSearchType[INPUTS_DATA], lowercasePaths);
+ }
+ if (functionSearchType.length > 1) {
+ if (typeof functionSearchType[OUTPUT_DATA] === "number") {
+ const pathIndex = functionSearchType[OUTPUT_DATA];
+ output = [{
+ name: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].name,
+ ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
+ generics: [],
+ }];
+ } else {
+ output = buildItemSearchTypeAll(functionSearchType[OUTPUT_DATA], lowercasePaths);
+ }
+ } else {
+ output = [];
+ }
+ return {
+ inputs, output,
+ };
+ }
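+
+    // Illustrative sketch of the decoding above, using hypothetical raw data: with
+    // lowercasePaths = [{name: "vec", ty: ...}, {name: "u8", ty: ...}], the raw value [[1], 2]
+    // decodes to {inputs: [{name: "vec", ty: ..., generics: []}], output: [{name: "u8",
+    // ty: ..., generics: []}]}, while the raw value 0 decodes to `null` (the item is not a
+    // function).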
+
+ function buildIndex(rawSearchIndex) {
+ searchIndex = [];
+ /**
+ * @type {Array<string>}
+ */
+ const searchWords = [];
+ let i, word;
+ let currentIndex = 0;
+ let id = 0;
+
+ for (const crate in rawSearchIndex) {
+ if (!hasOwnPropertyRustdoc(rawSearchIndex, crate)) {
+ continue;
+ }
+
+ let crateSize = 0;
+
+ /**
+             * The raw search data for a given crate. `n`, `t`, `d`, `q`, `i`, and `f`
+ * are arrays with the same length. n[i] contains the name of an item.
+ * t[i] contains the type of that item (as a small integer that represents an
+ * offset in `itemTypes`). d[i] contains the description of that item.
+ *
+ * q[i] contains the full path of the item, or an empty string indicating
+ * "same as q[i-1]".
+ *
+ * i[i] contains an item's parent, usually a module. For compactness,
+ * it is a set of indexes into the `p` array.
+ *
+ * f[i] contains function signatures, or `0` if the item isn't a function.
+ * Functions are themselves encoded as arrays. The first item is a list of
+ * types representing the function's inputs, and the second list item is a list
+ * of types representing the function's output. Tuples are flattened.
+ * Types are also represented as arrays; the first item is an index into the `p`
+ * array, while the second is a list of types representing any generic parameters.
+ *
+ * `a` defines aliases with an Array of pairs: [name, offset], where `offset`
+ * points into the n/t/d/q/i/f arrays.
+ *
+ * `doc` contains the description of the crate.
+ *
+ * `p` is a list of path/type pairs. It is used for parents and function parameters.
+ *
+ * @type {{
+ * doc: string,
+ * a: Object,
+ * n: Array<string>,
+ * t: Array<Number>,
+ * d: Array<string>,
+ * q: Array<string>,
+ * i: Array<Number>,
+ * f: Array<RawFunctionSearchType>,
+ * p: Array<Object>,
+ * }}
+ */
+ const crateCorpus = rawSearchIndex[crate];
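+            // Rough, hypothetical sketch of a tiny corpus matching the shape documented above
+            // (real indexes are generated by librustdoc and are much larger):
+            //     {"doc": "crate docs", "n": ["Foo"], "t": [3], "d": ["Some item."],
+            //      "q": ["mycrate"], "i": [0], "f": [0], "p": [], "a": {}}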
+
+ searchWords.push(crate);
+ // This object should have exactly the same set of fields as the "row"
+ // object defined below. Your JavaScript runtime will thank you.
+ // https://mathiasbynens.be/notes/shapes-ics
+ const crateRow = {
+ crate: crate,
+ ty: 1, // == ExternCrate
+ name: crate,
+ path: "",
+ desc: crateCorpus.doc,
+ parent: undefined,
+ type: null,
+ id: id,
+ normalizedName: crate.indexOf("_") === -1 ? crate : crate.replace(/_/g, ""),
+ };
+ id += 1;
+ searchIndex.push(crateRow);
+ currentIndex += 1;
+
+ // an array of (Number) item types
+ const itemTypes = crateCorpus.t;
+ // an array of (String) item names
+ const itemNames = crateCorpus.n;
+ // an array of (String) full paths (or empty string for previous path)
+ const itemPaths = crateCorpus.q;
+ // an array of (String) descriptions
+ const itemDescs = crateCorpus.d;
+ // an array of (Number) the parent path index + 1 to `paths`, or 0 if none
+ const itemParentIdxs = crateCorpus.i;
+ // an array of (Object | null) the type of the function, if any
+ const itemFunctionSearchTypes = crateCorpus.f;
+ // an array of [(Number) item type,
+ // (String) name]
+ const paths = crateCorpus.p;
+ // an array of [(String) alias name
+ // [Number] index to items]
+ const aliases = crateCorpus.a;
+
+ // an array of [{name: String, ty: Number}]
+ const lowercasePaths = [];
+
+            // convert `paths` entries into object form
+            // generate `lowercasePaths` for the function search mode
+ let len = paths.length;
+ for (i = 0; i < len; ++i) {
+ lowercasePaths.push({ty: paths[i][0], name: paths[i][1].toLowerCase()});
+ paths[i] = {ty: paths[i][0], name: paths[i][1]};
+ }
+
+ // convert `item*` into an object form, and construct word indices.
+ //
+            // before any analysis is performed, let's gather the search terms to
+ // search against apart from the rest of the data. This is a quick
+ // operation that is cached for the life of the page state so that
+ // all other search operations have access to this cached data for
+ // faster analysis operations
+ len = itemTypes.length;
+ let lastPath = "";
+ for (i = 0; i < len; ++i) {
+ // This object should have exactly the same set of fields as the "crateRow"
+ // object defined above.
+ if (typeof itemNames[i] === "string") {
+ word = itemNames[i].toLowerCase();
+ searchWords.push(word);
+ } else {
+ word = "";
+ searchWords.push("");
+ }
+ const row = {
+ crate: crate,
+ ty: itemTypes[i],
+ name: itemNames[i],
+ path: itemPaths[i] ? itemPaths[i] : lastPath,
+ desc: itemDescs[i],
+ parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
+ type: buildFunctionSearchType(itemFunctionSearchTypes[i], lowercasePaths),
+ id: id,
+ normalizedName: word.indexOf("_") === -1 ? word : word.replace(/_/g, ""),
+ };
+ id += 1;
+ searchIndex.push(row);
+ lastPath = row.path;
+ crateSize += 1;
+ }
+
+ if (aliases) {
+ ALIASES[crate] = Object.create(null);
+ for (const alias_name in aliases) {
+ if (!hasOwnPropertyRustdoc(aliases, alias_name)) {
+ continue;
+ }
+
+ if (!hasOwnPropertyRustdoc(ALIASES[crate], alias_name)) {
+ ALIASES[crate][alias_name] = [];
+ }
+ for (const local_alias of aliases[alias_name]) {
+ ALIASES[crate][alias_name].push(local_alias + currentIndex);
+ }
+ }
+ }
+ currentIndex += crateSize;
+ }
+ return searchWords;
+ }
+
+ /**
+ * Callback for when the search form is submitted.
+ * @param {Event} [e] - The event that triggered this call, if any
+ */
+ function onSearchSubmit(e) {
+ e.preventDefault();
+ searchState.clearInputTimeout();
+ search();
+ }
+
+ function putBackSearch() {
+ const search_input = searchState.input;
+ if (!searchState.input) {
+ return;
+ }
+ if (search_input.value !== "" && !searchState.isDisplayed()) {
+ searchState.showResults();
+ if (browserSupportsHistoryApi()) {
+ history.replaceState(null, "",
+ buildUrl(search_input.value, getFilterCrates()));
+ }
+ document.title = searchState.title;
+ }
+ }
+
+ function registerSearchEvents() {
+ const params = searchState.getQueryStringParams();
+
+ // Populate search bar with query string search term when provided,
+        // but only if the input bar is empty. This avoids the obnoxious issue
+ // where you start trying to do a search, and the index loads, and
+ // suddenly your search is gone!
+ if (searchState.input.value === "") {
+ searchState.input.value = params.search || "";
+ }
+
+ const searchAfter500ms = () => {
+ searchState.clearInputTimeout();
+ if (searchState.input.value.length === 0) {
+ if (browserSupportsHistoryApi()) {
+ history.replaceState(null, window.currentCrate + " - Rust",
+ getNakedUrl() + window.location.hash);
+ }
+ searchState.hideResults();
+ } else {
+ searchState.timeout = setTimeout(search, 500);
+ }
+ };
+ searchState.input.onkeyup = searchAfter500ms;
+ searchState.input.oninput = searchAfter500ms;
+ document.getElementsByClassName("search-form")[0].onsubmit = onSearchSubmit;
+ searchState.input.onchange = e => {
+ if (e.target !== document.activeElement) {
+ // To prevent doing anything when it's from a blur event.
+ return;
+ }
+ // Do NOT e.preventDefault() here. It will prevent pasting.
+ searchState.clearInputTimeout();
+ // zero-timeout necessary here because at the time of event handler execution the
+ // pasted content is not in the input field yet. Shouldn’t make any difference for
+ // change, though.
+ setTimeout(search, 0);
+ };
+ searchState.input.onpaste = searchState.input.onchange;
+
+ searchState.outputElement().addEventListener("keydown", e => {
+ // We only handle unmodified keystrokes here. We don't want to interfere with,
+ // for instance, alt-left and alt-right for history navigation.
+ if (e.altKey || e.ctrlKey || e.shiftKey || e.metaKey) {
+ return;
+ }
+ // up and down arrow select next/previous search result, or the
+ // search box if we're already at the top.
+ if (e.which === 38) { // up
+ const previous = document.activeElement.previousElementSibling;
+ if (previous) {
+ previous.focus();
+ } else {
+ searchState.focus();
+ }
+ e.preventDefault();
+ } else if (e.which === 40) { // down
+ const next = document.activeElement.nextElementSibling;
+ if (next) {
+ next.focus();
+ }
+ const rect = document.activeElement.getBoundingClientRect();
+ if (window.innerHeight - rect.bottom < rect.height) {
+ window.scrollBy(0, rect.height);
+ }
+ e.preventDefault();
+ } else if (e.which === 37) { // left
+ nextTab(-1);
+ e.preventDefault();
+ } else if (e.which === 39) { // right
+ nextTab(1);
+ e.preventDefault();
+ }
+ });
+
+ searchState.input.addEventListener("keydown", e => {
+ if (e.which === 40) { // down
+ focusSearchResult();
+ e.preventDefault();
+ }
+ });
+
+ searchState.input.addEventListener("focus", () => {
+ putBackSearch();
+ });
+
+ searchState.input.addEventListener("blur", () => {
+ searchState.input.placeholder = searchState.input.origPlaceholder;
+ });
+
+ // Push and pop states are used to add search results to the browser
+ // history.
+ if (browserSupportsHistoryApi()) {
+ // Store the previous <title> so we can revert back to it later.
+ const previousTitle = document.title;
+
+ window.addEventListener("popstate", e => {
+ const params = searchState.getQueryStringParams();
+ // Revert to the previous title manually since the History
+ // API ignores the title parameter.
+ document.title = previousTitle;
+ // When browsing forward to search results the previous
+ // search will be repeated, so the currentResults are
+ // cleared to ensure the search is successful.
+ currentResults = null;
+ // Synchronize search bar with query string state and
+ // perform the search. This will empty the bar if there's
+ // nothing there, which lets you really go back to a
+ // previous state with nothing in the bar.
+ if (params.search && params.search.length > 0) {
+ searchState.input.value = params.search;
+ // Some browsers fire "onpopstate" for every page load
+ // (Chrome), while others fire the event only when actually
+ // popping a state (Firefox), which is why search() is
+ // called both here and at the end of the startSearch()
+ // function.
+ search(e);
+ } else {
+ searchState.input.value = "";
+ // When browsing back from search results the main page
+ // visibility must be reset.
+ searchState.hideResults();
+ }
+ });
+ }
+
+ // This is required in firefox to avoid this problem: Navigating to a search result
+ // with the keyboard, hitting enter, and then hitting back would take you back to
+ // the doc page, rather than the search that should overlay it.
+ // This was an interaction between the back-forward cache and our handlers
+ // that try to sync state between the URL and the search input. To work around it,
+ // do a small amount of re-init on page show.
+ window.onpageshow = () => {
+ const qSearch = searchState.getQueryStringParams().search;
+ if (searchState.input.value === "" && qSearch) {
+ searchState.input.value = qSearch;
+ }
+ search();
+ };
+ }
+
+ function updateCrate(ev) {
+ if (ev.target.value === "All crates") {
+ // If we don't remove it from the URL, it'll be picked up again by the search.
+ const params = searchState.getQueryStringParams();
+ const query = searchState.input.value.trim();
+ if (!history.state && !params.search) {
+ history.pushState(null, "", buildUrl(query, null));
+ } else {
+ history.replaceState(null, "", buildUrl(query, null));
+ }
+ }
+ // In case you "cut" the entry from the search input, then change the crate filter
+        // before pasting back the previous search, you get the old search results without
+ // the filter. To prevent this, we need to remove the previous results.
+ currentResults = null;
+ search(undefined, true);
+ }
+
+ /**
+ * @type {Array<string>}
+ */
+ const searchWords = buildIndex(rawSearchIndex);
+ if (typeof window !== "undefined") {
+ registerSearchEvents();
+ // If there's a search term in the URL, execute the search now.
+ if (window.searchState.getQueryStringParams().search) {
+ search();
+ }
+ }
+
+ if (typeof exports !== "undefined") {
+ exports.initSearch = initSearch;
+ exports.execQuery = execQuery;
+ exports.parseQuery = parseQuery;
+ }
+ return searchWords;
+}
+
+if (typeof window !== "undefined") {
+ window.initSearch = initSearch;
+ if (window.searchIndex !== undefined) {
+ initSearch(window.searchIndex);
+ }
+} else {
+ // Running in Node, not a browser. Run initSearch just to produce the
+ // exports.
+ initSearch({});
+}
+
+
+})();
diff --git a/src/librustdoc/html/static/js/settings.js b/src/librustdoc/html/static/js/settings.js
new file mode 100644
index 000000000..797b931af
--- /dev/null
+++ b/src/librustdoc/html/static/js/settings.js
@@ -0,0 +1,272 @@
+// Local js definitions:
+/* global getSettingValue, getVirtualKey, updateLocalStorage, updateSystemTheme */
+/* global addClass, removeClass, onEach, onEachLazy, blurHandler, elemIsInParent */
+/* global MAIN_ID, getVar, getSettingsButton */
+
+"use strict";
+
+(function() {
+ const isSettingsPage = window.location.pathname.endsWith("/settings.html");
+
+ function changeSetting(settingName, value) {
+ updateLocalStorage(settingName, value);
+
+ switch (settingName) {
+ case "theme":
+ case "preferred-dark-theme":
+ case "preferred-light-theme":
+ case "use-system-theme":
+ updateSystemTheme();
+ updateLightAndDark();
+ break;
+ }
+ }
+
+ function handleKey(ev) {
+ // Don't interfere with browser shortcuts
+ if (ev.ctrlKey || ev.altKey || ev.metaKey) {
+ return;
+ }
+ switch (getVirtualKey(ev)) {
+ case "Enter":
+ case "Return":
+ case "Space":
+ ev.target.checked = !ev.target.checked;
+ ev.preventDefault();
+ break;
+ }
+ }
+
+ function showLightAndDark() {
+ addClass(document.getElementById("theme").parentElement, "hidden");
+ removeClass(document.getElementById("preferred-light-theme").parentElement, "hidden");
+ removeClass(document.getElementById("preferred-dark-theme").parentElement, "hidden");
+ }
+
+ function hideLightAndDark() {
+ addClass(document.getElementById("preferred-light-theme").parentElement, "hidden");
+ addClass(document.getElementById("preferred-dark-theme").parentElement, "hidden");
+ removeClass(document.getElementById("theme").parentElement, "hidden");
+ }
+
+ function updateLightAndDark() {
+ if (getSettingValue("use-system-theme") !== "false") {
+ showLightAndDark();
+ } else {
+ hideLightAndDark();
+ }
+ }
+
+ function setEvents(settingsElement) {
+ updateLightAndDark();
+ onEachLazy(settingsElement.getElementsByClassName("slider"), elem => {
+ const toggle = elem.previousElementSibling;
+ const settingId = toggle.id;
+ const settingValue = getSettingValue(settingId);
+ if (settingValue !== null) {
+ toggle.checked = settingValue === "true";
+ }
+ toggle.onchange = function() {
+ changeSetting(this.id, this.checked);
+ };
+ toggle.onkeyup = handleKey;
+ toggle.onkeyrelease = handleKey;
+ });
+ onEachLazy(settingsElement.getElementsByClassName("select-wrapper"), elem => {
+ const select = elem.getElementsByTagName("select")[0];
+ const settingId = select.id;
+ const settingValue = getSettingValue(settingId);
+ if (settingValue !== null) {
+ select.value = settingValue;
+ }
+ select.onchange = function() {
+ changeSetting(this.id, this.value);
+ };
+ });
+ onEachLazy(settingsElement.querySelectorAll("input[type=\"radio\"]"), elem => {
+ const settingId = elem.name;
+ const settingValue = getSettingValue(settingId);
+ if (settingValue !== null && settingValue !== "null") {
+ elem.checked = settingValue === elem.value;
+ }
+ elem.addEventListener("change", ev => {
+ changeSetting(ev.target.name, ev.target.value);
+ });
+ });
+ }
+
+ /**
+ * This function builds the sections inside the "settings page". It takes a `settings` list
+ * as argument which describes each setting and how to render it. It returns a string
+ * representing the raw HTML.
+ *
+ * @param {Array<Object>} settings
+ *
+ * @return {string}
+ */
+ function buildSettingsPageSections(settings) {
+ let output = "";
+
+ for (const setting of settings) {
+ output += "<div class=\"setting-line\">";
+ const js_data_name = setting["js_name"];
+ const setting_name = setting["name"];
+
+ if (setting["options"] !== undefined) {
+ // This is a select setting.
+ output += `<div class="radio-line" id="${js_data_name}">\
+ <span class="setting-name">${setting_name}</span>\
+ <div class="choices">`;
+ onEach(setting["options"], option => {
+ const checked = option === setting["default"] ? " checked" : "";
+
+ output += `<label for="${js_data_name}-${option}" class="choice">\
+ <input type="radio" name="${js_data_name}" \
+ id="${js_data_name}-${option}" value="${option}"${checked}>\
+ <span>${option}</span>\
+ </label>`;
+ });
+ output += "</div></div>";
+ } else {
+ // This is a toggle.
+ const checked = setting["default"] === true ? " checked" : "";
+ output += `<label class="toggle">\
+ <input type="checkbox" id="${js_data_name}"${checked}>\
+ <span class="slider"></span>\
+ <span class="label">${setting_name}</span>\
+ </label>`;
+ }
+ output += "</div>";
+ }
+ return output;
+ }
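+
+    // Illustrative sketch: a toggle entry such as
+    //     {"name": "Show line numbers on code examples", "js_name": "line-numbers", "default": false}
+    // is rendered by the branch above as a <label class="toggle"> wrapping an unchecked
+    // <input type="checkbox" id="line-numbers">, while entries carrying an "options" array
+    // become a group of radio inputs instead.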
+
+ /**
+ * This function builds the "settings page" and returns the generated HTML element.
+ *
+ * @return {HTMLElement}
+ */
+ function buildSettingsPage() {
+ const themes = getVar("themes").split(",");
+ const settings = [
+ {
+ "name": "Use system theme",
+ "js_name": "use-system-theme",
+ "default": true,
+ },
+ {
+ "name": "Theme",
+ "js_name": "theme",
+ "default": "light",
+ "options": themes,
+ },
+ {
+ "name": "Preferred light theme",
+ "js_name": "preferred-light-theme",
+ "default": "light",
+ "options": themes,
+ },
+ {
+ "name": "Preferred dark theme",
+ "js_name": "preferred-dark-theme",
+ "default": "dark",
+ "options": themes,
+ },
+ {
+ "name": "Auto-hide item contents for large items",
+ "js_name": "auto-hide-large-items",
+ "default": true,
+ },
+ {
+ "name": "Auto-hide item methods' documentation",
+ "js_name": "auto-hide-method-docs",
+ "default": false,
+ },
+ {
+ "name": "Auto-hide trait implementation documentation",
+ "js_name": "auto-hide-trait-implementations",
+ "default": false,
+ },
+ {
+ "name": "Directly go to item in search if there is only one result",
+ "js_name": "go-to-only-result",
+ "default": false,
+ },
+ {
+ "name": "Show line numbers on code examples",
+ "js_name": "line-numbers",
+ "default": false,
+ },
+ {
+ "name": "Disable keyboard shortcuts",
+ "js_name": "disable-shortcuts",
+ "default": false,
+ },
+ ];
+
+ // Then we build the DOM.
+ const elementKind = isSettingsPage ? "section" : "div";
+ const innerHTML = `<div class="settings">${buildSettingsPageSections(settings)}</div>`;
+ const el = document.createElement(elementKind);
+ el.id = "settings";
+ el.className = "popover";
+ el.innerHTML = innerHTML;
+
+ if (isSettingsPage) {
+ document.getElementById(MAIN_ID).appendChild(el);
+ } else {
+ el.setAttribute("tabindex", "-1");
+ getSettingsButton().appendChild(el);
+ }
+ return el;
+ }
+
+ const settingsMenu = buildSettingsPage();
+
+ function displaySettings() {
+ settingsMenu.style.display = "";
+ }
+
+ function settingsBlurHandler(event) {
+ blurHandler(event, getSettingsButton(), window.hidePopoverMenus);
+ }
+
+ if (isSettingsPage) {
+ // We replace the existing "onclick" callback to do nothing if clicked.
+ getSettingsButton().onclick = function(event) {
+ event.preventDefault();
+ };
+ } else {
+ // We replace the existing "onclick" callback.
+ const settingsButton = getSettingsButton();
+ const settingsMenu = document.getElementById("settings");
+ settingsButton.onclick = function(event) {
+ if (elemIsInParent(event.target, settingsMenu)) {
+ return;
+ }
+ event.preventDefault();
+ const shouldDisplaySettings = settingsMenu.style.display === "none";
+
+ window.hidePopoverMenus();
+ if (shouldDisplaySettings) {
+ displaySettings();
+ }
+ };
+ settingsButton.onblur = settingsBlurHandler;
+ settingsButton.querySelector("a").onblur = settingsBlurHandler;
+ onEachLazy(settingsMenu.querySelectorAll("input"), el => {
+ el.onblur = settingsBlurHandler;
+ });
+ settingsMenu.onblur = settingsBlurHandler;
+ }
+
+    // We now wait a bit for the web browser to finish re-computing the DOM...
+ setTimeout(() => {
+ setEvents(settingsMenu);
+ // The setting menu is already displayed if we're on the settings page.
+ if (!isSettingsPage) {
+ displaySettings();
+ }
+ removeClass(getSettingsButton(), "rotate");
+ }, 0);
+})();
diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js
new file mode 100644
index 000000000..c45d61429
--- /dev/null
+++ b/src/librustdoc/html/static/js/source-script.js
@@ -0,0 +1,241 @@
+// From rust:
+/* global sourcesIndex */
+
+// Local js definitions:
+/* global addClass, getCurrentValue, onEachLazy, removeClass, browserSupportsHistoryApi */
+/* global updateLocalStorage */
+
+"use strict";
+
+(function() {
+
+const rootPath = document.getElementById("rustdoc-vars").attributes["data-root-path"].value;
+let oldScrollPosition = 0;
+
+const NAME_OFFSET = 0;
+const DIRS_OFFSET = 1;
+const FILES_OFFSET = 2;
+
+function closeSidebarIfMobile() {
+ if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
+ updateLocalStorage("source-sidebar-show", "false");
+ }
+}
+
+function createDirEntry(elem, parent, fullPath, hasFoundFile) {
+ const dirEntry = document.createElement("details");
+ const summary = document.createElement("summary");
+
+ dirEntry.className = "dir-entry";
+
+ fullPath += elem[NAME_OFFSET] + "/";
+
+ summary.innerText = elem[NAME_OFFSET];
+ dirEntry.appendChild(summary);
+
+ const folders = document.createElement("div");
+ folders.className = "folders";
+ if (elem[DIRS_OFFSET]) {
+ for (const dir of elem[DIRS_OFFSET]) {
+ if (createDirEntry(dir, folders, fullPath, false)) {
+ dirEntry.open = true;
+ hasFoundFile = true;
+ }
+ }
+ }
+ dirEntry.appendChild(folders);
+
+ const files = document.createElement("div");
+ files.className = "files";
+ if (elem[FILES_OFFSET]) {
+ for (const file_text of elem[FILES_OFFSET]) {
+ const file = document.createElement("a");
+ file.innerText = file_text;
+ file.href = rootPath + "src/" + fullPath + file_text + ".html";
+ file.addEventListener("click", closeSidebarIfMobile);
+ const w = window.location.href.split("#")[0];
+ if (!hasFoundFile && w === file.href) {
+ file.className = "selected";
+ dirEntry.open = true;
+ hasFoundFile = true;
+ }
+ files.appendChild(file);
+ }
+ }
+ dirEntry.appendChild(files);
+ parent.appendChild(dirEntry);
+ return hasFoundFile;
+}
+
+function toggleSidebar() {
+ const child = this.parentNode.children[0];
+ if (child.innerText === ">") {
+ if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
+ // This is to keep the scroll position on mobile.
+ oldScrollPosition = window.scrollY;
+ document.body.style.position = "fixed";
+ document.body.style.top = `-${oldScrollPosition}px`;
+ }
+ addClass(document.documentElement, "source-sidebar-expanded");
+ child.innerText = "<";
+ updateLocalStorage("source-sidebar-show", "true");
+ } else {
+ if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
+ // This is to keep the scroll position on mobile.
+ document.body.style.position = "";
+ document.body.style.top = "";
+            // The scroll position is lost when resetting the style, hence why we stored
+            // it in `oldScrollPosition` above.
+ window.scrollTo(0, oldScrollPosition);
+ }
+ removeClass(document.documentElement, "source-sidebar-expanded");
+ child.innerText = ">";
+ updateLocalStorage("source-sidebar-show", "false");
+ }
+}
+
+function createSidebarToggle() {
+ const sidebarToggle = document.createElement("div");
+ sidebarToggle.id = "sidebar-toggle";
+
+ const inner = document.createElement("button");
+
+ if (getCurrentValue("source-sidebar-show") === "true") {
+ inner.innerText = "<";
+ } else {
+ inner.innerText = ">";
+ }
+ inner.onclick = toggleSidebar;
+
+ sidebarToggle.appendChild(inner);
+ return sidebarToggle;
+}
+
+// This function is called from "source-files.js", generated in `html/render/mod.rs`.
+// eslint-disable-next-line no-unused-vars
+function createSourceSidebar() {
+ const container = document.querySelector("nav.sidebar");
+
+ const sidebarToggle = createSidebarToggle();
+ container.insertBefore(sidebarToggle, container.firstChild);
+
+ const sidebar = document.createElement("div");
+ sidebar.id = "source-sidebar";
+
+ let hasFoundFile = false;
+
+ const title = document.createElement("div");
+ title.className = "title";
+ title.innerText = "Files";
+ sidebar.appendChild(title);
+ Object.keys(sourcesIndex).forEach(key => {
+ sourcesIndex[key][NAME_OFFSET] = key;
+ hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "",
+ hasFoundFile);
+ });
+
+ container.appendChild(sidebar);
+ // Focus on the current file in the source files sidebar.
+ const selected_elem = sidebar.getElementsByClassName("selected")[0];
+ if (typeof selected_elem !== "undefined") {
+ selected_elem.focus();
+ }
+}
+
+const lineNumbersRegex = /^#?(\d+)(?:-(\d+))?$/;
+
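+// Highlights the line(s) designated by the location hash, which `lineNumbersRegex`
+// matches either as a single line number ("#42") or as an inclusive range ("#42-50").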
+function highlightSourceLines(match) {
+ if (typeof match === "undefined") {
+ match = window.location.hash.match(lineNumbersRegex);
+ }
+ if (!match) {
+ return;
+ }
+ let from = parseInt(match[1], 10);
+ let to = from;
+ if (typeof match[2] !== "undefined") {
+ to = parseInt(match[2], 10);
+ }
+ if (to < from) {
+ const tmp = to;
+ to = from;
+ from = tmp;
+ }
+ let elem = document.getElementById(from);
+ if (!elem) {
+ return;
+ }
+    elem.scrollIntoView();
+ onEachLazy(document.getElementsByClassName("line-numbers"), e => {
+ onEachLazy(e.getElementsByTagName("span"), i_e => {
+ removeClass(i_e, "line-highlighted");
+ });
+ });
+ for (let i = from; i <= to; ++i) {
+ elem = document.getElementById(i);
+ if (!elem) {
+ break;
+ }
+ addClass(elem, "line-highlighted");
+ }
+}
+
+const handleSourceHighlight = (function() {
+ let prev_line_id = 0;
+
+ const set_fragment = name => {
+ const x = window.scrollX,
+ y = window.scrollY;
+ if (browserSupportsHistoryApi()) {
+ history.replaceState(null, null, "#" + name);
+ highlightSourceLines();
+ } else {
+ location.replace("#" + name);
+ }
+ // Prevent jumps when selecting one or many lines
+ window.scrollTo(x, y);
+ };
+
+ return ev => {
+ let cur_line_id = parseInt(ev.target.id, 10);
+ // It can happen when clicking not on a line number span.
+ if (isNaN(cur_line_id)) {
+ return;
+ }
+ ev.preventDefault();
+
+ if (ev.shiftKey && prev_line_id) {
+ // Swap selection if needed
+ if (prev_line_id > cur_line_id) {
+ const tmp = prev_line_id;
+ prev_line_id = cur_line_id;
+ cur_line_id = tmp;
+ }
+
+ set_fragment(prev_line_id + "-" + cur_line_id);
+ } else {
+ prev_line_id = cur_line_id;
+
+ set_fragment(cur_line_id);
+ }
+ };
+}());
+
+window.addEventListener("hashchange", () => {
+ const match = window.location.hash.match(lineNumbersRegex);
+ if (match) {
+ return highlightSourceLines(match);
+ }
+});
+
+onEachLazy(document.getElementsByClassName("line-numbers"), el => {
+ el.addEventListener("click", handleSourceHighlight);
+});
+
+highlightSourceLines();
+
+window.createSourceSidebar = createSourceSidebar;
+})();
diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js
new file mode 100644
index 000000000..0c5389d45
--- /dev/null
+++ b/src/librustdoc/html/static/js/storage.js
@@ -0,0 +1,268 @@
+// storage.js is loaded in the `<head>` of all rustdoc pages and doesn't
+// use `async` or `defer`. That means it blocks further parsing and rendering
+// of the page: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script.
+// This makes it the correct place to act on settings that affect the display of
+// the page, so we don't see major layout changes during the load of the page.
+"use strict";
+
+const darkThemes = ["dark", "ayu"];
+window.currentTheme = document.getElementById("themeStyle");
+window.mainTheme = document.getElementById("mainThemeStyle");
+
+// WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
+// If you update this line, then you also need to update the two media queries with the same
+// warning in rustdoc.css
+window.RUSTDOC_MOBILE_BREAKPOINT = 701;
+
+const settingsDataset = (function() {
+ const settingsElement = document.getElementById("default-settings");
+ if (settingsElement === null) {
+ return null;
+ }
+ const dataset = settingsElement.dataset;
+ if (dataset === undefined) {
+ return null;
+ }
+ return dataset;
+})();
+
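+// Returns the current value of a setting: the value saved in localStorage if there is
+// one, otherwise the default carried by the page's "default-settings" element, or null.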
+function getSettingValue(settingName) {
+ const current = getCurrentValue(settingName);
+ if (current !== null) {
+ return current;
+ }
+ if (settingsDataset !== null) {
+ // See the comment for `default_settings.into_iter()` etc. in
+ // `Options::from_matches` in `librustdoc/config.rs`.
+ const def = settingsDataset[settingName.replace(/-/g,"_")];
+ if (def !== undefined) {
+ return def;
+ }
+ }
+ return null;
+}
+
+const localStoredTheme = getSettingValue("theme");
+
+const savedHref = [];
+
+// eslint-disable-next-line no-unused-vars
+function hasClass(elem, className) {
+ return elem && elem.classList && elem.classList.contains(className);
+}
+
+// eslint-disable-next-line no-unused-vars
+function addClass(elem, className) {
+ if (!elem || !elem.classList) {
+ return;
+ }
+ elem.classList.add(className);
+}
+
+// eslint-disable-next-line no-unused-vars
+function removeClass(elem, className) {
+ if (!elem || !elem.classList) {
+ return;
+ }
+ elem.classList.remove(className);
+}
+
+/**
+ * Run a callback for every element of an Array.
+ * @param {Array<?>} arr - The array to iterate over
+ * @param {function(?)} func - The callback
+ * @param {boolean} [reversed] - Whether to iterate in reverse
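+ * @return {boolean} - Whether any call to `func` returned true (iteration stops early when one does)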
+ */
+function onEach(arr, func, reversed) {
+ if (arr && arr.length > 0 && func) {
+ if (reversed) {
+ const length = arr.length;
+ for (let i = length - 1; i >= 0; --i) {
+ if (func(arr[i])) {
+ return true;
+ }
+ }
+ } else {
+ for (const elem of arr) {
+ if (func(elem)) {
+ return true;
+ }
+ }
+ }
+ }
+ return false;
+}
+
+/**
+ * Turn an HTMLCollection or a NodeList into an Array, then run a callback
+ * for every element. This is useful because iterating over an HTMLCollection
+ * or a "live" NodeList while modifying it can be very slow.
+ * https://developer.mozilla.org/en-US/docs/Web/API/HTMLCollection
+ * https://developer.mozilla.org/en-US/docs/Web/API/NodeList
+ * @param {NodeList<?>|HTMLCollection<?>} lazyArray - An array to iterate over
+ * @param {function(?)} func - The callback
+ * @param {boolean} [reversed] - Whether to iterate in reverse
+ */
+function onEachLazy(lazyArray, func, reversed) {
+ return onEach(
+ Array.prototype.slice.call(lazyArray),
+ func,
+ reversed);
+}
+
+function updateLocalStorage(name, value) {
+ try {
+ window.localStorage.setItem("rustdoc-" + name, value);
+ } catch (e) {
+ // localStorage is not accessible, do nothing
+ }
+}
+
+function getCurrentValue(name) {
+ try {
+ return window.localStorage.getItem("rustdoc-" + name);
+ } catch (e) {
+ return null;
+ }
+}
+
+function switchTheme(styleElem, mainStyleElem, newTheme, saveTheme) {
+ const newHref = mainStyleElem.href.replace(
+ /\/rustdoc([^/]*)\.css/, "/" + newTheme + "$1" + ".css");
+
+ // If this new value comes from a system setting or from the previously
+ // saved theme, no need to save it.
+ if (saveTheme) {
+ updateLocalStorage("theme", newTheme);
+ }
+
+ if (styleElem.href === newHref) {
+ return;
+ }
+
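+    // Only swap the stylesheet if the requested theme is actually shipped with the page:
+    // collect every <link> href once (cached in `savedHref`) and check the new href
+    // against that list.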
+ let found = false;
+ if (savedHref.length === 0) {
+ onEachLazy(document.getElementsByTagName("link"), el => {
+ savedHref.push(el.href);
+ });
+ }
+ onEach(savedHref, el => {
+ if (el === newHref) {
+ found = true;
+ return true;
+ }
+ });
+ if (found) {
+ styleElem.href = newHref;
+ }
+}
+
+// This function is called from "main.js".
+// eslint-disable-next-line no-unused-vars
+function useSystemTheme(value) {
+ if (value === undefined) {
+ value = true;
+ }
+
+ updateLocalStorage("use-system-theme", value);
+
+ // update the toggle if we're on the settings page
+ const toggle = document.getElementById("use-system-theme");
+ if (toggle && toggle instanceof HTMLInputElement) {
+ toggle.checked = value;
+ }
+}
+
+const updateSystemTheme = (function() {
+ if (!window.matchMedia) {
+        // fall back to the CSS computed value
+ return () => {
+ const cssTheme = getComputedStyle(document.documentElement)
+ .getPropertyValue("content");
+
+ switchTheme(
+ window.currentTheme,
+ window.mainTheme,
+ JSON.parse(cssTheme) || "light",
+ true
+ );
+ };
+ }
+
+ // only listen to (prefers-color-scheme: dark) because light is the default
+ const mql = window.matchMedia("(prefers-color-scheme: dark)");
+
+ function handlePreferenceChange(mql) {
+ const use = theme => {
+ switchTheme(window.currentTheme, window.mainTheme, theme, true);
+ };
+ // maybe the user has disabled the setting in the meantime!
+ if (getSettingValue("use-system-theme") !== "false") {
+ const lightTheme = getSettingValue("preferred-light-theme") || "light";
+ const darkTheme = getSettingValue("preferred-dark-theme") || "dark";
+
+ if (mql.matches) {
+ use(darkTheme);
+ } else {
+ // prefers a light theme, or has no preference
+ use(lightTheme);
+ }
+ // note: we save the theme so that it doesn't suddenly change when
+ // the user disables "use-system-theme" and reloads the page or
+ // navigates to another page
+ } else {
+ use(getSettingValue("theme"));
+ }
+ }
+
+ mql.addListener(handlePreferenceChange);
+
+ return () => {
+ handlePreferenceChange(mql);
+ };
+})();
+
+function switchToSavedTheme() {
+ switchTheme(
+ window.currentTheme,
+ window.mainTheme,
+ getSettingValue("theme") || "light",
+ false
+ );
+}
+
+if (getSettingValue("use-system-theme") !== "false" && window.matchMedia) {
+ // update the preferred dark theme if the user is already using a dark theme
+ // See https://github.com/rust-lang/rust/pull/77809#issuecomment-707875732
+ if (getSettingValue("use-system-theme") === null
+ && getSettingValue("preferred-dark-theme") === null
+ && darkThemes.indexOf(localStoredTheme) >= 0) {
+ updateLocalStorage("preferred-dark-theme", localStoredTheme);
+ }
+
+ // call the function to initialize the theme at least once!
+ updateSystemTheme();
+} else {
+ switchToSavedTheme();
+}
+
+if (getSettingValue("source-sidebar-show") === "true") {
+ // At this point in page load, `document.body` is not available yet.
+ // Set a class on the `<html>` element instead.
+ addClass(document.documentElement, "source-sidebar-expanded");
+}
+
+// If we navigate away (for example to a settings page), and then use the back or
+// forward button to get back to a page, the theme may have changed in the meantime.
+// But scripts may not be re-loaded in such a case due to the bfcache
+// (https://web.dev/bfcache/). The "pageshow" event triggers on such navigations.
+// Use that opportunity to update the theme.
+// We use a setTimeout with a 0 timeout here to put the change on the event queue.
+// For some reason, if we try to change the theme while the `pageshow` event is
+// running, it sometimes fails to take effect. The problem manifests on Chrome,
+// specifically when talking to a remote website with no caching.
+window.addEventListener("pageshow", ev => {
+ if (ev.persisted) {
+ setTimeout(switchToSavedTheme, 0);
+ }
+});
diff --git a/src/librustdoc/html/static/scrape-examples-help.md b/src/librustdoc/html/static/scrape-examples-help.md
new file mode 100644
index 000000000..035b2e18b
--- /dev/null
+++ b/src/librustdoc/html/static/scrape-examples-help.md
@@ -0,0 +1,34 @@
+Rustdoc will automatically scrape examples of documented items from the `examples/` directory of a project. These examples will be included within the generated documentation for that item. For example, if your library contains a public function:
+
+```rust
+// src/lib.rs
+pub fn a_func() {}
+```
+
+And you have an example calling this function:
+
+```rust
+// examples/ex.rs
+fn main() {
+ a_crate::a_func();
+}
+```
+
+Then this code snippet will be included in the documentation for `a_func`.
+
+## How to read scraped examples
+
+Scraped examples are shown as blocks of code from a given file, with the relevant item highlighted. If the file is longer than a couple of lines, only a small window is shown, which you can expand by clicking &varr; in the top-right. If a file contains multiple instances of an item, you can use the &pr; and &sc; buttons to cycle through each instance.
+
+If more than one file contains examples, click "More examples" to see them.
+
+
+## How Rustdoc scrapes examples
+
+When you run `cargo doc`, Rustdoc will analyze all the crates that match Cargo's `--examples` filter for instances of items that occur in the crates being documented. Then Rustdoc will include the source code of these instances in the generated documentation.
+
+Rustdoc has a few techniques to ensure this doesn't overwhelm documentation readers, and that it doesn't blow up the page size:
+
+1. For a given item, a maximum of 5 examples are included in the page. The remaining examples are just links to source code.
+2. Only one example is shown by default, and the remaining examples are hidden behind a toggle.
+3. For a given file that contains examples, only the item containing the examples will be included in the generated documentation.
diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs
new file mode 100644
index 000000000..75f2b7e35
--- /dev/null
+++ b/src/librustdoc/html/static_files.rs
@@ -0,0 +1,168 @@
+//! Static files bundled with documentation output.
+//!
+//! All the static files are included here for centralized access in case anything other than the
+//! HTML rendering code (say, the theme checker) needs to access one of these files.
+//!
+//! Note about types: CSS and JavaScript files are included as `&'static str` to allow for the
+//! minifier to run on them. All other files are included as `&'static [u8]` so they can be
+//! directly written to a `Write` handle.
+
+/// The file contents of the main `rustdoc.css` file, responsible for the core layout of the page.
+pub(crate) static RUSTDOC_CSS: &str = include_str!("static/css/rustdoc.css");
+
+/// The file contents of `settings.css`, responsible for the items on the settings page.
+pub(crate) static SETTINGS_CSS: &str = include_str!("static/css/settings.css");
+
+/// The file contents of the `noscript.css` file, used in case JS isn't supported or is disabled.
+pub(crate) static NOSCRIPT_CSS: &str = include_str!("static/css/noscript.css");
+
+/// The file contents of `normalize.css`, included to even out standard elements between browser
+/// implementations.
+pub(crate) static NORMALIZE_CSS: &str = include_str!("static/css/normalize.css");
+
+/// The file contents of `main.js`, which contains the core JavaScript used on documentation pages,
+/// including search behavior and docblock folding, among others.
+pub(crate) static MAIN_JS: &str = include_str!("static/js/main.js");
+
+/// The file contents of `search.js`, which contains the search behavior.
+pub(crate) static SEARCH_JS: &str = include_str!("static/js/search.js");
+
+/// The file contents of `settings.js`, which contains the JavaScript used to handle the settings
+/// page.
+pub(crate) static SETTINGS_JS: &str = include_str!("static/js/settings.js");
+
+/// The file contents of `storage.js`, which contains functionality related to browser Local
+/// Storage, used to store documentation settings.
+pub(crate) static STORAGE_JS: &str = include_str!("static/js/storage.js");
+
+/// The file contents of `scraped-examples.js`, which contains functionality related to the
+/// --scrape-examples flag that inserts automatically-found examples of usages of items.
+pub(crate) static SCRAPE_EXAMPLES_JS: &str = include_str!("static/js/scrape-examples.js");
+
+pub(crate) static SCRAPE_EXAMPLES_HELP_MD: &str = include_str!("static/scrape-examples-help.md");
+
+/// The file contents of `wheel.svg`, the icon used for the settings button.
+pub(crate) static WHEEL_SVG: &[u8] = include_bytes!("static/images/wheel.svg");
+
+/// The file contents of `clipboard.svg`, the icon used for the "copy path" button.
+pub(crate) static CLIPBOARD_SVG: &[u8] = include_bytes!("static/images/clipboard.svg");
+
+/// The file contents of `down-arrow.svg`, the icon used for the crate choice combobox.
+pub(crate) static DOWN_ARROW_SVG: &[u8] = include_bytes!("static/images/down-arrow.svg");
+
+/// The file contents of `toggle-minus.svg`, the icon used for opened toggles.
+pub(crate) static TOGGLE_MINUS_PNG: &[u8] = include_bytes!("static/images/toggle-minus.svg");
+
+/// The file contents of `toggle-plus.svg`, the icon used for closed toggles.
+pub(crate) static TOGGLE_PLUS_PNG: &[u8] = include_bytes!("static/images/toggle-plus.svg");
+
+/// The contents of `COPYRIGHT.txt`, the license listing for files distributed with documentation
+/// output.
+pub(crate) static COPYRIGHT: &[u8] = include_bytes!("static/COPYRIGHT.txt");
+
+/// The contents of `LICENSE-APACHE.txt`, the text of the Apache License, version 2.0.
+pub(crate) static LICENSE_APACHE: &[u8] = include_bytes!("static/LICENSE-APACHE.txt");
+
+/// The contents of `LICENSE-MIT.txt`, the text of the MIT License.
+pub(crate) static LICENSE_MIT: &[u8] = include_bytes!("static/LICENSE-MIT.txt");
+
+/// The contents of `rust-logo.svg`, the default icon of the documentation.
+pub(crate) static RUST_LOGO_SVG: &[u8] = include_bytes!("static/images/rust-logo.svg");
+
+/// The default documentation favicons (SVG and PNG fallbacks)
+pub(crate) static RUST_FAVICON_SVG: &[u8] = include_bytes!("static/images/favicon.svg");
+pub(crate) static RUST_FAVICON_PNG_16: &[u8] = include_bytes!("static/images/favicon-16x16.png");
+pub(crate) static RUST_FAVICON_PNG_32: &[u8] = include_bytes!("static/images/favicon-32x32.png");
+
+/// The built-in themes given to every documentation site.
+pub(crate) mod themes {
+ /// The "light" theme, selected by default when no setting is available. Used as the basis for
+ /// the `--check-theme` functionality.
+ pub(crate) static LIGHT: &str = include_str!("static/css/themes/light.css");
+
+ /// The "dark" theme.
+ pub(crate) static DARK: &str = include_str!("static/css/themes/dark.css");
+
+ /// The "ayu" theme.
+ pub(crate) static AYU: &str = include_str!("static/css/themes/ayu.css");
+}
+
+/// Files related to the Fira Sans font.
+pub(crate) mod fira_sans {
+ /// The file `FiraSans-Regular.woff2`, the Regular variant of the Fira Sans font in woff2.
+ pub(crate) static REGULAR: &[u8] = include_bytes!("static/fonts/FiraSans-Regular.woff2");
+
+ /// The file `FiraSans-Medium.woff2`, the Medium variant of the Fira Sans font in woff2.
+ pub(crate) static MEDIUM: &[u8] = include_bytes!("static/fonts/FiraSans-Medium.woff2");
+
+ /// The file `FiraSans-LICENSE.txt`, the license text for the Fira Sans font.
+ pub(crate) static LICENSE: &[u8] = include_bytes!("static/fonts/FiraSans-LICENSE.txt");
+}
+
+/// Files related to the Source Serif 4 font.
+pub(crate) mod source_serif_4 {
+ /// The file `SourceSerif4-Regular.ttf.woff2`, the Regular variant of the Source Serif 4 font in
+ /// woff2.
+ pub(crate) static REGULAR: &[u8] =
+ include_bytes!("static/fonts/SourceSerif4-Regular.ttf.woff2");
+
+ /// The file `SourceSerif4-Bold.ttf.woff2`, the Bold variant of the Source Serif 4 font in
+ /// woff2.
+ pub(crate) static BOLD: &[u8] = include_bytes!("static/fonts/SourceSerif4-Bold.ttf.woff2");
+
+ /// The file `SourceSerif4-It.ttf.woff2`, the Italic variant of the Source Serif 4 font in
+ /// woff2.
+ pub(crate) static ITALIC: &[u8] = include_bytes!("static/fonts/SourceSerif4-It.ttf.woff2");
+
+    /// The file `SourceSerif4-LICENSE.md`, the license text for the Source Serif 4 font.
+ pub(crate) static LICENSE: &[u8] = include_bytes!("static/fonts/SourceSerif4-LICENSE.md");
+}
+
+/// Files related to the Source Code Pro font.
+pub(crate) mod source_code_pro {
+ /// The file `SourceCodePro-Regular.ttf.woff2`, the Regular variant of the Source Code Pro font
+ /// in woff2.
+ pub(crate) static REGULAR: &[u8] =
+ include_bytes!("static/fonts/SourceCodePro-Regular.ttf.woff2");
+
+ /// The file `SourceCodePro-Semibold.ttf.woff2`, the Semibold variant of the Source Code Pro
+ /// font in woff2.
+ pub(crate) static SEMIBOLD: &[u8] =
+ include_bytes!("static/fonts/SourceCodePro-Semibold.ttf.woff2");
+
+ /// The file `SourceCodePro-It.ttf.woff2`, the Italic variant of the Source Code Pro font in
+ /// woff2.
+ pub(crate) static ITALIC: &[u8] = include_bytes!("static/fonts/SourceCodePro-It.ttf.woff2");
+
+ /// The file `SourceCodePro-LICENSE.txt`, the license text of the Source Code Pro font.
+ pub(crate) static LICENSE: &[u8] = include_bytes!("static/fonts/SourceCodePro-LICENSE.txt");
+}
+
+/// Files related to the Nanum Barun Gothic font.
+///
+/// These files are used to avoid some legacy CJK serif fonts on Windows.
+///
+/// Note that the previously used Noto Sans KR font was not very readable on Windows, and has been
+/// replaced by the Nanum Barun Gothic font, because Windows' font rendering distorts OpenType
+/// fonts too much.
+///
+/// The font files were generated with these commands:
+///
+/// ```sh
+/// pyftsubset NanumBarunGothic.ttf \
+/// --unicodes=U+AC00-D7AF,U+1100-11FF,U+3130-318F,U+A960-A97F,U+D7B0-D7FF \
+/// --output-file=NanumBarunGothic.ttf.woff2 --flavor=woff2
+/// ```
+pub(crate) mod nanum_barun_gothic {
+ /// The file `NanumBarunGothic.ttf.woff2`, the Regular variant of the Nanum Barun Gothic font.
+ pub(crate) static REGULAR: &[u8] = include_bytes!("static/fonts/NanumBarunGothic.ttf.woff2");
+
+ /// The file `NanumBarunGothic-LICENSE.txt`, the license text of the Nanum Barun Gothic font.
+ pub(crate) static LICENSE: &[u8] = include_bytes!("static/fonts/NanumBarunGothic-LICENSE.txt");
+}
+
+/// Files related to the sidebar in rustdoc sources.
+pub(crate) mod sidebar {
+    /// The file contents of `source-script.js`, the script that builds and handles the source
+    /// files sidebar.
+ pub(crate) static SOURCE_SCRIPT: &str = include_str!("static/js/source-script.js");
+}
diff --git a/src/librustdoc/html/templates/STYLE.md b/src/librustdoc/html/templates/STYLE.md
new file mode 100644
index 000000000..fff65e3b5
--- /dev/null
+++ b/src/librustdoc/html/templates/STYLE.md
@@ -0,0 +1,37 @@
+# Style for Templates
+
+This directory has templates in the [Tera templating language][teradoc], which is very
+similar to [Jinja2][jinjadoc] and [Django][djangodoc] templates, and also to [Askama][askamadoc].
+
+[teradoc]: https://tera.netlify.app/docs/#templates
+[jinjadoc]: https://jinja.palletsprojects.com/en/3.0.x/templates/
+[djangodoc]: https://docs.djangoproject.com/en/3.2/topics/templates/
+[askamadoc]: https://docs.rs/askama/0.10.5/askama/
+
+We want our rendered output to have as little unnecessary whitespace as
+possible, so that pages load quickly. To achieve that we use Tera's
+[whitespace control] features. At the end of most lines, we put an empty comment
+tag with the whitespace control characters: `{#- -#}`. This causes all
+whitespace between the end of the line and the beginning of the next, including
+indentation, to be omitted on render. Sometimes we want to preserve a single
+space. In those cases we put the space at the end of the line, followed by
+`{# -#}`, which is a directive to remove following whitespace but not preceding.
+We also use the whitespace control characters in most instances of tags with
+control flow, for example `{%- if foo -%}`.
+
+[whitespace control]: https://tera.netlify.app/docs/#whitespace-control
+
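+For example, a fragment written like this (an illustrative snippet, not taken from an
+existing template):
+
+```html
+<div class="example"> {#- -#}
+    <span>{{page.title}}</span> {# -#}
+    <a href="#">more</a> {#- -#}
+</div> {#- -#}
+```
+
+renders with all inter-tag whitespace and indentation removed, except for the single
+space deliberately kept after `</span>`.
+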
+We want our templates to be readable, so we use indentation and newlines
+liberally. We indent by four spaces after opening an HTML tag _or_ a Tera
+tag. In most cases an HTML tag should be followed by a newline, but if the
+tag has simple contents and fits with its close tag on a single line, the
+contents don't necessarily need a new line.
+
+Tera templates support quite sophisticated control flow. To keep our templates
+simple and understandable, we use only a subset: `if` and `for`. In particular
+we avoid [assignments in the template logic][assignments] and [Tera
+macros][macros]. This may also make things easier if we switch to a different
+Jinja-style template system, like Askama, in the future.
+
+[assignments]: https://tera.netlify.app/docs/#assignments
+[macros]: https://tera.netlify.app/docs/#macros
diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html
new file mode 100644
index 000000000..8e25f6764
--- /dev/null
+++ b/src/librustdoc/html/templates/page.html
@@ -0,0 +1,148 @@
+<!DOCTYPE html> {#- -#}
+<html lang="en"> {#- -#}
+<head> {#- -#}
+ <meta charset="utf-8"> {#- -#}
+ <meta name="viewport" content="width=device-width, initial-scale=1.0"> {#- -#}
+ <meta name="generator" content="rustdoc"> {#- -#}
+ <meta name="description" content="{{page.description}}"> {#- -#}
+ <meta name="keywords" content="{{page.keywords}}"> {#- -#}
+ <title>{{page.title}}</title> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}SourceSerif4-Regular.ttf.woff2"> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}FiraSans-Regular.woff2"> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}FiraSans-Medium.woff2"> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}SourceCodePro-Regular.ttf.woff2"> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}SourceSerif4-Bold.ttf.woff2"> {#- -#}
+ <link rel="preload" as="font" type="font/woff2" crossorigin href="{{static_root_path|safe}}SourceCodePro-Semibold.ttf.woff2"> {#- -#}
+ <link rel="stylesheet" type="text/css" {# -#}
+ href="{{static_root_path|safe}}normalize{{page.resource_suffix}}.css"> {#- -#}
+ <link rel="stylesheet" type="text/css" {# -#}
+ href="{{static_root_path|safe}}rustdoc{{page.resource_suffix}}.css" {# -#}
+ id="mainThemeStyle"> {#- -#}
+ {%- for theme in themes -%}
+ <link rel="stylesheet" type="text/css" {# -#}
+ href="{{static_root_path|safe}}{{theme}}{{page.resource_suffix}}.css" {# -#}
+ {%- if theme == "light" -%}
+ id="themeStyle"
+ {%- else -%}
+ disabled
+ {%- endif -%}
+ >
+ {%- endfor -%}
+ <script id="default-settings" {# -#}
+ {% for (k, v) in layout.default_settings %}
+ data-{{k}}="{{v}}"
+ {%- endfor -%}
+ ></script> {#- -#}
+ <script src="{{static_root_path|safe}}storage{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- if page.css_class.contains("crate") -%}
+ <script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- else if page.css_class == "source" -%}
+ <script defer src="{{static_root_path|safe}}source-script{{page.resource_suffix}}.js"></script> {#- -#}
+ <script defer src="{{page.root_path|safe}}source-files{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- else if !page.css_class.contains("mod") -%}
+ <script defer src="sidebar-items{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- endif -%}
+ <script defer src="{{static_root_path|safe}}main{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- if layout.scrape_examples_extension -%}
+ <script defer src="{{page.root_path|safe}}scrape-examples{{page.resource_suffix}}.js"></script> {#- -#}
+ {%- endif -%}
+ <noscript> {#- -#}
+ <link rel="stylesheet" {# -#}
+ href="{{static_root_path|safe}}noscript{{page.resource_suffix}}.css"> {#- -#}
+ </noscript> {#- -#}
+ {%- if layout.css_file_extension.is_some() -%}
+ <link rel="stylesheet" type="text/css" {# -#}
+ href="{{static_root_path|safe}}theme{{page.resource_suffix}}.css"> {#- -#}
+ {%- endif -%}
+ {%- if !layout.favicon.is_empty() -%}
+ <link rel="icon" href="{{layout.favicon}}"> {#- -#}
+ {%- else -%}
+ <link rel="alternate icon" type="image/png" {# -#}
+ href="{{static_root_path|safe}}favicon-16x16{{page.resource_suffix}}.png"> {#- -#}
+ <link rel="alternate icon" type="image/png" {# -#}
+ href="{{static_root_path|safe}}favicon-32x32{{page.resource_suffix}}.png"> {#- -#}
+ <link rel="icon" type="image/svg+xml" {# -#}
+ href="{{static_root_path|safe}}favicon{{page.resource_suffix}}.svg"> {#- -#}
+ {%- endif -%}
+ {{- layout.external_html.in_header|safe -}}
+</head> {#- -#}
+<body class="rustdoc {{page.css_class}}"> {#- -#}
+ <!--[if lte IE 11]> {#- -#}
+ <div class="warning"> {#- -#}
+ This old browser is unsupported and will most likely display funky things. {#- -#}
+ </div> {#- -#}
+ <![endif]--> {#- -#}
+ {{- layout.external_html.before_content|safe -}}
+ <nav class="mobile-topbar"> {#- -#}
+ <button class="sidebar-menu-toggle">&#9776;</button> {#- -#}
+ <a class="sidebar-logo" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
+ <div class="logo-container"> {#- -#}
+ {%- if !layout.logo.is_empty() -%}
+ <img src="{{layout.logo}}" alt="logo"> {#- -#}
+ {%- else -%}
+ <img class="rust-logo" src="{{static_root_path|safe}}rust-logo{{page.resource_suffix}}.svg" alt="logo"> {#- -#}
+ {%- endif -%}
+ </div> {#- -#}
+ </a> {#- -#}
+ <h2 class="location"></h2> {#- -#}
+ </nav> {#- -#}
+ <nav class="sidebar"> {#- -#}
+ <a class="sidebar-logo" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
+ <div class="logo-container"> {#- -#}
+ {%- if !layout.logo.is_empty() %}
+ <img src="{{layout.logo}}" alt="logo"> {#- -#}
+ {%- else -%}
+ <img class="rust-logo" src="{{static_root_path|safe}}rust-logo{{page.resource_suffix}}.svg" alt="logo"> {#- -#}
+ {%- endif -%}
+ </div> {#- -#}
+ </a> {#- -#}
+ {{- sidebar|safe -}}
+ </nav> {#- -#}
+ <main> {#- -#}
+ <div class="width-limiter"> {#- -#}
+ <div class="sub-container"> {#- -#}
+ <a class="sub-logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {#- -#}
+ {%- if !layout.logo.is_empty() %}
+ <img src="{{layout.logo}}" alt="logo"> {#- -#}
+ {%- else -%}
+ <img class="rust-logo" src="{{static_root_path|safe}}rust-logo{{page.resource_suffix}}.svg" alt="logo"> {#- -#}
+ {%- endif -%}
+ </a> {#- -#}
+ <nav class="sub"> {#- -#}
+ <form class="search-form"> {#- -#}
+ <div class="search-container"> {#- -#}
+ <span></span> {#- This empty span is a hacky fix for Safari - See #93184 -#}
+ <input {# -#}
+ class="search-input" {# -#}
+ name="search" {# -#}
+ autocomplete="off" {# -#}
+ spellcheck="false" {# -#}
+ placeholder="Click or press ‘S’ to search, ‘?’ for more options…" {# -#}
+ type="search"> {#- -#}
+ <div id="help-button" title="help" tabindex="-1"> {#- -#}
+ <button type="button">?</button> {#- -#}
+ </div> {#- -#}
+ <div id="settings-menu" tabindex="-1"> {#- -#}
+ <a href="{{page.root_path|safe}}settings.html" title="settings"> {#- -#}
+ <img width="22" height="22" alt="Change settings" {# -#}
+ src="{{static_root_path|safe}}wheel{{page.resource_suffix}}.svg"> {#- -#}
+ </a> {#- -#}
+ </div> {#- -#}
+ </div> {#- -#}
+ </form> {#- -#}
+ </nav> {#- -#}
+ </div> {#- -#}
+ <section id="main-content" class="content">{{- content|safe -}}</section> {#- -#}
+ </div> {#- -#}
+ </main> {#- -#}
+ {{- layout.external_html.after_content|safe -}}
+ <div id="rustdoc-vars" {# -#}
+ data-root-path="{{page.root_path|safe}}" {# -#}
+ data-current-crate="{{layout.krate}}" {# -#}
+ data-themes="{{themes|join(",") }}" {# -#}
+ data-resource-suffix="{{page.resource_suffix}}" {# -#}
+ data-rustdoc-version="{{rustdoc_version}}" {# -#}
+ > {#- -#}
+ </div> {#- -#}
+</body> {#- -#}
+</html> {#- -#}
diff --git a/src/librustdoc/html/templates/print_item.html b/src/librustdoc/html/templates/print_item.html
new file mode 100644
index 000000000..c755157d2
--- /dev/null
+++ b/src/librustdoc/html/templates/print_item.html
@@ -0,0 +1,30 @@
+<div class="main-heading"> {#- -#}
+ <h1 class="fqn"> {#- -#}
+ <span class="in-band"> {#- -#}
+ {{-typ-}}
+ {#- The breadcrumbs of the item path, like std::string -#}
+ {%- for component in path_components -%}
+ <a href="{{component.path|safe}}index.html">{{component.name}}</a>::<wbr>
+ {%- endfor -%}
+ <a class="{{item_type}}" href="#">{{name}}</a> {#- -#}
+ <button id="copy-path" onclick="copy_path(this)" title="Copy item path to clipboard"> {#- -#}
+ <img src="{{static_root_path|safe}}clipboard{{page.resource_suffix}}.svg" {# -#}
+ width="19" height="18" {# -#}
+ alt="Copy item path"> {#- -#}
+ </button> {#- -#}
+ </span> {#- -#}
+ </h1> {#- -#}
+ <span class="out-of-band"> {#- -#}
+ {% if !stability_since_raw.is_empty() %}
+ {{- stability_since_raw|safe }} · {# -#}
+ {% endif %}
+ {%- match src_href -%}
+ {%- when Some with (href) -%}
+ <a class="srclink" href="{{href|safe}}">source</a> · {# -#}
+ {%- else -%}
+ {%- endmatch -%}
+ <a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs"> {#- -#}
+ [<span class="inner">&#x2212;</span>] {#- -#}
+ </a> {#- -#}
+ </span> {#- -#}
+</div> {#- -#}
diff --git a/src/librustdoc/html/tests.rs b/src/librustdoc/html/tests.rs
new file mode 100644
index 000000000..437d3995e
--- /dev/null
+++ b/src/librustdoc/html/tests.rs
@@ -0,0 +1,50 @@
+use crate::html::format::href_relative_parts;
+use rustc_span::{sym, Symbol};
+
+fn assert_relative_path(expected: &[Symbol], relative_to_fqp: &[Symbol], fqp: &[Symbol]) {
+ // No `create_default_session_globals_then` call is needed here because all
+ // the symbols used are static, and no `Symbol::intern` calls occur.
+ assert_eq!(expected, href_relative_parts(&fqp, &relative_to_fqp).collect::<Vec<_>>());
+}
+
+#[test]
+fn href_relative_parts_basic() {
+ let relative_to_fqp = &[sym::std, sym::vec];
+ let fqp = &[sym::std, sym::iter];
+ assert_relative_path(&[sym::dotdot, sym::iter], relative_to_fqp, fqp);
+}
+
+#[test]
+fn href_relative_parts_parent_module() {
+ let relative_to_fqp = &[sym::std, sym::vec];
+ let fqp = &[sym::std];
+ assert_relative_path(&[sym::dotdot], relative_to_fqp, fqp);
+}
+
+#[test]
+fn href_relative_parts_different_crate() {
+ let relative_to_fqp = &[sym::std, sym::vec];
+ let fqp = &[sym::core, sym::iter];
+ assert_relative_path(&[sym::dotdot, sym::dotdot, sym::core, sym::iter], relative_to_fqp, fqp);
+}
+
+#[test]
+fn href_relative_parts_same_module() {
+ let relative_to_fqp = &[sym::std, sym::vec];
+ let fqp = &[sym::std, sym::vec];
+ assert_relative_path(&[], relative_to_fqp, fqp);
+}
+
+#[test]
+fn href_relative_parts_child_module() {
+ let relative_to_fqp = &[sym::std];
+ let fqp = &[sym::std, sym::vec];
+ assert_relative_path(&[sym::vec], relative_to_fqp, fqp);
+}
+
+#[test]
+fn href_relative_parts_root() {
+ let relative_to_fqp = &[];
+ let fqp = &[sym::std];
+ assert_relative_path(&[sym::std], relative_to_fqp, fqp);
+}
diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs
new file mode 100644
index 000000000..a12c2a6a1
--- /dev/null
+++ b/src/librustdoc/html/toc.rs
@@ -0,0 +1,191 @@
+//! Table-of-contents creation.
+
+/// A (recursive) table of contents
+#[derive(Debug, PartialEq)]
+pub(crate) struct Toc {
+    /// The levels are non-increasing, i.e.
+ ///
+ /// `entries[0].level >= entries[1].level >= ...`
+ ///
+ /// Normally they are equal, but can differ in cases like A and B,
+ /// both of which end up in the same `Toc` as they have the same
+ /// parent (Main).
+ ///
+ /// ```text
+ /// # Main
+ /// ### A
+ /// ## B
+ /// ```
+ entries: Vec<TocEntry>,
+}
+
+impl Toc {
+ fn count_entries_with_level(&self, level: u32) -> usize {
+ self.entries.iter().filter(|e| e.level == level).count()
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct TocEntry {
+ level: u32,
+ sec_number: String,
+ name: String,
+ id: String,
+ children: Toc,
+}
+
+/// Progressive construction of a table of contents.
+#[derive(PartialEq)]
+pub(crate) struct TocBuilder {
+ top_level: Toc,
+ /// The current hierarchy of parent headings, the levels are
+ /// strictly increasing (i.e., `chain[0].level < chain[1].level <
+ /// ...`) with each entry being the most recent occurrence of a
+ /// heading with that level (it doesn't include the most recent
+ /// occurrences of every level, just, if it *is* in `chain` then
+ /// it is the most recent one).
+ ///
+    /// We also have `chain[0].level <= top_level.entries[last].level`.
+ chain: Vec<TocEntry>,
+}
+
+impl TocBuilder {
+ pub(crate) fn new() -> TocBuilder {
+ TocBuilder { top_level: Toc { entries: Vec::new() }, chain: Vec::new() }
+ }
+
+ /// Converts into a true `Toc` struct.
+ pub(crate) fn into_toc(mut self) -> Toc {
+ // we know all levels are >= 1.
+ self.fold_until(0);
+ self.top_level
+ }
+
+ /// Collapse the chain until the first heading more important than
+ /// `level` (i.e., lower level)
+ ///
+ /// Example:
+ ///
+ /// ```text
+ /// ## A
+ /// # B
+ /// # C
+ /// ## D
+ /// ## E
+ /// ### F
+ /// #### G
+ /// ### H
+ /// ```
+ ///
+ /// If we are considering H (i.e., level 3), then A and B are in
+ /// self.top_level, D is in C.children, and C, E, F, G are in
+ /// self.chain.
+ ///
+ /// When we attempt to push H, we realize that first G is not the
+ /// parent (level is too high) so it is popped from chain and put
+ /// into F.children, then F isn't the parent (level is equal, aka
+ /// sibling), so it's also popped and put into E.children.
+ ///
+ /// This leaves us looking at E, which does have a smaller level,
+ /// and, by construction, it's the most recent thing with smaller
+ /// level, i.e., it's the immediate parent of H.
+ fn fold_until(&mut self, level: u32) {
+ let mut this = None;
+ loop {
+ match self.chain.pop() {
+ Some(mut next) => {
+ next.children.entries.extend(this);
+ if next.level < level {
+ // this is the parent we want, so return it to
+ // its rightful place.
+ self.chain.push(next);
+ return;
+ } else {
+ this = Some(next);
+ }
+ }
+ None => {
+ self.top_level.entries.extend(this);
+ return;
+ }
+ }
+ }
+ }
+
+ /// Push a level `level` heading into the appropriate place in the
+ /// hierarchy, returning a string containing the section number in
+ /// `<num>.<num>.<num>` format.
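+    ///
+    /// For example (a sketch; `TocBuilder` is crate-private, so this is illustrative only):
+    ///
+    /// ```ignore (private-type)
+    /// let mut builder = TocBuilder::new();
+    /// assert_eq!(builder.push(1, "Intro".to_string(), "intro".to_string()), "1");
+    /// assert_eq!(builder.push(2, "Details".to_string(), "details".to_string()), "1.1");
+    /// assert_eq!(builder.push(1, "Next".to_string(), "next".to_string()), "2");
+    /// ```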
+ pub(crate) fn push(&mut self, level: u32, name: String, id: String) -> &str {
+ assert!(level >= 1);
+
+ // collapse all previous sections into their parents until we
+ // get to relevant heading (i.e., the first one with a smaller
+ // level than us)
+ self.fold_until(level);
+
+ let mut sec_number;
+ {
+ let (toc_level, toc) = match self.chain.last() {
+ None => {
+ sec_number = String::new();
+ (0, &self.top_level)
+ }
+ Some(entry) => {
+ sec_number = entry.sec_number.clone();
+ sec_number.push('.');
+ (entry.level, &entry.children)
+ }
+ };
+ // fill in any missing zeros, e.g., for
+ // # Foo (1)
+ // ### Bar (1.0.1)
+ for _ in toc_level..level - 1 {
+ sec_number.push_str("0.");
+ }
+ let number = toc.count_entries_with_level(level);
+ sec_number.push_str(&(number + 1).to_string())
+ }
+
+ self.chain.push(TocEntry {
+ level,
+ name,
+ sec_number,
+ id,
+ children: Toc { entries: Vec::new() },
+ });
+
+ // get the thing we just pushed, so we can borrow the string
+ // out of it with the right lifetime
+ let just_inserted = self.chain.last_mut().unwrap();
+ &just_inserted.sec_number
+ }
+}
+
+impl Toc {
+ fn print_inner(&self, v: &mut String) {
+ use std::fmt::Write as _;
+
+ v.push_str("<ul>");
+ for entry in &self.entries {
+ // recursively format this table of contents
+ let _ = write!(
+ v,
+ "\n<li><a href=\"#{id}\">{num} {name}</a>",
+ id = entry.id,
+ num = entry.sec_number,
+ name = entry.name
+ );
+ entry.children.print_inner(&mut *v);
+ v.push_str("</li>");
+ }
+ v.push_str("</ul>");
+ }
+ pub(crate) fn print(&self) -> String {
+ let mut v = String::new();
+ self.print_inner(&mut v);
+ v
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/toc/tests.rs b/src/librustdoc/html/toc/tests.rs
new file mode 100644
index 000000000..014f34686
--- /dev/null
+++ b/src/librustdoc/html/toc/tests.rs
@@ -0,0 +1,79 @@
+use super::{Toc, TocBuilder, TocEntry};
+
+#[test]
+fn builder_smoke() {
+ let mut builder = TocBuilder::new();
+
+ // this is purposely not using a fancy macro like below so
+ // that we're sure that this is doing the correct thing, and
+ // there's been no macro mistake.
+ macro_rules! push {
+ ($level: expr, $name: expr) => {
+ assert_eq!(builder.push($level, $name.to_string(), "".to_string()), $name);
+ };
+ }
+ push!(2, "0.1");
+ push!(1, "1");
+ {
+ push!(2, "1.1");
+ {
+ push!(3, "1.1.1");
+ push!(3, "1.1.2");
+ }
+ push!(2, "1.2");
+ {
+ push!(3, "1.2.1");
+ push!(3, "1.2.2");
+ }
+ }
+ push!(1, "2");
+ push!(1, "3");
+ {
+ push!(4, "3.0.0.1");
+ {
+ push!(6, "3.0.0.1.0.1");
+ }
+ push!(4, "3.0.0.2");
+ push!(2, "3.1");
+ {
+ push!(4, "3.1.0.1");
+ }
+ }
+
+ macro_rules! toc {
+ ($(($level: expr, $name: expr, $(($sub: tt))* )),*) => {
+ Toc {
+ entries: vec![
+ $(
+ TocEntry {
+ level: $level,
+ name: $name.to_string(),
+ sec_number: $name.to_string(),
+ id: "".to_string(),
+ children: toc!($($sub),*)
+ }
+ ),*
+ ]
+ }
+ }
+ }
+ let expected = toc!(
+ (2, "0.1",),
+ (
+ 1,
+ "1",
+ ((2, "1.1", ((3, "1.1.1",))((3, "1.1.2",))))((
+ 2,
+ "1.2",
+ ((3, "1.2.1",))((3, "1.2.2",))
+ ))
+ ),
+ (1, "2",),
+ (
+ 1,
+ "3",
+ ((4, "3.0.0.1", ((6, "3.0.0.1.0.1",))))((4, "3.0.0.2",))((2, "3.1", ((4, "3.1.0.1",))))
+ )
+ );
+ assert_eq!(expected, builder.into_toc());
+}
diff --git a/src/librustdoc/html/url_parts_builder.rs b/src/librustdoc/html/url_parts_builder.rs
new file mode 100644
index 000000000..1e6af6af6
--- /dev/null
+++ b/src/librustdoc/html/url_parts_builder.rs
@@ -0,0 +1,180 @@
+use std::fmt::{self, Write};
+
+use rustc_span::Symbol;
+
+/// A builder that allows efficiently and easily constructing the part of a URL
+/// after the domain: `nightly/core/str/struct.Bytes.html`.
+///
+/// This type is a wrapper around the final `String` buffer,
+/// but its API is like that of a `Vec` of URL components.
+#[derive(Debug)]
+pub(crate) struct UrlPartsBuilder {
+ buf: String,
+}
+
+impl UrlPartsBuilder {
+ /// Create an empty buffer.
+ #[allow(dead_code)]
+ pub(crate) fn new() -> Self {
+ Self { buf: String::new() }
+ }
+
+ /// Create an empty buffer with capacity for the specified number of bytes.
+ fn with_capacity_bytes(count: usize) -> Self {
+ Self { buf: String::with_capacity(count) }
+ }
+
+ /// Create a buffer with one URL component.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```ignore (private-type)
+ /// let builder = UrlPartsBuilder::singleton("core");
+ /// assert_eq!(builder.finish(), "core");
+ /// ```
+ ///
+ /// Adding more components afterward.
+ ///
+ /// ```ignore (private-type)
+ /// let mut builder = UrlPartsBuilder::singleton("core");
+ /// builder.push("str");
+ /// builder.push_front("nightly");
+ /// assert_eq!(builder.finish(), "nightly/core/str");
+ /// ```
+ pub(crate) fn singleton(part: &str) -> Self {
+ Self { buf: part.to_owned() }
+ }
+
+ /// Push a component onto the buffer.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```ignore (private-type)
+ /// let mut builder = UrlPartsBuilder::new();
+ /// builder.push("core");
+ /// builder.push("str");
+ /// builder.push("struct.Bytes.html");
+ /// assert_eq!(builder.finish(), "core/str/struct.Bytes.html");
+ /// ```
+ pub(crate) fn push(&mut self, part: &str) {
+ if !self.buf.is_empty() {
+ self.buf.push('/');
+ }
+ self.buf.push_str(part);
+ }
+
+ /// Push a component onto the buffer, using [`format!`]'s formatting syntax.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage (equivalent to the example for [`UrlPartsBuilder::push`]):
+ ///
+ /// ```ignore (private-type)
+ /// let mut builder = UrlPartsBuilder::new();
+ /// builder.push("core");
+ /// builder.push("str");
+ /// builder.push_fmt(format_args!("{}.{}.html", "struct", "Bytes"));
+ /// assert_eq!(builder.finish(), "core/str/struct.Bytes.html");
+ /// ```
+ pub(crate) fn push_fmt(&mut self, args: fmt::Arguments<'_>) {
+ if !self.buf.is_empty() {
+ self.buf.push('/');
+ }
+ self.buf.write_fmt(args).unwrap()
+ }
+
+ /// Push a component onto the front of the buffer.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```ignore (private-type)
+ /// let mut builder = UrlPartsBuilder::new();
+ /// builder.push("core");
+ /// builder.push("str");
+ /// builder.push_front("nightly");
+ /// builder.push("struct.Bytes.html");
+ /// assert_eq!(builder.finish(), "nightly/core/str/struct.Bytes.html");
+ /// ```
+ pub(crate) fn push_front(&mut self, part: &str) {
+ let is_empty = self.buf.is_empty();
+ self.buf.reserve(part.len() + if !is_empty { 1 } else { 0 });
+ self.buf.insert_str(0, part);
+ if !is_empty {
+ self.buf.insert(part.len(), '/');
+ }
+ }
+
+ /// Get the final `String` buffer.
+ pub(crate) fn finish(self) -> String {
+ self.buf
+ }
+}
+
+/// This is just a guess at the average length of a URL part,
+/// used for [`String::with_capacity`] calls in the [`FromIterator`]
+/// and [`Extend`] impls, and for [estimating item path lengths].
+///
+/// The value `8` was chosen for two main reasons:
+///
+/// * It seems like a good guess for the average part length.
+/// * jemalloc's size classes are all multiples of eight,
+/// which means that the amount of memory it allocates will often match
+/// the amount requested, avoiding wasted bytes.
+///
+/// [estimating item path lengths]: estimate_item_path_byte_length
+const AVG_PART_LENGTH: usize = 8;
+
+/// Estimate the number of bytes in an item's path, based on how many segments it has.
+///
+/// **Note:** This is only to be used with, e.g., [`String::with_capacity()`];
+/// the return value is just a rough estimate.
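+///
+/// For example, a three-segment path such as `core/str/struct.Bytes.html` is estimated
+/// at `3 * AVG_PART_LENGTH = 24` bytes.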
+pub(crate) const fn estimate_item_path_byte_length(segment_count: usize) -> usize {
+ AVG_PART_LENGTH * segment_count
+}
+
+impl<'a> FromIterator<&'a str> for UrlPartsBuilder {
+ fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
+ let iter = iter.into_iter();
+ let mut builder = Self::with_capacity_bytes(AVG_PART_LENGTH * iter.size_hint().0);
+ iter.for_each(|part| builder.push(part));
+ builder
+ }
+}
+
+impl<'a> Extend<&'a str> for UrlPartsBuilder {
+ fn extend<T: IntoIterator<Item = &'a str>>(&mut self, iter: T) {
+ let iter = iter.into_iter();
+ self.buf.reserve(AVG_PART_LENGTH * iter.size_hint().0);
+ iter.for_each(|part| self.push(part));
+ }
+}
+
+impl FromIterator<Symbol> for UrlPartsBuilder {
+ fn from_iter<T: IntoIterator<Item = Symbol>>(iter: T) -> Self {
+ // This code has to be duplicated from the `&str` impl because of
+ // `Symbol::as_str`'s lifetimes.
+ let iter = iter.into_iter();
+ let mut builder = Self::with_capacity_bytes(AVG_PART_LENGTH * iter.size_hint().0);
+ iter.for_each(|part| builder.push(part.as_str()));
+ builder
+ }
+}
+
+impl Extend<Symbol> for UrlPartsBuilder {
+ fn extend<T: IntoIterator<Item = Symbol>>(&mut self, iter: T) {
+ // This code has to be duplicated from the `&str` impl because of
+ // `Symbol::as_str`'s lifetimes.
+ let iter = iter.into_iter();
+ self.buf.reserve(AVG_PART_LENGTH * iter.size_hint().0);
+ iter.for_each(|part| self.push(part.as_str()));
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/url_parts_builder/tests.rs b/src/librustdoc/html/url_parts_builder/tests.rs
new file mode 100644
index 000000000..636e1ab55
--- /dev/null
+++ b/src/librustdoc/html/url_parts_builder/tests.rs
@@ -0,0 +1,64 @@
+use super::*;
+
+fn t(builder: UrlPartsBuilder, expect: &str) {
+ assert_eq!(builder.finish(), expect);
+}
+
+#[test]
+fn empty() {
+ t(UrlPartsBuilder::new(), "");
+}
+
+#[test]
+fn singleton() {
+ t(UrlPartsBuilder::singleton("index.html"), "index.html");
+}
+
+#[test]
+fn push_several() {
+ let mut builder = UrlPartsBuilder::new();
+ builder.push("core");
+ builder.push("str");
+ builder.push("struct.Bytes.html");
+ t(builder, "core/str/struct.Bytes.html");
+}
+
+#[test]
+fn push_front_empty() {
+ let mut builder = UrlPartsBuilder::new();
+ builder.push_front("page.html");
+ t(builder, "page.html");
+}
+
+#[test]
+fn push_front_non_empty() {
+ let mut builder = UrlPartsBuilder::new();
+ builder.push("core");
+ builder.push("str");
+ builder.push("struct.Bytes.html");
+ builder.push_front("nightly");
+ t(builder, "nightly/core/str/struct.Bytes.html");
+}
+
+#[test]
+fn push_fmt() {
+ let mut builder = UrlPartsBuilder::new();
+ builder.push_fmt(format_args!("{}", "core"));
+ builder.push("str");
+ builder.push_front("nightly");
+ builder.push_fmt(format_args!("{}.{}.html", "struct", "Bytes"));
+ t(builder, "nightly/core/str/struct.Bytes.html");
+}
+
+#[test]
+fn collect() {
+ t(["core", "str"].into_iter().collect(), "core/str");
+ t(["core", "str", "struct.Bytes.html"].into_iter().collect(), "core/str/struct.Bytes.html");
+}
+
+#[test]
+fn extend() {
+ let mut builder = UrlPartsBuilder::singleton("core");
+ builder.extend(["str", "struct.Bytes.html"]);
+ t(builder, "core/str/struct.Bytes.html");
+}
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
new file mode 100644
index 000000000..716a4c9ea
--- /dev/null
+++ b/src/librustdoc/json/conversions.rs
@@ -0,0 +1,798 @@
+//! These `From`-style conversion impls are used to create the JSON types which get serialized. They're very close to
+//! the `clean` types but with some fields removed or stringified to simplify the output and not
+//! expose unstable compiler internals.
+
+#![allow(rustc::default_hash_types)]
+
+use std::convert::From;
+use std::fmt;
+
+use rustc_ast::ast;
+use rustc_hir::{def::CtorKind, def_id::DefId};
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_span::{Pos, Symbol};
+use rustc_target::spec::abi::Abi as RustcAbi;
+
+use rustdoc_json_types::*;
+
+use crate::clean::utils::print_const_expr;
+use crate::clean::{self, ItemId};
+use crate::formats::item_type::ItemType;
+use crate::json::JsonRenderer;
+
+impl JsonRenderer<'_> {
+ pub(super) fn convert_item(&self, item: clean::Item) -> Option<Item> {
+ let deprecation = item.deprecation(self.tcx);
+ let links = self
+ .cache
+ .intra_doc_links
+ .get(&item.item_id)
+ .into_iter()
+ .flatten()
+ .map(|clean::ItemLink { link, did, .. }| {
+ (link.clone(), from_item_id((*did).into(), self.tcx))
+ })
+ .collect();
+ let docs = item.attrs.collapsed_doc_value();
+ let attrs = item
+ .attrs
+ .other_attrs
+ .iter()
+ .map(rustc_ast_pretty::pprust::attribute_to_string)
+ .collect();
+ let span = item.span(self.tcx);
+ let clean::Item { name, attrs: _, kind: _, visibility, item_id, cfg: _ } = item;
+ let inner = match *item.kind {
+ clean::KeywordItem => return None,
+ clean::StrippedItem(ref inner) => {
+ match &**inner {
+                    // We document non-empty stripped modules with `Module::is_stripped` set to
+ // `true`, to prevent contained items from being orphaned for downstream users,
+ // as JSON does no inlining.
+ clean::ModuleItem(m) if !m.items.is_empty() => from_clean_item(item, self.tcx),
+ _ => return None,
+ }
+ }
+ _ => from_clean_item(item, self.tcx),
+ };
+ Some(Item {
+ id: from_item_id_with_name(item_id, self.tcx, name),
+ crate_id: item_id.krate().as_u32(),
+ name: name.map(|sym| sym.to_string()),
+ span: self.convert_span(span),
+ visibility: self.convert_visibility(visibility),
+ docs,
+ attrs,
+ deprecation: deprecation.map(from_deprecation),
+ inner,
+ links,
+ })
+ }
+
+ fn convert_span(&self, span: clean::Span) -> Option<Span> {
+ match span.filename(self.sess()) {
+ rustc_span::FileName::Real(name) => {
+ if let Some(local_path) = name.into_local_path() {
+ let hi = span.hi(self.sess());
+ let lo = span.lo(self.sess());
+ Some(Span {
+ filename: local_path,
+ begin: (lo.line, lo.col.to_usize()),
+ end: (hi.line, hi.col.to_usize()),
+ })
+ } else {
+ None
+ }
+ }
+ _ => None,
+ }
+ }
+
+ fn convert_visibility(&self, v: clean::Visibility) -> Visibility {
+ use clean::Visibility::*;
+ match v {
+ Public => Visibility::Public,
+ Inherited => Visibility::Default,
+ Restricted(did) if did.is_crate_root() => Visibility::Crate,
+ Restricted(did) => Visibility::Restricted {
+ parent: from_item_id(did.into(), self.tcx),
+ path: self.tcx.def_path(did).to_string_no_crate_verbose(),
+ },
+ }
+ }
+}
+
+pub(crate) trait FromWithTcx<T> {
+ fn from_tcx(f: T, tcx: TyCtxt<'_>) -> Self;
+}
+
+pub(crate) trait IntoWithTcx<T> {
+ fn into_tcx(self, tcx: TyCtxt<'_>) -> T;
+}
+
+impl<T, U> IntoWithTcx<U> for T
+where
+ U: FromWithTcx<T>,
+{
+ fn into_tcx(self, tcx: TyCtxt<'_>) -> U {
+ U::from_tcx(self, tcx)
+ }
+}
+
+pub(crate) fn from_deprecation(deprecation: rustc_attr::Deprecation) -> Deprecation {
+ #[rustfmt::skip]
+ let rustc_attr::Deprecation { since, note, is_since_rustc_version: _, suggestion: _ } = deprecation;
+ Deprecation { since: since.map(|s| s.to_string()), note: note.map(|s| s.to_string()) }
+}
+
+impl FromWithTcx<clean::GenericArgs> for GenericArgs {
+ fn from_tcx(args: clean::GenericArgs, tcx: TyCtxt<'_>) -> Self {
+ use clean::GenericArgs::*;
+ match args {
+ AngleBracketed { args, bindings } => GenericArgs::AngleBracketed {
+ args: args.into_vec().into_iter().map(|a| a.into_tcx(tcx)).collect(),
+ bindings: bindings.into_iter().map(|a| a.into_tcx(tcx)).collect(),
+ },
+ Parenthesized { inputs, output } => GenericArgs::Parenthesized {
+ inputs: inputs.into_vec().into_iter().map(|a| a.into_tcx(tcx)).collect(),
+ output: output.map(|a| (*a).into_tcx(tcx)),
+ },
+ }
+ }
+}
+
+impl FromWithTcx<clean::GenericArg> for GenericArg {
+ fn from_tcx(arg: clean::GenericArg, tcx: TyCtxt<'_>) -> Self {
+ use clean::GenericArg::*;
+ match arg {
+ Lifetime(l) => GenericArg::Lifetime(l.0.to_string()),
+ Type(t) => GenericArg::Type(t.into_tcx(tcx)),
+ Const(box c) => GenericArg::Const(c.into_tcx(tcx)),
+ Infer => GenericArg::Infer,
+ }
+ }
+}
+
+impl FromWithTcx<clean::Constant> for Constant {
+ fn from_tcx(constant: clean::Constant, tcx: TyCtxt<'_>) -> Self {
+ let expr = constant.expr(tcx);
+ let value = constant.value(tcx);
+ let is_literal = constant.is_literal(tcx);
+ Constant { type_: constant.type_.into_tcx(tcx), expr, value, is_literal }
+ }
+}
+
+impl FromWithTcx<clean::TypeBinding> for TypeBinding {
+ fn from_tcx(binding: clean::TypeBinding, tcx: TyCtxt<'_>) -> Self {
+ TypeBinding {
+ name: binding.assoc.name.to_string(),
+ args: binding.assoc.args.into_tcx(tcx),
+ binding: binding.kind.into_tcx(tcx),
+ }
+ }
+}
+
+impl FromWithTcx<clean::TypeBindingKind> for TypeBindingKind {
+ fn from_tcx(kind: clean::TypeBindingKind, tcx: TyCtxt<'_>) -> Self {
+ use clean::TypeBindingKind::*;
+ match kind {
+ Equality { term } => TypeBindingKind::Equality(term.into_tcx(tcx)),
+ Constraint { bounds } => {
+ TypeBindingKind::Constraint(bounds.into_iter().map(|a| a.into_tcx(tcx)).collect())
+ }
+ }
+ }
+}
+
+/// It generates an ID as follows:
+///
+/// `CRATE_ID:ITEM_ID[:NAME_ID]` (if there is no name, NAME_ID is not generated).
+pub(crate) fn from_item_id(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
+ from_item_id_with_name(item_id, tcx, None)
+}
+
+// FIXME: this function (and appending the name at the end of the ID) should be removed once
+// reexports are no longer inlined for the JSON format. This should be done in #93518.
+pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Option<Symbol>) -> Id {
+ struct DisplayDefId<'a>(DefId, TyCtxt<'a>, Option<Symbol>);
+
+ impl<'a> fmt::Display for DisplayDefId<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let name = match self.2 {
+ Some(name) => format!(":{}", name.as_u32()),
+ None => self
+ .1
+ .opt_item_name(self.0)
+ .map(|n| format!(":{}", n.as_u32()))
+ .unwrap_or_default(),
+ };
+ write!(f, "{}:{}{}", self.0.krate.as_u32(), u32::from(self.0.index), name)
+ }
+ }
+
+ match item_id {
+ ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, name))),
+ ItemId::Blanket { for_, impl_id } => {
+ Id(format!("b:{}-{}", DisplayDefId(impl_id, tcx, None), DisplayDefId(for_, tcx, name)))
+ }
+ ItemId::Auto { for_, trait_ } => {
+ Id(format!("a:{}-{}", DisplayDefId(trait_, tcx, None), DisplayDefId(for_, tcx, name)))
+ }
+ ItemId::Primitive(ty, krate) => Id(format!("p:{}:{}", krate.as_u32(), ty.as_sym())),
+ }
+}
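+// A rough illustration of the resulting IDs (hypothetical values, not taken from real output):
+// a regular item renders as `0:123:456` (crate, DefIndex, name symbol), a blanket impl as
+// `b:0:10-0:123:456`, an auto-trait impl as `a:2:7-0:123:456`, and a primitive as `p:2:usize`.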
+
+fn from_clean_item(item: clean::Item, tcx: TyCtxt<'_>) -> ItemEnum {
+ use clean::ItemKind::*;
+ let name = item.name;
+ let is_crate = item.is_crate();
+ let header = item.fn_header(tcx);
+
+ match *item.kind {
+ ModuleItem(m) => {
+ ItemEnum::Module(Module { is_crate, items: ids(m.items, tcx), is_stripped: false })
+ }
+ ImportItem(i) => ItemEnum::Import(i.into_tcx(tcx)),
+ StructItem(s) => ItemEnum::Struct(s.into_tcx(tcx)),
+ UnionItem(u) => ItemEnum::Union(u.into_tcx(tcx)),
+ StructFieldItem(f) => ItemEnum::StructField(f.into_tcx(tcx)),
+ EnumItem(e) => ItemEnum::Enum(e.into_tcx(tcx)),
+ VariantItem(v) => ItemEnum::Variant(v.into_tcx(tcx)),
+ FunctionItem(f) => ItemEnum::Function(from_function(f, header.unwrap(), tcx)),
+ ForeignFunctionItem(f) => ItemEnum::Function(from_function(f, header.unwrap(), tcx)),
+ TraitItem(t) => ItemEnum::Trait(t.into_tcx(tcx)),
+ TraitAliasItem(t) => ItemEnum::TraitAlias(t.into_tcx(tcx)),
+ MethodItem(m, _) => ItemEnum::Method(from_function_method(m, true, header.unwrap(), tcx)),
+ TyMethodItem(m) => ItemEnum::Method(from_function_method(m, false, header.unwrap(), tcx)),
+ ImplItem(i) => ItemEnum::Impl(i.into_tcx(tcx)),
+ StaticItem(s) => ItemEnum::Static(s.into_tcx(tcx)),
+ ForeignStaticItem(s) => ItemEnum::Static(s.into_tcx(tcx)),
+ ForeignTypeItem => ItemEnum::ForeignType,
+ TypedefItem(t) => ItemEnum::Typedef(t.into_tcx(tcx)),
+ OpaqueTyItem(t) => ItemEnum::OpaqueTy(t.into_tcx(tcx)),
+ ConstantItem(c) => ItemEnum::Constant(c.into_tcx(tcx)),
+ MacroItem(m) => ItemEnum::Macro(m.source),
+ ProcMacroItem(m) => ItemEnum::ProcMacro(m.into_tcx(tcx)),
+ PrimitiveItem(p) => ItemEnum::PrimitiveType(p.as_sym().to_string()),
+ TyAssocConstItem(ty) => ItemEnum::AssocConst { type_: ty.into_tcx(tcx), default: None },
+ AssocConstItem(ty, default) => {
+ ItemEnum::AssocConst { type_: ty.into_tcx(tcx), default: Some(default.expr(tcx)) }
+ }
+ TyAssocTypeItem(g, b) => ItemEnum::AssocType {
+ generics: (*g).into_tcx(tcx),
+ bounds: b.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ default: None,
+ },
+ AssocTypeItem(t, b) => ItemEnum::AssocType {
+ generics: t.generics.into_tcx(tcx),
+ bounds: b.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ default: Some(t.item_type.unwrap_or(t.type_).into_tcx(tcx)),
+ },
+ // `convert_item` early returns `None` for stripped items and keywords.
+ KeywordItem => unreachable!(),
+ StrippedItem(inner) => {
+ match *inner {
+ ModuleItem(m) => ItemEnum::Module(Module {
+ is_crate,
+ items: ids(m.items, tcx),
+ is_stripped: true,
+ }),
+ // `convert_item` early returns `None` for stripped items we're not including
+ _ => unreachable!(),
+ }
+ }
+ ExternCrateItem { ref src } => ItemEnum::ExternCrate {
+ name: name.as_ref().unwrap().to_string(),
+ rename: src.map(|x| x.to_string()),
+ },
+ }
+}
+
+impl FromWithTcx<clean::Struct> for Struct {
+ fn from_tcx(struct_: clean::Struct, tcx: TyCtxt<'_>) -> Self {
+ let fields_stripped = struct_.has_stripped_entries();
+ let clean::Struct { struct_type, generics, fields } = struct_;
+ Struct {
+ struct_type: from_ctor_kind(struct_type),
+ generics: generics.into_tcx(tcx),
+ fields_stripped,
+ fields: ids(fields, tcx),
+ impls: Vec::new(), // Added in JsonRenderer::item
+ }
+ }
+}
+
+impl FromWithTcx<clean::Union> for Union {
+ fn from_tcx(union_: clean::Union, tcx: TyCtxt<'_>) -> Self {
+ let fields_stripped = union_.has_stripped_entries();
+ let clean::Union { generics, fields } = union_;
+ Union {
+ generics: generics.into_tcx(tcx),
+ fields_stripped,
+ fields: ids(fields, tcx),
+ impls: Vec::new(), // Added in JsonRenderer::item
+ }
+ }
+}
+
+pub(crate) fn from_ctor_kind(struct_type: CtorKind) -> StructType {
+ match struct_type {
+ CtorKind::Fictive => StructType::Plain,
+ CtorKind::Fn => StructType::Tuple,
+ CtorKind::Const => StructType::Unit,
+ }
+}
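+// For reference: a braced struct like `struct S { x: u8 }` is `CtorKind::Fictive` (`Plain`),
+// a tuple struct like `struct S(u8);` is `CtorKind::Fn` (`Tuple`), and a unit struct like
+// `struct S;` is `CtorKind::Const` (`Unit`).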
+
+pub(crate) fn from_fn_header(header: &rustc_hir::FnHeader) -> Header {
+ Header {
+ async_: header.is_async(),
+ const_: header.is_const(),
+ unsafe_: header.is_unsafe(),
+ abi: convert_abi(header.abi),
+ }
+}
+
+fn convert_abi(a: RustcAbi) -> Abi {
+ match a {
+ RustcAbi::Rust => Abi::Rust,
+ RustcAbi::C { unwind } => Abi::C { unwind },
+ RustcAbi::Cdecl { unwind } => Abi::Cdecl { unwind },
+ RustcAbi::Stdcall { unwind } => Abi::Stdcall { unwind },
+ RustcAbi::Fastcall { unwind } => Abi::Fastcall { unwind },
+ RustcAbi::Aapcs { unwind } => Abi::Aapcs { unwind },
+ RustcAbi::Win64 { unwind } => Abi::Win64 { unwind },
+ RustcAbi::SysV64 { unwind } => Abi::SysV64 { unwind },
+ RustcAbi::System { unwind } => Abi::System { unwind },
+ _ => Abi::Other(a.to_string()),
+ }
+}
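+// For example, an `extern "C"` function maps to `Abi::C { unwind: false }` and
+// `extern "C-unwind"` to `Abi::C { unwind: true }`; ABIs without a dedicated variant
+// (e.g. `extern "ptx-kernel"`) fall back to `Abi::Other` carrying the ABI's string form.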
+
+impl FromWithTcx<clean::Generics> for Generics {
+ fn from_tcx(generics: clean::Generics, tcx: TyCtxt<'_>) -> Self {
+ Generics {
+ params: generics.params.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ where_predicates: generics
+ .where_predicates
+ .into_iter()
+ .map(|x| x.into_tcx(tcx))
+ .collect(),
+ }
+ }
+}
+
+impl FromWithTcx<clean::GenericParamDef> for GenericParamDef {
+ fn from_tcx(generic_param: clean::GenericParamDef, tcx: TyCtxt<'_>) -> Self {
+ GenericParamDef {
+ name: generic_param.name.to_string(),
+ kind: generic_param.kind.into_tcx(tcx),
+ }
+ }
+}
+
+impl FromWithTcx<clean::GenericParamDefKind> for GenericParamDefKind {
+ fn from_tcx(kind: clean::GenericParamDefKind, tcx: TyCtxt<'_>) -> Self {
+ use clean::GenericParamDefKind::*;
+ match kind {
+ Lifetime { outlives } => GenericParamDefKind::Lifetime {
+ outlives: outlives.into_iter().map(|lt| lt.0.to_string()).collect(),
+ },
+ Type { did: _, bounds, default, synthetic } => GenericParamDefKind::Type {
+ bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ default: default.map(|x| (*x).into_tcx(tcx)),
+ synthetic,
+ },
+ Const { did: _, ty, default } => GenericParamDefKind::Const {
+ type_: (*ty).into_tcx(tcx),
+ default: default.map(|x| *x),
+ },
+ }
+ }
+}
+
+impl FromWithTcx<clean::WherePredicate> for WherePredicate {
+ fn from_tcx(predicate: clean::WherePredicate, tcx: TyCtxt<'_>) -> Self {
+ use clean::WherePredicate::*;
+ match predicate {
+ BoundPredicate { ty, bounds, bound_params } => WherePredicate::BoundPredicate {
+ type_: ty.into_tcx(tcx),
+ bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ generic_params: bound_params
+ .into_iter()
+ .map(|x| GenericParamDef {
+ name: x.0.to_string(),
+ kind: GenericParamDefKind::Lifetime { outlives: vec![] },
+ })
+ .collect(),
+ },
+ RegionPredicate { lifetime, bounds } => WherePredicate::RegionPredicate {
+ lifetime: lifetime.0.to_string(),
+ bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ },
+ EqPredicate { lhs, rhs } => {
+ WherePredicate::EqPredicate { lhs: lhs.into_tcx(tcx), rhs: rhs.into_tcx(tcx) }
+ }
+ }
+ }
+}
+
+impl FromWithTcx<clean::GenericBound> for GenericBound {
+ fn from_tcx(bound: clean::GenericBound, tcx: TyCtxt<'_>) -> Self {
+ use clean::GenericBound::*;
+ match bound {
+ TraitBound(clean::PolyTrait { trait_, generic_params }, modifier) => {
+ // FIXME: should `trait_` be a clean::Path equivalent in JSON?
+ let trait_ = clean::Type::Path { path: trait_ }.into_tcx(tcx);
+ GenericBound::TraitBound {
+ trait_,
+ generic_params: generic_params.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ modifier: from_trait_bound_modifier(modifier),
+ }
+ }
+ Outlives(lifetime) => GenericBound::Outlives(lifetime.0.to_string()),
+ }
+ }
+}
+
+pub(crate) fn from_trait_bound_modifier(
+ modifier: rustc_hir::TraitBoundModifier,
+) -> TraitBoundModifier {
+ use rustc_hir::TraitBoundModifier::*;
+ match modifier {
+ None => TraitBoundModifier::None,
+ Maybe => TraitBoundModifier::Maybe,
+ MaybeConst => TraitBoundModifier::MaybeConst,
+ }
+}
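+// For reference: a plain bound like `T: Clone` has modifier `None`, `T: ?Sized` maps to
+// `Maybe`, and `T: ~const Trait` maps to `MaybeConst`.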
+
+impl FromWithTcx<clean::Type> for Type {
+ fn from_tcx(ty: clean::Type, tcx: TyCtxt<'_>) -> Self {
+ use clean::Type::{
+ Array, BareFunction, BorrowedRef, DynTrait, Generic, ImplTrait, Infer, Primitive,
+ QPath, RawPointer, Slice, Tuple,
+ };
+
+ match ty {
+ clean::Type::Path { path } => Type::ResolvedPath {
+ name: path.whole_name(),
+ id: from_item_id(path.def_id().into(), tcx),
+ args: path.segments.last().map(|args| Box::new(args.clone().args.into_tcx(tcx))),
+ param_names: Vec::new(),
+ },
+ DynTrait(mut bounds, lt) => {
+ let first_trait = bounds.remove(0).trait_;
+
+ Type::ResolvedPath {
+ name: first_trait.whole_name(),
+ id: from_item_id(first_trait.def_id().into(), tcx),
+ args: first_trait
+ .segments
+ .last()
+ .map(|args| Box::new(args.clone().args.into_tcx(tcx))),
+ param_names: bounds
+ .into_iter()
+ .map(|t| {
+ clean::GenericBound::TraitBound(t, rustc_hir::TraitBoundModifier::None)
+ })
+ .chain(lt.map(clean::GenericBound::Outlives))
+ .map(|bound| bound.into_tcx(tcx))
+ .collect(),
+ }
+ }
+ Generic(s) => Type::Generic(s.to_string()),
+ Primitive(p) => Type::Primitive(p.as_sym().to_string()),
+ BareFunction(f) => Type::FunctionPointer(Box::new((*f).into_tcx(tcx))),
+ Tuple(t) => Type::Tuple(t.into_iter().map(|x| x.into_tcx(tcx)).collect()),
+ Slice(t) => Type::Slice(Box::new((*t).into_tcx(tcx))),
+ Array(t, s) => Type::Array { type_: Box::new((*t).into_tcx(tcx)), len: s },
+ ImplTrait(g) => Type::ImplTrait(g.into_iter().map(|x| x.into_tcx(tcx)).collect()),
+ Infer => Type::Infer,
+ RawPointer(mutability, type_) => Type::RawPointer {
+ mutable: mutability == ast::Mutability::Mut,
+ type_: Box::new((*type_).into_tcx(tcx)),
+ },
+ BorrowedRef { lifetime, mutability, type_ } => Type::BorrowedRef {
+ lifetime: lifetime.map(|l| l.0.to_string()),
+ mutable: mutability == ast::Mutability::Mut,
+ type_: Box::new((*type_).into_tcx(tcx)),
+ },
+ QPath { assoc, self_type, trait_, .. } => {
+ // FIXME: should `trait_` be a clean::Path equivalent in JSON?
+ let trait_ = clean::Type::Path { path: trait_ }.into_tcx(tcx);
+ Type::QualifiedPath {
+ name: assoc.name.to_string(),
+ args: Box::new(assoc.args.clone().into_tcx(tcx)),
+ self_type: Box::new((*self_type).into_tcx(tcx)),
+ trait_: Box::new(trait_),
+ }
+ }
+ }
+ }
+}
+
+impl FromWithTcx<clean::Term> for Term {
+ fn from_tcx(term: clean::Term, tcx: TyCtxt<'_>) -> Term {
+ match term {
+ clean::Term::Type(ty) => Term::Type(FromWithTcx::from_tcx(ty, tcx)),
+ clean::Term::Constant(c) => Term::Constant(FromWithTcx::from_tcx(c, tcx)),
+ }
+ }
+}
+
+impl FromWithTcx<clean::BareFunctionDecl> for FunctionPointer {
+ fn from_tcx(bare_decl: clean::BareFunctionDecl, tcx: TyCtxt<'_>) -> Self {
+ let clean::BareFunctionDecl { unsafety, generic_params, decl, abi } = bare_decl;
+ FunctionPointer {
+ header: Header {
+ unsafe_: matches!(unsafety, rustc_hir::Unsafety::Unsafe),
+ const_: false,
+ async_: false,
+ abi: convert_abi(abi),
+ },
+ generic_params: generic_params.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ decl: decl.into_tcx(tcx),
+ }
+ }
+}
+
+impl FromWithTcx<clean::FnDecl> for FnDecl {
+ fn from_tcx(decl: clean::FnDecl, tcx: TyCtxt<'_>) -> Self {
+ let clean::FnDecl { inputs, output, c_variadic } = decl;
+ FnDecl {
+ inputs: inputs
+ .values
+ .into_iter()
+ .map(|arg| (arg.name.to_string(), arg.type_.into_tcx(tcx)))
+ .collect(),
+ output: match output {
+ clean::FnRetTy::Return(t) => Some(t.into_tcx(tcx)),
+ clean::FnRetTy::DefaultReturn => None,
+ },
+ c_variadic,
+ }
+ }
+}
+
+impl FromWithTcx<clean::Trait> for Trait {
+ fn from_tcx(trait_: clean::Trait, tcx: TyCtxt<'_>) -> Self {
+ let is_auto = trait_.is_auto(tcx);
+ let is_unsafe = trait_.unsafety(tcx) == rustc_hir::Unsafety::Unsafe;
+ let clean::Trait { items, generics, bounds, .. } = trait_;
+ Trait {
+ is_auto,
+ is_unsafe,
+ items: ids(items, tcx),
+ generics: generics.into_tcx(tcx),
+ bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ implementations: Vec::new(), // Added in JsonRenderer::item
+ }
+ }
+}
+
+impl FromWithTcx<Box<clean::Impl>> for Impl {
+ fn from_tcx(impl_: Box<clean::Impl>, tcx: TyCtxt<'_>) -> Self {
+ let provided_trait_methods = impl_.provided_trait_methods(tcx);
+ let clean::Impl { unsafety, generics, trait_, for_, items, polarity, kind } = *impl_;
+ // FIXME: should `trait_` be a clean::Path equivalent in JSON?
+ let trait_ = trait_.map(|path| clean::Type::Path { path }.into_tcx(tcx));
+ // FIXME: use something like ImplKind in JSON?
+ let (synthetic, blanket_impl) = match kind {
+ clean::ImplKind::Normal | clean::ImplKind::FakeVaradic => (false, None),
+ clean::ImplKind::Auto => (true, None),
+ clean::ImplKind::Blanket(ty) => (false, Some(*ty)),
+ };
+ let negative_polarity = match polarity {
+ ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => false,
+ ty::ImplPolarity::Negative => true,
+ };
+ Impl {
+ is_unsafe: unsafety == rustc_hir::Unsafety::Unsafe,
+ generics: generics.into_tcx(tcx),
+ provided_trait_methods: provided_trait_methods
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect(),
+ trait_,
+ for_: for_.into_tcx(tcx),
+ items: ids(items, tcx),
+ negative: negative_polarity,
+ synthetic,
+ blanket_impl: blanket_impl.map(|x| x.into_tcx(tcx)),
+ }
+ }
+}
+
+pub(crate) fn from_function(
+ function: Box<clean::Function>,
+ header: rustc_hir::FnHeader,
+ tcx: TyCtxt<'_>,
+) -> Function {
+ let clean::Function { decl, generics } = *function;
+ Function {
+ decl: decl.into_tcx(tcx),
+ generics: generics.into_tcx(tcx),
+ header: from_fn_header(&header),
+ }
+}
+
+pub(crate) fn from_function_method(
+ function: Box<clean::Function>,
+ has_body: bool,
+ header: rustc_hir::FnHeader,
+ tcx: TyCtxt<'_>,
+) -> Method {
+ let clean::Function { decl, generics } = *function;
+ Method {
+ decl: decl.into_tcx(tcx),
+ generics: generics.into_tcx(tcx),
+ header: from_fn_header(&header),
+ has_body,
+ }
+}
+
+impl FromWithTcx<clean::Enum> for Enum {
+ fn from_tcx(enum_: clean::Enum, tcx: TyCtxt<'_>) -> Self {
+ let variants_stripped = enum_.has_stripped_entries();
+ let clean::Enum { variants, generics } = enum_;
+ Enum {
+ generics: generics.into_tcx(tcx),
+ variants_stripped,
+ variants: ids(variants, tcx),
+ impls: Vec::new(), // Added in JsonRenderer::item
+ }
+ }
+}
+
+impl FromWithTcx<clean::VariantStruct> for Struct {
+ fn from_tcx(struct_: clean::VariantStruct, tcx: TyCtxt<'_>) -> Self {
+ let fields_stripped = struct_.has_stripped_entries();
+ let clean::VariantStruct { struct_type, fields } = struct_;
+ Struct {
+ struct_type: from_ctor_kind(struct_type),
+ generics: Generics { params: vec![], where_predicates: vec![] },
+ fields_stripped,
+ fields: ids(fields, tcx),
+ impls: Vec::new(),
+ }
+ }
+}
+
+impl FromWithTcx<clean::Variant> for Variant {
+ fn from_tcx(variant: clean::Variant, tcx: TyCtxt<'_>) -> Self {
+ use clean::Variant::*;
+ match variant {
+ CLike => Variant::Plain,
+ Tuple(fields) => Variant::Tuple(
+ fields
+ .into_iter()
+ .map(|f| {
+ if let clean::StructFieldItem(ty) = *f.kind {
+ ty.into_tcx(tcx)
+ } else {
+ unreachable!()
+ }
+ })
+ .collect(),
+ ),
+ Struct(s) => Variant::Struct(ids(s.fields, tcx)),
+ }
+ }
+}
+
+impl FromWithTcx<clean::Import> for Import {
+ fn from_tcx(import: clean::Import, tcx: TyCtxt<'_>) -> Self {
+ use clean::ImportKind::*;
+ match import.kind {
+ Simple(s) => Import {
+ source: import.source.path.whole_name(),
+ name: s.to_string(),
+ id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)),
+ glob: false,
+ },
+ Glob => Import {
+ source: import.source.path.whole_name(),
+ name: import
+ .source
+ .path
+ .last_opt()
+ .unwrap_or_else(|| Symbol::intern("*"))
+ .to_string(),
+ id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)),
+ glob: true,
+ },
+ }
+ }
+}
+
+impl FromWithTcx<clean::ProcMacro> for ProcMacro {
+ fn from_tcx(mac: clean::ProcMacro, _tcx: TyCtxt<'_>) -> Self {
+ ProcMacro {
+ kind: from_macro_kind(mac.kind),
+ helpers: mac.helpers.iter().map(|x| x.to_string()).collect(),
+ }
+ }
+}
+
+pub(crate) fn from_macro_kind(kind: rustc_span::hygiene::MacroKind) -> MacroKind {
+ use rustc_span::hygiene::MacroKind::*;
+ match kind {
+ Bang => MacroKind::Bang,
+ Attr => MacroKind::Attr,
+ Derive => MacroKind::Derive,
+ }
+}
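+// For reference: `macro_rules!` macros and function-like proc macros are `Bang` (`foo!()`),
+// attribute proc macros are `Attr` (`#[foo]`), and derive macros are `Derive` (`#[derive(Foo)]`).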
+
+impl FromWithTcx<Box<clean::Typedef>> for Typedef {
+ fn from_tcx(typedef: Box<clean::Typedef>, tcx: TyCtxt<'_>) -> Self {
+ let clean::Typedef { type_, generics, item_type: _ } = *typedef;
+ Typedef { type_: type_.into_tcx(tcx), generics: generics.into_tcx(tcx) }
+ }
+}
+
+impl FromWithTcx<clean::OpaqueTy> for OpaqueTy {
+ fn from_tcx(opaque: clean::OpaqueTy, tcx: TyCtxt<'_>) -> Self {
+ OpaqueTy {
+ bounds: opaque.bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ generics: opaque.generics.into_tcx(tcx),
+ }
+ }
+}
+
+impl FromWithTcx<clean::Static> for Static {
+ fn from_tcx(stat: clean::Static, tcx: TyCtxt<'_>) -> Self {
+ Static {
+ type_: stat.type_.into_tcx(tcx),
+ mutable: stat.mutability == ast::Mutability::Mut,
+ expr: stat.expr.map(|e| print_const_expr(tcx, e)).unwrap_or_default(),
+ }
+ }
+}
+
+impl FromWithTcx<clean::TraitAlias> for TraitAlias {
+ fn from_tcx(alias: clean::TraitAlias, tcx: TyCtxt<'_>) -> Self {
+ TraitAlias {
+ generics: alias.generics.into_tcx(tcx),
+ params: alias.bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
+ }
+ }
+}
+
+impl FromWithTcx<ItemType> for ItemKind {
+ fn from_tcx(kind: ItemType, _tcx: TyCtxt<'_>) -> Self {
+ use ItemType::*;
+ match kind {
+ Module => ItemKind::Module,
+ ExternCrate => ItemKind::ExternCrate,
+ Import => ItemKind::Import,
+ Struct => ItemKind::Struct,
+ Union => ItemKind::Union,
+ Enum => ItemKind::Enum,
+ Function => ItemKind::Function,
+ Typedef => ItemKind::Typedef,
+ OpaqueTy => ItemKind::OpaqueTy,
+ Static => ItemKind::Static,
+ Constant => ItemKind::Constant,
+ Trait => ItemKind::Trait,
+ Impl => ItemKind::Impl,
+ TyMethod | Method => ItemKind::Method,
+ StructField => ItemKind::StructField,
+ Variant => ItemKind::Variant,
+ Macro => ItemKind::Macro,
+ Primitive => ItemKind::Primitive,
+ AssocConst => ItemKind::AssocConst,
+ AssocType => ItemKind::AssocType,
+ ForeignType => ItemKind::ForeignType,
+ Keyword => ItemKind::Keyword,
+ TraitAlias => ItemKind::TraitAlias,
+ ProcAttribute => ItemKind::ProcAttribute,
+ ProcDerive => ItemKind::ProcDerive,
+ }
+ }
+}
+
+fn ids(items: impl IntoIterator<Item = clean::Item>, tcx: TyCtxt<'_>) -> Vec<Id> {
+ items
+ .into_iter()
+ .filter(|x| !x.is_stripped() && !x.is_keyword())
+ .map(|i| from_item_id_with_name(i.item_id, tcx, i.name))
+ .collect()
+}
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
new file mode 100644
index 000000000..6364d00d0
--- /dev/null
+++ b/src/librustdoc/json/mod.rs
@@ -0,0 +1,327 @@
+//! Rustdoc's JSON backend
+//!
+//! This module contains the logic for rendering a crate as JSON rather than the normal static HTML
+//! output. See [the RFC](https://github.com/rust-lang/rfcs/pull/2963) and the [`types`] module
+//! docs for usage and details.
+
+mod conversions;
+
+use std::cell::RefCell;
+use std::fs::{create_dir_all, File};
+use std::io::{BufWriter, Write};
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::DefId;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::Session;
+use rustc_span::def_id::LOCAL_CRATE;
+
+use rustdoc_json_types as types;
+
+use crate::clean::types::{ExternalCrate, ExternalLocation};
+use crate::clean::ItemKind;
+use crate::config::RenderOptions;
+use crate::docfs::PathError;
+use crate::error::Error;
+use crate::formats::cache::Cache;
+use crate::formats::FormatRenderer;
+use crate::json::conversions::{from_item_id, from_item_id_with_name, IntoWithTcx};
+use crate::{clean, try_err};
+
+#[derive(Clone)]
+pub(crate) struct JsonRenderer<'tcx> {
+ tcx: TyCtxt<'tcx>,
+    /// A mapping from IDs to items that contains all local items for this crate, and which gets
+    /// output as a top-level field of the JSON blob.
+ index: Rc<RefCell<FxHashMap<types::Id, types::Item>>>,
+ /// The directory where the blob will be written to.
+ out_path: PathBuf,
+ cache: Rc<Cache>,
+}
+
+impl<'tcx> JsonRenderer<'tcx> {
+ fn sess(&self) -> &'tcx Session {
+ self.tcx.sess
+ }
+
+ fn get_trait_implementors(&mut self, id: DefId) -> Vec<types::Id> {
+ Rc::clone(&self.cache)
+ .implementors
+ .get(&id)
+ .map(|implementors| {
+ implementors
+ .iter()
+ .map(|i| {
+ let item = &i.impl_item;
+ self.item(item.clone()).unwrap();
+ from_item_id_with_name(item.item_id, self.tcx, item.name)
+ })
+ .collect()
+ })
+ .unwrap_or_default()
+ }
+
+ fn get_impls(&mut self, id: DefId) -> Vec<types::Id> {
+ Rc::clone(&self.cache)
+ .impls
+ .get(&id)
+ .map(|impls| {
+ impls
+ .iter()
+ .filter_map(|i| {
+ let item = &i.impl_item;
+
+ // HACK(hkmatsumoto): For impls of primitive types, we index them
+ // regardless of whether they're local. This is because users can
+ // document primitive items in an arbitrary crate by using
+ // `doc(primitive)`.
+ let mut is_primitive_impl = false;
+ if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind {
+ if impl_.trait_.is_none() {
+ if let clean::types::Type::Primitive(_) = impl_.for_ {
+ is_primitive_impl = true;
+ }
+ }
+ }
+
+ if item.item_id.is_local() || is_primitive_impl {
+ self.item(item.clone()).unwrap();
+ Some(from_item_id_with_name(item.item_id, self.tcx, item.name))
+ } else {
+ None
+ }
+ })
+ .collect()
+ })
+ .unwrap_or_default()
+ }
+
+ fn get_trait_items(&mut self) -> Vec<(types::Id, types::Item)> {
+ Rc::clone(&self.cache)
+ .traits
+ .iter()
+ .filter_map(|(&id, trait_item)| {
+ // only need to synthesize items for external traits
+ if !id.is_local() {
+ let trait_item = &trait_item.trait_;
+ trait_item.items.clone().into_iter().for_each(|i| self.item(i).unwrap());
+ let item_id = from_item_id(id.into(), self.tcx);
+ Some((
+ item_id.clone(),
+ types::Item {
+ id: item_id,
+ crate_id: id.krate.as_u32(),
+ name: self
+ .cache
+ .paths
+ .get(&id)
+ .unwrap_or_else(|| {
+ self.cache
+ .external_paths
+ .get(&id)
+ .expect("Trait should either be in local or external paths")
+ })
+ .0
+ .last()
+ .map(|s| s.to_string()),
+ visibility: types::Visibility::Public,
+ inner: types::ItemEnum::Trait(trait_item.clone().into_tcx(self.tcx)),
+ span: None,
+ docs: Default::default(),
+ links: Default::default(),
+ attrs: Default::default(),
+ deprecation: Default::default(),
+ },
+ ))
+ } else {
+ None
+ }
+ })
+ .collect()
+ }
+}
+
+impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
+ fn descr() -> &'static str {
+ "json"
+ }
+
+ const RUN_ON_MODULE: bool = false;
+
+ fn init(
+ krate: clean::Crate,
+ options: RenderOptions,
+ cache: Cache,
+ tcx: TyCtxt<'tcx>,
+ ) -> Result<(Self, clean::Crate), Error> {
+ debug!("Initializing json renderer");
+ Ok((
+ JsonRenderer {
+ tcx,
+ index: Rc::new(RefCell::new(FxHashMap::default())),
+ out_path: options.output,
+ cache: Rc::new(cache),
+ },
+ krate,
+ ))
+ }
+
+ fn make_child_renderer(&self) -> Self {
+ self.clone()
+ }
+
+ /// Inserts an item into the index. This should be used rather than directly calling insert on
+ /// the hashmap because certain items (traits and types) need to have their mappings for trait
+ /// implementations filled out before they're inserted.
+ fn item(&mut self, item: clean::Item) -> Result<(), Error> {
+ trace!("rendering {} {:?}", item.type_(), item.name);
+
+        // Flatten items that recursively store other items. We include orphaned items from
+        // stripped modules, etc., that are otherwise reachable.
+ if let ItemKind::StrippedItem(inner) = &*item.kind {
+ inner.inner_items().for_each(|i| self.item(i.clone()).unwrap());
+ }
+
+ // Flatten items that recursively store other items
+ item.kind.inner_items().for_each(|i| self.item(i.clone()).unwrap());
+
+ let name = item.name;
+ let item_id = item.item_id;
+ if let Some(mut new_item) = self.convert_item(item) {
+ let can_be_ignored = match new_item.inner {
+ types::ItemEnum::Trait(ref mut t) => {
+ t.implementations = self.get_trait_implementors(item_id.expect_def_id());
+ false
+ }
+ types::ItemEnum::Struct(ref mut s) => {
+ s.impls = self.get_impls(item_id.expect_def_id());
+ false
+ }
+ types::ItemEnum::Enum(ref mut e) => {
+ e.impls = self.get_impls(item_id.expect_def_id());
+ false
+ }
+ types::ItemEnum::Union(ref mut u) => {
+ u.impls = self.get_impls(item_id.expect_def_id());
+ false
+ }
+
+ types::ItemEnum::Method(_)
+ | types::ItemEnum::AssocConst { .. }
+ | types::ItemEnum::AssocType { .. }
+ | types::ItemEnum::PrimitiveType(_) => true,
+ types::ItemEnum::Module(_)
+ | types::ItemEnum::ExternCrate { .. }
+ | types::ItemEnum::Import(_)
+ | types::ItemEnum::StructField(_)
+ | types::ItemEnum::Variant(_)
+ | types::ItemEnum::Function(_)
+ | types::ItemEnum::TraitAlias(_)
+ | types::ItemEnum::Impl(_)
+ | types::ItemEnum::Typedef(_)
+ | types::ItemEnum::OpaqueTy(_)
+ | types::ItemEnum::Constant(_)
+ | types::ItemEnum::Static(_)
+ | types::ItemEnum::ForeignType
+ | types::ItemEnum::Macro(_)
+ | types::ItemEnum::ProcMacro(_) => false,
+ };
+ let removed = self
+ .index
+ .borrow_mut()
+ .insert(from_item_id_with_name(item_id, self.tcx, name), new_item.clone());
+
+ // FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check
+        // to make sure the items are unique. The main place this happens is when an item is
+        // reexported in more than one place. See `rustdoc-json/reexport/in_root_and_mod`.
+ if let Some(old_item) = removed {
+ // In case of generic implementations (like `impl<T> Trait for T {}`), all the
+ // inner items will be duplicated so we can ignore if they are slightly different.
+ if !can_be_ignored {
+ assert_eq!(old_item, new_item);
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ fn mod_item_in(&mut self, _item: &clean::Item) -> Result<(), Error> {
+ unreachable!("RUN_ON_MODULE = false should never call mod_item_in")
+ }
+
+ fn after_krate(&mut self) -> Result<(), Error> {
+ debug!("Done with crate");
+
+ for primitive in Rc::clone(&self.cache).primitive_locations.values() {
+ self.get_impls(*primitive);
+ }
+
+ let e = ExternalCrate { crate_num: LOCAL_CRATE };
+
+ let mut index = (*self.index).clone().into_inner();
+ index.extend(self.get_trait_items());
+ // This needs to be the default HashMap for compatibility with the public interface for
+ // rustdoc-json-types
+ #[allow(rustc::default_hash_types)]
+ let output = types::Crate {
+ root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())),
+ crate_version: self.cache.crate_version.clone(),
+ includes_private: self.cache.document_private,
+ index: index.into_iter().collect(),
+ paths: self
+ .cache
+ .paths
+ .clone()
+ .into_iter()
+ .chain(self.cache.external_paths.clone().into_iter())
+ .map(|(k, (path, kind))| {
+ (
+ from_item_id(k.into(), self.tcx),
+ types::ItemSummary {
+ crate_id: k.krate.as_u32(),
+ path: path.iter().map(|s| s.to_string()).collect(),
+ kind: kind.into_tcx(self.tcx),
+ },
+ )
+ })
+ .collect(),
+ external_crates: self
+ .cache
+ .extern_locations
+ .iter()
+ .map(|(crate_num, external_location)| {
+ let e = ExternalCrate { crate_num: *crate_num };
+ (
+ crate_num.as_u32(),
+ types::ExternalCrate {
+ name: e.name(self.tcx).to_string(),
+ html_root_url: match external_location {
+ ExternalLocation::Remote(s) => Some(s.clone()),
+ _ => None,
+ },
+ },
+ )
+ })
+ .collect(),
+ format_version: types::FORMAT_VERSION,
+ };
+ let out_dir = self.out_path.clone();
+ try_err!(create_dir_all(&out_dir), out_dir);
+
+ let mut p = out_dir;
+ p.push(output.index.get(&output.root).unwrap().name.clone().unwrap());
+ p.set_extension("json");
+ let mut file = BufWriter::new(try_err!(File::create(&p), p));
+ serde_json::ser::to_writer(&mut file, &output).unwrap();
+ try_err!(file.flush(), p);
+
+ Ok(())
+ }
+
+ fn cache(&self) -> &Cache {
+ &self.cache
+ }
+}
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
new file mode 100644
index 000000000..92380d124
--- /dev/null
+++ b/src/librustdoc/lib.rs
@@ -0,0 +1,868 @@
+#![doc(
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/"
+)]
+#![feature(rustc_private)]
+#![feature(array_methods)]
+#![feature(assert_matches)]
+#![feature(box_patterns)]
+#![feature(control_flow_enum)]
+#![feature(drain_filter)]
+#![feature(let_chains)]
+#![feature(let_else)]
+#![feature(test)]
+#![feature(never_type)]
+#![feature(once_cell)]
+#![feature(type_ascription)]
+#![feature(iter_intersperse)]
+#![feature(type_alias_impl_trait)]
+#![feature(generic_associated_types)]
+#![recursion_limit = "256"]
+#![warn(rustc::internal)]
+#![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
+#![allow(rustc::potential_query_instability)]
+
+#[macro_use]
+extern crate tracing;
+
+// N.B. these need `extern crate` even in 2018 edition
+// because they're loaded implicitly from the sysroot.
+// The reason they're loaded from the sysroot is because
+// the rustdoc artifacts aren't stored in rustc's cargo target directory.
+// So if `rustc` was specified in Cargo.toml, this would spuriously rebuild crates.
+//
+// Dependencies listed in Cargo.toml do not need `extern crate`.
+
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_attr;
+extern crate rustc_const_eval;
+extern crate rustc_data_structures;
+extern crate rustc_driver;
+extern crate rustc_errors;
+extern crate rustc_expand;
+extern crate rustc_feature;
+extern crate rustc_hir;
+extern crate rustc_hir_pretty;
+extern crate rustc_index;
+extern crate rustc_infer;
+extern crate rustc_interface;
+extern crate rustc_lexer;
+extern crate rustc_lint;
+extern crate rustc_lint_defs;
+extern crate rustc_macros;
+extern crate rustc_metadata;
+extern crate rustc_middle;
+extern crate rustc_parse;
+extern crate rustc_passes;
+extern crate rustc_resolve;
+extern crate rustc_serialize;
+extern crate rustc_session;
+extern crate rustc_span;
+extern crate rustc_target;
+extern crate rustc_trait_selection;
+extern crate rustc_typeck;
+extern crate test;
+
+// See docs in https://github.com/rust-lang/rust/blob/master/compiler/rustc/src/main.rs
+// about jemalloc.
+#[cfg(feature = "jemalloc")]
+extern crate jemalloc_sys;
+
+use std::default::Default;
+use std::env::{self, VarError};
+use std::io;
+use std::process;
+
+use rustc_driver::abort_on_err;
+use rustc_errors::ErrorGuaranteed;
+use rustc_interface::interface;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::config::{make_crate_type_option, ErrorOutputType, RustcOptGroup};
+use rustc_session::getopts;
+use rustc_session::{early_error, early_warn};
+
+use crate::clean::utils::DOC_RUST_LANG_ORG_CHANNEL;
+use crate::passes::collect_intra_doc_links;
+
+/// A macro to create a FxHashMap.
+///
+/// Example:
+///
+/// ```ignore(cannot-test-this-because-non-exported-macro)
+/// let letters = map!{"a" => "b", "c" => "d"};
+/// ```
+///
+/// Trailing commas are allowed.
+/// Commas between elements are required (even if the expression is a block).
+macro_rules! map {
+ ($( $key: expr => $val: expr ),* $(,)*) => {{
+ let mut map = ::rustc_data_structures::fx::FxHashMap::default();
+ $( map.insert($key, $val); )*
+ map
+ }}
+}
+
+mod clean;
+mod config;
+mod core;
+mod docfs;
+mod doctest;
+mod error;
+mod externalfiles;
+mod fold;
+mod formats;
+// used by the error-index generator, so it needs to be public
+pub mod html;
+mod json;
+pub(crate) mod lint;
+mod markdown;
+mod passes;
+mod scrape_examples;
+mod theme;
+mod visit;
+mod visit_ast;
+mod visit_lib;
+
+pub fn main() {
+ // See docs in https://github.com/rust-lang/rust/blob/master/compiler/rustc/src/main.rs
+ // about jemalloc.
+ #[cfg(feature = "jemalloc")]
+ {
+ use std::os::raw::{c_int, c_void};
+
+ #[used]
+ static _F1: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::calloc;
+ #[used]
+ static _F2: unsafe extern "C" fn(*mut *mut c_void, usize, usize) -> c_int =
+ jemalloc_sys::posix_memalign;
+ #[used]
+ static _F3: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::aligned_alloc;
+ #[used]
+ static _F4: unsafe extern "C" fn(usize) -> *mut c_void = jemalloc_sys::malloc;
+ #[used]
+ static _F5: unsafe extern "C" fn(*mut c_void, usize) -> *mut c_void = jemalloc_sys::realloc;
+ #[used]
+ static _F6: unsafe extern "C" fn(*mut c_void) = jemalloc_sys::free;
+
+ #[cfg(target_os = "macos")]
+ {
+ extern "C" {
+ fn _rjem_je_zone_register();
+ }
+
+ #[used]
+ static _F7: unsafe extern "C" fn() = _rjem_je_zone_register;
+ }
+ }
+
+ rustc_driver::set_sigpipe_handler();
+ rustc_driver::install_ice_hook();
+
+ // When using CI artifacts (with `download_stage1 = true`), tracing is unconditionally built
+ // with `--features=static_max_level_info`, which disables almost all rustdoc logging. To avoid
+ // this, compile our own version of `tracing` that logs all levels.
+ // NOTE: this compiles both versions of tracing unconditionally, because
+ // - The compile time hit is not that bad, especially compared to rustdoc's incremental times, and
+ // - Otherwise, there's no warning that logging is being ignored when `download_stage1 = true`.
+ // NOTE: The reason this doesn't show double logging when `download_stage1 = false` and
+ // `debug_logging = true` is because all rustc logging goes to its version of tracing (the one
+ // in the sysroot), and all of rustdoc's logging goes to its version (the one in Cargo.toml).
+ init_logging();
+ rustc_driver::init_env_logger("RUSTDOC_LOG");
+
+ let exit_code = rustc_driver::catch_with_exit_code(|| match get_args() {
+ Some(args) => main_args(&args),
+ _ => Err(ErrorGuaranteed::unchecked_claim_error_was_emitted()),
+ });
+ process::exit(exit_code);
+}
+
+fn init_logging() {
+ let color_logs = match std::env::var("RUSTDOC_LOG_COLOR").as_deref() {
+ Ok("always") => true,
+ Ok("never") => false,
+ Ok("auto") | Err(VarError::NotPresent) => atty::is(atty::Stream::Stdout),
+ Ok(value) => early_error(
+ ErrorOutputType::default(),
+ &format!("invalid log color value '{}': expected one of always, never, or auto", value),
+ ),
+ Err(VarError::NotUnicode(value)) => early_error(
+ ErrorOutputType::default(),
+ &format!(
+ "invalid log color value '{}': expected one of always, never, or auto",
+ value.to_string_lossy()
+ ),
+ ),
+ };
+ let filter = tracing_subscriber::EnvFilter::from_env("RUSTDOC_LOG");
+ let layer = tracing_tree::HierarchicalLayer::default()
+ .with_writer(io::stderr)
+ .with_indent_lines(true)
+ .with_ansi(color_logs)
+ .with_targets(true)
+ .with_wraparound(10)
+ .with_verbose_exit(true)
+ .with_verbose_entry(true)
+ .with_indent_amount(2);
+ #[cfg(parallel_compiler)]
+ let layer = layer.with_thread_ids(true).with_thread_names(true);
+
+ use tracing_subscriber::layer::SubscriberExt;
+ let subscriber = tracing_subscriber::Registry::default().with(filter).with(layer);
+ tracing::subscriber::set_global_default(subscriber).unwrap();
+}
+
+fn get_args() -> Option<Vec<String>> {
+ env::args_os()
+ .enumerate()
+ .map(|(i, arg)| {
+ arg.into_string()
+ .map_err(|arg| {
+ early_warn(
+ ErrorOutputType::default(),
+ &format!("Argument {} is not valid Unicode: {:?}", i, arg),
+ );
+ })
+ .ok()
+ })
+ .collect()
+}
+
+fn opts() -> Vec<RustcOptGroup> {
+ let stable: fn(_, fn(&mut getopts::Options) -> &mut _) -> _ = RustcOptGroup::stable;
+ let unstable: fn(_, fn(&mut getopts::Options) -> &mut _) -> _ = RustcOptGroup::unstable;
+ vec![
+ stable("h", |o| o.optflagmulti("h", "help", "show this help message")),
+ stable("V", |o| o.optflagmulti("V", "version", "print rustdoc's version")),
+ stable("v", |o| o.optflagmulti("v", "verbose", "use verbose output")),
+ stable("w", |o| o.optopt("w", "output-format", "the output type to write", "[html]")),
+ stable("output", |o| {
+ o.optopt(
+ "",
+ "output",
+ "Which directory to place the output. \
+ This option is deprecated, use --out-dir instead.",
+ "PATH",
+ )
+ }),
+ stable("o", |o| o.optopt("o", "out-dir", "which directory to place the output", "PATH")),
+ stable("crate-name", |o| {
+ o.optopt("", "crate-name", "specify the name of this crate", "NAME")
+ }),
+ make_crate_type_option(),
+ stable("L", |o| {
+ o.optmulti("L", "library-path", "directory to add to crate search path", "DIR")
+ }),
+ stable("cfg", |o| o.optmulti("", "cfg", "pass a --cfg to rustc", "")),
+ unstable("check-cfg", |o| o.optmulti("", "check-cfg", "pass a --check-cfg to rustc", "")),
+ stable("extern", |o| o.optmulti("", "extern", "pass an --extern to rustc", "NAME[=PATH]")),
+ unstable("extern-html-root-url", |o| {
+ o.optmulti(
+ "",
+ "extern-html-root-url",
+ "base URL to use for dependencies; for example, \
+ \"std=/doc\" links std::vec::Vec to /doc/std/vec/struct.Vec.html",
+ "NAME=URL",
+ )
+ }),
+ unstable("extern-html-root-takes-precedence", |o| {
+ o.optflagmulti(
+ "",
+ "extern-html-root-takes-precedence",
+ "give precedence to `--extern-html-root-url`, not `html_root_url`",
+ )
+ }),
+ stable("C", |o| {
+ o.optmulti("C", "codegen", "pass a codegen option to rustc", "OPT[=VALUE]")
+ }),
+ stable("document-private-items", |o| {
+ o.optflagmulti("", "document-private-items", "document private items")
+ }),
+ unstable("document-hidden-items", |o| {
+ o.optflagmulti("", "document-hidden-items", "document items that have doc(hidden)")
+ }),
+ stable("test", |o| o.optflagmulti("", "test", "run code examples as tests")),
+ stable("test-args", |o| {
+ o.optmulti("", "test-args", "arguments to pass to the test runner", "ARGS")
+ }),
+ unstable("test-run-directory", |o| {
+ o.optopt(
+ "",
+ "test-run-directory",
+ "The working directory in which to run tests",
+ "PATH",
+ )
+ }),
+ stable("target", |o| o.optopt("", "target", "target triple to document", "TRIPLE")),
+ stable("markdown-css", |o| {
+ o.optmulti(
+ "",
+ "markdown-css",
+ "CSS files to include via <link> in a rendered Markdown file",
+ "FILES",
+ )
+ }),
+ stable("html-in-header", |o| {
+ o.optmulti(
+ "",
+ "html-in-header",
+ "files to include inline in the <head> section of a rendered Markdown file \
+ or generated documentation",
+ "FILES",
+ )
+ }),
+ stable("html-before-content", |o| {
+ o.optmulti(
+ "",
+ "html-before-content",
+ "files to include inline between <body> and the content of a rendered \
+ Markdown file or generated documentation",
+ "FILES",
+ )
+ }),
+ stable("html-after-content", |o| {
+ o.optmulti(
+ "",
+ "html-after-content",
+ "files to include inline between the content and </body> of a rendered \
+ Markdown file or generated documentation",
+ "FILES",
+ )
+ }),
+ unstable("markdown-before-content", |o| {
+ o.optmulti(
+ "",
+ "markdown-before-content",
+ "files to include inline between <body> and the content of a rendered \
+ Markdown file or generated documentation",
+ "FILES",
+ )
+ }),
+ unstable("markdown-after-content", |o| {
+ o.optmulti(
+ "",
+ "markdown-after-content",
+ "files to include inline between the content and </body> of a rendered \
+ Markdown file or generated documentation",
+ "FILES",
+ )
+ }),
+ stable("markdown-playground-url", |o| {
+ o.optopt("", "markdown-playground-url", "URL to send code snippets to", "URL")
+ }),
+ stable("markdown-no-toc", |o| {
+ o.optflagmulti("", "markdown-no-toc", "don't include table of contents")
+ }),
+ stable("e", |o| {
+ o.optopt(
+ "e",
+ "extend-css",
+                "Add some CSS rules from a given file to generate docs with your \
+                 own theme. However, your theme might break if rustdoc's generated HTML \
+                 changes, so be careful!",
+ "PATH",
+ )
+ }),
+ unstable("Z", |o| {
+ o.optmulti("Z", "", "unstable / perma-unstable options (only on nightly build)", "FLAG")
+ }),
+ stable("sysroot", |o| o.optopt("", "sysroot", "Override the system root", "PATH")),
+ unstable("playground-url", |o| {
+ o.optopt(
+ "",
+ "playground-url",
+ "URL to send code snippets to, may be reset by --markdown-playground-url \
+ or `#![doc(html_playground_url=...)]`",
+ "URL",
+ )
+ }),
+ unstable("display-doctest-warnings", |o| {
+ o.optflagmulti(
+ "",
+ "display-doctest-warnings",
+ "show warnings that originate in doctests",
+ )
+ }),
+ stable("crate-version", |o| {
+ o.optopt("", "crate-version", "crate version to print into documentation", "VERSION")
+ }),
+ unstable("sort-modules-by-appearance", |o| {
+ o.optflagmulti(
+ "",
+ "sort-modules-by-appearance",
+ "sort modules by where they appear in the program, rather than alphabetically",
+ )
+ }),
+ stable("default-theme", |o| {
+ o.optopt(
+ "",
+ "default-theme",
+ "Set the default theme. THEME should be the theme name, generally lowercase. \
+ If an unknown default theme is specified, the builtin default is used. \
+ The set of themes, and the rustdoc built-in default, are not stable.",
+ "THEME",
+ )
+ }),
+ unstable("default-setting", |o| {
+ o.optmulti(
+ "",
+ "default-setting",
+ "Default value for a rustdoc setting (used when \"rustdoc-SETTING\" is absent \
+ from web browser Local Storage). If VALUE is not supplied, \"true\" is used. \
+ Supported SETTINGs and VALUEs are not documented and not stable.",
+ "SETTING[=VALUE]",
+ )
+ }),
+ stable("theme", |o| {
+ o.optmulti(
+ "",
+ "theme",
+ "additional themes which will be added to the generated docs",
+ "FILES",
+ )
+ }),
+ stable("check-theme", |o| {
+ o.optmulti("", "check-theme", "check if given theme is valid", "FILES")
+ }),
+ unstable("resource-suffix", |o| {
+ o.optopt(
+ "",
+ "resource-suffix",
+ "suffix to add to CSS and JavaScript files, e.g., \"light.css\" will become \
+ \"light-suffix.css\"",
+ "PATH",
+ )
+ }),
+ stable("edition", |o| {
+ o.optopt(
+ "",
+ "edition",
+ "edition to use when compiling rust code (default: 2015)",
+ "EDITION",
+ )
+ }),
+ stable("color", |o| {
+ o.optopt(
+ "",
+ "color",
+ "Configure coloring of output:
+ auto = colorize, if output goes to a tty (default);
+ always = always colorize output;
+ never = never colorize output",
+ "auto|always|never",
+ )
+ }),
+ stable("error-format", |o| {
+ o.optopt(
+ "",
+ "error-format",
+ "How errors and other messages are produced",
+ "human|json|short",
+ )
+ }),
+ unstable("diagnostic-width", |o| {
+ o.optopt(
+ "",
+ "diagnostic-width",
+ "Provide width of the output for truncated error messages",
+ "WIDTH",
+ )
+ }),
+ stable("json", |o| {
+ o.optopt("", "json", "Configure the structure of JSON diagnostics", "CONFIG")
+ }),
+ unstable("disable-minification", |o| {
+ o.optflagmulti("", "disable-minification", "Disable minification applied on JS files")
+ }),
+ stable("allow", |o| o.optmulti("A", "allow", "Set lint allowed", "LINT")),
+ stable("warn", |o| o.optmulti("W", "warn", "Set lint warnings", "LINT")),
+ stable("force-warn", |o| o.optmulti("", "force-warn", "Set lint force-warn", "LINT")),
+ stable("deny", |o| o.optmulti("D", "deny", "Set lint denied", "LINT")),
+ stable("forbid", |o| o.optmulti("F", "forbid", "Set lint forbidden", "LINT")),
+ stable("cap-lints", |o| {
+ o.optmulti(
+ "",
+ "cap-lints",
+ "Set the most restrictive lint level. \
+ More restrictive lints are capped at this \
+ level. By default, it is at `forbid` level.",
+ "LEVEL",
+ )
+ }),
+ unstable("index-page", |o| {
+ o.optopt("", "index-page", "Markdown file to be used as index page", "PATH")
+ }),
+ unstable("enable-index-page", |o| {
+ o.optflagmulti("", "enable-index-page", "To enable generation of the index page")
+ }),
+ unstable("static-root-path", |o| {
+ o.optopt(
+ "",
+ "static-root-path",
+ "Path string to force loading static files from in output pages. \
+ If not set, uses combinations of '../' to reach the documentation root.",
+ "PATH",
+ )
+ }),
+ unstable("disable-per-crate-search", |o| {
+ o.optflagmulti(
+ "",
+ "disable-per-crate-search",
+ "disables generating the crate selector on the search box",
+ )
+ }),
+ unstable("persist-doctests", |o| {
+ o.optopt(
+ "",
+ "persist-doctests",
+ "Directory to persist doctest executables into",
+ "PATH",
+ )
+ }),
+ unstable("show-coverage", |o| {
+ o.optflagmulti(
+ "",
+ "show-coverage",
+ "calculate percentage of public items with documentation",
+ )
+ }),
+ unstable("enable-per-target-ignores", |o| {
+ o.optflagmulti(
+ "",
+ "enable-per-target-ignores",
+ "parse ignore-foo for ignoring doctests on a per-target basis",
+ )
+ }),
+ unstable("runtool", |o| {
+ o.optopt(
+ "",
+ "runtool",
+ "",
+ "The tool to run tests with when building for a different target than host",
+ )
+ }),
+ unstable("runtool-arg", |o| {
+ o.optmulti(
+ "",
+ "runtool-arg",
+ "",
+ "One (of possibly many) arguments to pass to the runtool",
+ )
+ }),
+ unstable("test-builder", |o| {
+ o.optopt("", "test-builder", "The rustc-like binary to use as the test builder", "PATH")
+ }),
+ unstable("check", |o| o.optflagmulti("", "check", "Run rustdoc checks")),
+ unstable("generate-redirect-map", |o| {
+ o.optflagmulti(
+ "",
+ "generate-redirect-map",
+ "Generate JSON file at the top level instead of generating HTML redirection files",
+ )
+ }),
+ unstable("emit", |o| {
+ o.optmulti(
+ "",
+ "emit",
+ "Comma separated list of types of output for rustdoc to emit",
+ "[unversioned-shared-resources,toolchain-shared-resources,invocation-specific]",
+ )
+ }),
+ unstable("no-run", |o| {
+ o.optflagmulti("", "no-run", "Compile doctests without running them")
+ }),
+ unstable("show-type-layout", |o| {
+ o.optflagmulti("", "show-type-layout", "Include the memory layout of types in the docs")
+ }),
+ unstable("nocapture", |o| {
+ o.optflag("", "nocapture", "Don't capture stdout and stderr of tests")
+ }),
+ unstable("generate-link-to-definition", |o| {
+ o.optflag(
+ "",
+ "generate-link-to-definition",
+ "Make the identifiers in the HTML source code pages navigable",
+ )
+ }),
+ unstable("scrape-examples-output-path", |o| {
+ o.optopt(
+ "",
+ "scrape-examples-output-path",
+ "",
+ "collect function call information and output at the given path",
+ )
+ }),
+ unstable("scrape-examples-target-crate", |o| {
+ o.optmulti(
+ "",
+ "scrape-examples-target-crate",
+ "",
+ "collect function call information for functions from the target crate",
+ )
+ }),
+ unstable("scrape-tests", |o| {
+ o.optflag("", "scrape-tests", "Include test code when scraping examples")
+ }),
+ unstable("with-examples", |o| {
+ o.optmulti(
+ "",
+ "with-examples",
+ "",
+ "path to function call information (for displaying examples in the documentation)",
+ )
+ }),
+ // deprecated / removed options
+ stable("plugin-path", |o| {
+ o.optmulti(
+ "",
+ "plugin-path",
+ "removed, see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ "DIR",
+ )
+ }),
+ stable("passes", |o| {
+ o.optmulti(
+ "",
+ "passes",
+ "removed, see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ "PASSES",
+ )
+ }),
+ stable("plugins", |o| {
+ o.optmulti(
+ "",
+ "plugins",
+ "removed, see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ "PLUGINS",
+ )
+ }),
+ stable("no-default", |o| {
+ o.optflagmulti(
+ "",
+ "no-defaults",
+ "removed, see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ )
+ }),
+ stable("r", |o| {
+ o.optopt(
+ "r",
+ "input-format",
+ "removed, see issue #44136 <https://github.com/rust-lang/rust/issues/44136> \
+ for more information",
+ "[rust]",
+ )
+ }),
+ ]
+}
+
+fn usage(argv0: &str) {
+ let mut options = getopts::Options::new();
+ for option in opts() {
+ (option.apply)(&mut options);
+ }
+ println!("{}", options.usage(&format!("{} [options] <input>", argv0)));
+ println!(" @path Read newline separated options from `path`\n");
+ println!(
+ "More information available at {}/rustdoc/what-is-rustdoc.html",
+ DOC_RUST_LANG_ORG_CHANNEL
+ );
+}
+
+/// A result type used by several functions under `main()`.
+type MainResult = Result<(), ErrorGuaranteed>;
+
+fn main_args(at_args: &[String]) -> MainResult {
+ let args = rustc_driver::args::arg_expand_all(at_args);
+
+ let mut options = getopts::Options::new();
+ for option in opts() {
+ (option.apply)(&mut options);
+ }
+ let matches = match options.parse(&args[1..]) {
+ Ok(m) => m,
+ Err(err) => {
+ early_error(ErrorOutputType::default(), &err.to_string());
+ }
+ };
+
+ // Note that we discard any distinction between different non-zero exit
+ // codes from `from_matches` here.
+ let options = match config::Options::from_matches(&matches, args) {
+ Ok(opts) => opts,
+ Err(code) => {
+ return if code == 0 {
+ Ok(())
+ } else {
+ Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ };
+ }
+ };
+ rustc_interface::util::run_in_thread_pool_with_globals(
+ options.edition,
+ 1, // this runs single-threaded, even in a parallel compiler
+ move || main_options(options),
+ )
+}
+
+fn wrap_return(diag: &rustc_errors::Handler, res: Result<(), String>) -> MainResult {
+ match res {
+ Ok(()) => Ok(()),
+ Err(err) => {
+ let reported = diag.struct_err(&err).emit();
+ Err(reported)
+ }
+ }
+}
+
+fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
+ krate: clean::Crate,
+ renderopts: config::RenderOptions,
+ cache: formats::cache::Cache,
+ tcx: TyCtxt<'tcx>,
+) -> MainResult {
+ match formats::run_format::<T>(krate, renderopts, cache, tcx) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ let mut msg =
+ tcx.sess.struct_err(&format!("couldn't generate documentation: {}", e.error));
+ let file = e.file.display().to_string();
+ if !file.is_empty() {
+ msg.note(&format!("failed to create or modify \"{}\"", file));
+ }
+ Err(msg.emit())
+ }
+ }
+}
+
+fn main_options(options: config::Options) -> MainResult {
+ let diag = core::new_handler(
+ options.error_format,
+ None,
+ options.diagnostic_width,
+ &options.unstable_opts,
+ );
+
+ match (options.should_test, options.markdown_input()) {
+ (true, true) => return wrap_return(&diag, markdown::test(options)),
+ (true, false) => return doctest::run(options),
+ (false, true) => {
+ return wrap_return(
+ &diag,
+ markdown::render(&options.input, options.render_options, options.edition),
+ );
+ }
+ (false, false) => {}
+ }
+
+ // need to move these items separately because we lose them by the time the closure is called,
+ // but we can't create the Handler ahead of time because it's not Send
+ let show_coverage = options.show_coverage;
+ let run_check = options.run_check;
+
+ // First, parse the crate and extract all relevant information.
+ info!("starting to run rustc");
+
+ // Interpret the input file as a rust source file, passing it through the
+ // compiler all the way through the analysis passes. The rustdoc output is
+    // then generated from the cleaned AST of the crate. This runs all the
+    // plugin/cleaning passes.
+ let crate_version = options.crate_version.clone();
+
+ let output_format = options.output_format;
+ // FIXME: fix this clone (especially render_options)
+ let externs = options.externs.clone();
+ let render_options = options.render_options.clone();
+ let scrape_examples_options = options.scrape_examples_options.clone();
+ let document_private = options.render_options.document_private;
+ let config = core::create_config(options);
+
+ interface::create_compiler_and_run(config, |compiler| {
+ let sess = compiler.session();
+
+ if sess.opts.describe_lints {
+ let mut lint_store = rustc_lint::new_lint_store(
+ sess.opts.unstable_opts.no_interleave_lints,
+ sess.enable_internal_lints(),
+ );
+ let registered_lints = if let Some(register_lints) = compiler.register_lints() {
+ register_lints(sess, &mut lint_store);
+ true
+ } else {
+ false
+ };
+ rustc_driver::describe_lints(sess, &lint_store, registered_lints);
+ return Ok(());
+ }
+
+ compiler.enter(|queries| {
+ // We need to hold on to the complete resolver, so we cause everything to be
+ // cloned for the analysis passes to use. Suboptimal, but necessary in the
+ // current architecture.
+ // FIXME(#83761): Resolver cloning can lead to inconsistencies between data in the
+ // two copies because one of the copies can be modified after `TyCtxt` construction.
+ let (resolver, resolver_caches) = {
+ let (krate, resolver, _) = &*abort_on_err(queries.expansion(), sess).peek();
+ let resolver_caches = resolver.borrow_mut().access(|resolver| {
+ collect_intra_doc_links::early_resolve_intra_doc_links(
+ resolver,
+ sess,
+ krate,
+ externs,
+ document_private,
+ )
+ });
+ (resolver.clone(), resolver_caches)
+ };
+
+ if sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ sess.fatal("Compilation failed, aborting rustdoc");
+ }
+
+ let mut global_ctxt = abort_on_err(queries.global_ctxt(), sess).peek_mut();
+
+ global_ctxt.enter(|tcx| {
+ let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || {
+ core::run_global_ctxt(
+ tcx,
+ resolver,
+ resolver_caches,
+ show_coverage,
+ render_options,
+ output_format,
+ )
+ });
+ info!("finished with rustc");
+
+ if let Some(options) = scrape_examples_options {
+ return scrape_examples::run(krate, render_opts, cache, tcx, options);
+ }
+
+ cache.crate_version = crate_version;
+
+ if show_coverage {
+                    // If we ran coverage, bail out early; we don't need to also generate docs at
+                    // this point (and we didn't load in any of the useful passes anyway).
+ return Ok(());
+ } else if run_check {
+ // Since we're in "check" mode, no need to generate anything beyond this point.
+ return Ok(());
+ }
+
+ info!("going to format");
+ match output_format {
+ config::OutputFormat::Html => sess.time("render_html", || {
+ run_renderer::<html::render::Context<'_>>(krate, render_opts, cache, tcx)
+ }),
+ config::OutputFormat::Json => sess.time("render_json", || {
+ run_renderer::<json::JsonRenderer<'_>>(krate, render_opts, cache, tcx)
+ }),
+ }
+ })
+ })
+ })
+}
diff --git a/src/librustdoc/lint.rs b/src/librustdoc/lint.rs
new file mode 100644
index 000000000..240aec52c
--- /dev/null
+++ b/src/librustdoc/lint.rs
@@ -0,0 +1,202 @@
+use rustc_data_structures::fx::FxHashMap;
+use rustc_lint::LintStore;
+use rustc_lint_defs::{declare_tool_lint, Lint, LintId};
+use rustc_session::{lint, Session};
+
+use std::sync::LazyLock as Lazy;
+
+/// This function is used to set up lint initialization. By default, in rustdoc, everything
+/// is "allowed". Depending on whether we run in test mode or not, we want some of the lints to be
+/// at their default level. For example, the "INVALID_CODEBLOCK_ATTRIBUTES" lint is activated in
+/// both modes.
+///
+/// A little detail that is easy to forget is that there is a way to set the lint level for all
+/// lints through the "WARNINGS" lint. To prevent this from happening, we set it back to its
+/// "normal" level inside this function.
+///
+/// It returns a tuple containing:
+/// * Vector of tuples of lint names and their associated "max" levels
+/// * HashMap of lint IDs with their associated "max" levels
+pub(crate) fn init_lints<F>(
+ mut allowed_lints: Vec<String>,
+ lint_opts: Vec<(String, lint::Level)>,
+ filter_call: F,
+) -> (Vec<(String, lint::Level)>, FxHashMap<lint::LintId, lint::Level>)
+where
+ F: Fn(&lint::Lint) -> Option<(String, lint::Level)>,
+{
+ let warnings_lint_name = lint::builtin::WARNINGS.name;
+
+ allowed_lints.push(warnings_lint_name.to_owned());
+ allowed_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned());
+
+ let lints = || {
+ lint::builtin::HardwiredLints::get_lints()
+ .into_iter()
+ .chain(rustc_lint::SoftLints::get_lints().into_iter())
+ };
+
+ let lint_opts = lints()
+ .filter_map(|lint| {
+ // Permit feature-gated lints to avoid feature errors when trying to
+ // allow all lints.
+ if lint.feature_gate.is_some() || allowed_lints.iter().any(|l| lint.name == l) {
+ None
+ } else {
+ filter_call(lint)
+ }
+ })
+ .chain(lint_opts.into_iter())
+ .collect::<Vec<_>>();
+
+ let lint_caps = lints()
+ .filter_map(|lint| {
+ // We don't want to allow *all* lints so let's ignore
+ // those ones.
+ if allowed_lints.iter().any(|l| lint.name == l) {
+ None
+ } else {
+ Some((lint::LintId::of(lint), lint::Allow))
+ }
+ })
+ .collect();
+ (lint_opts, lint_caps)
+}
+
+macro_rules! declare_rustdoc_lint {
+ ($(#[$attr:meta])* $name: ident, $level: ident, $descr: literal $(,)?) => {
+ declare_tool_lint! {
+ $(#[$attr])* pub rustdoc::$name, $level, $descr
+ }
+ }
+}
+
+declare_rustdoc_lint! {
+ /// The `broken_intra_doc_links` lint detects failures in resolving
+ /// intra-doc link targets. This is a `rustdoc` only lint, see the
+ /// documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#broken_intra_doc_links
+ BROKEN_INTRA_DOC_LINKS,
+ Warn,
+ "failures in resolving intra-doc link targets"
+}
+
+declare_rustdoc_lint! {
+ /// This is a subset of `broken_intra_doc_links` that warns when linking from
+ /// a public item to a private one. This is a `rustdoc` only lint, see the
+ /// documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#private_intra_doc_links
+ PRIVATE_INTRA_DOC_LINKS,
+ Warn,
+ "linking from a public item to a private one"
+}
+
+declare_rustdoc_lint! {
+ /// The `invalid_codeblock_attributes` lint detects code block attributes
+ /// in documentation examples that have potentially mis-typed values. This
+ /// is a `rustdoc` only lint, see the documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#invalid_codeblock_attributes
+ INVALID_CODEBLOCK_ATTRIBUTES,
+ Warn,
+ "codeblock attribute looks a lot like a known one"
+}
+
+declare_rustdoc_lint! {
+ /// The `missing_crate_level_docs` lint detects if documentation is
+ /// missing at the crate root. This is a `rustdoc` only lint, see the
+ /// documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#missing_crate_level_docs
+ MISSING_CRATE_LEVEL_DOCS,
+ Allow,
+ "detects crates with no crate-level documentation"
+}
+
+declare_rustdoc_lint! {
+ /// The `missing_doc_code_examples` lint detects publicly-exported items
+ /// without code samples in their documentation. This is a `rustdoc` only
+ /// lint, see the documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#missing_doc_code_examples
+ MISSING_DOC_CODE_EXAMPLES,
+ Allow,
+ "detects publicly-exported items without code samples in their documentation"
+}
+
+declare_rustdoc_lint! {
+ /// The `private_doc_tests` lint detects code samples in docs of private
+ /// items not documented by `rustdoc`. This is a `rustdoc` only lint, see
+ /// the documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#private_doc_tests
+ PRIVATE_DOC_TESTS,
+ Allow,
+ "detects code samples in docs of private items not documented by rustdoc"
+}
+
+declare_rustdoc_lint! {
+ /// The `invalid_html_tags` lint detects invalid HTML tags. This is a
+ /// `rustdoc` only lint, see the documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#invalid_html_tags
+ INVALID_HTML_TAGS,
+ Allow,
+ "detects invalid HTML tags in doc comments"
+}
+
+declare_rustdoc_lint! {
+ /// The `bare_urls` lint detects when a URL is not a hyperlink.
+ /// This is a `rustdoc` only lint, see the documentation in the [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#bare_urls
+ BARE_URLS,
+ Warn,
+ "detects URLs that are not hyperlinks"
+}
+
+declare_rustdoc_lint! {
+ /// The `invalid_rust_codeblocks` lint detects Rust code blocks in
+ /// documentation examples that are invalid (e.g. empty, not parsable as
+ /// Rust code). This is a `rustdoc` only lint, see the documentation in the
+ /// [rustdoc book].
+ ///
+ /// [rustdoc book]: ../../../rustdoc/lints.html#invalid_rust_codeblocks
+ INVALID_RUST_CODEBLOCKS,
+ Warn,
+ "codeblock could not be parsed as valid Rust or is empty"
+}
+
+pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
+ vec![
+ BROKEN_INTRA_DOC_LINKS,
+ PRIVATE_INTRA_DOC_LINKS,
+ MISSING_DOC_CODE_EXAMPLES,
+ PRIVATE_DOC_TESTS,
+ INVALID_CODEBLOCK_ATTRIBUTES,
+ INVALID_RUST_CODEBLOCKS,
+ INVALID_HTML_TAGS,
+ BARE_URLS,
+ MISSING_CRATE_LEVEL_DOCS,
+ ]
+});
+
+pub(crate) fn register_lints(_sess: &Session, lint_store: &mut LintStore) {
+ lint_store.register_lints(&**RUSTDOC_LINTS);
+ lint_store.register_group(
+ true,
+ "rustdoc::all",
+ Some("rustdoc"),
+ RUSTDOC_LINTS.iter().map(|&lint| LintId::of(lint)).collect(),
+ );
+ for lint in &*RUSTDOC_LINTS {
+ let name = lint.name_lower();
+ lint_store.register_renamed(&name.replace("rustdoc::", ""), &name);
+ }
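+ // As a result, e.g. `#![warn(broken_intra_doc_links)]` keeps working but nudges users
+ // towards `rustdoc::broken_intra_doc_links` (illustrative example).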
+ lint_store
+ .register_renamed("intra_doc_link_resolution_failure", "rustdoc::broken_intra_doc_links");
+ lint_store.register_renamed("non_autolinks", "rustdoc::bare_urls");
+ lint_store.register_renamed("rustdoc::non_autolinks", "rustdoc::bare_urls");
+}
diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs
new file mode 100644
index 000000000..0b557ef24
--- /dev/null
+++ b/src/librustdoc/markdown.rs
@@ -0,0 +1,151 @@
+use std::fmt::Write as _;
+use std::fs::{create_dir_all, read_to_string, File};
+use std::io::prelude::*;
+use std::path::Path;
+
+use rustc_span::edition::Edition;
+use rustc_span::source_map::DUMMY_SP;
+use rustc_span::Symbol;
+
+use crate::config::{Options, RenderOptions};
+use crate::doctest::{Collector, GlobalTestOptions};
+use crate::html::escape::Escape;
+use crate::html::markdown;
+use crate::html::markdown::{
+ find_testable_code, ErrorCodes, HeadingOffset, IdMap, Markdown, MarkdownWithToc,
+};
+
+/// Separate any lines at the start of the file that begin with `# ` or `%`.
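+///
+/// An illustrative sketch of the behaviour (not a doctest that runs here):
+///
+/// ```ignore (illustrative)
+/// let (metadata, rest) = extract_leading_metadata("% Title\n# Author\nBody text");
+/// assert_eq!(metadata, vec!["Title", "Author"]);
+/// assert_eq!(rest, "Body text");
+/// ```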
+fn extract_leading_metadata(s: &str) -> (Vec<&str>, &str) {
+ let mut metadata = Vec::new();
+ let mut count = 0;
+
+ for line in s.lines() {
+ if line.starts_with("# ") || line.starts_with('%') {
+ // trim the whitespace after the symbol
+ metadata.push(line[1..].trim_start());
+ count += line.len() + 1;
+ } else {
+ return (metadata, &s[count..]);
+ }
+ }
+
+ // if we're here, then all lines were metadata `# ` or `%` lines.
+ (metadata, "")
+}
+
+/// Render `input` (e.g., "foo.md") into an HTML file in `output`
+/// (e.g., output = "bar" => "bar/foo.html").
+pub(crate) fn render<P: AsRef<Path>>(
+ input: P,
+ options: RenderOptions,
+ edition: Edition,
+) -> Result<(), String> {
+ if let Err(e) = create_dir_all(&options.output) {
+ return Err(format!("{}: {}", options.output.display(), e));
+ }
+
+ let input = input.as_ref();
+ let mut output = options.output;
+ output.push(input.file_name().unwrap());
+ output.set_extension("html");
+
+ let mut css = String::new();
+ for name in &options.markdown_css {
+ write!(css, r#"<link rel="stylesheet" type="text/css" href="{name}">"#)
+ .expect("Writing to a String can't fail");
+ }
+
+ let input_str = read_to_string(input).map_err(|err| format!("{}: {}", input.display(), err))?;
+ let playground_url = options.markdown_playground_url.or(options.playground_url);
+ let playground = playground_url.map(|url| markdown::Playground { crate_name: None, url });
+
+ let mut out = File::create(&output).map_err(|e| format!("{}: {}", output.display(), e))?;
+
+ let (metadata, text) = extract_leading_metadata(&input_str);
+ if metadata.is_empty() {
+ return Err("invalid markdown file: no initial lines starting with `# ` or `%`".to_owned());
+ }
+ let title = metadata[0];
+
+ let mut ids = IdMap::new();
+ let error_codes = ErrorCodes::from(options.unstable_features.is_nightly_build());
+ let text = if !options.markdown_no_toc {
+ MarkdownWithToc(text, &mut ids, error_codes, edition, &playground).into_string()
+ } else {
+ Markdown {
+ content: text,
+ links: &[],
+ ids: &mut ids,
+ error_codes,
+ edition,
+ playground: &playground,
+ heading_offset: HeadingOffset::H1,
+ }
+ .into_string()
+ };
+
+ let err = write!(
+ &mut out,
+ r#"<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <meta name="generator" content="rustdoc">
+ <title>{title}</title>
+
+ {css}
+ {in_header}
+</head>
+<body class="rustdoc">
+ <!--[if lte IE 8]>
+ <div class="warning">
+ This old browser is unsupported and will most likely display funky
+ things.
+ </div>
+ <![endif]-->
+
+ {before_content}
+ <h1 class="title">{title}</h1>
+ {text}
+ {after_content}
+</body>
+</html>"#,
+ title = Escape(title),
+ css = css,
+ in_header = options.external_html.in_header,
+ before_content = options.external_html.before_content,
+ text = text,
+ after_content = options.external_html.after_content,
+ );
+
+ match err {
+ Err(e) => Err(format!("cannot write to `{}`: {}", output.display(), e)),
+ Ok(_) => Ok(()),
+ }
+}
+
+/// Runs any tests/code examples in the markdown file `input`.
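+///
+/// This is what backs invocations such as `rustdoc --test README.md` (illustrative
+/// command line).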
+pub(crate) fn test(options: Options) -> Result<(), String> {
+ let input_str = read_to_string(&options.input)
+ .map_err(|err| format!("{}: {}", options.input.display(), err))?;
+ let mut opts = GlobalTestOptions::default();
+ opts.no_crate_inject = true;
+ let mut collector = Collector::new(
+ Symbol::intern(&options.input.display().to_string()),
+ options.clone(),
+ true,
+ opts,
+ None,
+ Some(options.input),
+ options.enable_per_target_ignores,
+ );
+ collector.set_position(DUMMY_SP);
+ let codes = ErrorCodes::from(options.render_options.unstable_features.is_nightly_build());
+
+ find_testable_code(&input_str, &mut collector, codes, options.enable_per_target_ignores, None);
+
+ crate::doctest::run_tests(options.test_args, options.nocapture, collector.tests);
+ Ok(())
+}
diff --git a/src/librustdoc/passes/bare_urls.rs b/src/librustdoc/passes/bare_urls.rs
new file mode 100644
index 000000000..392e26ea6
--- /dev/null
+++ b/src/librustdoc/passes/bare_urls.rs
@@ -0,0 +1,112 @@
+//! Detects links that are not linkified, e.g., in Markdown such as `Go to https://example.com/.`
+//! Suggests wrapping the link with angle brackets: `Go to <https://example.com/>.` to linkify it.
+use super::Pass;
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::html::markdown::main_body_opts;
+use crate::visit::DocVisitor;
+use core::ops::Range;
+use pulldown_cmark::{Event, Parser, Tag};
+use regex::Regex;
+use rustc_errors::Applicability;
+use std::mem;
+use std::sync::LazyLock;
+
+pub(crate) const CHECK_BARE_URLS: Pass = Pass {
+ name: "check-bare-urls",
+ run: check_bare_urls,
+ description: "detects URLs that are not hyperlinks",
+};
+
+static URL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
+ Regex::new(concat!(
+ r"https?://", // url scheme
+ r"([-a-zA-Z0-9@:%._\+~#=]{2,256}\.)+", // one or more subdomains
+ r"[a-zA-Z]{2,63}", // root domain
+ r"\b([-a-zA-Z0-9@:%_\+.~#?&/=]*)" // optional query or url fragments
+ ))
+ .expect("failed to build regex")
+});
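+
+// Illustrative matches for the pattern above (not an exhaustive specification):
+// matched:     "https://example.com/path?x=1", "http://docs.rs"
+// not matched: "example.com" (no scheme), "ftp://host" (non-HTTP scheme)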
+
+struct BareUrlsLinter<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+}
+
+impl<'a, 'tcx> BareUrlsLinter<'a, 'tcx> {
+ fn find_raw_urls(
+ &self,
+ text: &str,
+ range: Range<usize>,
+ f: &impl Fn(&DocContext<'_>, &str, &str, Range<usize>),
+ ) {
+ trace!("looking for raw urls in {}", text);
+ // For now, we only check "full" URLs (meaning, starting with "http://" or "https://").
+ for match_ in URL_REGEX.find_iter(text) {
+ let url = match_.as_str();
+ let url_range = match_.range();
+ f(
+ self.cx,
+ "this URL is not a hyperlink",
+ url,
+ Range { start: range.start + url_range.start, end: range.start + url_range.end },
+ );
+ }
+ }
+}
+
+pub(crate) fn check_bare_urls(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ BareUrlsLinter { cx }.visit_crate(&krate);
+ krate
+}
+
+impl<'a, 'tcx> DocVisitor for BareUrlsLinter<'a, 'tcx> {
+ fn visit_item(&mut self, item: &Item) {
+ let Some(hir_id) = DocContext::as_local_hir_id(self.cx.tcx, item.item_id)
+ else {
+ // If non-local, no need to check anything.
+ return;
+ };
+ let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ if !dox.is_empty() {
+ let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
+ let sp = super::source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
+ .unwrap_or_else(|| item.attr_span(cx.tcx));
+ cx.tcx.struct_span_lint_hir(crate::lint::BARE_URLS, hir_id, sp, |lint| {
+ lint.build(msg)
+ .note("bare URLs are not automatically turned into clickable links")
+ .span_suggestion(
+ sp,
+ "use an automatic link instead",
+ format!("<{}>", url),
+ Applicability::MachineApplicable,
+ )
+ .emit();
+ });
+ };
+
+ let mut p = Parser::new_ext(&dox, main_body_opts()).into_offset_iter();
+
+ while let Some((event, range)) = p.next() {
+ match event {
+ Event::Text(s) => self.find_raw_urls(&s, range, &report_diag),
+ // We don't want to check the text inside code blocks or links.
+ Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link(..))) => {
+ while let Some((event, _)) = p.next() {
+ match event {
+ Event::End(end)
+ if mem::discriminant(&end) == mem::discriminant(&tag) =>
+ {
+ break;
+ }
+ _ => {}
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+
+ self.visit_item_recur(item)
+ }
+}
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
new file mode 100644
index 000000000..4c6e3eb04
--- /dev/null
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -0,0 +1,276 @@
+//! Calculates information used for the --show-coverage flag.
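+//!
+//! Illustrative invocations (the flag is unstable, so a nightly toolchain and
+//! `-Z unstable-options` are needed): `rustdoc -Z unstable-options --show-coverage src/lib.rs`,
+//! or additionally `--output-format json` for the machine-readable variant.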
+use crate::clean;
+use crate::core::DocContext;
+use crate::html::markdown::{find_testable_code, ErrorCodes};
+use crate::passes::check_doc_test_visibility::{should_have_doc_example, Tests};
+use crate::passes::Pass;
+use crate::visit::DocVisitor;
+use rustc_hir as hir;
+use rustc_lint::builtin::MISSING_DOCS;
+use rustc_middle::lint::LintLevelSource;
+use rustc_middle::ty::DefIdTree;
+use rustc_session::lint;
+use rustc_span::FileName;
+use serde::Serialize;
+
+use std::collections::BTreeMap;
+use std::ops;
+
+pub(crate) const CALCULATE_DOC_COVERAGE: Pass = Pass {
+ name: "calculate-doc-coverage",
+ run: calculate_doc_coverage,
+ description: "counts the number of items with and without documentation",
+};
+
+fn calculate_doc_coverage(krate: clean::Crate, ctx: &mut DocContext<'_>) -> clean::Crate {
+ let mut calc = CoverageCalculator { items: Default::default(), ctx };
+ calc.visit_crate(&krate);
+
+ calc.print_results();
+
+ krate
+}
+
+#[derive(Default, Copy, Clone, Serialize, Debug)]
+struct ItemCount {
+ total: u64,
+ with_docs: u64,
+ total_examples: u64,
+ with_examples: u64,
+}
+
+impl ItemCount {
+ fn count_item(
+ &mut self,
+ has_docs: bool,
+ has_doc_example: bool,
+ should_have_doc_examples: bool,
+ should_have_docs: bool,
+ ) {
+ if has_docs || should_have_docs {
+ self.total += 1;
+ }
+
+ if has_docs {
+ self.with_docs += 1;
+ }
+ if should_have_doc_examples || has_doc_example {
+ self.total_examples += 1;
+ }
+ if has_doc_example {
+ self.with_examples += 1;
+ }
+ }
+
+ fn percentage(&self) -> Option<f64> {
+ if self.total > 0 {
+ Some((self.with_docs as f64 * 100.0) / self.total as f64)
+ } else {
+ None
+ }
+ }
+
+ fn examples_percentage(&self) -> Option<f64> {
+ if self.total_examples > 0 {
+ Some((self.with_examples as f64 * 100.0) / self.total_examples as f64)
+ } else {
+ None
+ }
+ }
+}
+
+impl ops::Sub for ItemCount {
+ type Output = Self;
+
+ fn sub(self, rhs: Self) -> Self {
+ ItemCount {
+ total: self.total - rhs.total,
+ with_docs: self.with_docs - rhs.with_docs,
+ total_examples: self.total_examples - rhs.total_examples,
+ with_examples: self.with_examples - rhs.with_examples,
+ }
+ }
+}
+
+impl ops::AddAssign for ItemCount {
+ fn add_assign(&mut self, rhs: Self) {
+ self.total += rhs.total;
+ self.with_docs += rhs.with_docs;
+ self.total_examples += rhs.total_examples;
+ self.with_examples += rhs.with_examples;
+ }
+}
+
+struct CoverageCalculator<'a, 'b> {
+ items: BTreeMap<FileName, ItemCount>,
+ ctx: &'a mut DocContext<'b>,
+}
+
+fn limit_filename_len(filename: String) -> String {
+ let nb_chars = filename.chars().count();
+ if nb_chars > 35 {
+ "...".to_string()
+ + &filename[filename.char_indices().nth(nb_chars - 32).map(|x| x.0).unwrap_or(0)..]
+ } else {
+ filename
+ }
+}
+
+impl<'a, 'b> CoverageCalculator<'a, 'b> {
+ fn to_json(&self) -> String {
+ serde_json::to_string(
+ &self
+ .items
+ .iter()
+ .map(|(k, v)| (k.prefer_local().to_string(), v))
+ .collect::<BTreeMap<String, &ItemCount>>(),
+ )
+ .expect("failed to convert JSON data to string")
+ }
+
+ fn print_results(&self) {
+ let output_format = self.ctx.output_format;
+ if output_format.is_json() {
+ println!("{}", self.to_json());
+ return;
+ }
+ let mut total = ItemCount::default();
+
+ fn print_table_line() {
+ println!("+-{0:->35}-+-{0:->10}-+-{0:->10}-+-{0:->10}-+-{0:->10}-+", "");
+ }
+
+ fn print_table_record(
+ name: &str,
+ count: ItemCount,
+ percentage: f64,
+ examples_percentage: f64,
+ ) {
+ println!(
+ "| {:<35} | {:>10} | {:>9.1}% | {:>10} | {:>9.1}% |",
+ name, count.with_docs, percentage, count.with_examples, examples_percentage,
+ );
+ }
+
+ print_table_line();
+ println!(
+ "| {:<35} | {:>10} | {:>10} | {:>10} | {:>10} |",
+ "File", "Documented", "Percentage", "Examples", "Percentage",
+ );
+ print_table_line();
+
+ for (file, &count) in &self.items {
+ if let Some(percentage) = count.percentage() {
+ print_table_record(
+ &limit_filename_len(file.prefer_local().to_string_lossy().into()),
+ count,
+ percentage,
+ count.examples_percentage().unwrap_or(0.),
+ );
+
+ total += count;
+ }
+ }
+
+ print_table_line();
+ print_table_record(
+ "Total",
+ total,
+ total.percentage().unwrap_or(0.0),
+ total.examples_percentage().unwrap_or(0.0),
+ );
+ print_table_line();
+ }
+}
+
+impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
+ fn visit_item(&mut self, i: &clean::Item) {
+ if !i.item_id.is_local() {
+ // Non-local items are skipped because they can be out of the user's control,
+ // especially in the case of trait impls, which rustdoc eagerly inlines.
+ return;
+ }
+
+ match *i.kind {
+ clean::StrippedItem(..) => {
+ // don't count items in stripped modules
+ return;
+ }
+ // docs on `use` and `extern crate` statements are not displayed, so they're not
+ // worth counting
+ clean::ImportItem(..) | clean::ExternCrateItem { .. } => {}
+ // Don't count trait impls: the missing-docs lint doesn't, so we shouldn't either.
+ // Inherent impls *can* be documented, and those docs show up, but in most cases it
+ // doesn't make sense, as all methods on a type live in a single impl block.
+ clean::ImplItem(_) => {}
+ _ => {
+ let has_docs = !i.attrs.doc_strings.is_empty();
+ let mut tests = Tests { found_tests: 0 };
+
+ find_testable_code(
+ &i.attrs.collapsed_doc_value().unwrap_or_default(),
+ &mut tests,
+ ErrorCodes::No,
+ false,
+ None,
+ );
+
+ let filename = i.span(self.ctx.tcx).filename(self.ctx.sess());
+ let has_doc_example = tests.found_tests != 0;
+ // The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
+ // would presumably panic if a fake `DefIndex` were passed.
+ let hir_id = self
+ .ctx
+ .tcx
+ .hir()
+ .local_def_id_to_hir_id(i.item_id.expect_def_id().expect_local());
+ let (level, source) = self.ctx.tcx.lint_level_at_node(MISSING_DOCS, hir_id);
+
+ // In case we have:
+ //
+ // ```
+ // enum Foo { Bar(u32) }
+ // // or:
+ // struct Bar(u32);
+ // ```
+ //
+ // there is no need to require documentation on the fields of tuple variants and
+ // tuple structs.
+ let should_be_ignored = i
+ .item_id
+ .as_def_id()
+ .and_then(|def_id| self.ctx.tcx.opt_parent(def_id))
+ .and_then(|def_id| self.ctx.tcx.hir().get_if_local(def_id))
+ .map(|node| {
+ matches!(
+ node,
+ hir::Node::Variant(hir::Variant {
+ data: hir::VariantData::Tuple(_, _),
+ ..
+ }) | hir::Node::Item(hir::Item {
+ kind: hir::ItemKind::Struct(hir::VariantData::Tuple(_, _), _),
+ ..
+ })
+ )
+ })
+ .unwrap_or(false);
+
+ // `missing_docs` is allow-by-default, so don't treat this as ignoring the item
+ // unless the user had an explicit `allow`.
+ //
+ let should_have_docs = !should_be_ignored
+ && (level != lint::Level::Allow || matches!(source, LintLevelSource::Default));
+
+ debug!("counting {:?} {:?} in {:?}", i.type_(), i.name, filename);
+ self.items.entry(filename).or_default().count_item(
+ has_docs,
+ has_doc_example,
+ should_have_doc_example(self.ctx, i),
+ should_have_docs,
+ );
+ }
+ }
+
+ self.visit_item_recur(i)
+ }
+}
diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs
new file mode 100644
index 000000000..0172ef570
--- /dev/null
+++ b/src/librustdoc/passes/check_code_block_syntax.rs
@@ -0,0 +1,205 @@
+//! Validates syntax inside Rust code blocks (\`\`\`rust).
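+//!
+//! For example (illustrative), a doc comment whose fenced Rust block is empty or
+//! contains something unparsable such as `let x = ;` is reported through the
+//! `invalid_rust_codeblocks` lint.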
+use rustc_data_structures::sync::{Lock, Lrc};
+use rustc_errors::{
+ emitter::Emitter, Applicability, Diagnostic, Handler, LazyFallbackBundle, LintDiagnosticBuilder,
+};
+use rustc_parse::parse_stream_from_source_str;
+use rustc_session::parse::ParseSess;
+use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::{FileName, InnerSpan, DUMMY_SP};
+
+use crate::clean;
+use crate::core::DocContext;
+use crate::html::markdown::{self, RustCodeBlock};
+use crate::passes::Pass;
+use crate::visit::DocVisitor;
+
+pub(crate) const CHECK_CODE_BLOCK_SYNTAX: Pass = Pass {
+ name: "check-code-block-syntax",
+ run: check_code_block_syntax,
+ description: "validates syntax inside Rust code blocks",
+};
+
+pub(crate) fn check_code_block_syntax(
+ krate: clean::Crate,
+ cx: &mut DocContext<'_>,
+) -> clean::Crate {
+ SyntaxChecker { cx }.visit_crate(&krate);
+ krate
+}
+
+struct SyntaxChecker<'a, 'tcx> {
+ cx: &'a DocContext<'tcx>,
+}
+
+impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
+ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeBlock) {
+ let buffer = Lrc::new(Lock::new(Buffer::default()));
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
+
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let source = dox[code_block.code].to_owned();
+ let sess = ParseSess::with_span_handler(handler, sm);
+
+ let edition = code_block.lang_string.edition.unwrap_or_else(|| self.cx.tcx.sess.edition());
+ let expn_data = ExpnData::default(
+ ExpnKind::AstPass(AstPass::TestHarness),
+ DUMMY_SP,
+ edition,
+ None,
+ None,
+ );
+ let expn_id =
+ self.cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
+ let span = DUMMY_SP.fresh_expansion(expn_id);
+
+ let is_empty = rustc_driver::catch_fatal_errors(|| {
+ parse_stream_from_source_str(
+ FileName::Custom(String::from("doctest")),
+ source,
+ &sess,
+ Some(span),
+ )
+ .is_empty()
+ })
+ .unwrap_or(false);
+ let buffer = buffer.borrow();
+
+ if !buffer.has_errors && !is_empty {
+ // No errors in a non-empty program.
+ return;
+ }
+
+ let Some(local_id) = item.item_id.as_def_id().and_then(|x| x.as_local())
+ else {
+ // We don't need to check the syntax of other crates, so returning
+ // without doing anything is fine.
+ return;
+ };
+
+ let hir_id = self.cx.tcx.hir().local_def_id_to_hir_id(local_id);
+ let empty_block = code_block.lang_string == Default::default() && code_block.is_fenced;
+ let is_ignore = code_block.lang_string.ignore != markdown::Ignore::None;
+
+ // The span and whether it is precise or not.
+ let (sp, precise_span) = match super::source_span_for_markdown_range(
+ self.cx.tcx,
+ dox,
+ &code_block.range,
+ &item.attrs,
+ ) {
+ Some(sp) => (sp, true),
+ None => (item.attr_span(self.cx.tcx), false),
+ };
+
+ // Closure that uses the lint to start a new diagnostic and adds
+ // a suggestion to it when needed.
+ let diag_builder = |lint: LintDiagnosticBuilder<'_, ()>| {
+ let explanation = if is_ignore {
+ "`ignore` code blocks require valid Rust code for syntax highlighting; \
+ mark blocks that do not contain Rust code as text"
+ } else {
+ "mark blocks that do not contain Rust code as text"
+ };
+ let msg = if buffer.has_errors {
+ "could not parse code block as Rust code"
+ } else {
+ "Rust code block is empty"
+ };
+ let mut diag = lint.build(msg);
+
+ if precise_span {
+ if is_ignore {
+ // Giving an accurate suggestion is hard because `ignore` might not have come first in the list,
+ // so just give a `help` instead.
+ diag.span_help(
+ sp.from_inner(InnerSpan::new(0, 3)),
+ &format!("{}: ```text", explanation),
+ );
+ } else if empty_block {
+ diag.span_suggestion(
+ sp.from_inner(InnerSpan::new(0, 3)).shrink_to_hi(),
+ explanation,
+ "text",
+ Applicability::MachineApplicable,
+ );
+ }
+ } else if empty_block || is_ignore {
+ diag.help(&format!("{}: ```text", explanation));
+ }
+
+ // FIXME(#67563): Provide more context for these errors by displaying the spans inline.
+ for message in buffer.messages.iter() {
+ diag.note(message);
+ }
+
+ diag.emit();
+ };
+
+ // Finally build and emit the completed diagnostic.
+ // All points of divergence have been handled earlier so this can be
+ // done the same way whether the span is precise or not.
+ self.cx.tcx.struct_span_lint_hir(
+ crate::lint::INVALID_RUST_CODEBLOCKS,
+ hir_id,
+ sp,
+ diag_builder,
+ );
+ }
+}
+
+impl<'a, 'tcx> DocVisitor for SyntaxChecker<'a, 'tcx> {
+ fn visit_item(&mut self, item: &clean::Item) {
+ if let Some(dox) = &item.attrs.collapsed_doc_value() {
+ let sp = item.attr_span(self.cx.tcx);
+ let extra = crate::html::markdown::ExtraInfo::new_did(
+ self.cx.tcx,
+ item.item_id.expect_def_id(),
+ sp,
+ );
+ for code_block in markdown::rust_code_blocks(dox, &extra) {
+ self.check_rust_syntax(item, dox, code_block);
+ }
+ }
+
+ self.visit_item_recur(item)
+ }
+}
+
+#[derive(Default)]
+struct Buffer {
+ messages: Vec<String>,
+ has_errors: bool,
+}
+
+struct BufferEmitter {
+ buffer: Lrc<Lock<Buffer>>,
+ fallback_bundle: LazyFallbackBundle,
+}
+
+impl Emitter for BufferEmitter {
+ fn emit_diagnostic(&mut self, diag: &Diagnostic) {
+ let mut buffer = self.buffer.borrow_mut();
+ // FIXME(davidtwco): need to support translation here eventually
+ buffer.messages.push(format!("error from rustc: {}", diag.message[0].0.expect_str()));
+ if diag.is_error() {
+ buffer.has_errors = true;
+ }
+ }
+
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+
+ fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
+ None
+ }
+
+ fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
+ &**self.fallback_bundle
+ }
+}
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
new file mode 100644
index 000000000..e86f90833
--- /dev/null
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -0,0 +1,145 @@
+//! Looks for items missing (or incorrectly having) doctests.
+//!
+//! This pass is overloaded and runs two different lints.
+//!
+//! - MISSING_DOC_CODE_EXAMPLES: this lint is **UNSTABLE** and looks for public items missing doctests.
+//! - PRIVATE_DOC_TESTS: this lint is **STABLE** and looks for private items with doctests.
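+//!
+//! For example (illustrative): a public function documented without any code block can
+//! trip `missing_doc_code_examples` on nightly builds, while a doctest written on a
+//! private item trips `private_doc_tests`.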
+
+use super::Pass;
+use crate::clean;
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::html::markdown::{find_testable_code, ErrorCodes, Ignore, LangString};
+use crate::visit::DocVisitor;
+use crate::visit_ast::inherits_doc_hidden;
+use rustc_hir as hir;
+use rustc_middle::lint::LintLevelSource;
+use rustc_session::lint;
+use rustc_span::symbol::sym;
+
+pub(crate) const CHECK_DOC_TEST_VISIBILITY: Pass = Pass {
+ name: "check_doc_test_visibility",
+ run: check_doc_test_visibility,
+ description: "run various visibility-related lints on doctests",
+};
+
+struct DocTestVisibilityLinter<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+}
+
+pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let mut coll = DocTestVisibilityLinter { cx };
+ coll.visit_crate(&krate);
+ krate
+}
+
+impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> {
+ fn visit_item(&mut self, item: &Item) {
+ let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+
+ look_for_tests(self.cx, &dox, item);
+
+ self.visit_item_recur(item)
+ }
+}
+
+pub(crate) struct Tests {
+ pub(crate) found_tests: usize,
+}
+
+impl crate::doctest::Tester for Tests {
+ fn add_test(&mut self, _: String, config: LangString, _: usize) {
+ if config.rust && config.ignore == Ignore::None {
+ self.found_tests += 1;
+ }
+ }
+}
+
+pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -> bool {
+ if !cx.cache.access_levels.is_public(item.item_id.expect_def_id())
+ || matches!(
+ *item.kind,
+ clean::StructFieldItem(_)
+ | clean::VariantItem(_)
+ | clean::AssocConstItem(..)
+ | clean::AssocTypeItem(..)
+ | clean::TypedefItem(_)
+ | clean::StaticItem(_)
+ | clean::ConstantItem(_)
+ | clean::ExternCrateItem { .. }
+ | clean::ImportItem(_)
+ | clean::PrimitiveItem(_)
+ | clean::KeywordItem
+ // check for trait impl
+ | clean::ImplItem(box clean::Impl { trait_: Some(_), .. })
+ )
+ {
+ return false;
+ }
+
+ // The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
+ // would presumably panic if a fake `DefIndex` were passed.
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(item.item_id.expect_def_id().expect_local());
+
+ // check if parent is trait impl
+ if let Some(parent_hir_id) = cx.tcx.hir().find_parent_node(hir_id) {
+ if let Some(parent_node) = cx.tcx.hir().find(parent_hir_id) {
+ if matches!(
+ parent_node,
+ hir::Node::Item(hir::Item {
+ kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }),
+ ..
+ })
+ ) {
+ return false;
+ }
+ }
+ }
+
+ if cx.tcx.hir().attrs(hir_id).lists(sym::doc).has_word(sym::hidden)
+ || inherits_doc_hidden(cx.tcx, hir_id)
+ || cx.tcx.hir().span(hir_id).in_derive_expansion()
+ {
+ return false;
+ }
+ let (level, source) = cx.tcx.lint_level_at_node(crate::lint::MISSING_DOC_CODE_EXAMPLES, hir_id);
+ level != lint::Level::Allow || matches!(source, LintLevelSource::Default)
+}
+
+pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) {
+ let Some(hir_id) = DocContext::as_local_hir_id(cx.tcx, item.item_id)
+ else {
+ // If non-local, no need to check anything.
+ return;
+ };
+
+ let mut tests = Tests { found_tests: 0 };
+
+ find_testable_code(dox, &mut tests, ErrorCodes::No, false, None);
+
+ if tests.found_tests == 0 && cx.tcx.sess.is_nightly_build() {
+ if should_have_doc_example(cx, item) {
+ debug!("reporting error for {:?} (hir_id={:?})", item, hir_id);
+ let sp = item.attr_span(cx.tcx);
+ cx.tcx.struct_span_lint_hir(
+ crate::lint::MISSING_DOC_CODE_EXAMPLES,
+ hir_id,
+ sp,
+ |lint| {
+ lint.build("missing code example in this documentation").emit();
+ },
+ );
+ }
+ } else if tests.found_tests > 0
+ && !cx.cache.access_levels.is_exported(item.item_id.expect_def_id())
+ {
+ cx.tcx.struct_span_lint_hir(
+ crate::lint::PRIVATE_DOC_TESTS,
+ hir_id,
+ item.attr_span(cx.tcx),
+ |lint| {
+ lint.build("documentation test in private item").emit();
+ },
+ );
+ }
+}
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
new file mode 100644
index 000000000..7d7a63c53
--- /dev/null
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -0,0 +1,2161 @@
+//! This module implements [RFC 1946]: Intra-rustdoc-links
+//!
+//! [RFC 1946]: https://github.com/rust-lang/rfcs/blob/master/text/1946-intra-rustdoc-links.md
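+//!
+//! For example (an illustrative item, not taken from this crate):
+//!
+//! ```ignore (illustrative)
+//! /// Returns a new [`Vec`]; see also [`std::io::Error`].
+//! pub fn make() -> Vec<u8> { Vec::new() }
+//! ```
+//!
+//! The bracketed paths are resolved by this pass and recorded so that the HTML
+//! renderer can later turn them into links.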
+
+use pulldown_cmark::LinkType;
+use rustc_ast::util::comments::may_have_doc_links;
+use rustc_data_structures::{
+ fx::{FxHashMap, FxHashSet},
+ intern::Interned,
+};
+use rustc_errors::{Applicability, Diagnostic};
+use rustc_hir::def::Namespace::*;
+use rustc_hir::def::{DefKind, Namespace, PerNS};
+use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
+use rustc_hir::Mutability;
+use rustc_middle::ty::{DefIdTree, Ty, TyCtxt};
+use rustc_middle::{bug, ty};
+use rustc_resolve::ParentScope;
+use rustc_session::lint::Lint;
+use rustc_span::hygiene::MacroKind;
+use rustc_span::symbol::{sym, Ident, Symbol};
+use rustc_span::BytePos;
+use smallvec::{smallvec, SmallVec};
+
+use std::borrow::Cow;
+use std::mem;
+use std::ops::Range;
+
+use crate::clean::{self, utils::find_nearest_parent_module};
+use crate::clean::{Crate, Item, ItemId, ItemLink, PrimitiveType};
+use crate::core::DocContext;
+use crate::html::markdown::{markdown_links, MarkdownLink};
+use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
+use crate::passes::Pass;
+use crate::visit::DocVisitor;
+
+mod early;
+pub(crate) use early::early_resolve_intra_doc_links;
+
+pub(crate) const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
+ name: "collect-intra-doc-links",
+ run: collect_intra_doc_links,
+ description: "resolves intra-doc links",
+};
+
+fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let mut collector =
+ LinkCollector { cx, mod_ids: Vec::new(), visited_links: FxHashMap::default() };
+ collector.visit_crate(&krate);
+ krate
+}
+
+#[derive(Copy, Clone, Debug, Hash)]
+enum Res {
+ Def(DefKind, DefId),
+ Primitive(PrimitiveType),
+}
+
+type ResolveRes = rustc_hir::def::Res<rustc_ast::NodeId>;
+
+impl Res {
+ fn descr(self) -> &'static str {
+ match self {
+ Res::Def(kind, id) => ResolveRes::Def(kind, id).descr(),
+ Res::Primitive(_) => "builtin type",
+ }
+ }
+
+ fn article(self) -> &'static str {
+ match self {
+ Res::Def(kind, id) => ResolveRes::Def(kind, id).article(),
+ Res::Primitive(_) => "a",
+ }
+ }
+
+ fn name(self, tcx: TyCtxt<'_>) -> Symbol {
+ match self {
+ Res::Def(_, id) => tcx.item_name(id),
+ Res::Primitive(prim) => prim.as_sym(),
+ }
+ }
+
+ fn def_id(self, tcx: TyCtxt<'_>) -> DefId {
+ match self {
+ Res::Def(_, id) => id,
+ Res::Primitive(prim) => *PrimitiveType::primitive_locations(tcx).get(&prim).unwrap(),
+ }
+ }
+
+ fn from_def_id(tcx: TyCtxt<'_>, def_id: DefId) -> Res {
+ Res::Def(tcx.def_kind(def_id), def_id)
+ }
+
+ /// Used for error reporting.
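+ /// For example (illustrative), a link that resolved to a struct may get the
+ /// suggestion `struct@Foo`, a function `foo()`, and a macro `foo!`.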
+ fn disambiguator_suggestion(self) -> Suggestion {
+ let kind = match self {
+ Res::Primitive(_) => return Suggestion::Prefix("prim"),
+ Res::Def(kind, _) => kind,
+ };
+ if kind == DefKind::Macro(MacroKind::Bang) {
+ return Suggestion::Macro;
+ } else if kind == DefKind::Fn || kind == DefKind::AssocFn {
+ return Suggestion::Function;
+ } else if kind == DefKind::Field {
+ return Suggestion::RemoveDisambiguator;
+ }
+
+ let prefix = match kind {
+ DefKind::Struct => "struct",
+ DefKind::Enum => "enum",
+ DefKind::Trait => "trait",
+ DefKind::Union => "union",
+ DefKind::Mod => "mod",
+ DefKind::Const | DefKind::ConstParam | DefKind::AssocConst | DefKind::AnonConst => {
+ "const"
+ }
+ DefKind::Static(_) => "static",
+ DefKind::Macro(MacroKind::Derive) => "derive",
+ // Now handle things that don't have a specific disambiguator
+ _ => match kind
+ .ns()
+ .expect("tried to calculate a disambiguator for a def without a namespace?")
+ {
+ Namespace::TypeNS => "type",
+ Namespace::ValueNS => "value",
+ Namespace::MacroNS => "macro",
+ },
+ };
+
+ Suggestion::Prefix(prefix)
+ }
+}
+
+impl TryFrom<ResolveRes> for Res {
+ type Error = ();
+
+ fn try_from(res: ResolveRes) -> Result<Self, ()> {
+ use rustc_hir::def::Res::*;
+ match res {
+ Def(kind, id) => Ok(Res::Def(kind, id)),
+ PrimTy(prim) => Ok(Res::Primitive(PrimitiveType::from_hir(prim))),
+ // e.g. `#[derive]`
+ NonMacroAttr(..) | Err => Result::Err(()),
+ other => bug!("unrecognized res {:?}", other),
+ }
+ }
+}
+
+/// The link failed to resolve. [`resolution_failure`] should look to see if there's
+/// a more helpful error that can be given.
+#[derive(Debug)]
+struct UnresolvedPath<'a> {
+ /// Item on which the link is resolved, used for resolving `Self`.
+ item_id: ItemId,
+ /// The scope the link was resolved in.
+ module_id: DefId,
+ /// If part of the link resolved, this has the `Res`.
+ ///
+ /// In `[std::io::Error::x]`, `std::io::Error` would be a partial resolution.
+ partial_res: Option<Res>,
+ /// The remaining unresolved path segments.
+ ///
+ /// In `[std::io::Error::x]`, `x` would be unresolved.
+ unresolved: Cow<'a, str>,
+}
+
+#[derive(Debug)]
+enum ResolutionFailure<'a> {
+ /// This resolved, but with the wrong namespace.
+ WrongNamespace {
+ /// What the link resolved to.
+ res: Res,
+ /// The expected namespace for the resolution, determined from the link's disambiguator.
+ ///
+ /// E.g., for `[fn@Result]` this is [`Namespace::ValueNS`],
+ /// even though `Result`'s actual namespace is [`Namespace::TypeNS`].
+ expected_ns: Namespace,
+ },
+ NotResolved(UnresolvedPath<'a>),
+}
+
+#[derive(Clone, Copy, Debug)]
+enum MalformedGenerics {
+ /// This link has unbalanced angle brackets.
+ ///
+ /// For example, `Vec<T` should trigger this, as should `Vec<T>>`.
+ UnbalancedAngleBrackets,
+ /// The generics are not attached to a type.
+ ///
+ /// For example, `<T>` should trigger this.
+ ///
+ /// This is detected by checking if the path is empty after the generics are stripped.
+ MissingType,
+ /// The link uses fully-qualified syntax, which is currently unsupported.
+ ///
+ /// For example, `<Vec as IntoIterator>::into_iter` should trigger this.
+ ///
+ /// This is detected by checking if ` as ` (the keyword `as` with spaces around it) is inside
+ /// angle brackets.
+ HasFullyQualifiedSyntax,
+ /// The link has an invalid path separator.
+ ///
+ /// For example, `Vec:<T>:new()` should trigger this. Note that `Vec:new()` will **not**
+ /// trigger this because it has no generics and thus [`strip_generics_from_path`] will not be
+ /// called.
+ ///
+ /// Note that this will also **not** be triggered if the invalid path separator is inside angle
+ /// brackets because rustdoc mostly ignores what's inside angle brackets (except for
+ /// [`HasFullyQualifiedSyntax`](MalformedGenerics::HasFullyQualifiedSyntax)).
+ ///
+ /// This is detected by checking if there is a colon followed by a non-colon in the link.
+ InvalidPathSeparator,
+ /// The link has too many angle brackets.
+ ///
+ /// For example, `Vec<<T>>` should trigger this.
+ TooManyAngleBrackets,
+ /// The link has empty angle brackets.
+ ///
+ /// For example, `Vec<>` should trigger this.
+ EmptyAngleBrackets,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub(crate) enum UrlFragment {
+ Item(DefId),
+ UserWritten(String),
+}
+
+impl UrlFragment {
+ /// Render the fragment, including the leading `#`.
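+ /// For example (illustrative), an associated function with a body renders as
+ /// `#method.push`, a required trait method as `#tymethod.next`, and an enum
+ /// variant as `#variant.Ok`.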
+ pub(crate) fn render(&self, s: &mut String, tcx: TyCtxt<'_>) {
+ s.push('#');
+ match self {
+ &UrlFragment::Item(def_id) => {
+ let kind = match tcx.def_kind(def_id) {
+ DefKind::AssocFn => {
+ if tcx.impl_defaultness(def_id).has_value() {
+ "method."
+ } else {
+ "tymethod."
+ }
+ }
+ DefKind::AssocConst => "associatedconstant.",
+ DefKind::AssocTy => "associatedtype.",
+ DefKind::Variant => "variant.",
+ DefKind::Field => {
+ let parent_id = tcx.parent(def_id);
+ if tcx.def_kind(parent_id) == DefKind::Variant {
+ s.push_str("variant.");
+ s.push_str(tcx.item_name(parent_id).as_str());
+ ".field."
+ } else {
+ "structfield."
+ }
+ }
+ kind => bug!("unexpected associated item kind: {:?}", kind),
+ };
+ s.push_str(kind);
+ s.push_str(tcx.item_name(def_id).as_str());
+ }
+ UrlFragment::UserWritten(raw) => s.push_str(&raw),
+ }
+ }
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+struct ResolutionInfo {
+ item_id: ItemId,
+ module_id: DefId,
+ dis: Option<Disambiguator>,
+ path_str: String,
+ extra_fragment: Option<String>,
+}
+
+#[derive(Clone)]
+struct DiagnosticInfo<'a> {
+ item: &'a Item,
+ dox: &'a str,
+ ori_link: &'a str,
+ link_range: Range<usize>,
+}
+
+struct LinkCollector<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+ /// A stack of modules used to decide what scope to resolve in.
+ ///
+ /// The last module will be used if the parent scope of the current item is
+ /// unknown.
+ mod_ids: Vec<DefId>,
+ /// Cache the resolved links so we can avoid resolving (and emitting errors for) the same link.
+ /// The link will be `None` if it could not be resolved (i.e. the error was cached).
+ visited_links: FxHashMap<ResolutionInfo, Option<(Res, Option<UrlFragment>)>>,
+}
+
+impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
+ /// Given a full link, parse it as an [enum struct variant].
+ ///
+ /// In particular, this will return an error whenever there aren't three
+ /// full path segments left in the link.
+ ///
+ /// [enum struct variant]: rustc_hir::VariantData::Struct
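+ ///
+ /// For example (illustrative), `[Foo::Bar::baz]` has the three segments this expects
+ /// when `Foo` is an enum, `Bar` a struct variant of it, and `baz` a field of that variant.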
+ fn variant_field<'path>(
+ &self,
+ path_str: &'path str,
+ item_id: ItemId,
+ module_id: DefId,
+ ) -> Result<(Res, DefId), UnresolvedPath<'path>> {
+ let tcx = self.cx.tcx;
+ let no_res = || UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: None,
+ unresolved: path_str.into(),
+ };
+
+ debug!("looking for enum variant {}", path_str);
+ let mut split = path_str.rsplitn(3, "::");
+ let variant_field_name = split
+ .next()
+ .map(|f| Symbol::intern(f))
+ .expect("fold_item should ensure link is non-empty");
+ let variant_name =
+ // we're not sure this is a variant at all, so use the full string
+ // If there's no second component, the link looks like `[path]`.
+ // So there's no partial res and we should say the whole link failed to resolve.
+ split.next().map(|f| Symbol::intern(f)).ok_or_else(no_res)?;
+ let path = split
+ .next()
+ .map(|f| f.to_owned())
+ // If there's no third component, we saw `[a::b]` before and it failed to resolve.
+ // So there's no partial res.
+ .ok_or_else(no_res)?;
+ let ty_res = self.resolve_path(&path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
+
+ match ty_res {
+ Res::Def(DefKind::Enum, did) => match tcx.type_of(did).kind() {
+ ty::Adt(def, _) if def.is_enum() => {
+ if let Some(field) = def.all_fields().find(|f| f.name == variant_field_name) {
+ Ok((ty_res, field.did))
+ } else {
+ Err(UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: Some(Res::Def(DefKind::Enum, def.did())),
+ unresolved: variant_field_name.to_string().into(),
+ })
+ }
+ }
+ _ => unreachable!(),
+ },
+ _ => Err(UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: Some(ty_res),
+ unresolved: variant_name.to_string().into(),
+ }),
+ }
+ }
+
+ /// Given a primitive type, try to resolve an associated item.
+ fn resolve_primitive_associated_item(
+ &self,
+ prim_ty: PrimitiveType,
+ ns: Namespace,
+ item_name: Symbol,
+ ) -> Option<(Res, DefId)> {
+ let tcx = self.cx.tcx;
+
+ prim_ty.impls(tcx).find_map(|impl_| {
+ tcx.associated_items(impl_)
+ .find_by_name_and_namespace(tcx, Ident::with_dummy_span(item_name), ns, impl_)
+ .map(|item| (Res::Primitive(prim_ty), item.def_id))
+ })
+ }
+
+ fn resolve_self_ty(&self, path_str: &str, ns: Namespace, item_id: ItemId) -> Option<Res> {
+ if ns != TypeNS || path_str != "Self" {
+ return None;
+ }
+
+ let tcx = self.cx.tcx;
+ item_id
+ .as_def_id()
+ .map(|def_id| match tcx.def_kind(def_id) {
+ def_kind @ (DefKind::AssocFn
+ | DefKind::AssocConst
+ | DefKind::AssocTy
+ | DefKind::Variant
+ | DefKind::Field) => {
+ let parent_def_id = tcx.parent(def_id);
+ if def_kind == DefKind::Field && tcx.def_kind(parent_def_id) == DefKind::Variant
+ {
+ tcx.parent(parent_def_id)
+ } else {
+ parent_def_id
+ }
+ }
+ _ => def_id,
+ })
+ .and_then(|self_id| match tcx.def_kind(self_id) {
+ DefKind::Impl => self.def_id_to_res(self_id),
+ def_kind => Some(Res::Def(def_kind, self_id)),
+ })
+ }
+
+ /// Convenience wrapper around `resolve_rustdoc_path`.
+ ///
+ /// This also handles resolving `true` and `false` as booleans.
+ /// NOTE: `resolve_rustdoc_path` knows only about paths, not about types.
+ /// Associated items will never be resolved by this function.
+ fn resolve_path(
+ &self,
+ path_str: &str,
+ ns: Namespace,
+ item_id: ItemId,
+ module_id: DefId,
+ ) -> Option<Res> {
+ if let res @ Some(..) = self.resolve_self_ty(path_str, ns, item_id) {
+ return res;
+ }
+
+ // Resolver doesn't know about true, false, and types that aren't paths (e.g. `()`).
+ let result = self
+ .cx
+ .resolver_caches
+ .doc_link_resolutions
+ .get(&(Symbol::intern(path_str), ns, module_id))
+ .copied()
+ .unwrap_or_else(|| {
+ self.cx.enter_resolver(|resolver| {
+ let parent_scope =
+ ParentScope::module(resolver.expect_module(module_id), resolver);
+ resolver.resolve_rustdoc_path(path_str, ns, parent_scope)
+ })
+ })
+ .and_then(|res| res.try_into().ok())
+ .or_else(|| resolve_primitive(path_str, ns));
+ debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns);
+ result
+ }
+
+ /// Resolves a string as a path within a particular namespace. Returns an
+ /// optional URL fragment in the case of variants and methods.
+ fn resolve<'path>(
+ &mut self,
+ path_str: &'path str,
+ ns: Namespace,
+ item_id: ItemId,
+ module_id: DefId,
+ ) -> Result<(Res, Option<DefId>), UnresolvedPath<'path>> {
+ if let Some(res) = self.resolve_path(path_str, ns, item_id, module_id) {
+ return Ok(match res {
+ Res::Def(
+ DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Variant,
+ def_id,
+ ) => (Res::from_def_id(self.cx.tcx, self.cx.tcx.parent(def_id)), Some(def_id)),
+ _ => (res, None),
+ });
+ } else if ns == MacroNS {
+ return Err(UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: None,
+ unresolved: path_str.into(),
+ });
+ }
+
+ // Try looking for methods and associated items.
+ let mut split = path_str.rsplitn(2, "::");
+ // NB: `split`'s first element is always defined, even if the delimiter was not present.
+ // NB: `item_str` could be empty when resolving in the root namespace (e.g. `::std`).
+ let item_str = split.next().unwrap();
+ let item_name = Symbol::intern(item_str);
+ let path_root = split
+ .next()
+ .map(|f| f.to_owned())
+ // If there's no `::`, it's not an associated item.
+ // So we can be sure that `rustc_resolve` was accurate when it said it wasn't resolved.
+ .ok_or_else(|| {
+ debug!("found no `::`, assumming {} was correctly not in scope", item_name);
+ UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: None,
+ unresolved: item_str.into(),
+ }
+ })?;
+
+ // FIXME(#83862): this arbitrarily gives precedence to primitives over modules to support
+ // links to primitives when `#[doc(primitive)]` is present. It should give an ambiguity
+ // error instead and special case *only* modules with `#[doc(primitive)]`, not all
+ // primitives.
+ resolve_primitive(&path_root, TypeNS)
+ .or_else(|| self.resolve_path(&path_root, TypeNS, item_id, module_id))
+ .and_then(|ty_res| {
+ self.resolve_associated_item(ty_res, item_name, ns, module_id).map(Ok)
+ })
+ .unwrap_or_else(|| {
+ if ns == Namespace::ValueNS {
+ self.variant_field(path_str, item_id, module_id)
+ } else {
+ Err(UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res: None,
+ unresolved: path_root.into(),
+ })
+ }
+ })
+ .map(|(res, def_id)| (res, Some(def_id)))
+ }
+
+ /// Convert a DefId to a Res, where possible.
+ ///
+ /// This is used for resolving type aliases.
+ fn def_id_to_res(&self, ty_id: DefId) -> Option<Res> {
+ use PrimitiveType::*;
+ Some(match *self.cx.tcx.type_of(ty_id).kind() {
+ ty::Bool => Res::Primitive(Bool),
+ ty::Char => Res::Primitive(Char),
+ ty::Int(ity) => Res::Primitive(ity.into()),
+ ty::Uint(uty) => Res::Primitive(uty.into()),
+ ty::Float(fty) => Res::Primitive(fty.into()),
+ ty::Str => Res::Primitive(Str),
+ ty::Tuple(tys) if tys.is_empty() => Res::Primitive(Unit),
+ ty::Tuple(_) => Res::Primitive(Tuple),
+ ty::Array(..) => Res::Primitive(Array),
+ ty::Slice(_) => Res::Primitive(Slice),
+ ty::RawPtr(_) => Res::Primitive(RawPointer),
+ ty::Ref(..) => Res::Primitive(Reference),
+ ty::FnDef(..) => panic!("type alias to a function definition"),
+ ty::FnPtr(_) => Res::Primitive(Fn),
+ ty::Never => Res::Primitive(Never),
+ ty::Adt(ty::AdtDef(Interned(&ty::AdtDefData { did, .. }, _)), _) | ty::Foreign(did) => {
+ Res::from_def_id(self.cx.tcx, did)
+ }
+ ty::Projection(_)
+ | ty::Closure(..)
+ | ty::Generator(..)
+ | ty::GeneratorWitness(_)
+ | ty::Opaque(..)
+ | ty::Dynamic(..)
+ | ty::Param(_)
+ | ty::Bound(..)
+ | ty::Placeholder(_)
+ | ty::Infer(_)
+ | ty::Error(_) => return None,
+ })
+ }
+
+ /// Convert a PrimitiveType to a Ty, where possible.
+ ///
+ /// This is used for resolving trait impls for primitives
+ fn primitive_type_to_ty(&mut self, prim: PrimitiveType) -> Option<Ty<'tcx>> {
+ use PrimitiveType::*;
+ let tcx = self.cx.tcx;
+
+ // FIXME: Only simple types are supported here, see if we can support
+ // other types such as Tuple, Array, Slice, etc.
+ // See https://github.com/rust-lang/rust/issues/90703#issuecomment-1004263455
+ Some(tcx.mk_ty(match prim {
+ Bool => ty::Bool,
+ Str => ty::Str,
+ Char => ty::Char,
+ Never => ty::Never,
+ I8 => ty::Int(ty::IntTy::I8),
+ I16 => ty::Int(ty::IntTy::I16),
+ I32 => ty::Int(ty::IntTy::I32),
+ I64 => ty::Int(ty::IntTy::I64),
+ I128 => ty::Int(ty::IntTy::I128),
+ Isize => ty::Int(ty::IntTy::Isize),
+ F32 => ty::Float(ty::FloatTy::F32),
+ F64 => ty::Float(ty::FloatTy::F64),
+ U8 => ty::Uint(ty::UintTy::U8),
+ U16 => ty::Uint(ty::UintTy::U16),
+ U32 => ty::Uint(ty::UintTy::U32),
+ U64 => ty::Uint(ty::UintTy::U64),
+ U128 => ty::Uint(ty::UintTy::U128),
+ Usize => ty::Uint(ty::UintTy::Usize),
+ _ => return None,
+ }))
+ }
+
+ /// Resolve an associated item, returning its containing page's `Res`
+ /// and the fragment targeting the associated item on its page.
+ fn resolve_associated_item(
+ &mut self,
+ root_res: Res,
+ item_name: Symbol,
+ ns: Namespace,
+ module_id: DefId,
+ ) -> Option<(Res, DefId)> {
+ let tcx = self.cx.tcx;
+
+ match root_res {
+ Res::Primitive(prim) => {
+ self.resolve_primitive_associated_item(prim, ns, item_name).or_else(|| {
+ self.primitive_type_to_ty(prim)
+ .and_then(|ty| {
+ resolve_associated_trait_item(ty, module_id, item_name, ns, self.cx)
+ })
+ .map(|item| (root_res, item.def_id))
+ })
+ }
+ Res::Def(DefKind::TyAlias, did) => {
+ // Resolve the link on the type the alias points to.
+ // FIXME: if the associated item is defined directly on the type alias,
+ // it will show up on its documentation page, we should link there instead.
+ let res = self.def_id_to_res(did)?;
+ self.resolve_associated_item(res, item_name, ns, module_id)
+ }
+ Res::Def(
+ def_kind @ (DefKind::Struct | DefKind::Union | DefKind::Enum | DefKind::ForeignTy),
+ did,
+ ) => {
+ debug!("looking for associated item named {} for item {:?}", item_name, did);
+ // Checks if item_name is a variant of the `SomeItem` enum
+ if ns == TypeNS && def_kind == DefKind::Enum {
+ match tcx.type_of(did).kind() {
+ ty::Adt(adt_def, _) => {
+ for variant in adt_def.variants() {
+ if variant.name == item_name {
+ return Some((root_res, variant.def_id));
+ }
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ // Checks if item_name belongs to `impl SomeItem`
+ let assoc_item = tcx
+ .inherent_impls(did)
+ .iter()
+ .flat_map(|&imp| {
+ tcx.associated_items(imp).find_by_name_and_namespace(
+ tcx,
+ Ident::with_dummy_span(item_name),
+ ns,
+ imp,
+ )
+ })
+ .copied()
+ // There should only ever be one associated item that matches from any inherent impl
+ .next()
+ // Check if item_name belongs to `impl SomeTrait for SomeItem`
+ // FIXME(#74563): This gives precedence to `impl SomeItem`:
+ // Although having both would be ambiguous, use impl version for compatibility's sake.
+ // To handle that properly resolve() would have to support
+ // something like [`ambi_fn`](<SomeStruct as SomeTrait>::ambi_fn)
+ .or_else(|| {
+ resolve_associated_trait_item(
+ tcx.type_of(did),
+ module_id,
+ item_name,
+ ns,
+ self.cx,
+ )
+ });
+
+ debug!("got associated item {:?}", assoc_item);
+
+ if let Some(item) = assoc_item {
+ return Some((root_res, item.def_id));
+ }
+
+ if ns != Namespace::ValueNS {
+ return None;
+ }
+ debug!("looking for fields named {} for {:?}", item_name, did);
+ // FIXME: this doesn't really belong in `associated_item` (maybe `variant_field` is better?)
+ // NOTE: it's different from variant_field because it only resolves struct fields,
+ // not variant fields (2 path segments, not 3).
+ //
+ // We need to handle struct (and union) fields in this code because
+ // syntactically their paths are identical to associated item paths:
+ // `module::Type::field` and `module::Type::Assoc`.
+ //
+ // On the other hand, variant fields can't be mistaken for associated
+ // items because they look like this: `module::Type::Variant::field`.
+ //
+ // Variants themselves don't need to be handled here, even though
+ // they also look like associated items (`module::Type::Variant`),
+ // because they are real Rust syntax (unlike the intra-doc links
+ // field syntax) and are handled by the compiler's resolver.
+ let def = match tcx.type_of(did).kind() {
+ ty::Adt(def, _) if !def.is_enum() => def,
+ _ => return None,
+ };
+ let field =
+ def.non_enum_variant().fields.iter().find(|item| item.name == item_name)?;
+ Some((root_res, field.did))
+ }
+ Res::Def(DefKind::Trait, did) => tcx
+ .associated_items(did)
+ .find_by_name_and_namespace(tcx, Ident::with_dummy_span(item_name), ns, did)
+ .map(|item| {
+ let res = Res::Def(item.kind.as_def_kind(), item.def_id);
+ (res, item.def_id)
+ }),
+ _ => None,
+ }
+ }
+}
+
+fn full_res(tcx: TyCtxt<'_>, (base, assoc_item): (Res, Option<DefId>)) -> Res {
+ assoc_item.map_or(base, |def_id| Res::from_def_id(tcx, def_id))
+}
+
+/// Look to see if a resolved item has an associated item named `item_name`.
+///
+/// Given `[std::io::Error::source]`, where `source` is unresolved, this would
+/// find `std::error::Error::source` and return
+/// `<io::Error as error::Error>::source`.
+fn resolve_associated_trait_item<'a>(
+ ty: Ty<'a>,
+ module: DefId,
+ item_name: Symbol,
+ ns: Namespace,
+ cx: &mut DocContext<'a>,
+) -> Option<ty::AssocItem> {
+ // FIXME: this should also consider blanket impls (`impl<T> X for T`). Unfortunately
+ // `get_auto_trait_and_blanket_impls` is broken because the caching behavior is wrong. In the
+ // meantime, just don't look for these blanket impls.
+
+ // Next consider explicit impls: `impl MyTrait for MyType`
+ // Give precedence to inherent impls.
+ let traits = trait_impls_for(cx, ty, module);
+ debug!("considering traits {:?}", traits);
+ let mut candidates = traits.iter().filter_map(|&(impl_, trait_)| {
+ cx.tcx
+ .associated_items(trait_)
+ .find_by_name_and_namespace(cx.tcx, Ident::with_dummy_span(item_name), ns, trait_)
+ .map(|trait_assoc| {
+ trait_assoc_to_impl_assoc_item(cx.tcx, impl_, trait_assoc.def_id)
+ .unwrap_or(trait_assoc)
+ })
+ });
+ // FIXME(#74563): warn about ambiguity
+ debug!("the candidates were {:?}", candidates.clone().collect::<Vec<_>>());
+ candidates.next().copied()
+}
+
+/// Find the associated item in the impl `impl_id` that corresponds to the
+/// trait associated item `trait_assoc_id`.
+///
+/// This function returns `None` if no associated item was found in the impl.
+/// This can occur when the trait associated item has a default value that is
+/// not overridden in the impl.
+///
+/// This is just a wrapper around [`TyCtxt::impl_item_implementor_ids()`] and
+/// [`TyCtxt::associated_item()`] (with some helpful logging added).
+#[instrument(level = "debug", skip(tcx))]
+fn trait_assoc_to_impl_assoc_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_id: DefId,
+ trait_assoc_id: DefId,
+) -> Option<&'tcx ty::AssocItem> {
+ let trait_to_impl_assoc_map = tcx.impl_item_implementor_ids(impl_id);
+ debug!(?trait_to_impl_assoc_map);
+ let impl_assoc_id = *trait_to_impl_assoc_map.get(&trait_assoc_id)?;
+ debug!(?impl_assoc_id);
+ let impl_assoc = tcx.associated_item(impl_assoc_id);
+ debug!(?impl_assoc);
+ Some(impl_assoc)
+}
+
+/// Given a type, return all trait impls in scope in `module` for that type.
+/// Returns a set of pairs of `(impl_id, trait_id)`.
+///
+/// NOTE: this cannot be a query because more traits could be available when more crates are compiled!
+/// So it is not stable to serialize cross-crate.
+#[instrument(level = "debug", skip(cx))]
+fn trait_impls_for<'a>(
+ cx: &mut DocContext<'a>,
+ ty: Ty<'a>,
+ module: DefId,
+) -> FxHashSet<(DefId, DefId)> {
+ let tcx = cx.tcx;
+ let iter = cx.resolver_caches.traits_in_scope[&module].iter().flat_map(|trait_candidate| {
+ let trait_ = trait_candidate.def_id;
+ trace!("considering explicit impl for trait {:?}", trait_);
+
+ // Look at each trait implementation to see if it's an impl for `did`
+ tcx.find_map_relevant_impl(trait_, ty, |impl_| {
+ let trait_ref = tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
+ // Check if these are the same type.
+ let impl_type = trait_ref.self_ty();
+ trace!(
+ "comparing type {} with kind {:?} against type {:?}",
+ impl_type,
+ impl_type.kind(),
+ ty
+ );
+ // Fast path: if this is a primitive, a simple `==` will work.
+ // NOTE: the `match` is necessary; see #92662. It lets us ignore generics,
+ // because the user input may not include the generic placeholders,
+ // e.g. it lets us match `Foo` (written in the docs) with `Foo<T>` (the actual type).
+ let saw_impl = impl_type == ty
+ || match (impl_type.kind(), ty.kind()) {
+ (ty::Adt(impl_def, _), ty::Adt(ty_def, _)) => {
+ debug!("impl def_id: {:?}, ty def_id: {:?}", impl_def.did(), ty_def.did());
+ impl_def.did() == ty_def.did()
+ }
+ _ => false,
+ };
+
+ if saw_impl { Some((impl_, trait_)) } else { None }
+ })
+ });
+ iter.collect()
+}
+
+/// Check for resolve collisions between a trait and its derive.
+///
+/// These are common and we should just resolve to the trait in that case.
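+///
+/// For example, `[Debug]` can resolve to both the trait `Debug` and the derive macro `Debug`;
+/// in that case we simply resolve to the trait.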
+fn is_derive_trait_collision<T>(ns: &PerNS<Result<(Res, T), ResolutionFailure<'_>>>) -> bool {
+ matches!(
+ *ns,
+ PerNS {
+ type_ns: Ok((Res::Def(DefKind::Trait, _), _)),
+ macro_ns: Ok((Res::Def(DefKind::Macro(MacroKind::Derive), _), _)),
+ ..
+ }
+ )
+}
+
+impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
+ fn visit_item(&mut self, item: &Item) {
+ let parent_node =
+ item.item_id.as_def_id().and_then(|did| find_nearest_parent_module(self.cx.tcx, did));
+ if parent_node.is_some() {
+ trace!("got parent node for {:?} {:?}, id {:?}", item.type_(), item.name, item.item_id);
+ }
+
+ let inner_docs = item.inner_docs(self.cx.tcx);
+
+ if item.is_mod() && inner_docs {
+ self.mod_ids.push(item.item_id.expect_def_id());
+ }
+
+ // We want to resolve in the lexical scope of the documentation.
+ // In the presence of re-exports, this is not the same as the module of the item.
+ // Rather than merging all documentation into one, resolve it one attribute at a time
+ // so we know which module it came from.
+ for (parent_module, doc) in item.attrs.prepare_to_doc_link_resolution() {
+ if !may_have_doc_links(&doc) {
+ continue;
+ }
+ debug!("combined_docs={}", doc);
+ // NOTE: if there are links that start in one crate and end in another, this will not resolve them.
+ // This is a degenerate case and it's not supported by rustdoc.
+ let parent_node = parent_module.or(parent_node);
+ let mut tmp_links = self
+ .cx
+ .resolver_caches
+ .markdown_links
+ .take()
+ .expect("`markdown_links` are already borrowed");
+ if !tmp_links.contains_key(&doc) {
+ tmp_links.insert(doc.clone(), preprocessed_markdown_links(&doc));
+ }
+ for md_link in &tmp_links[&doc] {
+ let link = self.resolve_link(item, &doc, parent_node, md_link);
+ if let Some(link) = link {
+ self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link);
+ }
+ }
+ self.cx.resolver_caches.markdown_links = Some(tmp_links);
+ }
+
+ if item.is_mod() {
+ if !inner_docs {
+ self.mod_ids.push(item.item_id.expect_def_id());
+ }
+
+ self.visit_item_recur(item);
+ self.mod_ids.pop();
+ } else {
+ self.visit_item_recur(item)
+ }
+ }
+}
+
+enum PreprocessingError {
+ /// User error: `[std#x#y]` is not valid
+ MultipleAnchors,
+ Disambiguator(Range<usize>, String),
+ MalformedGenerics(MalformedGenerics, String),
+}
+
+impl PreprocessingError {
+ fn report(&self, cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>) {
+ match self {
+ PreprocessingError::MultipleAnchors => report_multiple_anchors(cx, diag_info),
+ PreprocessingError::Disambiguator(range, msg) => {
+ disambiguator_error(cx, diag_info, range.clone(), msg)
+ }
+ PreprocessingError::MalformedGenerics(err, path_str) => {
+ report_malformed_generics(cx, diag_info, *err, path_str)
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+struct PreprocessingInfo {
+ path_str: String,
+ disambiguator: Option<Disambiguator>,
+ extra_fragment: Option<String>,
+ link_text: String,
+}
+
+// Not a typedef to avoid leaking several private structures from this module.
+pub(crate) struct PreprocessedMarkdownLink(
+ Result<PreprocessingInfo, PreprocessingError>,
+ MarkdownLink,
+);
+
+/// Returns:
+/// - `None` if the link should be ignored.
+/// - `Some(Err)` if the link should emit an error
+/// - `Some(Ok)` if the link is valid
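+///
+/// For example (illustrative): a link written as `` [`Vec<T>`] `` preprocesses into a
+/// `PreprocessingInfo` with `path_str == "Vec"` and no disambiguator or extra fragment,
+/// while `[std#x#y]` yields `Some(Err(PreprocessingError::MultipleAnchors))`.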
+fn preprocess_link(
+ ori_link: &MarkdownLink,
+) -> Option<Result<PreprocessingInfo, PreprocessingError>> {
+ // [] is most likely not supposed to be a link
+ if ori_link.link.is_empty() {
+ return None;
+ }
+
+ // Bail early for real links.
+ if ori_link.link.contains('/') {
+ return None;
+ }
+
+ let stripped = ori_link.link.replace('`', "");
+ let mut parts = stripped.split('#');
+
+ let link = parts.next().unwrap();
+ if link.trim().is_empty() {
+ // This is an anchor to an element of the current page, nothing to do in here!
+ return None;
+ }
+ let extra_fragment = parts.next();
+ if parts.next().is_some() {
+ // A valid link can't have multiple #'s
+ return Some(Err(PreprocessingError::MultipleAnchors));
+ }
+
+ // Parse and strip the disambiguator from the link, if present.
+ let (disambiguator, path_str, link_text) = match Disambiguator::from_str(link) {
+ Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()),
+ Ok(None) => (None, link.trim(), link.trim()),
+ Err((err_msg, relative_range)) => {
+ // Only report error if we would not have ignored this link. See issue #83859.
+ if !should_ignore_link_with_disambiguators(link) {
+ let no_backticks_range = range_between_backticks(ori_link);
+ let disambiguator_range = (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end);
+ return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
+ } else {
+ return None;
+ }
+ }
+ };
+
+ if should_ignore_link(path_str) {
+ return None;
+ }
+
+ // Strip generics from the path.
+ let path_str = if path_str.contains(['<', '>'].as_slice()) {
+ match strip_generics_from_path(path_str) {
+ Ok(path) => path,
+ Err(err) => {
+ debug!("link has malformed generics: {}", path_str);
+ return Some(Err(PreprocessingError::MalformedGenerics(err, path_str.to_owned())));
+ }
+ }
+ } else {
+ path_str.to_owned()
+ };
+
+ // Sanity check to make sure we don't have any angle brackets after stripping generics.
+ assert!(!path_str.contains(['<', '>'].as_slice()));
+
+ // The link is not an intra-doc link if it still contains spaces after stripping generics.
+ if path_str.contains(' ') {
+ return None;
+ }
+
+ Some(Ok(PreprocessingInfo {
+ path_str,
+ disambiguator,
+ extra_fragment: extra_fragment.map(|frag| frag.to_owned()),
+ link_text: link_text.to_owned(),
+ }))
+}
+
+fn preprocessed_markdown_links(s: &str) -> Vec<PreprocessedMarkdownLink> {
+ markdown_links(s, |link| {
+ preprocess_link(&link).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
+ })
+}
+
+impl LinkCollector<'_, '_> {
+ /// This is the entry point for resolving an intra-doc link.
+ ///
+ /// FIXME(jynelson): this is way too many arguments
+ fn resolve_link(
+ &mut self,
+ item: &Item,
+ dox: &str,
+ parent_node: Option<DefId>,
+ link: &PreprocessedMarkdownLink,
+ ) -> Option<ItemLink> {
+ let PreprocessedMarkdownLink(pp_link, ori_link) = link;
+ trace!("considering link '{}'", ori_link.link);
+
+ let diag_info = DiagnosticInfo {
+ item,
+ dox,
+ ori_link: &ori_link.link,
+ link_range: ori_link.range.clone(),
+ };
+
+ let PreprocessingInfo { path_str, disambiguator, extra_fragment, link_text } =
+ pp_link.as_ref().map_err(|err| err.report(self.cx, diag_info.clone())).ok()?;
+ let disambiguator = *disambiguator;
+
+ // In order to correctly resolve intra-doc links we need to
+ // pick a base AST node to work from. If the documentation for
+ // this module came from an inner comment (//!) then we anchor
+ // our name resolution *inside* the module. If, on the other
+ // hand it was an outer comment (///) then we anchor the name
+ // resolution in the parent module on the basis that the names
+ // used are more likely to be intended to be parent names. For
+ // this, we set base_node to None for inner comments since
+ // we've already pushed this node onto the resolution stack but
+ // for outer comments we explicitly try and resolve against the
+ // parent_node first.
+ let inner_docs = item.inner_docs(self.cx.tcx);
+ let base_node =
+ if item.is_mod() && inner_docs { self.mod_ids.last().copied() } else { parent_node };
+ let module_id = base_node.expect("doc link without parent module");
+
+ let (mut res, fragment) = self.resolve_with_disambiguator_cached(
+ ResolutionInfo {
+ item_id: item.item_id,
+ module_id,
+ dis: disambiguator,
+ path_str: path_str.to_owned(),
+ extra_fragment: extra_fragment.clone(),
+ },
+ diag_info.clone(), // this struct should really be Copy, but Range is not :(
+ // For reference-style links we want to report only one error so unsuccessful
+ // resolutions are cached, for other links we want to report an error every
+ // time so they are not cached.
+ matches!(ori_link.kind, LinkType::Reference | LinkType::Shortcut),
+ )?;
+
+ // Check for a primitive which might conflict with a module
+ // Report the ambiguity and require that the user specify which one they meant.
+ // FIXME: could there ever be a primitive not in the type namespace?
+ if matches!(
+ disambiguator,
+ None | Some(Disambiguator::Namespace(Namespace::TypeNS) | Disambiguator::Primitive)
+ ) && !matches!(res, Res::Primitive(_))
+ {
+ if let Some(prim) = resolve_primitive(path_str, TypeNS) {
+ // `prim@char`
+ if matches!(disambiguator, Some(Disambiguator::Primitive)) {
+ res = prim;
+ } else {
+ // `[char]` when a `char` module is in scope
+ let candidates = vec![res, prim];
+ ambiguity_error(self.cx, diag_info, path_str, candidates);
+ return None;
+ }
+ }
+ }
+
+ match res {
+ Res::Primitive(prim) => {
+ if let Some(UrlFragment::Item(id)) = fragment {
+ // We're actually resolving an associated item of a primitive, so we need to
+ // verify the disambiguator (if any) matches the type of the associated item.
+ // This case should really follow the same flow as the `Res::Def` branch below,
+ // but attempting to add a call to `clean::register_res` causes an ICE. @jyn514
+ // thinks `register_res` is only needed for cross-crate re-exports, but Rust
+ // doesn't allow statements like `use str::trim;`, making this a (hopefully)
+ // valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677
+ // for discussion on the matter.
+ let kind = self.cx.tcx.def_kind(id);
+ self.verify_disambiguator(
+ path_str,
+ ori_link,
+ kind,
+ id,
+ disambiguator,
+ item,
+ &diag_info,
+ )?;
+
+ // FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
+ // However I'm not sure how to check that across crates.
+ if prim == PrimitiveType::RawPointer
+ && item.item_id.is_local()
+ && !self.cx.tcx.features().intra_doc_pointers
+ {
+ self.report_rawptr_assoc_feature_gate(dox, ori_link, item);
+ }
+ } else {
+ match disambiguator {
+ Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
+ Some(other) => {
+ self.report_disambiguator_mismatch(
+ path_str, ori_link, other, res, &diag_info,
+ );
+ return None;
+ }
+ }
+ }
+
+ Some(ItemLink {
+ link: ori_link.link.clone(),
+ link_text: link_text.clone(),
+ did: res.def_id(self.cx.tcx),
+ fragment,
+ })
+ }
+ Res::Def(kind, id) => {
+ let (kind_for_dis, id_for_dis) = if let Some(UrlFragment::Item(id)) = fragment {
+ (self.cx.tcx.def_kind(id), id)
+ } else {
+ (kind, id)
+ };
+ self.verify_disambiguator(
+ path_str,
+ ori_link,
+ kind_for_dis,
+ id_for_dis,
+ disambiguator,
+ item,
+ &diag_info,
+ )?;
+ let id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));
+ Some(ItemLink {
+ link: ori_link.link.clone(),
+ link_text: link_text.clone(),
+ did: id,
+ fragment,
+ })
+ }
+ }
+ }
+
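+ /// Check that the kind of the resolved item is compatible with the disambiguator the user
+ /// wrote, e.g. reject `struct@Foo` when `Foo` resolved to an enum.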
+ fn verify_disambiguator(
+ &self,
+ path_str: &str,
+ ori_link: &MarkdownLink,
+ kind: DefKind,
+ id: DefId,
+ disambiguator: Option<Disambiguator>,
+ item: &Item,
+ diag_info: &DiagnosticInfo<'_>,
+ ) -> Option<()> {
+ debug!("intra-doc link to {} resolved to {:?}", path_str, (kind, id));
+
+ // Disallow e.g. linking to enums with `struct@`
+ debug!("saw kind {:?} with disambiguator {:?}", kind, disambiguator);
+ match (kind, disambiguator) {
+ | (DefKind::Const | DefKind::ConstParam | DefKind::AssocConst | DefKind::AnonConst, Some(Disambiguator::Kind(DefKind::Const)))
+ // NOTE: this allows 'method' to mean both normal functions and associated functions
+ // This can't cause ambiguity because both are in the same namespace.
+ | (DefKind::Fn | DefKind::AssocFn, Some(Disambiguator::Kind(DefKind::Fn)))
+ // These are namespaces; allow anything in the namespace to match
+ | (_, Some(Disambiguator::Namespace(_)))
+ // If no disambiguator given, allow anything
+ | (_, None)
+ // All of these are valid, so do nothing
+ => {}
+ (actual, Some(Disambiguator::Kind(expected))) if actual == expected => {}
+ (_, Some(specified @ Disambiguator::Kind(_) | specified @ Disambiguator::Primitive)) => {
+ self.report_disambiguator_mismatch(path_str,ori_link,specified, Res::Def(kind, id),diag_info);
+ return None;
+ }
+ }
+
+ // The item can be non-local, e.g. when using `#[doc(primitive = "pointer")]`.
+ if let Some((src_id, dst_id)) = id
+ .as_local()
+ // The `expect_def_id()` should be okay because `local_def_id_to_hir_id`
+ // would presumably panic if a fake `DefIndex` were passed.
+ .and_then(|dst_id| {
+ item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
+ })
+ {
+ if self.cx.tcx.privacy_access_levels(()).is_exported(src_id)
+ && !self.cx.tcx.privacy_access_levels(()).is_exported(dst_id)
+ {
+ privacy_error(self.cx, diag_info, path_str);
+ }
+ }
+
+ Some(())
+ }
+
+ fn report_disambiguator_mismatch(
+ &self,
+ path_str: &str,
+ ori_link: &MarkdownLink,
+ specified: Disambiguator,
+ resolved: Res,
+ diag_info: &DiagnosticInfo<'_>,
+ ) {
+ // The resolved item did not match the disambiguator; give a better error than 'not found'
+ let msg = format!("incompatible link kind for `{}`", path_str);
+ let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>| {
+ let note = format!(
+ "this link resolved to {} {}, which is not {} {}",
+ resolved.article(),
+ resolved.descr(),
+ specified.article(),
+ specified.descr(),
+ );
+ if let Some(sp) = sp {
+ diag.span_label(sp, &note);
+ } else {
+ diag.note(&note);
+ }
+ suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
+ };
+ report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
+ }
+
+ fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &MarkdownLink, item: &Item) {
+ let span =
+ super::source_span_for_markdown_range(self.cx.tcx, dox, &ori_link.range, &item.attrs)
+ .unwrap_or_else(|| item.attr_span(self.cx.tcx));
+ rustc_session::parse::feature_err(
+ &self.cx.tcx.sess.parse_sess,
+ sym::intra_doc_pointers,
+ span,
+ "linking to associated items of raw pointers is experimental",
+ )
+ .note("rustdoc does not allow disambiguating between `*const` and `*mut`, and pointers are unstable until it does")
+ .emit();
+ }
+
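+ /// Cached wrapper around [`Self::resolve_with_disambiguator`]: it consults and updates
+ /// `visited_links` so that each distinct link is resolved only once and, when `cache_errors`
+ /// is set, failed resolutions are also cached and reported only once.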
+ fn resolve_with_disambiguator_cached(
+ &mut self,
+ key: ResolutionInfo,
+ diag: DiagnosticInfo<'_>,
+ // If errors are cached then they are only reported on the first occurrence,
+ // which we want in some cases but not in others.
+ cache_errors: bool,
+ ) -> Option<(Res, Option<UrlFragment>)> {
+ if let Some(res) = self.visited_links.get(&key) {
+ if res.is_some() || cache_errors {
+ return res.clone();
+ }
+ }
+
+ let res = self.resolve_with_disambiguator(&key, diag.clone()).and_then(|(res, def_id)| {
+ let fragment = match (&key.extra_fragment, def_id) {
+ (Some(_), Some(def_id)) => {
+ report_anchor_conflict(self.cx, diag, def_id);
+ return None;
+ }
+ (Some(u_frag), None) => Some(UrlFragment::UserWritten(u_frag.clone())),
+ (None, Some(def_id)) => Some(UrlFragment::Item(def_id)),
+ (None, None) => None,
+ };
+ Some((res, fragment))
+ });
+
+ if res.is_some() || cache_errors {
+ self.visited_links.insert(key, res.clone());
+ }
+ res
+ }
+
+ /// After parsing the disambiguator, resolve the main part of the link.
+ // FIXME(jynelson): wow this is just so much
+ fn resolve_with_disambiguator(
+ &mut self,
+ key: &ResolutionInfo,
+ diag: DiagnosticInfo<'_>,
+ ) -> Option<(Res, Option<DefId>)> {
+ let disambiguator = key.dis;
+ let path_str = &key.path_str;
+ let item_id = key.item_id;
+ let base_node = key.module_id;
+
+ match disambiguator.map(Disambiguator::ns) {
+ Some(expected_ns) => {
+ match self.resolve(path_str, expected_ns, item_id, base_node) {
+ Ok(res) => Some(res),
+ Err(err) => {
+ // We only looked in one namespace. Try to give a better error if possible.
+ // FIXME: really it should be `resolution_failure` that does this, not `resolve_with_disambiguator`.
+ // See https://github.com/rust-lang/rust/pull/76955#discussion_r493953382 for a good approach.
+ let mut err = ResolutionFailure::NotResolved(err);
+ for other_ns in [TypeNS, ValueNS, MacroNS] {
+ if other_ns != expected_ns {
+ if let Ok(res) =
+ self.resolve(path_str, other_ns, item_id, base_node)
+ {
+ err = ResolutionFailure::WrongNamespace {
+ res: full_res(self.cx.tcx, res),
+ expected_ns,
+ };
+ break;
+ }
+ }
+ }
+ resolution_failure(self, diag, path_str, disambiguator, smallvec![err])
+ }
+ }
+ }
+ None => {
+ // Try everything!
+ let mut candidate = |ns| {
+ self.resolve(path_str, ns, item_id, base_node)
+ .map_err(ResolutionFailure::NotResolved)
+ };
+
+ let candidates = PerNS {
+ macro_ns: candidate(MacroNS),
+ type_ns: candidate(TypeNS),
+ value_ns: candidate(ValueNS).and_then(|(res, def_id)| {
+ match res {
+ // Constructors are picked up in the type namespace.
+ Res::Def(DefKind::Ctor(..), _) => {
+ Err(ResolutionFailure::WrongNamespace { res, expected_ns: TypeNS })
+ }
+ _ => Ok((res, def_id)),
+ }
+ }),
+ };
+
+ let len = candidates.iter().filter(|res| res.is_ok()).count();
+
+ if len == 0 {
+ return resolution_failure(
+ self,
+ diag,
+ path_str,
+ disambiguator,
+ candidates.into_iter().filter_map(|res| res.err()).collect(),
+ );
+ }
+
+ if len == 1 {
+ Some(candidates.into_iter().find_map(|res| res.ok()).unwrap())
+ } else if len == 2 && is_derive_trait_collision(&candidates) {
+ Some(candidates.type_ns.unwrap())
+ } else {
+ let ignore_macro = is_derive_trait_collision(&candidates);
+ // If we're reporting an ambiguity, don't mention the namespaces that failed
+ let mut candidates =
+ candidates.map(|candidate| candidate.ok().map(|(res, _)| res));
+ if ignore_macro {
+ candidates.macro_ns = None;
+ }
+ ambiguity_error(self.cx, diag, path_str, candidates.present_items().collect());
+ None
+ }
+ }
+ }
+ }
+}
+
+/// Get the section of a link between the backticks,
+/// or the whole link if there aren't any backticks.
+///
+/// For example:
+///
+/// ```text
+/// [`Foo`]
+/// ^^^
+/// ```
+fn range_between_backticks(ori_link: &MarkdownLink) -> Range<usize> {
+ let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0);
+ let before_second_backtick_group = ori_link
+ .link
+ .bytes()
+ .skip(after_first_backtick_group)
+ .position(|b| b == b'`')
+ .unwrap_or(ori_link.link.len());
+ (ori_link.range.start + after_first_backtick_group)
+ ..(ori_link.range.start + before_second_backtick_group)
+}
+
+/// Returns true if we should ignore `link` due to it being unlikely
+/// that it is an intra-doc link. `link` should still have disambiguators
+/// if there were any.
+///
+/// The difference between this and [`should_ignore_link()`] is that this
+/// check should only be used on links that still have disambiguators.
+fn should_ignore_link_with_disambiguators(link: &str) -> bool {
+ link.contains(|ch: char| !(ch.is_alphanumeric() || ":_<>, !*&;@()".contains(ch)))
+}
+
+/// Returns true if we should ignore `path_str` due to it being unlikely
+/// that it is an intra-doc link.
+fn should_ignore_link(path_str: &str) -> bool {
+ path_str.contains(|ch: char| !(ch.is_alphanumeric() || ":_<>, !*&;".contains(ch)))
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+/// Disambiguators for a link.
+enum Disambiguator {
+ /// `prim@`
+ ///
+ /// This is buggy, see <https://github.com/rust-lang/rust/pull/77875#discussion_r503583103>
+ Primitive,
+ /// `struct@` or `f()`
+ Kind(DefKind),
+ /// `type@`
+ Namespace(Namespace),
+}
+
+impl Disambiguator {
+ /// Given a link, parse and return `(disambiguator, path_str, link_text)`.
+ ///
+ /// This returns `Ok(Some(...))` if a disambiguator was found,
+ /// `Ok(None)` if no disambiguator was found, or `Err(...)`
+ /// if there was a problem with the disambiguator.
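+    ///
+    /// For example, `"struct@Foo"` parses to `(Kind(DefKind::Struct), "Foo", "Foo")`,
+    /// `"Foo()"` to `(Kind(DefKind::Fn), "Foo", "Foo()")`, and a plain `"Foo"` to `Ok(None)`.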
+ fn from_str(link: &str) -> Result<Option<(Self, &str, &str)>, (String, Range<usize>)> {
+ use Disambiguator::{Kind, Namespace as NS, Primitive};
+
+ if let Some(idx) = link.find('@') {
+ let (prefix, rest) = link.split_at(idx);
+ let d = match prefix {
+ "struct" => Kind(DefKind::Struct),
+ "enum" => Kind(DefKind::Enum),
+ "trait" => Kind(DefKind::Trait),
+ "union" => Kind(DefKind::Union),
+ "module" | "mod" => Kind(DefKind::Mod),
+ "const" | "constant" => Kind(DefKind::Const),
+ "static" => Kind(DefKind::Static(Mutability::Not)),
+ "function" | "fn" | "method" => Kind(DefKind::Fn),
+ "derive" => Kind(DefKind::Macro(MacroKind::Derive)),
+ "type" => NS(Namespace::TypeNS),
+ "value" => NS(Namespace::ValueNS),
+ "macro" => NS(Namespace::MacroNS),
+ "prim" | "primitive" => Primitive,
+ _ => return Err((format!("unknown disambiguator `{}`", prefix), 0..idx)),
+ };
+ Ok(Some((d, &rest[1..], &rest[1..])))
+ } else {
+ let suffixes = [
+ ("!()", DefKind::Macro(MacroKind::Bang)),
+ ("!{}", DefKind::Macro(MacroKind::Bang)),
+ ("![]", DefKind::Macro(MacroKind::Bang)),
+ ("()", DefKind::Fn),
+ ("!", DefKind::Macro(MacroKind::Bang)),
+ ];
+ for (suffix, kind) in suffixes {
+ if let Some(path_str) = link.strip_suffix(suffix) {
+ // Avoid turning `!` or `()` into an empty string
+ if !path_str.is_empty() {
+ return Ok(Some((Kind(kind), path_str, link)));
+ }
+ }
+ }
+ Ok(None)
+ }
+ }
+
+ fn ns(self) -> Namespace {
+ match self {
+ Self::Namespace(n) => n,
+ Self::Kind(k) => {
+ k.ns().expect("only DefKinds with a valid namespace can be disambiguators")
+ }
+ Self::Primitive => TypeNS,
+ }
+ }
+
+ fn article(self) -> &'static str {
+ match self {
+ Self::Namespace(_) => panic!("article() doesn't make sense for namespaces"),
+ Self::Kind(k) => k.article(),
+ Self::Primitive => "a",
+ }
+ }
+
+ fn descr(self) -> &'static str {
+ match self {
+ Self::Namespace(n) => n.descr(),
+ // HACK(jynelson): the source of `DefKind::descr` only uses the DefId for
+ // printing "module" vs "crate" so using the wrong ID is not a huge problem
+ Self::Kind(k) => k.descr(CRATE_DEF_ID.to_def_id()),
+ Self::Primitive => "builtin type",
+ }
+ }
+}
+
+/// A suggestion to show in a diagnostic.
+enum Suggestion {
+ /// `struct@`
+ Prefix(&'static str),
+ /// `f()`
+ Function,
+ /// `m!`
+ Macro,
+ /// `foo` without any disambiguator
+ RemoveDisambiguator,
+}
+
+impl Suggestion {
+ fn descr(&self) -> Cow<'static, str> {
+ match self {
+ Self::Prefix(x) => format!("prefix with `{}@`", x).into(),
+ Self::Function => "add parentheses".into(),
+ Self::Macro => "add an exclamation mark".into(),
+ Self::RemoveDisambiguator => "remove the disambiguator".into(),
+ }
+ }
+
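+ /// Render the suggested rewrite of `path_str`, e.g. `Prefix("struct")` turns `Foo` into
+ /// `struct@Foo` and `Function` turns `Foo` into `Foo()`.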
+ fn as_help(&self, path_str: &str) -> String {
+ // FIXME: if this is an implied shortcut link, it's bad style to suggest `@`
+ match self {
+ Self::Prefix(prefix) => format!("{}@{}", prefix, path_str),
+ Self::Function => format!("{}()", path_str),
+ Self::Macro => format!("{}!", path_str),
+ Self::RemoveDisambiguator => path_str.into(),
+ }
+ }
+
+ fn as_help_span(
+ &self,
+ path_str: &str,
+ ori_link: &str,
+ sp: rustc_span::Span,
+ ) -> Vec<(rustc_span::Span, String)> {
+ let inner_sp = match ori_link.find('(') {
+ Some(index) => sp.with_hi(sp.lo() + BytePos(index as _)),
+ None => sp,
+ };
+ let inner_sp = match ori_link.find('!') {
+ Some(index) => inner_sp.with_hi(inner_sp.lo() + BytePos(index as _)),
+ None => inner_sp,
+ };
+ let inner_sp = match ori_link.find('@') {
+ Some(index) => inner_sp.with_lo(inner_sp.lo() + BytePos(index as u32 + 1)),
+ None => inner_sp,
+ };
+ match self {
+ Self::Prefix(prefix) => {
+ // FIXME: if this is an implied shortcut link, it's bad style to suggest `@`
+ let mut sugg = vec![(sp.with_hi(inner_sp.lo()), format!("{}@", prefix))];
+ if sp.hi() != inner_sp.hi() {
+ sugg.push((inner_sp.shrink_to_hi().with_hi(sp.hi()), String::new()));
+ }
+ sugg
+ }
+ Self::Function => {
+ let mut sugg = vec![(inner_sp.shrink_to_hi().with_hi(sp.hi()), "()".to_string())];
+ if sp.lo() != inner_sp.lo() {
+ sugg.push((inner_sp.shrink_to_lo().with_lo(sp.lo()), String::new()));
+ }
+ sugg
+ }
+ Self::Macro => {
+ let mut sugg = vec![(inner_sp.shrink_to_hi(), "!".to_string())];
+ if sp.lo() != inner_sp.lo() {
+ sugg.push((inner_sp.shrink_to_lo().with_lo(sp.lo()), String::new()));
+ }
+ sugg
+ }
+ Self::RemoveDisambiguator => vec![(sp, path_str.into())],
+ }
+ }
+}
+
+/// Reports a diagnostic for an intra-doc link.
+///
+/// If no link range is provided, or the source span of the link cannot be determined, the span of
+/// the entire documentation block is used for the lint. If a range is provided but the span
+/// calculation fails, a note is added to the diagnostic pointing to the link in the markdown.
+///
+/// The `decorate` callback is invoked in all cases to allow further customization of the
+/// diagnostic before emission. If the span of the link was able to be determined, the second
+/// parameter of the callback will contain it, and the primary span of the diagnostic will be set
+/// to it.
+fn report_diagnostic(
+ tcx: TyCtxt<'_>,
+ lint: &'static Lint,
+ msg: &str,
+ DiagnosticInfo { item, ori_link: _, dox, link_range }: &DiagnosticInfo<'_>,
+ decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>),
+) {
+ let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
+ else {
+ // If non-local, no need to check anything.
+ info!("ignoring warning from parent crate: {}", msg);
+ return;
+ };
+
+ let sp = item.attr_span(tcx);
+
+ tcx.struct_span_lint_hir(lint, hir_id, sp, |lint| {
+ let mut diag = lint.build(msg);
+
+ let span =
+ super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
+ if dox.as_bytes().get(link_range.start) == Some(&b'`')
+ && dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
+ {
+ sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
+ } else {
+ sp
+ }
+ });
+
+ if let Some(sp) = span {
+ diag.set_span(sp);
+ } else {
+ // blah blah blah\nblah\nblah [blah] blah blah\nblah blah
+ // ^ ~~~~
+ // | link_range
+ // last_new_line_offset
+ let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
+ let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
+
+ // Print the line containing the `link_range` and manually mark it with '^'s.
+ diag.note(&format!(
+ "the link appears in this line:\n\n{line}\n\
+ {indicator: <before$}{indicator:^<found$}",
+ line = line,
+ indicator = "",
+ before = link_range.start - last_new_line_offset,
+ found = link_range.len(),
+ ));
+ }
+
+ decorate(&mut diag, span);
+
+ diag.emit();
+ });
+}
+
+/// Reports a link that failed to resolve.
+///
+/// This also tries to resolve any intermediate path segments that weren't
+/// handled earlier. For example, if passed `Item::Crate(std)` and `path_str`
+/// `std::io::Error::x`, this will resolve `std::io::Error`.
+fn resolution_failure(
+ collector: &mut LinkCollector<'_, '_>,
+ diag_info: DiagnosticInfo<'_>,
+ path_str: &str,
+ disambiguator: Option<Disambiguator>,
+ kinds: SmallVec<[ResolutionFailure<'_>; 3]>,
+) -> Option<(Res, Option<DefId>)> {
+ let tcx = collector.cx.tcx;
+ let mut recovered_res = None;
+ report_diagnostic(
+ tcx,
+ BROKEN_INTRA_DOC_LINKS,
+ &format!("unresolved link to `{}`", path_str),
+ &diag_info,
+ |diag, sp| {
+ let item = |res: Res| format!("the {} `{}`", res.descr(), res.name(tcx),);
+ let assoc_item_not_allowed = |res: Res| {
+ let name = res.name(tcx);
+ format!(
+ "`{}` is {} {}, not a module or type, and cannot have associated items",
+ name,
+ res.article(),
+ res.descr()
+ )
+ };
+ // ignore duplicates
+ let mut variants_seen = SmallVec::<[_; 3]>::new();
+ for mut failure in kinds {
+ let variant = std::mem::discriminant(&failure);
+ if variants_seen.contains(&variant) {
+ continue;
+ }
+ variants_seen.push(variant);
+
+ if let ResolutionFailure::NotResolved(UnresolvedPath {
+ item_id,
+ module_id,
+ partial_res,
+ unresolved,
+ }) = &mut failure
+ {
+ use DefKind::*;
+
+ let item_id = *item_id;
+ let module_id = *module_id;
+ // FIXME(jynelson): this might conflict with my `Self` fix in #76467
+ // FIXME: maybe use itertools `collect_tuple` instead?
+ fn split(path: &str) -> Option<(&str, &str)> {
+ let mut splitter = path.rsplitn(2, "::");
+ splitter.next().and_then(|right| splitter.next().map(|left| (left, right)))
+ }
+
+ // Check if _any_ parent of the path gets resolved.
+ // If so, report it and say which segment failed to resolve; if not, say the first path segment didn't resolve.
+ let mut name = path_str;
+ 'outer: loop {
+ let Some((start, end)) = split(name) else {
+ // avoid bug that marked [Quux::Z] as missing Z, not Quux
+ if partial_res.is_none() {
+ *unresolved = name.into();
+ }
+ break;
+ };
+ name = start;
+ for ns in [TypeNS, ValueNS, MacroNS] {
+ if let Ok(res) = collector.resolve(start, ns, item_id, module_id) {
+ debug!("found partial_res={:?}", res);
+ *partial_res = Some(full_res(collector.cx.tcx, res));
+ *unresolved = end.into();
+ break 'outer;
+ }
+ }
+ *unresolved = end.into();
+ }
+
+ let last_found_module = match *partial_res {
+ Some(Res::Def(DefKind::Mod, id)) => Some(id),
+ None => Some(module_id),
+ _ => None,
+ };
+ // See if this was a module: `[path]` or `[std::io::nope]`
+ if let Some(module) = last_found_module {
+ let note = if partial_res.is_some() {
+ // Part of the link resolved; e.g. `std::io::nonexistent`
+ let module_name = tcx.item_name(module);
+ format!("no item named `{}` in module `{}`", unresolved, module_name)
+ } else {
+ // None of the link resolved; e.g. `Notimported`
+ format!("no item named `{}` in scope", unresolved)
+ };
+ if let Some(span) = sp {
+ diag.span_label(span, &note);
+ } else {
+ diag.note(&note);
+ }
+
+ if !path_str.contains("::") {
+ if disambiguator.map_or(true, |d| d.ns() == MacroNS)
+ && let Some(&res) = collector.cx.resolver_caches.all_macro_rules
+ .get(&Symbol::intern(path_str))
+ {
+ diag.note(format!(
+ "`macro_rules` named `{path_str}` exists in this crate, \
+ but it is not in scope at this link's location"
+ ));
+ recovered_res = res.try_into().ok().map(|res| (res, None));
+ } else {
+ // If the link had `::` in it, we would assume it was meant to be an
+ // intra-doc link; without `::`, the `[]` might be unrelated, so suggest escaping.
+ diag.help("to escape `[` and `]` characters, \
+ add '\\' before them like `\\[` or `\\]`");
+ }
+ }
+
+ continue;
+ }
+
+ // Otherwise, it must be an associated item or variant
+ let res = partial_res.expect("None case was handled by `last_found_module`");
+ let name = res.name(tcx);
+ let kind = match res {
+ Res::Def(kind, _) => Some(kind),
+ Res::Primitive(_) => None,
+ };
+ let path_description = if let Some(kind) = kind {
+ match kind {
+ Mod | ForeignMod => "inner item",
+ Struct => "field or associated item",
+ Enum | Union => "variant or associated item",
+ Variant
+ | Field
+ | Closure
+ | Generator
+ | AssocTy
+ | AssocConst
+ | AssocFn
+ | Fn
+ | Macro(_)
+ | Const
+ | ConstParam
+ | ExternCrate
+ | Use
+ | LifetimeParam
+ | Ctor(_, _)
+ | AnonConst
+ | InlineConst => {
+ let note = assoc_item_not_allowed(res);
+ if let Some(span) = sp {
+ diag.span_label(span, &note);
+ } else {
+ diag.note(&note);
+ }
+ return;
+ }
+ Trait | TyAlias | ForeignTy | OpaqueTy | TraitAlias | TyParam
+ | Static(_) => "associated item",
+ Impl | GlobalAsm => unreachable!("not a path"),
+ }
+ } else {
+ "associated item"
+ };
+ let note = format!(
+ "the {} `{}` has no {} named `{}`",
+ res.descr(),
+ name,
+ disambiguator.map_or(path_description, |d| d.descr()),
+ unresolved,
+ );
+ if let Some(span) = sp {
+ diag.span_label(span, &note);
+ } else {
+ diag.note(&note);
+ }
+
+ continue;
+ }
+ let note = match failure {
+ ResolutionFailure::NotResolved { .. } => unreachable!("handled above"),
+ ResolutionFailure::WrongNamespace { res, expected_ns } => {
+ suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+
+ format!(
+ "this link resolves to {}, which is not in the {} namespace",
+ item(res),
+ expected_ns.descr()
+ )
+ }
+ };
+ if let Some(span) = sp {
+ diag.span_label(span, &note);
+ } else {
+ diag.note(&note);
+ }
+ }
+ },
+ );
+
+ recovered_res
+}
+
+fn report_multiple_anchors(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>) {
+ let msg = format!("`{}` contains multiple anchors", diag_info.ori_link);
+ anchor_failure(cx, diag_info, &msg, 1)
+}
+
+fn report_anchor_conflict(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>, def_id: DefId) {
+ let (link, kind) = (diag_info.ori_link, Res::from_def_id(cx.tcx, def_id).descr());
+ let msg = format!("`{link}` contains an anchor, but links to {kind}s are already anchored");
+ anchor_failure(cx, diag_info, &msg, 0)
+}
+
+/// Report an anchor failure.
+fn anchor_failure(
+ cx: &DocContext<'_>,
+ diag_info: DiagnosticInfo<'_>,
+ msg: &str,
+ anchor_idx: usize,
+) {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
+ if let Some(mut sp) = sp {
+ if let Some((fragment_offset, _)) =
+ diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)
+ {
+ sp = sp.with_lo(sp.lo() + BytePos(fragment_offset as _));
+ }
+ diag.span_label(sp, "invalid anchor");
+ }
+ });
+}
+
+/// Report an error in the link disambiguator.
+fn disambiguator_error(
+ cx: &DocContext<'_>,
+ mut diag_info: DiagnosticInfo<'_>,
+ disambiguator_range: Range<usize>,
+ msg: &str,
+) {
+ diag_info.link_range = disambiguator_range;
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| {
+ let msg = format!(
+ "see {}/rustdoc/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
+ crate::DOC_RUST_LANG_ORG_CHANNEL
+ );
+ diag.note(&msg);
+ });
+}
+
+fn report_malformed_generics(
+ cx: &DocContext<'_>,
+ diag_info: DiagnosticInfo<'_>,
+ err: MalformedGenerics,
+ path_str: &str,
+) {
+ report_diagnostic(
+ cx.tcx,
+ BROKEN_INTRA_DOC_LINKS,
+ &format!("unresolved link to `{}`", path_str),
+ &diag_info,
+ |diag, sp| {
+ let note = match err {
+ MalformedGenerics::UnbalancedAngleBrackets => "unbalanced angle brackets",
+ MalformedGenerics::MissingType => "missing type for generic parameters",
+ MalformedGenerics::HasFullyQualifiedSyntax => {
+ diag.note(
+ "see https://github.com/rust-lang/rust/issues/74563 for more information",
+ );
+ "fully-qualified syntax is unsupported"
+ }
+ MalformedGenerics::InvalidPathSeparator => "has invalid path separator",
+ MalformedGenerics::TooManyAngleBrackets => "too many angle brackets",
+ MalformedGenerics::EmptyAngleBrackets => "empty angle brackets",
+ };
+ if let Some(span) = sp {
+ diag.span_label(span, note);
+ } else {
+ diag.note(note);
+ }
+ },
+ );
+}
+
+/// Report an ambiguity error, where there were multiple possible resolutions.
+fn ambiguity_error(
+ cx: &DocContext<'_>,
+ diag_info: DiagnosticInfo<'_>,
+ path_str: &str,
+ candidates: Vec<Res>,
+) {
+ let mut msg = format!("`{}` is ", path_str);
+
+ match candidates.as_slice() {
+ [first_def, second_def] => {
+ msg += &format!(
+ "both {} {} and {} {}",
+ first_def.article(),
+ first_def.descr(),
+ second_def.article(),
+ second_def.descr(),
+ );
+ }
+ _ => {
+ let mut candidates = candidates.iter().peekable();
+ while let Some(res) = candidates.next() {
+ if candidates.peek().is_some() {
+ msg += &format!("{} {}, ", res.article(), res.descr());
+ } else {
+ msg += &format!("and {} {}", res.article(), res.descr());
+ }
+ }
+ }
+ }
+
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, |diag, sp| {
+ if let Some(sp) = sp {
+ diag.span_label(sp, "ambiguous link");
+ } else {
+ diag.note("ambiguous link");
+ }
+
+ for res in candidates {
+ suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ }
+ });
+}
+
+/// In case of an ambiguity or mismatched disambiguator, suggest the correct
+/// disambiguator.
+fn suggest_disambiguator(
+ res: Res,
+ diag: &mut Diagnostic,
+ path_str: &str,
+ ori_link: &str,
+ sp: Option<rustc_span::Span>,
+) {
+ let suggestion = res.disambiguator_suggestion();
+ let help = format!("to link to the {}, {}", res.descr(), suggestion.descr());
+
+ if let Some(sp) = sp {
+ let mut spans = suggestion.as_help_span(path_str, ori_link, sp);
+ if spans.len() > 1 {
+ diag.multipart_suggestion(&help, spans, Applicability::MaybeIncorrect);
+ } else {
+ let (sp, suggestion_text) = spans.pop().unwrap();
+ diag.span_suggestion_verbose(sp, &help, suggestion_text, Applicability::MaybeIncorrect);
+ }
+ } else {
+ diag.help(&format!("{}: {}", help, suggestion.as_help(path_str)));
+ }
+}
+
+/// Report a link from a public item to a private one.
+fn privacy_error(cx: &DocContext<'_>, diag_info: &DiagnosticInfo<'_>, path_str: &str) {
+ let sym;
+ let item_name = match diag_info.item.name {
+ Some(name) => {
+ sym = name;
+ sym.as_str()
+ }
+ None => "<unknown>",
+ };
+ let msg =
+ format!("public documentation for `{}` links to private item `{}`", item_name, path_str);
+
+ report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
+ if let Some(sp) = sp {
+ diag.span_label(sp, "this item is private");
+ }
+
+ let note_msg = if cx.render_options.document_private {
+ "this link resolves only because you passed `--document-private-items`, but will break without"
+ } else {
+ "this link will resolve properly if you pass `--document-private-items`"
+ };
+ diag.note(note_msg);
+ });
+}
+
+/// Resolve a primitive type or value.
+fn resolve_primitive(path_str: &str, ns: Namespace) -> Option<Res> {
+ if ns != TypeNS {
+ return None;
+ }
+ use PrimitiveType::*;
+ let prim = match path_str {
+ "isize" => Isize,
+ "i8" => I8,
+ "i16" => I16,
+ "i32" => I32,
+ "i64" => I64,
+ "i128" => I128,
+ "usize" => Usize,
+ "u8" => U8,
+ "u16" => U16,
+ "u32" => U32,
+ "u64" => U64,
+ "u128" => U128,
+ "f32" => F32,
+ "f64" => F64,
+ "char" => Char,
+ "bool" | "true" | "false" => Bool,
+ "str" | "&str" => Str,
+ // See #80181 for why these don't have symbols associated.
+ "slice" => Slice,
+ "array" => Array,
+ "tuple" => Tuple,
+ "unit" => Unit,
+ "pointer" | "*const" | "*mut" => RawPointer,
+ "reference" | "&" | "&mut" => Reference,
+ "fn" => Fn,
+ "never" | "!" => Never,
+ _ => return None,
+ };
+ debug!("resolved primitives {:?}", prim);
+ Some(Res::Primitive(prim))
+}
+
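+/// Strip generic arguments from a path, e.g. turning `Vec<T>::new` or `Vec::<T>::new` into
+/// `Vec::new`. Returns an error for malformed generics, such as unbalanced angle brackets or
+/// fully-qualified syntax like `<Vec<T> as IntoIterator>::into_iter`.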
+fn strip_generics_from_path(path_str: &str) -> Result<String, MalformedGenerics> {
+ let mut stripped_segments = vec![];
+ let mut path = path_str.chars().peekable();
+ let mut segment = Vec::new();
+
+ while let Some(chr) = path.next() {
+ match chr {
+ ':' => {
+ if path.next_if_eq(&':').is_some() {
+ let stripped_segment =
+ strip_generics_from_path_segment(mem::take(&mut segment))?;
+ if !stripped_segment.is_empty() {
+ stripped_segments.push(stripped_segment);
+ }
+ } else {
+ return Err(MalformedGenerics::InvalidPathSeparator);
+ }
+ }
+ '<' => {
+ segment.push(chr);
+
+ match path.next() {
+ Some('<') => {
+ return Err(MalformedGenerics::TooManyAngleBrackets);
+ }
+ Some('>') => {
+ return Err(MalformedGenerics::EmptyAngleBrackets);
+ }
+ Some(chr) => {
+ segment.push(chr);
+
+ while let Some(chr) = path.next_if(|c| *c != '>') {
+ segment.push(chr);
+ }
+ }
+ None => break,
+ }
+ }
+ _ => segment.push(chr),
+ }
+ trace!("raw segment: {:?}", segment);
+ }
+
+ if !segment.is_empty() {
+ let stripped_segment = strip_generics_from_path_segment(segment)?;
+ if !stripped_segment.is_empty() {
+ stripped_segments.push(stripped_segment);
+ }
+ }
+
+ debug!("path_str: {:?}\nstripped segments: {:?}", path_str, &stripped_segments);
+
+ let stripped_path = stripped_segments.join("::");
+
+ if !stripped_path.is_empty() { Ok(stripped_path) } else { Err(MalformedGenerics::MissingType) }
+}
+
+fn strip_generics_from_path_segment(segment: Vec<char>) -> Result<String, MalformedGenerics> {
+ let mut stripped_segment = String::new();
+ let mut param_depth = 0;
+
+ let mut latest_generics_chunk = String::new();
+
+ for c in segment {
+ if c == '<' {
+ param_depth += 1;
+ latest_generics_chunk.clear();
+ } else if c == '>' {
+ param_depth -= 1;
+ if latest_generics_chunk.contains(" as ") {
+ // The segment tries to use fully-qualified syntax, which is currently unsupported.
+ // Give a helpful error message instead of completely ignoring the angle brackets.
+ return Err(MalformedGenerics::HasFullyQualifiedSyntax);
+ }
+ } else {
+ if param_depth == 0 {
+ stripped_segment.push(c);
+ } else {
+ latest_generics_chunk.push(c);
+ }
+ }
+ }
+
+ if param_depth == 0 {
+ Ok(stripped_segment)
+ } else {
+ // The segment has unbalanced angle brackets, e.g. `Vec<T` or `Vec<T>>`
+ Err(MalformedGenerics::UnbalancedAngleBrackets)
+ }
+}
diff --git a/src/librustdoc/passes/collect_intra_doc_links/early.rs b/src/librustdoc/passes/collect_intra_doc_links/early.rs
new file mode 100644
index 000000000..38cfd7a27
--- /dev/null
+++ b/src/librustdoc/passes/collect_intra_doc_links/early.rs
@@ -0,0 +1,405 @@
+use crate::clean::Attributes;
+use crate::core::ResolverCaches;
+use crate::passes::collect_intra_doc_links::preprocessed_markdown_links;
+use crate::passes::collect_intra_doc_links::{Disambiguator, PreprocessedMarkdownLink};
+
+use rustc_ast::visit::{self, AssocCtxt, Visitor};
+use rustc_ast::{self as ast, ItemKind};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def::Namespace::*;
+use rustc_hir::def::{DefKind, Namespace, Res};
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, CRATE_DEF_ID};
+use rustc_hir::TraitCandidate;
+use rustc_middle::ty::{DefIdTree, Visibility};
+use rustc_resolve::{ParentScope, Resolver};
+use rustc_session::config::Externs;
+use rustc_session::Session;
+use rustc_span::symbol::sym;
+use rustc_span::{Symbol, SyntaxContext};
+
+use std::collections::hash_map::Entry;
+use std::mem;
+
+pub(crate) fn early_resolve_intra_doc_links(
+ resolver: &mut Resolver<'_>,
+ sess: &Session,
+ krate: &ast::Crate,
+ externs: Externs,
+ document_private_items: bool,
+) -> ResolverCaches {
+ let parent_scope =
+ ParentScope::module(resolver.expect_module(CRATE_DEF_ID.to_def_id()), resolver);
+ let mut link_resolver = EarlyDocLinkResolver {
+ resolver,
+ sess,
+ parent_scope,
+ visited_mods: Default::default(),
+ markdown_links: Default::default(),
+ doc_link_resolutions: Default::default(),
+ traits_in_scope: Default::default(),
+ all_traits: Default::default(),
+ all_trait_impls: Default::default(),
+ all_macro_rules: Default::default(),
+ document_private_items,
+ };
+
+ // Overridden `visit_item` below doesn't apply to the crate root,
+ // so we have to visit its attributes and reexports separately.
+ link_resolver.resolve_doc_links_local(&krate.attrs);
+ link_resolver.process_module_children_or_reexports(CRATE_DEF_ID.to_def_id());
+ visit::walk_crate(&mut link_resolver, krate);
+ link_resolver.process_extern_impls();
+
+ // FIXME: somehow rustdoc is still missing crates even though we loaded all
+ // the known necessary crates. Load them all unconditionally until we find a way to fix this.
+ // DO NOT REMOVE THIS without first testing on the reproducer in
+ // https://github.com/jyn514/objr/commit/edcee7b8124abf0e4c63873e8422ff81beb11ebb
+ for (extern_name, _) in externs.iter().filter(|(_, entry)| entry.add_prelude) {
+ link_resolver.resolver.resolve_rustdoc_path(extern_name, TypeNS, parent_scope);
+ }
+
+ ResolverCaches {
+ markdown_links: Some(link_resolver.markdown_links),
+ doc_link_resolutions: link_resolver.doc_link_resolutions,
+ traits_in_scope: link_resolver.traits_in_scope,
+ all_traits: Some(link_resolver.all_traits),
+ all_trait_impls: Some(link_resolver.all_trait_impls),
+ all_macro_rules: link_resolver.all_macro_rules,
+ }
+}
+
+fn doc_attrs<'a>(attrs: impl Iterator<Item = &'a ast::Attribute>) -> Attributes {
+ Attributes::from_ast_iter(attrs.map(|attr| (attr, None)), true)
+}
+
+struct EarlyDocLinkResolver<'r, 'ra> {
+ resolver: &'r mut Resolver<'ra>,
+ sess: &'r Session,
+ parent_scope: ParentScope<'ra>,
+ visited_mods: DefIdSet,
+ markdown_links: FxHashMap<String, Vec<PreprocessedMarkdownLink>>,
+ doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option<Res<ast::NodeId>>>,
+ traits_in_scope: DefIdMap<Vec<TraitCandidate>>,
+ all_traits: Vec<DefId>,
+ all_trait_impls: Vec<DefId>,
+ all_macro_rules: FxHashMap<Symbol, Res<ast::NodeId>>,
+ document_private_items: bool,
+}
+
+impl<'ra> EarlyDocLinkResolver<'_, 'ra> {
+ fn add_traits_in_scope(&mut self, def_id: DefId) {
+ // Calls to `traits_in_scope` are expensive, so try to avoid them if at all possible.
+ // Keys in the `traits_in_scope` cache are always module IDs.
+ if let Entry::Vacant(entry) = self.traits_in_scope.entry(def_id) {
+ let module = self.resolver.get_nearest_non_block_module(def_id);
+ let module_id = module.def_id();
+ let entry = if module_id == def_id {
+ Some(entry)
+ } else if let Entry::Vacant(entry) = self.traits_in_scope.entry(module_id) {
+ Some(entry)
+ } else {
+ None
+ };
+ if let Some(entry) = entry {
+ entry.insert(self.resolver.traits_in_scope(
+ None,
+ &ParentScope::module(module, self.resolver),
+ SyntaxContext::root(),
+ None,
+ ));
+ }
+ }
+ }
+
+ /// Add traits in scope for links in impls collected by the `collect-intra-doc-links` pass.
+ /// That pass filters impls using type-based information, but we don't yet have such
+ /// information here, so we just conservatively calculate traits in scope for *all* modules
+ /// having impls in them.
+ fn process_extern_impls(&mut self) {
+ // Resolving links in already existing crates may trigger loading of new crates.
+ let mut start_cnum = 0;
+ loop {
+ let crates = Vec::from_iter(self.resolver.cstore().crates_untracked());
+ for &cnum in &crates[start_cnum..] {
+ let all_traits =
+ Vec::from_iter(self.resolver.cstore().traits_in_crate_untracked(cnum));
+ let all_trait_impls =
+ Vec::from_iter(self.resolver.cstore().trait_impls_in_crate_untracked(cnum));
+ let all_inherent_impls =
+ Vec::from_iter(self.resolver.cstore().inherent_impls_in_crate_untracked(cnum));
+ let all_incoherent_impls = Vec::from_iter(
+ self.resolver.cstore().incoherent_impls_in_crate_untracked(cnum),
+ );
+
+ // Querying traits in scope is expensive, so we try to prune the impl and trait lists
+ // using privacy: private traits and impls from other crates are never documented in
+ // the current crate, and links in their doc comments are not resolved.
+ for &def_id in &all_traits {
+ if self.resolver.cstore().visibility_untracked(def_id).is_public() {
+ self.resolve_doc_links_extern_impl(def_id, false);
+ }
+ }
+ for &(trait_def_id, impl_def_id, simplified_self_ty) in &all_trait_impls {
+ if self.resolver.cstore().visibility_untracked(trait_def_id).is_public()
+ && simplified_self_ty.and_then(|ty| ty.def()).map_or(true, |ty_def_id| {
+ self.resolver.cstore().visibility_untracked(ty_def_id).is_public()
+ })
+ {
+ self.resolve_doc_links_extern_impl(impl_def_id, false);
+ }
+ }
+ for (ty_def_id, impl_def_id) in all_inherent_impls {
+ if self.resolver.cstore().visibility_untracked(ty_def_id).is_public() {
+ self.resolve_doc_links_extern_impl(impl_def_id, true);
+ }
+ }
+ for impl_def_id in all_incoherent_impls {
+ self.resolve_doc_links_extern_impl(impl_def_id, true);
+ }
+
+ self.all_traits.extend(all_traits);
+ self.all_trait_impls
+ .extend(all_trait_impls.into_iter().map(|(_, def_id, _)| def_id));
+ }
+
+ if crates.len() > start_cnum {
+ start_cnum = crates.len();
+ } else {
+ break;
+ }
+ }
+ }
+
+ fn resolve_doc_links_extern_impl(&mut self, def_id: DefId, is_inherent: bool) {
+ self.resolve_doc_links_extern_outer_fixme(def_id, def_id);
+ let assoc_item_def_ids = Vec::from_iter(
+ self.resolver.cstore().associated_item_def_ids_untracked(def_id, self.sess),
+ );
+ for assoc_def_id in assoc_item_def_ids {
+ if !is_inherent || self.resolver.cstore().visibility_untracked(assoc_def_id).is_public()
+ {
+ self.resolve_doc_links_extern_outer_fixme(assoc_def_id, def_id);
+ }
+ }
+ }
+
+ // FIXME: replace all uses with `resolve_doc_links_extern_outer` to actually resolve links, not
+ // just add traits in scope. This may be expensive and require benchmarking and optimization.
+ fn resolve_doc_links_extern_outer_fixme(&mut self, def_id: DefId, scope_id: DefId) {
+ if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
+ return;
+ }
+ if let Some(parent_id) = self.resolver.opt_parent(scope_id) {
+ self.add_traits_in_scope(parent_id);
+ }
+ }
+
+ fn resolve_doc_links_extern_outer(&mut self, def_id: DefId, scope_id: DefId) {
+ if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
+ return;
+ }
+ let attrs = Vec::from_iter(self.resolver.cstore().item_attrs_untracked(def_id, self.sess));
+ let parent_scope = ParentScope::module(
+ self.resolver.get_nearest_non_block_module(
+ self.resolver.opt_parent(scope_id).unwrap_or(scope_id),
+ ),
+ self.resolver,
+ );
+ self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope);
+ }
+
+ fn resolve_doc_links_extern_inner(&mut self, def_id: DefId) {
+ if !self.resolver.cstore().may_have_doc_links_untracked(def_id) {
+ return;
+ }
+ let attrs = Vec::from_iter(self.resolver.cstore().item_attrs_untracked(def_id, self.sess));
+ let parent_scope = ParentScope::module(self.resolver.expect_module(def_id), self.resolver);
+ self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope);
+ }
+
+ fn resolve_doc_links_local(&mut self, attrs: &[ast::Attribute]) {
+ if !attrs.iter().any(|attr| attr.may_have_doc_links()) {
+ return;
+ }
+ self.resolve_doc_links(doc_attrs(attrs.iter()), self.parent_scope);
+ }
+
+ fn resolve_and_cache(
+ &mut self,
+ path_str: &str,
+ ns: Namespace,
+ parent_scope: &ParentScope<'ra>,
+ ) -> bool {
+ // FIXME: This caching may be incorrect in case of multiple `macro_rules`
+ // items with the same name in the same module.
+ self.doc_link_resolutions
+ .entry((Symbol::intern(path_str), ns, parent_scope.module.def_id()))
+ .or_insert_with_key(|(path, ns, _)| {
+ self.resolver.resolve_rustdoc_path(path.as_str(), *ns, *parent_scope)
+ })
+ .is_some()
+ }
+
+ fn resolve_doc_links(&mut self, attrs: Attributes, parent_scope: ParentScope<'ra>) {
+ let mut need_traits_in_scope = false;
+ for (doc_module, doc) in attrs.prepare_to_doc_link_resolution() {
+ assert_eq!(doc_module, None);
+ let mut tmp_links = mem::take(&mut self.markdown_links);
+ let links =
+ tmp_links.entry(doc).or_insert_with_key(|doc| preprocessed_markdown_links(doc));
+ for PreprocessedMarkdownLink(pp_link, _) in links {
+ if let Ok(pinfo) = pp_link {
+ // The logic here is a conservative approximation for path resolution in
+ // `resolve_with_disambiguator`.
+ if let Some(ns) = pinfo.disambiguator.map(Disambiguator::ns) {
+ if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) {
+ continue;
+ }
+ }
+
+ // Resolve all namespaces due to no disambiguator or for diagnostics.
+ let mut any_resolved = false;
+ let mut need_assoc = false;
+ for ns in [TypeNS, ValueNS, MacroNS] {
+ if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) {
+ any_resolved = true;
+ } else if ns != MacroNS {
+ need_assoc = true;
+ }
+ }
+
+ // Resolve all prefixes for type-relative resolution or for diagnostics.
+ if need_assoc || !any_resolved {
+ let mut path = &pinfo.path_str[..];
+ while let Some(idx) = path.rfind("::") {
+ path = &path[..idx];
+ need_traits_in_scope = true;
+ for ns in [TypeNS, ValueNS, MacroNS] {
+ self.resolve_and_cache(path, ns, &parent_scope);
+ }
+ }
+ }
+ }
+ }
+ self.markdown_links = tmp_links;
+ }
+
+ if need_traits_in_scope {
+ self.add_traits_in_scope(parent_scope.module.def_id());
+ }
+ }
+
+ /// When re-exports are inlined, they are replaced with the items they refer to. Those items
+ /// may have links in their doc comments, and those links are resolved at the item's definition
+ /// site, so we need to know which traits are in scope at that definition site.
+ fn process_module_children_or_reexports(&mut self, module_id: DefId) {
+ if !self.visited_mods.insert(module_id) {
+ return; // avoid infinite recursion
+ }
+
+ for child in self.resolver.module_children_or_reexports(module_id) {
+ // This condition should give a superset of `denied` from `fn clean_use_statement`.
+ if child.vis.is_public()
+ || self.document_private_items
+ && child.vis != Visibility::Restricted(module_id)
+ && module_id.is_local()
+ {
+ if let Some(def_id) = child.res.opt_def_id() && !def_id.is_local() {
+ let scope_id = match child.res {
+ Res::Def(DefKind::Variant, ..) => self.resolver.parent(def_id),
+ _ => def_id,
+ };
+ self.resolve_doc_links_extern_outer(def_id, scope_id); // Outer attribute scope
+ if let Res::Def(DefKind::Mod, ..) = child.res {
+ self.resolve_doc_links_extern_inner(def_id); // Inner attribute scope
+ }
+ // `DefKind::Trait`s are processed in `process_extern_impls`.
+ if let Res::Def(DefKind::Mod | DefKind::Enum, ..) = child.res {
+ self.process_module_children_or_reexports(def_id);
+ }
+ if let Res::Def(DefKind::Struct | DefKind::Union | DefKind::Variant, _) =
+ child.res
+ {
+ let field_def_ids = Vec::from_iter(
+ self.resolver
+ .cstore()
+ .associated_item_def_ids_untracked(def_id, self.sess),
+ );
+ for field_def_id in field_def_ids {
+ self.resolve_doc_links_extern_outer(field_def_id, scope_id);
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+impl Visitor<'_> for EarlyDocLinkResolver<'_, '_> {
+ fn visit_item(&mut self, item: &ast::Item) {
+ self.resolve_doc_links_local(&item.attrs); // Outer attribute scope
+ if let ItemKind::Mod(..) = item.kind {
+ let module_def_id = self.resolver.local_def_id(item.id).to_def_id();
+ let module = self.resolver.expect_module(module_def_id);
+ let old_module = mem::replace(&mut self.parent_scope.module, module);
+ let old_macro_rules = self.parent_scope.macro_rules;
+ self.resolve_doc_links_local(&item.attrs); // Inner attribute scope
+ self.process_module_children_or_reexports(module_def_id);
+ visit::walk_item(self, item);
+ if item
+ .attrs
+ .iter()
+ .all(|attr| !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape))
+ {
+ self.parent_scope.macro_rules = old_macro_rules;
+ }
+ self.parent_scope.module = old_module;
+ } else {
+ match &item.kind {
+ ItemKind::Trait(..) => {
+ self.all_traits.push(self.resolver.local_def_id(item.id).to_def_id());
+ }
+ ItemKind::Impl(box ast::Impl { of_trait: Some(..), .. }) => {
+ self.all_trait_impls.push(self.resolver.local_def_id(item.id).to_def_id());
+ }
+ ItemKind::MacroDef(macro_def) if macro_def.macro_rules => {
+ let (macro_rules_scope, res) =
+ self.resolver.macro_rules_scope(self.resolver.local_def_id(item.id));
+ self.parent_scope.macro_rules = macro_rules_scope;
+ self.all_macro_rules.insert(item.ident.name, res);
+ }
+ _ => {}
+ }
+ visit::walk_item(self, item);
+ }
+ }
+
+ fn visit_assoc_item(&mut self, item: &ast::AssocItem, ctxt: AssocCtxt) {
+ self.resolve_doc_links_local(&item.attrs);
+ visit::walk_assoc_item(self, item, ctxt)
+ }
+
+ fn visit_foreign_item(&mut self, item: &ast::ForeignItem) {
+ self.resolve_doc_links_local(&item.attrs);
+ visit::walk_foreign_item(self, item)
+ }
+
+ fn visit_variant(&mut self, v: &ast::Variant) {
+ self.resolve_doc_links_local(&v.attrs);
+ visit::walk_variant(self, v)
+ }
+
+ fn visit_field_def(&mut self, field: &ast::FieldDef) {
+ self.resolve_doc_links_local(&field.attrs);
+ visit::walk_field_def(self, field)
+ }
+
+ fn visit_block(&mut self, block: &ast::Block) {
+ let old_macro_rules = self.parent_scope.macro_rules;
+ visit::walk_block(self, block);
+ self.parent_scope.macro_rules = old_macro_rules;
+ }
+
+ // NOTE: if doc-comments are ever allowed on other nodes (e.g. function parameters),
+ // then this will have to implement other visitor methods too.
+}
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
new file mode 100644
index 000000000..6b699c790
--- /dev/null
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -0,0 +1,273 @@
+//! Collects trait impls for each item in the crate. For example, if a crate
+//! defines a struct that implements a trait, this pass will note that the
+//! struct implements that trait.
+use super::Pass;
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::formats::cache::Cache;
+use crate::visit::DocVisitor;
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def_id::DefId;
+use rustc_middle::ty::{self, DefIdTree};
+use rustc_span::symbol::sym;
+
+pub(crate) const COLLECT_TRAIT_IMPLS: Pass = Pass {
+ name: "collect-trait-impls",
+ run: collect_trait_impls,
+ description: "retrieves trait impls for items in the crate",
+};
+
+pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let synth_impls = cx.sess().time("collect_synthetic_impls", || {
+ let mut synth = SyntheticImplCollector { cx, impls: Vec::new() };
+ synth.visit_crate(&krate);
+ synth.impls
+ });
+
+ let prims: FxHashSet<PrimitiveType> = krate.primitives.iter().map(|p| p.1).collect();
+
+ let crate_items = {
+ let mut coll = ItemCollector::new();
+ cx.sess().time("collect_items_for_trait_impls", || coll.visit_crate(&krate));
+ coll.items
+ };
+
+ let mut new_items_external = Vec::new();
+ let mut new_items_local = Vec::new();
+
+ // External trait impls.
+ cx.with_all_trait_impls(|cx, all_trait_impls| {
+ let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
+ for &impl_def_id in all_trait_impls.iter().skip_while(|def_id| def_id.is_local()) {
+ inline::build_impl(cx, None, impl_def_id, None, &mut new_items_external);
+ }
+ });
+
+ // Local trait impls.
+ cx.with_all_trait_impls(|cx, all_trait_impls| {
+ let _prof_timer = cx.tcx.sess.prof.generic_activity("build_local_trait_impls");
+ let mut attr_buf = Vec::new();
+ for &impl_def_id in all_trait_impls.iter().take_while(|def_id| def_id.is_local()) {
+ let mut parent = Some(cx.tcx.parent(impl_def_id));
+ while let Some(did) = parent {
+ attr_buf.extend(
+ cx.tcx
+ .get_attrs(did, sym::doc)
+ .filter(|attr| {
+ if let Some([attr]) = attr.meta_item_list().as_deref() {
+ attr.has_name(sym::cfg)
+ } else {
+ false
+ }
+ })
+ .cloned(),
+ );
+ parent = cx.tcx.opt_parent(did);
+ }
+ inline::build_impl(cx, None, impl_def_id, Some(&attr_buf), &mut new_items_local);
+ attr_buf.clear();
+ }
+ });
+
+ cx.tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
+ for def_id in PrimitiveType::all_impls(cx.tcx) {
+ // Try to inline primitive impls from other crates.
+ if !def_id.is_local() {
+ inline::build_impl(cx, None, def_id, None, &mut new_items_external);
+ }
+ }
+ for (prim, did) in PrimitiveType::primitive_locations(cx.tcx) {
+ // Do not calculate blanket impl list for docs that are not going to be rendered.
+ // While the `impl` blocks themselves are only in `libcore`, the module with `doc`
+ // attached is directly included in `libstd` as well.
+ let tcx = cx.tcx;
+ if did.is_local() {
+ for def_id in prim.impls(tcx).filter(|def_id| {
+ // Avoid including impl blocks with filled-in generics.
+ // https://github.com/rust-lang/rust/issues/94937
+ //
+ // FIXME(notriddle): https://github.com/rust-lang/rust/issues/97129
+ //
+ // This tactic of using inherent impl blocks for getting
+ // auto traits and blanket impls is a hack. What we really
+ // want is to check if `[T]` impls `Send`, which has
+ // nothing to do with the inherent impl.
+ //
+                    // Rustdoc currently uses these `impl` blocks as a source of
+                    // the `Ty`, as well as the `ParamEnv`, `SubstsRef`, and
+                    // `Generics`. To avoid relying on the `impl` block, these
+                    // things would need to be created from whole cloth, in a
+                    // form that is valid for use in type inference.
+ let ty = tcx.type_of(def_id);
+ match ty.kind() {
+ ty::Slice(ty)
+ | ty::Ref(_, ty, _)
+ | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
+ matches!(ty.kind(), ty::Param(..))
+ }
+ ty::Tuple(tys) => tys.iter().all(|ty| matches!(ty.kind(), ty::Param(..))),
+ _ => true,
+ }
+ }) {
+ let impls = get_auto_trait_and_blanket_impls(cx, def_id);
+ new_items_external.extend(impls.filter(|i| cx.inlined.insert(i.item_id)));
+ }
+ }
+ }
+ });
+
+ let mut cleaner = BadImplStripper { prims, items: crate_items, cache: &cx.cache };
+ let mut type_did_to_deref_target: FxHashMap<DefId, &Type> = FxHashMap::default();
+
+ // Follow all `Deref` targets of included items and recursively add them as valid
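+    // (Illustrative, not real items:) with `impl Deref<Target = B> for A` and
+    // `impl Deref<Target = C> for B`, starting from `A` this walk marks both `B`
+    // and `C` as valid, and the `targets` set stops the recursion on cycles such
+    // as `impl Deref<Target = A> for A`.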
+ fn add_deref_target(
+ cx: &DocContext<'_>,
+ map: &FxHashMap<DefId, &Type>,
+ cleaner: &mut BadImplStripper<'_>,
+ targets: &mut FxHashSet<DefId>,
+ type_did: DefId,
+ ) {
+ if let Some(target) = map.get(&type_did) {
+ debug!("add_deref_target: type {:?}, target {:?}", type_did, target);
+ if let Some(target_prim) = target.primitive_type() {
+ cleaner.prims.insert(target_prim);
+ } else if let Some(target_did) = target.def_id(&cx.cache) {
+ // `impl Deref<Target = S> for S`
+ if !targets.insert(target_did) {
+ // Avoid infinite cycles
+ return;
+ }
+ cleaner.items.insert(target_did.into());
+ add_deref_target(cx, map, cleaner, targets, target_did);
+ }
+ }
+ }
+
+ // scan through included items ahead of time to splice in Deref targets to the "valid" sets
+ for it in new_items_external.iter().chain(new_items_local.iter()) {
+ if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind {
+ if trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait()
+ && cleaner.keep_impl(for_, true)
+ {
+ let target = items
+ .iter()
+ .find_map(|item| match *item.kind {
+ AssocTypeItem(ref t, _) => Some(&t.type_),
+ _ => None,
+ })
+ .expect("Deref impl without Target type");
+
+ if let Some(prim) = target.primitive_type() {
+ cleaner.prims.insert(prim);
+ } else if let Some(did) = target.def_id(&cx.cache) {
+ cleaner.items.insert(did.into());
+ }
+ if let Some(for_did) = for_.def_id(&cx.cache) {
+ if type_did_to_deref_target.insert(for_did, target).is_none() {
+                        // Only the `DefId` portion of the `Type` is known to be the same for both
+                        // the `Deref` target type and the type the impl is for, so this map is
+                        // keyed by `DefId` and, for convenience, uses a special cleaner that
+                        // accepts `DefId`s directly.
+ if cleaner.keep_impl_with_def_id(for_did.into()) {
+ let mut targets = FxHashSet::default();
+ targets.insert(for_did);
+ add_deref_target(
+ cx,
+ &type_did_to_deref_target,
+ &mut cleaner,
+ &mut targets,
+ for_did,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Filter out external items that are not needed
+ new_items_external.retain(|it| {
+ if let ImplItem(box Impl { ref for_, ref trait_, ref kind, .. }) = *it.kind {
+ cleaner.keep_impl(
+ for_,
+ trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait(),
+ ) || trait_.as_ref().map_or(false, |t| cleaner.keep_impl_with_def_id(t.def_id().into()))
+ || kind.is_blanket()
+ } else {
+ true
+ }
+ });
+
+ if let ModuleItem(Module { items, .. }) = &mut *krate.module.kind {
+ items.extend(synth_impls);
+ items.extend(new_items_external);
+ items.extend(new_items_local);
+ } else {
+ panic!("collect-trait-impls can't run");
+ };
+
+ krate
+}
+
+struct SyntheticImplCollector<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+ impls: Vec<Item>,
+}
+
+impl<'a, 'tcx> DocVisitor for SyntheticImplCollector<'a, 'tcx> {
+ fn visit_item(&mut self, i: &Item) {
+ if i.is_struct() || i.is_enum() || i.is_union() {
+ // FIXME(eddyb) is this `doc(hidden)` check needed?
+ if !self.cx.tcx.is_doc_hidden(i.item_id.expect_def_id()) {
+ self.impls
+ .extend(get_auto_trait_and_blanket_impls(self.cx, i.item_id.expect_def_id()));
+ }
+ }
+
+ self.visit_item_recur(i)
+ }
+}
+
+#[derive(Default)]
+struct ItemCollector {
+ items: FxHashSet<ItemId>,
+}
+
+impl ItemCollector {
+ fn new() -> Self {
+ Self::default()
+ }
+}
+
+impl DocVisitor for ItemCollector {
+ fn visit_item(&mut self, i: &Item) {
+ self.items.insert(i.item_id);
+
+ self.visit_item_recur(i)
+ }
+}
+
+struct BadImplStripper<'a> {
+ prims: FxHashSet<PrimitiveType>,
+ items: FxHashSet<ItemId>,
+ cache: &'a Cache,
+}
+
+impl<'a> BadImplStripper<'a> {
+ fn keep_impl(&self, ty: &Type, is_deref: bool) -> bool {
+ if let Generic(_) = ty {
+ // keep impls made on generics
+ true
+ } else if let Some(prim) = ty.primitive_type() {
+ self.prims.contains(&prim)
+ } else if let Some(did) = ty.def_id(self.cache) {
+ is_deref || self.keep_impl_with_def_id(did.into())
+ } else {
+ false
+ }
+ }
+
+ fn keep_impl_with_def_id(&self, item_id: ItemId) -> bool {
+ self.items.contains(&item_id)
+ }
+}
diff --git a/src/librustdoc/passes/html_tags.rs b/src/librustdoc/passes/html_tags.rs
new file mode 100644
index 000000000..f3a3c853c
--- /dev/null
+++ b/src/librustdoc/passes/html_tags.rs
@@ -0,0 +1,303 @@
+//! Detects invalid HTML (like an unclosed `<span>`) in doc comments.
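+//!
+//! For example (illustrative), doc comments such as `/// Uses an unclosed <span`
+//! or `/// <p>text</div>` trigger the `INVALID_HTML_TAGS` lint defined in
+//! `crate::lint`.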
+use super::Pass;
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::html::markdown::main_body_opts;
+use crate::visit::DocVisitor;
+
+use pulldown_cmark::{BrokenLink, Event, LinkType, Parser, Tag};
+
+use std::iter::Peekable;
+use std::ops::Range;
+use std::str::CharIndices;
+
+pub(crate) const CHECK_INVALID_HTML_TAGS: Pass = Pass {
+ name: "check-invalid-html-tags",
+ run: check_invalid_html_tags,
+ description: "detects invalid HTML tags in doc comments",
+};
+
+struct InvalidHtmlTagsLinter<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+}
+
+pub(crate) fn check_invalid_html_tags(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ if cx.tcx.sess.is_nightly_build() {
+ let mut coll = InvalidHtmlTagsLinter { cx };
+ coll.visit_crate(&krate);
+ }
+ krate
+}
+
+const ALLOWED_UNCLOSED: &[&str] = &[
+ "area", "base", "br", "col", "embed", "hr", "img", "input", "keygen", "link", "meta", "param",
+ "source", "track", "wbr",
+];
+
+fn drop_tag(
+ tags: &mut Vec<(String, Range<usize>)>,
+ tag_name: String,
+ range: Range<usize>,
+ f: &impl Fn(&str, &Range<usize>, bool),
+) {
+ let tag_name_low = tag_name.to_lowercase();
+ if let Some(pos) = tags.iter().rposition(|(t, _)| t.to_lowercase() == tag_name_low) {
+ // If the tag is nested inside a "<script>" or a "<style>" tag, no warning should
+ // be emitted.
+ let should_not_warn = tags.iter().take(pos + 1).any(|(at, _)| {
+ let at = at.to_lowercase();
+ at == "script" || at == "style"
+ });
+ for (last_tag_name, last_tag_span) in tags.drain(pos + 1..) {
+ if should_not_warn {
+ continue;
+ }
+ let last_tag_name_low = last_tag_name.to_lowercase();
+ if ALLOWED_UNCLOSED.contains(&last_tag_name_low.as_str()) {
+ continue;
+ }
+            // `tags` is used as a queue, meaning that everything after `pos` is still inside it.
+            // So `<h2><h3></h2>` will look like `["h2", "h3"]`. So when closing `h2`, we will
+            // still have `h3`, meaning that tag wasn't closed as it should have been.
+ f(&format!("unclosed HTML tag `{}`", last_tag_name), &last_tag_span, true);
+ }
+ // Remove the `tag_name` that was originally closed
+ tags.pop();
+ } else {
+        // This can happen, for example, in this case: `<h2></script></h2>` (the `h2` tag
+        // isn't required, but it helps with the visualization).
+ f(&format!("unopened HTML tag `{}`", tag_name), &range, false);
+ }
+}
+
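+// Walks backwards from `end_pos` over `::`-separated identifier segments.
+// (Illustrative:) for `text = "uses std::vec::Vec<i32>"` with `end_pos` at the `<`,
+// this returns the byte position of the `s` in `std`.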
+fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
+ use rustc_lexer::{is_id_continue, is_id_start};
+ let mut current_pos = end_pos;
+ loop {
+ if current_pos >= 2 && text[..current_pos].ends_with("::") {
+ current_pos -= 2;
+ }
+ let new_pos = text[..current_pos]
+ .char_indices()
+ .rev()
+ .take_while(|(_, c)| is_id_start(*c) || is_id_continue(*c))
+ .reduce(|_accum, item| item)
+ .and_then(|(new_pos, c)| is_id_start(c).then_some(new_pos));
+ if let Some(new_pos) = new_pos {
+ if current_pos != new_pos {
+ current_pos = new_pos;
+ continue;
+ }
+ }
+ break;
+ }
+ if current_pos == end_pos { None } else { Some(current_pos) }
+}
+
+fn is_valid_for_html_tag_name(c: char, is_empty: bool) -> bool {
+ // https://spec.commonmark.org/0.30/#raw-html
+ //
+ // > A tag name consists of an ASCII letter followed by zero or more ASCII letters, digits, or
+ // > hyphens (-).
+ c.is_ascii_alphabetic() || !is_empty && (c == '-' || c.is_ascii_digit())
+}
+
+fn extract_html_tag(
+ tags: &mut Vec<(String, Range<usize>)>,
+ text: &str,
+ range: &Range<usize>,
+ start_pos: usize,
+ iter: &mut Peekable<CharIndices<'_>>,
+ f: &impl Fn(&str, &Range<usize>, bool),
+) {
+ let mut tag_name = String::new();
+ let mut is_closing = false;
+ let mut prev_pos = start_pos;
+
+ loop {
+ let (pos, c) = match iter.peek() {
+ Some((pos, c)) => (*pos, *c),
+            // In case we reached the end of the doc comment, we want to check whether it's an
+            // unclosed HTML tag. For example "/// <h3".
+ None => (prev_pos, '\0'),
+ };
+ prev_pos = pos;
+ // Checking if this is a closing tag (like `</a>` for `<a>`).
+ if c == '/' && tag_name.is_empty() {
+ is_closing = true;
+ } else if is_valid_for_html_tag_name(c, tag_name.is_empty()) {
+ tag_name.push(c);
+ } else {
+ if !tag_name.is_empty() {
+ let mut r = Range { start: range.start + start_pos, end: range.start + pos };
+ if c == '>' {
+                    // In case we have a tag without attributes, we can consider the span to
+                    // refer to it fully.
+ r.end += 1;
+ }
+ if is_closing {
+                    // In case we have "</div >" or even "</div      >" (i.e. with extra
+                    // whitespace before the closing `>`).
+ if c != '>' {
+ if !c.is_whitespace() {
+ // It seems like it's not a valid HTML tag.
+ break;
+ }
+ let mut found = false;
+ for (new_pos, c) in text[pos..].char_indices() {
+ if !c.is_whitespace() {
+ if c == '>' {
+ r.end = range.start + new_pos + 1;
+ found = true;
+ }
+ break;
+ }
+ }
+ if !found {
+ break;
+ }
+ }
+ drop_tag(tags, tag_name, r, f);
+ } else {
+ tags.push((tag_name, r));
+ }
+ }
+ break;
+ }
+ iter.next();
+ }
+}
+
+fn extract_tags(
+ tags: &mut Vec<(String, Range<usize>)>,
+ text: &str,
+ range: Range<usize>,
+ is_in_comment: &mut Option<Range<usize>>,
+ f: &impl Fn(&str, &Range<usize>, bool),
+) {
+ let mut iter = text.char_indices().peekable();
+
+ while let Some((start_pos, c)) = iter.next() {
+ if is_in_comment.is_some() {
+ if text[start_pos..].starts_with("-->") {
+ *is_in_comment = None;
+ }
+ } else if c == '<' {
+ if text[start_pos..].starts_with("<!--") {
+ // We skip the "!--" part. (Once `advance_by` is stable, might be nice to use it!)
+ iter.next();
+ iter.next();
+ iter.next();
+ *is_in_comment = Some(Range {
+ start: range.start + start_pos,
+ end: range.start + start_pos + 3,
+ });
+ } else {
+ extract_html_tag(tags, text, &range, start_pos, &mut iter, f);
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> DocVisitor for InvalidHtmlTagsLinter<'a, 'tcx> {
+ fn visit_item(&mut self, item: &Item) {
+ let tcx = self.cx.tcx;
+ let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
+ // If non-local, no need to check anything.
+ else { return };
+ let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ if !dox.is_empty() {
+ let report_diag = |msg: &str, range: &Range<usize>, is_open_tag: bool| {
+ let sp = match super::source_span_for_markdown_range(tcx, &dox, range, &item.attrs)
+ {
+ Some(sp) => sp,
+ None => item.attr_span(tcx),
+ };
+ tcx.struct_span_lint_hir(crate::lint::INVALID_HTML_TAGS, hir_id, sp, |lint| {
+ use rustc_lint_defs::Applicability;
+ let mut diag = lint.build(msg);
+ // If a tag looks like `<this>`, it might actually be a generic.
+ // We don't try to detect stuff `<like, this>` because that's not valid HTML,
+ // and we don't try to detect stuff `<like this>` because that's not valid Rust.
+ if let Some(Some(generics_start)) = (is_open_tag
+ && dox[..range.end].ends_with('>'))
+ .then(|| extract_path_backwards(&dox, range.start))
+ {
+ let generics_sp = match super::source_span_for_markdown_range(
+ tcx,
+ &dox,
+ &(generics_start..range.end),
+ &item.attrs,
+ ) {
+ Some(sp) => sp,
+ None => item.attr_span(tcx),
+ };
+ // multipart form is chosen here because ``Vec<i32>`` would be confusing.
+ diag.multipart_suggestion(
+ "try marking as source code",
+ vec![
+ (generics_sp.shrink_to_lo(), String::from("`")),
+ (generics_sp.shrink_to_hi(), String::from("`")),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ }
+ diag.emit()
+ });
+ };
+
+ let mut tags = Vec::new();
+ let mut is_in_comment = None;
+ let mut in_code_block = false;
+
+ let link_names = item.link_names(&self.cx.cache);
+
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ if let Some(link) =
+ link_names.iter().find(|link| *link.original_text == *broken_link.reference)
+ {
+ Some((link.href.as_str().into(), link.new_text.as_str().into()))
+ } else if matches!(
+ &broken_link.link_type,
+ LinkType::Reference | LinkType::ReferenceUnknown
+ ) {
+                    // If the link is shaped [like][this], suppress any broken HTML in the [this]
+                    // part. The `broken_intra_doc_links` lint will report typos in there anyway.
+ Some((
+ broken_link.reference.to_string().into(),
+ broken_link.reference.to_string().into(),
+ ))
+ } else {
+ None
+ }
+ };
+
+ let p =
+ Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ .into_offset_iter();
+
+ for (event, range) in p {
+ match event {
+ Event::Start(Tag::CodeBlock(_)) => in_code_block = true,
+ Event::Html(text) | Event::Text(text) if !in_code_block => {
+ extract_tags(&mut tags, &text, range, &mut is_in_comment, &report_diag)
+ }
+ Event::End(Tag::CodeBlock(_)) => in_code_block = false,
+ _ => {}
+ }
+ }
+
+ for (tag, range) in tags.iter().filter(|(t, _)| {
+ let t = t.to_lowercase();
+ !ALLOWED_UNCLOSED.contains(&t.as_str())
+ }) {
+ report_diag(&format!("unclosed HTML tag `{}`", tag), range, true);
+ }
+
+ if let Some(range) = is_in_comment {
+ report_diag("Unclosed HTML comment", &range, false);
+ }
+ }
+
+ self.visit_item_recur(item)
+ }
+}
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
new file mode 100644
index 000000000..f81b38ea3
--- /dev/null
+++ b/src/librustdoc/passes/mod.rs
@@ -0,0 +1,212 @@
+//! Contains information about "passes", used to modify crate information during the documentation
+//! process.
+
+use rustc_middle::ty::TyCtxt;
+use rustc_span::{InnerSpan, Span, DUMMY_SP};
+use std::ops::Range;
+
+use self::Condition::*;
+use crate::clean::{self, DocFragmentKind};
+use crate::core::DocContext;
+
+mod stripper;
+pub(crate) use stripper::*;
+
+mod bare_urls;
+pub(crate) use self::bare_urls::CHECK_BARE_URLS;
+
+mod strip_hidden;
+pub(crate) use self::strip_hidden::STRIP_HIDDEN;
+
+mod strip_private;
+pub(crate) use self::strip_private::STRIP_PRIVATE;
+
+mod strip_priv_imports;
+pub(crate) use self::strip_priv_imports::STRIP_PRIV_IMPORTS;
+
+mod propagate_doc_cfg;
+pub(crate) use self::propagate_doc_cfg::PROPAGATE_DOC_CFG;
+
+pub(crate) mod collect_intra_doc_links;
+pub(crate) use self::collect_intra_doc_links::COLLECT_INTRA_DOC_LINKS;
+
+mod check_doc_test_visibility;
+pub(crate) use self::check_doc_test_visibility::CHECK_DOC_TEST_VISIBILITY;
+
+mod collect_trait_impls;
+pub(crate) use self::collect_trait_impls::COLLECT_TRAIT_IMPLS;
+
+mod check_code_block_syntax;
+pub(crate) use self::check_code_block_syntax::CHECK_CODE_BLOCK_SYNTAX;
+
+mod calculate_doc_coverage;
+pub(crate) use self::calculate_doc_coverage::CALCULATE_DOC_COVERAGE;
+
+mod html_tags;
+pub(crate) use self::html_tags::CHECK_INVALID_HTML_TAGS;
+
+/// A single pass over the cleaned documentation.
+///
+/// Runs in the compiler context, so it has access to types and traits and the like.
+#[derive(Copy, Clone)]
+pub(crate) struct Pass {
+ pub(crate) name: &'static str,
+ pub(crate) run: fn(clean::Crate, &mut DocContext<'_>) -> clean::Crate,
+ pub(crate) description: &'static str,
+}
+
+/// In a list of passes, a pass that may or may not need to be run depending on options.
+#[derive(Copy, Clone)]
+pub(crate) struct ConditionalPass {
+ pub(crate) pass: Pass,
+ pub(crate) condition: Condition,
+}
+
+/// How to decide whether to run a conditional pass.
+#[derive(Copy, Clone)]
+pub(crate) enum Condition {
+ Always,
+ /// When `--document-private-items` is passed.
+ WhenDocumentPrivate,
+ /// When `--document-private-items` is not passed.
+ WhenNotDocumentPrivate,
+ /// When `--document-hidden-items` is not passed.
+ WhenNotDocumentHidden,
+}
+
+/// The full list of passes.
+pub(crate) const PASSES: &[Pass] = &[
+ CHECK_DOC_TEST_VISIBILITY,
+ STRIP_HIDDEN,
+ STRIP_PRIVATE,
+ STRIP_PRIV_IMPORTS,
+ PROPAGATE_DOC_CFG,
+ COLLECT_INTRA_DOC_LINKS,
+ CHECK_CODE_BLOCK_SYNTAX,
+ COLLECT_TRAIT_IMPLS,
+ CALCULATE_DOC_COVERAGE,
+ CHECK_INVALID_HTML_TAGS,
+ CHECK_BARE_URLS,
+];
+
+/// The list of passes run by default.
+pub(crate) const DEFAULT_PASSES: &[ConditionalPass] = &[
+ ConditionalPass::always(COLLECT_TRAIT_IMPLS),
+ ConditionalPass::always(CHECK_DOC_TEST_VISIBILITY),
+ ConditionalPass::new(STRIP_HIDDEN, WhenNotDocumentHidden),
+ ConditionalPass::new(STRIP_PRIVATE, WhenNotDocumentPrivate),
+ ConditionalPass::new(STRIP_PRIV_IMPORTS, WhenDocumentPrivate),
+ ConditionalPass::always(COLLECT_INTRA_DOC_LINKS),
+ ConditionalPass::always(CHECK_CODE_BLOCK_SYNTAX),
+ ConditionalPass::always(CHECK_INVALID_HTML_TAGS),
+ ConditionalPass::always(PROPAGATE_DOC_CFG),
+ ConditionalPass::always(CHECK_BARE_URLS),
+];
+
+/// The list of default passes run when `--doc-coverage` is passed to rustdoc.
+pub(crate) const COVERAGE_PASSES: &[ConditionalPass] = &[
+ ConditionalPass::new(STRIP_HIDDEN, WhenNotDocumentHidden),
+ ConditionalPass::new(STRIP_PRIVATE, WhenNotDocumentPrivate),
+ ConditionalPass::always(CALCULATE_DOC_COVERAGE),
+];
+
+impl ConditionalPass {
+ pub(crate) const fn always(pass: Pass) -> Self {
+ Self::new(pass, Always)
+ }
+
+ pub(crate) const fn new(pass: Pass, condition: Condition) -> Self {
+ ConditionalPass { pass, condition }
+ }
+}
+
+/// Returns the default set of passes, or the coverage passes when `show_coverage` is set.
+pub(crate) fn defaults(show_coverage: bool) -> &'static [ConditionalPass] {
+ if show_coverage { COVERAGE_PASSES } else { DEFAULT_PASSES }
+}
+
+/// Returns a span encompassing all the given attributes.
+pub(crate) fn span_of_attrs(attrs: &clean::Attributes) -> Option<Span> {
+ if attrs.doc_strings.is_empty() {
+ return None;
+ }
+ let start = attrs.doc_strings[0].span;
+ if start == DUMMY_SP {
+ return None;
+ }
+ let end = attrs.doc_strings.last().expect("no doc strings provided").span;
+ Some(start.to(end))
+}
+
+/// Attempts to match a range of bytes from parsed markdown to a `Span` in the source code.
+///
+/// This method will return `None` if we cannot construct a span from the source map or if the
+/// attributes are not all sugared doc comments. It's difficult to calculate the correct span in
+/// that case due to escaping and other source features.
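+///
+/// For example (illustrative): for the sugared doc comment `/// Hello world`, a
+/// markdown range covering `world` maps back to the source span of `world`, with
+/// `start_bytes` accounting for the `/// ` prefix that is not part of the markdown.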
+pub(crate) fn source_span_for_markdown_range(
+ tcx: TyCtxt<'_>,
+ markdown: &str,
+ md_range: &Range<usize>,
+ attrs: &clean::Attributes,
+) -> Option<Span> {
+ let is_all_sugared_doc =
+ attrs.doc_strings.iter().all(|frag| frag.kind == DocFragmentKind::SugaredDoc);
+
+ if !is_all_sugared_doc {
+ return None;
+ }
+
+ let snippet = tcx.sess.source_map().span_to_snippet(span_of_attrs(attrs)?).ok()?;
+
+ let starting_line = markdown[..md_range.start].matches('\n').count();
+ let ending_line = starting_line + markdown[md_range.start..md_range.end].matches('\n').count();
+
+ // We use `split_terminator('\n')` instead of `lines()` when counting bytes so that we treat
+ // CRLF and LF line endings the same way.
+ let mut src_lines = snippet.split_terminator('\n');
+ let md_lines = markdown.split_terminator('\n');
+
+ // The number of bytes from the source span to the markdown span that are not part
+ // of the markdown, like comment markers.
+ let mut start_bytes = 0;
+ let mut end_bytes = 0;
+
+ 'outer: for (line_no, md_line) in md_lines.enumerate() {
+ loop {
+ let source_line = src_lines.next()?;
+ match source_line.find(md_line) {
+ Some(offset) => {
+ if line_no == starting_line {
+ start_bytes += offset;
+
+ if starting_line == ending_line {
+ break 'outer;
+ }
+ } else if line_no == ending_line {
+ end_bytes += offset;
+ break 'outer;
+ } else if line_no < starting_line {
+ start_bytes += source_line.len() - md_line.len();
+ } else {
+ end_bytes += source_line.len() - md_line.len();
+ }
+ break;
+ }
+ None => {
+ // Since this is a source line that doesn't include a markdown line,
+ // we have to count the newline that we split from earlier.
+ if line_no <= starting_line {
+ start_bytes += source_line.len() + 1;
+ } else {
+ end_bytes += source_line.len() + 1;
+ }
+ }
+ }
+ }
+ }
+
+ Some(span_of_attrs(attrs)?.from_inner(InnerSpan::new(
+ md_range.start + start_bytes,
+ md_range.end + start_bytes + end_bytes,
+ )))
+}
diff --git a/src/librustdoc/passes/propagate_doc_cfg.rs b/src/librustdoc/passes/propagate_doc_cfg.rs
new file mode 100644
index 000000000..0c5d83655
--- /dev/null
+++ b/src/librustdoc/passes/propagate_doc_cfg.rs
@@ -0,0 +1,45 @@
+//! Propagates [`#[doc(cfg(...))]`](https://github.com/rust-lang/rust/issues/43781) to child items.
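+//!
+//! A small sketch of the effect (illustrative names only):
+//!
+//! ```rust,ignore
+//! #[doc(cfg(feature = "serde"))]
+//! pub mod serde_support {
+//!     // After this pass, `Config` is documented as requiring `feature = "serde"`
+//!     // as well, even though it carries no attribute of its own.
+//!     pub struct Config;
+//! }
+//! ```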
+use std::sync::Arc;
+
+use crate::clean::cfg::Cfg;
+use crate::clean::{Crate, Item};
+use crate::core::DocContext;
+use crate::fold::DocFolder;
+use crate::passes::Pass;
+
+pub(crate) const PROPAGATE_DOC_CFG: Pass = Pass {
+ name: "propagate-doc-cfg",
+ run: propagate_doc_cfg,
+ description: "propagates `#[doc(cfg(...))]` to child items",
+};
+
+pub(crate) fn propagate_doc_cfg(cr: Crate, _: &mut DocContext<'_>) -> Crate {
+ CfgPropagator { parent_cfg: None }.fold_crate(cr)
+}
+
+struct CfgPropagator {
+ parent_cfg: Option<Arc<Cfg>>,
+}
+
+impl DocFolder for CfgPropagator {
+ fn fold_item(&mut self, mut item: Item) -> Option<Item> {
+ let old_parent_cfg = self.parent_cfg.clone();
+
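+        // Merge the parent's cfg with the item's own cfg. (Illustrative:) a parent
+        // carrying `cfg(unix)` and an item carrying `cfg(feature = "foo")` combine
+        // into `cfg(all(unix, feature = "foo"))` via the `&=` below.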
+ let new_cfg = match (self.parent_cfg.take(), item.cfg.take()) {
+ (None, None) => None,
+ (Some(rc), None) | (None, Some(rc)) => Some(rc),
+ (Some(mut a), Some(b)) => {
+ let b = Arc::try_unwrap(b).unwrap_or_else(|rc| Cfg::clone(&rc));
+ *Arc::make_mut(&mut a) &= b;
+ Some(a)
+ }
+ };
+ self.parent_cfg = new_cfg.clone();
+ item.cfg = new_cfg;
+
+ let result = self.fold_item_recur(item);
+ self.parent_cfg = old_parent_cfg;
+
+ Some(result)
+ }
+}
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
new file mode 100644
index 000000000..533e2ce46
--- /dev/null
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -0,0 +1,68 @@
+//! Strip all doc(hidden) items from the output.
+use rustc_span::symbol::sym;
+use std::mem;
+
+use crate::clean;
+use crate::clean::{Item, ItemIdSet, NestedAttributesExt};
+use crate::core::DocContext;
+use crate::fold::{strip_item, DocFolder};
+use crate::passes::{ImplStripper, Pass};
+
+pub(crate) const STRIP_HIDDEN: Pass = Pass {
+ name: "strip-hidden",
+ run: strip_hidden,
+ description: "strips all `#[doc(hidden)]` items from the output",
+};
+
+/// Strip items marked `#[doc(hidden)]`
+pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate {
+ let mut retained = ItemIdSet::default();
+
+ // strip all #[doc(hidden)] items
+ let krate = {
+ let mut stripper = Stripper { retained: &mut retained, update_retained: true };
+ stripper.fold_crate(krate)
+ };
+
+ // strip all impls referencing stripped items
+ let mut stripper = ImplStripper { retained: &retained, cache: &cx.cache };
+ stripper.fold_crate(krate)
+}
+
+struct Stripper<'a> {
+ retained: &'a mut ItemIdSet,
+ update_retained: bool,
+}
+
+impl<'a> DocFolder for Stripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ if i.attrs.lists(sym::doc).has_word(sym::hidden) {
+ debug!("strip_hidden: stripping {:?} {:?}", i.type_(), i.name);
+ // Use a dedicated hidden item for fields, variants, and modules.
+ // We need to keep private fields and variants, so that the docs
+ // can show a placeholder "// some variants omitted". We need to keep
+ // private modules, because they can contain impl blocks, and impl
+ // block privacy is inherited from the type and trait, not from the
+            // module it's defined in. Both of these are marked "stripped" and are
+            // not included in the final docs, but since they still have an effect on
+            // the final docs, they cannot be completely removed from the Clean IR.
+ match *i.kind {
+ clean::StructFieldItem(..) | clean::ModuleItem(..) | clean::VariantItem(..) => {
+ // We need to recurse into stripped modules to
+ // strip things like impl methods but when doing so
+ // we must not add any items to the `retained` set.
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = strip_item(self.fold_item_recur(i));
+ self.update_retained = old;
+ return Some(ret);
+ }
+ _ => return None,
+ }
+ } else {
+ if self.update_retained {
+ self.retained.insert(i.item_id);
+ }
+ }
+ Some(self.fold_item_recur(i))
+ }
+}
diff --git a/src/librustdoc/passes/strip_priv_imports.rs b/src/librustdoc/passes/strip_priv_imports.rs
new file mode 100644
index 000000000..85be8fa10
--- /dev/null
+++ b/src/librustdoc/passes/strip_priv_imports.rs
@@ -0,0 +1,16 @@
+//! Strips all private import statements (use, extern crate) from a
+//! crate.
+use crate::clean;
+use crate::core::DocContext;
+use crate::fold::DocFolder;
+use crate::passes::{ImportStripper, Pass};
+
+pub(crate) const STRIP_PRIV_IMPORTS: Pass = Pass {
+ name: "strip-priv-imports",
+ run: strip_priv_imports,
+ description: "strips all private import statements (`use`, `extern crate`) from a crate",
+};
+
+pub(crate) fn strip_priv_imports(krate: clean::Crate, _: &mut DocContext<'_>) -> clean::Crate {
+ ImportStripper.fold_crate(krate)
+}
diff --git a/src/librustdoc/passes/strip_private.rs b/src/librustdoc/passes/strip_private.rs
new file mode 100644
index 000000000..9ba841a31
--- /dev/null
+++ b/src/librustdoc/passes/strip_private.rs
@@ -0,0 +1,35 @@
+//! Strip all private items from the output. Additionally implies strip_priv_imports.
+//! Basically, the goal is to remove items that are not relevant for public documentation.
+use crate::clean::{self, ItemIdSet};
+use crate::core::DocContext;
+use crate::fold::DocFolder;
+use crate::passes::{ImplStripper, ImportStripper, Pass, Stripper};
+
+pub(crate) const STRIP_PRIVATE: Pass = Pass {
+ name: "strip-private",
+ run: strip_private,
+ description: "strips all private items from a crate which cannot be seen externally, \
+ implies strip-priv-imports",
+};
+
+/// Strips all private items from the crate, i.e. items which cannot be seen
+/// externally.
+pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate {
+ // This stripper collects all *retained* nodes.
+ let mut retained = ItemIdSet::default();
+
+ // strip all private items
+ {
+ let mut stripper = Stripper {
+ retained: &mut retained,
+ access_levels: &cx.cache.access_levels,
+ update_retained: true,
+ is_json_output: cx.output_format.is_json() && !cx.show_coverage,
+ };
+ krate = ImportStripper.fold_crate(stripper.fold_crate(krate));
+ }
+
+ // strip all impls referencing private items
+ let mut stripper = ImplStripper { retained: &retained, cache: &cx.cache };
+ stripper.fold_crate(krate)
+}
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
new file mode 100644
index 000000000..0d419042a
--- /dev/null
+++ b/src/librustdoc/passes/stripper.rs
@@ -0,0 +1,188 @@
+//! A collection of utility functions for the `strip_*` passes.
+use rustc_hir::def_id::DefId;
+use rustc_middle::middle::privacy::AccessLevels;
+use std::mem;
+
+use crate::clean::{self, Item, ItemId, ItemIdSet};
+use crate::fold::{strip_item, DocFolder};
+use crate::formats::cache::Cache;
+
+pub(crate) struct Stripper<'a> {
+ pub(crate) retained: &'a mut ItemIdSet,
+ pub(crate) access_levels: &'a AccessLevels<DefId>,
+ pub(crate) update_retained: bool,
+ pub(crate) is_json_output: bool,
+}
+
+impl<'a> Stripper<'a> {
+    // The JSON output needs to be handled differently: some non-exported items can
+    // still be used in the public API, so we need to keep them as well. `is_exported`
+    // only checks whether an item is in the public API, which is not enough here.
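+    // (Illustrative:) a `pub(crate)` type returned from a public function is
+    // reachable but not exported, so the JSON backend keeps it while the HTML
+    // backend strips it.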
+ #[inline]
+ fn is_item_reachable(&self, item_id: ItemId) -> bool {
+ if self.is_json_output {
+ self.access_levels.is_reachable(item_id.expect_def_id())
+ } else {
+ self.access_levels.is_exported(item_id.expect_def_id())
+ }
+ }
+}
+
+impl<'a> DocFolder for Stripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ match *i.kind {
+ clean::StrippedItem(..) => {
+ // We need to recurse into stripped modules to strip things
+ // like impl methods but when doing so we must not add any
+ // items to the `retained` set.
+ debug!("Stripper: recursing into stripped {:?} {:?}", i.type_(), i.name);
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = self.fold_item_recur(i);
+ self.update_retained = old;
+ return Some(ret);
+ }
+ // These items can all get re-exported
+ clean::OpaqueTyItem(..)
+ | clean::TypedefItem(..)
+ | clean::StaticItem(..)
+ | clean::StructItem(..)
+ | clean::EnumItem(..)
+ | clean::TraitItem(..)
+ | clean::FunctionItem(..)
+ | clean::VariantItem(..)
+ | clean::MethodItem(..)
+ | clean::ForeignFunctionItem(..)
+ | clean::ForeignStaticItem(..)
+ | clean::ConstantItem(..)
+ | clean::UnionItem(..)
+ | clean::AssocConstItem(..)
+ | clean::AssocTypeItem(..)
+ | clean::TraitAliasItem(..)
+ | clean::MacroItem(..)
+ | clean::ForeignTypeItem => {
+ let item_id = i.item_id;
+ if item_id.is_local() && !self.is_item_reachable(item_id) {
+ debug!("Stripper: stripping {:?} {:?}", i.type_(), i.name);
+ return None;
+ }
+ }
+
+ clean::StructFieldItem(..) => {
+ if !i.visibility.is_public() {
+ return Some(strip_item(i));
+ }
+ }
+
+ clean::ModuleItem(..) => {
+ if i.item_id.is_local() && !i.visibility.is_public() {
+ debug!("Stripper: stripping module {:?}", i.name);
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = strip_item(self.fold_item_recur(i));
+ self.update_retained = old;
+ return Some(ret);
+ }
+ }
+
+ // handled in the `strip-priv-imports` pass
+ clean::ExternCrateItem { .. } | clean::ImportItem(..) => {}
+
+ clean::ImplItem(..) => {}
+
+ // tymethods etc. have no control over privacy
+ clean::TyMethodItem(..) | clean::TyAssocConstItem(..) | clean::TyAssocTypeItem(..) => {}
+
+ // Proc-macros are always public
+ clean::ProcMacroItem(..) => {}
+
+ // Primitives are never stripped
+ clean::PrimitiveItem(..) => {}
+
+ // Keywords are never stripped
+ clean::KeywordItem => {}
+ }
+
+ let fastreturn = match *i.kind {
+ // nothing left to do for traits (don't want to filter their
+ // methods out, visibility controlled by the trait)
+ clean::TraitItem(..) => true,
+
+ // implementations of traits are always public.
+ clean::ImplItem(ref imp) if imp.trait_.is_some() => true,
+ // Variant fields have inherited visibility
+ clean::VariantItem(clean::Variant::Struct(..) | clean::Variant::Tuple(..)) => true,
+ _ => false,
+ };
+
+ let i = if fastreturn {
+ if self.update_retained {
+ self.retained.insert(i.item_id);
+ }
+ return Some(i);
+ } else {
+ self.fold_item_recur(i)
+ };
+
+ if self.update_retained {
+ self.retained.insert(i.item_id);
+ }
+ Some(i)
+ }
+}
+
+/// This stripper discards all impls which reference stripped items
+pub(crate) struct ImplStripper<'a> {
+ pub(crate) retained: &'a ItemIdSet,
+ pub(crate) cache: &'a Cache,
+}
+
+impl<'a> DocFolder for ImplStripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ if let clean::ImplItem(ref imp) = *i.kind {
+            // Impl blocks can be skipped if they are empty, are not trait impls, and
+            // have no documentation.
+ if imp.trait_.is_none() && imp.items.is_empty() && i.doc_value().is_none() {
+ return None;
+ }
+ if let Some(did) = imp.for_.def_id(self.cache) {
+ if did.is_local() && !imp.for_.is_assoc_ty() && !self.retained.contains(&did.into())
+ {
+ debug!("ImplStripper: impl item for stripped type; removing");
+ return None;
+ }
+ }
+ if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) {
+ if did.is_local() && !self.retained.contains(&did.into()) {
+ debug!("ImplStripper: impl item for stripped trait; removing");
+ return None;
+ }
+ }
+ if let Some(generics) = imp.trait_.as_ref().and_then(|t| t.generics()) {
+ for typaram in generics {
+ if let Some(did) = typaram.def_id(self.cache) {
+ if did.is_local() && !self.retained.contains(&did.into()) {
+ debug!(
+ "ImplStripper: stripped item in trait's generics; removing impl"
+ );
+ return None;
+ }
+ }
+ }
+ }
+ }
+ Some(self.fold_item_recur(i))
+ }
+}
+
+/// This stripper discards all private import statements (`use`, `extern crate`)
+pub(crate) struct ImportStripper;
+
+impl DocFolder for ImportStripper {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ match *i.kind {
+ clean::ExternCrateItem { .. } | clean::ImportItem(..) if !i.visibility.is_public() => {
+ None
+ }
+ _ => Some(self.fold_item_recur(i)),
+ }
+ }
+}
diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs
new file mode 100644
index 000000000..0d9684025
--- /dev/null
+++ b/src/librustdoc/scrape_examples.rs
@@ -0,0 +1,359 @@
+//! This module analyzes crates to find call sites that can serve as examples in the documentation.
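+//!
+//! (Illustrative:) if an example program calls `my_crate::parse(input)`, the
+//! `--scrape-examples-*` flags let rustdoc record that call site and later render
+//! the snippet under the documentation of `parse`.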
+
+use crate::clean;
+use crate::config;
+use crate::formats;
+use crate::formats::renderer::FormatRenderer;
+use crate::html::render::Context;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::{
+ self as hir,
+ intravisit::{self, Visitor},
+};
+use rustc_interface::interface;
+use rustc_macros::{Decodable, Encodable};
+use rustc_middle::hir::map::Map;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_serialize::{
+ opaque::{FileEncoder, MemDecoder},
+ Decodable, Encodable,
+};
+use rustc_session::getopts;
+use rustc_span::{
+ def_id::{CrateNum, DefPathHash, LOCAL_CRATE},
+ edition::Edition,
+ BytePos, FileName, SourceFile,
+};
+
+use std::fs;
+use std::path::PathBuf;
+
+#[derive(Debug, Clone)]
+pub(crate) struct ScrapeExamplesOptions {
+ output_path: PathBuf,
+ target_crates: Vec<String>,
+ pub(crate) scrape_tests: bool,
+}
+
+impl ScrapeExamplesOptions {
+ pub(crate) fn new(
+ matches: &getopts::Matches,
+ diag: &rustc_errors::Handler,
+ ) -> Result<Option<Self>, i32> {
+ let output_path = matches.opt_str("scrape-examples-output-path");
+ let target_crates = matches.opt_strs("scrape-examples-target-crate");
+ let scrape_tests = matches.opt_present("scrape-tests");
+ match (output_path, !target_crates.is_empty(), scrape_tests) {
+ (Some(output_path), true, _) => Ok(Some(ScrapeExamplesOptions {
+ output_path: PathBuf::from(output_path),
+ target_crates,
+ scrape_tests,
+ })),
+ (Some(_), false, _) | (None, true, _) => {
+ diag.err("must use --scrape-examples-output-path and --scrape-examples-target-crate together");
+ Err(1)
+ }
+ (None, false, true) => {
+ diag.err("must use --scrape-examples-output-path and --scrape-examples-target-crate with --scrape-tests");
+ Err(1)
+ }
+ (None, false, false) => Ok(None),
+ }
+ }
+}
+
+#[derive(Encodable, Decodable, Debug, Clone)]
+pub(crate) struct SyntaxRange {
+ pub(crate) byte_span: (u32, u32),
+ pub(crate) line_span: (usize, usize),
+}
+
+impl SyntaxRange {
+ fn new(span: rustc_span::Span, file: &SourceFile) -> Option<Self> {
+ let get_pos = |bytepos: BytePos| file.original_relative_byte_pos(bytepos).0;
+ let get_line = |bytepos: BytePos| file.lookup_line(bytepos);
+
+ Some(SyntaxRange {
+ byte_span: (get_pos(span.lo()), get_pos(span.hi())),
+ line_span: (get_line(span.lo())?, get_line(span.hi())?),
+ })
+ }
+}
+
+#[derive(Encodable, Decodable, Debug, Clone)]
+pub(crate) struct CallLocation {
+ pub(crate) call_expr: SyntaxRange,
+ pub(crate) call_ident: SyntaxRange,
+ pub(crate) enclosing_item: SyntaxRange,
+}
+
+impl CallLocation {
+ fn new(
+ expr_span: rustc_span::Span,
+ ident_span: rustc_span::Span,
+ enclosing_item_span: rustc_span::Span,
+ source_file: &SourceFile,
+ ) -> Option<Self> {
+ Some(CallLocation {
+ call_expr: SyntaxRange::new(expr_span, source_file)?,
+ call_ident: SyntaxRange::new(ident_span, source_file)?,
+ enclosing_item: SyntaxRange::new(enclosing_item_span, source_file)?,
+ })
+ }
+}
+
+#[derive(Encodable, Decodable, Debug, Clone)]
+pub(crate) struct CallData {
+ pub(crate) locations: Vec<CallLocation>,
+ pub(crate) url: String,
+ pub(crate) display_name: String,
+ pub(crate) edition: Edition,
+}
+
+pub(crate) type FnCallLocations = FxHashMap<PathBuf, CallData>;
+pub(crate) type AllCallLocations = FxHashMap<DefPathHash, FnCallLocations>;
+
+/// Visitor for traversing a crate and finding instances of function calls.
+struct FindCalls<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ map: Map<'tcx>,
+ cx: Context<'tcx>,
+ target_crates: Vec<CrateNum>,
+ calls: &'a mut AllCallLocations,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for FindCalls<'a, 'tcx>
+where
+ 'tcx: 'a,
+{
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.map
+ }
+
+ fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) {
+ intravisit::walk_expr(self, ex);
+
+ let tcx = self.tcx;
+
+ // If we visit an item that contains an expression outside a function body,
+ // then we need to exit before calling typeck (which will panic). See
+ // test/run-make/rustdoc-scrape-examples-invalid-expr for an example.
+ let hir = tcx.hir();
+ if hir.maybe_body_owned_by(ex.hir_id.owner).is_none() {
+ return;
+ }
+
+ // Get type of function if expression is a function call
+ let (ty, call_span, ident_span) = match ex.kind {
+ hir::ExprKind::Call(f, _) => {
+ let types = tcx.typeck(ex.hir_id.owner);
+
+ if let Some(ty) = types.node_type_opt(f.hir_id) {
+ (ty, ex.span, f.span)
+ } else {
+ trace!("node_type_opt({}) = None", f.hir_id);
+ return;
+ }
+ }
+ hir::ExprKind::MethodCall(path, _, call_span) => {
+ let types = tcx.typeck(ex.hir_id.owner);
+ let Some(def_id) = types.type_dependent_def_id(ex.hir_id) else {
+ trace!("type_dependent_def_id({}) = None", ex.hir_id);
+ return;
+ };
+
+ let ident_span = path.ident.span;
+ (tcx.type_of(def_id), call_span, ident_span)
+ }
+ _ => {
+ return;
+ }
+ };
+
+ // If this span comes from a macro expansion, then the source code may not actually show
+ // a use of the given item, so it would be a poor example. Hence, we skip all uses in macros.
+ if call_span.from_expansion() {
+ trace!("Rejecting expr from macro: {call_span:?}");
+ return;
+ }
+
+ // If the enclosing item has a span coming from a proc macro, then we also don't want to include
+ // the example.
+ let enclosing_item_span = tcx
+ .hir()
+ .span_with_body(tcx.hir().local_def_id_to_hir_id(tcx.hir().get_parent_item(ex.hir_id)));
+ if enclosing_item_span.from_expansion() {
+ trace!("Rejecting expr ({call_span:?}) from macro item: {enclosing_item_span:?}");
+ return;
+ }
+
+ // If the enclosing item doesn't actually enclose the call, this means we probably have a weird
+ // macro issue even though the spans aren't tagged as being from an expansion.
+ if !enclosing_item_span.contains(call_span) {
+ warn!(
+ "Attempted to scrape call at [{call_span:?}] whose enclosing item [{enclosing_item_span:?}] doesn't contain the span of the call."
+ );
+ return;
+ }
+
+ // Similarly for the call w/ the function ident.
+ if !call_span.contains(ident_span) {
+ warn!(
+ "Attempted to scrape call at [{call_span:?}] whose identifier [{ident_span:?}] was not contained in the span of the call."
+ );
+ return;
+ }
+
+ // Save call site if the function resolves to a concrete definition
+ if let ty::FnDef(def_id, _) = ty.kind() {
+ if self.target_crates.iter().all(|krate| *krate != def_id.krate) {
+ trace!("Rejecting expr from crate not being documented: {call_span:?}");
+ return;
+ }
+
+ let source_map = tcx.sess.source_map();
+ let file = source_map.lookup_char_pos(call_span.lo()).file;
+ let file_path = match file.name.clone() {
+ FileName::Real(real_filename) => real_filename.into_local_path(),
+ _ => None,
+ };
+
+ if let Some(file_path) = file_path {
+ let abs_path = match fs::canonicalize(file_path.clone()) {
+ Ok(abs_path) => abs_path,
+ Err(_) => {
+ trace!("Could not canonicalize file path: {}", file_path.display());
+ return;
+ }
+ };
+
+ let cx = &self.cx;
+ let clean_span = crate::clean::types::Span::new(call_span);
+ let url = match cx.href_from_span(clean_span, false) {
+ Some(url) => url,
+ None => {
+ trace!(
+ "Rejecting expr ({call_span:?}) whose clean span ({clean_span:?}) cannot be turned into a link"
+ );
+ return;
+ }
+ };
+
+ let mk_call_data = || {
+ let display_name = file_path.display().to_string();
+ let edition = call_span.edition();
+ CallData { locations: Vec::new(), url, display_name, edition }
+ };
+
+ let fn_key = tcx.def_path_hash(*def_id);
+ let fn_entries = self.calls.entry(fn_key).or_default();
+
+ trace!("Including expr: {:?}", call_span);
+ let enclosing_item_span =
+ source_map.span_extend_to_prev_char(enclosing_item_span, '\n', false);
+ let location =
+ match CallLocation::new(call_span, ident_span, enclosing_item_span, &file) {
+ Some(location) => location,
+ None => {
+ trace!("Could not get serializable call location for {call_span:?}");
+ return;
+ }
+ };
+ fn_entries.entry(abs_path).or_insert_with(mk_call_data).locations.push(location);
+ }
+ }
+ }
+}
+
+pub(crate) fn run(
+ krate: clean::Crate,
+ mut renderopts: config::RenderOptions,
+ cache: formats::cache::Cache,
+ tcx: TyCtxt<'_>,
+ options: ScrapeExamplesOptions,
+) -> interface::Result<()> {
+ let inner = move || -> Result<(), String> {
+ // Generates source files for examples
+ renderopts.no_emit_shared = true;
+ let (cx, _) = Context::init(krate, renderopts, cache, tcx).map_err(|e| e.to_string())?;
+
+        // Collect the `CrateNum`s corresponding to the provided target crates.
+        // If two different versions of the crate are in the dependency tree, then examples will be collected from both.
+ let all_crates = tcx
+ .crates(())
+ .iter()
+ .chain([&LOCAL_CRATE])
+ .map(|crate_num| (crate_num, tcx.crate_name(*crate_num)))
+ .collect::<Vec<_>>();
+ let target_crates = options
+ .target_crates
+ .into_iter()
+ .flat_map(|target| all_crates.iter().filter(move |(_, name)| name.as_str() == target))
+ .map(|(crate_num, _)| **crate_num)
+ .collect::<Vec<_>>();
+
+ debug!("All crates in TyCtxt: {all_crates:?}");
+ debug!("Scrape examples target_crates: {target_crates:?}");
+
+ // Run call-finder on all items
+ let mut calls = FxHashMap::default();
+ let mut finder = FindCalls { calls: &mut calls, tcx, map: tcx.hir(), cx, target_crates };
+ tcx.hir().visit_all_item_likes_in_crate(&mut finder);
+
+ // The visitor might have found a type error, which we need to
+ // promote to a fatal error
+ if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ return Err(String::from("Compilation failed, aborting rustdoc"));
+ }
+
+ // Sort call locations within a given file in document order
+ for fn_calls in calls.values_mut() {
+ for file_calls in fn_calls.values_mut() {
+ file_calls.locations.sort_by_key(|loc| loc.call_expr.byte_span.0);
+ }
+ }
+
+ // Save output to provided path
+ let mut encoder = FileEncoder::new(options.output_path).map_err(|e| e.to_string())?;
+ calls.encode(&mut encoder);
+ encoder.finish().map_err(|e| e.to_string())?;
+
+ Ok(())
+ };
+
+ if let Err(e) = inner() {
+ tcx.sess.fatal(&e);
+ }
+
+ Ok(())
+}
+
+// Note: the Handler must be passed in explicitly because sess isn't available while parsing options
+pub(crate) fn load_call_locations(
+ with_examples: Vec<String>,
+ diag: &rustc_errors::Handler,
+) -> Result<AllCallLocations, i32> {
+ let inner = || {
+ let mut all_calls: AllCallLocations = FxHashMap::default();
+ for path in with_examples {
+ let bytes = fs::read(&path).map_err(|e| format!("{} (for path {})", e, path))?;
+ let mut decoder = MemDecoder::new(&bytes, 0);
+ let calls = AllCallLocations::decode(&mut decoder);
+
+ for (function, fn_calls) in calls.into_iter() {
+ all_calls.entry(function).or_default().extend(fn_calls.into_iter());
+ }
+ }
+
+ Ok(all_calls)
+ };
+
+ inner().map_err(|e: String| {
+ diag.err(&format!("failed to load examples: {}", e));
+ 1
+ })
+}
diff --git a/src/librustdoc/theme.rs b/src/librustdoc/theme.rs
new file mode 100644
index 000000000..0118d7dd2
--- /dev/null
+++ b/src/librustdoc/theme.rs
@@ -0,0 +1,271 @@
+use rustc_data_structures::fx::FxHashSet;
+use std::fs;
+use std::hash::{Hash, Hasher};
+use std::path::Path;
+
+use rustc_errors::Handler;
+
+#[cfg(test)]
+mod tests;
+
+#[derive(Debug, Clone, Eq)]
+pub(crate) struct CssPath {
+ pub(crate) name: String,
+ pub(crate) children: FxHashSet<CssPath>,
+}
+
+// This PartialEq implementation IS NOT COMMUTATIVE!!!
+//
+// The order is very important: the second object must have all of the first's rules.
+// However, the first is not required to have all of the second's rules.
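+//
+// (Illustrative:) comparing the paths parsed from `a { b {} }` against those from
+// `a { b {} c {} }` succeeds, while the reverse comparison fails because the `c`
+// child is missing from the first.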
+impl PartialEq for CssPath {
+ fn eq(&self, other: &CssPath) -> bool {
+ if self.name != other.name {
+ false
+ } else {
+ for child in &self.children {
+ if !other.children.iter().any(|c| child == c) {
+ return false;
+ }
+ }
+ true
+ }
+ }
+}
+
+impl Hash for CssPath {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.name.hash(state);
+ for x in &self.children {
+ x.hash(state);
+ }
+ }
+}
+
+impl CssPath {
+ fn new(name: String) -> CssPath {
+ CssPath { name, children: FxHashSet::default() }
+ }
+}
+
+/// All variants contain the position at which they occur.
+#[derive(Debug, Clone, Copy)]
+enum Events {
+ StartLineComment(usize),
+ StartComment(usize),
+ EndComment(usize),
+ InBlock(usize),
+ OutBlock(usize),
+}
+
+impl Events {
+ fn get_pos(&self) -> usize {
+ match *self {
+ Events::StartLineComment(p)
+ | Events::StartComment(p)
+ | Events::EndComment(p)
+ | Events::InBlock(p)
+ | Events::OutBlock(p) => p,
+ }
+ }
+
+ fn is_comment(&self) -> bool {
+ matches!(
+ self,
+ Events::StartLineComment(_) | Events::StartComment(_) | Events::EndComment(_)
+ )
+ }
+}
+
+fn previous_is_line_comment(events: &[Events]) -> bool {
+ matches!(events.last(), Some(&Events::StartLineComment(_)))
+}
+
+fn is_line_comment(pos: usize, v: &[u8], events: &[Events]) -> bool {
+ if let Some(&Events::StartComment(_)) = events.last() {
+ return false;
+ }
+ v[pos + 1] == b'/'
+}
+
+fn load_css_events(v: &[u8]) -> Vec<Events> {
+ let mut pos = 0;
+ let mut events = Vec::with_capacity(100);
+
+ while pos + 1 < v.len() {
+ match v[pos] {
+ b'/' if v[pos + 1] == b'*' => {
+ events.push(Events::StartComment(pos));
+ pos += 1;
+ }
+ b'/' if is_line_comment(pos, v, &events) => {
+ events.push(Events::StartLineComment(pos));
+ pos += 1;
+ }
+ b'\n' if previous_is_line_comment(&events) => {
+ events.push(Events::EndComment(pos));
+ }
+ b'*' if v[pos + 1] == b'/' => {
+ events.push(Events::EndComment(pos + 2));
+ pos += 1;
+ }
+ b'{' if !previous_is_line_comment(&events) => {
+ if let Some(&Events::StartComment(_)) = events.last() {
+ pos += 1;
+ continue;
+ }
+ events.push(Events::InBlock(pos + 1));
+ }
+ b'}' if !previous_is_line_comment(&events) => {
+ if let Some(&Events::StartComment(_)) = events.last() {
+ pos += 1;
+ continue;
+ }
+ events.push(Events::OutBlock(pos + 1));
+ }
+ _ => {}
+ }
+ pos += 1;
+ }
+ events
+}
+
+fn get_useful_next(events: &[Events], pos: &mut usize) -> Option<Events> {
+ while *pos < events.len() {
+ if !events[*pos].is_comment() {
+ return Some(events[*pos]);
+ }
+ *pos += 1;
+ }
+ None
+}
+
+fn get_previous_positions(events: &[Events], mut pos: usize) -> Vec<usize> {
+ let mut ret = Vec::with_capacity(3);
+
+ ret.push(events[pos].get_pos());
+ if pos > 0 {
+ pos -= 1;
+ }
+ loop {
+ if pos < 1 || !events[pos].is_comment() {
+ let x = events[pos].get_pos();
+ if *ret.last().unwrap() != x {
+ ret.push(x);
+ } else {
+ ret.push(0);
+ }
+ break;
+ }
+ ret.push(events[pos].get_pos());
+ pos -= 1;
+ }
+ if ret.len() & 1 != 0 && events[pos].is_comment() {
+ ret.push(0);
+ }
+ ret.iter().rev().cloned().collect()
+}
+
+fn build_rule(v: &[u8], positions: &[usize]) -> String {
+ minifier::css::minify(
+ &positions
+ .chunks(2)
+ .map(|x| ::std::str::from_utf8(&v[x[0]..x[1]]).unwrap_or(""))
+ .collect::<String>()
+ .trim()
+ .chars()
+ .filter_map(|c| match c {
+ '\n' | '\t' => Some(' '),
+ '/' | '{' | '}' => None,
+ c => Some(c),
+ })
+ .collect::<String>()
+ .split(' ')
+ .filter(|s| !s.is_empty())
+ .intersperse(" ")
+ .collect::<String>(),
+ )
+ .map(|css| css.to_string())
+ .unwrap_or_else(|_| String::new())
+}
+
+fn inner(v: &[u8], events: &[Events], pos: &mut usize) -> FxHashSet<CssPath> {
+ let mut paths = Vec::with_capacity(50);
+
+ while *pos < events.len() {
+ if let Some(Events::OutBlock(_)) = get_useful_next(events, pos) {
+ *pos += 1;
+ break;
+ }
+ if let Some(Events::InBlock(_)) = get_useful_next(events, pos) {
+ paths.push(CssPath::new(build_rule(v, &get_previous_positions(events, *pos))));
+ *pos += 1;
+ }
+ while let Some(Events::InBlock(_)) = get_useful_next(events, pos) {
+ if let Some(ref mut path) = paths.last_mut() {
+ for entry in inner(v, events, pos).iter() {
+ path.children.insert(entry.clone());
+ }
+ }
+ }
+ if let Some(Events::OutBlock(_)) = get_useful_next(events, pos) {
+ *pos += 1;
+ }
+ }
+ paths.iter().cloned().collect()
+}
+
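+/// Parses raw CSS bytes into a tree of selector "paths" rooted at a synthetic
+/// `parent` node. (Illustrative:) for the input `a { b {} }`, the returned root has
+/// one child `a`, which in turn has one child `b`.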
+pub(crate) fn load_css_paths(v: &[u8]) -> CssPath {
+ let events = load_css_events(v);
+ let mut pos = 0;
+
+ let mut parent = CssPath::new("parent".to_owned());
+ parent.children = inner(v, &events, &mut pos);
+ parent
+}
+
+pub(crate) fn get_differences(against: &CssPath, other: &CssPath, v: &mut Vec<String>) {
+ if against.name == other.name {
+ for child in &against.children {
+ let mut found = false;
+ let mut found_working = false;
+ let mut tmp = Vec::new();
+
+ for other_child in &other.children {
+ if child.name == other_child.name {
+ if child != other_child {
+ get_differences(child, other_child, &mut tmp);
+ } else {
+ found_working = true;
+ }
+ found = true;
+ break;
+ }
+ }
+ if !found {
+ v.push(format!(" Missing \"{}\" rule", child.name));
+ } else if !found_working {
+ v.extend(tmp.iter().cloned());
+ }
+ }
+ }
+}
+
+pub(crate) fn test_theme_against<P: AsRef<Path>>(
+ f: &P,
+ against: &CssPath,
+ diag: &Handler,
+) -> (bool, Vec<String>) {
+ let data = match fs::read(f) {
+ Ok(c) => c,
+ Err(e) => {
+ diag.struct_err(&e.to_string()).emit();
+ return (false, vec![]);
+ }
+ };
+
+ let paths = load_css_paths(&data);
+ let mut ret = vec![];
+ get_differences(against, &paths, &mut ret);
+ (true, ret)
+}
diff --git a/src/librustdoc/theme/tests.rs b/src/librustdoc/theme/tests.rs
new file mode 100644
index 000000000..ae8f43c6d
--- /dev/null
+++ b/src/librustdoc/theme/tests.rs
@@ -0,0 +1,117 @@
+use super::*;
+
+#[test]
+fn test_comments_in_rules() {
+ let text = r#"
+rule a {}
+
+rule b, c
+// a line comment
+{}
+
+rule d
+// another line comment
+e {}
+
+rule f/* a multine
+
+comment*/{}
+
+rule g/* another multine
+
+comment*/h
+
+i {}
+
+rule j/*commeeeeent
+
+you like things like "{}" in there? :)
+*/
+end {}"#;
+
+ let against = r#"
+rule a {}
+
+rule b, c {}
+
+rule d e {}
+
+rule f {}
+
+rule gh i {}
+
+rule j end {}
+"#;
+
+ let mut ret = Vec::new();
+ get_differences(
+ &load_css_paths(against.as_bytes()),
+ &load_css_paths(text.as_bytes()),
+ &mut ret,
+ );
+ assert!(ret.is_empty());
+}
+
+#[test]
+fn test_text() {
+ let text = r#"
+a
+/* sdfs
+*/ b
+c // sdf
+d {}
+"#;
+ let paths = load_css_paths(text.as_bytes());
+ assert!(paths.children.contains(&CssPath::new("a b c d".to_owned())));
+}
+
+#[test]
+fn test_comparison() {
+ let x = r#"
+a {
+ b {
+ c {}
+ }
+}
+"#;
+
+ let y = r#"
+a {
+ b {}
+}
+"#;
+
+ let against = load_css_paths(y.as_bytes());
+ let other = load_css_paths(x.as_bytes());
+
+ let mut ret = Vec::new();
+ get_differences(&against, &other, &mut ret);
+ assert!(ret.is_empty());
+ get_differences(&other, &against, &mut ret);
+ assert_eq!(ret, vec![" Missing \"c\" rule".to_owned()]);
+}
+
+#[test]
+fn check_empty_css() {
+ let events = load_css_events(&[]);
+ assert_eq!(events.len(), 0);
+}
+
+#[test]
+fn check_invalid_css() {
+ let events = load_css_events(b"*");
+ assert_eq!(events.len(), 0);
+}
+
+#[test]
+fn test_with_minification() {
+ let text = include_str!("../html/static/css/themes/dark.css");
+ let minified = minifier::css::minify(&text).expect("CSS minification failed").to_string();
+
+ let against = load_css_paths(text.as_bytes());
+ let other = load_css_paths(minified.as_bytes());
+
+ let mut ret = Vec::new();
+ get_differences(&against, &other, &mut ret);
+ assert!(ret.is_empty());
+}
diff --git a/src/librustdoc/visit.rs b/src/librustdoc/visit.rs
new file mode 100644
index 000000000..0bb41977c
--- /dev/null
+++ b/src/librustdoc/visit.rs
@@ -0,0 +1,72 @@
+use crate::clean::*;
+
+pub(crate) trait DocVisitor: Sized {
+ fn visit_item(&mut self, item: &Item) {
+ self.visit_item_recur(item)
+ }
+
+ /// don't override!
+ fn visit_inner_recur(&mut self, kind: &ItemKind) {
+ match kind {
+ StrippedItem(..) => unreachable!(),
+ ModuleItem(i) => {
+ self.visit_mod(i);
+ }
+ StructItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),
+ UnionItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),
+ EnumItem(i) => i.variants.iter().for_each(|x| self.visit_item(x)),
+ TraitItem(i) => i.items.iter().for_each(|x| self.visit_item(x)),
+ ImplItem(i) => i.items.iter().for_each(|x| self.visit_item(x)),
+ VariantItem(i) => match i {
+ Variant::Struct(j) => j.fields.iter().for_each(|x| self.visit_item(x)),
+ Variant::Tuple(fields) => fields.iter().for_each(|x| self.visit_item(x)),
+ Variant::CLike => {}
+ },
+ ExternCrateItem { src: _ }
+ | ImportItem(_)
+ | FunctionItem(_)
+ | TypedefItem(_)
+ | OpaqueTyItem(_)
+ | StaticItem(_)
+ | ConstantItem(_)
+ | TraitAliasItem(_)
+ | TyMethodItem(_)
+ | MethodItem(_, _)
+ | StructFieldItem(_)
+ | ForeignFunctionItem(_)
+ | ForeignStaticItem(_)
+ | ForeignTypeItem
+ | MacroItem(_)
+ | ProcMacroItem(_)
+ | PrimitiveItem(_)
+ | TyAssocConstItem(..)
+ | AssocConstItem(..)
+ | TyAssocTypeItem(..)
+ | AssocTypeItem(..)
+ | KeywordItem => {}
+ }
+ }
+
+ /// don't override!
+ fn visit_item_recur(&mut self, item: &Item) {
+ match &*item.kind {
+ StrippedItem(i) => self.visit_inner_recur(i),
+ _ => self.visit_inner_recur(&item.kind),
+ }
+ }
+
+ fn visit_mod(&mut self, m: &Module) {
+ m.items.iter().for_each(|i| self.visit_item(i))
+ }
+
+ fn visit_crate(&mut self, c: &Crate) {
+ self.visit_item(&c.module);
+
+ // FIXME: make this a simple by-ref for loop once external_traits is cleaned up
+ let external_traits = { std::mem::take(&mut *c.external_traits.borrow_mut()) };
+ for (k, v) in external_traits {
+ v.trait_.items.iter().for_each(|i| self.visit_item(i));
+ c.external_traits.borrow_mut().insert(k, v);
+ }
+ }
+}
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
new file mode 100644
index 000000000..ca7a20bf3
--- /dev/null
+++ b/src/librustdoc/visit_ast.rs
@@ -0,0 +1,396 @@
+//! The Rust AST Visitor. Extracts useful information and massages it into a form
+//! usable for `clean`.
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::Node;
+use rustc_hir::CRATE_HIR_ID;
+use rustc_middle::middle::privacy::AccessLevel;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
+use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::Span;
+
+use std::mem;
+
+use crate::clean::{self, cfg::Cfg, AttributesExt, NestedAttributesExt};
+use crate::core;
+
+/// This struct is used to store items from Rust's AST in a more convenient
+/// form (and with prettier names) before cleaning.
+#[derive(Debug)]
+pub(crate) struct Module<'hir> {
+ pub(crate) name: Symbol,
+ pub(crate) where_inner: Span,
+ pub(crate) mods: Vec<Module<'hir>>,
+ pub(crate) id: hir::HirId,
+ // (item, renamed)
+ pub(crate) items: Vec<(&'hir hir::Item<'hir>, Option<Symbol>)>,
+ pub(crate) foreigns: Vec<(&'hir hir::ForeignItem<'hir>, Option<Symbol>)>,
+}
+
+impl Module<'_> {
+ pub(crate) fn new(name: Symbol, id: hir::HirId, where_inner: Span) -> Self {
+ Module { name, id, where_inner, mods: Vec::new(), items: Vec::new(), foreigns: Vec::new() }
+ }
+
+ pub(crate) fn where_outer(&self, tcx: TyCtxt<'_>) -> Span {
+ tcx.hir().span(self.id)
+ }
+}
+
+// FIXME: Should this be replaced with tcx.def_path_str?
+fn def_id_to_path(tcx: TyCtxt<'_>, did: DefId) -> Vec<Symbol> {
+ let crate_name = tcx.crate_name(did.krate);
+ let relative = tcx.def_path(did).data.into_iter().filter_map(|elem| elem.data.get_opt_name());
+ std::iter::once(crate_name).chain(relative).collect()
+}
+
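+/// Returns `true` if any enclosing scope of `node` is marked `#[doc(hidden)]`,
+/// i.e. the item inherits `doc(hidden)` from one of its parents.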
+pub(crate) fn inherits_doc_hidden(tcx: TyCtxt<'_>, mut node: hir::HirId) -> bool {
+ while let Some(id) = tcx.hir().get_enclosing_scope(node) {
+ node = id;
+ if tcx.hir().attrs(node).lists(sym::doc).has_word(sym::hidden) {
+ return true;
+ }
+ }
+ false
+}
+
+// Also, is there some reason that this doesn't use the 'visit'
+// framework from syntax?
+
+pub(crate) struct RustdocVisitor<'a, 'tcx> {
+ cx: &'a mut core::DocContext<'tcx>,
+ view_item_stack: FxHashSet<hir::HirId>,
+ inlining: bool,
+ /// Are the current module and all of its parents public?
+ inside_public_path: bool,
+ exact_paths: FxHashMap<DefId, Vec<Symbol>>,
+}
+
+impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
+ pub(crate) fn new(cx: &'a mut core::DocContext<'tcx>) -> RustdocVisitor<'a, 'tcx> {
+ // If the root is re-exported, terminate all recursion.
+ let mut stack = FxHashSet::default();
+ stack.insert(hir::CRATE_HIR_ID);
+ RustdocVisitor {
+ cx,
+ view_item_stack: stack,
+ inlining: false,
+ inside_public_path: true,
+ exact_paths: FxHashMap::default(),
+ }
+ }
+
+ fn store_path(&mut self, did: DefId) {
+ let tcx = self.cx.tcx;
+ self.exact_paths.entry(did).or_insert_with(|| def_id_to_path(tcx, did));
+ }
+
+ pub(crate) fn visit(mut self) -> Module<'tcx> {
+ let mut top_level_module = self.visit_mod_contents(
+ hir::CRATE_HIR_ID,
+ self.cx.tcx.hir().root_module(),
+ self.cx.tcx.crate_name(LOCAL_CRATE),
+ );
+
+        // `#[macro_export] macro_rules!` items are re-exported at the top level of the
+        // crate, regardless of where they're defined. We want to document the
+        // top-level re-export of the macro, not its original definition, since
+        // the re-export defines the path that a user will actually see. Accordingly,
+        // we add the re-export as an item here, and then skip over the original
+        // definition in `visit_item()` below.
+        //
+        // We also skip `#[macro_export] macro_rules!` items that have already been
+        // inserted; this can happen if, within the same module, a `#[macro_export]
+        // macro_rules!` is declared but also re-exported as itself, producing two
+        // exports of the same macro in the same module.
+ let mut inserted = FxHashSet::default();
+ for export in self.cx.tcx.module_reexports(CRATE_DEF_ID).unwrap_or(&[]) {
+ if let Res::Def(DefKind::Macro(_), def_id) = export.res {
+ if let Some(local_def_id) = def_id.as_local() {
+ if self.cx.tcx.has_attr(def_id, sym::macro_export) {
+ if inserted.insert(def_id) {
+ let item = self.cx.tcx.hir().expect_item(local_def_id);
+ top_level_module.items.push((item, None));
+ }
+ }
+ }
+ }
+ }
+
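+        // Collect the cfgs listed in crate-level `#![doc(cfg_hide(...))]` attributes;
+        // `test`, `doc` and `doctest` are always part of the hidden set.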
+ self.cx.cache.hidden_cfg = self
+ .cx
+ .tcx
+ .hir()
+ .attrs(CRATE_HIR_ID)
+ .iter()
+ .filter(|attr| attr.has_name(sym::doc))
+ .flat_map(|attr| attr.meta_item_list().into_iter().flatten())
+ .filter(|attr| attr.has_name(sym::cfg_hide))
+ .flat_map(|attr| {
+ attr.meta_item_list()
+ .unwrap_or(&[])
+ .iter()
+ .filter_map(|attr| {
+ Cfg::parse(attr.meta_item()?)
+ .map_err(|e| self.cx.sess().diagnostic().span_err(e.span, e.msg))
+ .ok()
+ })
+ .collect::<Vec<_>>()
+ })
+ .chain(
+ [Cfg::Cfg(sym::test, None), Cfg::Cfg(sym::doc, None), Cfg::Cfg(sym::doctest, None)]
+ .into_iter(),
+ )
+ .collect();
+
+ self.cx.cache.exact_paths = self.exact_paths;
+ top_level_module
+ }
+
+ fn visit_mod_contents(
+ &mut self,
+ id: hir::HirId,
+ m: &'tcx hir::Mod<'tcx>,
+ name: Symbol,
+ ) -> Module<'tcx> {
+ let mut om = Module::new(name, id, m.spans.inner_span);
+ let def_id = self.cx.tcx.hir().local_def_id(id).to_def_id();
+        // Keep track of whether there were any private modules in the path.
+ let orig_inside_public_path = self.inside_public_path;
+ self.inside_public_path &= self.cx.tcx.visibility(def_id).is_public();
+ for &i in m.item_ids {
+ let item = self.cx.tcx.hir().item(i);
+ self.visit_item(item, None, &mut om);
+ }
+ self.inside_public_path = orig_inside_public_path;
+ om
+ }
+
+    /// Tries to resolve the target of a `pub use` statement and inlines the
+    /// target if it is defined locally and would not be documented otherwise,
+    /// or when it is specifically requested with `please_inline`
+    /// (the latter is the case when the import is marked `doc(inline)`).
+ ///
+ /// Cross-crate inlining occurs later on during crate cleaning
+ /// and follows different rules.
+ ///
+ /// Returns `true` if the target has been inlined.
+ fn maybe_inline_local(
+ &mut self,
+ id: hir::HirId,
+ res: Res,
+ renamed: Option<Symbol>,
+ glob: bool,
+ om: &mut Module<'tcx>,
+ please_inline: bool,
+ ) -> bool {
+ debug!("maybe_inline_local res: {:?}", res);
+
+ if self.cx.output_format.is_json() {
+ return false;
+ }
+
+ let tcx = self.cx.tcx;
+ let Some(res_did) = res.opt_def_id() else {
+ return false;
+ };
+
+ let use_attrs = tcx.hir().attrs(id);
+ // Don't inline `doc(hidden)` imports so they can be stripped at a later stage.
+ let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline)
+ || use_attrs.lists(sym::doc).has_word(sym::hidden);
+
+        // For cross-crate impl inlining we need to know whether items are
+        // reachable in documentation -- a previously unreachable item can be
+        // made reachable by cross-crate inlining, which is what we're checking here.
+        // (This is done here because we need to know this upfront.)
+ if !res_did.is_local() && !is_no_inline {
+ let attrs = clean::inline::load_attrs(self.cx, res_did);
+ let self_is_hidden = attrs.lists(sym::doc).has_word(sym::hidden);
+ if !self_is_hidden {
+ if let Res::Def(kind, did) = res {
+ if kind == DefKind::Mod {
+ crate::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did)
+ } else {
+ // All items need to be handled here in case someone wishes to link
+ // to them with intra-doc links
+ self.cx.cache.access_levels.map.insert(did, AccessLevel::Public);
+ }
+ }
+ }
+ return false;
+ }
+
+ let res_hir_id = match res_did.as_local() {
+ Some(n) => tcx.hir().local_def_id_to_hir_id(n),
+ None => return false,
+ };
+
+ let is_private = !self.cx.cache.access_levels.is_public(res_did);
+ let is_hidden = inherits_doc_hidden(self.cx.tcx, res_hir_id);
+
+ // Only inline if requested or if the item would otherwise be stripped.
+ if (!please_inline && !is_private && !is_hidden) || is_no_inline {
+ return false;
+ }
+
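+        // If the target is already on the stack, we have a cycle of re-exports;
+        // bail out instead of recursing forever.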
+ if !self.view_item_stack.insert(res_hir_id) {
+ return false;
+ }
+
+ let ret = match tcx.hir().get(res_hir_id) {
+ Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
+ let prev = mem::replace(&mut self.inlining, true);
+ for &i in m.item_ids {
+ let i = self.cx.tcx.hir().item(i);
+ self.visit_item(i, None, om);
+ }
+ self.inlining = prev;
+ true
+ }
+ Node::Item(it) if !glob => {
+ let prev = mem::replace(&mut self.inlining, true);
+ self.visit_item(it, renamed, om);
+ self.inlining = prev;
+ true
+ }
+ Node::ForeignItem(it) if !glob => {
+ let prev = mem::replace(&mut self.inlining, true);
+ self.visit_foreign_item(it, renamed, om);
+ self.inlining = prev;
+ true
+ }
+ _ => false,
+ };
+ self.view_item_stack.remove(&res_hir_id);
+ ret
+ }
+
+ fn visit_item(
+ &mut self,
+ item: &'tcx hir::Item<'_>,
+ renamed: Option<Symbol>,
+ om: &mut Module<'tcx>,
+ ) {
+ debug!("visiting item {:?}", item);
+ let name = renamed.unwrap_or(item.ident.name);
+
+ let def_id = item.def_id.to_def_id();
+ let is_pub = self.cx.tcx.visibility(def_id).is_public();
+
+ if is_pub {
+ self.store_path(item.def_id.to_def_id());
+ }
+
+ match item.kind {
+ hir::ItemKind::ForeignMod { items, .. } => {
+ for item in items {
+ let item = self.cx.tcx.hir().foreign_item(item.id);
+ self.visit_foreign_item(item, None, om);
+ }
+ }
+            // If we're inlining, skip private items and items re-exported as "_".
+ _ if self.inlining && (!is_pub || renamed == Some(kw::Underscore)) => {}
+ hir::ItemKind::GlobalAsm(..) => {}
+ hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
+ hir::ItemKind::Use(path, kind) => {
+ let is_glob = kind == hir::UseKind::Glob;
+
+                // Struct and variant constructors and proc macro stubs always show up alongside
+                // their definitions; we've already processed them, so just discard these.
+ if let Res::Def(DefKind::Ctor(..), _) | Res::SelfCtor(..) = path.res {
+ return;
+ }
+
+ let attrs = self.cx.tcx.hir().attrs(item.hir_id());
+
+                // If there was a private module in the current path, then don't bother inlining
+                // anything, as it will probably be stripped anyway.
+ if is_pub && self.inside_public_path {
+ let please_inline = attrs.iter().any(|item| match item.meta_item_list() {
+ Some(ref list) if item.has_name(sym::doc) => {
+ list.iter().any(|i| i.has_name(sym::inline))
+ }
+ _ => false,
+ });
+ let ident = if is_glob { None } else { Some(name) };
+ if self.maybe_inline_local(
+ item.hir_id(),
+ path.res,
+ ident,
+ is_glob,
+ om,
+ please_inline,
+ ) {
+ return;
+ }
+ }
+
+ om.items.push((item, renamed))
+ }
+ hir::ItemKind::Macro(ref macro_def, _) => {
+ // `#[macro_export] macro_rules!` items are handled separately in `visit()`,
+ // above, since they need to be documented at the module top level. Accordingly,
+ // we only want to handle macros if one of three conditions holds:
+ //
+ // 1. This macro was defined by `macro`, and thus isn't covered by the case
+ // above.
+ // 2. This macro isn't marked with `#[macro_export]`, and thus isn't covered
+ // by the case above.
+ // 3. We're inlining, since a reexport where inlining has been requested
+ // should be inlined even if it is also documented at the top level.
+
+ let def_id = item.def_id.to_def_id();
+ let is_macro_2_0 = !macro_def.macro_rules;
+ let nonexported = !self.cx.tcx.has_attr(def_id, sym::macro_export);
+
+ if is_macro_2_0 || nonexported || self.inlining {
+ om.items.push((item, renamed));
+ }
+ }
+ hir::ItemKind::Mod(ref m) => {
+ om.mods.push(self.visit_mod_contents(item.hir_id(), m, name));
+ }
+ hir::ItemKind::Fn(..)
+ | hir::ItemKind::ExternCrate(..)
+ | hir::ItemKind::Enum(..)
+ | hir::ItemKind::Struct(..)
+ | hir::ItemKind::Union(..)
+ | hir::ItemKind::TyAlias(..)
+ | hir::ItemKind::OpaqueTy(..)
+ | hir::ItemKind::Static(..)
+ | hir::ItemKind::Trait(..)
+ | hir::ItemKind::TraitAlias(..) => om.items.push((item, renamed)),
+ hir::ItemKind::Const(..) => {
+ // Underscore constants do not correspond to a nameable item and
+ // so are never useful in documentation.
+ if name != kw::Underscore {
+ om.items.push((item, renamed));
+ }
+ }
+ hir::ItemKind::Impl(impl_) => {
+                // Don't duplicate impls when inlining or when they implement a trait; we'll pick
+                // them up regardless of where they're located.
+ if !self.inlining && impl_.of_trait.is_none() {
+ om.items.push((item, None));
+ }
+ }
+ }
+ }
+
+ fn visit_foreign_item(
+ &mut self,
+ item: &'tcx hir::ForeignItem<'_>,
+ renamed: Option<Symbol>,
+ om: &mut Module<'tcx>,
+ ) {
+        // If inlining, we only want to include public foreign items.
+ if !self.inlining || self.cx.tcx.visibility(item.def_id).is_public() {
+ om.foreigns.push((item, renamed));
+ }
+ }
+}
diff --git a/src/librustdoc/visit_lib.rs b/src/librustdoc/visit_lib.rs
new file mode 100644
index 000000000..f01ec3866
--- /dev/null
+++ b/src/librustdoc/visit_lib.rs
@@ -0,0 +1,82 @@
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::{CrateNum, DefId};
+use rustc_middle::middle::privacy::{AccessLevel, AccessLevels};
+use rustc_middle::ty::TyCtxt;
+
+// FIXME: this may not be exhaustive, but is sufficient for rustdoc's current uses
+
+/// Similar to `librustc_privacy::EmbargoVisitor`, but also takes
+/// specific rustdoc annotations into account (i.e., `doc(hidden)`)
+pub(crate) struct LibEmbargoVisitor<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ // Accessibility levels for reachable nodes
+ access_levels: &'a mut AccessLevels<DefId>,
+    // Previous accessibility level; `None` means unreachable
+ prev_level: Option<AccessLevel>,
+ // Keeps track of already visited modules, in case a module re-exports its parent
+ visited_mods: FxHashSet<DefId>,
+}
+
+impl<'a, 'tcx> LibEmbargoVisitor<'a, 'tcx> {
+ pub(crate) fn new(cx: &'a mut crate::core::DocContext<'tcx>) -> LibEmbargoVisitor<'a, 'tcx> {
+ LibEmbargoVisitor {
+ tcx: cx.tcx,
+ access_levels: &mut cx.cache.access_levels,
+ prev_level: Some(AccessLevel::Public),
+ visited_mods: FxHashSet::default(),
+ }
+ }
+
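+    /// Marks the root module of the external crate `cnum` as publicly reachable and
+    /// then visits it recursively, recording access levels along the way.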
+ pub(crate) fn visit_lib(&mut self, cnum: CrateNum) {
+ let did = cnum.as_def_id();
+ self.update(did, Some(AccessLevel::Public));
+ self.visit_mod(did);
+ }
+
+ // Updates node level and returns the updated level
+ fn update(&mut self, did: DefId, level: Option<AccessLevel>) -> Option<AccessLevel> {
+ let is_hidden = self.tcx.is_doc_hidden(did);
+
+ let old_level = self.access_levels.map.get(&did).cloned();
+ // Accessibility levels can only grow
+ if level > old_level && !is_hidden {
+ self.access_levels.map.insert(did, level.unwrap());
+ level
+ } else {
+ old_level
+ }
+ }
+
+ pub(crate) fn visit_mod(&mut self, def_id: DefId) {
+ if !self.visited_mods.insert(def_id) {
+ return;
+ }
+
+ for item in self.tcx.module_children(def_id).iter() {
+ if let Some(def_id) = item.res.opt_def_id() {
+ if self.tcx.def_key(def_id).parent.map_or(false, |d| d == def_id.index)
+ || item.vis.is_public()
+ {
+ self.visit_item(item.res);
+ }
+ }
+ }
+ }
+
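+    /// Records the access level of the item `res` refers to and, if it is a module,
+    /// recursively visits its children with that level as the new parent level.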
+ fn visit_item(&mut self, res: Res<!>) {
+ let def_id = res.def_id();
+ let vis = self.tcx.visibility(def_id);
+ let inherited_item_level = if vis.is_public() { self.prev_level } else { None };
+
+ let item_level = self.update(def_id, inherited_item_level);
+
+ if let Res::Def(DefKind::Mod, _) = res {
+ let orig_level = self.prev_level;
+
+ self.prev_level = item_level;
+ self.visit_mod(def_id);
+ self.prev_level = orig_level;
+ }
+ }
+}