Diffstat (limited to 'src/librustdoc/passes')
-rw-r--r--  src/librustdoc/passes/bare_urls.rs                    | 110
-rw-r--r--  src/librustdoc/passes/calculate_doc_coverage.rs       |   4
-rw-r--r--  src/librustdoc/passes/check_code_block_syntax.rs      | 209
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs    |   4
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs      |   5
-rw-r--r--  src/librustdoc/passes/collect_trait_impls.rs          |   6
-rw-r--r--  src/librustdoc/passes/lint.rs                         |  33
-rw-r--r--  src/librustdoc/passes/lint/bare_urls.rs               |  89
-rw-r--r--  src/librustdoc/passes/lint/check_code_block_syntax.rs | 170
-rw-r--r--  src/librustdoc/passes/lint/html_tags.rs (renamed from src/librustdoc/passes/html_tags.rs) | 309
-rw-r--r--  src/librustdoc/passes/mod.rs                          |  18
-rw-r--r--  src/librustdoc/passes/strip_hidden.rs                 |   1
-rw-r--r--  src/librustdoc/passes/strip_priv_imports.rs           |   4
-rw-r--r--  src/librustdoc/passes/strip_private.rs                |   4
-rw-r--r--  src/librustdoc/passes/stripper.rs                     |  49
15 files changed, 487 insertions(+), 528 deletions(-)
diff --git a/src/librustdoc/passes/bare_urls.rs b/src/librustdoc/passes/bare_urls.rs
deleted file mode 100644
index 7ff3ccef9..000000000
--- a/src/librustdoc/passes/bare_urls.rs
+++ /dev/null
@@ -1,110 +0,0 @@
-//! Detects links that are not linkified, e.g., in Markdown such as `Go to https://example.com/.`
-//! Suggests wrapping the link with angle brackets: `Go to <https://example.com/>.` to linkify it.
-use super::Pass;
-use crate::clean::*;
-use crate::core::DocContext;
-use crate::html::markdown::main_body_opts;
-use crate::visit::DocVisitor;
-use core::ops::Range;
-use pulldown_cmark::{Event, Parser, Tag};
-use regex::Regex;
-use rustc_errors::Applicability;
-use std::mem;
-use std::sync::LazyLock;
-
-pub(crate) const CHECK_BARE_URLS: Pass = Pass {
- name: "check-bare-urls",
- run: check_bare_urls,
- description: "detects URLs that are not hyperlinks",
-};
-
-static URL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
- Regex::new(concat!(
- r"https?://", // url scheme
- r"([-a-zA-Z0-9@:%._\+~#=]{2,256}\.)+", // one or more subdomains
- r"[a-zA-Z]{2,63}", // root domain
- r"\b([-a-zA-Z0-9@:%_\+.~#?&/=]*)" // optional query or url fragments
- ))
- .expect("failed to build regex")
-});
-
-struct BareUrlsLinter<'a, 'tcx> {
- cx: &'a mut DocContext<'tcx>,
-}
-
-impl<'a, 'tcx> BareUrlsLinter<'a, 'tcx> {
- fn find_raw_urls(
- &self,
- text: &str,
- range: Range<usize>,
- f: &impl Fn(&DocContext<'_>, &str, &str, Range<usize>),
- ) {
- trace!("looking for raw urls in {}", text);
- // For now, we only check "full" URLs (meaning, starting with "http://" or "https://").
- for match_ in URL_REGEX.find_iter(text) {
- let url = match_.as_str();
- let url_range = match_.range();
- f(
- self.cx,
- "this URL is not a hyperlink",
- url,
- Range { start: range.start + url_range.start, end: range.start + url_range.end },
- );
- }
- }
-}
-
-pub(crate) fn check_bare_urls(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
- BareUrlsLinter { cx }.visit_crate(&krate);
- krate
-}
-
-impl<'a, 'tcx> DocVisitor for BareUrlsLinter<'a, 'tcx> {
- fn visit_item(&mut self, item: &Item) {
- let Some(hir_id) = DocContext::as_local_hir_id(self.cx.tcx, item.item_id)
- else {
- // If non-local, no need to check anything.
- return;
- };
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
- if !dox.is_empty() {
- let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
- let sp = super::source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
- .unwrap_or_else(|| item.attr_span(cx.tcx));
- cx.tcx.struct_span_lint_hir(crate::lint::BARE_URLS, hir_id, sp, msg, |lint| {
- lint.note("bare URLs are not automatically turned into clickable links")
- .span_suggestion(
- sp,
- "use an automatic link instead",
- format!("<{}>", url),
- Applicability::MachineApplicable,
- )
- });
- };
-
- let mut p = Parser::new_ext(&dox, main_body_opts()).into_offset_iter();
-
- while let Some((event, range)) = p.next() {
- match event {
- Event::Text(s) => self.find_raw_urls(&s, range, &report_diag),
- // We don't want to check the text inside code blocks or links.
- Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link(..))) => {
- while let Some((event, _)) = p.next() {
- match event {
- Event::End(end)
- if mem::discriminant(&end) == mem::discriminant(&tag) =>
- {
- break;
- }
- _ => {}
- }
- }
- }
- _ => {}
- }
- }
- }
-
- self.visit_item_recur(item)
- }
-}
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index 48835abf9..02b227896 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -244,10 +244,10 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
matches!(
node,
hir::Node::Variant(hir::Variant {
- data: hir::VariantData::Tuple(_, _),
+ data: hir::VariantData::Tuple(_, _, _),
..
}) | hir::Node::Item(hir::Item {
- kind: hir::ItemKind::Struct(hir::VariantData::Tuple(_, _), _),
+ kind: hir::ItemKind::Struct(hir::VariantData::Tuple(_, _, _), _),
..
})
)
diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs
deleted file mode 100644
index 2e651b538..000000000
--- a/src/librustdoc/passes/check_code_block_syntax.rs
+++ /dev/null
@@ -1,209 +0,0 @@
-//! Validates syntax inside Rust code blocks (\`\`\`rust).
-use rustc_data_structures::sync::{Lock, Lrc};
-use rustc_errors::{
- emitter::Emitter,
- translation::{to_fluent_args, Translate},
- Applicability, Diagnostic, Handler, LazyFallbackBundle,
-};
-use rustc_parse::parse_stream_from_source_str;
-use rustc_session::parse::ParseSess;
-use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
-use rustc_span::source_map::{FilePathMapping, SourceMap};
-use rustc_span::{FileName, InnerSpan, DUMMY_SP};
-
-use crate::clean;
-use crate::core::DocContext;
-use crate::html::markdown::{self, RustCodeBlock};
-use crate::passes::Pass;
-use crate::visit::DocVisitor;
-
-pub(crate) const CHECK_CODE_BLOCK_SYNTAX: Pass = Pass {
- name: "check-code-block-syntax",
- run: check_code_block_syntax,
- description: "validates syntax inside Rust code blocks",
-};
-
-pub(crate) fn check_code_block_syntax(
- krate: clean::Crate,
- cx: &mut DocContext<'_>,
-) -> clean::Crate {
- SyntaxChecker { cx }.visit_crate(&krate);
- krate
-}
-
-struct SyntaxChecker<'a, 'tcx> {
- cx: &'a DocContext<'tcx>,
-}
-
-impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
- fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeBlock) {
- let buffer = Lrc::new(Lock::new(Buffer::default()));
- let fallback_bundle =
- rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
- let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
-
- let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
- let handler = Handler::with_emitter(false, None, Box::new(emitter));
- let source = dox[code_block.code].to_owned();
- let sess = ParseSess::with_span_handler(handler, sm);
-
- let edition = code_block.lang_string.edition.unwrap_or_else(|| self.cx.tcx.sess.edition());
- let expn_data = ExpnData::default(
- ExpnKind::AstPass(AstPass::TestHarness),
- DUMMY_SP,
- edition,
- None,
- None,
- );
- let expn_id =
- self.cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
- let span = DUMMY_SP.fresh_expansion(expn_id);
-
- let is_empty = rustc_driver::catch_fatal_errors(|| {
- parse_stream_from_source_str(
- FileName::Custom(String::from("doctest")),
- source,
- &sess,
- Some(span),
- )
- .is_empty()
- })
- .unwrap_or(false);
- let buffer = buffer.borrow();
-
- if !buffer.has_errors && !is_empty {
- // No errors in a non-empty program.
- return;
- }
-
- let Some(local_id) = item.item_id.as_def_id().and_then(|x| x.as_local())
- else {
- // We don't need to check the syntax for other crates so returning
- // without doing anything should not be a problem.
- return;
- };
-
- let hir_id = self.cx.tcx.hir().local_def_id_to_hir_id(local_id);
- let empty_block = code_block.lang_string == Default::default() && code_block.is_fenced;
- let is_ignore = code_block.lang_string.ignore != markdown::Ignore::None;
-
- // The span and whether it is precise or not.
- let (sp, precise_span) = match super::source_span_for_markdown_range(
- self.cx.tcx,
- dox,
- &code_block.range,
- &item.attrs,
- ) {
- Some(sp) => (sp, true),
- None => (item.attr_span(self.cx.tcx), false),
- };
-
- let msg = if buffer.has_errors {
- "could not parse code block as Rust code"
- } else {
- "Rust code block is empty"
- };
-
- // Finally build and emit the completed diagnostic.
- // All points of divergence have been handled earlier so this can be
- // done the same way whether the span is precise or not.
- self.cx.tcx.struct_span_lint_hir(
- crate::lint::INVALID_RUST_CODEBLOCKS,
- hir_id,
- sp,
- msg,
- |lint| {
- let explanation = if is_ignore {
- "`ignore` code blocks require valid Rust code for syntax highlighting; \
- mark blocks that do not contain Rust code as text"
- } else {
- "mark blocks that do not contain Rust code as text"
- };
-
- if precise_span {
- if is_ignore {
- // giving an accurate suggestion is hard because `ignore` might not have come first in the list.
- // just give a `help` instead.
- lint.span_help(
- sp.from_inner(InnerSpan::new(0, 3)),
- &format!("{}: ```text", explanation),
- );
- } else if empty_block {
- lint.span_suggestion(
- sp.from_inner(InnerSpan::new(0, 3)).shrink_to_hi(),
- explanation,
- "text",
- Applicability::MachineApplicable,
- );
- }
- } else if empty_block || is_ignore {
- lint.help(&format!("{}: ```text", explanation));
- }
-
- // FIXME(#67563): Provide more context for these errors by displaying the spans inline.
- for message in buffer.messages.iter() {
- lint.note(message);
- }
-
- lint
- },
- );
- }
-}
-
-impl<'a, 'tcx> DocVisitor for SyntaxChecker<'a, 'tcx> {
- fn visit_item(&mut self, item: &clean::Item) {
- if let Some(dox) = &item.attrs.collapsed_doc_value() {
- let sp = item.attr_span(self.cx.tcx);
- let extra = crate::html::markdown::ExtraInfo::new_did(
- self.cx.tcx,
- item.item_id.expect_def_id(),
- sp,
- );
- for code_block in markdown::rust_code_blocks(dox, &extra) {
- self.check_rust_syntax(item, dox, code_block);
- }
- }
-
- self.visit_item_recur(item)
- }
-}
-
-#[derive(Default)]
-struct Buffer {
- messages: Vec<String>,
- has_errors: bool,
-}
-
-struct BufferEmitter {
- buffer: Lrc<Lock<Buffer>>,
- fallback_bundle: LazyFallbackBundle,
-}
-
-impl Translate for BufferEmitter {
- fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
- None
- }
-
- fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
- &**self.fallback_bundle
- }
-}
-
-impl Emitter for BufferEmitter {
- fn emit_diagnostic(&mut self, diag: &Diagnostic) {
- let mut buffer = self.buffer.borrow_mut();
-
- let fluent_args = to_fluent_args(diag.args());
- let translated_main_message = self.translate_message(&diag.message[0].0, &fluent_args);
-
- buffer.messages.push(format!("error from rustc: {}", translated_main_message));
- if diag.is_error() {
- buffer.has_errors = true;
- }
- }
-
- fn source_map(&self) -> Option<&Lrc<SourceMap>> {
- None
- }
-}
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 7740c6d5b..057d2fdd9 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -56,7 +56,7 @@ impl crate::doctest::Tester for Tests {
}
pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -> bool {
- if !cx.cache.effective_visibilities.is_directly_public(item.item_id.expect_def_id())
+ if !cx.cache.effective_visibilities.is_directly_public(cx.tcx, item.item_id.expect_def_id())
|| matches!(
*item.kind,
clean::StructFieldItem(_)
@@ -130,7 +130,7 @@ pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item
);
}
} else if tests.found_tests > 0
- && !cx.cache.effective_visibilities.is_exported(item.item_id.expect_def_id())
+ && !cx.cache.effective_visibilities.is_exported(cx.tcx, item.item_id.expect_def_id())
{
cx.tcx.struct_span_lint_hir(
crate::lint::PRIVATE_DOC_TESTS,
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 8aa0abd36..37a28b6b7 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -402,6 +402,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
})
.and_then(|self_id| match tcx.def_kind(self_id) {
DefKind::Impl => self.def_id_to_res(self_id),
+ DefKind::Use => None,
def_kind => Some(Res::Def(def_kind, self_id)),
})
}
@@ -1772,7 +1773,6 @@ fn resolution_failure(
// Otherwise, it must be an associated item or variant
let res = partial_res.expect("None case was handled by `last_found_module`");
- let name = res.name(tcx);
let kind = match res {
Res::Def(kind, _) => Some(kind),
Res::Primitive(_) => None,
@@ -1814,6 +1814,7 @@ fn resolution_failure(
} else {
"associated item"
};
+ let name = res.name(tcx);
let note = format!(
"the {} `{}` has no {} named `{}`",
res.descr(),
@@ -1893,7 +1894,7 @@ fn disambiguator_error(
diag_info.link_range = disambiguator_range;
report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| {
let msg = format!(
- "see {}/rustdoc/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
+ "see {}/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
crate::DOC_RUST_LANG_ORG_CHANNEL
);
diag.note(&msg);
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 6b699c790..d57f981d5 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -8,7 +8,7 @@ use crate::formats::cache::Cache;
use crate::visit::DocVisitor;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_middle::ty::{self, DefIdTree};
use rustc_span::symbol::sym;
@@ -25,7 +25,9 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
synth.impls
});
- let prims: FxHashSet<PrimitiveType> = krate.primitives.iter().map(|p| p.1).collect();
+ let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
+ let prims: FxHashSet<PrimitiveType> =
+ local_crate.primitives(cx.tcx).iter().map(|p| p.1).collect();
let crate_items = {
let mut coll = ItemCollector::new();
diff --git a/src/librustdoc/passes/lint.rs b/src/librustdoc/passes/lint.rs
new file mode 100644
index 000000000..97031c4f0
--- /dev/null
+++ b/src/librustdoc/passes/lint.rs
@@ -0,0 +1,33 @@
+//! Runs several rustdoc lints, consolidating them into a single pass for
+//! efficiency and simplicity.
+
+mod bare_urls;
+mod check_code_block_syntax;
+mod html_tags;
+
+use super::Pass;
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::visit::DocVisitor;
+
+pub(crate) const RUN_LINTS: Pass =
+ Pass { name: "run-lints", run: run_lints, description: "runs some of rustdoc's lints" };
+
+struct Linter<'a, 'tcx> {
+ cx: &'a mut DocContext<'tcx>,
+}
+
+pub(crate) fn run_lints(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ Linter { cx }.visit_crate(&krate);
+ krate
+}
+
+impl<'a, 'tcx> DocVisitor for Linter<'a, 'tcx> {
+ fn visit_item(&mut self, item: &Item) {
+ bare_urls::visit_item(self.cx, item);
+ check_code_block_syntax::visit_item(self.cx, item);
+ html_tags::visit_item(self.cx, item);
+
+ self.visit_item_recur(item)
+ }
+}
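
For context, the new pass above follows a "one traversal, many checks" design: a single DocVisitor walks the cleaned crate once and hands each item to every lint module, instead of three separate passes each walking the crate. A toy, standalone sketch of that fan-out pattern follows (plain Rust with stand-in types, not rustdoc's; all names here are illustrative only):

    // Toy illustration of the "one pass, many checks" consolidation above.
    // `Item` is a stand-in for rustdoc's cleaned item type.
    struct Item {
        name: String,
        docs: String,
    }

    // Each lint is a plain per-item function, mirroring the
    // `bare_urls::visit_item` / `html_tags::visit_item` shape in this diff.
    fn check_bare_urls(item: &Item) {
        if item.docs.contains("http://") || item.docs.contains("https://") {
            println!("{}: bare URL in docs", item.name);
        }
    }

    fn check_unclosed_tags(item: &Item) {
        if item.docs.matches('<').count() != item.docs.matches('>').count() {
            println!("{}: possibly unclosed HTML tag", item.name);
        }
    }

    fn run_lints(items: &[Item]) {
        // One walk over the items; every check runs on each of them.
        for item in items {
            check_bare_urls(item);
            check_unclosed_tags(item);
        }
    }

    fn main() {
        let items = vec![Item {
            name: "example".into(),
            docs: "Go to https://example.com/ and see <span".into(),
        }];
        run_lints(&items);
    }
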
diff --git a/src/librustdoc/passes/lint/bare_urls.rs b/src/librustdoc/passes/lint/bare_urls.rs
new file mode 100644
index 000000000..423230cfe
--- /dev/null
+++ b/src/librustdoc/passes/lint/bare_urls.rs
@@ -0,0 +1,89 @@
+//! Detects links that are not linkified, e.g., in Markdown such as `Go to https://example.com/.`
+//! Suggests wrapping the link with angle brackets: `Go to <https://example.com/>.` to linkify it.
+
+use crate::clean::*;
+use crate::core::DocContext;
+use crate::html::markdown::main_body_opts;
+use crate::passes::source_span_for_markdown_range;
+use core::ops::Range;
+use pulldown_cmark::{Event, Parser, Tag};
+use regex::Regex;
+use rustc_errors::Applicability;
+use std::mem;
+use std::sync::LazyLock;
+
+pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item) {
+ let Some(hir_id) = DocContext::as_local_hir_id(cx.tcx, item.item_id)
+ else {
+ // If non-local, no need to check anything.
+ return;
+ };
+ let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ if !dox.is_empty() {
+ let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
+ let sp = source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
+ .unwrap_or_else(|| item.attr_span(cx.tcx));
+ cx.tcx.struct_span_lint_hir(crate::lint::BARE_URLS, hir_id, sp, msg, |lint| {
+ lint.note("bare URLs are not automatically turned into clickable links")
+ .span_suggestion(
+ sp,
+ "use an automatic link instead",
+ format!("<{}>", url),
+ Applicability::MachineApplicable,
+ )
+ });
+ };
+
+ let mut p = Parser::new_ext(&dox, main_body_opts()).into_offset_iter();
+
+ while let Some((event, range)) = p.next() {
+ match event {
+ Event::Text(s) => find_raw_urls(cx, &s, range, &report_diag),
+ // We don't want to check the text inside code blocks or links.
+ Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link(..))) => {
+ while let Some((event, _)) = p.next() {
+ match event {
+ Event::End(end)
+ if mem::discriminant(&end) == mem::discriminant(&tag) =>
+ {
+ break;
+ }
+ _ => {}
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+}
+
+static URL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
+ Regex::new(concat!(
+ r"https?://", // url scheme
+ r"([-a-zA-Z0-9@:%._\+~#=]{2,256}\.)+", // one or more subdomains
+ r"[a-zA-Z]{2,63}", // root domain
+ r"\b([-a-zA-Z0-9@:%_\+.~#?&/=]*)" // optional query or url fragments
+ ))
+ .expect("failed to build regex")
+});
+
+fn find_raw_urls(
+ cx: &DocContext<'_>,
+ text: &str,
+ range: Range<usize>,
+ f: &impl Fn(&DocContext<'_>, &str, &str, Range<usize>),
+) {
+ trace!("looking for raw urls in {}", text);
+ // For now, we only check "full" URLs (meaning, starting with "http://" or "https://").
+ for match_ in URL_REGEX.find_iter(text) {
+ let url = match_.as_str();
+ let url_range = match_.range();
+ f(
+ cx,
+ "this URL is not a hyperlink",
+ url,
+ Range { start: range.start + url_range.start, end: range.start + url_range.end },
+ );
+ }
+}
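
As a standalone illustration of what URL_REGEX above matches, here is a minimal sketch using only the regex crate; the pattern is copied from the diff, while the surrounding program and example text are illustrative:

    // Minimal demo of the bare-URL pattern, outside rustdoc.
    // Assumes the `regex` crate (e.g. `regex = "1"`) in Cargo.toml.
    use regex::Regex;

    fn main() {
        let url_regex = Regex::new(concat!(
            r"https?://",                          // url scheme
            r"([-a-zA-Z0-9@:%._\+~#=]{2,256}\.)+", // one or more subdomains
            r"[a-zA-Z]{2,63}",                     // root domain
            r"\b([-a-zA-Z0-9@:%_\+.~#?&/=]*)"      // optional query or url fragments
        ))
        .expect("failed to build regex");

        let text = "Go to https://example.com/docs for details.";
        for m in url_regex.find_iter(text) {
            // The lint reports the match range and suggests an automatic link.
            println!("bare URL {:?} at {:?}", m.as_str(), m.range());
            println!("suggested replacement: <{}>", m.as_str());
        }
    }
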
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
new file mode 100644
index 000000000..5aa4f238b
--- /dev/null
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -0,0 +1,170 @@
+//! Validates syntax inside Rust code blocks (\`\`\`rust).
+use rustc_data_structures::sync::{Lock, Lrc};
+use rustc_errors::{
+ emitter::Emitter,
+ translation::{to_fluent_args, Translate},
+ Applicability, Diagnostic, Handler, LazyFallbackBundle,
+};
+use rustc_parse::parse_stream_from_source_str;
+use rustc_session::parse::ParseSess;
+use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::{FileName, InnerSpan, DUMMY_SP};
+
+use crate::clean;
+use crate::core::DocContext;
+use crate::html::markdown::{self, RustCodeBlock};
+use crate::passes::source_span_for_markdown_range;
+
+pub(crate) fn visit_item(cx: &DocContext<'_>, item: &clean::Item) {
+ if let Some(dox) = &item.attrs.collapsed_doc_value() {
+ let sp = item.attr_span(cx.tcx);
+ let extra =
+ crate::html::markdown::ExtraInfo::new_did(cx.tcx, item.item_id.expect_def_id(), sp);
+ for code_block in markdown::rust_code_blocks(dox, &extra) {
+ check_rust_syntax(cx, item, dox, code_block);
+ }
+ }
+}
+
+fn check_rust_syntax(
+ cx: &DocContext<'_>,
+ item: &clean::Item,
+ dox: &str,
+ code_block: RustCodeBlock,
+) {
+ let buffer = Lrc::new(Lock::new(Buffer::default()));
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
+
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let source = dox[code_block.code].to_owned();
+ let sess = ParseSess::with_span_handler(handler, sm);
+
+ let edition = code_block.lang_string.edition.unwrap_or_else(|| cx.tcx.sess.edition());
+ let expn_data =
+ ExpnData::default(ExpnKind::AstPass(AstPass::TestHarness), DUMMY_SP, edition, None, None);
+ let expn_id = cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
+ let span = DUMMY_SP.fresh_expansion(expn_id);
+
+ let is_empty = rustc_driver::catch_fatal_errors(|| {
+ parse_stream_from_source_str(
+ FileName::Custom(String::from("doctest")),
+ source,
+ &sess,
+ Some(span),
+ )
+ .is_empty()
+ })
+ .unwrap_or(false);
+ let buffer = buffer.borrow();
+
+ if !buffer.has_errors && !is_empty {
+ // No errors in a non-empty program.
+ return;
+ }
+
+ let Some(local_id) = item.item_id.as_def_id().and_then(|x| x.as_local())
+ else {
+ // We don't need to check the syntax for other crates so returning
+ // without doing anything should not be a problem.
+ return;
+ };
+
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
+ let empty_block = code_block.lang_string == Default::default() && code_block.is_fenced;
+ let is_ignore = code_block.lang_string.ignore != markdown::Ignore::None;
+
+ // The span and whether it is precise or not.
+ let (sp, precise_span) =
+ match source_span_for_markdown_range(cx.tcx, dox, &code_block.range, &item.attrs) {
+ Some(sp) => (sp, true),
+ None => (item.attr_span(cx.tcx), false),
+ };
+
+ let msg = if buffer.has_errors {
+ "could not parse code block as Rust code"
+ } else {
+ "Rust code block is empty"
+ };
+
+ // Finally build and emit the completed diagnostic.
+ // All points of divergence have been handled earlier so this can be
+ // done the same way whether the span is precise or not.
+ cx.tcx.struct_span_lint_hir(crate::lint::INVALID_RUST_CODEBLOCKS, hir_id, sp, msg, |lint| {
+ let explanation = if is_ignore {
+ "`ignore` code blocks require valid Rust code for syntax highlighting; \
+ mark blocks that do not contain Rust code as text"
+ } else {
+ "mark blocks that do not contain Rust code as text"
+ };
+
+ if precise_span {
+ if is_ignore {
+ // giving an accurate suggestion is hard because `ignore` might not have come first in the list.
+ // just give a `help` instead.
+ lint.span_help(
+ sp.from_inner(InnerSpan::new(0, 3)),
+ &format!("{}: ```text", explanation),
+ );
+ } else if empty_block {
+ lint.span_suggestion(
+ sp.from_inner(InnerSpan::new(0, 3)).shrink_to_hi(),
+ explanation,
+ "text",
+ Applicability::MachineApplicable,
+ );
+ }
+ } else if empty_block || is_ignore {
+ lint.help(&format!("{}: ```text", explanation));
+ }
+
+ // FIXME(#67563): Provide more context for these errors by displaying the spans inline.
+ for message in buffer.messages.iter() {
+ lint.note(message);
+ }
+
+ lint
+ });
+}
+
+#[derive(Default)]
+struct Buffer {
+ messages: Vec<String>,
+ has_errors: bool,
+}
+
+struct BufferEmitter {
+ buffer: Lrc<Lock<Buffer>>,
+ fallback_bundle: LazyFallbackBundle,
+}
+
+impl Translate for BufferEmitter {
+ fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
+ None
+ }
+
+ fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
+ &**self.fallback_bundle
+ }
+}
+
+impl Emitter for BufferEmitter {
+ fn emit_diagnostic(&mut self, diag: &Diagnostic) {
+ let mut buffer = self.buffer.borrow_mut();
+
+ let fluent_args = to_fluent_args(diag.args());
+ let translated_main_message = self.translate_message(&diag.message[0].0, &fluent_args);
+
+ buffer.messages.push(format!("error from rustc: {}", translated_main_message));
+ if diag.is_error() {
+ buffer.has_errors = true;
+ }
+ }
+
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+}
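
The BufferEmitter above is the key mechanism of this lint: rather than letting the parser print to stderr, diagnostics are captured into a shared buffer and later replayed as notes on the invalid_rust_codeblocks lint. A simplified, dependency-free sketch of that capture pattern follows (the Emitter trait below is a stand-in, not rustc's):

    // Simplified sketch of the diagnostic-capture pattern: an emitter that
    // records messages into a shared buffer instead of printing them.
    use std::cell::RefCell;
    use std::rc::Rc;

    #[derive(Default)]
    struct Buffer {
        messages: Vec<String>,
        has_errors: bool,
    }

    // Stand-in for rustc's Emitter trait.
    trait Emitter {
        fn emit(&mut self, message: &str, is_error: bool);
    }

    struct BufferEmitter {
        buffer: Rc<RefCell<Buffer>>,
    }

    impl Emitter for BufferEmitter {
        fn emit(&mut self, message: &str, is_error: bool) {
            let mut buffer = self.buffer.borrow_mut();
            buffer.messages.push(format!("error from rustc: {message}"));
            if is_error {
                buffer.has_errors = true;
            }
        }
    }

    fn main() {
        let buffer = Rc::new(RefCell::new(Buffer::default()));
        let mut emitter = BufferEmitter { buffer: Rc::clone(&buffer) };

        // Pretend the parser reported a problem while checking a code block.
        emitter.emit("expected one of `!` or `::`, found `<eof>`", true);

        let buffer = buffer.borrow();
        if buffer.has_errors {
            // In the real pass these become `lint.note(...)` calls.
            for msg in &buffer.messages {
                println!("note: {msg}");
            }
        }
    }
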
diff --git a/src/librustdoc/passes/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index a89ed7c7e..070c0aab5 100644
--- a/src/librustdoc/passes/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -1,9 +1,8 @@
//! Detects invalid HTML (like an unclosed `<span>`) in doc comments.
-use super::Pass;
use crate::clean::*;
use crate::core::DocContext;
use crate::html::markdown::main_body_opts;
-use crate::visit::DocVisitor;
+use crate::passes::source_span_for_markdown_range;
use pulldown_cmark::{BrokenLink, Event, LinkType, Parser, Tag};
@@ -11,20 +10,150 @@ use std::iter::Peekable;
use std::ops::Range;
use std::str::CharIndices;
-pub(crate) const CHECK_INVALID_HTML_TAGS: Pass = Pass {
- name: "check-invalid-html-tags",
- run: check_invalid_html_tags,
- description: "detects invalid HTML tags in doc comments",
-};
+pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
+ let tcx = cx.tcx;
+ let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
+ // If non-local, no need to check anything.
+ else { return };
+ let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ if !dox.is_empty() {
+ let report_diag = |msg: &str, range: &Range<usize>, is_open_tag: bool| {
+ let sp = match source_span_for_markdown_range(tcx, &dox, range, &item.attrs) {
+ Some(sp) => sp,
+ None => item.attr_span(tcx),
+ };
+ tcx.struct_span_lint_hir(crate::lint::INVALID_HTML_TAGS, hir_id, sp, msg, |lint| {
+ use rustc_lint_defs::Applicability;
+ // If a tag looks like `<this>`, it might actually be a generic.
+ // We don't try to detect stuff `<like, this>` because that's not valid HTML,
+ // and we don't try to detect stuff `<like this>` because that's not valid Rust.
+ let mut generics_end = range.end;
+ if let Some(Some(mut generics_start)) = (is_open_tag
+ && dox[..generics_end].ends_with('>'))
+ .then(|| extract_path_backwards(&dox, range.start))
+ {
+ while generics_start != 0
+ && generics_end < dox.len()
+ && dox.as_bytes()[generics_start - 1] == b'<'
+ && dox.as_bytes()[generics_end] == b'>'
+ {
+ generics_end += 1;
+ generics_start -= 1;
+ if let Some(new_start) = extract_path_backwards(&dox, generics_start) {
+ generics_start = new_start;
+ }
+ if let Some(new_end) = extract_path_forward(&dox, generics_end) {
+ generics_end = new_end;
+ }
+ }
+ if let Some(new_end) = extract_path_forward(&dox, generics_end) {
+ generics_end = new_end;
+ }
+ let generics_sp = match source_span_for_markdown_range(
+ tcx,
+ &dox,
+ &(generics_start..generics_end),
+ &item.attrs,
+ ) {
+ Some(sp) => sp,
+ None => item.attr_span(tcx),
+ };
+ // Sometimes, we only extract part of a path. For example, consider this:
+ //
+ // <[u32] as IntoIter<u32>>::Item
+ // ^^^^^ unclosed HTML tag `u32`
+ //
+ // We don't have any code for parsing fully-qualified trait paths.
+ // In theory, we could add it, but doing it correctly would require
+ // parsing the entire path grammar, which is problematic because of
+ // overlap between the path grammar and Markdown.
+ //
+ // The example above shows that ambiguity. Is `[u32]` intended to be an
+ // intra-doc link to the u32 primitive, or is it intended to be a slice?
+ //
+ // If the below conditional were removed, we would suggest this, which is
+ // not what the user probably wants.
+ //
+ // <[u32] as `IntoIter<u32>`>::Item
+ //
+ // We know that the user actually wants to wrap the whole thing in a code
+ // block, but the only reason we know that is because `u32` does not, in
+ // fact, implement IntoIter. If the example looks like this:
+ //
+ // <[Vec<i32>] as IntoIter<i32>::Item
+ //
+ // The ideal fix would be significantly different.
+ if (generics_start > 0 && dox.as_bytes()[generics_start - 1] == b'<')
+ || (generics_end < dox.len() && dox.as_bytes()[generics_end] == b'>')
+ {
+ return lint;
+ }
+ // multipart form is chosen here because ``Vec<i32>`` would be confusing.
+ lint.multipart_suggestion(
+ "try marking as source code",
+ vec![
+ (generics_sp.shrink_to_lo(), String::from("`")),
+ (generics_sp.shrink_to_hi(), String::from("`")),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ }
-struct InvalidHtmlTagsLinter<'a, 'tcx> {
- cx: &'a mut DocContext<'tcx>,
-}
+ lint
+ });
+ };
+
+ let mut tags = Vec::new();
+ let mut is_in_comment = None;
+ let mut in_code_block = false;
+
+ let link_names = item.link_names(&cx.cache);
-pub(crate) fn check_invalid_html_tags(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
- let mut coll = InvalidHtmlTagsLinter { cx };
- coll.visit_crate(&krate);
- krate
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ if let Some(link) =
+ link_names.iter().find(|link| *link.original_text == *broken_link.reference)
+ {
+ Some((link.href.as_str().into(), link.new_text.as_str().into()))
+ } else if matches!(
+ &broken_link.link_type,
+ LinkType::Reference | LinkType::ReferenceUnknown
+ ) {
+ // If the link is shaped [like][this], suppress any broken HTML in the [this] part.
+ // The `broken_intra_doc_links` will report typos in there anyway.
+ Some((
+ broken_link.reference.to_string().into(),
+ broken_link.reference.to_string().into(),
+ ))
+ } else {
+ None
+ }
+ };
+
+ let p = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ .into_offset_iter();
+
+ for (event, range) in p {
+ match event {
+ Event::Start(Tag::CodeBlock(_)) => in_code_block = true,
+ Event::Html(text) if !in_code_block => {
+ extract_tags(&mut tags, &text, range, &mut is_in_comment, &report_diag)
+ }
+ Event::End(Tag::CodeBlock(_)) => in_code_block = false,
+ _ => {}
+ }
+ }
+
+ for (tag, range) in tags.iter().filter(|(t, _)| {
+ let t = t.to_lowercase();
+ !ALLOWED_UNCLOSED.contains(&t.as_str())
+ }) {
+ report_diag(&format!("unclosed HTML tag `{}`", tag), range, true);
+ }
+
+ if let Some(range) = is_in_comment {
+ report_diag("Unclosed HTML comment", &range, false);
+ }
+ }
}
const ALLOWED_UNCLOSED: &[&str] = &[
@@ -276,155 +405,3 @@ fn extract_tags(
}
}
}
-
-impl<'a, 'tcx> DocVisitor for InvalidHtmlTagsLinter<'a, 'tcx> {
- fn visit_item(&mut self, item: &Item) {
- let tcx = self.cx.tcx;
- let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
- // If non-local, no need to check anything.
- else { return };
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
- if !dox.is_empty() {
- let report_diag = |msg: &str, range: &Range<usize>, is_open_tag: bool| {
- let sp = match super::source_span_for_markdown_range(tcx, &dox, range, &item.attrs)
- {
- Some(sp) => sp,
- None => item.attr_span(tcx),
- };
- tcx.struct_span_lint_hir(crate::lint::INVALID_HTML_TAGS, hir_id, sp, msg, |lint| {
- use rustc_lint_defs::Applicability;
- // If a tag looks like `<this>`, it might actually be a generic.
- // We don't try to detect stuff `<like, this>` because that's not valid HTML,
- // and we don't try to detect stuff `<like this>` because that's not valid Rust.
- let mut generics_end = range.end;
- if let Some(Some(mut generics_start)) = (is_open_tag
- && dox[..generics_end].ends_with('>'))
- .then(|| extract_path_backwards(&dox, range.start))
- {
- while generics_start != 0
- && generics_end < dox.len()
- && dox.as_bytes()[generics_start - 1] == b'<'
- && dox.as_bytes()[generics_end] == b'>'
- {
- generics_end += 1;
- generics_start -= 1;
- if let Some(new_start) = extract_path_backwards(&dox, generics_start) {
- generics_start = new_start;
- }
- if let Some(new_end) = extract_path_forward(&dox, generics_end) {
- generics_end = new_end;
- }
- }
- if let Some(new_end) = extract_path_forward(&dox, generics_end) {
- generics_end = new_end;
- }
- let generics_sp = match super::source_span_for_markdown_range(
- tcx,
- &dox,
- &(generics_start..generics_end),
- &item.attrs,
- ) {
- Some(sp) => sp,
- None => item.attr_span(tcx),
- };
- // Sometimes, we only extract part of a path. For example, consider this:
- //
- // <[u32] as IntoIter<u32>>::Item
- // ^^^^^ unclosed HTML tag `u32`
- //
- // We don't have any code for parsing fully-qualified trait paths.
- // In theory, we could add it, but doing it correctly would require
- // parsing the entire path grammar, which is problematic because of
- // overlap between the path grammar and Markdown.
- //
- // The example above shows that ambiguity. Is `[u32]` intended to be an
- // intra-doc link to the u32 primitive, or is it intended to be a slice?
- //
- // If the below conditional were removed, we would suggest this, which is
- // not what the user probably wants.
- //
- // <[u32] as `IntoIter<u32>`>::Item
- //
- // We know that the user actually wants to wrap the whole thing in a code
- // block, but the only reason we know that is because `u32` does not, in
- // fact, implement IntoIter. If the example looks like this:
- //
- // <[Vec<i32>] as IntoIter<i32>::Item
- //
- // The ideal fix would be significantly different.
- if (generics_start > 0 && dox.as_bytes()[generics_start - 1] == b'<')
- || (generics_end < dox.len() && dox.as_bytes()[generics_end] == b'>')
- {
- return lint;
- }
- // multipart form is chosen here because ``Vec<i32>`` would be confusing.
- lint.multipart_suggestion(
- "try marking as source code",
- vec![
- (generics_sp.shrink_to_lo(), String::from("`")),
- (generics_sp.shrink_to_hi(), String::from("`")),
- ],
- Applicability::MaybeIncorrect,
- );
- }
-
- lint
- });
- };
-
- let mut tags = Vec::new();
- let mut is_in_comment = None;
- let mut in_code_block = false;
-
- let link_names = item.link_names(&self.cx.cache);
-
- let mut replacer = |broken_link: BrokenLink<'_>| {
- if let Some(link) =
- link_names.iter().find(|link| *link.original_text == *broken_link.reference)
- {
- Some((link.href.as_str().into(), link.new_text.as_str().into()))
- } else if matches!(
- &broken_link.link_type,
- LinkType::Reference | LinkType::ReferenceUnknown
- ) {
- // If the link is shaped [like][this], suppress any broken HTML in the [this] part.
- // The `broken_intra_doc_links` will report typos in there anyway.
- Some((
- broken_link.reference.to_string().into(),
- broken_link.reference.to_string().into(),
- ))
- } else {
- None
- }
- };
-
- let p =
- Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
- .into_offset_iter();
-
- for (event, range) in p {
- match event {
- Event::Start(Tag::CodeBlock(_)) => in_code_block = true,
- Event::Html(text) if !in_code_block => {
- extract_tags(&mut tags, &text, range, &mut is_in_comment, &report_diag)
- }
- Event::End(Tag::CodeBlock(_)) => in_code_block = false,
- _ => {}
- }
- }
-
- for (tag, range) in tags.iter().filter(|(t, _)| {
- let t = t.to_lowercase();
- !ALLOWED_UNCLOSED.contains(&t.as_str())
- }) {
- report_diag(&format!("unclosed HTML tag `{}`", tag), range, true);
- }
-
- if let Some(range) = is_in_comment {
- report_diag("Unclosed HTML comment", &range, false);
- }
- }
-
- self.visit_item_recur(item)
- }
-}
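
To see why the generics heuristic above matters, consider docs that mention a generic type in prose: the invalid_html_tags lint reports an unclosed tag for text like Vec<u32>, and the suggestion wraps the whole path in backticks. An illustrative (hypothetical) pair of items showing the problem and the suggested fix:

    // Hypothetical items, illustrative only. rustdoc's `invalid_html_tags`
    // lint flags the first doc comment (prose `Vec<u32>` looks like an
    // unclosed `<u32>` tag) and suggests marking it as source code, as in
    // the second doc comment.

    /// Converts the input into a Vec<u32> of code points.
    pub fn to_code_points(s: &str) -> Vec<u32> {
        s.chars().map(|c| c as u32).collect()
    }

    /// Converts the input into a `Vec<u32>` of code points.
    pub fn to_code_points_ok(s: &str) -> Vec<u32> {
        s.chars().map(|c| c as u32).collect()
    }
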
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
index f81b38ea3..634e70ec9 100644
--- a/src/librustdoc/passes/mod.rs
+++ b/src/librustdoc/passes/mod.rs
@@ -12,9 +12,6 @@ use crate::core::DocContext;
mod stripper;
pub(crate) use stripper::*;
-mod bare_urls;
-pub(crate) use self::bare_urls::CHECK_BARE_URLS;
-
mod strip_hidden;
pub(crate) use self::strip_hidden::STRIP_HIDDEN;
@@ -36,14 +33,11 @@ pub(crate) use self::check_doc_test_visibility::CHECK_DOC_TEST_VISIBILITY;
mod collect_trait_impls;
pub(crate) use self::collect_trait_impls::COLLECT_TRAIT_IMPLS;
-mod check_code_block_syntax;
-pub(crate) use self::check_code_block_syntax::CHECK_CODE_BLOCK_SYNTAX;
-
mod calculate_doc_coverage;
pub(crate) use self::calculate_doc_coverage::CALCULATE_DOC_COVERAGE;
-mod html_tags;
-pub(crate) use self::html_tags::CHECK_INVALID_HTML_TAGS;
+mod lint;
+pub(crate) use self::lint::RUN_LINTS;
/// A single pass over the cleaned documentation.
///
@@ -82,11 +76,9 @@ pub(crate) const PASSES: &[Pass] = &[
STRIP_PRIV_IMPORTS,
PROPAGATE_DOC_CFG,
COLLECT_INTRA_DOC_LINKS,
- CHECK_CODE_BLOCK_SYNTAX,
COLLECT_TRAIT_IMPLS,
CALCULATE_DOC_COVERAGE,
- CHECK_INVALID_HTML_TAGS,
- CHECK_BARE_URLS,
+ RUN_LINTS,
];
/// The list of passes run by default.
@@ -97,10 +89,8 @@ pub(crate) const DEFAULT_PASSES: &[ConditionalPass] = &[
ConditionalPass::new(STRIP_PRIVATE, WhenNotDocumentPrivate),
ConditionalPass::new(STRIP_PRIV_IMPORTS, WhenDocumentPrivate),
ConditionalPass::always(COLLECT_INTRA_DOC_LINKS),
- ConditionalPass::always(CHECK_CODE_BLOCK_SYNTAX),
- ConditionalPass::always(CHECK_INVALID_HTML_TAGS),
ConditionalPass::always(PROPAGATE_DOC_CFG),
- ConditionalPass::always(CHECK_BARE_URLS),
+ ConditionalPass::always(RUN_LINTS),
];
/// The list of default passes run when `--doc-coverage` is passed to rustdoc.
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index 9914edf30..e07a788a7 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -27,6 +27,7 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea
// strip all impls referencing stripped items
let mut stripper = ImplStripper {
+ tcx: cx.tcx,
retained: &retained,
cache: &cx.cache,
is_json_output,
diff --git a/src/librustdoc/passes/strip_priv_imports.rs b/src/librustdoc/passes/strip_priv_imports.rs
index 85be8fa10..3bac5a8e5 100644
--- a/src/librustdoc/passes/strip_priv_imports.rs
+++ b/src/librustdoc/passes/strip_priv_imports.rs
@@ -11,6 +11,6 @@ pub(crate) const STRIP_PRIV_IMPORTS: Pass = Pass {
description: "strips all private import statements (`use`, `extern crate`) from a crate",
};
-pub(crate) fn strip_priv_imports(krate: clean::Crate, _: &mut DocContext<'_>) -> clean::Crate {
- ImportStripper.fold_crate(krate)
+pub(crate) fn strip_priv_imports(krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate {
+ ImportStripper { tcx: cx.tcx }.fold_crate(krate)
}
diff --git a/src/librustdoc/passes/strip_private.rs b/src/librustdoc/passes/strip_private.rs
index 450f69e15..8fc42462d 100644
--- a/src/librustdoc/passes/strip_private.rs
+++ b/src/librustdoc/passes/strip_private.rs
@@ -26,12 +26,14 @@ pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) ->
effective_visibilities: &cx.cache.effective_visibilities,
update_retained: true,
is_json_output,
+ tcx: cx.tcx,
};
- krate = ImportStripper.fold_crate(stripper.fold_crate(krate));
+ krate = ImportStripper { tcx: cx.tcx }.fold_crate(stripper.fold_crate(krate));
}
// strip all impls referencing private items
let mut stripper = ImplStripper {
+ tcx: cx.tcx,
retained: &retained,
cache: &cx.cache,
is_json_output,
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index 0089ce63d..995fb5dcc 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -1,19 +1,20 @@
//! A collection of utility functions for the `strip_*` passes.
use rustc_hir::def_id::DefId;
-use rustc_middle::middle::privacy::EffectiveVisibilities;
+use rustc_middle::ty::{TyCtxt, Visibility};
use rustc_span::symbol::sym;
-
use std::mem;
use crate::clean::{self, Item, ItemId, ItemIdSet, NestedAttributesExt};
use crate::fold::{strip_item, DocFolder};
use crate::formats::cache::Cache;
+use crate::visit_lib::RustdocEffectiveVisibilities;
-pub(crate) struct Stripper<'a> {
+pub(crate) struct Stripper<'a, 'tcx> {
pub(crate) retained: &'a mut ItemIdSet,
- pub(crate) effective_visibilities: &'a EffectiveVisibilities<DefId>,
+ pub(crate) effective_visibilities: &'a RustdocEffectiveVisibilities,
pub(crate) update_retained: bool,
pub(crate) is_json_output: bool,
+ pub(crate) tcx: TyCtxt<'tcx>,
}
// We need to handle this differently for the JSON output because some non exported items could
@@ -21,18 +22,19 @@ pub(crate) struct Stripper<'a> {
// are in the public API, which is not enough.
#[inline]
fn is_item_reachable(
+ tcx: TyCtxt<'_>,
is_json_output: bool,
- effective_visibilities: &EffectiveVisibilities<DefId>,
+ effective_visibilities: &RustdocEffectiveVisibilities,
item_id: ItemId,
) -> bool {
if is_json_output {
- effective_visibilities.is_reachable(item_id.expect_def_id())
+ effective_visibilities.is_reachable(tcx, item_id.expect_def_id())
} else {
- effective_visibilities.is_exported(item_id.expect_def_id())
+ effective_visibilities.is_exported(tcx, item_id.expect_def_id())
}
}
-impl<'a> DocFolder for Stripper<'a> {
+impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match *i.kind {
clean::StrippedItem(..) => {
@@ -66,7 +68,12 @@ impl<'a> DocFolder for Stripper<'a> {
| clean::ForeignTypeItem => {
let item_id = i.item_id;
if item_id.is_local()
- && !is_item_reachable(self.is_json_output, self.effective_visibilities, item_id)
+ && !is_item_reachable(
+ self.tcx,
+ self.is_json_output,
+ self.effective_visibilities,
+ item_id,
+ )
{
debug!("Stripper: stripping {:?} {:?}", i.type_(), i.name);
return None;
@@ -74,13 +81,13 @@ impl<'a> DocFolder for Stripper<'a> {
}
clean::StructFieldItem(..) => {
- if !i.visibility.is_public() {
+ if i.visibility(self.tcx) != Some(Visibility::Public) {
return Some(strip_item(i));
}
}
clean::ModuleItem(..) => {
- if i.item_id.is_local() && !i.visibility.is_public() {
+ if i.item_id.is_local() && i.visibility(self.tcx) != Some(Visibility::Public) {
debug!("Stripper: stripping module {:?}", i.name);
let old = mem::replace(&mut self.update_retained, false);
let ret = strip_item(self.fold_item_recur(i));
@@ -146,14 +153,15 @@ impl<'a> DocFolder for Stripper<'a> {
}
/// This stripper discards all impls which reference stripped items
-pub(crate) struct ImplStripper<'a> {
+pub(crate) struct ImplStripper<'a, 'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
pub(crate) retained: &'a ItemIdSet,
pub(crate) cache: &'a Cache,
pub(crate) is_json_output: bool,
pub(crate) document_private: bool,
}
-impl<'a> ImplStripper<'a> {
+impl<'a> ImplStripper<'a, '_> {
#[inline]
fn should_keep_impl(&self, item: &Item, for_def_id: DefId) -> bool {
if !for_def_id.is_local() || self.retained.contains(&for_def_id.into()) {
@@ -161,7 +169,7 @@ impl<'a> ImplStripper<'a> {
} else if self.is_json_output {
// If the "for" item is exported and the impl block isn't `#[doc(hidden)]`, then we
// need to keep it.
- self.cache.effective_visibilities.is_exported(for_def_id)
+ self.cache.effective_visibilities.is_exported(self.tcx, for_def_id)
&& !item.attrs.lists(sym::doc).has_word(sym::hidden)
} else {
false
@@ -169,7 +177,7 @@ impl<'a> ImplStripper<'a> {
}
}
-impl<'a> DocFolder for ImplStripper<'a> {
+impl<'a> DocFolder for ImplStripper<'a, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
if let clean::ImplItem(ref imp) = *i.kind {
// Impl blocks can be skipped if they are: empty; not a trait impl; and have no
@@ -185,6 +193,7 @@ impl<'a> DocFolder for ImplStripper<'a> {
let item_id = i.item_id;
item_id.is_local()
&& !is_item_reachable(
+ self.tcx,
self.is_json_output,
&self.cache.effective_visibilities,
item_id,
@@ -229,12 +238,16 @@ impl<'a> DocFolder for ImplStripper<'a> {
}
/// This stripper discards all private import statements (`use`, `extern crate`)
-pub(crate) struct ImportStripper;
+pub(crate) struct ImportStripper<'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+}
-impl DocFolder for ImportStripper {
+impl<'tcx> DocFolder for ImportStripper<'tcx> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match *i.kind {
- clean::ExternCrateItem { .. } | clean::ImportItem(..) if !i.visibility.is_public() => {
+ clean::ExternCrateItem { .. } | clean::ImportItem(..)
+ if i.visibility(self.tcx) != Some(Visibility::Public) =>
+ {
None
}
_ => Some(self.fold_item_recur(i)),