summaryrefslogtreecommitdiffstats
path: root/src/librustdoc/passes
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-18 02:49:42 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-18 02:49:42 +0000
commit837b550238aa671a591ccf282dddeab29cadb206 (patch)
tree914b6b8862bace72bd3245ca184d374b08d8a672 /src/librustdoc/passes
parentAdding debian version 1.70.0+dfsg2-1. (diff)
downloadrustc-837b550238aa671a591ccf282dddeab29cadb206.tar.xz
rustc-837b550238aa671a591ccf282dddeab29cadb206.zip
Merging upstream version 1.71.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/librustdoc/passes')
-rw-r--r--src/librustdoc/passes/calculate_doc_coverage.rs8
-rw-r--r--src/librustdoc/passes/check_doc_test_visibility.rs6
-rw-r--r--src/librustdoc/passes/collect_intra_doc_links.rs289
-rw-r--r--src/librustdoc/passes/collect_trait_impls.rs34
-rw-r--r--src/librustdoc/passes/lint.rs2
-rw-r--r--src/librustdoc/passes/lint/bare_urls.rs2
-rw-r--r--src/librustdoc/passes/lint/check_code_block_syntax.rs8
-rw-r--r--src/librustdoc/passes/lint/html_tags.rs2
-rw-r--r--src/librustdoc/passes/lint/unescaped_backticks.rs416
-rw-r--r--src/librustdoc/passes/strip_hidden.rs30
-rw-r--r--src/librustdoc/passes/stripper.rs2
11 files changed, 642 insertions, 157 deletions
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index be5286b24..6ead0cd96 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -206,13 +206,7 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
let has_docs = !i.attrs.doc_strings.is_empty();
let mut tests = Tests { found_tests: 0 };
- find_testable_code(
- &i.attrs.collapsed_doc_value().unwrap_or_default(),
- &mut tests,
- ErrorCodes::No,
- false,
- None,
- );
+ find_testable_code(&i.doc_value(), &mut tests, ErrorCodes::No, false, None);
let has_doc_example = tests.found_tests != 0;
let hir_id = DocContext::as_local_hir_id(self.ctx.tcx, i.item_id).unwrap();
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 6b13e6c95..b6cd897d3 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -34,9 +34,7 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -
impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> {
fn visit_item(&mut self, item: &Item) {
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
-
- look_for_tests(self.cx, &dox, item);
+ look_for_tests(self.cx, &item.doc_value(), item);
self.visit_item_recur(item)
}
@@ -95,7 +93,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
}
if cx.tcx.is_doc_hidden(def_id.to_def_id())
- || inherits_doc_hidden(cx.tcx, def_id)
+ || inherits_doc_hidden(cx.tcx, def_id, None)
|| cx.tcx.def_span(def_id.to_def_id()).in_derive_expansion()
{
return false;
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 2cd9c8a87..061a572c4 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -8,12 +8,12 @@ use rustc_data_structures::{
fx::{FxHashMap, FxHashSet},
intern::Interned,
};
-use rustc_errors::{Applicability, Diagnostic};
+use rustc_errors::{Applicability, Diagnostic, DiagnosticMessage};
use rustc_hir::def::Namespace::*;
use rustc_hir::def::{DefKind, Namespace, PerNS};
use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
use rustc_hir::Mutability;
-use rustc_middle::ty::{fast_reject::TreatProjections, Ty, TyCtxt};
+use rustc_middle::ty::{Ty, TyCtxt};
use rustc_middle::{bug, ty};
use rustc_resolve::rustdoc::{has_primitive_or_keyword_docs, prepare_to_doc_link_resolution};
use rustc_resolve::rustdoc::{strip_generics_from_path, MalformedGenerics};
@@ -24,13 +24,14 @@ use rustc_span::BytePos;
use smallvec::{smallvec, SmallVec};
use std::borrow::Cow;
+use std::fmt::Display;
use std::mem;
use std::ops::Range;
use crate::clean::{self, utils::find_nearest_parent_module};
use crate::clean::{Crate, Item, ItemLink, PrimitiveType};
use crate::core::DocContext;
-use crate::html::markdown::{markdown_links, MarkdownLink};
+use crate::html::markdown::{markdown_links, MarkdownLink, MarkdownLinkRange};
use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
use crate::passes::Pass;
use crate::visit::DocVisitor;
@@ -148,7 +149,7 @@ impl TryFrom<ResolveRes> for Res {
Def(kind, id) => Ok(Res::Def(kind, id)),
PrimTy(prim) => Ok(Res::Primitive(PrimitiveType::from_hir(prim))),
// e.g. `#[derive]`
- NonMacroAttr(..) | Err => Result::Err(()),
+ ToolMod | NonMacroAttr(..) | Err => Result::Err(()),
other => bug!("unrecognized res {:?}", other),
}
}
@@ -247,7 +248,7 @@ struct DiagnosticInfo<'a> {
item: &'a Item,
dox: &'a str,
ori_link: &'a str,
- link_range: Range<usize>,
+ link_range: MarkdownLinkRange,
}
struct LinkCollector<'a, 'tcx> {
@@ -722,7 +723,7 @@ fn resolve_associated_trait_item<'a>(
.iter()
.flat_map(|&(impl_, trait_)| {
filter_assoc_items_by_name_and_namespace(
- cx.tcx,
+ tcx,
trait_,
Ident::with_dummy_span(item_name),
ns,
@@ -772,11 +773,10 @@ fn trait_impls_for<'a>(
module: DefId,
) -> FxHashSet<(DefId, DefId)> {
let tcx = cx.tcx;
- let iter = tcx.doc_link_traits_in_scope(module).iter().flat_map(|&trait_| {
- trace!("considering explicit impl for trait {:?}", trait_);
+ let mut impls = FxHashSet::default();
- // Look at each trait implementation to see if it's an impl for `did`
- tcx.find_map_relevant_impl(trait_, ty, TreatProjections::ForLookup, |impl_| {
+ for &trait_ in tcx.doc_link_traits_in_scope(module) {
+ tcx.for_each_relevant_impl(trait_, ty, |impl_| {
let trait_ref = tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
// Check if these are the same type.
let impl_type = trait_ref.skip_binder().self_ty();
@@ -800,10 +800,13 @@ fn trait_impls_for<'a>(
_ => false,
};
- if saw_impl { Some((impl_, trait_)) } else { None }
- })
- });
- iter.collect()
+ if saw_impl {
+ impls.insert((impl_, trait_));
+ }
+ });
+ }
+
+ impls
}
/// Check for resolve collisions between a trait and its derive.
@@ -830,7 +833,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
enum PreprocessingError {
/// User error: `[std#x#y]` is not valid
MultipleAnchors,
- Disambiguator(Range<usize>, String),
+ Disambiguator(MarkdownLinkRange, String),
MalformedGenerics(MalformedGenerics, String),
}
@@ -839,7 +842,7 @@ impl PreprocessingError {
match self {
PreprocessingError::MultipleAnchors => report_multiple_anchors(cx, diag_info),
PreprocessingError::Disambiguator(range, msg) => {
- disambiguator_error(cx, diag_info, range.clone(), msg)
+ disambiguator_error(cx, diag_info, range.clone(), msg.as_str())
}
PreprocessingError::MalformedGenerics(err, path_str) => {
report_malformed_generics(cx, diag_info, *err, path_str)
@@ -870,6 +873,7 @@ pub(crate) struct PreprocessedMarkdownLink(
/// `link_buffer` is needed for lifetime reasons; it will always be overwritten and the contents ignored.
fn preprocess_link(
ori_link: &MarkdownLink,
+ dox: &str,
) -> Option<Result<PreprocessingInfo, PreprocessingError>> {
// [] is mostly likely not supposed to be a link
if ori_link.link.is_empty() {
@@ -903,9 +907,15 @@ fn preprocess_link(
Err((err_msg, relative_range)) => {
// Only report error if we would not have ignored this link. See issue #83859.
if !should_ignore_link_with_disambiguators(link) {
- let no_backticks_range = range_between_backticks(ori_link);
- let disambiguator_range = (no_backticks_range.start + relative_range.start)
- ..(no_backticks_range.start + relative_range.end);
+ let disambiguator_range = match range_between_backticks(&ori_link.range, dox) {
+ MarkdownLinkRange::Destination(no_backticks_range) => {
+ MarkdownLinkRange::Destination(
+ (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end),
+ )
+ }
+ mdlr @ MarkdownLinkRange::WholeLink(_) => mdlr,
+ };
return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
} else {
return None;
@@ -944,7 +954,7 @@ fn preprocess_link(
fn preprocessed_markdown_links(s: &str) -> Vec<PreprocessedMarkdownLink> {
markdown_links(s, |link| {
- preprocess_link(&link).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
+ preprocess_link(&link, s).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
})
}
@@ -1057,22 +1067,12 @@ impl LinkCollector<'_, '_> {
// valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677
// for discussion on the matter.
let kind = self.cx.tcx.def_kind(id);
- self.verify_disambiguator(
- path_str,
- ori_link,
- kind,
- id,
- disambiguator,
- item,
- &diag_info,
- )?;
+ self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?;
} else {
match disambiguator {
Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
Some(other) => {
- self.report_disambiguator_mismatch(
- path_str, ori_link, other, res, &diag_info,
- );
+ self.report_disambiguator_mismatch(path_str, other, res, &diag_info);
return None;
}
}
@@ -1093,7 +1093,6 @@ impl LinkCollector<'_, '_> {
};
self.verify_disambiguator(
path_str,
- ori_link,
kind_for_dis,
id_for_dis,
disambiguator,
@@ -1115,7 +1114,6 @@ impl LinkCollector<'_, '_> {
fn verify_disambiguator(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
kind: DefKind,
id: DefId,
disambiguator: Option<Disambiguator>,
@@ -1139,7 +1137,7 @@ impl LinkCollector<'_, '_> {
=> {}
(actual, Some(Disambiguator::Kind(expected))) if actual == expected => {}
(_, Some(specified @ Disambiguator::Kind(_) | specified @ Disambiguator::Primitive)) => {
- self.report_disambiguator_mismatch(path_str,ori_link,specified, Res::Def(kind, id),diag_info);
+ self.report_disambiguator_mismatch(path_str, specified, Res::Def(kind, id), diag_info);
return None;
}
}
@@ -1161,14 +1159,13 @@ impl LinkCollector<'_, '_> {
fn report_disambiguator_mismatch(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
specified: Disambiguator,
resolved: Res,
diag_info: &DiagnosticInfo<'_>,
) {
// The resolved item did not match the disambiguator; give a better error than 'not found'
let msg = format!("incompatible link kind for `{}`", path_str);
- let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>| {
+ let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>, link_range| {
let note = format!(
"this link resolved to {} {}, which is not {} {}",
resolved.article(),
@@ -1177,18 +1174,28 @@ impl LinkCollector<'_, '_> {
specified.descr(),
);
if let Some(sp) = sp {
- diag.span_label(sp, &note);
+ diag.span_label(sp, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
- suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
+ suggest_disambiguator(resolved, diag, path_str, link_range, sp, diag_info);
};
- report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
+ report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, callback);
}
- fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &Range<usize>, item: &Item) {
- let span = super::source_span_for_markdown_range(self.cx.tcx, dox, ori_link, &item.attrs)
- .unwrap_or_else(|| item.attr_span(self.cx.tcx));
+ fn report_rawptr_assoc_feature_gate(
+ &self,
+ dox: &str,
+ ori_link: &MarkdownLinkRange,
+ item: &Item,
+ ) {
+ let span = super::source_span_for_markdown_range(
+ self.cx.tcx,
+ dox,
+ ori_link.inner_range(),
+ &item.attrs,
+ )
+ .unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
&self.cx.tcx.sess.parse_sess,
sym::intra_doc_pointers,
@@ -1293,7 +1300,8 @@ impl LinkCollector<'_, '_> {
}
}
}
- resolution_failure(self, diag, path_str, disambiguator, smallvec![err])
+ resolution_failure(self, diag, path_str, disambiguator, smallvec![err]);
+ return vec![];
}
}
}
@@ -1329,13 +1337,14 @@ impl LinkCollector<'_, '_> {
.fold(0, |acc, res| if let Ok(res) = res { acc + res.len() } else { acc });
if len == 0 {
- return resolution_failure(
+ resolution_failure(
self,
diag,
path_str,
disambiguator,
candidates.into_iter().filter_map(|res| res.err()).collect(),
);
+ return vec![];
} else if len == 1 {
candidates.into_iter().filter_map(|res| res.ok()).flatten().collect::<Vec<_>>()
} else {
@@ -1349,7 +1358,7 @@ impl LinkCollector<'_, '_> {
if has_derive_trait_collision {
candidates.macro_ns = None;
}
- candidates.into_iter().filter_map(|res| res).flatten().collect::<Vec<_>>()
+ candidates.into_iter().flatten().flatten().collect::<Vec<_>>()
}
}
}
@@ -1366,16 +1375,23 @@ impl LinkCollector<'_, '_> {
/// [`Foo`]
/// ^^^
/// ```
-fn range_between_backticks(ori_link: &MarkdownLink) -> Range<usize> {
- let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0);
- let before_second_backtick_group = ori_link
- .link
+///
+/// This function does nothing if `ori_link.range` is a `MarkdownLinkRange::WholeLink`.
+fn range_between_backticks(ori_link_range: &MarkdownLinkRange, dox: &str) -> MarkdownLinkRange {
+ let range = match ori_link_range {
+ mdlr @ MarkdownLinkRange::WholeLink(_) => return mdlr.clone(),
+ MarkdownLinkRange::Destination(inner) => inner.clone(),
+ };
+ let ori_link_text = &dox[range.clone()];
+ let after_first_backtick_group = ori_link_text.bytes().position(|b| b != b'`').unwrap_or(0);
+ let before_second_backtick_group = ori_link_text
.bytes()
.skip(after_first_backtick_group)
.position(|b| b == b'`')
- .unwrap_or(ori_link.link.len());
- (ori_link.range.start + after_first_backtick_group)
- ..(ori_link.range.start + before_second_backtick_group)
+ .unwrap_or(ori_link_text.len());
+ MarkdownLinkRange::Destination(
+ (range.start + after_first_backtick_group)..(range.start + before_second_backtick_group),
+ )
}
/// Returns true if we should ignore `link` due to it being unlikely
@@ -1419,6 +1435,7 @@ impl Disambiguator {
if let Some(idx) = link.find('@') {
let (prefix, rest) = link.split_at(idx);
let d = match prefix {
+ // If you update this list, please also update the relevant rustdoc book section!
"struct" => Kind(DefKind::Struct),
"enum" => Kind(DefKind::Enum),
"trait" => Kind(DefKind::Trait),
@@ -1437,6 +1454,7 @@ impl Disambiguator {
Ok(Some((d, &rest[1..], &rest[1..])))
} else {
let suffixes = [
+ // If you update this list, please also update the relevant rustdoc book section!
("!()", DefKind::Macro(MacroKind::Bang)),
("!{}", DefKind::Macro(MacroKind::Bang)),
("![]", DefKind::Macro(MacroKind::Bang)),
@@ -1523,14 +1541,23 @@ impl Suggestion {
sp: rustc_span::Span,
) -> Vec<(rustc_span::Span, String)> {
let inner_sp = match ori_link.find('(') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => sp.with_hi(sp.lo() + BytePos(index as _)),
None => sp,
};
let inner_sp = match ori_link.find('!') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_hi(inner_sp.lo() + BytePos(index as _)),
None => inner_sp,
};
let inner_sp = match ori_link.find('@') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_lo(inner_sp.lo() + BytePos(index as u32 + 1)),
None => inner_sp,
};
@@ -1575,9 +1602,9 @@ impl Suggestion {
fn report_diagnostic(
tcx: TyCtxt<'_>,
lint: &'static Lint,
- msg: &str,
+ msg: impl Into<DiagnosticMessage> + Display,
DiagnosticInfo { item, ori_link: _, dox, link_range }: &DiagnosticInfo<'_>,
- decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>),
+ decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>, MarkdownLinkRange),
) {
let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
else {
@@ -1589,16 +1616,32 @@ fn report_diagnostic(
let sp = item.attr_span(tcx);
tcx.struct_span_lint_hir(lint, hir_id, sp, msg, |lint| {
- let span =
- super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
- if dox.as_bytes().get(link_range.start) == Some(&b'`')
- && dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
- {
- sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
- } else {
- sp
- }
- });
+ let (span, link_range) = match link_range {
+ MarkdownLinkRange::Destination(md_range) => {
+ let mut md_range = md_range.clone();
+ let sp = super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs)
+ .map(|mut sp| {
+ while dox.as_bytes().get(md_range.start) == Some(&b' ')
+ || dox.as_bytes().get(md_range.start) == Some(&b'`')
+ {
+ md_range.start += 1;
+ sp = sp.with_lo(sp.lo() + BytePos(1));
+ }
+ while dox.as_bytes().get(md_range.end - 1) == Some(&b' ')
+ || dox.as_bytes().get(md_range.end - 1) == Some(&b'`')
+ {
+ md_range.end -= 1;
+ sp = sp.with_hi(sp.hi() - BytePos(1));
+ }
+ sp
+ });
+ (sp, MarkdownLinkRange::Destination(md_range))
+ }
+ MarkdownLinkRange::WholeLink(md_range) => (
+ super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs),
+ link_range.clone(),
+ ),
+ };
if let Some(sp) = span {
lint.set_span(sp);
@@ -1607,21 +1650,22 @@ fn report_diagnostic(
// ^ ~~~~
// | link_range
// last_new_line_offset
- let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
+ let md_range = link_range.inner_range().clone();
+ let last_new_line_offset = dox[..md_range.start].rfind('\n').map_or(0, |n| n + 1);
let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
- // Print the line containing the `link_range` and manually mark it with '^'s.
- lint.note(&format!(
+ // Print the line containing the `md_range` and manually mark it with '^'s.
+ lint.note(format!(
"the link appears in this line:\n\n{line}\n\
{indicator: <before$}{indicator:^<found$}",
line = line,
indicator = "",
- before = link_range.start - last_new_line_offset,
- found = link_range.len(),
+ before = md_range.start - last_new_line_offset,
+ found = md_range.len(),
));
}
- decorate(lint, span);
+ decorate(lint, span, link_range);
lint
});
@@ -1638,15 +1682,14 @@ fn resolution_failure(
path_str: &str,
disambiguator: Option<Disambiguator>,
kinds: SmallVec<[ResolutionFailure<'_>; 3]>,
-) -> Vec<(Res, Option<DefId>)> {
+) {
let tcx = collector.cx.tcx;
- let mut recovered_res = None;
report_diagnostic(
tcx,
BROKEN_INTRA_DOC_LINKS,
- &format!("unresolved link to `{}`", path_str),
+ format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, link_range| {
let item = |res: Res| format!("the {} `{}`", res.descr(), res.name(tcx),);
let assoc_item_not_allowed = |res: Res| {
let name = res.name(tcx);
@@ -1700,7 +1743,7 @@ fn resolution_failure(
if let Ok(v_res) = collector.resolve(start, ns, item_id, module_id) {
debug!("found partial_res={:?}", v_res);
if !v_res.is_empty() {
- *partial_res = Some(full_res(collector.cx.tcx, v_res[0]));
+ *partial_res = Some(full_res(tcx, v_res[0]));
*unresolved = end.into();
break 'outer;
}
@@ -1725,26 +1768,32 @@ fn resolution_failure(
format!("no item named `{}` in scope", unresolved)
};
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
if !path_str.contains("::") {
if disambiguator.map_or(true, |d| d.ns() == MacroNS)
- && let Some(&res) = collector.cx.tcx.resolutions(()).all_macro_rules
- .get(&Symbol::intern(path_str))
+ && collector
+ .cx
+ .tcx
+ .resolutions(())
+ .all_macro_rules
+ .get(&Symbol::intern(path_str))
+ .is_some()
{
diag.note(format!(
"`macro_rules` named `{path_str}` exists in this crate, \
but it is not in scope at this link's location"
));
- recovered_res = res.try_into().ok().map(|res| (res, None));
} else {
// If the link has `::` in it, assume it was meant to be an
// intra-doc link. Otherwise, the `[]` might be unrelated.
- diag.help("to escape `[` and `]` characters, \
- add '\\' before them like `\\[` or `\\]`");
+ diag.help(
+ "to escape `[` and `]` characters, \
+ add '\\' before them like `\\[` or `\\]`",
+ );
}
}
@@ -1776,9 +1825,9 @@ fn resolution_failure(
let variant = res.name(tcx);
let note = format!("variant `{variant}` has no such field");
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
return;
}
@@ -1801,9 +1850,9 @@ fn resolution_failure(
| InlineConst => {
let note = assoc_item_not_allowed(res);
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
return;
}
@@ -1823,9 +1872,9 @@ fn resolution_failure(
unresolved,
);
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
continue;
@@ -1833,7 +1882,14 @@ fn resolution_failure(
let note = match failure {
ResolutionFailure::NotResolved { .. } => unreachable!("handled above"),
ResolutionFailure::WrongNamespace { res, expected_ns } => {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(
+ res,
+ diag,
+ path_str,
+ link_range.clone(),
+ sp,
+ &diag_info,
+ );
format!(
"this link resolves to {}, which is not in the {} namespace",
@@ -1843,39 +1899,34 @@ fn resolution_failure(
}
};
if let Some(span) = sp {
- diag.span_label(span, &note);
+ diag.span_label(span, note);
} else {
- diag.note(&note);
+ diag.note(note);
}
}
},
);
-
- match recovered_res {
- Some(r) => vec![r],
- None => Vec::new(),
- }
}
fn report_multiple_anchors(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>) {
let msg = format!("`{}` contains multiple anchors", diag_info.ori_link);
- anchor_failure(cx, diag_info, &msg, 1)
+ anchor_failure(cx, diag_info, msg, 1)
}
fn report_anchor_conflict(cx: &DocContext<'_>, diag_info: DiagnosticInfo<'_>, def_id: DefId) {
let (link, kind) = (diag_info.ori_link, Res::from_def_id(cx.tcx, def_id).descr());
let msg = format!("`{link}` contains an anchor, but links to {kind}s are already anchored");
- anchor_failure(cx, diag_info, &msg, 0)
+ anchor_failure(cx, diag_info, msg, 0)
}
/// Report an anchor failure.
fn anchor_failure(
cx: &DocContext<'_>,
diag_info: DiagnosticInfo<'_>,
- msg: &str,
+ msg: String,
anchor_idx: usize,
) {
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp, _link_range| {
if let Some(mut sp) = sp {
if let Some((fragment_offset, _)) =
diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)
@@ -1891,16 +1942,16 @@ fn anchor_failure(
fn disambiguator_error(
cx: &DocContext<'_>,
mut diag_info: DiagnosticInfo<'_>,
- disambiguator_range: Range<usize>,
- msg: &str,
+ disambiguator_range: MarkdownLinkRange,
+ msg: impl Into<DiagnosticMessage> + Display,
) {
diag_info.link_range = disambiguator_range;
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp, _link_range| {
let msg = format!(
"see {}/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
crate::DOC_RUST_LANG_ORG_CHANNEL
);
- diag.note(&msg);
+ diag.note(msg);
});
}
@@ -1913,9 +1964,9 @@ fn report_malformed_generics(
report_diagnostic(
cx.tcx,
BROKEN_INTRA_DOC_LINKS,
- &format!("unresolved link to `{}`", path_str),
+ format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, _link_range| {
let note = match err {
MalformedGenerics::UnbalancedAngleBrackets => "unbalanced angle brackets",
MalformedGenerics::MissingType => "missing type for generic parameters",
@@ -1988,7 +2039,7 @@ fn ambiguity_error(
}
}
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "ambiguous link");
} else {
@@ -1996,7 +2047,7 @@ fn ambiguity_error(
}
for res in kinds {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(res, diag, path_str, link_range.clone(), sp, diag_info);
}
});
true
@@ -2008,22 +2059,28 @@ fn suggest_disambiguator(
res: Res,
diag: &mut Diagnostic,
path_str: &str,
- ori_link: &str,
+ link_range: MarkdownLinkRange,
sp: Option<rustc_span::Span>,
+ diag_info: &DiagnosticInfo<'_>,
) {
let suggestion = res.disambiguator_suggestion();
let help = format!("to link to the {}, {}", res.descr(), suggestion.descr());
- if let Some(sp) = sp {
+ let ori_link = match link_range {
+ MarkdownLinkRange::Destination(range) => Some(&diag_info.dox[range]),
+ MarkdownLinkRange::WholeLink(_) => None,
+ };
+
+ if let (Some(sp), Some(ori_link)) = (sp, ori_link) {
let mut spans = suggestion.as_help_span(path_str, ori_link, sp);
if spans.len() > 1 {
- diag.multipart_suggestion(&help, spans, Applicability::MaybeIncorrect);
+ diag.multipart_suggestion(help, spans, Applicability::MaybeIncorrect);
} else {
let (sp, suggestion_text) = spans.pop().unwrap();
- diag.span_suggestion_verbose(sp, &help, suggestion_text, Applicability::MaybeIncorrect);
+ diag.span_suggestion_verbose(sp, help, suggestion_text, Applicability::MaybeIncorrect);
}
} else {
- diag.help(&format!("{}: {}", help, suggestion.as_help(path_str)));
+ diag.help(format!("{}: {}", help, suggestion.as_help(path_str)));
}
}
@@ -2040,7 +2097,7 @@ fn privacy_error(cx: &DocContext<'_>, diag_info: &DiagnosticInfo<'_>, path_str:
let msg =
format!("public documentation for `{}` links to private item `{}`", item_name, path_str);
- report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, &msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, _link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "this item is private");
}
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 8d204ddb7..fbf827cce 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -19,9 +19,10 @@ pub(crate) const COLLECT_TRAIT_IMPLS: Pass = Pass {
};
pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let tcx = cx.tcx;
// We need to check if there are errors before running this pass because it would crash when
// we try to get auto and blanket implementations.
- if cx.tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
return krate;
}
@@ -32,8 +33,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
});
let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
- let prims: FxHashSet<PrimitiveType> =
- local_crate.primitives(cx.tcx).iter().map(|p| p.1).collect();
+ let prims: FxHashSet<PrimitiveType> = local_crate.primitives(tcx).iter().map(|p| p.1).collect();
let crate_items = {
let mut coll = ItemCollector::new();
@@ -46,9 +46,9 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// External trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
- for &cnum in cx.tcx.crates(()) {
- for &impl_def_id in cx.tcx.trait_impls_in_crate(cnum) {
+ let _prof_timer = tcx.sess.prof.generic_activity("build_extern_trait_impls");
+ for &cnum in tcx.crates(()) {
+ for &impl_def_id in tcx.trait_impls_in_crate(cnum) {
inline::build_impl(cx, impl_def_id, None, &mut new_items_external);
}
}
@@ -56,14 +56,13 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// Local trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_local_trait_impls");
+ let _prof_timer = tcx.sess.prof.generic_activity("build_local_trait_impls");
let mut attr_buf = Vec::new();
- for &impl_def_id in cx.tcx.trait_impls_in_crate(LOCAL_CRATE) {
- let mut parent = Some(cx.tcx.parent(impl_def_id));
+ for &impl_def_id in tcx.trait_impls_in_crate(LOCAL_CRATE) {
+ let mut parent = Some(tcx.parent(impl_def_id));
while let Some(did) = parent {
attr_buf.extend(
- cx.tcx
- .get_attrs(did, sym::doc)
+ tcx.get_attrs(did, sym::doc)
.filter(|attr| {
if let Some([attr]) = attr.meta_item_list().as_deref() {
attr.has_name(sym::cfg)
@@ -73,25 +72,24 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
})
.cloned(),
);
- parent = cx.tcx.opt_parent(did);
+ parent = tcx.opt_parent(did);
}
inline::build_impl(cx, impl_def_id, Some((&attr_buf, None)), &mut new_items_local);
attr_buf.clear();
}
}
- cx.tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
- for def_id in PrimitiveType::all_impls(cx.tcx) {
+ tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
+ for def_id in PrimitiveType::all_impls(tcx) {
// Try to inline primitive impls from other crates.
if !def_id.is_local() {
inline::build_impl(cx, def_id, None, &mut new_items_external);
}
}
- for (prim, did) in PrimitiveType::primitive_locations(cx.tcx) {
+ for (prim, did) in PrimitiveType::primitive_locations(tcx) {
// Do not calculate blanket impl list for docs that are not going to be rendered.
// While the `impl` blocks themselves are only in `libcore`, the module with `doc`
// attached is directly included in `libstd` as well.
- let tcx = cx.tcx;
if did.is_local() {
for def_id in prim.impls(tcx).filter(|def_id| {
// Avoid including impl blocks with filled-in generics.
@@ -157,7 +155,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// scan through included items ahead of time to splice in Deref targets to the "valid" sets
for it in new_items_external.iter().chain(new_items_local.iter()) {
if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait() &&
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() &&
cleaner.keep_impl(for_, true)
{
let target = items
@@ -199,7 +197,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
if let ImplItem(box Impl { ref for_, ref trait_, ref kind, .. }) = *it.kind {
cleaner.keep_impl(
for_,
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait(),
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait(),
) || trait_.as_ref().map_or(false, |t| cleaner.keep_impl_with_def_id(t.def_id().into()))
|| kind.is_blanket()
} else {
diff --git a/src/librustdoc/passes/lint.rs b/src/librustdoc/passes/lint.rs
index 97031c4f0..e653207b9 100644
--- a/src/librustdoc/passes/lint.rs
+++ b/src/librustdoc/passes/lint.rs
@@ -4,6 +4,7 @@
mod bare_urls;
mod check_code_block_syntax;
mod html_tags;
+mod unescaped_backticks;
use super::Pass;
use crate::clean::*;
@@ -27,6 +28,7 @@ impl<'a, 'tcx> DocVisitor for Linter<'a, 'tcx> {
bare_urls::visit_item(self.cx, item);
check_code_block_syntax::visit_item(self.cx, item);
html_tags::visit_item(self.cx, item);
+ unescaped_backticks::visit_item(self.cx, item);
self.visit_item_recur(item)
}
diff --git a/src/librustdoc/passes/lint/bare_urls.rs b/src/librustdoc/passes/lint/bare_urls.rs
index 423230cfe..a10d5fdb4 100644
--- a/src/librustdoc/passes/lint/bare_urls.rs
+++ b/src/librustdoc/passes/lint/bare_urls.rs
@@ -18,7 +18,7 @@ pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item) {
// If non-local, no need to check anything.
return;
};
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ let dox = item.doc_value();
if !dox.is_empty() {
let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
let sp = source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
index 26fbb03a4..f489f5081 100644
--- a/src/librustdoc/passes/lint/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -17,7 +17,7 @@ use crate::html::markdown::{self, RustCodeBlock};
use crate::passes::source_span_for_markdown_range;
pub(crate) fn visit_item(cx: &DocContext<'_>, item: &clean::Item) {
- if let Some(dox) = &item.attrs.collapsed_doc_value() {
+ if let Some(dox) = &item.opt_doc_value() {
let sp = item.attr_span(cx.tcx);
let extra = crate::html::markdown::ExtraInfo::new(cx.tcx, item.item_id.expect_def_id(), sp);
for code_block in markdown::rust_code_blocks(dox, &extra) {
@@ -108,7 +108,7 @@ fn check_rust_syntax(
// just give a `help` instead.
lint.span_help(
sp.from_inner(InnerSpan::new(0, 3)),
- &format!("{}: ```text", explanation),
+ format!("{}: ```text", explanation),
);
} else if empty_block {
lint.span_suggestion(
@@ -119,12 +119,12 @@ fn check_rust_syntax(
);
}
} else if empty_block || is_ignore {
- lint.help(&format!("{}: ```text", explanation));
+ lint.help(format!("{}: ```text", explanation));
}
// FIXME(#67563): Provide more context for these errors by displaying the spans inline.
for message in buffer.messages.iter() {
- lint.note(message);
+ lint.note(message.clone());
}
lint
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 4f72df5a5..f0403647a 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -15,7 +15,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
// If non-local, no need to check anything.
else { return };
- let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
+ let dox = item.doc_value();
if !dox.is_empty() {
let report_diag = |msg: &str, range: &Range<usize>, is_open_tag: bool| {
let sp = match source_span_for_markdown_range(tcx, &dox, range, &item.attrs) {
diff --git a/src/librustdoc/passes/lint/unescaped_backticks.rs b/src/librustdoc/passes/lint/unescaped_backticks.rs
new file mode 100644
index 000000000..865212205
--- /dev/null
+++ b/src/librustdoc/passes/lint/unescaped_backticks.rs
@@ -0,0 +1,416 @@
+//! Detects unescaped backticks (\`) in doc comments.
+
+use crate::clean::Item;
+use crate::core::DocContext;
+use crate::html::markdown::main_body_opts;
+use crate::passes::source_span_for_markdown_range;
+use pulldown_cmark::{BrokenLink, Event, Parser};
+use rustc_errors::DiagnosticBuilder;
+use rustc_lint_defs::Applicability;
+use std::ops::Range;
+
+pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
+ let tcx = cx.tcx;
+ let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id) else {
+ // If non-local, no need to check anything.
+ return;
+ };
+
+ let dox = item.doc_value();
+ if dox.is_empty() {
+ return;
+ }
+
+ let link_names = item.link_names(&cx.cache);
+ let mut replacer = |broken_link: BrokenLink<'_>| {
+ link_names
+ .iter()
+ .find(|link| *link.original_text == *broken_link.reference)
+ .map(|link| ((*link.href).into(), (*link.new_text).into()))
+ };
+ let parser = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ .into_offset_iter();
+
+ let mut element_stack = Vec::new();
+
+ let mut prev_text_end = 0;
+ for (event, event_range) in parser {
+ match event {
+ Event::Start(_) => {
+ element_stack.push(Element::new(event_range));
+ }
+ Event::End(_) => {
+ let element = element_stack.pop().unwrap();
+
+ let Some(backtick_index) = element.backtick_index else {
+ continue;
+ };
+
+ // If we can't get a span of the backtick, because it is in a `#[doc = ""]` attribute,
+ // use the span of the entire attribute as a fallback.
+ let span = source_span_for_markdown_range(
+ tcx,
+ &dox,
+ &(backtick_index..backtick_index + 1),
+ &item.attrs,
+ )
+ .unwrap_or_else(|| item.attr_span(tcx));
+
+ tcx.struct_span_lint_hir(crate::lint::UNESCAPED_BACKTICKS, hir_id, span, "unescaped backtick", |lint| {
+ let mut help_emitted = false;
+
+ match element.prev_code_guess {
+ PrevCodeGuess::None => {}
+ PrevCodeGuess::Start { guess, .. } => {
+ // "foo` `bar`" -> "`foo` `bar`"
+ if let Some(suggest_index) = clamp_start(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the opening backtick of a previous inline code may be missing");
+ help_emitted = true;
+ }
+ }
+ PrevCodeGuess::End { guess, .. } => {
+ // "`foo `bar`" -> "`foo` `bar`"
+ // Don't `clamp_end` here, because the suggestion is guaranteed to be inside
+ // an inline code node and we intentionally "break" the inline code here.
+ let suggest_index = guess;
+ if can_suggest_backtick(&dox, suggest_index) {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "a previous inline code might be longer than expected");
+ help_emitted = true;
+ }
+ }
+ }
+
+ if !element.prev_code_guess.is_confident() {
+ // "`foo` bar`" -> "`foo` `bar`"
+ if let Some(guess) = guess_start_of_code(&dox, element.element_range.start..backtick_index)
+ && let Some(suggest_index) = clamp_start(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the opening backtick of an inline code may be missing");
+ help_emitted = true;
+ }
+
+ // "`foo` `bar" -> "`foo` `bar`"
+ // Don't suggest closing backtick after single trailing char,
+ // if we already suggested opening backtick. For example:
+ // "foo`." -> "`foo`." or "foo`s" -> "`foo`s".
+ if let Some(guess) = guess_end_of_code(&dox, backtick_index + 1..element.element_range.end)
+ && let Some(suggest_index) = clamp_end(guess, &element.suggestible_ranges)
+ && can_suggest_backtick(&dox, suggest_index)
+ && (!help_emitted || suggest_index - backtick_index > 2)
+ {
+ suggest_insertion(cx, item, &dox, lint, suggest_index, '`', "the closing backtick of an inline code may be missing");
+ help_emitted = true;
+ }
+ }
+
+ if !help_emitted {
+ lint.help("the opening or closing backtick of an inline code may be missing");
+ }
+
+ suggest_insertion(cx, item, &dox, lint, backtick_index, '\\', "if you meant to use a literal backtick, escape it");
+
+ lint
+ });
+ }
+ Event::Code(_) => {
+ let element = element_stack
+ .last_mut()
+ .expect("expected inline code node to be inside of an element");
+ assert!(
+ event_range.start >= element.element_range.start
+ && event_range.end <= element.element_range.end
+ );
+
+ // This inline code might be longer than it's supposed to be.
+ // Only check single backtick inline code for now.
+ if !element.prev_code_guess.is_confident()
+ && dox.as_bytes().get(event_range.start) == Some(&b'`')
+ && dox.as_bytes().get(event_range.start + 1) != Some(&b'`')
+ {
+ let range_inside = event_range.start + 1..event_range.end - 1;
+ let text_inside = &dox[range_inside.clone()];
+
+ let is_confident = text_inside.starts_with(char::is_whitespace)
+ || text_inside.ends_with(char::is_whitespace);
+
+ if let Some(guess) = guess_end_of_code(&dox, range_inside) {
+ // Find earlier end of code.
+ element.prev_code_guess = PrevCodeGuess::End { guess, is_confident };
+ } else {
+ // Find alternate start of code.
+ let range_before = element.element_range.start..event_range.start;
+ if let Some(guess) = guess_start_of_code(&dox, range_before) {
+ element.prev_code_guess = PrevCodeGuess::Start { guess, is_confident };
+ }
+ }
+ }
+ }
+ Event::Text(text) => {
+ let element = element_stack
+ .last_mut()
+ .expect("expected inline text node to be inside of an element");
+ assert!(
+ event_range.start >= element.element_range.start
+ && event_range.end <= element.element_range.end
+ );
+
+ // The first char is escaped if the prev char is \ and not part of a text node.
+ let is_escaped = prev_text_end < event_range.start
+ && dox.as_bytes()[event_range.start - 1] == b'\\';
+
+ // Don't lint backslash-escaped (\`) or html-escaped (&#96;) backticks.
+ if *text == *"`" && !is_escaped && *text == dox[event_range.clone()] {
+ // We found a stray backtick.
+ assert!(
+ element.backtick_index.is_none(),
+ "expected at most one unescaped backtick per element",
+ );
+ element.backtick_index = Some(event_range.start);
+ }
+
+ prev_text_end = event_range.end;
+
+ if is_escaped {
+ // Ensure that we suggest "`\x" and not "\`x".
+ element.suggestible_ranges.push(event_range.start - 1..event_range.end);
+ } else {
+ element.suggestible_ranges.push(event_range);
+ }
+ }
+ _ => {}
+ }
+ }
+}
+
+/// A previous inline code node, that looks wrong.
+///
+/// `guess` is the position where we want to suggest a \` and the guess `is_confident` if the
+/// inline code starts or ends with whitespace.
+#[derive(Debug)]
+enum PrevCodeGuess {
+ None,
+
+ /// Missing \` at start.
+ ///
+ /// ```markdown
+ /// foo` `bar`
+ /// ```
+ Start {
+ guess: usize,
+ is_confident: bool,
+ },
+
+ /// Missing \` at end.
+ ///
+ /// ```markdown
+ /// `foo `bar`
+ /// ```
+ End {
+ guess: usize,
+ is_confident: bool,
+ },
+}
+
+impl PrevCodeGuess {
+ fn is_confident(&self) -> bool {
+ match *self {
+ PrevCodeGuess::None => false,
+ PrevCodeGuess::Start { is_confident, .. } | PrevCodeGuess::End { is_confident, .. } => {
+ is_confident
+ }
+ }
+ }
+}
+
+/// A markdown [tagged element], which may or may not contain an unescaped backtick.
+///
+/// [tagged element]: https://docs.rs/pulldown-cmark/0.9/pulldown_cmark/enum.Tag.html
+#[derive(Debug)]
+struct Element {
+ /// The full range (span) of the element in the doc string.
+ element_range: Range<usize>,
+
+ /// The ranges where we're allowed to put backticks.
+ /// This is used to prevent breaking markdown elements like links or lists.
+ suggestible_ranges: Vec<Range<usize>>,
+
+ /// The unescaped backtick.
+ backtick_index: Option<usize>,
+
+ /// Suggest a different start or end of an inline code.
+ prev_code_guess: PrevCodeGuess,
+}
+
+impl Element {
+ const fn new(element_range: Range<usize>) -> Self {
+ Self {
+ element_range,
+ suggestible_ranges: Vec::new(),
+ backtick_index: None,
+ prev_code_guess: PrevCodeGuess::None,
+ }
+ }
+}
+
+/// Given a potentially unclosed inline code, attempt to find the start.
+fn guess_start_of_code(dox: &str, range: Range<usize>) -> Option<usize> {
+ assert!(dox.as_bytes()[range.end] == b'`');
+
+ let mut braces = 0;
+ let mut guess = 0;
+ for (idx, ch) in dox[range.clone()].char_indices().rev() {
+ match ch {
+ ')' | ']' | '}' => braces += 1,
+ '(' | '[' | '{' => {
+ if braces == 0 {
+ guess = idx + 1;
+ break;
+ }
+ braces -= 1;
+ }
+ ch if ch.is_whitespace() && braces == 0 => {
+ guess = idx + 1;
+ break;
+ }
+ _ => (),
+ }
+ }
+
+ guess += range.start;
+
+ // Don't suggest empty inline code or duplicate backticks.
+ can_suggest_backtick(dox, guess).then_some(guess)
+}
+
+/// Given a potentially unclosed inline code, attempt to find the end.
+fn guess_end_of_code(dox: &str, range: Range<usize>) -> Option<usize> {
+ // Punctuation that should be outside of the inline code.
+ const TRAILING_PUNCTUATION: &[u8] = b".,";
+
+ assert!(dox.as_bytes()[range.start - 1] == b'`');
+
+ let text = dox[range.clone()].trim_end();
+ let mut braces = 0;
+ let mut guess = text.len();
+ for (idx, ch) in text.char_indices() {
+ match ch {
+ '(' | '[' | '{' => braces += 1,
+ ')' | ']' | '}' => {
+ if braces == 0 {
+ guess = idx;
+ break;
+ }
+ braces -= 1;
+ }
+ ch if ch.is_whitespace() && braces == 0 => {
+ guess = idx;
+ break;
+ }
+ _ => (),
+ }
+ }
+
+ // Strip a single trailing punctuation.
+ if guess >= 1
+ && TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 1])
+ && (guess < 2 || !TRAILING_PUNCTUATION.contains(&text.as_bytes()[guess - 2]))
+ {
+ guess -= 1;
+ }
+
+ guess += range.start;
+
+ // Don't suggest empty inline code or duplicate backticks.
+ can_suggest_backtick(dox, guess).then_some(guess)
+}
+
+/// Returns whether inserting a backtick at `dox[index]` will not produce double backticks.
+fn can_suggest_backtick(dox: &str, index: usize) -> bool {
+ (index == 0 || dox.as_bytes()[index - 1] != b'`')
+ && (index == dox.len() || dox.as_bytes()[index] != b'`')
+}
+
+/// Increase the index until it is inside or one past the end of one of the ranges.
+///
+/// The ranges must be sorted for this to work correctly.
+fn clamp_start(index: usize, ranges: &[Range<usize>]) -> Option<usize> {
+ for range in ranges {
+ if range.start >= index {
+ return Some(range.start);
+ }
+ if index <= range.end {
+ return Some(index);
+ }
+ }
+ None
+}
+
+/// Decrease the index until it is inside or one past the end of one of the ranges.
+///
+/// The ranges must be sorted for this to work correctly.
+fn clamp_end(index: usize, ranges: &[Range<usize>]) -> Option<usize> {
+ for range in ranges.iter().rev() {
+ if range.end <= index {
+ return Some(range.end);
+ }
+ if index >= range.start {
+ return Some(index);
+ }
+ }
+ None
+}
+
+/// Try to emit a span suggestion and fall back to help messages if we can't find a suitable span.
+///
+/// This helps finding backticks in huge macro-generated docs.
+fn suggest_insertion(
+ cx: &DocContext<'_>,
+ item: &Item,
+ dox: &str,
+ lint: &mut DiagnosticBuilder<'_, ()>,
+ insert_index: usize,
+ suggestion: char,
+ message: &str,
+) {
+ /// Maximum bytes of context to show around the insertion.
+ const CONTEXT_MAX_LEN: usize = 80;
+
+ if let Some(span) =
+ source_span_for_markdown_range(cx.tcx, &dox, &(insert_index..insert_index), &item.attrs)
+ {
+ lint.span_suggestion(span, message, suggestion, Applicability::MaybeIncorrect);
+ } else {
+ let line_start = dox[..insert_index].rfind('\n').map_or(0, |idx| idx + 1);
+ let line_end = dox[insert_index..].find('\n').map_or(dox.len(), |idx| idx + insert_index);
+
+ let context_before_max_len = if insert_index - line_start < CONTEXT_MAX_LEN / 2 {
+ insert_index - line_start
+ } else if line_end - insert_index < CONTEXT_MAX_LEN / 2 {
+ CONTEXT_MAX_LEN - (line_end - insert_index)
+ } else {
+ CONTEXT_MAX_LEN / 2
+ };
+ let context_after_max_len = CONTEXT_MAX_LEN - context_before_max_len;
+
+ let (prefix, context_start) = if insert_index - line_start <= context_before_max_len {
+ ("", line_start)
+ } else {
+ ("...", dox.ceil_char_boundary(insert_index - context_before_max_len))
+ };
+ let (suffix, context_end) = if line_end - insert_index <= context_after_max_len {
+ ("", line_end)
+ } else {
+ ("...", dox.floor_char_boundary(insert_index + context_after_max_len))
+ };
+
+ let context_full = &dox[context_start..context_end].trim_end();
+ let context_before = &dox[context_start..insert_index];
+ let context_after = &dox[insert_index..context_end].trim_end();
+ lint.help(format!(
+ "{message}\n change: {prefix}{context_full}{suffix}\nto this: {prefix}{context_before}{suggestion}{context_after}{suffix}"
+ ));
+ }
+}
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index a688aa148..972b0c5ec 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -1,5 +1,6 @@
//! Strip all doc(hidden) items from the output.
+use rustc_hir::def_id::LocalDefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
use std::mem;
@@ -29,6 +30,7 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea
update_retained: true,
tcx: cx.tcx,
is_in_hidden_item: false,
+ last_reexport: None,
};
stripper.fold_crate(krate)
};
@@ -49,13 +51,24 @@ struct Stripper<'a, 'tcx> {
update_retained: bool,
tcx: TyCtxt<'tcx>,
is_in_hidden_item: bool,
+ last_reexport: Option<LocalDefId>,
}
impl<'a, 'tcx> Stripper<'a, 'tcx> {
+ fn set_last_reexport_then_fold_item(&mut self, i: Item) -> Item {
+ let prev_from_reexport = self.last_reexport;
+ if i.inline_stmt_id.is_some() {
+ self.last_reexport = i.item_id.as_def_id().and_then(|def_id| def_id.as_local());
+ }
+ let ret = self.fold_item_recur(i);
+ self.last_reexport = prev_from_reexport;
+ ret
+ }
+
fn set_is_in_hidden_item_and_fold(&mut self, is_in_hidden_item: bool, i: Item) -> Item {
let prev = self.is_in_hidden_item;
self.is_in_hidden_item |= is_in_hidden_item;
- let ret = self.fold_item_recur(i);
+ let ret = self.set_last_reexport_then_fold_item(i);
self.is_in_hidden_item = prev;
ret
}
@@ -64,7 +77,7 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
/// of `is_in_hidden_item` to `true` because the impl children inherit its visibility.
fn recurse_in_impl_or_exported_macro(&mut self, i: Item) -> Item {
let prev = mem::replace(&mut self.is_in_hidden_item, false);
- let ret = self.fold_item_recur(i);
+ let ret = self.set_last_reexport_then_fold_item(i);
self.is_in_hidden_item = prev;
ret
}
@@ -86,13 +99,20 @@ impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
if !is_impl_or_exported_macro {
is_hidden = self.is_in_hidden_item || has_doc_hidden;
if !is_hidden && i.inline_stmt_id.is_none() {
- // We don't need to check if it's coming from a reexport since the reexport itself was
- // already checked.
+ // `i.inline_stmt_id` is `Some` if the item is directly reexported. If it is, we
+ // don't need to check it, because the reexport itself was already checked.
+ //
+ // If this item is the child of a reexported module, `self.last_reexport` will be
+ // `Some` even though `i.inline_stmt_id` is `None`. Hiddenness inheritance needs to
+ // account for the possibility that an item's true parent module is hidden, but it's
+ // inlined into a visible module tree. This code shouldn't be reachable if the
+ // module's reexport is itself hidden, for the same reason it doesn't need to be
+ // checked if `i.inline_stmt_id` is Some: hidden reexports are never inlined.
is_hidden = i
.item_id
.as_def_id()
.and_then(|def_id| def_id.as_local())
- .map(|def_id| inherits_doc_hidden(self.tcx, def_id))
+ .map(|def_id| inherits_doc_hidden(self.tcx, def_id, self.last_reexport))
.unwrap_or(false);
}
}
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index cba55e5fe..73fc26a6b 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -194,7 +194,7 @@ impl<'a> DocFolder for ImplStripper<'a, '_> {
})
{
return None;
- } else if imp.items.is_empty() && i.doc_value().is_none() {
+ } else if imp.items.is_empty() && i.doc_value().is_empty() {
return None;
}
}