Rollup of 8 pull requests #143057

Merged
merged 20 commits on Jun 27, 2025
Changes from 6 commits
Commits
e40515a
add method to retrieve body of coroutine
makai410 Jun 22, 2025
7e683cc
Do not emit `redundant_explicit_links` rustdoc lint if the doc commen…
GuillaumeGomez May 27, 2025
a0d6417
Add ui test for `redundant_explicit_links` rustdoc lint for items com…
GuillaumeGomez May 27, 2025
987c2ff
Update clippy source code to changes on `source_span_for_markdown_range`
GuillaumeGomez May 27, 2025
78cbcaf
Update tests to work with new DocFragment field and `redundant_explic…
GuillaumeGomez Jun 3, 2025
3b5525b
Improve code and documentation
GuillaumeGomez Jun 10, 2025
904652b
Suggest cloning `Arc` moved into closure
estebank May 1, 2024
250b5d2
Make missing lifetime suggestion verbose
compiler-errors Jun 24, 2025
8cd3fa0
Don't give APITs names with macro expansion placeholder fragments in it
compiler-errors Jun 18, 2025
2338821
tests: Do not run afoul of asm.validity.non-exhaustive in input-stats
workingjubilee Jun 11, 2025
1dfc840
make `tidy-alphabetical` use a natural sort
folkertdev May 20, 2025
59e1a3c
Simplify IfCause
compiler-errors Jun 25, 2025
aa8ba54
Rollup merge of #124595 - estebank:issue-104232, r=davidtwco
matthiaskrgr Jun 26, 2025
d5d5eb4
Rollup merge of #139594 - compiler-errors:if-cause, r=oli-obk
matthiaskrgr Jun 26, 2025
158340f
Rollup merge of #141311 - folkertdev:tidy-natural-sort, r=jieyouxu
matthiaskrgr Jun 26, 2025
cbcf183
Rollup merge of #141648 - GuillaumeGomez:redundant_explicit_links-exp…
matthiaskrgr Jun 26, 2025
d73bf10
Rollup merge of #142285 - workingjubilee:dont-use-bad-assembly, r=nne…
matthiaskrgr Jun 26, 2025
a05f2cc
Rollup merge of #142393 - compiler-errors:nofield, r=petrochenkov
matthiaskrgr Jun 26, 2025
2fc94f1
Rollup merge of #142884 - makai410:coroutine-body, r=celinval
matthiaskrgr Jun 26, 2025
2f8b715
Rollup merge of #142981 - compiler-errors:verbose-missing-suggestion,…
matthiaskrgr Jun 26, 2025
97 changes: 62 additions & 35 deletions compiler/rustc_resolve/src/rustdoc.rs
@@ -49,6 +49,9 @@ pub struct DocFragment {
pub doc: Symbol,
pub kind: DocFragmentKind,
pub indent: usize,
/// Because we tamper with the span's context, this information cannot be correctly retrieved
/// later on. So instead, we compute it and store it here.
pub from_expansion: bool,
}

#[derive(Clone, Copy, Debug)]
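
To illustrate why the flag has to be captured eagerly, here is a minimal, self-contained sketch. The types below are simplified stand-ins invented for the example, not the real `rustc_span` API: once the span's syntax context has been rewritten, the original expansion information may no longer be recoverable from the span itself, so it is recorded up front.

```rust
// Simplified stand-ins for illustration only; not the real rustc_span types.
#[derive(Clone, Copy)]
struct SyntaxContext {
    from_expansion: bool,
}

#[derive(Clone, Copy)]
struct Span {
    ctxt: SyntaxContext,
}

impl Span {
    fn from_expansion(self) -> bool {
        self.ctxt.from_expansion
    }

    // Models the context rewriting done while building doc fragments.
    fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
        Span { ctxt }
    }
}

struct DocFragment {
    span: Span,
    from_expansion: bool,
}

fn make_fragment(attr_span: Span, new_ctxt: SyntaxContext) -> DocFragment {
    // Record the flag *before* replacing the span's context...
    let from_expansion = attr_span.from_expansion();
    // ...because afterwards the span may no longer report it correctly.
    let span = attr_span.with_ctxt(new_ctxt);
    DocFragment { span, from_expansion }
}

fn main() {
    let expanded = Span { ctxt: SyntaxContext { from_expansion: true } };
    let root = SyntaxContext { from_expansion: false };
    let frag = make_fragment(expanded, root);
    // The rewritten span no longer says "expansion", but the stored flag does.
    assert!(!frag.span.from_expansion());
    assert!(frag.from_expansion);
}
```
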
@@ -208,17 +211,18 @@ pub fn attrs_to_doc_fragments<'a, A: AttributeExt + Clone + 'a>(
for (attr, item_id) in attrs {
if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
let doc = beautify_doc_string(doc_str, comment_kind);
let (span, kind) = if attr.is_doc_comment() {
(attr.span(), DocFragmentKind::SugaredDoc)
let (span, kind, from_expansion) = if attr.is_doc_comment() {
let span = attr.span();
(span, DocFragmentKind::SugaredDoc, span.from_expansion())
} else {
(
attr.value_span()
.map(|i| i.with_ctxt(attr.span().ctxt()))
.unwrap_or(attr.span()),
DocFragmentKind::RawDoc,
)
let attr_span = attr.span();
let (span, from_expansion) = match attr.value_span() {
Some(sp) => (sp.with_ctxt(attr_span.ctxt()), sp.from_expansion()),
None => (attr_span, attr_span.from_expansion()),
};
(span, DocFragmentKind::RawDoc, from_expansion)
};
let fragment = DocFragment { span, doc, kind, item_id, indent: 0 };
let fragment = DocFragment { span, doc, kind, item_id, indent: 0, from_expansion };
doc_fragments.push(fragment);
} else if !doc_only {
other_attrs.push(attr.clone());
@@ -505,17 +509,26 @@ fn collect_link_data<'input, F: BrokenLinkCallback<'input>>(
display_text.map(String::into_boxed_str)
}

/// Returns a span encompassing all the document fragments.
pub fn span_of_fragments(fragments: &[DocFragment]) -> Option<Span> {
if fragments.is_empty() {
return None;
}
let start = fragments[0].span;
if start == DUMMY_SP {
/// Returns a tuple containing a span encompassing all the document fragments and a boolean that is
/// `true` if any of the fragments are from a macro expansion.
pub fn span_of_fragments_with_expansion(fragments: &[DocFragment]) -> Option<(Span, bool)> {
let (first_fragment, last_fragment) = match fragments {
[] => return None,
[first, .., last] => (first, last),
[first] => (first, first),
};
if first_fragment.span == DUMMY_SP {
return None;
}
let end = fragments.last().expect("no doc strings provided").span;
Some(start.to(end))
Some((
first_fragment.span.to(last_fragment.span),
fragments.iter().any(|frag| frag.from_expansion),
))
}

/// Returns a span encompassing all the document fragments.
pub fn span_of_fragments(fragments: &[DocFragment]) -> Option<Span> {
span_of_fragments_with_expansion(fragments).map(|(sp, _)| sp)
}

/// Attempts to match a range of bytes from parsed markdown to a `Span` in the source code.
@@ -529,18 +542,22 @@ pub fn span_of_fragments(fragments: &[DocFragment]) -> Option<Span> {
/// This method will return `Some` only if one of the following is true:
///
/// - The doc is made entirely from sugared doc comments, which cannot contain escapes
/// - The doc is entirely from a single doc fragment with a string literal exactly equal to `markdown`.
/// - The doc is entirely from a single doc fragment with a string literal exactly equal to
/// `markdown`.
/// - The doc comes from `include_str!`
/// - The doc includes exactly one substring matching `markdown[md_range]` which is contained in a single doc fragment.
/// - The doc includes exactly one substring matching `markdown[md_range]` which is contained in a
/// single doc fragment.
///
/// This function is defined in the compiler so it can be used by both `rustdoc` and `clippy`.
///
/// This function is defined in the compiler so it can be used by
/// both `rustdoc` and `clippy`.
/// It returns a tuple containing a span encompassing all the document fragments and a boolean that
/// is `true` if any of the *matched* fragments are from a macro expansion.
pub fn source_span_for_markdown_range(
tcx: TyCtxt<'_>,
markdown: &str,
md_range: &Range<usize>,
fragments: &[DocFragment],
) -> Option<Span> {
) -> Option<(Span, bool)> {
let map = tcx.sess.source_map();
source_span_for_markdown_range_inner(map, markdown, md_range, fragments)
}
@@ -551,7 +568,7 @@ pub fn source_span_for_markdown_range_inner(
markdown: &str,
md_range: &Range<usize>,
fragments: &[DocFragment],
) -> Option<Span> {
) -> Option<(Span, bool)> {
use rustc_span::BytePos;

if let &[fragment] = &fragments
@@ -562,11 +579,14 @@ pub fn source_span_for_markdown_range_inner(
&& let Ok(md_range_hi) = u32::try_from(md_range.end)
{
// Single fragment with string that contains same bytes as doc.
return Some(Span::new(
fragment.span.lo() + rustc_span::BytePos(md_range_lo),
fragment.span.lo() + rustc_span::BytePos(md_range_hi),
fragment.span.ctxt(),
fragment.span.parent(),
return Some((
Span::new(
fragment.span.lo() + rustc_span::BytePos(md_range_lo),
fragment.span.lo() + rustc_span::BytePos(md_range_hi),
fragment.span.ctxt(),
fragment.span.parent(),
),
fragment.from_expansion,
));
}

@@ -598,19 +618,21 @@ pub fn source_span_for_markdown_range_inner(
{
match_data = Some((i, match_start));
} else {
// Heirustic produced ambiguity, return nothing.
// Heuristic produced ambiguity, return nothing.
return None;
}
}
}
if let Some((i, match_start)) = match_data {
let sp = fragments[i].span;
let fragment = &fragments[i];
let sp = fragment.span;
// we need to calculate the span start,
// then use that in our calculations for the span end
let lo = sp.lo() + BytePos(match_start as u32);
return Some(
return Some((
sp.with_lo(lo).with_hi(lo + BytePos((md_range.end - md_range.start) as u32)),
);
fragment.from_expansion,
));
}
return None;
}
@@ -664,8 +686,13 @@ pub fn source_span_for_markdown_range_inner(
}
}

Some(span_of_fragments(fragments)?.from_inner(InnerSpan::new(
let (span, _) = span_of_fragments_with_expansion(fragments)?;
let src_span = span.from_inner(InnerSpan::new(
md_range.start + start_bytes,
md_range.end + start_bytes + end_bytes,
)))
));
Some((
src_span,
fragments.iter().any(|frag| frag.span.overlaps(src_span) && frag.from_expansion),
))
}
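
For readers adapting other callers (clippy, external tools) to the API change above, here is a hedged sketch of how the new `Option<(Span, bool)>` return value is meant to be consumed. `Span`, `emit_lint_at`, and `report` below are stand-ins invented for the example, not compiler APIs; only the match pattern mirrors the real call sites.

```rust
#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

fn emit_lint_at(sp: Span) {
    println!("lint emitted at {sp:?}");
}

// Mirrors how the rustdoc lints use `source_span_for_markdown_range` after
// this change: lint on a precise span, stay silent for spans produced by
// macro expansion, and fall back to the attribute span otherwise.
fn report(resolved: Option<(Span, bool)>, attr_span: Span) {
    let sp = match resolved {
        // Precise span written directly by the user: point the lint at it.
        Some((sp, false)) => sp,
        // Doc text generated by a macro expansion: suppress the diagnostic,
        // since the user cannot act on the expanded span.
        Some((_, true)) => return,
        // No precise span could be computed: use the attribute span instead.
        None => attr_span,
    };
    emit_lint_at(sp);
}

fn main() {
    let attr = Span { lo: 0, hi: 10 };
    report(Some((Span { lo: 3, hi: 7 }, false)), attr); // lints at 3..7
    report(Some((Span { lo: 3, hi: 7 }, true)), attr); // suppressed
    report(None, attr); // lints at the attribute span
}
```
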
6 changes: 4 additions & 2 deletions compiler/rustc_resolve/src/rustdoc/tests.rs
@@ -10,7 +10,7 @@ use super::{DocFragment, DocFragmentKind, source_span_for_markdown_range_inner};
fn single_backtick() {
let sm = SourceMap::new(FilePathMapping::empty());
sm.new_source_file(PathBuf::from("foo.rs").into(), r#"#[doc = "`"] fn foo() {}"#.to_string());
let span = source_span_for_markdown_range_inner(
let (span, _) = source_span_for_markdown_range_inner(
&sm,
"`",
&(0..1),
@@ -20,6 +20,7 @@ fn single_backtick() {
kind: DocFragmentKind::RawDoc,
doc: sym::empty, // unused placeholder
indent: 0,
from_expansion: false,
}],
)
.unwrap();
@@ -32,7 +33,7 @@ fn utf8() {
// regression test for https://github.com/rust-lang/rust/issues/141665
let sm = SourceMap::new(FilePathMapping::empty());
sm.new_source_file(PathBuf::from("foo.rs").into(), r#"#[doc = "⚠"] fn foo() {}"#.to_string());
let span = source_span_for_markdown_range_inner(
let (span, _) = source_span_for_markdown_range_inner(
&sm,
"⚠",
&(0..3),
@@ -42,6 +43,7 @@ fn utf8() {
kind: DocFragmentKind::RawDoc,
doc: sym::empty, // unused placeholder
indent: 0,
from_expansion: false,
}],
)
.unwrap();
1 change: 1 addition & 0 deletions src/librustdoc/clean/types/tests.rs
@@ -10,6 +10,7 @@ fn create_doc_fragment(s: &str) -> Vec<DocFragment> {
doc: Symbol::intern(s),
kind: DocFragmentKind::SugaredDoc,
indent: 0,
from_expansion: false,
}]
}

13 changes: 8 additions & 5 deletions src/librustdoc/passes/collect_intra_doc_links.rs
@@ -1387,13 +1387,15 @@ impl LinkCollector<'_, '_> {
ori_link: &MarkdownLinkRange,
item: &Item,
) {
let span = source_span_for_markdown_range(
let span = match source_span_for_markdown_range(
self.cx.tcx,
dox,
ori_link.inner_range(),
&item.attrs.doc_strings,
)
.unwrap_or_else(|| item.attr_span(self.cx.tcx));
) {
Some((sp, _)) => sp,
None => item.attr_span(self.cx.tcx),
};
rustc_session::parse::feature_err(
self.cx.tcx.sess,
sym::intra_doc_pointers,
@@ -1836,7 +1838,7 @@ fn report_diagnostic(
let mut md_range = md_range.clone();
let sp =
source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs.doc_strings)
.map(|mut sp| {
.map(|(mut sp, _)| {
while dox.as_bytes().get(md_range.start) == Some(&b' ')
|| dox.as_bytes().get(md_range.start) == Some(&b'`')
{
Expand All @@ -1854,7 +1856,8 @@ fn report_diagnostic(
(sp, MarkdownLinkRange::Destination(md_range))
}
MarkdownLinkRange::WholeLink(md_range) => (
source_span_for_markdown_range(tcx, dox, md_range, &item.attrs.doc_strings),
source_span_for_markdown_range(tcx, dox, md_range, &item.attrs.doc_strings)
.map(|(sp, _)| sp),
link_range.clone(),
),
};
3 changes: 2 additions & 1 deletion src/librustdoc/passes/lint/bare_urls.rs
@@ -18,7 +18,8 @@ use crate::html::markdown::main_body_opts;

pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
let report_diag = |cx: &DocContext<'_>, msg: &'static str, range: Range<usize>| {
let maybe_sp = source_span_for_markdown_range(cx.tcx, dox, &range, &item.attrs.doc_strings);
let maybe_sp = source_span_for_markdown_range(cx.tcx, dox, &range, &item.attrs.doc_strings)
.map(|(sp, _)| sp);
let sp = maybe_sp.unwrap_or_else(|| item.attr_span(cx.tcx));
cx.tcx.node_span_lint(crate::lint::BARE_URLS, hir_id, sp, |lint| {
lint.primary_message(msg)
2 changes: 1 addition & 1 deletion src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -87,7 +87,7 @@ fn check_rust_syntax(
&code_block.range,
&item.attrs.doc_strings,
) {
Some(sp) => (sp, true),
Some((sp, _)) => (sp, true),
None => (item.attr_span(cx.tcx), false),
};

4 changes: 2 additions & 2 deletions src/librustdoc/passes/lint/html_tags.rs
@@ -16,7 +16,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
let tcx = cx.tcx;
let report_diag = |msg: String, range: &Range<usize>, is_open_tag: bool| {
let sp = match source_span_for_markdown_range(tcx, dox, range, &item.attrs.doc_strings) {
Some(sp) => sp,
Some((sp, _)) => sp,
None => item.attr_span(tcx),
};
tcx.node_span_lint(crate::lint::INVALID_HTML_TAGS, hir_id, sp, |lint| {
@@ -55,7 +55,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
&(generics_start..generics_end),
&item.attrs.doc_strings,
) {
Some(sp) => sp,
Some((sp, _)) => sp,
None => item.attr_span(tcx),
};
// Sometimes, we only extract part of a path. For example, consider this:
58 changes: 45 additions & 13 deletions src/librustdoc/passes/lint/redundant_explicit_links.rs
@@ -161,20 +161,36 @@ fn check_inline_or_reference_unknown_redundancy(

if dest_res == display_res {
let link_span =
source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
match source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
{
Some((sp, from_expansion)) => {
if from_expansion {
return None;
}
sp
}
None => item.attr_span(cx.tcx),
};
let (explicit_span, false) = source_span_for_markdown_range(
cx.tcx,
doc,
&offset_explicit_range(doc, link_range, open, close),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
)?
else {
// This `span` comes from macro expansion so skipping it.
return None;
};
let (display_span, false) = source_span_for_markdown_range(
cx.tcx,
doc,
resolvable_link_range,
&item.attrs.doc_strings,
)?;
)?
else {
// This `span` comes from macro expansion so skipping it.
return None;
};

cx.tcx.node_span_lint(crate::lint::REDUNDANT_EXPLICIT_LINKS, hir_id, explicit_span, |lint| {
lint.primary_message("redundant explicit link target")
@@ -206,21 +222,37 @@ fn check_reference_redundancy(

if dest_res == display_res {
let link_span =
source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
match source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
{
Some((sp, from_expansion)) => {
if from_expansion {
return None;
}
sp
}
None => item.attr_span(cx.tcx),
};
let (explicit_span, false) = source_span_for_markdown_range(
cx.tcx,
doc,
&offset_explicit_range(doc, link_range.clone(), b'[', b']'),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
)?
else {
// This `span` comes from macro expansion so skipping it.
return None;
};
let (display_span, false) = source_span_for_markdown_range(
cx.tcx,
doc,
resolvable_link_range,
&item.attrs.doc_strings,
)?;
let def_span = source_span_for_markdown_range(
)?
else {
// This `span` comes from macro expansion so skipping it.
return None;
};
let (def_span, _) = source_span_for_markdown_range(
cx.tcx,
doc,
&offset_reference_def_range(doc, dest, link_range),
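
The guards above rely on a slightly unusual construct: `let`-`else` with a refutable tuple pattern containing the literal `false`, applied to the result of the `?` operator. Here is a minimal, self-contained sketch of just that pattern; `resolve` is a hypothetical helper standing in for `source_span_for_markdown_range`, not the real API.

```rust
#[derive(Clone, Copy, Debug)]
struct Span(u32, u32);

// Hypothetical helper standing in for the real span lookup.
fn resolve(from_expansion: bool) -> Option<(Span, bool)> {
    Some((Span(0, 5), from_expansion))
}

fn explicit_link_span(from_expansion: bool) -> Option<Span> {
    // `?` unwraps the Option first: a `None` from `resolve` propagates out.
    // Then the refutable pattern `(sp, false)` must match; if the flag is
    // `true` (the span comes from a macro expansion), the `else` arm runs.
    let (sp, false) = resolve(from_expansion)? else {
        return None;
    };
    Some(sp)
}

fn main() {
    assert!(explicit_link_span(false).is_some()); // user-written doc: lint
    assert!(explicit_link_span(true).is_none()); // macro-expanded doc: skip
}
```
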