rustdoc: point at span in include_str!-ed md file #123204

Merged
merged 3 commits on Apr 12, 2024
10 changes: 6 additions & 4 deletions compiler/rustc_builtin_macros/src/source_util.rs
@@ -196,10 +196,10 @@ pub fn expand_include_str(
Err(guar) => return ExpandResult::Ready(DummyResult::any(sp, guar)),
};
ExpandResult::Ready(match load_binary_file(cx, path.as_str().as_ref(), sp, path_span) {
Ok(bytes) => match std::str::from_utf8(&bytes) {
Ok((bytes, bsp)) => match std::str::from_utf8(&bytes) {
Ok(src) => {
let interned_src = Symbol::intern(src);
MacEager::expr(cx.expr_str(sp, interned_src))
MacEager::expr(cx.expr_str(cx.with_def_site_ctxt(bsp), interned_src))
}
Err(_) => {
let guar = cx.dcx().span_err(sp, format!("`{path}` wasn't a utf-8 file"));
@@ -225,7 +225,9 @@ pub fn expand_include_bytes(
Err(guar) => return ExpandResult::Ready(DummyResult::any(sp, guar)),
};
ExpandResult::Ready(match load_binary_file(cx, path.as_str().as_ref(), sp, path_span) {
Ok(bytes) => {
Ok((bytes, _bsp)) => {
// Don't care about getting the span for the raw bytes,
// because the console can't really show them anyway.
let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes));
MacEager::expr(expr)
}
@@ -238,7 +240,7 @@ fn load_binary_file(
original_path: &Path,
macro_span: Span,
path_span: Span,
) -> Result<Lrc<[u8]>, Box<dyn MacResult>> {
) -> Result<(Lrc<[u8]>, Span), Box<dyn MacResult>> {
let resolved_path = match resolve_path(&cx.sess, original_path, macro_span) {
Ok(path) => path,
Err(err) => {
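The user-visible effect of threading the file span through `expand_include_str` is exercised by the new `tests/rustdoc-ui/include-str-bare-urls.rs` test further down. In miniature (condensed from that test, so the `.md` path is the test's auxiliary file):

```rust
// With the string literal's span taken from the included file, rustdoc's
// bare-URL warning points at the URL inside the Markdown file instead of
// at the `#![doc = ...]` attribute below.
#![deny(rustdoc::bare_urls)]
#![doc = include_str!("auxiliary/include-str-bare-urls.md")]
```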
9 changes: 7 additions & 2 deletions compiler/rustc_errors/src/emitter.rs
@@ -1513,7 +1513,9 @@ impl HumanEmitter {
for line_idx in 0..annotated_file.lines.len() {
let file = annotated_file.file.clone();
let line = &annotated_file.lines[line_idx];
if let Some(source_string) = file.get_line(line.line_index - 1) {
if let Some(source_string) =
line.line_index.checked_sub(1).and_then(|l| file.get_line(l))
{
let leading_whitespace = source_string
.chars()
.take_while(|c| c.is_whitespace())
@@ -1553,7 +1555,10 @@
for line in &annotated_file.lines {
max_line_len = max(
max_line_len,
annotated_file.file.get_line(line.line_index - 1).map_or(0, |s| s.len()),
line.line_index
.checked_sub(1)
.and_then(|l| annotated_file.file.get_line(l))
.map_or(0, |s| s.len()),
);
for ann in &line.annotations {
span_right_margin = max(span_right_margin, ann.start_col.display);
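The emitter changes replace `line.line_index - 1` with a `checked_sub`, guarding against a zero line index, which would underflow a `usize`. A minimal standalone sketch of the defensive pattern, using a made-up `line_text` helper rather than the emitter's real types:

```rust
fn line_text(lines: &[&str], line_index: usize) -> Option<String> {
    // `line_index` is 1-based; a value of 0 must not be allowed to underflow.
    line_index
        .checked_sub(1)
        .and_then(|i| lines.get(i))
        .map(|s| s.to_string())
}

fn main() {
    let lines = ["first", "second"];
    assert_eq!(line_text(&lines, 1).as_deref(), Some("first"));
    assert_eq!(line_text(&lines, 0), None); // previously: panic on `0 - 1`
}
```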
45 changes: 38 additions & 7 deletions compiler/rustc_resolve/src/rustdoc.rs
@@ -194,12 +194,12 @@ pub fn attrs_to_doc_fragments<'a>(
for (attr, item_id) in attrs {
if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
let doc = beautify_doc_string(doc_str, comment_kind);
let kind = if attr.is_doc_comment() {
DocFragmentKind::SugaredDoc
let (span, kind) = if attr.is_doc_comment() {
(attr.span, DocFragmentKind::SugaredDoc)
} else {
DocFragmentKind::RawDoc
(span_for_value(attr), DocFragmentKind::RawDoc)
};
let fragment = DocFragment { span: attr.span, doc, kind, item_id, indent: 0 };
let fragment = DocFragment { span, doc, kind, item_id, indent: 0 };
doc_fragments.push(fragment);
} else if !doc_only {
other_attrs.push(attr.clone());
@@ -211,6 +211,16 @@
(doc_fragments, other_attrs)
}

fn span_for_value(attr: &ast::Attribute) -> Span {
if let ast::AttrKind::Normal(normal) = &attr.kind
&& let ast::AttrArgs::Eq(_, ast::AttrArgsEq::Hir(meta)) = &normal.item.args
{
meta.span.with_ctxt(attr.span.ctxt())
} else {
attr.span
}
}

/// Return the doc-comments on this item, grouped by the module they came from.
/// The module can be different if this is a re-export with added documentation.
///
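For illustration, the two fragment kinds distinguished above in a tiny standalone example (not taken from the PR): a sugared doc comment keeps the whole comment as its span, while a `#[doc = ...]` attribute now gets the span of its value only, via `span_for_value`.

```rust
/// A sugared doc comment: `DocFragmentKind::SugaredDoc`.
/// Its fragment span is the full comment, exactly as before.
pub struct Sugared;

// A raw doc attribute: `DocFragmentKind::RawDoc`. With `span_for_value`, the
// fragment span now covers only the string literal, not the whole attribute.
#[doc = "Documentation supplied through the attribute form."]
pub struct Raw;

fn main() {}
```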
@@ -482,15 +492,36 @@ pub fn span_of_fragments(fragments: &[DocFragment]) -> Option<Span> {

/// Attempts to match a range of bytes from parsed markdown to a `Span` in the source code.
///
/// This method will return `None` if we cannot construct a span from the source map or if the
/// fragments are not all sugared doc comments. It's difficult to calculate the correct span in
/// that case due to escaping and other source features.
/// This method does not always work, because markdown bytes don't necessarily match source bytes,
/// like if escapes are used in the string. In this case, it returns `None`.
///
/// This method will return `Some` only if:
///
/// - The doc is made entirely from sugared doc comments, which cannot contain escapes
/// - The doc is entirely from a single doc fragment, with a string literal, exactly equal
/// - The doc comes from `include_str!`
pub fn source_span_for_markdown_range(
tcx: TyCtxt<'_>,
markdown: &str,
md_range: &Range<usize>,
fragments: &[DocFragment],
) -> Option<Span> {
if let &[fragment] = &fragments
&& fragment.kind == DocFragmentKind::RawDoc
&& let Ok(snippet) = tcx.sess.source_map().span_to_snippet(fragment.span)
&& snippet.trim_end() == markdown.trim_end()
&& let Ok(md_range_lo) = u32::try_from(md_range.start)
&& let Ok(md_range_hi) = u32::try_from(md_range.end)
{
// Single fragment with string that contains same bytes as doc.
return Some(Span::new(
fragment.span.lo() + rustc_span::BytePos(md_range_lo),
fragment.span.lo() + rustc_span::BytePos(md_range_hi),
fragment.span.ctxt(),
fragment.span.parent(),
));
}

let is_all_sugared_doc = fragments.iter().all(|frag| frag.kind == DocFragmentKind::SugaredDoc);

if !is_all_sugared_doc {
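The new early return in `source_span_for_markdown_range` covers the `include_str!` and plain string-literal cases: when a single `RawDoc` fragment's source snippet matches the markdown byte-for-byte, a markdown byte range maps onto the source by simple offset arithmetic from `fragment.span.lo()`. A simplified, hedged model of that mapping, with plain integers standing in for `BytePos`/`Span` and an invented function name:

```rust
use std::ops::Range;

/// Hypothetical stand-in for the new fast path: translate a markdown byte
/// range into a source range when the fragment text equals the markdown.
fn source_range_for_markdown_range(
    fragment_start: u32, // models `fragment.span.lo()`
    fragment_text: &str, // snippet obtained from the source map
    markdown: &str,
    md_range: &Range<usize>,
) -> Option<Range<u32>> {
    // Only sound when the bytes really line up; escapes or doc-comment
    // stripping would make the offsets point at the wrong characters.
    if fragment_text.trim_end() != markdown.trim_end() {
        return None;
    }
    let lo = u32::try_from(md_range.start).ok()?;
    let hi = u32::try_from(md_range.end).ok()?;
    Some(fragment_start + lo..fragment_start + hi)
}

fn main() {
    let md = "HEADS UP! https://example.com MUST SHOW UP!";
    // Pretend the fragment starts at absolute byte offset 100.
    let url = md.find("https").unwrap()..md.find(" MUST").unwrap();
    assert_eq!(
        source_range_for_markdown_range(100, md, md, &url),
        Some(110..129)
    );
}
```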
14 changes: 11 additions & 3 deletions compiler/rustc_span/src/source_map.rs
@@ -218,7 +218,7 @@ impl SourceMap {
///
/// Unlike `load_file`, guarantees that no normalization like BOM-removal
/// takes place.
pub fn load_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>> {
pub fn load_binary_file(&self, path: &Path) -> io::Result<(Lrc<[u8]>, Span)> {
let bytes = self.file_loader.read_binary_file(path)?;

// We need to add file to the `SourceMap`, so that it is present
@@ -227,8 +227,16 @@
// via `mod`, so we try to use real file contents and not just an
// empty string.
let text = std::str::from_utf8(&bytes).unwrap_or("").to_string();
self.new_source_file(path.to_owned().into(), text);
Ok(bytes)
let file = self.new_source_file(path.to_owned().into(), text);
Ok((
bytes,
Span::new(
file.start_pos,
BytePos(file.start_pos.0 + file.source_len.0),
SyntaxContext::root(),
None,
),
))
}

// By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
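With this change `load_binary_file` also returns a span covering the entire included file, built from the new `SourceFile`'s start position and length. A rough model of that bookkeeping, assuming (as in rustc) that each file occupies a contiguous byte range in a global source map; the `IncludedFile` type is invented for the sketch:

```rust
// Each included file occupies [start_pos, start_pos + source_len) in the
// global source map; the span handed back to the include_str! expansion
// covers exactly that range.
struct IncludedFile {
    start_pos: u32,  // models `file.start_pos`
    source_len: u32, // models `file.source_len`
}

impl IncludedFile {
    /// Absolute byte range of the whole file (the returned span).
    fn whole_file(&self) -> std::ops::Range<u32> {
        self.start_pos..self.start_pos + self.source_len
    }

    /// Absolute position of a byte offset inside the file.
    fn abs(&self, offset_in_file: u32) -> u32 {
        debug_assert!(offset_in_file <= self.source_len);
        self.start_pos + offset_in_file
    }
}

fn main() {
    let file = IncludedFile { start_pos: 1000, source_len: 250 };
    assert_eq!(file.whole_file(), 1000..1250);
    assert_eq!(file.abs(10), 1010);
}
```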
2 changes: 1 addition & 1 deletion src/tools/clippy/clippy_lints/src/large_include_file.rs
@@ -71,7 +71,7 @@ impl LateLintPass<'_> for LargeIncludeFile {
span_lint_and_note(
cx,
LARGE_INCLUDE_FILE,
expr.span,
expr.span.source_callsite(),
"attempted to include a large file",
None,
&format!(
2 changes: 1 addition & 1 deletion src/tools/clippy/clippy_lints/src/strings.rs
@@ -300,7 +300,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
e.span,
"calling `as_bytes()` on `include_str!(..)`",
"consider using `include_bytes!(..)` instead",
snippet_with_applicability(cx, receiver.span, r#""foo""#, &mut applicability).replacen(
snippet_with_applicability(cx, receiver.span.source_callsite(), r#""foo""#, &mut applicability).replacen(
"include_str",
"include_bytes",
1,
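Both Clippy tweaks guard against the new spans: the `include_str!` output now carries a span inside the included file, so pointing a lint at `expr.span` directly would highlight the file contents, and `Span::source_callsite()` walks back to the macro invocation in the user's code. That is also why the `this error originates in the macro` notes disappear from the stderr diff below. A minimal, made-up model of what `source_callsite` does:

```rust
// Toy model: walk the expansion chain until we reach a span that was written
// directly in user code (no call-site parent). `FakeSpan` is invented here;
// rustc's real `Span` tracks this through its expansion data.
#[derive(Clone, Copy)]
struct FakeSpan {
    id: u32,
    call_site: Option<&'static FakeSpan>,
}

fn source_callsite(mut span: FakeSpan) -> FakeSpan {
    while let Some(parent) = span.call_site {
        span = *parent;
    }
    span
}

fn main() {
    // The user wrote `include_bytes!("big.bin")` here ...
    static CALL: FakeSpan = FakeSpan { id: 1, call_site: None };
    // ... and the expansion produced an expression whose span points elsewhere.
    let expanded = FakeSpan { id: 2, call_site: Some(&CALL) };
    assert_eq!(source_callsite(expanded).id, 1); // the lint points at the call
}
```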
2 changes: 0 additions & 2 deletions src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr
@@ -7,7 +7,6 @@ LL | const TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt");
= note: the configuration allows a maximum size of 600 bytes
= note: `-D clippy::large-include-file` implied by `-D warnings`
= help: to override `-D warnings` add `#[allow(clippy::large_include_file)]`
= note: this error originates in the macro `include_bytes` (in Nightly builds, run with -Z macro-backtrace for more info)

error: attempted to include a large file
--> tests/ui-toml/large_include_file/large_include_file.rs:14:35
@@ -16,7 +15,6 @@ LL | const TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the configuration allows a maximum size of 600 bytes
= note: this error originates in the macro `include_str` (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to 2 previous errors

11 changes: 6 additions & 5 deletions src/tools/clippy/tests/ui/empty_docs.stderr
@@ -25,19 +25,20 @@ LL | ///
= help: consider removing or filling it

error: empty doc comment
--> tests/ui/empty_docs.rs:30:5
--> tests/ui/empty_docs.rs:30:13
|
LL | #[doc = ""]
| ^^^^^^^^^^^
| ^^
|
= help: consider removing or filling it

error: empty doc comment
--> tests/ui/empty_docs.rs:33:5
--> tests/ui/empty_docs.rs:33:13
|
LL | / #[doc = ""]
LL | #[doc = ""]
| _____________^
LL | | #[doc = ""]
| |_______________^
| |______________^
|
= help: consider removing or filling it

10 changes: 10 additions & 0 deletions tests/rustdoc-ui/auxiliary/include-str-bare-urls.md
@@ -0,0 +1,10 @@
HEADS UP! https://example.com MUST SHOW UP IN THE STDERR FILE!

Normally, a line with errors on it will also have a comment
marking it up as something that needs to generate an error.

The test harness doesn't gather hot comments from this file.
Rustdoc will generate an error for the line, and the `.stderr`
snapshot includes this error, but Compiletest doesn't see it.

If the stderr file changes, make sure the warning points at the URL!
15 changes: 15 additions & 0 deletions tests/rustdoc-ui/include-str-bare-urls.rs
@@ -0,0 +1,15 @@
// https://github.com/rust-lang/rust/issues/118549
//
// HEADS UP!
//
// Normally, a line with errors on it will also have a comment
// marking it up as something that needs to generate an error.
//
// The test harness doesn't gather hot comments from the `.md` file.
// Rustdoc will generate an error for the line, and the `.stderr`
// snapshot includes this error, but Compiletest doesn't see it.
//
// If the stderr file changes, make sure the warning points at the URL!

#![deny(rustdoc::bare_urls)]
#![doc=include_str!("auxiliary/include-str-bare-urls.md")]
15 changes: 15 additions & 0 deletions tests/rustdoc-ui/include-str-bare-urls.stderr
@@ -0,0 +1,15 @@
error: this URL is not a hyperlink
--> $DIR/auxiliary/include-str-bare-urls.md:1:11
|
LL | HEADS UP! https://example.com MUST SHOW UP IN THE STDERR FILE!
| ^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://example.com>`
|
= note: bare URLs are not automatically turned into clickable links
note: the lint level is defined here
--> $DIR/include-str-bare-urls.rs:14:9
|
LL | #![deny(rustdoc::bare_urls)]
| ^^^^^^^^^^^^^^^^^^

error: aborting due to 1 previous error

4 changes: 2 additions & 2 deletions tests/rustdoc-ui/intra-doc/warning.rs
@@ -47,11 +47,11 @@ pub fn d() {}

macro_rules! f {
($f:expr) => {
#[doc = $f] //~ WARNING `BarF`
#[doc = $f]
pub fn f() {}
}
}
f!("Foo\nbar [BarF] bar\nbaz");
f!("Foo\nbar [BarF] bar\nbaz"); //~ WARNING `BarF`

/** # for example,
*
19 changes: 8 additions & 11 deletions tests/rustdoc-ui/intra-doc/warning.stderr
@@ -69,10 +69,10 @@ LL | bar [BarC] bar
= help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`

warning: unresolved link to `BarD`
--> $DIR/warning.rs:45:1
--> $DIR/warning.rs:45:9
|
LL | #[doc = "Foo\nbar [BarD] bar\nbaz"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the link appears in this line:

@@ -82,13 +82,10 @@ LL | #[doc = "Foo\nbar [BarD] bar\nbaz"]
= help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`

warning: unresolved link to `BarF`
--> $DIR/warning.rs:50:9
--> $DIR/warning.rs:54:4
|
LL | #[doc = $f]
| ^^^^^^^^^^^
...
LL | f!("Foo\nbar [BarF] bar\nbaz");
| ------------------------------ in this macro invocation
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the link appears in this line:

@@ -115,10 +112,10 @@ LL | * time to introduce a link [error]
= help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`

warning: unresolved link to `error`
--> $DIR/warning.rs:68:1
--> $DIR/warning.rs:68:9
|
LL | #[doc = "single line [error]"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^
|
= note: the link appears in this line:

@@ -128,10 +125,10 @@ LL | #[doc = "single line [error]"]
= help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`

warning: unresolved link to `error`
--> $DIR/warning.rs:71:1
--> $DIR/warning.rs:71:9
|
LL | #[doc = "single line with \"escaping\" [error]"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the link appears in this line:

7 changes: 4 additions & 3 deletions tests/rustdoc-ui/invalid-syntax.stderr
@@ -90,12 +90,13 @@ LL | | /// ```
= note: error from rustc: unknown start of token: \

warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:70:1
--> $DIR/invalid-syntax.rs:70:9
|
LL | / #[doc = "```"]
LL | #[doc = "```"]
| _________^
LL | | /// \_
LL | | #[doc = "```"]
| |______________^
| |_____________^
|
= help: mark blocks that do not contain Rust code as text: ```text
= note: error from rustc: unknown start of token: \