Commit 9382f07

Use strip_{prefix,suffix} in rustdoc
1 parent 0c03aee commit 9382f07

File tree

10 files changed: +164 additions, -155 deletions

src/bootstrap/doc.rs

Lines changed: 8 additions & 9 deletions
@@ -461,22 +461,21 @@ impl Step for Std {

             builder.run(&mut cargo.into());
         };
-        let krates = ["alloc", "core", "std", "proc_macro", "test"];
-        for krate in &krates {
+        static KRATES: &[&str] = &["alloc", "core", "std", "proc_macro", "test"];
+        for krate in KRATES {
             run_cargo_rustdoc_for(krate);
         }
         builder.cp_r(&my_out, &out);

         // Look for src/libstd, src/libcore etc in the `x.py doc` arguments and
         // open the corresponding rendered docs.
         for path in builder.paths.iter().map(components_simplified) {
-            if path.get(0) == Some(&"src")
-                && path.get(1).map_or(false, |dir| dir.starts_with("lib"))
-            {
-                let requested_crate = &path[1][3..];
-                if krates.contains(&requested_crate) {
-                    let index = out.join(requested_crate).join("index.html");
-                    open(builder, &index);
+            if let ["src", path, ..] = path.as_slice() {
+                if let Some(krate) = path.strip_prefix("lib") {
+                    if KRATES.contains(&krate) {
+                        let index = out.join(krate).join("index.html");
+                        open(builder, &index);
+                    }
                 }
             }
         }
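
Note: the new lookup combines a slice pattern with `strip_prefix`. Below is a minimal standalone sketch of that logic; the `path_arg_to_crate` helper and the sample paths are illustrative and not part of the commit, which works on `builder.paths` directly.

    // Map an `x.py doc` path argument such as "src/libcore" to the crate name
    // "core", but only for crates that are documented by this step.
    static KRATES: &[&str] = &["alloc", "core", "std", "proc_macro", "test"];

    fn path_arg_to_crate(components: &[&str]) -> Option<&'static str> {
        // `["src", dir, ..]` matches any path whose first component is "src".
        if let ["src", dir, ..] = components {
            // `strip_prefix` returns `Some(rest)` only when the prefix is present,
            // so "libcore" yields Some("core") while "test" yields None.
            if let Some(krate) = dir.strip_prefix("lib") {
                return KRATES.iter().copied().find(|&k| k == krate);
            }
        }
        None
    }

    fn main() {
        assert_eq!(path_arg_to_crate(&["src", "libcore"]), Some("core"));
        assert_eq!(path_arg_to_crate(&["src", "libfoo"]), None);
        assert_eq!(path_arg_to_crate(&["doc", "libstd"]), None);
    }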

src/librustc_ast/util/comments.rs

Lines changed: 34 additions & 26 deletions
@@ -29,27 +29,31 @@ pub struct Comment {
 }

 pub fn is_line_doc_comment(s: &str) -> bool {
-    let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/')
-        || s.starts_with("//!");
-    debug!("is {:?} a doc comment? {}", s, res);
-    res
+    let yes = match s.as_bytes() {
+        [b'/', b'/', b'/', c, ..] => *c != b'/',
+        [b'/', b'/', b'/', ..] => true,
+        [b'/', b'/', b'!', ..] => true,
+        _ => false,
+    };
+    debug!("is {:?} a line doc comment? {}", s, yes);
+    yes
 }

 pub fn is_block_doc_comment(s: &str) -> bool {
-    // Prevent `/**/` from being parsed as a doc comment
-    let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*')
-        || s.starts_with("/*!"))
-        && s.len() >= 5;
-    debug!("is {:?} a doc comment? {}", s, res);
-    res
+    // Previously, `/**/` was incorrectly regarded as a doc comment because it
+    // starts with `/**` and ends with `*/`. However, this caused an ICE
+    // because some code assumed that the length of a doc comment is at least 5.
+    let yes = match s.as_bytes() {
+        [b'/', b'*', b'*', c, _, ..] => *c != b'*',
+        [b'/', b'*', b'!', _, _, ..] => true,
+        _ => false,
+    };
+    debug!("is {:?} a block doc comment? {}", s, yes);
+    yes
 }

-// FIXME(#64197): Try to privatize this again.
-pub fn is_doc_comment(s: &str) -> bool {
-    (s.starts_with("///") && is_line_doc_comment(s))
-        || s.starts_with("//!")
-        || (s.starts_with("/**") && is_block_doc_comment(s))
-        || s.starts_with("/*!")
+fn is_doc_comment(s: &str) -> bool {
+    is_line_doc_comment(s) || is_block_doc_comment(s)
 }

 pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
@@ -127,22 +131,26 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     const ONELINERS: &[&str] = &["///!", "///", "//!", "//"];

     for prefix in ONELINERS {
-        if comment.starts_with(*prefix) {
-            return (&comment[prefix.len()..]).to_string();
+        if let Some(tail) = comment.strip_prefix(*prefix) {
+            return tail.to_owned();
         }
     }

-    if comment.starts_with("/*") {
-        let lines =
-            comment[3..comment.len() - 2].lines().map(|s| s.to_string()).collect::<Vec<String>>();
+    match comment
+        .strip_prefix("/**")
+        .or_else(|| comment.strip_prefix("/*!"))
+        .and_then(|s| s.strip_suffix("*/"))
+    {
+        Some(doc) => {
+            let lines = doc.lines().map(|s| s.to_string()).collect::<Vec<String>>();

-        let lines = vertical_trim(lines);
-        let lines = horizontal_trim(lines);
+            let lines = vertical_trim(lines);
+            let lines = horizontal_trim(lines);

-        return lines.join("\n");
+            lines.join("\n")
+        }
+        _ => panic!("not a doc-comment: {}", comment),
     }
-
-    panic!("not a doc-comment: {}", comment);
 }

 /// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
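
Note: the byte-slice patterns above encode the prefix check and, for block comments, the minimum-length rule directly in the match arms. A standalone sketch without the `debug!` logging, with illustrative test inputs:

    fn is_line_doc_comment(s: &str) -> bool {
        match s.as_bytes() {
            [b'/', b'/', b'/', c, ..] => *c != b'/',
            [b'/', b'/', b'/', ..] => true,
            [b'/', b'/', b'!', ..] => true,
            _ => false,
        }
    }

    fn is_block_doc_comment(s: &str) -> bool {
        // The two extra pattern slots require at least five bytes, so `/**/`
        // is no longer classified as a doc comment.
        match s.as_bytes() {
            [b'/', b'*', b'*', c, _, ..] => *c != b'*',
            [b'/', b'*', b'!', _, _, ..] => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_line_doc_comment("/// docs"));
        assert!(is_line_doc_comment("//! inner docs"));
        assert!(!is_line_doc_comment("//// plain comment"));
        assert!(is_block_doc_comment("/** docs */"));
        assert!(!is_block_doc_comment("/**/"));
        assert!(!is_block_doc_comment("/*** plain block */"));
    }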

src/librustc_expand/parse/lexer/tests.rs

Lines changed: 4 additions & 4 deletions
@@ -1,5 +1,5 @@
 use rustc_ast::token::{self, Token, TokenKind};
-use rustc_ast::util::comments::is_doc_comment;
+use rustc_ast::util::comments::is_line_doc_comment;
 use rustc_ast::with_default_globals;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{emitter::EmitterWriter, Handler};
@@ -225,9 +225,9 @@ fn literal_suffixes() {

 #[test]
 fn line_doc_comments() {
-    assert!(is_doc_comment("///"));
-    assert!(is_doc_comment("/// blah"));
-    assert!(!is_doc_comment("////"));
+    assert!(is_line_doc_comment("///"));
+    assert!(is_line_doc_comment("/// blah"));
+    assert!(!is_line_doc_comment("////"));
 }

 #[test]

src/librustdoc/clean/utils.rs

Lines changed: 8 additions & 6 deletions
@@ -481,12 +481,14 @@ pub fn print_const(cx: &DocContext<'_>, n: &'tcx ty::Const<'_>) -> String {
         _ => {
             let mut s = n.to_string();
             // array lengths are obviously usize
-            if s.ends_with("_usize") {
-                let n = s.len() - "_usize".len();
-                s.truncate(n);
-                if s.ends_with(": ") {
-                    let n = s.len() - ": ".len();
-                    s.truncate(n);
+            if let Some(head) = s.strip_suffix("_usize") {
+                let new_len = match head.strip_suffix(": ") {
+                    None => head.len(),
+                    Some(hhead) => hhead.len(),
+                };
+                // SAFETY: `new_len` should be in between char boundary
+                unsafe {
+                    s.as_mut_vec().set_len(new_len);
                 }
             }
             s
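
Note: the `unsafe` block relies on `new_len` being the byte length of a prefix of `s`, which is always a char boundary. A safe sketch of the same suffix trimming using `String::truncate`; the function name and sample inputs are made up for illustration.

    fn trim_usize_suffix(mut s: String) -> String {
        let new_len = match s.strip_suffix("_usize") {
            Some(head) => match head.strip_suffix(": ") {
                Some(hhead) => hhead.len(),
                None => head.len(),
            },
            None => s.len(),
        };
        // `new_len` is the length of a prefix of `s`, so `truncate` cannot panic.
        s.truncate(new_len);
        s
    }

    fn main() {
        assert_eq!(trim_usize_suffix("3_usize".to_string()), "3");
        assert_eq!(trim_usize_suffix("N: _usize".to_string()), "N");
        assert_eq!(trim_usize_suffix("3".to_string()), "3");
    }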

src/librustdoc/html/markdown.rs

Lines changed: 22 additions & 14 deletions
@@ -133,16 +133,24 @@ impl<'a> Line<'a> {
 // then reallocate to remove it; which would make us return a String.
 fn map_line(s: &str) -> Line<'_> {
     let trimmed = s.trim();
-    if trimmed.starts_with("##") {
-        Line::Shown(Cow::Owned(s.replacen("##", "#", 1)))
-    } else if trimmed.starts_with("# ") {
-        // # text
-        Line::Hidden(&trimmed[2..])
-    } else if trimmed == "#" {
-        // We cannot handle '#text' because it could be #[attr].
-        Line::Hidden("")
-    } else {
-        Line::Shown(Cow::Borrowed(s))
+    match trimmed.strip_prefix("#") {
+        Some(tail) => match tail.strip_prefix("#") {
+            // `##text` rendered as `#text`.
+            Some(_) => Line::Shown(tail.into()),
+            None => {
+                if tail.is_empty() {
+                    // `#` will be hidden.
+                    Line::Hidden("")
+                } else if let Some(text) = tail.strip_prefix(' ') {
+                    // `# text` will be hidden.
+                    Line::Hidden(text)
+                } else {
+                    // `#text` will be shown as it could be `#[attr]`
+                    Line::Shown(s.into())
+                }
+            }
+        },
+        None => Line::Shown(s.into()),
     }
 }

@@ -754,7 +762,7 @@ impl LangString {
                 }
                 x if x.starts_with("ignore-") => {
                     if enable_per_target_ignores {
-                        ignores.push(x.trim_start_matches("ignore-").to_owned());
+                        ignores.push(x.strip_prefix("ignore-").unwrap().to_owned());
                         seen_rust_tags = !seen_other_tags;
                     }
                 }
@@ -776,10 +784,10 @@ impl LangString {
                     data.no_run = true;
                 }
                 x if x.starts_with("edition") => {
-                    data.edition = x[7..].parse::<Edition>().ok();
+                    data.edition = x.strip_prefix("edition").unwrap().parse::<Edition>().ok();
                 }
-                x if allow_error_code_check && x.starts_with('E') && x.len() == 5 => {
-                    if x[1..].parse::<u32>().is_ok() {
+                x if allow_error_code_check && x.len() == 5 && x.starts_with('E') => {
+                    if x.strip_prefix('E').unwrap().parse::<u32>().is_ok() {
                         data.error_codes.push(x.to_owned());
                         seen_rust_tags = !seen_other_tags || seen_rust_tags;
                     } else {
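
Note: the nested `strip_prefix` calls in `map_line` reproduce the old decision table for doc-test lines. A reduced sketch of that table; the `Mapped` enum and the `classify` name are illustrative, the real function returns `Line<'_>`.

    enum Mapped<'a> {
        Keep(&'a str),
        Hide(&'a str),
    }

    fn classify(s: &str) -> Mapped<'_> {
        let trimmed = s.trim();
        match trimmed.strip_prefix('#') {
            // "##text" is shown with one `#` removed.
            Some(tail) if tail.starts_with('#') => Mapped::Keep(tail),
            // A lone "#" is hidden entirely.
            Some("") => Mapped::Hide(""),
            // "# text" is hidden but still compiled.
            Some(tail) if tail.starts_with(' ') => Mapped::Hide(&tail[1..]),
            // "#text" is shown unchanged, since it could be an attribute.
            Some(_) | None => Mapped::Keep(s),
        }
    }

    fn main() {
        assert!(matches!(classify("# use std::fmt;"), Mapped::Hide("use std::fmt;")));
        assert!(matches!(classify("## raw hash"), Mapped::Keep("# raw hash")));
        assert!(matches!(classify("#[derive(Debug)]"), Mapped::Keep("#[derive(Debug)]")));
        assert!(matches!(classify("let x = 1;"), Mapped::Keep("let x = 1;")));
    }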

src/librustdoc/html/render.rs

Lines changed: 3 additions & 5 deletions
@@ -810,11 +810,9 @@ themePicker.onblur = handleThemeButtonsBlur;
             if line.starts_with(&format!("\"{}\"", krate)) {
                 continue;
             }
-            if line.ends_with(",\\") {
-                ret.push(line[..line.len() - 2].to_string());
-            } else {
-                // Ends with "\\" (it's the case for the last added crate line)
-                ret.push(line[..line.len() - 1].to_string());
+            // Ends with "\\" (it's the case for the last added crate line)
+            if let Some(head) = line.strip_suffix(",\\").or_else(|| line.strip_suffix("\\")) {
+                ret.push(head.to_string());
             }
             krates.push(
                 line.split('"')
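
Note: the `or_else` chain reads as "strip a trailing `,\` if present, otherwise a trailing `\`". A quick illustration with made-up crate lines:

    fn main() {
        // The last crate line in the generated JS ends with a bare backslash;
        // every other line ends with ",\". These inputs are illustrative.
        for line in [r#""foo",\"#, r#""bar"\"#] {
            if let Some(head) = line.strip_suffix(",\\").or_else(|| line.strip_suffix("\\")) {
                println!("{}", head); // prints "foo" and "bar", quotes included
            }
        }
    }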

src/librustdoc/html/sources.rs

Lines changed: 1 addition & 2 deletions
@@ -83,8 +83,7 @@ impl<'a> SourceCollector<'a> {
         };

         // Remove the utf-8 BOM if any
-        let contents =
-            if contents.starts_with("\u{feff}") { &contents[3..] } else { &contents[..] };
+        let contents = contents.strip_prefix("\u{feff}").unwrap_or(&contents[..]);

         // Create the intermediate directories
         let mut cur = self.dst.clone();
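
Note: `strip_prefix` removes the whole three-byte BOM in one step, which is what the old `&contents[3..]` index encoded by hand. A small illustration; the inputs are made up.

    fn main() {
        let with_bom = "\u{feff}fn main() {}";
        let without_bom = "fn main() {}";
        assert_eq!(with_bom.strip_prefix("\u{feff}").unwrap_or(with_bom), "fn main() {}");
        assert_eq!(without_bom.strip_prefix("\u{feff}").unwrap_or(without_bom), "fn main() {}");
    }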

src/librustdoc/markdown.rs

Lines changed: 2 additions & 2 deletions
@@ -18,9 +18,9 @@ fn extract_leading_metadata(s: &str) -> (Vec<&str>, &str) {
     let mut count = 0;

     for line in s.lines() {
-        if line.starts_with("# ") || line.starts_with('%') {
+        if let Some(tail) = line.strip_prefix("# ").or_else(|| line.strip_prefix('%')) {
             // trim the whitespace after the symbol
-            metadata.push(line[1..].trim_start());
+            metadata.push(tail.trim_start());
             count += line.len() + 1;
         } else {
             return (metadata, &s[count..]);
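
Note: `strip_prefix` accepts both `&str` and `char` patterns, so the "# " and '%' metadata markers can share one `or_else` chain. A small illustration with made-up lines:

    fn main() {
        for line in ["# Title", "%Other title", "plain text"] {
            match line.strip_prefix("# ").or_else(|| line.strip_prefix('%')) {
                Some(tail) => println!("metadata: {}", tail.trim_start()),
                None => println!("body: {}", line),
            }
        }
    }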

src/librustdoc/passes/collect_intra_doc_links.rs

Lines changed: 42 additions & 50 deletions
@@ -541,38 +541,31 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {

         let link = ori_link.replace("`", "");
         let parts = link.split('#').collect::<Vec<_>>();
-        let (link, extra_fragment) = if parts.len() > 2 {
-            build_diagnostic(
-                cx,
-                &item,
-                &link,
-                &dox,
-                link_range,
-                "has an issue with the link anchor.",
-                "only one `#` is allowed in a link",
-                None,
-            );
-            continue;
-        } else if parts.len() == 2 {
-            if parts[0].trim().is_empty() {
-                // This is an anchor to an element of the current page, nothing to do in here!
+        let (link, extra_fragment) = match *parts {
+            [] => unreachable!("`str::split` always returns a non-empty list"),
+            [a] => (a.to_owned(), None),
+            // This is an anchor to an element of the current page, nothing to do in here!
+            [a, _] if a.trim().is_empty() => continue,
+            [a, b] => (a.to_owned(), Some(b.to_owned())),
+            [_, _, _, ..] => {
+                build_diagnostic(
+                    cx,
+                    &item,
+                    &link,
+                    &dox,
+                    link_range,
+                    "has an issue with the link anchor.",
+                    "only one `#` is allowed in a link",
+                    None,
+                );
                 continue;
             }
-            (parts[0].to_owned(), Some(parts[1].to_owned()))
-        } else {
-            (parts[0].to_owned(), None)
         };
         let resolved_self;
         let mut path_str;
         let (res, fragment) = {
-            let mut kind = None;
-            path_str = if let Some(prefix) = ["struct@", "enum@", "type@", "trait@", "union@"]
-                .iter()
-                .find(|p| link.starts_with(**p))
-            {
-                kind = Some(TypeNS);
-                link.trim_start_matches(prefix)
-            } else if let Some(prefix) = [
+            static TYPES: &[&str] = &["struct@", "enum@", "type@", "trait@", "union@"];
+            static TY_KINDS: &[&str] = &[
                 "const@",
                 "static@",
                 "value@",
@@ -581,28 +574,27 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
                 "fn@",
                 "module@",
                 "method@",
-            ]
-            .iter()
-            .find(|p| link.starts_with(**p))
-            {
-                kind = Some(ValueNS);
-                link.trim_start_matches(prefix)
-            } else if link.ends_with("()") {
-                kind = Some(ValueNS);
-                link.trim_end_matches("()")
-            } else if link.starts_with("macro@") {
-                kind = Some(MacroNS);
-                link.trim_start_matches("macro@")
-            } else if link.starts_with("derive@") {
-                kind = Some(MacroNS);
-                link.trim_start_matches("derive@")
-            } else if link.ends_with('!') {
-                kind = Some(MacroNS);
-                link.trim_end_matches('!')
-            } else {
-                &link[..]
-            }
-            .trim();
+            ];
+            let (kind, path) =
+                if let Some(tail) = TYPES.iter().filter_map(|&p| link.strip_prefix(p)).next() {
+                    (Some(TypeNS), tail)
+                } else if let Some(tail) =
+                    TY_KINDS.iter().filter_map(|&p| link.strip_prefix(p)).next()
+                {
+                    (Some(ValueNS), tail)
+                } else if let Some(head) = link.strip_suffix("()") {
+                    (Some(ValueNS), head)
+                } else if let Some(tail) =
+                    link.strip_prefix("macro@").or_else(|| link.strip_prefix("derive@"))
+                {
+                    (Some(MacroNS), tail)
+                } else if let Some(head) = link.strip_suffix('!') {
+                    (Some(MacroNS), head)
+                } else {
+                    (None, &link[..])
+                };
+
+            path_str = path.trim();

             if path_str.contains(|ch: char| !(ch.is_alphanumeric() || ch == ':' || ch == '_')) {
                 continue;
@@ -623,9 +615,9 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
             if item.is_mod() && item.attrs.inner_docs { None } else { parent_node };

         // replace `Self` with suitable item's parent name
-        if path_str.starts_with("Self::") {
+        if let Some(item) = path_str.strip_prefix("Self::") {
             if let Some(ref name) = parent_name {
-                resolved_self = format!("{}::{}", name, &path_str[6..]);
+                resolved_self = format!("{}::{}", name, item);
                 path_str = &resolved_self;
             }
         }
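
Note: the `match *parts` above dispatches on the shape of the `split('#')` result in one place. A standalone sketch of that dispatch; the `SplitLink` enum and its variants are illustrative, while the real code emits a diagnostic and continues the loop instead.

    enum SplitLink {
        Plain(String),
        WithFragment(String, String),
        SelfAnchor,
        TooManyAnchors,
    }

    fn split_link(link: &str) -> SplitLink {
        let parts = link.split('#').collect::<Vec<_>>();
        match *parts {
            [] => unreachable!("`str::split` always returns a non-empty list"),
            [a] => SplitLink::Plain(a.to_owned()),
            // An anchor into the current page, e.g. "#method.push".
            [a, _] if a.trim().is_empty() => SplitLink::SelfAnchor,
            [a, b] => SplitLink::WithFragment(a.to_owned(), b.to_owned()),
            [_, _, _, ..] => SplitLink::TooManyAnchors,
        }
    }

    fn main() {
        assert!(matches!(split_link("Vec"), SplitLink::Plain(_)));
        assert!(matches!(split_link("Vec#examples"), SplitLink::WithFragment(_, _)));
        assert!(matches!(split_link("#method.push"), SplitLink::SelfAnchor));
        assert!(matches!(split_link("a#b#c"), SplitLink::TooManyAnchors));
    }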
