
Commit ff40e37

Some code cleanup and tidy/test fixes
1 parent 738e145 commit ff40e37

17 files changed, +109 -101 lines changed


src/doc/unstable-book/src/language-features/plugin.md

+3 -3
@@ -56,15 +56,15 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder; // A trait for expr_usize.
 use syntax_pos::Span;
 use rustc_plugin::Registry;
 
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
        _ => {
            cx.span_err(sp, "argument should be a single identifier");
            return DummyResult::any(sp);
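
Note: the plugin.md example changes because `Token` in this codebase is now a struct carrying a `kind` and a `span` rather than a bare kind, so token trees are destructured with struct patterns (and the trait object gains an explicit `dyn`). A standalone sketch of the pattern change, using simplified local stand-in types rather than the real libsyntax ones:

// Local stand-ins only; not the libsyntax definitions.
#[allow(dead_code)]
struct Span(u32, u32);

#[allow(dead_code)]
enum TokenKind {
    Ident(String),
    Comma,
}

struct Token {
    kind: TokenKind,
    #[allow(dead_code)]
    span: Span,
}

enum TokenTree {
    Token(Token),
}

fn ident_name(tt: &TokenTree) -> Option<&str> {
    match tt {
        // Old shape (roughly): `TokenTree::Token(_, TokenKind::Ident(s))`.
        // New shape: a struct pattern that picks out `kind` and ignores the rest.
        TokenTree::Token(Token { kind: TokenKind::Ident(s), .. }) => Some(s.as_str()),
        _ => None,
    }
}

fn main() {
    let tt = TokenTree::Token(Token {
        kind: TokenKind::Ident("foo".to_string()),
        span: Span(0, 3),
    });
    assert_eq!(ident_name(&tt), Some("foo"));
}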

src/librustc_lint/builtin.rs

+3 -9
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(token) => match token.ident() {
-                    // only report non-raw idents
-                    Some((ident, false)) => {
-                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: token.span.substitute_dummy(ident.span),
-                            ..ident
-                        });
-                    }
-                    _ => {},
+                // Only report non-raw idents.
+                TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
+                    self.check_ident_token(cx, UnderMacro(true), ident);
                 }
                 TokenTree::Delimited(_, _, tts) => {
                     self.check_tokens(cx, tts)
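
Note: the rewrite above folds a nested `match` on `token.ident()` (which yields an identifier together with an is-raw flag) into a single `if let Some((ident, false))` arm. A standalone sketch of the same simplification, with a local stand-in for `ident()`:

// Stand-in for `Token::ident()`: returns a name plus an is-raw flag.
fn ident_of(raw: bool) -> Option<(&'static str, bool)> {
    Some(("ident", raw))
}

// Before: a nested match whose only interesting arm is the non-raw case.
fn check_old(raw: bool) -> Option<&'static str> {
    match ident_of(raw) {
        Some((name, false)) => Some(name),
        _ => None,
    }
}

// After: only non-raw idents are reported, expressed directly with `if let`.
fn check_new(raw: bool) -> Option<&'static str> {
    if let Some((name, false)) = ident_of(raw) {
        Some(name)
    } else {
        None
    }
}

fn main() {
    assert_eq!(check_old(false), check_new(false));
    assert_eq!(check_old(true), check_new(true));
}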

src/libsyntax/attr/mod.rs

+6 -3
@@ -483,7 +483,8 @@ impl MetaItem {
             Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
             Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
                 let mut segments = if let token::Ident(name, _) = kind {
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
@@ -493,12 +494,14 @@ impl MetaItem {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
+                            = tokens.next() {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                     } else {
                         break;

src/libsyntax/diagnostics/plugin.rs

+3 -1
@@ -77,7 +77,9 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         },
         (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
             Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
-            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
+            Some(&TokenTree::Token(Token {
+                kind: token::Literal(token::Lit { symbol, .. }), ..
+            }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()

src/libsyntax/early_buffered_lints.rs

+1 -1
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use crate::syntax::ast::NodeId;
+use crate::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be

src/libsyntax/ext/tt/quoted.rs

+21 -20
@@ -24,12 +24,12 @@ pub struct Delimited {
 
 impl Delimited {
     /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::TokenKind {
+    pub fn open_token(&self) -> TokenKind {
         token::OpenDelim(self.delim)
     }
 
     /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::TokenKind {
+    pub fn close_token(&self) -> TokenKind {
         token::CloseDelim(self.delim)
     }
 
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::TokenKind>,
+    pub separator: Option<TokenKind>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -210,20 +210,21 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
-                            Some((kind, _)) => {
-                                let span = token.span.with_lo(start_sp.lo());
-                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                continue;
-                            }
-                            _ => token.span,
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+                        match trees.next() {
+                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                Some((kind, _)) => {
+                                    let span = token.span.with_lo(start_sp.lo());
+                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    continue;
+                                }
+                                _ => token.span,
+                            },
+                            tree => tree
+                                .as_ref()
+                                .map(tokenstream::TokenTree::span)
+                                .unwrap_or(span),
                         },
-                        tree => tree
-                            .as_ref()
-                            .map(tokenstream::TokenTree::span)
-                            .unwrap_or(span),
-                    },
                     tree => tree
                         .as_ref()
                         .map(tokenstream::TokenTree::span)
@@ -370,7 +371,7 @@
 
 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
+fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
     match *token {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
@@ -423,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -448,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -566,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {

src/libsyntax/ext/tt/transcribe.rs

+1 -1
@@ -242,7 +242,7 @@ pub fn transcribe(
                     Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                 sp = sp.apply_mark(cx.current_expansion.mark);
                 result.push(TokenTree::token(token::Dollar, sp).into());
-                result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
+                result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
             }
         }

src/libsyntax/lib.rs

+0 -6
@@ -137,12 +137,6 @@ pub mod util {
 
 pub mod json;
 
-pub mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-    pub use crate::ast;
-}
-
 pub mod ast;
 pub mod attr;
 pub mod source_map;
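
Note: deleting the `pub mod syntax { ... }` re-export facade lines up with the path fixes elsewhere in this commit (`crate::syntax::ast::NodeId` becomes `crate::ast::NodeId` in early_buffered_lints.rs, and `crate::syntax::parse::parser::emit_unclosed_delims` becomes `crate::parse::parser::emit_unclosed_delims` in parse/mod.rs). A minimal standalone sketch, with hypothetical module names, of why removing such a facade forces those updates:

// Hypothetical stand-ins: `ast` plays the role of a real module, `facade`
// the role of the removed `pub mod syntax { ... }` re-export block.
pub mod ast {
    pub type NodeId = u32;
}

mod facade {
    pub use crate::ast; // the same module, reachable under a second path
}

use crate::ast::NodeId;                         // direct path (what the commit switches to)
use crate::facade::ast::NodeId as FacadeNodeId; // facade path (breaks once `facade` is deleted)

fn main() {
    let a: NodeId = 1;
    let b: FacadeNodeId = 2;
    println!("{} {}", a, b);
}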

src/libsyntax/parse/diagnostics.rs

+8 -7
@@ -2,8 +2,9 @@ use crate::ast::{
     self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
     Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
 };
-use crate::parse::{SeqSep, token, PResult, Parser};
+use crate::parse::{SeqSep, PResult, Parser};
 use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
+use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::Spanned;
@@ -229,8 +230,8 @@ impl<'a> Parser<'a> {
 
     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::TokenKind],
-        inedible: &[token::TokenKind],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -368,7 +369,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
        let handler = self.diagnostic();
 
        if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ impl<'a> Parser<'a> {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                    ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -726,7 +727,7 @@ impl<'a> Parser<'a> {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::TokenKind,
+        t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
         let token_str = pprust::token_to_string(t);
         let this_token_str = self.this_token_descr();
@@ -903,7 +904,7 @@ impl<'a> Parser<'a> {
 
     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::TokenKind],
+        tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
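
Note: most of the churn in this file (and in quoted.rs and parse/mod.rs) is the same mechanical change: `use crate::parse::token::{self, TokenKind};` brings the type itself into scope, so signatures shorten from `token::TokenKind` to `TokenKind`. A standalone sketch of that import-driven rename, using local stand-in items rather than the real libsyntax ones:

// Local stand-in module; the import is what lets the signature below say
// `TokenKind` instead of `token::TokenKind`.
mod token {
    pub enum TokenKind {
        Comma,
        Semi,
        Eof,
    }
}

use crate::token::TokenKind;

// Before the import this would have been written as:
//     fn is_seq_sep(t: &token::TokenKind) -> bool { ... }
fn is_seq_sep(t: &TokenKind) -> bool {
    match t {
        TokenKind::Comma | TokenKind::Semi => true,
        TokenKind::Eof => false,
    }
}

fn main() {
    assert!(is_seq_sep(&TokenKind::Comma));
    assert!(is_seq_sep(&TokenKind::Semi));
    assert!(!is_seq_sep(&TokenKind::Eof));
}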

src/libsyntax/parse/literal.rs

+2 -1
@@ -272,7 +272,8 @@ impl<'a> Parser<'a> {
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`.
             recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = t.kind {
                     let next_span = self.look_ahead_span(1);
                     if self.span.hi() == next_span.lo() {
                         let s = String::from("0.") + &symbol.as_str();

src/libsyntax/parse/mod.rs

+13 -6
@@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::parse::parser::emit_unclosed_delims;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
 use crate::print::pprust::token_to_string;
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
 /// A sequence separator.
 pub struct SeqSep {
     /// The seperator token.
-    pub sep: Option<token::TokenKind>,
+    pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
 impl SeqSep {
-    pub fn trailing_allowed(t: token::TokenKind) -> SeqSep {
+    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
         SeqSep {
             sep: Some(t),
             trailing_sep_allowed: true,
@@ -426,7 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name_macro_rules, false), ..
+                })),
                 Some(&TokenTree::Token(Token { kind: token::Not, .. })),
                 Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
@@ -446,7 +449,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if first_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,7 +461,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if second_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
