Simplify TokenStream some more #57486


Merged: 4 commits, Jan 19, 2019
Changes from 1 commit
Remove ThinTokenStream.
`TokenStream` is now almost identical to `ThinTokenStream`. This commit
removes the latter, replacing it with the former.
nnethercote committed Jan 13, 2019
commit ce0d9949b817267e88e8d366a8cee929abf1e4ba
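For context, the two types are already structurally equivalent going into this commit. Below is a rough sketch reconstructed from the code this diff deletes from src/libsyntax/tokenstream.rs; `Lrc` and `TreeAndJoint` are stand-ins here (in rustc, `Lrc` is the shared-pointer alias and `TreeAndJoint` pairs a token tree with its jointness), and the derives and impls are omitted:

    use std::rc::Rc as Lrc;   // stand-in for rustc's Lrc alias
    type TreeAndJoint = ();   // placeholder for (TokenTree, IsJoint)

    pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

    pub enum TokenStream {
        Empty,
        Stream(Lrc<Vec<TreeAndJoint>>),
    }

    // Same data either way: None <-> Empty, Some(s) <-> Stream(s).

Because the two representations carry identical data, `TokenTree::Delimited` can hold a `TokenStream` directly, and the call-site changes below are mechanical: `tts.stream()` becomes `tts.clone()`, or just `tts` where the stream can be moved.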
2 changes: 1 addition & 1 deletion src/librustc/ich/impls_syntax.rs
@@ -258,7 +258,7 @@ for tokenstream::TokenTree {
tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
span.hash_stable(hcx, hasher);
std_hash::Hash::hash(&delim, hasher);
- for sub_tt in tts.stream().trees() {
+ for sub_tt in tts.trees() {
sub_tt.hash_stable(hcx, hasher);
}
}
2 changes: 1 addition & 1 deletion src/librustc_lint/builtin.rs
@@ -1540,7 +1540,7 @@ impl KeywordIdents {
_ => {},
}
TokenTree::Delimited(_, _, tts) => {
- self.check_tokens(cx, tts.stream())
+ self.check_tokens(cx, tts)
},
}
}
8 changes: 4 additions & 4 deletions src/libsyntax/ast.rs
@@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi;
use source_map::{dummy_spanned, respan, Spanned};
use symbol::{keywords, Symbol};
use syntax_pos::{Span, DUMMY_SP};
- use tokenstream::{ThinTokenStream, TokenStream};
+ use tokenstream::TokenStream;
use ThinVec;

use rustc_data_structures::fx::FxHashSet;
@@ -1216,7 +1216,7 @@ pub type Mac = Spanned<Mac_>;
pub struct Mac_ {
pub path: Path,
pub delim: MacDelimiter,
- pub tts: ThinTokenStream,
+ pub tts: TokenStream,
}

#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
@@ -1228,13 +1228,13 @@ pub enum MacDelimiter {

impl Mac_ {
pub fn stream(&self) -> TokenStream {
- self.tts.stream()
+ self.tts.clone()
}
}

#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MacroDef {
- pub tokens: ThinTokenStream,
+ pub tokens: TokenStream,
pub legacy: bool,
}

2 changes: 1 addition & 1 deletion src/libsyntax/attr/mod.rs
@@ -565,7 +565,7 @@ impl MetaItemKind {
}
Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
tokens.next();
- tts.stream()
+ tts.clone()
}
_ => return Some(MetaItemKind::Word),
};
2 changes: 1 addition & 1 deletion src/libsyntax/ext/quote.rs
@@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
},
TokenTree::Delimited(span, delim, ref tts) => {
let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
- stmts.extend(statements_mk_tts(cx, tts.stream()));
+ stmts.extend(statements_mk_tts(cx, tts.clone()));
stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
stmts
}
2 changes: 1 addition & 1 deletion src/libsyntax/fold.rs
@@ -598,7 +598,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
delim,
- fld.fold_tts(tts.stream()).into(),
+ fld.fold_tts(tts).into(),
),
}
}
6 changes: 3 additions & 3 deletions src/libsyntax/parse/mod.rs
@@ -811,7 +811,7 @@ mod tests {
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
- let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &macro_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
@@ -820,7 +820,7 @@
Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
)
if macro_delim == token::Paren => {
- let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &first_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
@@ -830,7 +830,7 @@
if first_delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
}
- let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &second_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
12 changes: 6 additions & 6 deletions src/libsyntax/parse/parser.rs
@@ -46,7 +46,7 @@ use print::pprust;
use ptr::P;
use parse::PResult;
use ThinVec;
- use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+ use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
use symbol::{Symbol, keywords};

use std::borrow::Cow;
@@ -285,12 +285,12 @@ enum LastToken {
}

impl TokenCursorFrame {
- fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+ fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim: delim,
span: sp,
open_delim: delim == token::NoDelim,
- tree_cursor: tts.stream().into_trees(),
+ tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
@@ -2325,7 +2325,7 @@ impl<'a> Parser<'a> {
})
}

- fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
let delim = match self.token {
token::OpenDelim(delim) => delim,
_ => {
@@ -2345,7 +2345,7 @@ impl<'a> Parser<'a> {
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
- Ok((delim, tts.stream().into()))
+ Ok((delim, tts.into()))
}

/// At the bottom (top?) of the precedence hierarchy,
@@ -4633,7 +4633,7 @@ impl<'a> Parser<'a> {
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
- TokenTree::Delimited(_, _, tts) => tts.stream(),
+ TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
}
} else if self.check(&token::OpenDelim(token::Paren)) {
2 changes: 1 addition & 1 deletion src/libsyntax/print/pprust.rs
@@ -807,7 +807,7 @@ pub trait PrintState<'a> {
TokenTree::Delimited(_, delim, tts) => {
self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
- self.print_tts(tts.stream())?;
+ self.print_tts(tts)?;
self.writer().space()?;
self.writer().word(token_to_string(&token::CloseDelim(delim)))
},
55 changes: 3 additions & 52 deletions src/libsyntax/tokenstream.rs
@@ -41,7 +41,7 @@ pub enum TokenTree {
/// A single token
Token(Span, token::Token),
/// A delimited sequence of token trees
- Delimited(DelimSpan, DelimToken, ThinTokenStream),
+ Delimited(DelimSpan, DelimToken, TokenStream),
}

impl TokenTree {
@@ -62,8 +62,7 @@ impl TokenTree {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
(&TokenTree::Delimited(_, delim, ref tts),
&TokenTree::Delimited(_, delim2, ref tts2)) => {
- delim == delim2 &&
- tts.stream().eq_unspanned(&tts2.stream())
+ delim == delim2 && tts.eq_unspanned(&tts2)
}
(_, _) => false,
}
@@ -81,8 +80,7 @@
}
(&TokenTree::Delimited(_, delim, ref tts),
&TokenTree::Delimited(_, delim2, ref tts2)) => {
- delim == delim2 &&
- tts.stream().probably_equal_for_proc_macro(&tts2.stream())
+ delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
}
(_, _) => false,
}
@@ -492,41 +490,6 @@ impl Cursor {
}
}

- /// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
- /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
- /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
- #[derive(Debug, Clone)]
- pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
-
- impl ThinTokenStream {
- pub fn stream(&self) -> TokenStream {
- self.clone().into()
- }
- }
-
- impl From<TokenStream> for ThinTokenStream {
- fn from(stream: TokenStream) -> ThinTokenStream {
- ThinTokenStream(match stream {
- TokenStream::Empty => None,
- TokenStream::Stream(stream) => Some(stream),
- })
- }
- }
-
- impl From<ThinTokenStream> for TokenStream {
- fn from(stream: ThinTokenStream) -> TokenStream {
- stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
- }
- }
-
- impl Eq for ThinTokenStream {}
-
- impl PartialEq<ThinTokenStream> for ThinTokenStream {
- fn eq(&self, other: &ThinTokenStream) -> bool {
- TokenStream::from(self.clone()) == TokenStream::from(other.clone())
- }
- }

impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&pprust::tokens_to_string(self.clone()))
@@ -545,18 +508,6 @@ impl Decodable for TokenStream {
}
}

- impl Encodable for ThinTokenStream {
- fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
- TokenStream::from(self.clone()).encode(encoder)
- }
- }
-
- impl Decodable for ThinTokenStream {
- fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
- TokenStream::decode(decoder).map(Into::into)
- }
- }

#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub struct DelimSpan {
pub open: Span,
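The doc comment deleted above is the reason `ThinTokenStream` existed at all: `TokenTree::Delimited` needs indirection somewhere, or `TokenTree` and `TokenStream` would contain each other by value and have infinite size. That concern no longer applies because the surviving `TokenStream` already keeps its trees behind an `Lrc`. A minimal, self-contained model of the recursion (hypothetical stand-in types, not the real libsyntax definitions):

    use std::rc::Rc;

    enum TokenTree {
        Token(u32),             // stand-in for Token(Span, token::Token)
        Delimited(TokenStream), // holds a whole stream by value, like the real variant
    }

    enum TokenStream {
        Empty,
        // The Rc provides the indirection that keeps both types finite even
        // though a stream's elements are themselves TokenTrees.
        Stream(Rc<Vec<TokenTree>>),
    }

    fn main() {
        // Roughly models the group `( x )`: a delimited tree wrapping one token.
        let inner = TokenStream::Stream(Rc::new(vec![TokenTree::Token(0)]));
        let _group = TokenTree::Delimited(inner);
        let _empty = TokenStream::Empty;
    }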
2 changes: 1 addition & 1 deletion src/libsyntax/visit.rs
@@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
match tt {
TokenTree::Token(_, tok) => visitor.visit_token(tok),
- TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
+ TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
}
}
