From 920bed121337538e4dbf58a73750157f5666cb72 Mon Sep 17 00:00:00 2001
From: Aaron Hill
Date: Thu, 22 Oct 2020 10:09:08 -0400
Subject: [PATCH] Don't create an empty `LazyTokenStream`

---
 compiler/rustc_parse/src/parser/attr.rs        |  6 +++---
 compiler/rustc_parse/src/parser/expr.rs        |  2 +-
 compiler/rustc_parse/src/parser/item.rs        |  2 +-
 compiler/rustc_parse/src/parser/mod.rs         | 14 ++++++++++----
 .../rustc_parse/src/parser/nonterminal.rs      | 20 ++++++++++----------
 5 files changed, 25 insertions(+), 19 deletions(-)

diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 20d41d8900f39..e4804cb66162b 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -72,7 +72,7 @@ impl<'a> Parser<'a> {
                 }
             })?;
             if let Some(mut attr) = attr {
-                attr.tokens = Some(tokens);
+                attr.tokens = tokens;
                 attrs.push(attr);
             } else {
                 break;
@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
         };
         if capture_tokens {
             let (mut item, tokens) = self.collect_tokens(do_parse)?;
-            item.tokens = Some(tokens);
+            item.tokens = tokens;
             item
         } else {
             do_parse(self)?
@@ -213,7 +213,7 @@ impl<'a> Parser<'a> {
                 }
             })?;
             if let Some(mut attr) = attr {
-                attr.tokens = Some(tokens);
+                attr.tokens = tokens;
                 attrs.push(attr);
             } else {
                 break;
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 698a7e7d9cde8..a026353d82525 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1116,7 +1116,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         if needs_tokens {
             let (mut expr, tokens) = self.collect_tokens(f)?;
-            expr.tokens = Some(tokens);
+            expr.tokens = tokens;
             Ok(expr)
         } else {
             f(self)
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 4ad259715bd98..e57a2e42b5dde 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
         if let Some(tokens) = tokens {
             if let Some(item) = &mut item {
                 if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
             }
         }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index fb825256d92cd..8ab39b6e6d8fa 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
 
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
 
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
 
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
+
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
 
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
 
     /// `::{` or `::*`
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 121f2699baa47..98fb1c8292510 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -103,7 +103,7 @@ impl<'a> Parser<'a> {
                     // If we captured tokens during parsing (due to outer attributes),
                     // use those.
                     if item.tokens.is_none() {
-                        item.tokens = Some(tokens);
+                        item.tokens = tokens;
                     }
                     token::NtItem(item)
                 }
@@ -115,7 +115,7 @@ impl<'a> Parser<'a> {
                 let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
                 // We have have eaten an NtBlock, which could already have tokens
                 if block.tokens.is_none() {
-                    block.tokens = Some(tokens);
+                    block.tokens = tokens;
                 }
                 token::NtBlock(block)
             }
@@ -124,7 +124,7 @@ impl<'a> Parser<'a> {
                 match stmt {
                     Some(mut s) => {
                         if s.tokens.is_none() {
-                            s.tokens = Some(tokens);
+                            s.tokens = tokens;
                         }
                         token::NtStmt(s)
                     }
@@ -137,7 +137,7 @@ impl<'a> Parser<'a> {
                 let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
                 // We have have eaten an NtPat, which could already have tokens
                 if pat.tokens.is_none() {
-                    pat.tokens = Some(tokens);
+                    pat.tokens = tokens;
                 }
                 token::NtPat(pat)
             }
@@ -146,7 +146,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if expr.tokens.is_none() {
-                    expr.tokens = Some(tokens);
+                    expr.tokens = tokens;
                 }
                 token::NtExpr(expr)
             }
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
                 // We have have eaten a nonterminal, which could already have tokens
                 if lit.tokens.is_none() {
-                    lit.tokens = Some(tokens);
+                    lit.tokens = tokens;
                 }
                 token::NtLiteral(lit)
             }
@@ -163,7 +163,7 @@ impl<'a> Parser<'a> {
                 let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
                 // We have an eaten an NtTy, which could already have tokens
                 if ty.tokens.is_none() {
-                    ty.tokens = Some(tokens);
+                    ty.tokens = tokens;
                 }
                 token::NtTy(ty)
             }
@@ -183,7 +183,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
                 // We have have eaten an NtPath, which could already have tokens
                 if path.tokens.is_none() {
-                    path.tokens = Some(tokens);
+                    path.tokens = tokens;
                 }
                 token::NtPath(path)
             }
@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
                 let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
                 // We may have eaten a nonterminal, which could already have tokens
                 if attr.tokens.is_none() {
-                    attr.tokens = Some(tokens);
+                    attr.tokens = tokens;
                 }
                 token::NtMeta(P(attr))
             }
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
                 // We may have etan an `NtVis`, which could already have tokens
                 if vis.tokens.is_none() {
-                    vis.tokens = Some(tokens);
+                    vis.tokens = tokens;
                 }
                 token::NtVis(vis)
             }
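
The pattern the patch adopts is easiest to see outside the compiler. Below is a minimal, self-contained sketch, not the real rustc code: `Parser`, `Expr`, and `LazyTokenStream` here are simplified stand-ins used only to illustrate why having `collect_tokens` return `Option<LazyTokenStream>` lets every caller write `node.tokens = tokens;` directly, and why an empty capture no longer constructs a `LazyTokenStream` at all.

// Minimal sketch, NOT the real rustc types: `Parser`, `Expr`, and
// `LazyTokenStream` below are simplified stand-ins.

#[derive(Debug)]
struct LazyTokenStream(Vec<String>); // stand-in for the lazily-built stream

#[derive(Debug, Default)]
struct Expr {
    tokens: Option<LazyTokenStream>, // AST nodes store optionally captured tokens
}

struct Parser {
    tokens_consumed: usize,
}

impl Parser {
    // Simplified analogue of the patched `Parser::collect_tokens`:
    // if the callback consumed no tokens, return `None` instead of
    // building an empty stream.
    fn collect_tokens<R>(
        &mut self,
        f: impl FnOnce(&mut Self) -> R,
    ) -> (R, Option<LazyTokenStream>) {
        let calls_before = self.tokens_consumed;
        let ret = f(self);
        let num_calls = self.tokens_consumed - calls_before;
        if num_calls == 0 {
            // We didn't capture any tokens.
            return (ret, None);
        }
        // The real code builds the stream lazily; a plain Vec stands in here.
        (ret, Some(LazyTokenStream(vec![format!("{} tokens", num_calls)])))
    }
}

fn main() {
    let mut parser = Parser { tokens_consumed: 0 };

    // Caller pattern after the patch: assign the `Option` directly,
    // instead of the old `expr.tokens = Some(tokens);`.
    let (mut expr, tokens) = parser.collect_tokens(|_| Expr::default());
    expr.tokens = tokens;
    assert!(expr.tokens.is_none()); // nothing consumed => no stream allocated

    let (mut expr, tokens) = parser.collect_tokens(|this| {
        this.tokens_consumed += 3; // pretend the callback ate three tokens
        Expr::default()
    });
    expr.tokens = tokens;
    assert!(expr.tokens.is_some());
}

In this shape, `None` cheaply distinguishes "nothing was captured" from "a real stream was captured", which matches the commit title: no empty `LazyTokenStream` is ever created, and callers such as the `Nt*` branches can still keep previously captured tokens by only assigning when `tokens.is_none()`.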