@@ -61,6 +61,11 @@ pub enum ForceCollect {
     No,
 }
 
+pub enum TrailingToken {
+    None,
+    Semi,
+}
+
 /// Like `maybe_whole_expr`, but for things other than expressions.
 #[macro_export]
 macro_rules! maybe_whole {
@@ -1225,6 +1230,13 @@ impl<'a> Parser<'a> {
         }
     }
 
+    pub fn collect_tokens<R: HasTokens>(
+        &mut self,
+        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+    ) -> PResult<'a, R> {
+        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
+    }
+
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
     /// into a `LazyTokenStream`, and returned along with the result
@@ -1241,9 +1253,9 @@ impl<'a> Parser<'a> {
     /// This restriction shouldn't be an issue in practice,
     /// since this function is used to record the tokens for
     /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens<R: HasTokens>(
+    pub fn collect_tokens_trailing_token<R: HasTokens>(
         &mut self,
-        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
     ) -> PResult<'a, R> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = TokenCursor {
@@ -1256,7 +1268,7 @@ impl<'a> Parser<'a> {
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
 
-        let mut ret = f(self)?;
+        let (mut ret, trailing_token) = f(self)?;
 
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1275,55 +1287,44 @@ impl<'a> Parser<'a> {
             cursor_snapshot: TokenCursor,
             num_calls: usize,
             desugar_doc_comments: bool,
-            trailing_semi: bool,
             append_unglued_token: Option<TreeAndSpacing>,
         }
         impl CreateTokenStream for LazyTokenStreamImpl {
             fn create_token_stream(&self) -> TokenStream {
-                let mut num_calls = self.num_calls;
-                if self.trailing_semi {
-                    num_calls += 1;
-                }
                 // The token produced by the final call to `next` or `next_desugared`
                 // was not actually consumed by the callback. The combination
                 // of chaining the initial token and using `take` produces the desired
                 // result - we produce an empty `TokenStream` if no calls were made,
                 // and omit the final token otherwise.
                 let mut cursor_snapshot = self.cursor_snapshot.clone();
                 let tokens = std::iter::once(self.start_token.clone())
-                    .chain((0..num_calls).map(|_| {
+                    .chain((0..self.num_calls).map(|_| {
                         if self.desugar_doc_comments {
                             cursor_snapshot.next_desugared()
                         } else {
                             cursor_snapshot.next()
                         }
                     }))
-                    .take(num_calls);
+                    .take(self.num_calls);
 
                 make_token_stream(tokens, self.append_unglued_token.clone())
             }
-            fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
-                if self.trailing_semi {
-                    panic!("Called `add_trailing_semi` twice!");
-                }
-                if self.append_unglued_token.is_some() {
-                    panic!(
-                        "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
-                        self.append_unglued_token
-                    );
-                }
-                let mut new = self.clone();
-                new.trailing_semi = true;
-                Box::new(new)
+        }
+
+        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
+        match trailing_token {
+            TrailingToken::None => {}
+            TrailingToken::Semi => {
+                assert_eq!(self.token.kind, token::Semi);
+                num_calls += 1;
             }
         }
 
         let lazy_impl = LazyTokenStreamImpl {
             start_token,
-            num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
+            num_calls,
             cursor_snapshot,
             desugar_doc_comments: self.desugar_doc_comments,
-            trailing_semi: false,
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
         ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
@@ -1427,9 +1428,9 @@ macro_rules! maybe_collect_tokens {
         if matches!($force_collect, ForceCollect::Yes)
             || $crate::parser::attr::maybe_needs_tokens($attrs)
         {
-            $self.collect_tokens($f)
+            $self.collect_tokens_trailing_token($f)
         } else {
-            Ok($f($self)?.0)
+            Ok($f($self)?.0)
         }
     };
 }
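
A caller-side sketch of the new API, for illustration only. `collect_tokens_trailing_token`, `TrailingToken`, `PResult`, and the `HasTokens` bound come from the diff above; `Foo`, `parse_foo_inner`, and `parse_foo_with_tokens` are made-up placeholders, not part of this change.

impl<'a> Parser<'a> {
    // Hypothetical caller whose grammar ends in a `;` that is consumed later,
    // outside the closure. `Foo` is a placeholder type assumed to implement
    // `HasTokens`; `parse_foo_inner` is a placeholder parse routine.
    fn parse_foo_with_tokens(&mut self) -> PResult<'a, Foo> {
        self.collect_tokens_trailing_token(|this| {
            let foo = this.parse_foo_inner()?;
            // Report the trailing `;` instead of bumping past it here: the
            // collector asserts that the current token is `token::Semi` and
            // adds one to `num_calls` so the `;` ends up in the recorded
            // `LazyTokenStream`.
            Ok((foo, TrailingToken::Semi))
        })
    }
}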