From 73acb5091b7566d6fa1ccb2a963324bd448fa5e9 Mon Sep 17 00:00:00 2001 From: Aaron Hill Date: Tue, 17 Nov 2020 14:27:44 -0500 Subject: [PATCH] Properly handle attributes on statements We now collect tokens for the underlying node wrapped by `StmtKind` instead of storing tokens directly in `Stmt`. `LazyTokenStream` now supports capturing a trailing semicolon after it is initially constructed. This allows us to avoid refactoring statement parsing to wrap the parsing of the semicolon in `parse_tokens`. Attributes on item statements (e.g. `fn foo() { #[bar] struct MyStruct; }`) are now treated as item attributes, not statement attributes, which is consistent with how we handle attributes on other kinds of statements. The feature-gating code is adjusted so that proc-macro attributes are still allowed on item statements on stable. Two built-in macros (`#[global_allocator]` and `#[test]`) needed to be adjusted to support being passed `Annotatable::Stmt`. --- compiler/rustc_ast/src/ast.rs | 48 ++- compiler/rustc_ast/src/mut_visit.rs | 14 +- compiler/rustc_ast/src/tokenstream.rs | 11 + compiler/rustc_ast/src/visit.rs | 2 +- .../src/deriving/debug.rs | 3 +- .../src/global_allocator.rs | 22 +- compiler/rustc_builtin_macros/src/test.rs | 40 ++- compiler/rustc_expand/src/base.rs | 2 - compiler/rustc_expand/src/build.rs | 26 +- compiler/rustc_expand/src/expand.rs | 32 +- compiler/rustc_expand/src/placeholders.rs | 11 +- compiler/rustc_interface/src/util.rs | 2 - compiler/rustc_parse/src/lib.rs | 23 +- compiler/rustc_parse/src/parser/mod.rs | 19 +- .../rustc_parse/src/parser/nonterminal.rs | 4 +- compiler/rustc_parse/src/parser/stmt.rs | 120 ++++--- .../ui/proc-macro/allowed-attr-stmt-expr.rs | 60 ++++ .../proc-macro/allowed-attr-stmt-expr.stdout | 309 ++++++++++++++++++ src/test/ui/proc-macro/attr-stmt-expr.rs | 33 ++ src/test/ui/proc-macro/attr-stmt-expr.stderr | 4 +- src/test/ui/proc-macro/keep-expr-tokens.rs | 7 + .../ui/proc-macro/keep-expr-tokens.stderr | 10 +- .../ui/proc-macro/keep-expr-tokens.stdout | 56 ++++ 23 files changed, 723 insertions(+), 135 deletions(-) create mode 100644 src/test/ui/proc-macro/allowed-attr-stmt-expr.rs create mode 100644 src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout create mode 100644 src/test/ui/proc-macro/keep-expr-tokens.stdout diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 328086af183d6..770ee81bcf147 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -901,10 +901,39 @@ pub struct Stmt { pub id: NodeId, pub kind: StmtKind, pub span: Span, - pub tokens: Option, } impl Stmt { + pub fn tokens(&self) -> Option<&LazyTokenStream> { + match self.kind { + StmtKind::Local(ref local) => local.tokens.as_ref(), + StmtKind::Item(ref item) => item.tokens.as_ref(), + StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mac) => mac.tokens.as_ref(), + } + } + + pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> { + match self.kind { + StmtKind::Local(ref mut local) => local.tokens.as_mut(), + StmtKind::Item(ref mut item) => item.tokens.as_mut(), + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(), + } + } + + pub fn set_tokens(&mut self, tokens: Option) { + match self.kind { + StmtKind::Local(ref mut local) => local.tokens = tokens, + StmtKind::Item(ref mut item) => item.tokens = tokens, + StmtKind::Expr(ref mut 
expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens, + StmtKind::Empty => {} + StmtKind::MacCall(ref mut mac) => mac.tokens = tokens, + } + } + pub fn has_trailing_semicolon(&self) -> bool { match &self.kind { StmtKind::Semi(_) => true, @@ -912,18 +941,25 @@ impl Stmt { _ => false, } } + + /// Converts a parsed `Stmt` to a `Stmt` with + /// a trailing semicolon. + /// + /// This only modifies the parsed AST struct, not the attached + /// `LazyTokenStream`. The parser is responsible for calling + /// `CreateTokenStream::add_trailing_semi` when there is actually + /// a semicolon in the tokenstream. pub fn add_trailing_semicolon(mut self) -> Self { self.kind = match self.kind { StmtKind::Expr(expr) => StmtKind::Semi(expr), StmtKind::MacCall(mac) => { - StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs }| MacCallStmt { - mac, - style: MacStmtStyle::Semicolon, - attrs, + StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs, tokens }| { + MacCallStmt { mac, style: MacStmtStyle::Semicolon, attrs, tokens } })) } kind => kind, }; + self } @@ -963,6 +999,7 @@ pub struct MacCallStmt { pub mac: MacCall, pub style: MacStmtStyle, pub attrs: AttrVec, + pub tokens: Option, } #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)] @@ -988,6 +1025,7 @@ pub struct Local { pub init: Option>, pub span: Span, pub attrs: AttrVec, + pub tokens: Option, } /// An arm of a 'match'. diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index ddae0ab03e404..8dd16ed0fcebe 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -576,13 +576,14 @@ pub fn noop_visit_parenthesized_parameter_data( } pub fn noop_visit_local(local: &mut P, vis: &mut T) { - let Local { id, pat, ty, init, span, attrs } = local.deref_mut(); + let Local { id, pat, ty, init, span, attrs, tokens } = local.deref_mut(); vis.visit_id(id); vis.visit_pat(pat); visit_opt(ty, |ty| vis.visit_ty(ty)); visit_opt(init, |init| vis.visit_expr(init)); vis.visit_span(span); visit_thin_attrs(attrs, vis); + visit_lazy_tts(tokens, vis); } pub fn noop_visit_attribute(attr: &mut Attribute, vis: &mut T) { @@ -1325,16 +1326,12 @@ pub fn noop_filter_map_expr(mut e: P, vis: &mut T) -> Optio } pub fn noop_flat_map_stmt( - Stmt { kind, mut span, mut id, mut tokens }: Stmt, + Stmt { kind, mut span, mut id }: Stmt, vis: &mut T, ) -> SmallVec<[Stmt; 1]> { vis.visit_id(&mut id); vis.visit_span(&mut span); - visit_lazy_tts(&mut tokens, vis); - noop_flat_map_stmt_kind(kind, vis) - .into_iter() - .map(|kind| Stmt { id, kind, span, tokens: tokens.clone() }) - .collect() + noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| Stmt { id, kind, span }).collect() } pub fn noop_flat_map_stmt_kind( @@ -1351,9 +1348,10 @@ pub fn noop_flat_map_stmt_kind( StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(), StmtKind::Empty => smallvec![StmtKind::Empty], StmtKind::MacCall(mut mac) => { - let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut(); + let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut(); vis.visit_mac_call(mac_); visit_thin_attrs(attrs, vis); + visit_lazy_tts(tokens, vis); smallvec![StmtKind::MacCall(mac)] } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index fe67b905bf30f..b2207f2281620 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -121,10 +121,14 @@ where } pub trait CreateTokenStream: sync::Send + 
sync::Sync { + fn add_trailing_semi(&self) -> Box; fn create_token_stream(&self) -> TokenStream; } impl CreateTokenStream for TokenStream { + fn add_trailing_semi(&self) -> Box { + panic!("Cannot call `add_trailing_semi` on a `TokenStream`!"); + } fn create_token_stream(&self) -> TokenStream { self.clone() } @@ -141,6 +145,13 @@ impl LazyTokenStream { LazyTokenStream(Lrc::new(Box::new(inner))) } + /// Extends the captured stream by one token, + /// which must be a trailing semicolon. This + /// affects the `TokenStream` created by `make_tokenstream`. + pub fn add_trailing_semi(&self) -> LazyTokenStream { + LazyTokenStream(Lrc::new(self.0.add_trailing_semi())) + } + pub fn create_token_stream(&self) -> TokenStream { self.0.create_token_stream() } diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 560064182e18d..857220bde233d 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -686,7 +686,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) { StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr), StmtKind::Empty => {} StmtKind::MacCall(ref mac) => { - let MacCallStmt { ref mac, style: _, ref attrs } = **mac; + let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac; visitor.visit_mac_call(mac); for attr in attrs.iter() { visitor.visit_attribute(attr); diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index 9381264f498f3..5c21329069bfc 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -132,6 +132,7 @@ fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P) -> as id: ast::DUMMY_NODE_ID, span: sp, attrs: ast::AttrVec::new(), + tokens: None, }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp, tokens: None } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } } diff --git a/compiler/rustc_builtin_macros/src/global_allocator.rs b/compiler/rustc_builtin_macros/src/global_allocator.rs index 8478fcfbf09a6..e976805d9dd20 100644 --- a/compiler/rustc_builtin_macros/src/global_allocator.rs +++ b/compiler/rustc_builtin_macros/src/global_allocator.rs @@ -4,7 +4,7 @@ use rustc_ast::expand::allocator::{ AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS, }; use rustc_ast::ptr::P; -use rustc_ast::{self as ast, Attribute, Expr, FnHeader, FnSig, Generics, Param}; +use rustc_ast::{self as ast, Attribute, Expr, FnHeader, FnSig, Generics, Param, StmtKind}; use rustc_ast::{ItemKind, Mutability, Stmt, Ty, TyKind, Unsafe}; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; @@ -14,7 +14,7 @@ pub fn expand( ecx: &mut ExtCtxt<'_>, _span: Span, meta_item: &ast::MetaItem, - item: Annotatable, + mut item: Annotatable, ) -> Vec { check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator); @@ -22,6 +22,17 @@ pub fn expand( ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics"); vec![item] }; + let orig_item = item.clone(); + let mut is_stmt = false; + + // Allow using `#[global_allocator]` on an item statement + if let Annotatable::Stmt(stmt) = &item { + if let StmtKind::Item(item_) = &stmt.kind { + item = Annotatable::Item(item_.clone()); + is_stmt = true; + } + } + let item = match item { Annotatable::Item(item) => match item.kind { ItemKind::Static(..) 
=> item, @@ -41,9 +52,14 @@ pub fn expand( let const_ty = ecx.ty(span, TyKind::Tup(Vec::new())); let const_body = ecx.expr_block(ecx.block(span, stmts)); let const_item = ecx.item_const(span, Ident::new(kw::Underscore, span), const_ty, const_body); + let const_item = if is_stmt { + Annotatable::Stmt(P(ecx.stmt_item(span, const_item))) + } else { + Annotatable::Item(const_item) + }; // Return the original item and the new methods. - vec![Annotatable::Item(item), Annotatable::Item(const_item)] + vec![orig_item, const_item] } struct AllocFnFactory<'a, 'b> { diff --git a/compiler/rustc_builtin_macros/src/test.rs b/compiler/rustc_builtin_macros/src/test.rs index 1de0b32f5196b..25d3f46da6cdc 100644 --- a/compiler/rustc_builtin_macros/src/test.rs +++ b/compiler/rustc_builtin_macros/src/test.rs @@ -4,6 +4,7 @@ use crate::util::check_builtin_macro_attribute; use rustc_ast as ast; use rustc_ast::attr; +use rustc_ast::ptr::P; use rustc_ast_pretty::pprust; use rustc_expand::base::*; use rustc_session::Session; @@ -78,8 +79,16 @@ pub fn expand_test_or_bench( return vec![]; } - let item = match item { - Annotatable::Item(i) => i, + let (item, is_stmt) = match item { + Annotatable::Item(i) => (i, false), + Annotatable::Stmt(stmt) if matches!(stmt.kind, ast::StmtKind::Item(_)) => { + // FIXME: Use an 'if let' guard once they are implemented + if let ast::StmtKind::Item(i) = stmt.into_inner().kind { + (i, true) + } else { + unreachable!() + } + } other => { cx.struct_span_err( other.span(), @@ -304,14 +313,25 @@ pub fn expand_test_or_bench( tracing::debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); - vec![ - // Access to libtest under a hygienic name - Annotatable::Item(test_extern), - // The generated test case - Annotatable::Item(test_const), - // The original item - Annotatable::Item(item), - ] + if is_stmt { + vec![ + // Access to libtest under a hygienic name + Annotatable::Stmt(P(cx.stmt_item(sp, test_extern))), + // The generated test case + Annotatable::Stmt(P(cx.stmt_item(sp, test_const))), + // The original item + Annotatable::Stmt(P(cx.stmt_item(sp, item))), + ] + } else { + vec![ + // Access to libtest under a hygienic name + Annotatable::Item(test_extern), + // The generated test case + Annotatable::Item(test_const), + // The original item + Annotatable::Item(item), + ] + } } fn item_path(mod_path: &[Ident], item_ident: &Ident) -> String { diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index b435def87ac84..7b4b81dc56023 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -366,7 +366,6 @@ macro_rules! 
make_stmts_default { id: ast::DUMMY_NODE_ID, span: e.span, kind: ast::StmtKind::Expr(e), - tokens: None }] }) }; @@ -609,7 +608,6 @@ impl MacResult for DummyResult { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)), span: self.span, - tokens: None }]) } diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 30f0fc6cddfa2..fe67b401fccf9 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -140,12 +140,7 @@ impl<'a> ExtCtxt<'a> { } pub fn stmt_expr(&self, expr: P) -> ast::Stmt { - ast::Stmt { - id: ast::DUMMY_NODE_ID, - span: expr.span, - kind: ast::StmtKind::Expr(expr), - tokens: None, - } + ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) } } pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P) -> ast::Stmt { @@ -162,13 +157,9 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span: sp, attrs: AttrVec::new(), - }); - ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Local(local), - span: sp, tokens: None, - } + }); + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } } // Generates `let _: Type;`, which is usually used for type assertions. @@ -180,17 +171,13 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span, attrs: AttrVec::new(), + tokens: None, }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span, tokens: None } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span } } pub fn stmt_item(&self, sp: Span, item: P) -> ast::Stmt { - ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Item(item), - span: sp, - tokens: None, - } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp } } pub fn block_expr(&self, expr: P) -> P { @@ -200,7 +187,6 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr), - tokens: None, }], ) } diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 1b31bd6a30510..b01a5b0f84f08 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -806,7 +806,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { | Annotatable::TraitItem(_) | Annotatable::ImplItem(_) | Annotatable::ForeignItem(_) => return, - Annotatable::Stmt(_) => "statements", + Annotatable::Stmt(stmt) => { + // Attributes are stable on item statements, + // but unstable on all other kinds of statements + if stmt.is_item() { + return; + } + "statements" + } Annotatable::Expr(_) => "expressions", Annotatable::Arm(..) | Annotatable::Field(..) @@ -1354,21 +1361,16 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { // we'll expand attributes on expressions separately if !stmt.is_expr() { - let (attr, derives, after_derive) = if stmt.is_item() { - // FIXME: Handle custom attributes on statements (#15701) - (None, vec![], false) - } else { - // ignore derives on non-item statements so it falls through - // to the unused-attributes lint - let (attr, after_derive) = self.classify_nonitem(&mut stmt); - (attr, vec![], after_derive) - }; + // ignore derives here - if this is an item, we will + // handle them we when process the `StmtKind`. If this isn't + // an item, then it will be ignored. 
+ let (attr, after_derive) = self.classify_nonitem(&mut stmt); - if attr.is_some() || !derives.is_empty() { + if attr.is_some() { return self .collect_attr( attr, - derives, + vec![], Annotatable::Stmt(P(stmt)), AstFragmentKind::Stmts, after_derive, @@ -1378,7 +1380,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } if let StmtKind::MacCall(mac) = stmt.kind { - let MacCallStmt { mac, style, attrs } = mac.into_inner(); + let MacCallStmt { mac, style, attrs, tokens: _ } = mac.into_inner(); self.check_attributes(&attrs); let mut placeholder = self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts).make_stmts(); @@ -1395,10 +1397,10 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } // The placeholder expander gives ids to statements, so we avoid folding the id here. - let ast::Stmt { id, kind, span, tokens } = stmt; + let ast::Stmt { id, kind, span } = stmt; noop_flat_map_stmt_kind(kind, self) .into_iter() - .map(|kind| ast::Stmt { id, kind, span, tokens: tokens.clone() }) + .map(|kind| ast::Stmt { id, kind, span }) .collect() } diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index f0e5826f403fb..ce19e813bb3a6 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -104,8 +104,9 @@ pub fn placeholder( mac: mac_placeholder(), style: ast::MacStmtStyle::Braces, attrs: ast::AttrVec::new(), + tokens: None, }); - ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac), tokens: None } + ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) } }]), AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm { attrs: Default::default(), @@ -331,12 +332,8 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> { // FIXME: We will need to preserve the original semicolon token and // span as part of #15701 - let empty_stmt = ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Empty, - span: DUMMY_SP, - tokens: None, - }; + let empty_stmt = + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Empty, span: DUMMY_SP }; if let Some(stmt) = stmts.pop() { if stmt.has_trailing_semicolon() { diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 20a7b47313ecf..399492e694697 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -807,7 +807,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> { id: resolver.next_node_id(), kind: ast::StmtKind::Expr(expr), span: rustc_span::DUMMY_SP, - tokens: None, } } @@ -824,7 +823,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> { id: self.resolver.next_node_id(), span: rustc_span::DUMMY_SP, kind: ast::StmtKind::Expr(loop_expr), - tokens: None, }; if self.within_static_or_const { diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index f125a12147af5..5a2bfc21dfbd7 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -7,6 +7,7 @@ #![feature(or_patterns)] use rustc_ast as ast; +use rustc_ast::attr::HasAttrs; use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; @@ -250,29 +251,23 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // before we fall back to the stringification. 
let convert_tokens = - |tokens: &Option| tokens.as_ref().map(|t| t.create_token_stream()); + |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream()); let tokens = match *nt { Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), - Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens), - Nonterminal::NtStmt(ref stmt) => { - // FIXME: We currently only collect tokens for `:stmt` - // matchers in `macro_rules!` macros. When we start collecting - // tokens for attributes on statements, we will need to prepend - // attributes here - convert_tokens(&stmt.tokens) - } - Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens), - Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens), + Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()), + Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()), + Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()), + Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()), Nonterminal::NtIdent(ident, is_raw) => { Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into()) } Nonterminal::NtLifetime(ident) => { Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into()) } - Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens), - Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens), - Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens), + Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()), + Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()), + Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()), Nonterminal::NtTT(ref tt) => Some(tt.clone().into()), Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => { if expr.tokens.is_none() { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 40aa2db58c720..2a779c37b8962 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -1213,14 +1213,20 @@ impl<'a> Parser<'a> { // // This also makes `Parser` very cheap to clone, since // there is no intermediate collection buffer to clone. + #[derive(Clone)] struct LazyTokenStreamImpl { start_token: (Token, Spacing), cursor_snapshot: TokenCursor, num_calls: usize, desugar_doc_comments: bool, + trailing_semi: bool, } impl CreateTokenStream for LazyTokenStreamImpl { fn create_token_stream(&self) -> TokenStream { + let mut num_calls = self.num_calls; + if self.trailing_semi { + num_calls += 1; + } // The token produced by the final call to `next` or `next_desugared` // was not actually consumed by the callback. The combination // of chaining the initial token and using `take` produces the desired @@ -1228,17 +1234,25 @@ impl<'a> Parser<'a> { // and omit the final token otherwise. 
let mut cursor_snapshot = self.cursor_snapshot.clone(); let tokens = std::iter::once(self.start_token.clone()) - .chain((0..self.num_calls).map(|_| { + .chain((0..num_calls).map(|_| { if self.desugar_doc_comments { cursor_snapshot.next_desugared() } else { cursor_snapshot.next() } })) - .take(self.num_calls); + .take(num_calls); make_token_stream(tokens) } + fn add_trailing_semi(&self) -> Box { + if self.trailing_semi { + panic!("Called `add_trailing_semi` twice!"); + } + let mut new = self.clone(); + new.trailing_semi = true; + Box::new(new) + } } let lazy_impl = LazyTokenStreamImpl { @@ -1246,6 +1260,7 @@ impl<'a> Parser<'a> { num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls, cursor_snapshot, desugar_doc_comments: self.desugar_doc_comments, + trailing_semi: false, }; Ok((ret, Some(LazyTokenStream::new(lazy_impl)))) } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index ab88362dad954..c007f96a79800 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -117,8 +117,8 @@ impl<'a> Parser<'a> { let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?; match stmt { Some(mut s) => { - if s.tokens.is_none() { - s.tokens = tokens; + if s.tokens().is_none() { + s.set_tokens(tokens); } token::NtStmt(s) } diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 131ff1ae6b3da..93bafeaef48ae 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -7,8 +7,10 @@ use super::{BlockMode, Parser, Restrictions, SemiColonMode}; use crate::maybe_whole; use rustc_ast as ast; +use rustc_ast::attr::HasAttrs; use rustc_ast::ptr::P; use rustc_ast::token::{self, TokenKind}; +use rustc_ast::tokenstream::LazyTokenStream; use rustc_ast::util::classify; use rustc_ast::{AttrStyle, AttrVec, Attribute, MacCall, MacCallStmt, MacStmtStyle}; use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID}; @@ -31,45 +33,75 @@ impl<'a> Parser<'a> { } fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option> { - maybe_whole!(self, NtStmt, |x| Some(x)); - - let attrs = self.parse_outer_attributes()?; + let mut attrs = self.parse_outer_attributes()?; + let has_attrs = !attrs.is_empty(); let lo = self.token.span; - let stmt = if self.eat_keyword(kw::Let) { - self.parse_local_mk(lo, attrs.into())? - } else if self.is_kw_followed_by_ident(kw::Mut) { - self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")? - } else if self.is_kw_followed_by_ident(kw::Auto) { - self.bump(); // `auto` - let msg = "write `let` instead of `auto` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.into(), msg, "let")? - } else if self.is_kw_followed_by_ident(sym::var) { - self.bump(); // `var` - let msg = "write `let` instead of `var` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.into(), msg, "let")? - } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() { - // We have avoided contextual keywords like `union`, items with `crate` visibility, - // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something - // that starts like a path (1 token), but it fact not a path. - // Also, we avoid stealing syntax from `parse_item_`. - self.parse_stmt_path_start(lo, attrs)? - } else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? 
{ - // FIXME: Bad copy of attrs - self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item))) - } else if self.eat(&token::Semi) { - // Do not attempt to parse an expression if we're done here. - self.error_outer_attrs(&attrs); - self.mk_stmt(lo, StmtKind::Empty) - } else if self.token != token::CloseDelim(token::Brace) { - // Remainder are line-expr stmts. - let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?; - self.mk_stmt(lo.to(e.span), StmtKind::Expr(e)) + maybe_whole!(self, NtStmt, |stmt| { + let mut stmt = stmt; + stmt.visit_attrs(|stmt_attrs| { + mem::swap(stmt_attrs, &mut attrs); + stmt_attrs.extend(attrs); + }); + Some(stmt) + }); + + let parse_stmt_inner = |this: &mut Self| { + let stmt = if this.eat_keyword(kw::Let) { + this.parse_local_mk(lo, attrs.into())? + } else if this.is_kw_followed_by_ident(kw::Mut) { + this.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")? + } else if this.is_kw_followed_by_ident(kw::Auto) { + this.bump(); // `auto` + let msg = "write `let` instead of `auto` to introduce a new variable"; + this.recover_stmt_local(lo, attrs.into(), msg, "let")? + } else if this.is_kw_followed_by_ident(sym::var) { + this.bump(); // `var` + let msg = "write `let` instead of `var` to introduce a new variable"; + this.recover_stmt_local(lo, attrs.into(), msg, "let")? + } else if this.check_path() + && !this.token.is_qpath_start() + && !this.is_path_start_item() + { + // We have avoided contextual keywords like `union`, items with `crate` visibility, + // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something + // that starts like a path (1 token), but it fact not a path. + // Also, we avoid stealing syntax from `parse_item_`. + this.parse_stmt_path_start(lo, attrs)? + } else if let Some(item) = + this.parse_item_common(attrs.clone(), false, true, |_| true)? + { + // FIXME: Bad copy of attrs + this.mk_stmt(lo.to(item.span), StmtKind::Item(P(item))) + } else if this.eat(&token::Semi) { + // Do not attempt to parse an expression if we're done here. + this.error_outer_attrs(&attrs); + this.mk_stmt(lo, StmtKind::Empty) + } else if this.token != token::CloseDelim(token::Brace) { + // Remainder are line-expr stmts. + let e = this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?; + this.mk_stmt(lo.to(e.span), StmtKind::Expr(e)) + } else { + this.error_outer_attrs(&attrs); + return Ok(None); + }; + Ok(Some(stmt)) + }; + + let stmt = if has_attrs { + let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?; + if let Some(stmt) = &mut stmt { + // If we already have tokens (e.g. due to encounting an `NtStmt`), + // use those instead. + if stmt.tokens().is_none() { + stmt.set_tokens(tokens); + } + } + stmt } else { - self.error_outer_attrs(&attrs); - return Ok(None); + parse_stmt_inner(self)? }; - Ok(Some(stmt)) + Ok(stmt) } fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec) -> PResult<'a, Stmt> { @@ -107,7 +139,7 @@ impl<'a> Parser<'a> { let kind = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof { - StmtKind::MacCall(P(MacCallStmt { mac, style, attrs })) + StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None })) } else { // Since none of the above applied, this is an expression statement macro. 
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new()); @@ -219,7 +251,7 @@ impl<'a> Parser<'a> { } }; let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span }; - Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs })) + Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None })) } /// Parses the RHS of a local variable declaration (e.g., '= 14;'). @@ -376,6 +408,12 @@ impl<'a> Parser<'a> { None => return Ok(None), }; + let add_semi_token = |tokens: Option<&mut LazyTokenStream>| { + if let Some(tokens) = tokens { + *tokens = tokens.add_trailing_semi(); + } + }; + let mut eat_semi = true; match stmt.kind { // Expression without semicolon. @@ -430,6 +468,9 @@ impl<'a> Parser<'a> { } } eat_semi = false; + // We just checked that there's a semicolon in the tokenstream, + // so capture it + add_semi_token(local.tokens.as_mut()); } StmtKind::Empty => eat_semi = false, _ => {} @@ -437,6 +478,9 @@ impl<'a> Parser<'a> { if eat_semi && self.eat(&token::Semi) { stmt = stmt.add_trailing_semicolon(); + // We just checked that we have a semicolon in the tokenstream, + // so capture it + add_semi_token(stmt.tokens_mut()); } stmt.span = stmt.span.to(self.prev_token.span); Ok(Some(stmt)) @@ -447,7 +491,7 @@ impl<'a> Parser<'a> { } pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt { - Stmt { id: DUMMY_NODE_ID, kind, span, tokens: None } + Stmt { id: DUMMY_NODE_ID, kind, span } } pub(super) fn mk_stmt_err(&self, span: Span) -> Stmt { diff --git a/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs b/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs new file mode 100644 index 0000000000000..72aacb886b7e3 --- /dev/null +++ b/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs @@ -0,0 +1,60 @@ +// aux-build:attr-stmt-expr.rs +// aux-build:test-macros.rs +// compile-flags: -Z span-debug +// check-pass + +#![feature(proc_macro_hygiene)] +#![feature(stmt_expr_attributes)] +#![allow(dead_code)] + +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; + +extern crate attr_stmt_expr; +extern crate test_macros; +use attr_stmt_expr::{expect_let, expect_print_stmt, expect_expr, expect_print_expr}; +use test_macros::print_attr; +use std::println; + +fn print_str(string: &'static str) { + // macros are handled a bit differently + #[expect_print_expr] + println!("{}", string) +} + +macro_rules! make_stmt { + ($stmt:stmt) => { + #[print_attr] + #[allow(unused)] + $stmt + } +} + +macro_rules! 
second_make_stmt { + ($stmt:stmt) => { + make_stmt!($stmt); + } +} + + +fn main() { + make_stmt!(struct Foo {}); + + #[print_attr] + #[expect_let] + let string = "Hello, world!"; + + #[print_attr] + #[expect_print_stmt] + println!("{}", string); + + #[print_attr] + second_make_stmt!(#[allow(dead_code)] struct Bar {}); + + #[print_attr] + #[allow(unused)] + struct Other {}; + + #[expect_expr] + print_str("string") +} diff --git a/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout b/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout new file mode 100644 index 0000000000000..97dff63e2eb23 --- /dev/null +++ b/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout @@ -0,0 +1,309 @@ +PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] struct Foo { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:28:9: 28:10 (#12), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:28:11: 28:16 (#12), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: $DIR/allowed-attr-stmt-expr.rs:28:17: 28:23 (#12), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:28:16: 28:24 (#12), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:28:10: 28:25 (#12), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:41:16: 41:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/allowed-attr-stmt-expr.rs:41:23: 41:26 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:41:27: 41:29 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:44:5: 44:6 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "expect_let", + span: $DIR/allowed-attr-stmt-expr.rs:44:7: 44:17 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:44:6: 44:18 (#0), + }, + Ident { + ident: "let", + span: $DIR/allowed-attr-stmt-expr.rs:45:5: 45:8 (#0), + }, + Ident { + ident: "string", + span: $DIR/allowed-attr-stmt-expr.rs:45:9: 45:15 (#0), + }, + Punct { + ch: '=', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:45:16: 45:17 (#0), + }, + Literal { + kind: Str, + symbol: "Hello, world!", + suffix: None, + span: $DIR/allowed-attr-stmt-expr.rs:45:18: 45:33 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:45:33: 45:34 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! 
("{}", string) ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:48:5: 48:6 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "expect_print_stmt", + span: $DIR/allowed-attr-stmt-expr.rs:48:7: 48:24 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:48:6: 48:25 (#0), + }, + Ident { + ident: "println", + span: $DIR/allowed-attr-stmt-expr.rs:49:5: 49:12 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:49:12: 49:13 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Str, + symbol: "{}", + suffix: None, + span: $DIR/allowed-attr-stmt-expr.rs:49:14: 49:18 (#0), + }, + Punct { + ch: ',', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:49:18: 49:19 (#0), + }, + Ident { + ident: "string", + span: $DIR/allowed-attr-stmt-expr.rs:49:20: 49:26 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:49:13: 49:27 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:49:27: 49:28 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "second_make_stmt", + span: $DIR/allowed-attr-stmt-expr.rs:52:5: 52:21 (#0), + }, + Punct { + ch: '!', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:52:21: 52:22 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:52:23: 52:24 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:52:25: 52:30 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "dead_code", + span: $DIR/allowed-attr-stmt-expr.rs:52:31: 52:40 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:52:30: 52:41 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:52:24: 52:42 (#0), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:52:43: 52:49 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/allowed-attr-stmt-expr.rs:52:50: 52:53 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:52:54: 52:56 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:52:22: 52:57 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:52:57: 52:58 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] #[allow(dead_code)] struct Bar { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:28:9: 28:10 (#34), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:28:11: 28:16 (#34), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: $DIR/allowed-attr-stmt-expr.rs:28:17: 28:23 (#34), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:28:16: 28:24 (#34), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:28:10: 28:25 (#34), + }, + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:52:23: 52:24 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:52:25: 52:30 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "dead_code", + span: 
$DIR/allowed-attr-stmt-expr.rs:52:31: 52:40 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:52:30: 52:41 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:52:24: 52:42 (#0), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:52:43: 52:49 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/allowed-attr-stmt-expr.rs:52:50: 52:53 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:52:54: 52:56 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] struct Other { } ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:55:5: 55:6 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:55:7: 55:12 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: $DIR/allowed-attr-stmt-expr.rs:55:13: 55:19 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:55:12: 55:20 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:55:6: 55:21 (#0), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:56:5: 56:11 (#0), + }, + Ident { + ident: "Other", + span: $DIR/allowed-attr-stmt-expr.rs:56:12: 56:17 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:56:18: 56:20 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:56:20: 56:21 (#0), + }, +] diff --git a/src/test/ui/proc-macro/attr-stmt-expr.rs b/src/test/ui/proc-macro/attr-stmt-expr.rs index 14a392db4e118..f17784e026f61 100644 --- a/src/test/ui/proc-macro/attr-stmt-expr.rs +++ b/src/test/ui/proc-macro/attr-stmt-expr.rs @@ -1,8 +1,16 @@ // aux-build:attr-stmt-expr.rs +// aux-build:test-macros.rs +// compile-flags: -Z span-debug #![feature(proc_macro_hygiene)] +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; +extern crate test_macros; extern crate attr_stmt_expr; + +use test_macros::print_attr; +use std::println; use attr_stmt_expr::{expect_let, expect_print_stmt, expect_expr, expect_print_expr}; fn print_str(string: &'static str) { @@ -13,13 +21,38 @@ fn print_str(string: &'static str) { println!("{}", string) } +macro_rules! make_stmt { + ($stmt:stmt) => { + #[print_attr] + #[allow(unused)] + $stmt + } +} + +macro_rules! 
second_make_stmt { + ($stmt:stmt) => { + make_stmt!($stmt); + } +} + fn main() { + make_stmt!(struct Foo {}); + + #[print_attr] #[expect_let] let string = "Hello, world!"; + #[print_attr] #[expect_print_stmt] println!("{}", string); + #[print_attr] + second_make_stmt!(#[allow(dead_code)] struct Bar {}); + + #[print_attr] + #[allow(unused)] + struct Other {} + #[expect_expr] //~^ ERROR attributes on expressions are experimental //~| HELP add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable diff --git a/src/test/ui/proc-macro/attr-stmt-expr.stderr b/src/test/ui/proc-macro/attr-stmt-expr.stderr index 0d6f247cf8359..9c244ffb3a10e 100644 --- a/src/test/ui/proc-macro/attr-stmt-expr.stderr +++ b/src/test/ui/proc-macro/attr-stmt-expr.stderr @@ -1,5 +1,5 @@ error[E0658]: attributes on expressions are experimental - --> $DIR/attr-stmt-expr.rs:10:5 + --> $DIR/attr-stmt-expr.rs:18:5 | LL | #[expect_print_expr] | ^^^^^^^^^^^^^^^^^^^^ @@ -8,7 +8,7 @@ LL | #[expect_print_expr] = help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable error[E0658]: attributes on expressions are experimental - --> $DIR/attr-stmt-expr.rs:23:5 + --> $DIR/attr-stmt-expr.rs:56:5 | LL | #[expect_expr] | ^^^^^^^^^^^^^^ diff --git a/src/test/ui/proc-macro/keep-expr-tokens.rs b/src/test/ui/proc-macro/keep-expr-tokens.rs index 888785363cfe6..5aab2816cb312 100644 --- a/src/test/ui/proc-macro/keep-expr-tokens.rs +++ b/src/test/ui/proc-macro/keep-expr-tokens.rs @@ -1,8 +1,11 @@ // aux-build:test-macros.rs +// compile-flags: -Z span-debug #![feature(stmt_expr_attributes)] #![feature(proc_macro_hygiene)] +#![no_std] // Don't load unnecessary hygiene information from std + extern crate test_macros; use test_macros::recollect_attr; @@ -12,4 +15,8 @@ fn main() { for item in missing_fn() {} //~ ERROR cannot find (#[recollect_attr] #[recollect_attr] ((#[recollect_attr] bad))); //~ ERROR cannot + + #[test_macros::print_attr] + #[allow(unused)] + { 1 +1; } // Don't change the weird spacing of the '+' } diff --git a/src/test/ui/proc-macro/keep-expr-tokens.stderr b/src/test/ui/proc-macro/keep-expr-tokens.stderr index 2be8c0184da1c..755d3ffca5b50 100644 --- a/src/test/ui/proc-macro/keep-expr-tokens.stderr +++ b/src/test/ui/proc-macro/keep-expr-tokens.stderr @@ -1,15 +1,19 @@ error[E0425]: cannot find function `missing_fn` in this scope - --> $DIR/keep-expr-tokens.rs:12:17 + --> $DIR/keep-expr-tokens.rs:15:17 | LL | for item in missing_fn() {} | ^^^^^^^^^^ not found in this scope error[E0425]: cannot find value `bad` in this scope - --> $DIR/keep-expr-tokens.rs:14:62 + --> $DIR/keep-expr-tokens.rs:17:62 | LL | (#[recollect_attr] #[recollect_attr] ((#[recollect_attr] bad))); | ^^^ not found in this scope -error: aborting due to 2 previous errors +error: language item required, but not found: `eh_personality` + +error: `#[panic_handler]` function required, but not found + +error: aborting due to 4 previous errors For more information about this error, try `rustc --explain E0425`. 
diff --git a/src/test/ui/proc-macro/keep-expr-tokens.stdout b/src/test/ui/proc-macro/keep-expr-tokens.stdout new file mode 100644 index 0000000000000..f9ee6aef48dda --- /dev/null +++ b/src/test/ui/proc-macro/keep-expr-tokens.stdout @@ -0,0 +1,56 @@ +PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] { 1 + 1 ; } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/keep-expr-tokens.rs:20:5: 20:6 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/keep-expr-tokens.rs:20:7: 20:12 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: $DIR/keep-expr-tokens.rs:20:13: 20:19 (#0), + }, + ], + span: $DIR/keep-expr-tokens.rs:20:12: 20:20 (#0), + }, + ], + span: $DIR/keep-expr-tokens.rs:20:6: 20:21 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/keep-expr-tokens.rs:21:7: 21:8 (#0), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/keep-expr-tokens.rs:21:9: 21:10 (#0), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/keep-expr-tokens.rs:21:10: 21:11 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/keep-expr-tokens.rs:21:11: 21:12 (#0), + }, + ], + span: $DIR/keep-expr-tokens.rs:21:5: 21:14 (#0), + }, +]
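
For illustration only, separate from the patch content above: a minimal sketch of the kind of code the `#[global_allocator]` adjustment is meant to keep working. With this change, an attribute on an item statement is passed to the macro as `Annotatable::Stmt`, so the built-in macro re-wraps its generated allocator shims as statements. The snippet assumes a compiler that includes this patch; everything it uses (`std::alloc::System`, `#[global_allocator]`, `Box::new`) is existing standard API.

use std::alloc::System;

fn main() {
    // `#[global_allocator]` applied to an item statement inside a function
    // body: the attribute now reaches the built-in macro as a statement
    // annotatable, and the expansion is emitted as statements as well.
    #[global_allocator]
    static GLOBAL: System = System;

    // Heap allocations go through `GLOBAL` like any other global allocator.
    let _boxed = Box::new(42);
}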