Auto merge of rust-lang#77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov

Unconditionally capture tokens for attributes.

This allows us to avoid synthesizing tokens in `prepend_attrs`, since we
have the original tokens available.

We still need to synthesize tokens when expanding `cfg_attr`,
but this is an unavoidable consequence of the syntax of `cfg_attr` -
the user does not supply the `#` and `[]` tokens that a `cfg_attr`
expands to.
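
To make the problem concrete, here is an illustrative example (the type and the feature name are hypothetical, not taken from this PR):

```rust
// When `feature = "extra-debug"` is enabled, this expands to
// `#[derive(Debug)]` on `Point`. The source contains tokens for the
// predicate and for `derive(Debug)`, but not for the `#` and `[]` of the
// expanded attribute, so those two tokens must be synthesized.
#[cfg_attr(feature = "extra-debug", derive(Debug))]
struct Point {
    x: i32,
    y: i32,
}
```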

This is based on PR rust-lang#77250; this PR exposes a bug in the current `collect_tokens` implementation, which the rewrite in rust-lang#77250 fixes.
bors committed Oct 24, 2020
2 parents 89fdb30 + 5c7d8d0 commit ffa2e7a
Showing 19 changed files with 251 additions and 138 deletions.
1 change: 1 addition & 0 deletions compiler/rustc_ast/src/ast.rs
@@ -2423,6 +2423,7 @@ pub struct Attribute {
     /// or the construct this attribute is contained within (inner).
     pub style: AttrStyle,
     pub span: Span,
+    pub tokens: Option<LazyTokenStream>,
 }
 
 #[derive(Clone, Encodable, Decodable, Debug)]
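
For context, a `LazyTokenStream` defers materializing captured tokens until something actually asks for them. A minimal self-contained sketch of that pattern (names and shapes are assumed for illustration; this is not the rustc definition):

```rust
use std::rc::Rc;

// Stand-in for a real token stream: just a list of token spellings.
type Tokens = Vec<String>;

// Sketch of the lazy pattern: either the tokens were already materialized,
// or a stored closure produces them on first request.
#[derive(Clone)]
enum LazyTokens {
    Ready(Tokens),
    Lazy(Rc<dyn Fn() -> Tokens>),
}

impl LazyTokens {
    fn into_token_stream(self) -> Tokens {
        match self {
            LazyTokens::Ready(tokens) => tokens,
            LazyTokens::Lazy(make) => make(),
        }
    }
}

fn main() {
    // An attribute's captured tokens, produced only when requested.
    let attr_tokens: LazyTokens =
        LazyTokens::Lazy(Rc::new(|| vec!["#".to_string(), "[inline]".to_string()]));
    assert_eq!(attr_tokens.into_token_stream(), vec!["#", "[inline]"]);
}
```
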
10 changes: 8 additions & 2 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -325,7 +325,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attribute {
 }
 
 pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
-    Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span }
+    Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None }
 }
 
 /// Returns an inner attribute with the given value and span.
@@ -344,7 +344,13 @@ pub fn mk_doc_comment(
     data: Symbol,
     span: Span,
 ) -> Attribute {
-    Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span }
+    Attribute {
+        kind: AttrKind::DocComment(comment_kind, data),
+        id: mk_attr_id(),
+        style,
+        span,
+        tokens: None,
+    }
 }
 
 pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/mut_visit.rs
@@ -577,7 +577,7 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
 }
 
 pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
-    let Attribute { kind, id: _, style: _, span } = attr;
+    let Attribute { kind, id: _, style: _, span, tokens: _ } = attr;
     match kind {
         AttrKind::Normal(AttrItem { path, args, tokens: _ }) => {
             vis.visit_path(path);
8 changes: 5 additions & 3 deletions compiler/rustc_ast_lowering/src/expr.rs
@@ -210,9 +210,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     ex.span = e.span;
                 }
                 // Merge attributes into the inner expression.
-                let mut attrs = e.attrs.clone();
+                let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
                 attrs.extend::<Vec<_>>(ex.attrs.into());
-                ex.attrs = attrs;
+                ex.attrs = attrs.into();
                 return ex;
             }
 
@@ -1471,13 +1471,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
             hir::MatchSource::ForLoopDesugar,
         ));
 
+        let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
+
         // This is effectively `{ let _result = ...; _result }`.
         // The construct was introduced in #21984 and is necessary to make sure that
         // temporaries in the `head` expression are dropped and do not leak to the
         // surrounding scope of the `match` since the `match` is not a terminating scope.
         //
         // Also, add the attributes to the outer returned expr node.
-        self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone())
+        self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
     }
 
     /// Desugar `ExprKind::Try` from: `<expr>?` into:
5 changes: 3 additions & 2 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -972,7 +972,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data),
         };
 
-        Attribute { kind, id: attr.id, style: attr.style, span: attr.span }
+        // Tokens aren't needed after macro expansion and parsing.
+        Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None }
     }
 
     fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
@@ -1713,7 +1714,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 pat: self.lower_pat(&l.pat),
                 init,
                 span: l.span,
-                attrs: l.attrs.clone(),
+                attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
                 source: hir::LocalSource::Normal,
             },
             ids,
2 changes: 1 addition & 1 deletion compiler/rustc_builtin_macros/src/cmdline_attrs.rs
@@ -15,7 +15,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
     );
 
     let start_span = parser.token.span;
-    let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item() {
+    let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) {
         Ok(ai) => ai,
         Err(mut err) => {
            err.emit();
35 changes: 34 additions & 1 deletion compiler/rustc_expand/src/config.rs
@@ -3,10 +3,13 @@
 use rustc_ast::attr::HasAttrs;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
+use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
 use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::map_in_place::MapInPlace;
+use rustc_data_structures::sync::Lrc;
 use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
 use rustc_feature::{Feature, Features, State as FeatureState};
 use rustc_feature::{
@@ -289,7 +292,37 @@ impl<'a> StripUnconfigured<'a> {
         expanded_attrs
             .into_iter()
             .flat_map(|(item, span)| {
-                let attr = attr::mk_attr_from_item(attr.style, item, span);
+                let orig_tokens =
+                    attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr));
+
+                // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
+                // and producing an attribute of the form `#[attr]`. We
+                // have captured tokens for `attr` itself, but we need to
+                // synthesize tokens for the wrapper `#` and `[]`, which
+                // we do below.
+
+                // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
+                // for `attr` when we expand it to `#[attr]`.
+                let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
+                if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
+                    panic!("Bad tokens for attribute {:?}", attr);
+                }
+                // We don't really have a good span to use for the synthesized `[]`
+                // in `#[attr]`, so just use the span of the `#` token.
+                let bracket_group = TokenTree::Delimited(
+                    DelimSpan::from_single(pound_token.span()),
+                    DelimToken::Bracket,
+                    item.tokens
+                        .clone()
+                        .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
+                        .into_token_stream(),
+                );
+
+                let mut attr = attr::mk_attr_from_item(attr.style, item, span);
+                attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
+                    (pound_token, Spacing::Alone),
+                    (bracket_group, Spacing::Alone),
+                ]))));
                 self.process_cfg_attr(attr)
             })
             .collect()
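
To see what the synthesis above produces, here is a toy model of the token-level transformation using plain strings (the function and token representation are mine for illustration; the real code builds rustc `TokenTree`s with real spans):

```rust
// Toy model of `cfg_attr` expansion at the token level.
// `orig` holds the captured tokens of `#[cfg_attr(pred, attr)]`,
// `inner` the captured tokens of the inner `attr` item.
fn expand_cfg_attr(orig: Vec<String>, inner: Vec<String>) -> Vec<String> {
    // Reuse the original `#` token so the result keeps a real span.
    let pound = orig.into_iter().next().expect("missing tokens");
    assert_eq!(pound, "#", "bad tokens for attribute");
    // Synthesize the `[...]` wrapper that the user never wrote.
    let bracket_group = format!("[{}]", inner.join(" "));
    vec![pound, bracket_group]
}

fn main() {
    let orig = vec!["#".to_string(), "[cfg_attr(pred, attr)]".to_string()];
    let inner = vec!["attr".to_string()];
    assert_eq!(expand_cfg_attr(orig, inner), vec!["#", "[attr]"]);
}
```
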
1 change: 1 addition & 0 deletions compiler/rustc_expand/src/expand.rs
@@ -1785,6 +1785,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
                    span: at.span,
                    id: at.id,
                    style: at.style,
+                   tokens: None,
                };
            } else {
                noop_visit_attribute(at, self)
72 changes: 70 additions & 2 deletions compiler/rustc_interface/src/passes.rs
@@ -2,8 +2,9 @@ use crate::interface::{Compiler, Result};
 use crate::proc_macro_decls;
 use crate::util;
 
-use rustc_ast::mut_visit::MutVisitor;
-use rustc_ast::{self as ast, visit};
+use rustc_ast::mut_visit::{self, MutVisitor};
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, token, visit};
 use rustc_codegen_ssa::back::link::emit_metadata;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::sync::{par_iter, Lrc, OnceCell, ParallelIterator, WorkerLocal};
@@ -36,6 +37,7 @@ use rustc_span::symbol::Symbol;
 use rustc_span::{FileName, RealFileName};
 use rustc_trait_selection::traits;
 use rustc_typeck as typeck;
+use smallvec::SmallVec;
 use tracing::{info, warn};
 
 use rustc_serialize::json;
@@ -50,6 +52,64 @@ use std::path::PathBuf;
 use std::rc::Rc;
 use std::{env, fs, iter, mem};
 
+/// Removes all `LazyTokenStream`s from an AST struct.
+/// Normally, this is done during AST lowering. However,
+/// printing the AST JSON requires us to serialize
+/// the entire AST, and we don't want to serialize
+/// a `LazyTokenStream`.
+struct TokenStripper;
+impl mut_visit::MutVisitor for TokenStripper {
+    fn flat_map_item(&mut self, mut i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        i.tokens = None;
+        mut_visit::noop_flat_map_item(i, self)
+    }
+    fn visit_block(&mut self, b: &mut P<ast::Block>) {
+        b.tokens = None;
+        mut_visit::noop_visit_block(b, self);
+    }
+    fn flat_map_stmt(&mut self, mut stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        stmt.tokens = None;
+        mut_visit::noop_flat_map_stmt(stmt, self)
+    }
+    fn visit_pat(&mut self, p: &mut P<ast::Pat>) {
+        p.tokens = None;
+        mut_visit::noop_visit_pat(p, self);
+    }
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
+        ty.tokens = None;
+        mut_visit::noop_visit_ty(ty, self);
+    }
+    fn visit_attribute(&mut self, attr: &mut ast::Attribute) {
+        attr.tokens = None;
+        if let ast::AttrKind::Normal(ast::AttrItem { tokens, .. }) = &mut attr.kind {
+            *tokens = None;
+        }
+        mut_visit::noop_visit_attribute(attr, self);
+    }
+
+    fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) {
+        if let token::Nonterminal::NtMeta(meta) = nt {
+            meta.tokens = None;
+        }
+        // Handles all of the other cases.
+        mut_visit::noop_visit_interpolated(nt, self);
+    }
+
+    fn visit_path(&mut self, p: &mut ast::Path) {
+        p.tokens = None;
+        mut_visit::noop_visit_path(p, self);
+    }
+    fn visit_vis(&mut self, vis: &mut ast::Visibility) {
+        vis.tokens = None;
+        mut_visit::noop_visit_vis(vis, self);
+    }
+    fn visit_expr(&mut self, e: &mut P<ast::Expr>) {
+        e.tokens = None;
+        mut_visit::noop_visit_expr(e, self);
+    }
+    fn visit_mac(&mut self, _mac: &mut ast::MacCall) {}
+}
+
 pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
     let krate = sess.time("parse_crate", || match input {
         Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
@@ -59,6 +119,10 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
     })?;
 
     if sess.opts.debugging_opts.ast_json_noexpand {
+        // Set any `tokens` fields to `None` before
+        // we display the AST.
+        let mut krate = krate.clone();
+        TokenStripper.visit_crate(&mut krate);
         println!("{}", json::as_json(&krate));
     }
 
@@ -379,6 +443,10 @@ fn configure_and_expand_inner<'a>(
     }
 
     if sess.opts.debugging_opts.ast_json {
+        // Set any `tokens` fields to `None` before
+        // we display the AST.
+        let mut krate = krate.clone();
+        TokenStripper.visit_crate(&mut krate);
         println!("{}", json::as_json(&krate));
     }
 
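
The stripping above leans on `MutVisitor`'s default methods, which just walk children; overriding a method lets `TokenStripper` clear the `tokens` field at each node and then resume the default traversal. A standalone analogue of the pattern (own toy types, not the rustc API):

```rust
// Toy AST node with optional captured tokens.
struct Node {
    tokens: Option<Vec<String>>,
    children: Vec<Node>,
}

// Visitor whose default method only walks children, mirroring the
// `noop_visit_*` helpers the real MutVisitor provides.
trait MutVisit {
    fn visit_node(&mut self, node: &mut Node) {
        walk_children(self, node);
    }
}

fn walk_children<V: MutVisit + ?Sized>(visitor: &mut V, node: &mut Node) {
    for child in &mut node.children {
        visitor.visit_node(child);
    }
}

struct StripTokens;
impl MutVisit for StripTokens {
    fn visit_node(&mut self, node: &mut Node) {
        node.tokens = None; // strip at this node...
        walk_children(self, node); // ...then keep the default traversal
    }
}

fn main() {
    let mut root = Node {
        tokens: Some(vec!["#".to_string()]),
        children: vec![Node { tokens: Some(vec!["fn".to_string()]), children: vec![] }],
    };
    StripTokens.visit_node(&mut root);
    assert!(root.tokens.is_none());
    assert!(root.children[0].tokens.is_none());
}
```
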
3 changes: 2 additions & 1 deletion compiler/rustc_middle/src/ich/impls_syntax.rs
@@ -40,11 +40,12 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> {
         debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name)));
         debug_assert!(!attr.is_doc_comment());
 
-        let ast::Attribute { kind, id: _, style, span } = attr;
+        let ast::Attribute { kind, id: _, style, span, tokens } = attr;
         if let ast::AttrKind::Normal(item) = kind {
             item.hash_stable(self, hasher);
             style.hash_stable(self, hasher);
             span.hash_stable(self, hasher);
+            tokens.as_ref().expect_none("Tokens should have been removed during lowering!");
         } else {
             unreachable!();
         }
55 changes: 8 additions & 47 deletions compiler/rustc_parse/src/lib.rs
@@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
     let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-        }
+        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
@@ -279,7 +277,7 @@
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span)
+            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
         }
     };
 
@@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
 }
 
 fn prepend_attrs(
-    sess: &ParseSess,
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
-    span: rustc_span::Span,
 ) -> Option<tokenstream::TokenStream> {
     let tokens = tokens?.clone().into_token_stream();
     if attrs.is_empty() {
@@ -619,47 +615,12 @@
             ast::AttrStyle::Outer,
             "inner attributes should prevent cached tokens from existing"
         );
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-
-        let item = match attr.kind {
-            ast::AttrKind::Normal(ref item) => item,
-            ast::AttrKind::DocComment(..) => {
-                let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-                builder.push(stream);
-                continue;
-            }
-        };
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() {
-            let ident = item.path.segments[0].ident;
-            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(item.args.outer_tokens());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
-        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span,
-            token::DelimToken::Bracket,
-            brackets.build(),
-        ));
+        builder.push(
+            attr.tokens
+                .clone()
+                .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
+                .into_token_stream(),
+        );
     }
     builder.push(tokens.clone());
     Some(builder.build())
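
With every attribute now carrying its own captured tokens, prepending attribute tokens reduces to concatenation. A toy model of the new flow (string tokens, illustrative only; the real code uses rustc's `TokenStreamBuilder`):

```rust
// Toy model: prepend each attribute's own captured tokens to the
// item's tokens, with no re-parsing or span guessing.
fn prepend_attrs(attrs: &[Vec<String>], item: Vec<String>) -> Vec<String> {
    let mut out: Vec<String> = Vec::new();
    for attr_tokens in attrs {
        out.extend(attr_tokens.iter().cloned());
    }
    out.extend(item);
    out
}

fn main() {
    let attrs = vec![vec!["#".to_string(), "[inline]".to_string()]];
    let item = vec!["fn".to_string(), "f".to_string(), "()".to_string(), "{}".to_string()];
    let expected = vec!["#", "[inline]", "fn", "f", "()", "{}"];
    assert_eq!(prepend_attrs(&attrs, item), expected);
}
```
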
(The diff for the remaining 8 changed files is not shown.)