From 98c8e0a05dd7b1ecbbda28c1d01e05c1e41b1638 Mon Sep 17 00:00:00 2001 From: cgswords Date: Thu, 4 Aug 2016 12:20:01 -0700 Subject: [PATCH] Proc_macro is alive --- mk/crates.mk | 8 +- src/libproc_macro/Cargo.toml | 15 + src/libproc_macro/build.rs | 89 ++++ src/libproc_macro/lib.rs | 137 +++++ src/libproc_macro/parse.rs | 26 + src/libproc_macro/prelude.rs | 12 + src/libproc_macro/qquote.rs | 470 ++++++++++++++++++ src/librustc_driver/Cargo.toml | 3 +- src/libsyntax/ext/proc_macro_shim.rs | 69 +++ src/libsyntax/lib.rs | 1 + src/libsyntax/tokenstream.rs | 6 + src/rustc/Cargo.lock | 11 + .../auxiliary/cond_noprelude_plugin.rs | 65 +++ .../auxiliary/cond_plugin.rs | 66 +++ .../auxiliary/cond_prelude_plugin.rs | 60 +++ src/test/run-pass-fulldeps/macro-quote-1.rs | 28 ++ .../run-pass-fulldeps/macro-quote-cond.rs | 54 ++ .../macro-quote-noprelude.rs | 54 ++ .../run-pass-fulldeps/macro-quote-prelude.rs | 54 ++ src/tools/tidy/src/cargo.rs | 6 + 20 files changed, 1230 insertions(+), 4 deletions(-) create mode 100644 src/libproc_macro/Cargo.toml create mode 100644 src/libproc_macro/build.rs create mode 100644 src/libproc_macro/lib.rs create mode 100644 src/libproc_macro/parse.rs create mode 100644 src/libproc_macro/prelude.rs create mode 100644 src/libproc_macro/qquote.rs create mode 100644 src/libsyntax/ext/proc_macro_shim.rs create mode 100644 src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs create mode 100644 src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs create mode 100644 src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs create mode 100644 src/test/run-pass-fulldeps/macro-quote-1.rs create mode 100644 src/test/run-pass-fulldeps/macro-quote-cond.rs create mode 100644 src/test/run-pass-fulldeps/macro-quote-noprelude.rs create mode 100644 src/test/run-pass-fulldeps/macro-quote-prelude.rs diff --git a/mk/crates.mk b/mk/crates.mk index 0bd0c70bd0519..5ff6d7a89dbe0 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -60,7 +60,7 @@ RUSTC_CRATES := rustc 
rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_ rustc_data_structures rustc_platform_intrinsics rustc_errors \ rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \ rustc_const_eval rustc_const_math rustc_incremental -HOST_CRATES := syntax syntax_ext syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \ +HOST_CRATES := syntax syntax_ext proc_macro syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \ flate arena graphviz rbml log serialize TOOLS := compiletest rustdoc rustc rustbook error_index_generator @@ -100,6 +100,7 @@ DEPS_test := std getopts term native:rust_test_helpers DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros +DEPS_proc_macro := syntax syntax_pos rustc_plugin log DEPS_syntax_pos := serialize DEPS_rustc_const_math := std syntax log serialize @@ -114,8 +115,9 @@ DEPS_rustc_borrowck := rustc log graphviz syntax syntax_pos rustc_errors rustc_m DEPS_rustc_data_structures := std log serialize DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \ rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \ - rustc_trans rustc_privacy rustc_lint rustc_plugin \ - rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval \ + rustc_trans rustc_privacy rustc_lint rustc_plugin \ + rustc_metadata syntax_ext proc_macro \ + rustc_passes rustc_save_analysis rustc_const_eval \ rustc_incremental syntax_pos rustc_errors DEPS_rustc_errors := log libc serialize syntax_pos DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml new file mode 100644 index 0000000000000..99fb1d65cda90 --- /dev/null +++ b/src/libproc_macro/Cargo.toml @@ -0,0 +1,15 @@ +[package] +authors = ["The Rust Project Developers"] +name = "proc_macro" +version = "0.0.0" + +[lib] +name = "proc_macro" +path = "lib.rs" +crate-type = ["dylib"] 
+ +[dependencies] +log = { path = "../liblog" } +rustc_plugin = { path = "../librustc_plugin" } +syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro/build.rs b/src/libproc_macro/build.rs new file mode 100644 index 0000000000000..7b7590b863b71 --- /dev/null +++ b/src/libproc_macro/build.rs @@ -0,0 +1,89 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern crate syntax; +extern crate syntax_pos; + +use syntax::ast::Ident; +use syntax::codemap::DUMMY_SP; +use syntax::parse::token::{self, Token, keywords, str_to_ident}; +use syntax::tokenstream::{self, TokenTree, TokenStream}; +use std::rc::Rc; + +/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and +/// provide TokenStream concatenation as a generic operator. +pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream { + TokenStream::concat(ts1, ts2) +} + +/// Checks if two identifiers have the same name, disregarding context. This allows us to +/// fake 'reserved' keywords. +// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). von Tander 2007 is +// probably the easiest way to do that. +pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool { + let tid = match *tident { + TokenTree::Token(_, Token::Ident(ref id)) => id, + _ => { + return false; + } + }; + + tid.name == id.name +} + +// ____________________________________________________________________________________________ +// Conversion operators + +/// Convert a `&str` into a Token. 
+pub fn str_to_token_ident(s: &str) -> Token { + Token::Ident(str_to_ident(s)) +} + +/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that +/// corresponds to it. +pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { + Token::Ident(str_to_ident(&kw.name().as_str()[..])) +} + +// ____________________________________________________________________________________________ +// Build Procedures + +/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified +/// delimiter. +pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream { + let tts = ts.to_tts(); + TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP, + Rc::new(tokenstream::Delimited { + delim: delim, + open_span: DUMMY_SP, + tts: tts, + close_span: DUMMY_SP, + }))]) +} + +/// Takes `ts` and returns `[ts]`. +pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream { + build_delimited(ts, token::DelimToken::Bracket) +} + +/// Takes `ts` and returns `{ts}`. +pub fn build_brace_delimited(ts: TokenStream) -> TokenStream { + build_delimited(ts, token::DelimToken::Brace) +} + +/// Takes `ts` and returns `(ts)`. +pub fn build_paren_delimited(ts: TokenStream) -> TokenStream { + build_delimited(ts, token::DelimToken::Paren) +} + +/// Constructs `()`. +pub fn build_empty_args() -> TokenStream { + build_paren_delimited(TokenStream::mk_empty()) +} diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs new file mode 100644 index 0000000000000..9e25cb88e015c --- /dev/null +++ b/src/libproc_macro/lib.rs @@ -0,0 +1,137 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Proc_Macro +//! +//! 
A library for procedural macro writers. +//! +//! ## Usage +//! This package provides the `qquote!` macro for syntax creation, and the prelude +//! (at libproc_macro::prelude) provides a number of operations: +//! - `concat`, for concatenating two TokenStreams. +//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context. +//! - `str_to_token_ident`, for converting an `&str` into a Token. +//! - `keyword_to_token_ident`, for converting a `parse::token::keywords::Keyword` into a +//! Token. +//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter +//! by wrapping the TokenStream in the delimiter. +//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for +//! easing the above. +//! - `build_empty_args`, which returns a TokenStream containing `()`. +//! - `lex`, which takes an `&str` and returns the TokenStream it represents. +//! +//! The `qquote!` macro also imports `syntax::ext::proc_macro_shim::prelude::*`, so you +//! will need to `extern crate syntax` for usage. (This is a temporary solution until more +//! of the external API in libproc_macro is stabilized to support the token construction +//! operations that the quasiquoter relies on.) The shim file also provides additional +//! operations, such as `build_block_emitter` (as used in the `cond` example below). +//! +//! ## TokenStreams +//! +//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of +//! TokenTrees, where indexing treats delimited values as a single term. That is, the term +//! `even(a+c) && even(b)` will be indexably encoded as `even | (a+c) | even | (b)` where, +//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`. +//! +//! If a user has a TokenStream that is a single, delimited value, they can use +//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream +//! as: +//! ``` +//!
`(a+c)`.maybe_delimited() ~> Some(a | + | c)` +//! ``` +//! +//! Check the TokenStream documentation for more information; the structure also provides +//! cheap concatenation and slicing. +//! +//! ## Quasiquotation +//! +//! The quasiquoter creates output that, when run, constructs the tokenstream specified as +//! input. For example, `qquote!(5 + 5)` will produce a program, that, when run, will +//! construct the TokenStream `5 | + | 5`. +//! +//! ### Unquoting +//! +//! Unquoting is currently done as `unquote`, and works by taking the single next +//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works +//! fine, but `unquote foo` is also supported. +//! +//! A simple example might be: +//! +//!``` +//!fn double(tmp: TokenStream) -> TokenStream { +//! qquote!(unquote(tmp) * 2) +//!} +//!``` +//! +//! ### Large Example: Implementing Scheme's `cond` +//! +//! Below is the full implementation of Scheme's `cond` operator. +//! +//! ``` +//! fn cond_rec(input: TokenStream) -> TokenStream { +//! if input.is_empty() { return quote!(); } +//! +//! let next = input.slice(0..1); +//! let rest = input.slice_from(1..); +//! +//! let clause : TokenStream = match next.maybe_delimited() { +//! Some(ts) => ts, +//! _ => panic!("Invalid input"), +//! }; +//! +//! // clause is ([test]) [rhs] +//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } +//! +//! let test: TokenStream = clause.slice(0..1); +//! let rhs: TokenStream = clause.slice_from(1..); +//! +//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { +//! quote!({unquote(rhs)}) +//! } else { +//! quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) +//! } +//! } +//! ``` +//! 
+ +#![crate_name = "proc_macro"] +#![unstable(feature = "rustc_private", issue = "27812")] +#![feature(plugin_registrar)] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(staged_api)] +#![feature(rustc_diagnostic_macros)] +#![feature(rustc_private)] + +extern crate rustc_plugin; +extern crate syntax; +extern crate syntax_pos; +#[macro_use] extern crate log; + +mod qquote; +pub mod build; +pub mod parse; +pub mod prelude; +use qquote::qquote; + +use rustc_plugin::Registry; + +// ____________________________________________________________________________________________ +// Main macro definition + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("qquote", qquote); +} diff --git a/src/libproc_macro/parse.rs b/src/libproc_macro/parse.rs new file mode 100644 index 0000000000000..9af8a68cdcf49 --- /dev/null +++ b/src/libproc_macro/parse.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Parsing utilities for writing procedural macros. + +extern crate syntax; + +use syntax::parse::{ParseSess, filemap_to_tts}; +use syntax::tokenstream::TokenStream; + +/// Map a string to tts, using a made-up filename. For example, `lex(15)` will return a +/// TokenStream containing the literal 15. 
+pub fn lex(source_str: &str) -> TokenStream { + let ps = ParseSess::new(); + TokenStream::from_tts(filemap_to_tts(&ps, + ps.codemap().new_filemap("procmacro_lex".to_string(), + None, + source_str.to_owned()))) +} diff --git a/src/libproc_macro/prelude.rs b/src/libproc_macro/prelude.rs new file mode 100644 index 0000000000000..4c0c8ba6c6684 --- /dev/null +++ b/src/libproc_macro/prelude.rs @@ -0,0 +1,12 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub use build::*; +pub use parse::*; diff --git a/src/libproc_macro/qquote.rs b/src/libproc_macro/qquote.rs new file mode 100644 index 0000000000000..67d0c77b00d83 --- /dev/null +++ b/src/libproc_macro/qquote.rs @@ -0,0 +1,470 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Quasiquoter +//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. +//! +//! ## Output +//! The quasiquoter produces output of the form: +//! let tmp0 = ...; +//! let tmp1 = ...; +//! ... +//! concat(from_tokens(...), concat(...)) +//! +//! To be more explicit, the quasiquoter produces a series of bindings that each +//! construct TokenStreams via constructing Tokens and using `from_tokens`, ultimately +//! invoking `concat` on these bindings (and inlined expressions) to construct a +//! TokenStream that resembles the output syntax. +//!
+ +extern crate rustc_plugin; +extern crate syntax; +extern crate syntax_pos; + +use build::*; +use parse::lex; +use qquote::int_build::*; + +use syntax::ast::Ident; +use syntax::codemap::Span; +use syntax::ext::base::*; +use syntax::ext::base; +use syntax::ext::proc_macro_shim::build_block_emitter; +use syntax::parse::token::{self, Token, gensym_ident, str_to_ident}; +use syntax::print::pprust; +use syntax::tokenstream::{TokenTree, TokenStream}; + +// ____________________________________________________________________________________________ +// Main definition +/// The user should use the macro, not this procedure. +pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + + debug!("\nTTs in: {:?}\n", pprust::tts_to_string(&tts[..])); + let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned())); + debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..])); + let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"), + lex("use proc_macro::prelude::*;")); + build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output))) +} + +// ____________________________________________________________________________________________ +// Datatype Definitions + +#[derive(Debug)] +struct QDelimited { + delim: token::DelimToken, + open_span: Span, + tts: Vec, + close_span: Span, +} + +#[derive(Debug)] +enum QTT { + TT(TokenTree), + QDL(QDelimited), + QIdent(TokenTree), +} + +type Bindings = Vec<(Ident, TokenStream)>; + +// ____________________________________________________________________________________________ +// Quasiquoter Algorithm +// This algorithm works as follows: +// Input: TokenStream +// 1. Walk the TokenStream, gathering up the unquoted expressions and marking them separately. +// 2. Hoist any unquoted term into its own let-binding via a gensym'd identifier +// 3. Convert the body from a `complex expression` into a simplified one via `convert_complex_tts +// 4. 
Stitch everything together with `concat`. +fn qquoter<'cx>(cx: &'cx mut ExtCtxt, ts: TokenStream) -> TokenStream { + if ts.is_empty() { + return lex("TokenStream::mk_empty()"); + } + let qq_res = qquote_iter(cx, 0, ts); + let mut bindings = qq_res.0; + let body = qq_res.1; + let mut cct_res = convert_complex_tts(cx, body); + + bindings.append(&mut cct_res.0); + + if bindings.is_empty() { + cct_res.1 + } else { + debug!("BINDINGS"); + for b in bindings.clone() { + debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..])); + } + TokenStream::concat(unravel(bindings), cct_res.1) + } +} + +fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindings, Vec) { + let mut depth = depth; + let mut bindings: Bindings = Vec::new(); + let mut output: Vec = Vec::new(); + + let mut iter = ts.iter(); + + loop { + let next = iter.next(); + if next.is_none() { + break; + } + let next = next.unwrap().clone(); + match next { + TokenTree::Token(_, Token::Ident(id)) if is_unquote(id) => { + if depth == 0 { + let exp = iter.next(); + if exp.is_none() { + break; + } // produce an error or something first + let exp = vec![exp.unwrap().to_owned()]; + debug!("RHS: {:?}", exp.clone()); + let new_id = gensym_ident("tmp"); + debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone())); + debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec()); + bindings.push((new_id, TokenStream::from_tts(exp))); + debug!("BINDINGS"); + for b in bindings.clone() { + debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..])); + } + output.push(QTT::QIdent(as_tt(Token::Ident(new_id.clone())))); + } else { + depth = depth - 1; + output.push(QTT::TT(next.clone())); + } + } + TokenTree::Token(_, Token::Ident(id)) if is_qquote(id) => { + depth = depth + 1; + } + TokenTree::Delimited(_, ref dl) => { + let br = qquote_iter(cx, depth, TokenStream::from_tts(dl.tts.clone().to_owned())); + let mut bind_ = br.0; + let res_ = br.1; + bindings.append(&mut bind_); + + let 
new_dl = QDelimited { + delim: dl.delim, + open_span: dl.open_span, + tts: res_, + close_span: dl.close_span, + }; + + output.push(QTT::QDL(new_dl)); + } + t => { + output.push(QTT::TT(t)); + } + } + } + + (bindings, output) +} + +// ____________________________________________________________________________________________ +// Turns QTTs into a TokenStream and some Bindings. +/// Construct a chain of concatenations. +fn unravel_concats(tss: Vec) -> TokenStream { + let mut pushes: Vec = + tss.into_iter().filter(|&ref ts| !ts.is_empty()).collect(); + let mut output = match pushes.pop() { + Some(ts) => ts, + None => { + return TokenStream::mk_empty(); + } + }; + + while let Some(ts) = pushes.pop() { + output = build_fn_call(str_to_ident("concat"), + concat(concat(ts, + from_tokens(vec![Token::Comma])), + output)); + } + output +} + +/// This converts the vector of QTTs into a set of Bindings for construction and the main +/// body as a TokenStream. +fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec) -> (Bindings, TokenStream) { + let mut pushes: Vec = Vec::new(); + let mut bindings: Bindings = Vec::new(); + + let mut iter = tts.into_iter(); + + loop { + let next = iter.next(); + if next.is_none() { + break; + } + let next = next.unwrap(); + match next { + QTT::TT(TokenTree::Token(_, t)) => { + let token_out = emit_token(t); + pushes.push(token_out); + } + // FIXME handle sequence repetition tokens + QTT::QDL(qdl) => { + debug!(" QDL: {:?} ", qdl.tts); + let new_id = gensym_ident("qdl_tmp"); + let mut cct_rec = convert_complex_tts(cx, qdl.tts); + bindings.append(&mut cct_rec.0); + bindings.push((new_id, cct_rec.1)); + + let sep = build_delim_tok(qdl.delim); + + pushes.push(build_mod_call(vec![str_to_ident("proc_macro"), + str_to_ident("build"), + str_to_ident("build_delimited")], + concat(from_tokens(vec![Token::Ident(new_id)]), + concat(lex(","), sep)))); + } + QTT::QIdent(t) => { + pushes.push(TokenStream::from_tts(vec![t])); +
pushes.push(TokenStream::mk_empty()); + } + _ => panic!("Unhandled case!"), + } + + } + + (bindings, unravel_concats(pushes)) +} + +// ____________________________________________________________________________________________ +// Utilities + +/// Unravels Bindings into a TokenStream of `let` declarations. +fn unravel(binds: Bindings) -> TokenStream { + let mut output = TokenStream::mk_empty(); + + for b in binds { + output = concat(output, build_let(b.0, b.1)); + } + + output +} + +/// Checks if the Ident is `unquote`. +fn is_unquote(id: Ident) -> bool { + let qq = str_to_ident("unquote"); + id.name == qq.name // We disregard context; unquote is _reserved_ +} + +/// Checks if the Ident is `quote`. +fn is_qquote(id: Ident) -> bool { + let qq = str_to_ident("qquote"); + id.name == qq.name // We disregard context; qquote is _reserved_ +} + +mod int_build { + extern crate syntax; + extern crate syntax_pos; + + use parse::*; + use build::*; + + use syntax::ast::{self, Ident}; + use syntax::codemap::{DUMMY_SP}; + use syntax::parse::token::{self, Token, keywords, str_to_ident}; + use syntax::tokenstream::{TokenTree, TokenStream}; + + // ____________________________________________________________________________________________ + // Emitters + + pub fn emit_token(t: Token) -> TokenStream { + concat(lex("TokenStream::from_tokens"), + build_paren_delimited(build_vec(build_token_tt(t)))) + } + + pub fn emit_lit(l: token::Lit, n: Option) -> TokenStream { + let suf = match n { + Some(n) => format!("Some(ast::Name({}))", n.0), + None => "None".to_string(), + }; + + let lit = match l { + token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()), + token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()), + token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()), + token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()), + token::Lit::Str_(n) => 
format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()), + token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()), + _ => panic!("Unsupported literal"), + }; + + let res = format!("Token::Literal({},{})", lit, suf); + debug!("{}", res); + lex(&res) + } + + // ____________________________________________________________________________________________ + // Token Builders + + pub fn build_binop_tok(bot: token::BinOpToken) -> TokenStream { + match bot { + token::BinOpToken::Plus => lex("Token::BinOp(BinOpToken::Plus)"), + token::BinOpToken::Minus => lex("Token::BinOp(BinOpToken::Minus)"), + token::BinOpToken::Star => lex("Token::BinOp(BinOpToken::Star)"), + token::BinOpToken::Slash => lex("Token::BinOp(BinOpToken::Slash)"), + token::BinOpToken::Percent => lex("Token::BinOp(BinOpToken::Percent)"), + token::BinOpToken::Caret => lex("Token::BinOp(BinOpToken::Caret)"), + token::BinOpToken::And => lex("Token::BinOp(BinOpToken::And)"), + token::BinOpToken::Or => lex("Token::BinOp(BinOpToken::Or)"), + token::BinOpToken::Shl => lex("Token::BinOp(BinOpToken::Shl)"), + token::BinOpToken::Shr => lex("Token::BinOp(BinOpToken::Shr)"), + } + } + + pub fn build_binopeq_tok(bot: token::BinOpToken) -> TokenStream { + match bot { + token::BinOpToken::Plus => lex("Token::BinOpEq(BinOpToken::Plus)"), + token::BinOpToken::Minus => lex("Token::BinOpEq(BinOpToken::Minus)"), + token::BinOpToken::Star => lex("Token::BinOpEq(BinOpToken::Star)"), + token::BinOpToken::Slash => lex("Token::BinOpEq(BinOpToken::Slash)"), + token::BinOpToken::Percent => lex("Token::BinOpEq(BinOpToken::Percent)"), + token::BinOpToken::Caret => lex("Token::BinOpEq(BinOpToken::Caret)"), + token::BinOpToken::And => lex("Token::BinOpEq(BinOpToken::And)"), + token::BinOpToken::Or => lex("Token::BinOpEq(BinOpToken::Or)"), + token::BinOpToken::Shl => lex("Token::BinOpEq(BinOpToken::Shl)"), + token::BinOpToken::Shr => lex("Token::BinOpEq(BinOpToken::Shr)"), + } + } + + pub fn 
build_delim_tok(dt: token::DelimToken) -> TokenStream { + match dt { + token::DelimToken::Paren => lex("DelimToken::Paren"), + token::DelimToken::Bracket => lex("DelimToken::Bracket"), + token::DelimToken::Brace => lex("DelimToken::Brace"), + token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), + } + } + + pub fn build_token_tt(t: Token) -> TokenStream { + match t { + Token::Eq => lex("Token::Eq"), + Token::Lt => lex("Token::Lt"), + Token::Le => lex("Token::Le"), + Token::EqEq => lex("Token::EqEq"), + Token::Ne => lex("Token::Ne"), + Token::Ge => lex("Token::Ge"), + Token::Gt => lex("Token::Gt"), + Token::AndAnd => lex("Token::AndAnd"), + Token::OrOr => lex("Token::OrOr"), + Token::Not => lex("Token::Not"), + Token::Tilde => lex("Token::Tilde"), + Token::BinOp(tok) => build_binop_tok(tok), + Token::BinOpEq(tok) => build_binopeq_tok(tok), + Token::At => lex("Token::At"), + Token::Dot => lex("Token::Dot"), + Token::DotDot => lex("Token::DotDot"), + Token::DotDotDot => lex("Token::DotDotDot"), + Token::Comma => lex("Token::Comma"), + Token::Semi => lex("Token::Semi"), + Token::Colon => lex("Token::Colon"), + Token::ModSep => lex("Token::ModSep"), + Token::RArrow => lex("Token::RArrow"), + Token::LArrow => lex("Token::LArrow"), + Token::FatArrow => lex("Token::FatArrow"), + Token::Pound => lex("Token::Pound"), + Token::Dollar => lex("Token::Dollar"), + Token::Question => lex("Token::Question"), + Token::OpenDelim(dt) => { + match dt { + token::DelimToken::Paren => lex("Token::OpenDelim(DelimToken::Paren)"), + token::DelimToken::Bracket => lex("Token::OpenDelim(DelimToken::Bracket)"), + token::DelimToken::Brace => lex("Token::OpenDelim(DelimToken::Brace)"), + token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), + } + } + Token::CloseDelim(dt) => { + match dt { + token::DelimToken::Paren => lex("Token::CloseDelim(DelimToken::Paren)"), + token::DelimToken::Bracket => lex("Token::CloseDelim(DelimToken::Bracket)"), + token::DelimToken::Brace => 
lex("Token::CloseDelim(DelimToken::Brace)"), + token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), + } + } + Token::Underscore => lex("_"), + Token::Literal(lit, sfx) => emit_lit(lit, sfx), + // fix ident expansion information... somehow + Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)), + Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", + ident.name)), + _ => panic!("Unhandled case!"), + } + } + + // ____________________________________________________________________________________________ + // Conversion operators + + pub fn as_tt(t: Token) -> TokenTree { + // FIXME do something nicer with the spans + TokenTree::Token(DUMMY_SP, t) + } + + // ____________________________________________________________________________________________ + // Build Procedures + + /// Takes `input` and returns `vec![input]`. + pub fn build_vec(ts: TokenStream) -> TokenStream { + build_mac_call(str_to_ident("vec"), ts) + // tts.clone().to_owned() + } + + /// Takes `ident` and `rhs` and produces `let ident = rhs;`. + pub fn build_let(id: Ident, tts: TokenStream) -> TokenStream { + concat(from_tokens(vec![keyword_to_token_ident(keywords::Let), + Token::Ident(id), + Token::Eq]), + concat(tts, from_tokens(vec![Token::Semi]))) + } + + /// Takes `ident ...`, and `args ...` and produces `ident::...(args ...)`. + pub fn build_mod_call(ids: Vec, args: TokenStream) -> TokenStream { + let call = from_tokens(intersperse(ids.into_iter().map(|id| Token::Ident(id)).collect(), + Token::ModSep)); + concat(call, build_paren_delimited(args)) + } + + /// Takes `ident` and `args ...` and produces `ident(args ...)`. + pub fn build_fn_call(name: Ident, args: TokenStream) -> TokenStream { + concat(from_tokens(vec![Token::Ident(name)]), build_paren_delimited(args)) + } + + /// Takes `ident` and `args ...` and produces `ident!(args ...)`. 
+ pub fn build_mac_call(name: Ident, args: TokenStream) -> TokenStream { + concat(from_tokens(vec![Token::Ident(name), Token::Not]), + build_paren_delimited(args)) + } + + // ____________________________________________________________________________________________ + // Utilities + + /// A wrapper around `TokenStream::from_tokens` to avoid extra namespace specification and + /// provide it as a generic operator. + pub fn from_tokens(tokens: Vec) -> TokenStream { + TokenStream::from_tokens(tokens) + } + + pub fn intersperse(vs: Vec, t: T) -> Vec + where T: Clone + { + if vs.len() < 2 { + return vs; + } + let mut output = vec![vs.get(0).unwrap().to_owned()]; + + for v in vs.into_iter().skip(1) { + output.push(t.clone()); + output.push(v); + } + output + } +} diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 54c62d3665994..772d83eb2cfad 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -33,4 +33,5 @@ rustc_metadata = { path = "../librustc_metadata" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_ext = { path = "../libsyntax_ext" } -syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file +syntax_pos = { path = "../libsyntax_pos" } +proc_macro = { path = "../libproc_macro" } diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs new file mode 100644 index 0000000000000..fa37e9b54e457 --- /dev/null +++ b/src/libsyntax/ext/proc_macro_shim.rs @@ -0,0 +1,69 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This is a shim file to ease the transition to the final procedural macro interface for +//! Macros 2.0. 
It currently exposes the `libsyntax` operations that the quasiquoter's +//! output needs to compile correctly, along with the following operators: +//! +//! - `build_block_emitter`, which produces a `block` output macro result from the +//! provided TokenStream. + +use ast; +use codemap::Span; +use parse::parser::Parser; +use ptr::P; +use tokenstream::TokenStream; +use ext::base::*; + +/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses +/// the TokenStream as a block and returns it as an `Expr`. +pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStream) + -> Box { + let parser = cx.new_parser_from_tts(&output.to_tts()); + + struct Result<'a> { + prsr: Parser<'a>, + span: Span, + }; //FIXME is this the right lifetime + + impl<'a> Result<'a> { + fn block(&mut self) -> P { + let res = self.prsr.parse_block().unwrap(); + res + } + } + + impl<'a> MacResult for Result<'a> { + fn make_expr(self: Box) -> Option> { + let mut me = *self; + Some(P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Block(me.block()), + span: me.span, + attrs: ast::ThinVec::new(), + })) + + } + } + + Box::new(Result { + prsr: parser, + span: sp, + }) +} + +pub mod prelude { + pub use ext::proc_macro_shim::build_block_emitter; + pub use ast::Ident; + pub use codemap::{DUMMY_SP, Span}; + pub use ext::base::{ExtCtxt, MacResult}; + pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident}; + pub use tokenstream::{TokenTree, TokenStream}; +} diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 5ad1744418890..b4311fc007d3d 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -128,6 +128,7 @@ pub mod ext { pub mod build; pub mod expand; pub mod hygiene; + pub mod proc_macro_shim; pub mod quote; pub mod source_util; diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 89ead21cc10cb..aab6f3d682e2c 100644 --- a/src/libsyntax/tokenstream.rs +++ 
b/src/libsyntax/tokenstream.rs @@ -548,6 +548,12 @@ impl TokenStream { TokenStream::mk_leaf(Rc::new(trees), span) } + /// Convert a vector of Tokens into a TokenStream. + pub fn from_tokens(tokens: Vec) -> TokenStream { + // FIXME do something nicer with the spans + TokenStream::from_tts(tokens.into_iter().map(|t| TokenTree::Token(DUMMY_SP, t)).collect()) + } + /// Manually change a TokenStream's span. pub fn respan(self, span: Span) -> TokenStream { match self.ts { diff --git a/src/rustc/Cargo.lock b/src/rustc/Cargo.lock index 0b2287cf233d1..4f78519e13aa8 100644 --- a/src/rustc/Cargo.lock +++ b/src/rustc/Cargo.lock @@ -40,6 +40,16 @@ version = "0.0.0" name = "log" version = "0.0.0" +[[package]] +name = "proc_macro" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc_plugin 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + [[package]] name = "rbml" version = "0.0.0" @@ -136,6 +146,7 @@ dependencies = [ "flate 0.0.0", "graphviz 0.0.0", "log 0.0.0", + "proc_macro 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_borrowck 0.0.0", diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs new file mode 100644 index 0000000000000..6aee63e2858e2 --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs @@ -0,0 +1,65 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![allow(unused_parens)] +#![feature(plugin)] +#![feature(plugin_registrar)] +#![feature(rustc_private)] +#![plugin(proc_macro)] + +extern crate rustc_plugin; +extern crate proc_macro; +extern crate syntax; + +use proc_macro::build::ident_eq; + +use syntax::ext::base::{ExtCtxt, MacResult}; +use syntax::ext::proc_macro_shim::build_block_emitter; +use syntax::tokenstream::{TokenTree, TokenStream}; +use syntax::parse::token::str_to_ident; +use syntax::codemap::Span; + +use rustc_plugin::Registry; + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("cond", cond); +} + +fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { + let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); + build_block_emitter(cx, sp, output) +} + +fn cond_rec(input: TokenStream) -> TokenStream { + if input.is_empty() { + return qquote!(); + } + + let next = input.slice(0..1); + let rest = input.slice_from(1..); + + let clause : TokenStream = match next.maybe_delimited() { + Some(ts) => ts, + _ => panic!("Invalid input"), + }; + + // clause is ([test]) [rhs] + if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } + + let test: TokenStream = clause.slice(0..1); + let rhs: TokenStream = clause.slice_from(1..); + + if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + qquote!({unquote(rhs)}) + } else { + qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) + } +} diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs new file mode 100644 index 0000000000000..8291c8a1e41c6 --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -0,0 +1,66 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(unused_parens)] +#![feature(plugin)] +#![feature(plugin_registrar)] +#![feature(rustc_private)] +#![plugin(proc_macro)] + +extern crate rustc_plugin; +extern crate proc_macro; +extern crate syntax; + +use proc_macro::prelude::*; + +use rustc_plugin::Registry; + +use syntax::ast::Ident; +use syntax::codemap::{DUMMY_SP, Span}; +use syntax::ext::proc_macro_shim::build_block_emitter; +use syntax::ext::base::{ExtCtxt, MacResult}; +use syntax::parse::token::{self, Token, DelimToken, keywords, str_to_ident}; +use syntax::tokenstream::{TokenTree, TokenStream}; + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("cond", cond); +} + +fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { + let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); + build_block_emitter(cx, sp, output) +} + +fn cond_rec(input: TokenStream) -> TokenStream { + if input.is_empty() { + return qquote!(); + } + + let next = input.slice(0..1); + let rest = input.slice_from(1..); + + let clause : TokenStream = match next.maybe_delimited() { + Some(ts) => ts, + _ => panic!("Invalid input"), + }; + + // clause is ([test]) [rhs] + if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } + + let test: TokenStream = clause.slice(0..1); + let rhs: TokenStream = clause.slice_from(1..); + + if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + qquote!({unquote(rhs)}) + } else { + qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) + } +} diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs new file mode 100644 index 0000000000000..2d92a0ef18199 --- /dev/null +++ 
b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs @@ -0,0 +1,60 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(unused_parens)] +#![feature(plugin)] +#![feature(plugin_registrar)] +#![feature(rustc_private)] +#![plugin(proc_macro)] + +extern crate rustc_plugin; +extern crate proc_macro; +extern crate syntax; + +use syntax::ext::proc_macro_shim::prelude::*; +use proc_macro::prelude::*; + +use rustc_plugin::Registry; + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("cond", cond); +} + +fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { + let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); + build_block_emitter(cx, sp, output) +} + +fn cond_rec(input: TokenStream) -> TokenStream { + if input.is_empty() { + return qquote!(); + } + + let next = input.slice(0..1); + let rest = input.slice_from(1..); + + let clause : TokenStream = match next.maybe_delimited() { + Some(ts) => ts, + _ => panic!("Invalid input"), + }; + + // clause is ([test]) [rhs] + if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } + + let test: TokenStream = clause.slice(0..1); + let rhs: TokenStream = clause.slice_from(1..); + + if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + qquote!({unquote(rhs)}) + } else { + qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) + } +} diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs new file mode 100644 index 0000000000000..4ee775dec0cef --- /dev/null +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -0,0 +1,28 @@ +// Copyright 
2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-stage1 + +#![feature(plugin)] +#![feature(rustc_private)] +#![plugin(proc_macro)] + +extern crate proc_macro; +use proc_macro::prelude::*; + +extern crate syntax; +use syntax::ast::Ident; +use syntax::codemap::DUMMY_SP; +use syntax::parse::token::{self, Token, keywords, str_to_ident}; + +fn main() { + let lex_true = lex("true"); + assert_eq!(qquote!(true).eq_unspanned(&lex_true), true); +} diff --git a/src/test/run-pass-fulldeps/macro-quote-cond.rs b/src/test/run-pass-fulldeps/macro-quote-cond.rs new file mode 100644 index 0000000000000..fa969b6a087cf --- /dev/null +++ b/src/test/run-pass-fulldeps/macro-quote-cond.rs @@ -0,0 +1,54 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// aux-build:cond_plugin.rs +// ignore-stage1 + +#![feature(plugin)] +#![feature(rustc_private)] +#![plugin(cond_plugin)] + +fn fact(n : i64) -> i64 { + if n == 0 { + 1 + } else { + n * fact(n - 1) + } +} + +fn fact_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + (else (n * fact_cond(n-1))) + ) +} + +fn fib(n : i64) -> i64 { + if n == 0 || n == 1 { + 1 + } else { + fib(n-1) + fib(n-2) + } +} + +fn fib_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + ((n == 1) 1) + (else (fib_cond(n-1) + fib_cond(n-2))) + ) +} + +fn main() { + assert_eq!(fact(3), fact_cond(3)); + assert_eq!(fact(5), fact_cond(5)); + assert_eq!(fib(5), fib_cond(5)); + assert_eq!(fib(8), fib_cond(8)); +} diff --git a/src/test/run-pass-fulldeps/macro-quote-noprelude.rs b/src/test/run-pass-fulldeps/macro-quote-noprelude.rs new file mode 100644 index 0000000000000..4184ca7be372f --- /dev/null +++ b/src/test/run-pass-fulldeps/macro-quote-noprelude.rs @@ -0,0 +1,54 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// aux-build:cond_noprelude_plugin.rs +// ignore-stage1 + +#![feature(plugin)] +#![feature(rustc_private)] +#![plugin(cond_noprelude_plugin)] + +fn fact(n : i64) -> i64 { + if n == 0 { + 1 + } else { + n * fact(n - 1) + } +} + +fn fact_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + (else (n * fact_cond(n-1))) + ) +} + +fn fib(n : i64) -> i64 { + if n == 0 || n == 1 { + 1 + } else { + fib(n-1) + fib(n-2) + } +} + +fn fib_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + ((n == 1) 1) + (else (fib_cond(n-1) + fib_cond(n-2))) + ) +} + +fn main() { + assert_eq!(fact(3), fact_cond(3)); + assert_eq!(fact(5), fact_cond(5)); + assert_eq!(fib(5), fib_cond(5)); + assert_eq!(fib(8), fib_cond(8)); +} diff --git a/src/test/run-pass-fulldeps/macro-quote-prelude.rs b/src/test/run-pass-fulldeps/macro-quote-prelude.rs new file mode 100644 index 0000000000000..5b703a5bc2668 --- /dev/null +++ b/src/test/run-pass-fulldeps/macro-quote-prelude.rs @@ -0,0 +1,54 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// aux-build:cond_prelude_plugin.rs +// ignore-stage1 + +#![feature(plugin)] +#![feature(rustc_private)] +#![plugin(cond_prelude_plugin)] + +fn fact(n : i64) -> i64 { + if n == 0 { + 1 + } else { + n * fact(n - 1) + } +} + +fn fact_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + (else (n * fact_cond(n-1))) + ) +} + +fn fib(n : i64) -> i64 { + if n == 0 || n == 1 { + 1 + } else { + fib(n-1) + fib(n-2) + } +} + +fn fib_cond(n : i64) -> i64 { + cond!( + ((n == 0) 1) + ((n == 1) 1) + (else (fib_cond(n-1) + fib_cond(n-2))) + ) +} + +fn main() { + assert_eq!(fact(3), fact_cond(3)); + assert_eq!(fact(5), fact_cond(5)); + assert_eq!(fib(5), fib_cond(5)); + assert_eq!(fib(8), fib_cond(8)); +} diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs index 48016721d52c1..4932fd5147afa 100644 --- a/src/tools/tidy/src/cargo.rs +++ b/src/tools/tidy/src/cargo.rs @@ -88,6 +88,12 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { continue } + // We want the compiler to depend on the proc_macro crate so that it is built and + // included in the end, but we don't want to actually use it in the compiler. + if toml.contains("name = \"rustc_driver\"") && krate == "proc_macro" { + continue + } + if !librs.contains(&format!("extern crate {}", krate)) { println!("{} doesn't have `extern crate {}`, but Cargo.toml \ depends on it", libfile.display(), krate);