Merge pull request #4172 from graydon/remove-old-syntax-ext

Remove old syntax ext
This commit is contained in:
Brian Anderson 2012-12-12 17:13:41 -08:00
commit 0494b078a9
41 changed files with 276 additions and 1965 deletions

View File

@ -1033,6 +1033,8 @@ fn decode_item_ast(par_doc: ebml::Doc) -> @ast::item {
trait fake_ext_ctxt {
fn cfg() -> ast::crate_cfg;
fn parse_sess() -> parse::parse_sess;
fn call_site() -> span;
fn ident_of(st: ~str) -> ast::ident;
}
#[cfg(test)]
@ -1042,6 +1044,16 @@ type fake_session = parse::parse_sess;
impl fake_session: fake_ext_ctxt {
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_info: None
}
}
fn ident_of(st: ~str) -> ast::ident {
self.interner.intern(@st)
}
}
#[cfg(test)]
@ -1050,7 +1062,8 @@ fn mk_ctxt() -> fake_ext_ctxt {
}
#[cfg(test)]
fn roundtrip(in_item: @ast::item) {
fn roundtrip(in_item: Option<@ast::item>) {
let in_item = in_item.get();
let bytes = do io::with_bytes_writer |wr| {
let ebml_w = writer::Serializer(wr);
encode_item_ast(ebml_w, in_item);
@ -1074,45 +1087,45 @@ fn roundtrip(in_item: @ast::item) {
#[test]
fn test_basic() {
let ext_cx = mk_ctxt();
roundtrip(#ast[item]{
roundtrip(quote_item!(
fn foo() {}
});
));
}
#[test]
fn test_smalltalk() {
let ext_cx = mk_ctxt();
roundtrip(#ast[item]{
roundtrip(quote_item!(
fn foo() -> int { 3 + 4 } // first smalltalk program ever executed.
});
));
}
#[test]
fn test_more() {
let ext_cx = mk_ctxt();
roundtrip(#ast[item]{
roundtrip(quote_item!(
fn foo(x: uint, y: uint) -> uint {
let z = x + y;
return z;
}
});
));
}
#[test]
fn test_simplification() {
let ext_cx = mk_ctxt();
let item_in = ast::ii_item(#ast[item] {
let item_in = ast::ii_item(quote_item!(
fn new_int_alist<B: Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return {eq_fn: eq_int, mut data: ~[]};
}
});
).get());
let item_out = simplify_ast(item_in);
let item_exp = ast::ii_item(#ast[item] {
let item_exp = ast::ii_item(quote_item!(
fn new_int_alist<B: Copy>() -> alist<int, B> {
return {eq_fn: eq_int, mut data: ~[]};
}
});
).get());
match (item_out, item_exp) {
(ast::ii_item(item_out), ast::ii_item(item_exp)) => {
assert pprust::item_to_str(item_out, ext_cx.parse_sess().interner)

View File

@ -831,24 +831,10 @@ enum matcher_ {
type mac = spanned<mac_>;
type mac_arg = Option<@expr>;
#[auto_serialize]
#[auto_deserialize]
type mac_body_ = {span: span};
type mac_body = Option<mac_body_>;
#[auto_serialize]
#[auto_deserialize]
enum mac_ {
mac_invoc(@path, mac_arg, mac_body), // old macro-invocation
mac_invoc_tt(@path,~[token_tree]), // new macro-invocation
mac_ellipsis, // old pattern-match (obsolete)
// the span is used by the quoter/anti-quoter ...
mac_aq(span /* span of quote */, @expr), // anti-quote
mac_var(uint)
}
type lit = spanned<lit_>;

View File

@ -309,7 +309,7 @@ priv impl ext_ctxt {
fn lambda(blk: ast::blk) -> @ast::expr {
let ext_cx = self;
let blk_e = self.expr(blk.span, ast::expr_block(blk));
#ast{ || $(blk_e) }
quote_expr!( || $blk_e )
}
fn blk(span: span, stmts: ~[@ast::stmt]) -> ast::blk {

View File

@ -13,32 +13,19 @@ use parse::parser;
use diagnostic::span_handler;
use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom};
use ast_util::dummy_sp;
use parse::token;
// obsolete old-style #macro code:
//
// syntax_expander, normal, macro_defining, macro_definer,
// builtin
//
// new-style macro! tt code:
//
// syntax_expander_tt, syntax_expander_tt_item, mac_result,
// normal_tt, item_tt
//
// also note that ast::mac has way too many cases and can probably
// be trimmed down substantially.
// second argument is the span to blame for general argument problems
type syntax_expander_ =
fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> @ast::expr;
// second argument is the origin of the macro, if user-defined
type syntax_expander = {expander: syntax_expander_, span: Option<span>};
// also note that ast::mac used to have a bunch of extraneous cases and
// is now probably a redundant AST node, can be merged with
// ast::mac_invoc_tt.
type macro_def = {name: ~str, ext: syntax_extension};
// macro_definer is obsolete, remove when #old_macros go away.
type macro_definer =
fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;
type item_decorator =
fn@(ext_ctxt, span, ast::meta_item, ~[@ast::item]) -> ~[@ast::item];
@ -60,13 +47,7 @@ enum mac_result {
enum syntax_extension {
// normal() is obsolete, remove when #old_macros go away.
normal(syntax_expander),
// macro_defining() is obsolete, remove when #old_macros go away.
macro_defining(macro_definer),
// #[auto_serialize] and such. will probably survive death of #old_macros
// #[auto_serialize] and such
item_decorator(item_decorator),
// Token-tree expanders
@ -80,8 +61,6 @@ enum syntax_extension {
// A temporary hard-coded map of methods for expanding syntax extension
// AST nodes into full ASTs
fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
fn builtin(f: syntax_expander_) -> syntax_extension
{normal({expander: f, span: None})}
fn builtin_normal_tt(f: syntax_expander_tt_) -> syntax_extension {
normal_tt({expander: f, span: None})
}
@ -89,28 +68,25 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
item_tt({expander: f, span: None})
}
let syntax_expanders = HashMap();
syntax_expanders.insert(~"macro",
macro_defining(ext::simplext::add_new_extension));
syntax_expanders.insert(~"macro_rules",
builtin_item_tt(
ext::tt::macro_rules::add_new_extension));
syntax_expanders.insert(~"fmt", builtin(ext::fmt::expand_syntax_ext));
syntax_expanders.insert(~"fmt",
builtin_normal_tt(ext::fmt::expand_syntax_ext));
syntax_expanders.insert(
~"auto_serialize",
item_decorator(ext::auto_serialize::expand_auto_serialize));
syntax_expanders.insert(
~"auto_deserialize",
item_decorator(ext::auto_serialize::expand_auto_deserialize));
syntax_expanders.insert(~"env", builtin(ext::env::expand_syntax_ext));
syntax_expanders.insert(~"env",
builtin_normal_tt(ext::env::expand_syntax_ext));
syntax_expanders.insert(~"concat_idents",
builtin(ext::concat_idents::expand_syntax_ext));
syntax_expanders.insert(~"ident_to_str",
builtin(ext::ident_to_str::expand_syntax_ext));
builtin_normal_tt(
ext::concat_idents::expand_syntax_ext));
syntax_expanders.insert(~"log_syntax",
builtin_normal_tt(
ext::log_syntax::expand_syntax_ext));
syntax_expanders.insert(~"ast",
builtin(ext::qquote::expand_ast));
syntax_expanders.insert(~"deriving_eq",
item_decorator(
ext::deriving::expand_deriving_eq));
@ -133,21 +109,29 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
builtin_normal_tt(ext::quote::expand_quote_stmt));
syntax_expanders.insert(~"line",
builtin(ext::source_util::expand_line));
builtin_normal_tt(
ext::source_util::expand_line));
syntax_expanders.insert(~"col",
builtin(ext::source_util::expand_col));
builtin_normal_tt(
ext::source_util::expand_col));
syntax_expanders.insert(~"file",
builtin(ext::source_util::expand_file));
builtin_normal_tt(
ext::source_util::expand_file));
syntax_expanders.insert(~"stringify",
builtin(ext::source_util::expand_stringify));
builtin_normal_tt(
ext::source_util::expand_stringify));
syntax_expanders.insert(~"include",
builtin(ext::source_util::expand_include));
builtin_normal_tt(
ext::source_util::expand_include));
syntax_expanders.insert(~"include_str",
builtin(ext::source_util::expand_include_str));
builtin_normal_tt(
ext::source_util::expand_include_str));
syntax_expanders.insert(~"include_bin",
builtin(ext::source_util::expand_include_bin));
builtin_normal_tt(
ext::source_util::expand_include_bin));
syntax_expanders.insert(~"module_path",
builtin(ext::source_util::expand_mod));
builtin_normal_tt(
ext::source_util::expand_mod));
syntax_expanders.insert(~"proto",
builtin_item_tt(ext::pipes::expand_proto));
syntax_expanders.insert(
@ -303,87 +287,39 @@ fn expr_to_ident(cx: ext_ctxt,
}
}
fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
min: uint, name: ~str) -> ~[@ast::expr] {
return get_mac_args(cx, sp, arg, min, None, name);
fn check_zero_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree],
name: &str) {
if tts.len() != 0 {
cx.span_fatal(sp, fmt!("%s takes no arguments", name));
}
}
fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
min: uint, max: Option<uint>, name: ~str) -> ~[@ast::expr] {
match arg {
Some(expr) => match expr.node {
ast::expr_vec(elts, _) => {
let elts_len = vec::len(elts);
match max {
Some(max) if ! (min <= elts_len && elts_len <= max) => {
cx.span_fatal(sp,
fmt!("%s! takes between %u and %u arguments.",
name, min, max));
}
None if ! (min <= elts_len) => {
cx.span_fatal(sp, fmt!("%s! needs at least %u arguments.",
name, min));
}
_ => return elts /* we are good */
}
}
_ => {
cx.span_fatal(sp, fmt!("%s!: malformed invocation", name))
fn get_single_str_from_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree],
name: &str) -> ~str {
if tts.len() != 1 {
cx.span_fatal(sp, fmt!("%s takes 1 argument.", name));
}
match tts[0] {
ast::tt_tok(_, token::LIT_STR(ident)) => cx.str_of(ident),
_ =>
cx.span_fatal(sp, fmt!("%s requires a string.", name))
}
}
fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree])
-> ~[@ast::expr] {
let p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(),
tts);
let mut es = ~[];
while p.token != token::EOF {
if es.len() != 0 {
p.eat(token::COMMA);
}
},
None => cx.span_fatal(sp, fmt!("%s!: missing arguments", name))
es.push(p.parse_expr());
}
}
fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
-> ast::mac_body_
{
match (args) {
Some(body) => body,
None => cx.span_fatal(sp, ~"missing macro body")
}
}
// Massage syntactic form of new-style arguments to internal representation
// of old-style macro args, such that old-style macro can be run and invoked
// using new syntax. This will be obsolete when #old_macros go away.
fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree])
-> ast::mac_arg {
use ast::{matcher, matcher_, match_tok, match_seq, match_nonterminal};
use parse::lexer::{new_tt_reader, reader};
use tt::macro_parser::{parse_or_else, matched_seq,
matched_nonterminal};
// these spans won't matter, anyways
fn ms(m: matcher_) -> matcher {
{node: m, span: dummy_sp()}
}
let arg_nm = cx.parse_sess().interner.gensym(@~"arg");
let argument_gram = ~[ms(match_seq(~[
ms(match_nonterminal(arg_nm, parse::token::special_idents::expr, 0u))
], Some(parse::token::COMMA), true, 0u, 1u))];
let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, arg);
let args =
match parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader,
argument_gram).get(arg_nm) {
@matched_seq(s, _) => {
do s.map() |lf| {
match *lf {
@matched_nonterminal(parse::token::nt_expr(arg)) =>
arg, /* whew! list of exprs, here we come! */
_ => fail ~"badly-structured parse result"
}
}
},
_ => fail ~"badly-structured parse result"
};
return Some(@{id: parse::next_node_id(cx.parse_sess()),
callee_id: parse::next_node_id(cx.parse_sess()),
node: ast::expr_vec(args, ast::m_imm), span: sp});
es
}
//

View File

@ -10,19 +10,32 @@
use base::*;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args_no_max(cx,sp,arg,1u,~"concat_idents");
fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let mut res_str = ~"";
for args.each |e| {
res_str += *cx.parse_sess().interner.get(
expr_to_ident(cx, *e, ~"expected an ident"));
for tts.eachi |i, e| {
if i & 1 == 1 {
match *e {
ast::tt_tok(_, token::COMMA) => (),
_ => cx.span_fatal(sp, ~"concat_idents! \
expecting comma.")
}
} else {
match *e {
ast::tt_tok(_, token::IDENT(ident,_)) =>
res_str += cx.str_of(ident),
_ => cx.span_fatal(sp, ~"concat_idents! \
requires ident args.")
}
}
}
let res = cx.parse_sess().interner.intern(@res_str);
return @{id: cx.next_id(),
callee_id: cx.next_id(),
node: ast::expr_path(@{span: sp, global: false, idents: ~[res],
rp: None, types: ~[]}),
span: sp};
let e = @{id: cx.next_id(),
callee_id: cx.next_id(),
node: ast::expr_path(@{span: sp, global: false,
idents: ~[res],
rp: None, types: ~[]}),
span: sp};
mr_expr(e)
}

View File

@ -18,18 +18,19 @@ use base::*;
use build::mk_uniq_str;
export expand_syntax_ext;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx, sp, arg, 1u, option::Some(1u), ~"env");
fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let var = get_single_str_from_tts(cx, sp, tts, "env!");
// FIXME (#2248): if this was more thorough it would manufacture an
// Option<str> rather than just an maybe-empty string.
let var = expr_to_str(cx, args[0], ~"env! requires a string");
match os::getenv(var) {
option::None => return mk_uniq_str(cx, sp, ~""),
option::Some(ref s) => return mk_uniq_str(cx, sp, (*s))
}
let e = match os::getenv(var) {
option::None => mk_uniq_str(cx, sp, ~""),
option::Some(ref s) => mk_uniq_str(cx, sp, (*s))
};
mr_expr(e)
}
//

View File

@ -10,11 +10,10 @@
use std::map::HashMap;
use ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
use ast::{crate, expr_, expr_mac, mac_invoc_tt,
tt_delim, tt_tok, item_mac, stmt_, stmt_mac, stmt_expr, stmt_semi};
use fold::*;
use ext::base::*;
use ext::qquote::{qq_helper};
use parse::{parser, parse_expr_from_source_str, new_parser_from_tts};
@ -32,51 +31,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
expr_mac(ref mac) => {
match (*mac).node {
// Old-style macros. For compatibility, will erase this whole
// block once we've transitioned.
mac_invoc(pth, args, body) => {
assert (vec::len(pth.idents) > 0u);
/* using idents and token::special_idents would make the
the macro names be hygienic */
let extname = cx.parse_sess().interner.get(pth.idents[0]);
match exts.find(*extname) {
None => {
cx.span_fatal(pth.span,
fmt!("macro undefined: '%s'", *extname))
}
Some(item_decorator(_)) => {
cx.span_fatal(
pth.span,
fmt!("%s can only be used as a decorator", *extname));
}
Some(normal({expander: exp, span: exp_sp})) => {
cx.bt_push(ExpandedFrom({call_site: s,
callie: {name: *extname, span: exp_sp}}));
let expanded = exp(cx, (*mac).span, args, body);
//keep going, outside-in
let fully_expanded = fld.fold_expr(expanded).node;
cx.bt_pop();
(fully_expanded, s)
}
Some(macro_defining(ext)) => {
let named_extension = ext(cx, (*mac).span, args, body);
exts.insert(named_extension.name, named_extension.ext);
(ast::expr_rec(~[], None), s)
}
Some(normal_tt(_)) => {
cx.span_fatal(pth.span,
fmt!("this tt-style macro should be \
invoked '%s!(...)'", *extname))
}
Some(item_tt(*)) => {
cx.span_fatal(pth.span,
~"cannot use item macros in this context");
}
}
}
// Token-tree macros, these will be the only case when we're
// finished transitioning.
@ -108,21 +62,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
(fully_expanded, s)
}
Some(normal({expander: exp, span: exp_sp})) => {
cx.bt_push(ExpandedFrom({call_site: s,
callie: {name: *extname, span: exp_sp}}));
//convert the new-style invoc for the old-style macro
let arg = base::tt_args_to_original_flavor(cx, pth.span,
(*tts));
let expanded = exp(cx, (*mac).span, arg, None);
//keep going, outside-in
let fully_expanded = fld.fold_expr(expanded).node;
cx.bt_pop();
(fully_expanded, s)
}
_ => {
cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro",
@ -131,7 +70,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
}
}
_ => cx.span_bug((*mac).span, ~"naked syntactic bit")
}
}
_ => orig(e, s, fld)
@ -166,10 +104,14 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
ast::meta_list(ref n, _) => (*n)
};
match exts.find(mname) {
None | Some(normal(_)) | Some(macro_defining(_))
| Some(normal_tt(_)) | Some(item_tt(*)) => items,
None | Some(normal_tt(_)) | Some(item_tt(*)) => items,
Some(item_decorator(dec_fn)) => {
dec_fn(cx, attr.span, attr.node.value, items)
cx.bt_push(ExpandedFrom({call_site: attr.span,
callie: {name: copy mname,
span: None}}));
let r = dec_fn(cx, attr.span, attr.node.value, items);
cx.bt_pop();
r
}
}
}
@ -205,36 +147,16 @@ fn expand_item(exts: HashMap<~str, syntax_extension>,
}
}
// avoid excess indentation when a series of nested `match`es
// has only one "good" outcome
macro_rules! biased_match (
( ($e :expr) ~ ($p :pat) else $err :stmt ;
$( ($e_cdr:expr) ~ ($p_cdr:pat) else $err_cdr:stmt ; )*
=> $body:expr
) => (
match $e {
$p => {
biased_match!($( ($e_cdr) ~ ($p_cdr) else $err_cdr ; )*
=> $body)
}
_ => { $err }
}
);
( => $body:expr ) => ( $body )
)
// Support for item-position macro invocations, exactly the same
// logic as for expression-position macro invocations.
fn expand_item_mac(exts: HashMap<~str, syntax_extension>,
cx: ext_ctxt, &&it: @ast::item,
fld: ast_fold) -> Option<@ast::item> {
let (pth, tts) = biased_match!(
(it.node) ~ (item_mac({node: mac_invoc_tt(pth, ref tts), _})) else {
cx.span_bug(it.span, ~"invalid item macro invocation")
};
=> (pth, (*tts))
);
let (pth, tts) = match it.node {
item_mac({node: mac_invoc_tt(pth, ref tts), _}) => (pth, (*tts)),
_ => cx.span_bug(it.span, ~"invalid item macro invocation")
};
let extname = cx.parse_sess().interner.get(pth.idents[0]);
let expanded = match exts.find(*extname) {
@ -289,12 +211,15 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
orig: fn@(&&s: stmt_, span, ast_fold) -> (stmt_, span))
-> (stmt_, span)
{
let (mac, pth, tts, semi) = biased_match! (
(s) ~ (stmt_mac(ref mac, semi)) else return orig(s, sp, fld);
((*mac).node) ~ (mac_invoc_tt(pth, ref tts)) else {
cx.span_bug((*mac).span, ~"naked syntactic bit")
};
=> ((*mac), pth, (*tts), semi));
let (mac, pth, tts, semi) = match s {
stmt_mac(ref mac, semi) => {
match (*mac).node {
mac_invoc_tt(pth, ref tts) => ((*mac), pth, (*tts), semi)
}
}
_ => return orig(s, sp, fld)
};
assert(vec::len(pth.idents) == 1u);
let extname = cx.parse_sess().interner.get(pth.idents[0]);
@ -321,23 +246,6 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
(fully_expanded, sp)
}
Some(normal({expander: exp, span: exp_sp})) => {
cx.bt_push(ExpandedFrom({call_site: sp,
callie: {name: *extname,
span: exp_sp}}));
//convert the new-style invoc for the old-style macro
let arg = base::tt_args_to_original_flavor(cx, pth.span, tts);
let exp_expr = exp(cx, mac.span, arg, None);
let expanded = @{node: stmt_expr(exp_expr, cx.next_id()),
span: exp_expr.span};
//keep going, outside-in
let fully_expanded = fld.fold_stmt(expanded).node;
cx.bt_pop();
(fully_expanded, sp)
}
_ => {
cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro", *extname))

View File

@ -21,9 +21,12 @@ use codemap::span;
use ext::build::*;
export expand_syntax_ext;
fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args_no_max(cx, sp, arg, 1u, ~"fmt");
fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let args = get_exprs_from_tts(cx, copy tts);
if args.len() == 0 {
cx.span_fatal(sp, "fmt! takes at least 1 argument.");
}
let fmt =
expr_to_str(cx, args[0],
~"first argument to fmt! must be a string literal.");
@ -37,7 +40,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
parse_fmt_err_(cx, fmtspan, s)
};
let pieces = parse_fmt_string(fmt, parse_fmt_err);
return pieces_to_expr(cx, sp, pieces, args);
mr_expr(pieces_to_expr(cx, sp, pieces, args))
}
// FIXME (#2249): A lot of these functions for producing expressions can

View File

@ -1,20 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use base::*;
use build::mk_uniq_str;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx,sp,arg,1u,option::Some(1u),~"ident_to_str");
return mk_uniq_str(cx, sp, *cx.parse_sess().interner.get(
expr_to_ident(cx, args[0u], ~"expected an ident")));
}

View File

@ -1,370 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{crate, expr_, mac_invoc,
mac_aq, mac_var};
use parse::parser;
use parse::parser::{Parser, parse_from_source_str};
use dvec::DVec;
use parse::token::ident_interner;
use codemap::{CharPos, BytePos};
use fold::*;
use visit::*;
use ext::base::*;
use ext::build::*;
use print::*;
use io::*;
use codemap::span;
struct gather_item {
lo: BytePos,
hi: BytePos,
e: @ast::expr,
constr: ~str
}
type aq_ctxt = @{lo: BytePos, gather: DVec<gather_item>};
enum fragment {
from_expr(@ast::expr),
from_ty(@ast::Ty)
}
fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
strs.map(|str| cx.parse_sess().interner.intern(@*str))
}
fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident {
cx.parse_sess().interner.intern(@str)
}
trait qq_helper {
fn span() -> span;
fn visit(aq_ctxt, vt<aq_ctxt>);
fn extract_mac() -> Option<ast::mac_>;
fn mk_parse_fn(ext_ctxt,span) -> @ast::expr;
fn get_fold_fn() -> ~str;
}
impl @ast::crate: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_crate"]))
}
fn get_fold_fn() -> ~str {~"fold_crate"}
}
impl @ast::expr: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {
match (self.node) {
ast::expr_mac({node: ref mac, _}) => Some((*mac)),
_ => None
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_expr"]))
}
fn get_fold_fn() -> ~str {~"fold_expr"}
}
impl @ast::Ty: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {
match (self.node) {
ast::ty_mac({node: ref mac, _}) => Some((*mac)),
_ => None
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_ty"]))
}
fn get_fold_fn() -> ~str {~"fold_ty"}
}
impl @ast::item: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_item"]))
}
fn get_fold_fn() -> ~str {~"fold_item"}
}
impl @ast::stmt: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_stmt"]))
}
fn get_fold_fn() -> ~str {~"fold_stmt"}
}
impl @ast::pat: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
fn extract_mac() -> Option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
~"parse_pat"]))
}
fn get_fold_fn() -> ~str {~"fold_pat"}
}
fn gather_anti_quotes<N: qq_helper>(lo: BytePos, node: N) -> aq_ctxt
{
let v = @{visit_expr: |node, &&cx, v| visit_aq(node, ~"from_expr", cx, v),
visit_ty: |node, &&cx, v| visit_aq(node, ~"from_ty", cx, v),
.. *default_visitor()};
let cx = @{lo:lo, gather: DVec()};
node.visit(cx, mk_vt(v));
// FIXME (#2250): Maybe this is an overkill (merge_sort), it might
// be better to just keep the gather array in sorted order.
do cx.gather.swap |v| {
pure fn by_lo(a: &gather_item, b: &gather_item) -> bool {
a.lo < b.lo
}
std::sort::merge_sort(v, by_lo)
};
return cx;
}
fn visit_aq<T:qq_helper>(node: T, constr: ~str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
{
match (node.extract_mac()) {
Some(mac_aq(sp, e)) => {
cx.gather.push(gather_item {
lo: sp.lo - cx.lo,
hi: sp.hi - cx.lo,
e: e,
constr: constr});
}
_ => node.visit(cx, v)
}
}
fn is_space(c: char) -> bool {
parse::lexer::is_whitespace(c)
}
fn expand_ast(ecx: ext_ctxt, _sp: span,
arg: ast::mac_arg, body: ast::mac_body)
-> @ast::expr
{
let mut what = ~"expr";
do arg.iter |arg| {
let args: ~[@ast::expr] =
match arg.node {
ast::expr_vec(elts, _) => elts,
_ => {
ecx.span_fatal
(_sp, ~"#ast requires arguments of the form `~[...]`.")
}
};
if vec::len::<@ast::expr>(args) != 1u {
ecx.span_fatal(_sp, ~"#ast requires exactly one arg");
}
match (args[0].node) {
ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
=> what = *ecx.parse_sess().interner.get(id[0]),
_ => ecx.span_fatal(args[0].span, ~"expected an identifier")
}
}
let body = get_mac_body(ecx,_sp,body);
return match what {
~"crate" => finish(ecx, body, parse_crate),
~"expr" => finish(ecx, body, parse_expr),
~"ty" => finish(ecx, body, parse_ty),
~"item" => finish(ecx, body, parse_item),
~"stmt" => finish(ecx, body, parse_stmt),
~"pat" => finish(ecx, body, parse_pat),
_ => ecx.span_fatal(_sp, ~"unsupported ast type")
};
}
fn parse_crate(p: Parser) -> @ast::crate { p.parse_crate_mod(~[]) }
fn parse_ty(p: Parser) -> @ast::Ty { p.parse_ty(false) }
fn parse_stmt(p: Parser) -> @ast::stmt { p.parse_stmt(~[]) }
fn parse_expr(p: Parser) -> @ast::expr { p.parse_expr() }
fn parse_pat(p: Parser) -> @ast::pat { p.parse_pat(true) }
fn parse_item(p: Parser) -> @ast::item {
match p.parse_item(~[]) {
Some(item) => item,
None => fail ~"parse_item: parsing an item failed"
}
}
fn finish<T: qq_helper>
(ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: Parser) -> T)
-> @ast::expr
{
let cm = ecx.codemap();
let str = @cm.span_to_snippet(body.span);
debug!("qquote--str==%?", str);
let fname = cm.mk_substr_filename(body.span);
let node = parse_from_source_str
(f, fname, codemap::FssInternal(body.span), str,
ecx.cfg(), ecx.parse_sess());
let loc = cm.lookup_char_pos(body.span.lo);
let sp = node.span();
let qcx = gather_anti_quotes(sp.lo, node);
let cx = qcx;
for uint::range(1u, cx.gather.len()) |i| {
assert cx.gather[i-1u].lo < cx.gather[i].lo;
// ^^ check that the vector is sorted
assert cx.gather[i-1u].hi <= cx.gather[i].lo;
// ^^ check that the spans are non-overlapping
}
let mut str2 = ~"";
enum state {active, skip(uint), blank};
let mut state = active;
let mut i = BytePos(0u);
let mut j = 0u;
let g_len = cx.gather.len();
for str::chars_each(*str) |ch| {
if (j < g_len && i == cx.gather[j].lo) {
assert ch == '$';
let repl = fmt!("$%u ", j);
state = skip(str::char_len(repl));
str2 += repl;
}
match copy state {
active => str::push_char(&mut str2, ch),
skip(1u) => state = blank,
skip(sk) => state = skip (sk-1u),
blank if is_space(ch) => str::push_char(&mut str2, ch),
blank => str::push_char(&mut str2, ' ')
}
i += BytePos(1u);
if (j < g_len && i == cx.gather[j].hi) {
assert ch == ')';
state = active;
j += 1u;
}
}
let cx = ecx;
let cfg_call = || mk_call_(
cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
id_ext(cx, ~"cfg")), ~[]);
let parse_sess_call = || mk_call_(
cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
id_ext(cx, ~"parse_sess")), ~[]);
let pcall = mk_call(cx,sp,
ids_ext(cx, ~[~"syntax", ~"parse", ~"parser",
~"parse_from_source_str"]),
~[node.mk_parse_fn(cx,sp),
mk_uniq_str(cx,sp, fname),
mk_call(cx,sp,
ids_ext(cx, ~[~"syntax",~"ext",
~"qquote", ~"mk_file_substr"]),
~[mk_uniq_str(cx,sp, loc.file.name),
mk_uint(cx,sp, loc.line),
mk_uint(cx,sp, loc.col.to_uint())]),
mk_unary(cx,sp, ast::box(ast::m_imm),
mk_uniq_str(cx,sp, str2)),
cfg_call(),
parse_sess_call()]
);
let mut rcall = pcall;
if (g_len > 0u) {
rcall = mk_call(cx,sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
~"replace"]),
~[pcall,
mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| {
mk_call(cx,sp,
ids_ext(cx, ~[~"syntax", ~"ext",
~"qquote", g.constr]),
~[g.e])})),
mk_path(cx,sp,
ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote",
node.get_fold_fn()]))]);
}
return rcall;
}
fn replace<T>(node: T, repls: ~[fragment], ff: fn (ast_fold, T) -> T)
-> T
{
let aft = default_ast_fold();
let f_pre = @{fold_expr: |a,b,c|replace_expr(repls, a, b, c,
aft.fold_expr),
fold_ty: |a,b,c|replace_ty(repls, a, b, c,
aft.fold_ty),
.. *aft};
return ff(make_fold(f_pre), node);
}
fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate {
@f.fold_crate(*n)
}
fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)}
fn fold_ty(f: ast_fold, &&n: @ast::Ty) -> @ast::Ty {f.fold_ty(n)}
fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {
f.fold_item(n).get() //HACK: we know we don't drop items
}
fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)}
fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)}
fn replace_expr(repls: ~[fragment],
e: ast::expr_, s: span, fld: ast_fold,
orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-> (ast::expr_, span)
{
match e {
ast::expr_mac({node: mac_var(i), _}) => match (repls[i]) {
from_expr(r) => (r.node, r.span),
_ => fail /* fixme error message */
},
_ => orig(e,s,fld)
}
}
fn replace_ty(repls: ~[fragment],
e: ast::ty_, s: span, fld: ast_fold,
orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span))
-> (ast::ty_, span)
{
match e {
ast::ty_mac({node: mac_var(i), _}) => match (repls[i]) {
from_ty(r) => (r.node, r.span),
_ => fail /* fixme error message */
},
_ => orig(e,s,fld)
}
}
fn mk_file_substr(fname: ~str, line: uint, col: uint) ->
codemap::FileSubstr {
codemap::FssExternal({filename: fname, line: line, col: CharPos(col)})
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:

View File

@ -406,7 +406,6 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
AT => "AT",
DOT => "DOT",
DOTDOT => "DOTDOT",
ELLIPSIS => "ELLIPSIS",
COMMA => "COMMA",
SEMI => "SEMI",
COLON => "COLON",

View File

@ -1,750 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use codemap::span;
use std::map::HashMap;
use dvec::DVec;
use base::*;
use fold::*;
use ast_util::respan;
use ast::{ident, path, Ty, blk_, expr, expr_path,
expr_vec, expr_mac, mac_invoc, node_id, expr_index};
export add_new_extension;
fn path_to_ident(pth: @path) -> Option<ident> {
if vec::len(pth.idents) == 1u && vec::len(pth.types) == 0u {
return Some(pth.idents[0u]);
}
return None;
}
//a vec of binders might be a little big.
type clause = {params: binders, body: @expr};
/* logically, an arb_depth should contain only one kind of matchable */
enum arb_depth<T> { leaf(T), seq(@~[arb_depth<T>], span), }
enum matchable {
match_expr(@expr),
match_path(@path),
match_ident(ast::spanned<ident>),
match_ty(@Ty),
match_block(ast::blk),
match_exact, /* don't bind anything, just verify the AST traversal */
}
/* for when given an incompatible bit of AST */
// Report a fatal "wrong kind of AST fragment" error at the span of the
// offending matchable; never returns (-> !).
fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! {
match m {
match_expr(x) => cx.span_fatal(
x.span, ~"this argument is an expr, expected " + expected),
match_path(x) => cx.span_fatal(
x.span, ~"this argument is a path, expected " + expected),
match_ident(x) => cx.span_fatal(
x.span, ~"this argument is an ident, expected " + expected),
match_ty(x) => cx.span_fatal(
x.span, ~"this argument is a type, expected " + expected),
match_block(ref x) => cx.span_fatal(
(*x).span, ~"this argument is a block, expected " + expected),
// match_exact never carries a binding, so it should not reach here.
match_exact => cx.bug(~"what is a match_exact doing in a bindings?")
}
}
// We can't make all the matchables in a match_result the same type because
// idents can be paths, which can be exprs.
// If we want better match failure error messages (like in Fortifying Syntax),
// we'll want to return something indicating amount of progress and location
// of failure instead of `none`.
type match_result = Option<arb_depth<matchable>>;
// A selector navigates from a candidate AST fragment to the sub-fragment
// a particular syntax variable should bind; None means "does not match".
type selector = fn@(matchable) -> match_result;
// Split a vector of exprs around a single `...` (mac_ellipsis) marker:
// `pre` are the elements before the repeated one, `rep` is the element
// immediately preceding the `...` (the one being repeated), and `post`
// are the elements after the `...`. At most one ellipsis is allowed.
fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
{pre: ~[@expr], rep: Option<@expr>, post: ~[@expr]} {
let mut idx: uint = 0u;
let mut res = None;
for elts.each |elt| {
match elt.node {
expr_mac(ref m) => match (*m).node {
ast::mac_ellipsis => {
if res.is_some() {
cx.span_fatal((*m).span, ~"only one ellipsis allowed");
}
// NOTE(review): assumes the `...` is never the first element
// (idx >= 1); `idx - 1u` would wrap on uint otherwise — the
// parser presumably prevents that case, but verify.
res =
Some({pre: vec::slice(elts, 0u, idx - 1u),
rep: Some(elts[idx - 1u]),
post: vec::slice(elts, idx + 1u, vec::len(elts))});
}
_ => ()
},
_ => ()
}
idx += 1u;
}
// No ellipsis found: everything is `pre`, nothing repeats.
return match res {
Some(val) => val,
None => {pre: elts, rep: None, post: ~[]}
}
}
// Map `f` over `v`, short-circuiting to None if any element maps to None
// (the Option-traverse idiom: ~[T] -> Option<~[U]>).
fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) ->
Option<~[U]> {
let mut res = ~[];
for v.each |elem| {
match f(*elem) {
None => return None,
Some(ref fv) => res.push((*fv))
}
}
return Some(res);
}
// Apply selector `f` at every leaf of an arb_depth tree, preserving the
// seq structure; fails (None) if `f` fails anywhere.
fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
match ad {
leaf(ref x) => return f((*x)),
seq(ads, span) => match option_flatten_map(|x| a_d_map(x, f), *ads) {
None => return None,
Some(ts) => return Some(seq(@ts, span))
}
}
}
// Compose two selectors: run s1, then apply s2 underneath whatever depth
// structure s1 produced (via a_d_map).
fn compose_sels(s1: selector, s2: selector) -> selector {
fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
return match s1(m) {
None => None,
Some(ref matches) => a_d_map((*matches), s2)
}
}
return { |x| scomp(s1, s2, x) };
}
// The compiled form of one macro pattern: selectors for each syntax
// variable, plus selectors that merely verify literal structure.
type binders =
{real_binders: HashMap<ident, selector>,
literal_ast_matchers: DVec<selector>};
// The result of matching: each syntax variable mapped to the AST it bound.
type bindings = HashMap<ident, arb_depth<matchable>>;
fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
/* these three functions are the big moving parts */
/* create the selectors needed to bind and verify the pattern */
fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
let res: binders =
{real_binders: HashMap(),
literal_ast_matchers: DVec()};
//this oughta return binders instead, but macro args are a sequence of
//expressions, rather than a single expression
// Identity selector: the seed for the recursive traversal below.
fn trivial_selector(m: matchable) -> match_result {
return Some(leaf(m));
}
// p_t_s_rec walks the pattern and populates `res` by side effect.
p_t_s_rec(cx, match_expr(e), trivial_selector, res);
move res
}
/* use the selectors on the actual arguments to the macro to extract
bindings. Most of the work is done in p_t_s, which generates the
selectors. */
// Returns None if the invocation does not match this clause's pattern.
fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> {
let res = HashMap();
//need to do this first, to check vec lengths.
for b.literal_ast_matchers.each |sel| {
match (*sel)(match_expr(e)) { None => return None, _ => () }
}
let mut never_mind: bool = false;
for b.real_binders.each |key, val| {
match val(match_expr(e)) {
None => never_mind = true,
Some(ref mtc) => { res.insert(key, (*mtc)); }
}
};
//HACK: `ret` doesn't work in `for each`
if never_mind { return None; }
return Some(res);
}
/* use the bindings on the body to generate the expanded code */
// Fold over the clause body, substituting each bound syntax variable.
// `idx_path` tracks our position inside nested `...` repetitions so the
// transcribe_* callbacks can pick the right element from seq bindings.
fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
let idx_path: @mut ~[uint] = @mut ~[];
// Expanded nodes get fresh ids so they don't collide with the template's.
fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { return cx.next_id(); }
fn new_span(cx: ext_ctxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
}
let afp = default_ast_fold();
// Override only the fold hooks where substitution can occur; everything
// else falls through to the default fold (`.. *afp`).
let f_pre =
@{fold_ident: |x,y|transcribe_ident(cx, b, idx_path, x, y),
fold_path: |x,y|transcribe_path(cx, b, idx_path, x, y),
fold_expr: |x,y,z|
transcribe_expr(cx, b, idx_path, x, y, z, afp.fold_expr)
,
fold_ty: |x,y,z|
transcribe_type(cx, b, idx_path,
x, y, z, afp.fold_ty)
,
fold_block: |x,y,z|
transcribe_block(cx, b, idx_path, x, y, z, afp.fold_block)
,
map_exprs: |x,y|
transcribe_exprs(cx, b, idx_path, x, y)
,
new_id: |x|new_id(x, cx),
.. *afp};
let f = make_fold(f_pre);
let result = f.fold_expr(body);
return result;
}
/* helper: descend into a matcher */
// Walk down an arb_depth tree along `idx_path` (one index per enclosing
// `...` level). Stops early at a leaf, since leaves are duplicated
// across repetitions.
pure fn follow(m: arb_depth<matchable>, idx_path: &[uint]) ->
arb_depth<matchable> {
let mut res: arb_depth<matchable> = m;
for vec::each(idx_path) |idx| {
res = match res {
leaf(_) => return res,/* end of the line */
seq(new_ms, _) => new_ms[*idx]
}
}
return res;
}
// Like `follow`, but for transcription: a binding that is still a seq
// after following the full path means the template used a repeated
// variable outside of `...`, which is a user error.
fn follow_for_trans(cx: ext_ctxt, mmaybe: Option<arb_depth<matchable>>,
idx_path: @mut ~[uint]) -> Option<matchable> {
match mmaybe {
None => return None,
Some(ref m) => {
return match follow((*m), *idx_path) {
seq(_, sp) => {
cx.span_fatal(sp,
~"syntax matched under ... but not " +
~"used that way.")
}
leaf(ref m) => return Some((*m))
}
}
}
}
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
// Invoke `it` once for each distinct bound syntax variable that appears
// anywhere in `e`. Implemented by folding `e` and recording every ident
// that is a key of `b`.
fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
let idents = HashMap();
fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
idents: HashMap<ident, ()>) -> ident {
if b.contains_key(i) { idents.insert(i, ()); }
return i;
}
// using fold is a hack: we want visit, but it doesn't hit idents ) :
// solve this with macros
let f_pre =
@{fold_ident: |x,y|mark_ident(x, y, b, idents),
.. *default_ast_fold()};
let f = make_fold(f_pre);
f.fold_expr(e); // ignore result
for idents.each_key |x| { it(x); };
}
// Format the "mismatched repetition counts" diagnostic for two syntax
// variables used under the same `...`.
fn wrong_occurs(cx: ext_ctxt, l: ident, l_c: uint, r: ident, r_c: uint)
-> ~str {
fmt!("'%s' occurs %u times, but '%s' occurs %u times",
*cx.parse_sess().interner.get(l), l_c,
*cx.parse_sess().interner.get(r), r_c)
}
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
// Expand an expr sequence from the template: the parts before and after a
// `...` transcribe once each; the repeated part transcribes once per
// element of the matched sequence, with `idx_path` extended so nested
// lookups pick the right element each iteration.
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
recur: fn@(&&v: @expr) -> @expr,
exprs: ~[@expr]) -> ~[@expr] {
match elts_to_ell(cx, exprs) {
{pre: pre, rep: repeat_me_maybe, post: post} => {
let mut res = vec::map(pre, |x| recur(*x));
match repeat_me_maybe {
None => (),
Some(repeat_me) => {
let mut repeat: Option<{rep_count: uint, name: ident}> = None;
/* we need to walk over all the free vars in lockstep, except for
the leaves, which are just duplicated */
do free_vars(b, repeat_me) |fv| {
let fv_depth = b.get(fv);
let cur_pos = follow(fv_depth, *idx_path);
match cur_pos {
leaf(_) => (),
seq(ms, _) => {
// Every repeated variable must occur the same number of
// times; remember the first count and check the rest.
match repeat {
None => {
repeat = Some({rep_count: vec::len(*ms), name: fv});
}
Some({rep_count: old_len, name: old_name}) => {
let len = vec::len(*ms);
if old_len != len {
let msg = wrong_occurs(cx, fv, len,
old_name, old_len);
cx.span_fatal(repeat_me.span, msg);
}
}
}
}
}
};
match repeat {
None => {
cx.span_fatal(repeat_me.span,
~"'...' surrounds an expression without any" +
~" repeating syntax variables");
}
Some({rep_count: rc, _}) => {
/* Whew, we now know how how many times to repeat */
let mut idx: uint = 0u;
while idx < rc {
// Push the iteration index so transcription inside
// `repeat_me` selects the idx-th matched element.
idx_path.push(idx);
res.push(recur(repeat_me)); // whew!
idx_path.pop();
idx += 1u;
}
}
}
}
}
res = vec::append(res, vec::map(post, |x| recur(*x)));
return res;
}
}
}
// substitute, in a position that's required to be an ident
// An unbound ident passes through unchanged; a binding of any other kind
// is a fatal error.
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
&&i: ident, _fld: ast_fold) -> ident {
return match follow_for_trans(cx, b.find(i), idx_path) {
Some(match_ident(a_id)) => a_id.node,
Some(ref m) => match_error(cx, (*m), ~"an identifier"),
None => i
}
}
// Substitute in path position: a bound ident becomes a one-segment path,
// a bound path replaces it wholesale, anything else is an error.
fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
p: path, _fld: ast_fold) -> path {
// Don't substitute into qualified names.
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { return p; }
match follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
Some(match_ident(id)) => {
{span: id.span, global: false, idents: ~[id.node],
rp: None, types: ~[]}
}
Some(match_path(a_pth)) => *a_pth,
Some(ref m) => match_error(cx, (*m), ~"a path"),
None => p
}
}
// Substitute in expression position. A path expression that is really a
// bare bound ident can expand to an ident, a path, or a whole expression;
// anything else defers to the default fold (`orig`).
//
// Fix: the qualified-name guard used to read `(e, s);` — a discarded
// no-op statement — so the intended early exit never happened and control
// fell through to `b.find(p.idents[0])` even for multi-segment or
// type-parameterized paths. It now returns `(e, s)` unchanged, matching
// the analogous `return p;` guard in transcribe_path.
fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
e: ast::expr_, s: span, fld: ast_fold,
orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-> (ast::expr_, span)
{
return match e {
expr_path(p) => {
// Don't substitute into qualified names.
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
return (e, s);
}
match follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
Some(match_ident(id)) => {
// Rebuild a one-segment path expr carrying the binding's span.
(expr_path(@{span: id.span,
global: false,
idents: ~[id.node],
rp: None,
types: ~[]}), id.span)
}
Some(match_path(a_pth)) => (expr_path(a_pth), s),
Some(match_expr(a_exp)) => (a_exp.node, a_exp.span),
Some(ref m) => match_error(cx, (*m), ~"an expression"),
None => orig(e, s, fld)
}
}
_ => orig(e, s, fld)
}
}
// Substitute in type position: a path type that is a bare bound ident may
// expand to a bound type; otherwise fall through to the default fold.
fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
t: ast::ty_, s: span, fld: ast_fold,
orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
-> (ast::ty_, span)
{
return match t {
ast::ty_path(pth, _) => {
match path_to_ident(pth) {
Some(id) => {
match follow_for_trans(cx, b.find(id), idx_path) {
Some(match_ty(ty)) => (ty.node, ty.span),
Some(ref m) => match_error(cx, (*m), ~"a type"),
None => orig(t, s, fld)
}
}
None => orig(t, s, fld)
}
}
_ => orig(t, s, fld)
}
}
/* for parsing reasons, syntax variables bound to blocks must be used like
`{v}` */
// Substitute in block position: a block consisting solely of a bare ident
// expression (the `{v}` convention) may expand to a bound block.
fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
blk: blk_, s: span, fld: ast_fold,
orig: fn@(blk_, span, ast_fold) -> (blk_, span))
-> (blk_, span)
{
return match block_to_ident(blk) {
Some(id) => {
match follow_for_trans(cx, b.find(id), idx_path) {
Some(match_block(ref new_blk)) => {
((*new_blk).node, (*new_blk).span)
}
// possibly allow promotion of ident/path/expr to blocks?
Some(ref m) => match_error(cx, (*m), ~"a block"),
None => orig(blk, s, fld)
}
}
None => orig(blk, s, fld)
}
}
/* traverse the pattern, building instructions on how to bind the actual
argument. ps accumulates instructions on navigating the tree.*/
// Recursive pattern compiler: `s` is the selector that navigates from the
// macro argument to the position corresponding to the current pattern
// node; binders for variables and literal-structure checks accumulate
// into `b` by side effect.
fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
//it might be possible to traverse only exprs, not matchables
match m {
match_expr(e) => {
match e.node {
// A bare path in the pattern is (usually) a binding site.
expr_path(p_pth) => p_t_s_r_path(cx, p_pth, s, b),
// A vector pattern: handle optional `...` repetition.
expr_vec(p_elts, _) => {
match elts_to_ell(cx, p_elts) {
{pre: pre, rep: Some(repeat_me), post: post} => {
// With repetition, the fixed elements give a minimum length.
p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
b);
if vec::len(pre) > 0u {
p_t_s_r_actual_vector(cx, pre, true, s, b);
}
p_t_s_r_ellipses(cx, repeat_me, vec::len(pre), s, b);
if vec::len(post) > 0u {
cx.span_unimpl(e.span,
~"matching after `...` not yet supported");
}
}
{pre: pre, rep: None, post: post} => {
if post.len() > 0 {
cx.bug(~"elts_to_ell provided an invalid result");
}
// No repetition: exact length, element-by-element match.
p_t_s_r_length(cx, vec::len(pre), false, s, b);
p_t_s_r_actual_vector(cx, pre, false, s, b);
}
}
}
/* FIXME (#2251): handle embedded types and blocks, at least */
expr_mac(ref mac) => {
p_t_s_r_mac(cx, (*mac), s, b);
}
// Any other expression is a literal: record a selector that
// verifies the argument is this exact AST node.
_ => {
fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
match_result {
return match m {
match_expr(e) => {
if managed::ptr_eq(e, pat) {
// XXX: Is this right?
Some(leaf(match_exact))
} else {
None
}
}
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
b.literal_ast_matchers.push(|x| select(cx, x, e));
}
}
}
_ => cx.bug(~"undocumented invariant in p_t_s_rec")
}
}
/* make a match more precise */
// Refine a match_expr: a path expression narrows to match_ident (if it is
// a bare name) or match_path, so bindings carry the most specific kind.
fn specialize_match(m: matchable) -> matchable {
return match m {
match_expr(e) => {
match e.node {
expr_path(pth) => {
match path_to_ident(pth) {
Some(id) => match_ident(respan(pth.span, id)),
None => match_path(pth)
}
}
_ => m
}
}
_ => m
}
}
/* pattern_to_selectors helper functions */
// A bare path in the pattern becomes a real binder: whatever the selector
// reaches in the argument is captured under that name.
fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
match path_to_ident(p) {
Some(p_id) => {
fn select(cx: ext_ctxt, m: matchable) -> match_result {
return match m {
match_expr(*) => Some(leaf(specialize_match(m))),
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
if b.real_binders.contains_key(p_id) {
cx.span_fatal(p.span, ~"duplicate binding identifier");
}
b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x)));
}
None => ()
}
}
// Recognize the `{v}` convention: a block with no statements whose only
// expression is a bare ident yields that ident.
fn block_to_ident(blk: blk_) -> Option<ident> {
if vec::len(blk.stmts) != 0u { return None; }
return match blk.expr {
Some(expr) => match expr.node {
expr_path(pth) => path_to_ident(pth),
_ => None
},
None => None
}
}
// Handle a macro node inside a pattern. No destructuring of macro forms
// is supported; every case is rejected with a diagnostic.
fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) {
// (Helper kept for the unimplemented destructuring cases above; unused
// by the current rejection-only body.)
fn select_pt_1(cx: ext_ctxt, m: matchable,
fn_m: fn(ast::mac) -> match_result) -> match_result {
return match m {
match_expr(e) => match e.node {
expr_mac(ref mac) => fn_m((*mac)),
_ => None
},
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
fn no_des(cx: ext_ctxt, sp: span, syn: ~str) -> ! {
cx.span_fatal(sp, ~"destructuring " + syn + ~" is not yet supported");
}
match mac.node {
// `...` outside a vector pattern is meaningless here.
ast::mac_ellipsis => cx.span_fatal(mac.span, ~"misused `...`"),
ast::mac_invoc(_, _, _) => no_des(cx, mac.span, ~"macro calls"),
ast::mac_invoc_tt(_, _) => no_des(cx, mac.span, ~"macro calls"),
ast::mac_aq(_,_) => no_des(cx, mac.span, ~"antiquotes"),
ast::mac_var(_) => no_des(cx, mac.span, ~"antiquote variables")
}
}
// Compile the repeated part of a vector pattern: the selector slices the
// argument vector from `offset` onward into a seq, so every variable
// inside `repeat_me` binds once per remaining element.
fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
b: binders) {
fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
match_result {
return match m {
match_expr(e) => {
match e.node {
expr_vec(arg_elts, _) => {
let mut elts = ~[];
let mut idx = offset;
while idx < vec::len(arg_elts) {
elts.push(leaf(match_expr(arg_elts[idx])));
idx += 1u;
}
// using repeat_me.span is a little wacky, but the
// error we want to report is one in the macro def
Some(seq(@elts, repeat_me.span))
}
_ => None
}
}
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
p_t_s_rec(cx, match_expr(repeat_me),
compose_sels(s, |x| select(cx, repeat_me, offset, x)), b);
}
// Record a structural check on the argument vector's length: exact when
// `at_least` is false, a lower bound when true (i.e. pattern had `...`).
fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
b: binders) {
fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
match_result {
return match m {
match_expr(e) => {
match e.node {
expr_vec(arg_elts, _) => {
let actual_len = vec::len(arg_elts);
if at_least && actual_len >= len || actual_len == len {
// Length ok; bind nothing, just record success.
Some(leaf(match_exact))
} else { None }
}
_ => None
}
}
_ => None
}
}
b.literal_ast_matchers.push(
compose_sels(s, |x| len_select(cx, x, at_least, len)));
}
// Compile the fixed (non-repeated) elements of a vector pattern: each
// pattern element matches the argument element at the same index.
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
s: selector, b: binders) {
let mut idx: uint = 0u;
while idx < vec::len(elts) {
fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
return match m {
match_expr(e) => {
match e.node {
expr_vec(arg_elts, _) => {
Some(leaf(match_expr(arg_elts[idx])))
}
_ => None
}
}
_ => cx.bug(~"broken traversal in p_t_s_r")
}
}
// `copy idx` snapshots the loop counter into the closure.
p_t_s_rec(cx, match_expr(elts[idx]),
compose_sels(s, |x, copy idx| select(cx, x, idx)), b);
idx += 1u;
}
}
// Entry point for `#macro[...]`: parse each clause (a two-element vector
// of [macro invocation pattern, expansion body]), compile the patterns to
// selectors, and return a macro_def whose expander tries each clause in
// order at every invocation.
fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> base::macro_def {
let args = get_mac_args_no_max(cx, sp, arg, 0u, ~"macro");
let mut macro_name: Option<~str> = None;
let mut clauses: ~[@clause] = ~[];
for args.each |arg| {
match arg.node {
expr_vec(elts, _) => {
if vec::len(elts) != 2u {
cx.span_fatal((*arg).span,
~"extension clause must consist of ~[" +
~"macro invocation, expansion body]");
}
match elts[0u].node {
expr_mac(ref mac) => {
match (*mac).node {
mac_invoc(pth, invoc_arg, _) => {
// Every clause must invoke the same single-ident name;
// that name becomes the macro's name.
match path_to_ident(pth) {
Some(id) => {
let id_str = cx.str_of(id);
match macro_name {
None => macro_name = Some(id_str),
Some(ref other_id) => if id_str != (*other_id) {
cx.span_fatal(pth.span,
~"macro name must be " +
~"consistent");
}
}
},
None => cx.span_fatal(pth.span,
~"macro name must not be a path")
}
let arg = match invoc_arg {
Some(arg) => arg,
None => cx.span_fatal((*mac).span,
~"macro must have arguments")
};
// Compile this clause's pattern into selectors.
clauses.push(@{params: pattern_to_selectors(cx, arg),
body: elts[1u]});
// FIXME (#2251): check duplicates (or just simplify
// the macro arg situation)
}
_ => {
cx.span_bug((*mac).span, ~"undocumented invariant in \
add_extension");
}
}
}
_ => {
cx.span_fatal(elts[0u].span,
~"extension clause must" +
~" start with a macro invocation.");
}
}
}
_ => {
cx.span_fatal((*arg).span,
~"extension must be ~[clause, " + ~" ...]");
}
}
}
// Capture the compiled clauses in the expander closure.
let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses);
return {name:
match macro_name {
Some(ref id) => (*id),
None => cx.span_fatal(sp, ~"macro definition must have " +
~"at least one clause")
},
ext: normal({expander: ext, span: Some(arg.get().span)})};
// The runtime expander: first clause whose pattern binds the invocation
// argument wins; its body is transcribed with those bindings.
fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body,
clauses: ~[@clause]) -> @expr {
let arg = match arg {
Some(arg) => arg,
None => cx.span_fatal(sp, ~"macro must have arguments")
};
for clauses.each |c| {
match use_selectors_to_bind(c.params, arg) {
Some(bindings) => return transcribe(cx, bindings, c.body),
None => loop
}
}
cx.span_fatal(sp, ~"no clauses match macro invocation");
}
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@ -23,63 +23,58 @@ export expand_include_str;
export expand_include_bin;
/* line!(): expands to the current line number */
fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"line");
fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
base::check_zero_tts(cx, sp, tts, "line!");
let loc = cx.codemap().lookup_char_pos(sp.lo);
return mk_uint(cx, sp, loc.line);
base::mr_expr(mk_uint(cx, sp, loc.line))
}
/* col!(): expands to the current column number */
fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"col");
fn expand_col(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
base::check_zero_tts(cx, sp, tts, "col!");
let loc = cx.codemap().lookup_char_pos(sp.lo);
return mk_uint(cx, sp, loc.col.to_uint());
base::mr_expr(mk_uint(cx, sp, loc.col.to_uint()))
}
/* file!(): expands to the current filename */
/* The filemap (`loc.file`) contains a bunch more information we could spit
* out if we wanted. */
fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"file");
fn expand_file(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
base::check_zero_tts(cx, sp, tts, "file!");
let Loc { file: @FileMap { name: filename, _ }, _ } =
cx.codemap().lookup_char_pos(sp.lo);
return mk_uniq_str(cx, sp, filename);
base::mr_expr(mk_uniq_str(cx, sp, filename))
}
fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx, sp, arg, 1u, option::Some(1u), ~"stringify");
let s = pprust::expr_to_str(args[0], cx.parse_sess().interner);
return mk_uniq_str(cx, sp, s);
fn expand_stringify(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let s = pprust::tts_to_str(tts, cx.parse_sess().interner);
base::mr_expr(mk_uniq_str(cx, sp, s))
}
fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body)
-> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"file");
return mk_uniq_str(cx, sp,
str::connect(cx.mod_path().map(
|x| cx.str_of(*x)), ~"::"));
fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
base::check_zero_tts(cx, sp, tts, "module_path!");
base::mr_expr(mk_uniq_str(cx, sp,
str::connect(cx.mod_path().map(
|x| cx.str_of(*x)), ~"::")))
}
fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx, sp, arg, 1u, option::Some(1u), ~"include");
let file = expr_to_str(cx, args[0], ~"include_str! requires a string");
fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let file = get_single_str_from_tts(cx, sp, tts, "include!");
let p = parse::new_sub_parser_from_file(
cx.parse_sess(), cx.cfg(),
&res_rel_file(cx, sp, &Path(file)), sp);
return p.parse_expr();
base::mr_expr(p.parse_expr())
}
fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx,sp,arg,1u,option::Some(1u),~"include_str");
let file = expr_to_str(cx, args[0], ~"include_str! requires a string");
fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let file = get_single_str_from_tts(cx, sp, tts, "include_str!");
let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file)));
match res {
result::Ok(_) => { /* Continue. */ }
@ -88,21 +83,18 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
}
}
return mk_uniq_str(cx, sp, result::unwrap(res));
base::mr_expr(mk_uniq_str(cx, sp, result::unwrap(res)))
}
fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let args = get_mac_args(cx,sp,arg,1u,option::Some(1u),~"include_bin");
let file = expr_to_str(cx, args[0], ~"include_bin! requires a string");
fn expand_include_bin(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
-> base::mac_result {
let file = get_single_str_from_tts(cx, sp, tts, "include_bin!");
match io::read_whole_file(&res_rel_file(cx, sp, &Path(file))) {
result::Ok(src) => {
let u8_exprs = vec::map(src, |char| {
mk_u8(cx, sp, *char)
});
return mk_base_vec_e(cx, sp, u8_exprs);
base::mr_expr(mk_base_vec_e(cx, sp, u8_exprs))
}
result::Err(ref e) => {
cx.parse_sess().span_diagnostic.handler().fatal((*e))

View File

@ -120,14 +120,7 @@ fn fold_arg_(a: arg, fld: ast_fold) -> arg {
fn fold_mac_(m: mac, fld: ast_fold) -> mac {
return {node:
match m.node {
mac_invoc(pth, arg, body) => {
mac_invoc(fld.fold_path(pth),
option::map(&arg, |x| fld.fold_expr(*x)), body)
}
mac_invoc_tt(*) => m.node,
mac_ellipsis => mac_ellipsis,
mac_aq(_,_) => /* FIXME (#2543) */ copy m.node,
mac_var(_) => /* FIXME (#2543) */ copy m.node,
},
span: fld.new_span(m.span)};
}

View File

@ -12,16 +12,9 @@ use either::{Either, Left, Right};
use ast_util::spanned;
use common::*; //resolve bug?
export attr_or_ext;
export parser_attr;
// A type to distingush between the parsing of item attributes or syntax
// extensions, which both begin with token.POUND
type attr_or_ext = Option<Either<~[ast::attribute], @ast::expr>>;
trait parser_attr {
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
-> attr_or_ext;
fn parse_outer_attributes() -> ~[ast::attribute];
fn parse_attribute(style: ast::attr_style) -> ast::attribute;
fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
@ -35,34 +28,6 @@ trait parser_attr {
impl Parser: parser_attr {
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
-> attr_or_ext
{
let expect_item_next = vec::is_not_empty(first_item_attrs);
match self.token {
token::POUND => {
let lo = self.span.lo;
if self.look_ahead(1u) == token::LBRACKET {
self.bump();
let first_attr =
self.parse_attribute_naked(ast::attr_outer, lo);
return Some(Left(vec::append(~[first_attr],
self.parse_outer_attributes())));
} else if !(self.look_ahead(1u) == token::LT
|| self.look_ahead(1u) == token::LBRACKET
|| self.look_ahead(1u) == token::POUND
|| expect_item_next) {
self.bump();
return Some(Right(self.parse_syntax_ext_naked(lo)));
} else { return None; }
}
token::DOC_COMMENT(_) => {
return Some(Left(self.parse_outer_attributes()));
}
_ => return None
}
}
// Parse attributes that appear before an item
fn parse_outer_attributes() -> ~[ast::attribute] {
let mut attrs: ~[ast::attribute] = ~[];

View File

@ -515,11 +515,6 @@ fn next_token_inner(rdr: string_reader) -> token::Token {
bump(rdr);
return token::DOTDOT;
}
if rdr.curr == '.' && nextch(rdr) == '.' {
bump(rdr);
bump(rdr);
return token::ELLIPSIS;
}
return token::DOT;
}
'(' => { bump(rdr); return token::LPAREN; }

View File

@ -54,8 +54,8 @@ use ast::{_mod, add, arg, arm, attribute,
item_foreign_mod, item_impl, item_mac, item_mod, item_trait,
item_ty, lit, lit_, lit_bool, lit_float, lit_float_unsuffixed,
lit_int, lit_int_unsuffixed, lit_nil, lit_str, lit_uint, local,
m_const, m_imm, m_mutbl, mac_, mac_aq, mac_ellipsis, mac_invoc,
mac_invoc_tt, mac_var, matcher, match_nonterminal, match_seq,
m_const, m_imm, m_mutbl, mac_,
mac_invoc_tt, matcher, match_nonterminal, match_seq,
match_tok, method, mode, module_ns, mt, mul, mutability,
named_field, neg, noreturn, not, pat, pat_box, pat_enum,
pat_ident, pat_lit, pat_range, pat_rec, pat_region, pat_struct,
@ -81,13 +81,6 @@ use ast::{_mod, add, arg, arm, attribute,
export Parser;
// FIXME (#3726): #ast expects to find this here but it's actually
// defined in `parse` Fixing this will be easier when we have export
// decls on individual items -- then parse can export this publicly, and
// everything else crate-visibly.
use parse::parse_from_source_str;
export parse_from_source_str;
export item_or_view_item, iovi_none, iovi_view_item, iovi_item;
enum restriction {
@ -517,15 +510,6 @@ impl Parser {
let lo = self.span.lo;
match self.maybe_parse_dollar_mac() {
Some(ref e) => {
return @{id: self.get_id(),
node: ty_mac(spanned(lo, self.span.hi, (*e))),
span: mk_sp(lo, self.span.hi)};
}
None => ()
}
let t = if self.token == token::LPAREN {
self.bump();
if self.token == token::RPAREN {
@ -737,32 +721,6 @@ impl Parser {
}
}
fn maybe_parse_dollar_mac() -> Option<mac_> {
match copy self.token {
token::DOLLAR => {
let lo = self.span.lo;
self.bump();
match copy self.token {
token::LIT_INT_UNSUFFIXED(num) => {
self.bump();
Some(mac_var(num as uint))
}
token::LPAREN => {
self.bump();
let e = self.parse_expr();
self.expect(token::RPAREN);
let hi = self.last_span.hi;
Some(mac_aq(mk_sp(lo,hi), e))
}
_ => {
self.fatal(~"expected `(` or unsuffixed integer literal");
}
}
}
_ => None
}
}
fn maybe_parse_fixed_vstore_with_star() -> Option<uint> {
if self.eat(token::BINOP(token::STAR)) {
match copy self.token {
@ -935,11 +893,6 @@ impl Parser {
let mut ex: expr_;
match self.maybe_parse_dollar_mac() {
Some(ref x) => return self.mk_mac_expr(lo, self.span.hi, (*x)),
_ => ()
}
if self.token == token::LPAREN {
self.bump();
if self.token == token::RPAREN {
@ -1029,13 +982,6 @@ impl Parser {
}
}
hi = self.span.hi;
} else if self.token == token::ELLIPSIS {
self.bump();
return self.mk_mac_expr(lo, self.span.hi, mac_ellipsis);
} else if self.token == token::POUND {
let ex_ext = self.parse_syntax_ext();
hi = ex_ext.span.hi;
ex = ex_ext.node;
} else if self.eat_keyword(~"fail") {
if can_begin_expr(self.token) {
let e = self.parse_expr();
@ -1148,54 +1094,6 @@ impl Parser {
return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
}
fn parse_syntax_ext() -> @expr {
let lo = self.span.lo;
self.expect(token::POUND);
return self.parse_syntax_ext_naked(lo);
}
fn parse_syntax_ext_naked(lo: BytePos) -> @expr {
match self.token {
token::IDENT(_, _) => (),
_ => self.fatal(~"expected a syntax expander name")
}
let pth = self.parse_path_without_tps();
//temporary for a backwards-compatible cycle:
let sep = seq_sep_trailing_disallowed(token::COMMA);
let mut e = None;
if (self.token == token::LPAREN || self.token == token::LBRACKET) {
let lo = self.span.lo;
let es =
if self.token == token::LPAREN {
self.parse_unspanned_seq(token::LPAREN, token::RPAREN,
sep, |p| p.parse_expr())
} else {
self.parse_unspanned_seq(token::LBRACKET, token::RBRACKET,
sep, |p| p.parse_expr())
};
let hi = self.span.hi;
e = Some(self.mk_expr(lo, hi, expr_vec(es, m_imm)));
}
let mut b = None;
if self.token == token::LBRACE {
self.bump();
let lo = self.span.lo;
let mut depth = 1u;
while (depth > 0u) {
match (self.token) {
token::LBRACE => depth += 1u,
token::RBRACE => depth -= 1u,
token::EOF => self.fatal(~"unexpected EOF in macro body"),
_ => ()
}
self.bump();
}
let hi = self.last_span.lo;
b = Some({span: mk_sp(lo,hi)});
}
return self.mk_mac_expr(lo, self.span.hi, mac_invoc(pth, e, b));
}
fn parse_dot_or_call_expr() -> @expr {
let b = self.parse_bottom_expr();
self.parse_dot_or_call_expr_with(b)
@ -2260,17 +2158,8 @@ impl Parser {
}
} else {
let mut item_attrs;
match self.parse_outer_attrs_or_ext(first_item_attrs) {
None => item_attrs = ~[],
Some(Left(ref attrs)) => item_attrs = (*attrs),
Some(Right(ext)) => {
return @spanned(lo, ext.span.hi,
stmt_expr(ext, self.get_id()));
}
}
let item_attrs = vec::append(first_item_attrs, item_attrs);
let item_attrs = vec::append(first_item_attrs,
self.parse_outer_attributes());
match self.parse_item_or_view_item(item_attrs,
true, false, false) {

View File

@ -49,7 +49,6 @@ enum Token {
AT,
DOT,
DOTDOT,
ELLIPSIS,
COMMA,
SEMI,
COLON,
@ -137,7 +136,6 @@ fn to_str(in: @ident_interner, t: Token) -> ~str {
AT => ~"@",
DOT => ~".",
DOTDOT => ~"..",
ELLIPSIS => ~"...",
COMMA => ~",",
SEMI => ~";",
COLON => ~":",
@ -578,12 +576,6 @@ impl Token : cmp::Eq {
_ => false
}
}
ELLIPSIS => {
match (*other) {
ELLIPSIS => true,
_ => false
}
}
COMMA => {
match (*other) {
COMMA => true,

View File

@ -154,8 +154,7 @@ fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
mut top: 0,
mut bottom: 0,
print_stack: DVec(),
mut pending_indentation: 0,
mut token_tree_last_was_ident: false})
mut pending_indentation: 0 })
}
@ -261,7 +260,6 @@ type printer_ = {
print_stack: DVec<print_stack_elt>,
// buffered indentation to avoid writing trailing whitespace
mut pending_indentation: int,
mut token_tree_last_was_ident: bool
};
enum printer {

View File

@ -118,6 +118,10 @@ fn tt_to_str(tt: ast::token_tree, intr: @ident_interner) -> ~str {
to_str(tt, print_tt, intr)
}
// Pretty-print a slice of token trees to a string via print_tts.
fn tts_to_str(tts: &[ast::token_tree], intr: @ident_interner) -> ~str {
to_str(tts, print_tts, intr)
}
fn stmt_to_str(s: ast::stmt, intr: @ident_interner) -> ~str {
to_str(s, print_stmt, intr)
}
@ -584,15 +588,10 @@ fn print_item(s: ps, &&item: @ast::item) {
print_ident(s, item.ident);
cbox(s, indent_unit);
popen(s);
for (*tts).each |tt| {
print_tt(s, *tt);
}
print_tts(s, *tts);
pclose(s);
end(s);
}
ast::item_mac(_) => {
fail ~"invalid item-position syntax bit"
}
}
(s.ann.post)(ann_node);
}
@ -739,17 +738,9 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param],
/// expression arguments as expressions). It can be done! I think.
fn print_tt(s: ps, tt: ast::token_tree) {
match tt {
ast::tt_delim(ref tts) =>
for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); },
ast::tt_delim(ref tts) => print_tts(s, *tts),
ast::tt_tok(_, ref tk) => {
match (*tk) {
parse::token::IDENT(*) => { // don't let idents run together
if s.s.token_tree_last_was_ident { word(s.s, ~" ") }
s.s.token_tree_last_was_ident = true;
}
_ => { s.s.token_tree_last_was_ident = false; }
}
word(s.s, parse::token::to_str(s.intr, (*tk)));
word(s.s, parse::token::to_str(s.intr, (*tk)));
}
ast::tt_seq(_, ref tts, ref sep, zerok) => {
word(s.s, ~"$(");
@ -760,16 +751,25 @@ fn print_tt(s: ps, tt: ast::token_tree) {
None => ()
}
word(s.s, if zerok { ~"*" } else { ~"+" });
s.s.token_tree_last_was_ident = false;
}
ast::tt_nonterminal(_, name) => {
word(s.s, ~"$");
print_ident(s, name);
s.s.token_tree_last_was_ident = true;
}
}
}
// Print a sequence of token trees inside one indentation box, separated
// by single breakable spaces (no separator before the first tree).
fn print_tts(s: ps, &&tts: &[ast::token_tree]) {
ibox(s, 0);
for tts.eachi |i, tt| {
if i != 0 {
space(s.s);
}
print_tt(s, *tt);
}
end(s);
}
fn print_variant(s: ps, v: ast::variant) {
print_visibility(s, v.node.vis);
match v.node.kind {
@ -1000,26 +1000,13 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
fn print_mac(s: ps, m: ast::mac) {
match m.node {
ast::mac_invoc(path, arg, _body) => {
word(s.s, ~"#");
print_path(s, path, false);
match arg {
Some(@{node: ast::expr_vec(_, _), _}) => (),
_ => word(s.s, ~" ")
}
arg.iter(|a| print_expr(s, *a));
// FIXME: extension 'body' (#2339)
}
ast::mac_invoc_tt(pth, ref tts) => {
print_path(s, pth, false);
word(s.s, ~"!");
popen(s);
for (*tts).each() |tt| { print_tt(s, *tt); }
print_tts(s, *tts);
pclose(s);
}
ast::mac_ellipsis => word(s.s, ~"..."),
ast::mac_var(v) => word(s.s, fmt!("$%u", v)),
_ => { /* fixme */ }
}
}

View File

@ -75,9 +75,6 @@ mod ext {
#[legacy_exports]
#[path = "ext/expand.rs"]
mod expand;
#[legacy_exports]
#[path = "ext/qquote.rs"]
mod qquote;
#[path = "ext/quote.rs"]
mod quote;
@ -102,9 +99,6 @@ mod ext {
}
#[legacy_exports]
#[path = "ext/simplext.rs"]
mod simplext;
#[legacy_exports]
#[path = "ext/fmt.rs"]
mod fmt;
@ -115,9 +109,6 @@ mod ext {
#[path = "ext/concat_idents.rs"]
mod concat_idents;
#[legacy_exports]
#[path = "ext/ident_to_str.rs"]
mod ident_to_str;
#[legacy_exports]
#[path = "ext/log_syntax.rs"]
mod log_syntax;
#[legacy_exports]

View File

@ -379,15 +379,8 @@ fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
for exprs.each |ex| { (v.visit_expr)(*ex, e, v); }
}
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
match m.node {
ast::mac_invoc(_, arg, _) => {
option::map(&arg, |arg| (v.visit_expr)(*arg, e, v)); }
ast::mac_invoc_tt(*) => { /* no user-serviceable parts inside */ }
ast::mac_ellipsis => (),
ast::mac_aq(*) => { /* FIXME: maybe visit (Issue #2340) */ }
ast::mac_var(_) => ()
}
fn visit_mac<E>(_m: mac, _e: E, _v: vt<E>) {
/* no user-serviceable parts inside */
}
fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {

View File

@ -1,15 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:expected a syntax expander name
fn main() {
#();
}

View File

@ -8,6 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: env! takes between 1 and 1 arguments
// error-pattern: env! takes 1 argument
fn main() { env!(); }

View File

@ -8,6 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: env! takes between 1 and 1 arguments
// error-pattern: env! takes 1 argument
fn main() { env!("one", "two"); }

View File

@ -8,6 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:fmt! needs at least 1 arguments
// error-pattern:fmt! takes at least 1 argument
fn main() { fmt!(); }

View File

@ -1,17 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regresion test for issue #1448 and #1386
fn main() {
#macro[[#apply[f, [x, ...]], f(x, ...)]];
fn add(a: int, b: int) -> int { return a + b; }
assert (apply!(add, [y, 15]) == 16); //~ ERROR unresolved name: y
}

View File

@ -1,20 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//error-pattern:is an expr, expected a path
fn main() {
#macro[[#mylambda[x, body],
{
fn f(x: int) -> int { return body }
f
}]];
assert (mylambda!(y * 1, y * 2)(8) == 16);
}

View File

@ -1,18 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//error-pattern:no clauses match
fn main() {
#macro[[#trivial[], 1 * 2 * 4 * 2 * 1]];
assert (trivial!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) ==
16);
}

View File

@ -21,36 +21,44 @@ use syntax::codemap;
use syntax::parse;
use syntax::print::*;
fn new_parse_sess() -> parse::parse_sess {
fail;
}
trait fake_ext_ctxt {
fn session() -> fake_session;
fn cfg() -> ast::crate_cfg;
fn parse_sess() -> parse::parse_sess;
fn call_site() -> span;
fn ident_of(st: ~str) -> ast::ident;
}
type fake_options = {cfg: ast::crate_cfg};
type fake_session = {opts: @fake_options,
parse_sess: parse::parse_sess};
type fake_session = parse::parse_sess;
impl fake_session: fake_ext_ctxt {
fn session() -> fake_session {self}
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_info: None
}
}
fn ident_of(st: ~str) -> ast::ident {
self.interner.intern(@st)
}
}
fn mk_ctxt() -> fake_ext_ctxt {
let opts : fake_options = {cfg: ~[]};
{opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt
parse::new_parse_sess(None) as fake_ext_ctxt
}
fn main() {
let ext_cx = mk_ctxt();
let abc = #ast{23};
let abc = quote_expr!(23);
check_pp(abc, pprust::print_expr, "23");
let expr3 = #ast{2 - $(abcd) + 7}; //~ ERROR unresolved name: abcd
let expr3 = quote_expr!(2 - $abcd + 7); //~ ERROR unresolved name: abcd
check_pp(expr3, pprust::print_expr, "2 - 23 + 7");
}

View File

@ -8,9 +8,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test Can't use syntax crate here
extern mod std;
use syntax;
extern mod syntax;
use std::io::*;
@ -20,33 +21,39 @@ use syntax::codemap;
use syntax::parse::parser;
use syntax::print::*;
fn new_parse_sess() -> parser::parse_sess {
fail;
}
trait fake_ext_ctxt {
fn session() -> fake_session;
fn cfg() -> ast::crate_cfg;
fn parse_sess() -> parse::parse_sess;
fn call_site() -> span;
fn ident_of(st: ~str) -> ast::ident;
}
type fake_options = {cfg: ast::crate_cfg};
type fake_session = {opts: @fake_options,
parse_sess: parser::parse_sess};
type fake_session = parse::parse_sess;
impl fake_session: fake_ext_ctxt {
fn session() -> fake_session {self}
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_info: None
}
}
fn ident_of(st: ~str) -> ast::ident {
self.interner.intern(@st)
}
}
fn mk_ctxt() -> fake_ext_ctxt {
let opts : fake_options = {cfg: ~[]};
{opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt
parse::new_parse_sess(None) as fake_ext_ctxt
}
fn main() {
let ext_cx = mk_ctxt();
let stmt = #ast[stmt]{let x int = 20;}; //~ ERROR expected end-of-string
let stmt = quote_stmt!(let x int = 20;); //~ ERROR expected end-of-string
check_pp(*stmt, pprust::print_stmt, "");
}

View File

@ -55,8 +55,8 @@ fn mk_ctxt() -> fake_ext_ctxt {
fn main() {
let ext_cx = mk_ctxt();
let s = #ast[expr]{__s};
let e = #ast[expr]{__e};
let f = #ast[expr]{$(s).foo {|__e| $(e)}};
let s = quote_expr!(__s);
let e = quote_expr!(__e);
let f = quote_expr!($s.foo {|__e| $e});
log(error, pprust::expr_to_str(f));
}

View File

@ -20,12 +20,16 @@ use io::*;
use syntax::diagnostic;
use syntax::ast;
use syntax::codemap;
use syntax::codemap::span;
use syntax::parse;
use syntax::print::*;
trait fake_ext_ctxt {
fn cfg() -> ast::crate_cfg;
fn parse_sess() -> parse::parse_sess;
fn call_site() -> span;
fn ident_of(st: ~str) -> ast::ident;
}
type fake_session = parse::parse_sess;
@ -33,66 +37,41 @@ type fake_session = parse::parse_sess;
impl fake_session: fake_ext_ctxt {
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_info: None
}
}
fn ident_of(st: ~str) -> ast::ident {
self.interner.intern(@copy st)
}
}
fn mk_ctxt() -> fake_ext_ctxt {
parse::new_parse_sess(None) as fake_ext_ctxt
}
fn main() {
let ext_cx = mk_ctxt();
let abc = #ast{23};
let abc = quote_expr!(23);
check_pp(ext_cx, abc, pprust::print_expr, ~"23");
let expr3 = #ast{2 - $(abc) + 7};
check_pp(ext_cx, expr3, pprust::print_expr, ~"2 - 23 + 7");
let expr4 = #ast{2 - $(#ast{3}) + 9};
check_pp(ext_cx, expr4, pprust::print_expr, ~"2 - 3 + 9");
let ty = #ast[ty]{int};
let ty = quote_ty!(int);
check_pp(ext_cx, ty, pprust::print_type, ~"int");
let ty2 = #ast[ty]{option<$(ty)>};
check_pp(ext_cx, ty2, pprust::print_type, ~"option<int>");
let item = #ast[item]{const x : int = 10;};
let item = quote_item!(const x : int = 10;).get();
check_pp(ext_cx, item, pprust::print_item, ~"const x: int = 10;");
let item2: @ast::item = #ast[item]{const x : int = $(abc);};
check_pp(ext_cx, item2, pprust::print_item, ~"const x: int = 23;");
let stmt = #ast[stmt]{let x = 20;};
let stmt = quote_stmt!(let x = 20;);
check_pp(ext_cx, *stmt, pprust::print_stmt, ~"let x = 20;");
let stmt2 = #ast[stmt]{let x : $(ty) = $(abc);};
check_pp(ext_cx, *stmt2, pprust::print_stmt, ~"let x: int = 23;");
let pat = #ast[pat]{some(_)};
let pat = quote_pat!(some(_));
check_pp(ext_cx, pat, pprust::print_refutable_pat, ~"some(_)");
// issue #1785
let x = #ast{1};
let test1 = #ast{1+$(x)};
check_pp(ext_cx, test1, pprust::print_expr, ~"1 + 1");
let test2 = #ast{$(x)+1};
check_pp(ext_cx, test2, pprust::print_expr, ~"1 + 1");
let y = #ast{2};
let test3 = #ast{$(x) + $(y)};
check_pp(ext_cx, test3, pprust::print_expr, ~"1 + 2");
let crate = #ast[crate] { fn a() { } };
check_pp(ext_cx, crate, pprust::print_crate_, ~"fn a() { }\n");
// issue #1926
let s = #ast[expr]{__s};
let e = #ast[expr]{__e};
let call = #ast[expr]{$(s).foo(|__e| $(e) )};
check_pp(ext_cx, call, pprust::print_expr, ~"__s.foo(|__e| __e)")
}
fn check_pp<T>(cx: fake_ext_ctxt,

View File

@ -16,7 +16,7 @@ struct cat {
impl cat: Drop {
#[cat_dropper]
fn finalize(&self) { error!("%s landed on hir feet",self.name); }
fn finalize(&self) { error!("%s landed on hir feet" , self . name); }
}

View File

@ -1,22 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-pretty - token trees can't pretty print
fn main() {
#macro[[#trivial[], 1 * 2 * 4 * 2 * 1]];
assert (trivial!() == 16);
macro_rules! trivial_tt(
() => {1*2*4*2*1}
)
assert(trivial_tt!() == 16);
}

View File

@ -1,24 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
#macro[[#apply[f, [x, ...]], f(x, ...)]];
macro_rules! apply_tt(
($f:expr, ($($x:expr),*)) => {$f($($x),*)}
)
fn add(a: int, b: int) -> int { return a + b; }
assert(apply!(add, [1, 15]) == 16);
assert(apply!(add, [1, 15]) == 16);
assert(apply_tt!(add, (1, 15)) == 16);
}

View File

@ -1,60 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
// I can't for the life of me manage to untangle all of the brackets
// in this test, so I am xfailing it...
fn main() {
#macro[[#zip_or_unzip[[x, ...], [y, ...]], [[x, y], ...]],
[#zip_or_unzip[[xx, yy], ...], [[xx, ...], [yy, ...]]]];
assert (zip_or_unzip!([1, 2, 3, 4], [5, 6, 7, 8]) ==
[[1, 5], [2, 6], [3, 7], [4, 8]]);
assert (zip_or_unzip!([1, 5], [2, 6], [3, 7], [4, 8]) ==
[[1, 2, 3, 4], [5, 6, 7, 8]]);
#macro[[#nested[[[x, ...], ...], [[y, ...], ...]], [[[x, y], ...], ...]]];
assert (nested!([[1, 2, 3, 4, 5], [7, 8, 9, 10, 11, 12]],
[[-1, -2, -3, -4, -5], [-7, -8, -9, -10, -11, -12]]) ==
[[[1, -1], [2, -2], [3, -3], [4, -4], [5, -5]],
[[7, -7], [8, -8], [9, -9], [10, -10], [11, -11],
[12, -12]]]);
#macro[[#dup[y, [x, ...]], [[y, x], ...]]];
assert (dup!(1, [1, 2, 3, 4]) == [[1, 1], [1, 2], [1, 3], [1, 4]]);
#macro[[#lambda[x, #<t>, body, #<s>],
{
fn result(x: t) -> s { return body }
result
}]];
assert (lambda!(i, #<uint>, i + 4u, #<uint>)(12u) == 16u);
#macro[[#sum[x, xs, ...], x + #sum[xs, ...]], [#sum[], 0]];
assert (sum!(1, 2, 3, 4) == 10);
#macro[[#transcr_mixed[a, as, ...], #sum[6, as, ...] * a]];
assert (transcr_mixed!(10, 5, 4, 3, 2, 1) == 210);
#macro[[#surround[pre, [xs, ...], post], [pre, xs, ..., post]]];
assert (surround!(1, [2, 3, 4], 5) == [1, 2, 3, 4, 5]);
}

View File

@ -1,21 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-pretty - token trees can't pretty print
fn main() {
#macro[[#m1[a], a * 4]];
assert (m1!(2) == 8);
macro_rules! m1tt (
($a:expr) => {$a*4}
);
assert(m1tt!(2) == 8);
}

View File

@ -13,6 +13,6 @@ fn main() {
let asdf_fdsa = ~"<.<";
assert (concat_idents!(asd, f_f, dsa) == ~"<.<");
assert (ident_to_str!(use_mention_distinction) ==
assert (stringify!(use_mention_distinction) ==
~"use_mention_distinction");
}

View File

@ -1,7 +1,7 @@
/* this is for run-pass/syntax-extension-source-utils.rs */
{
assert(#file[].ends_with("includeme.fragment"));
assert(#line[] == 5u);
#fmt["victory robot %u", #line[]]
assert(file!().ends_with("includeme.fragment"));
assert(line!() == 5u);
fmt!("victory robot %u", line!())
}

View File

@ -24,7 +24,7 @@ fn main() {
assert(line!() == 24);
assert(col!() == 11);
assert(file!().ends_with(~"syntax-extension-source-utils.rs"));
assert(stringify!((2*3) + 5) == ~"(2 * 3) + 5");
assert(stringify!((2*3) + 5) == ~"( 2 * 3 ) + 5");
assert(include!("syntax-extension-source-utils-files/includeme.fragment")
== ~"victory robot 6");