syntax: remove most code handling old-style syntax extensions.
parent 9cced55b93
commit e24ae85025
@@ -842,13 +842,7 @@ type mac_body = Option<mac_body_>;
#[auto_serialize]
#[auto_deserialize]
enum mac_ {
    mac_invoc(@path, mac_arg, mac_body), // old macro-invocation
    mac_invoc_tt(@path,~[token_tree]), // new macro-invocation
    mac_ellipsis, // old pattern-match (obsolete)

    // the span is used by the quoter/anti-quoter ...
    mac_aq(span /* span of quote */, @expr), // anti-quote
    mac_var(uint)
}

type lit = spanned<lit_>;
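For orientation only, not part of the patch itself: the two invocation forms these variants represent, taken from the deleted test fragment later in this commit. The old pound form parses to mac_invoc; the bang form parses to mac_invoc_tt, the only case kept after this change.

    #fmt["victory robot %u", #line[]]      // old-style syntax extension -> mac_invoc (removed)
    fmt!("victory robot %u", line!())      // new-style token-tree macro -> mac_invoc_tt (kept)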
@@ -16,16 +16,16 @@ use ast_util::dummy_sp;

// obsolete old-style #macro code:
//
// syntax_expander, normal, macro_defining, macro_definer,
// builtin
// syntax_expander, normal, builtin
//
// new-style macro! tt code:
//
// syntax_expander_tt, syntax_expander_tt_item, mac_result,
// normal_tt, item_tt
//
// also note that ast::mac has way too many cases and can probably
// be trimmed down substantially.
// also note that ast::mac used to have a bunch of extraneous cases and
// is now probably a redundant AST node, can be merged with
// ast::mac_invoc_tt.

// second argument is the span to blame for general argument problems
type syntax_expander_ =
@@ -35,10 +35,6 @@ type syntax_expander = {expander: syntax_expander_, span: Option<span>};

type macro_def = {name: ~str, ext: syntax_extension};

// macro_definer is obsolete, remove when #old_macros go away.
type macro_definer =
    fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;

type item_decorator =
    fn@(ext_ctxt, span, ast::meta_item, ~[@ast::item]) -> ~[@ast::item];

@@ -63,9 +59,6 @@ enum syntax_extension {
    // normal() is obsolete, remove when #old_macros go away.
    normal(syntax_expander),

    // macro_defining() is obsolete, remove when #old_macros go away.
    macro_defining(macro_definer),

    // #[auto_serialize] and such. will probably survive death of #old_macros
    item_decorator(item_decorator),

@@ -89,8 +82,6 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> {
        item_tt({expander: f, span: None})
    }
    let syntax_expanders = HashMap();
    syntax_expanders.insert(~"macro",
                            macro_defining(ext::simplext::add_new_extension));
    syntax_expanders.insert(~"macro_rules",
                            builtin_item_tt(
                                ext::tt::macro_rules::add_new_extension));
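As context, not part of the patch itself: the "macro" and "macro_rules" table entries above correspond to the two definition syntaxes exercised by the run-pass tests deleted later in this commit. The old #macro form is routed through macro_defining()/ext::simplext (removed here); the token-tree form is routed through builtin_item_tt and ext::tt::macro_rules (kept). Roughly:

    // old-style definition, handled by macro_defining() (removed by this commit)
    #macro[[#apply[f, [x, ...]], f(x, ...)]];

    // new-style definition, handled by builtin_item_tt(ext::tt::macro_rules::add_new_extension)
    macro_rules! apply_tt(
        ($f:expr, ($($x:expr),*)) => {$f($($x),*)}
    )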
@@ -10,7 +10,7 @@

use std::map::HashMap;

use ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
use ast::{crate, expr_, expr_mac, mac_invoc_tt,
          tt_delim, tt_tok, item_mac, stmt_, stmt_mac, stmt_expr, stmt_semi};
use fold::*;
use ext::base::*;
@ -31,51 +31,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
|
||||
expr_mac(ref mac) => {
|
||||
|
||||
match (*mac).node {
|
||||
// Old-style macros. For compatibility, will erase this whole
|
||||
// block once we've transitioned.
|
||||
mac_invoc(pth, args, body) => {
|
||||
assert (vec::len(pth.idents) > 0u);
|
||||
/* using idents and token::special_idents would make the
|
||||
the macro names be hygienic */
|
||||
let extname = cx.parse_sess().interner.get(pth.idents[0]);
|
||||
match exts.find(*extname) {
|
||||
None => {
|
||||
cx.span_fatal(pth.span,
|
||||
fmt!("macro undefined: '%s'", *extname))
|
||||
}
|
||||
Some(item_decorator(_)) => {
|
||||
cx.span_fatal(
|
||||
pth.span,
|
||||
fmt!("%s can only be used as a decorator", *extname));
|
||||
}
|
||||
Some(normal({expander: exp, span: exp_sp})) => {
|
||||
|
||||
cx.bt_push(ExpandedFrom({call_site: s,
|
||||
callie: {name: *extname, span: exp_sp}}));
|
||||
let expanded = exp(cx, (*mac).span, args, body);
|
||||
|
||||
//keep going, outside-in
|
||||
let fully_expanded = fld.fold_expr(expanded).node;
|
||||
cx.bt_pop();
|
||||
|
||||
(fully_expanded, s)
|
||||
}
|
||||
Some(macro_defining(ext)) => {
|
||||
let named_extension = ext(cx, (*mac).span, args, body);
|
||||
exts.insert(named_extension.name, named_extension.ext);
|
||||
(ast::expr_rec(~[], None), s)
|
||||
}
|
||||
Some(normal_tt(_)) => {
|
||||
cx.span_fatal(pth.span,
|
||||
fmt!("this tt-style macro should be \
|
||||
invoked '%s!(...)'", *extname))
|
||||
}
|
||||
Some(item_tt(*)) => {
|
||||
cx.span_fatal(pth.span,
|
||||
~"cannot use item macros in this context");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Token-tree macros, these will be the only case when we're
|
||||
// finished transitioning.
|
||||
@ -130,7 +85,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
|
||||
|
||||
}
|
||||
}
|
||||
_ => cx.span_bug((*mac).span, ~"naked syntactic bit")
|
||||
}
|
||||
}
|
||||
_ => orig(e, s, fld)
|
||||
@ -165,8 +119,8 @@ fn expand_mod_items(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
|
||||
ast::meta_list(ref n, _) => (*n)
|
||||
};
|
||||
match exts.find(mname) {
|
||||
None | Some(normal(_)) | Some(macro_defining(_))
|
||||
| Some(normal_tt(_)) | Some(item_tt(*)) => items,
|
||||
None | Some(normal(_))
|
||||
| Some(normal_tt(_)) | Some(item_tt(*)) => items,
|
||||
Some(item_decorator(dec_fn)) => {
|
||||
cx.bt_push(ExpandedFrom({call_site: attr.span,
|
||||
callie: {name: copy mname,
|
||||
@@ -209,36 +163,16 @@ fn expand_item(exts: HashMap<~str, syntax_extension>,
    }
}

// avoid excess indentation when a series of nested `match`es
// has only one "good" outcome
macro_rules! biased_match (
    ( ($e :expr) ~ ($p :pat) else $err :stmt ;
      $( ($e_cdr:expr) ~ ($p_cdr:pat) else $err_cdr:stmt ; )*
      => $body:expr
    ) => (
        match $e {
            $p => {
                biased_match!($( ($e_cdr) ~ ($p_cdr) else $err_cdr ; )*
                              => $body)
            }
            _ => { $err }
        }
    );
    ( => $body:expr ) => ( $body )
)


// Support for item-position macro invocations, exactly the same
// logic as for expression-position macro invocations.
fn expand_item_mac(exts: HashMap<~str, syntax_extension>,
                   cx: ext_ctxt, &&it: @ast::item,
                   fld: ast_fold) -> Option<@ast::item> {
    let (pth, tts) = biased_match!(
        (it.node) ~ (item_mac({node: mac_invoc_tt(pth, ref tts), _})) else {
            cx.span_bug(it.span, ~"invalid item macro invocation")
        };
        => (pth, (*tts))
    );

    let (pth, tts) = match it.node {
        item_mac({node: mac_invoc_tt(pth, ref tts), _}) => (pth, (*tts)),
        _ => cx.span_bug(it.span, ~"invalid item macro invocation")
    };

    let extname = cx.parse_sess().interner.get(pth.idents[0]);
    let expanded = match exts.find(*extname) {
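A note on the hunk above, not part of the patch itself: the biased_match! invocation in expand_item_mac is meant to expand to essentially the nested match it replaces. A hand-expanded sketch, following the macro definition introduced above:

    let (pth, tts) = match it.node {
        item_mac({node: mac_invoc_tt(pth, ref tts), _}) => {
            // no further clauses remain, so the recursive biased_match! is just the body
            (pth, (*tts))
        }
        _ => { cx.span_bug(it.span, ~"invalid item macro invocation") }
    };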
@ -293,12 +227,15 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt,
|
||||
orig: fn@(&&s: stmt_, span, ast_fold) -> (stmt_, span))
|
||||
-> (stmt_, span)
|
||||
{
|
||||
let (mac, pth, tts, semi) = biased_match! (
|
||||
(s) ~ (stmt_mac(ref mac, semi)) else return orig(s, sp, fld);
|
||||
((*mac).node) ~ (mac_invoc_tt(pth, ref tts)) else {
|
||||
cx.span_bug((*mac).span, ~"naked syntactic bit")
|
||||
};
|
||||
=> ((*mac), pth, (*tts), semi));
|
||||
|
||||
let (mac, pth, tts, semi) = match s {
|
||||
stmt_mac(ref mac, semi) => {
|
||||
match (*mac).node {
|
||||
mac_invoc_tt(pth, ref tts) => ((*mac), pth, (*tts), semi)
|
||||
}
|
||||
}
|
||||
_ => return orig(s, sp, fld)
|
||||
};
|
||||
|
||||
assert(vec::len(pth.idents) == 1u);
|
||||
let extname = cx.parse_sess().interner.get(pth.idents[0]);
|
||||
|
@ -406,7 +406,6 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
|
||||
AT => "AT",
|
||||
DOT => "DOT",
|
||||
DOTDOT => "DOTDOT",
|
||||
ELLIPSIS => "ELLIPSIS",
|
||||
COMMA => "COMMA",
|
||||
SEMI => "SEMI",
|
||||
COLON => "COLON",
|
||||
|
@ -1,750 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use codemap::span;
|
||||
use std::map::HashMap;
|
||||
use dvec::DVec;
|
||||
|
||||
use base::*;
|
||||
|
||||
use fold::*;
|
||||
use ast_util::respan;
|
||||
use ast::{ident, path, Ty, blk_, expr, expr_path,
|
||||
expr_vec, expr_mac, mac_invoc, node_id, expr_index};
|
||||
|
||||
export add_new_extension;
|
||||
|
||||
fn path_to_ident(pth: @path) -> Option<ident> {
|
||||
if vec::len(pth.idents) == 1u && vec::len(pth.types) == 0u {
|
||||
return Some(pth.idents[0u]);
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
//a vec of binders might be a little big.
|
||||
type clause = {params: binders, body: @expr};
|
||||
|
||||
/* logically, an arb_depth should contain only one kind of matchable */
|
||||
enum arb_depth<T> { leaf(T), seq(@~[arb_depth<T>], span), }
|
||||
|
||||
|
||||
enum matchable {
|
||||
match_expr(@expr),
|
||||
match_path(@path),
|
||||
match_ident(ast::spanned<ident>),
|
||||
match_ty(@Ty),
|
||||
match_block(ast::blk),
|
||||
match_exact, /* don't bind anything, just verify the AST traversal */
|
||||
}
|
||||
|
||||
/* for when given an incompatible bit of AST */
|
||||
fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! {
|
||||
match m {
|
||||
match_expr(x) => cx.span_fatal(
|
||||
x.span, ~"this argument is an expr, expected " + expected),
|
||||
match_path(x) => cx.span_fatal(
|
||||
x.span, ~"this argument is a path, expected " + expected),
|
||||
match_ident(x) => cx.span_fatal(
|
||||
x.span, ~"this argument is an ident, expected " + expected),
|
||||
match_ty(x) => cx.span_fatal(
|
||||
x.span, ~"this argument is a type, expected " + expected),
|
||||
match_block(ref x) => cx.span_fatal(
|
||||
(*x).span, ~"this argument is a block, expected " + expected),
|
||||
match_exact => cx.bug(~"what is a match_exact doing in a bindings?")
|
||||
}
|
||||
}
|
||||
|
||||
// We can't make all the matchables in a match_result the same type because
|
||||
// idents can be paths, which can be exprs.
|
||||
|
||||
// If we want better match failure error messages (like in Fortifying Syntax),
|
||||
// we'll want to return something indicating amount of progress and location
|
||||
// of failure instead of `none`.
|
||||
type match_result = Option<arb_depth<matchable>>;
|
||||
type selector = fn@(matchable) -> match_result;
|
||||
|
||||
fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
|
||||
{pre: ~[@expr], rep: Option<@expr>, post: ~[@expr]} {
|
||||
let mut idx: uint = 0u;
|
||||
let mut res = None;
|
||||
for elts.each |elt| {
|
||||
match elt.node {
|
||||
expr_mac(ref m) => match (*m).node {
|
||||
ast::mac_ellipsis => {
|
||||
if res.is_some() {
|
||||
cx.span_fatal((*m).span, ~"only one ellipsis allowed");
|
||||
}
|
||||
res =
|
||||
Some({pre: vec::slice(elts, 0u, idx - 1u),
|
||||
rep: Some(elts[idx - 1u]),
|
||||
post: vec::slice(elts, idx + 1u, vec::len(elts))});
|
||||
}
|
||||
_ => ()
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
idx += 1u;
|
||||
}
|
||||
return match res {
|
||||
Some(val) => val,
|
||||
None => {pre: elts, rep: None, post: ~[]}
|
||||
}
|
||||
}
|
||||
|
||||
fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) ->
|
||||
Option<~[U]> {
|
||||
let mut res = ~[];
|
||||
for v.each |elem| {
|
||||
match f(*elem) {
|
||||
None => return None,
|
||||
Some(ref fv) => res.push((*fv))
|
||||
}
|
||||
}
|
||||
return Some(res);
|
||||
}
|
||||
|
||||
fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
|
||||
match ad {
|
||||
leaf(ref x) => return f((*x)),
|
||||
seq(ads, span) => match option_flatten_map(|x| a_d_map(x, f), *ads) {
|
||||
None => return None,
|
||||
Some(ts) => return Some(seq(@ts, span))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn compose_sels(s1: selector, s2: selector) -> selector {
|
||||
fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
|
||||
return match s1(m) {
|
||||
None => None,
|
||||
Some(ref matches) => a_d_map((*matches), s2)
|
||||
}
|
||||
}
|
||||
return { |x| scomp(s1, s2, x) };
|
||||
}
|
||||
|
||||
|
||||
|
||||
type binders =
|
||||
{real_binders: HashMap<ident, selector>,
|
||||
literal_ast_matchers: DVec<selector>};
|
||||
type bindings = HashMap<ident, arb_depth<matchable>>;
|
||||
|
||||
fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
|
||||
|
||||
/* these three functions are the big moving parts */
|
||||
|
||||
/* create the selectors needed to bind and verify the pattern */
|
||||
|
||||
fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
|
||||
let res: binders =
|
||||
{real_binders: HashMap(),
|
||||
literal_ast_matchers: DVec()};
|
||||
//this oughta return binders instead, but macro args are a sequence of
|
||||
//expressions, rather than a single expression
|
||||
fn trivial_selector(m: matchable) -> match_result {
|
||||
return Some(leaf(m));
|
||||
}
|
||||
p_t_s_rec(cx, match_expr(e), trivial_selector, res);
|
||||
move res
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* use the selectors on the actual arguments to the macro to extract
|
||||
bindings. Most of the work is done in p_t_s, which generates the
|
||||
selectors. */
|
||||
|
||||
fn use_selectors_to_bind(b: binders, e: @expr) -> Option<bindings> {
|
||||
let res = HashMap();
|
||||
//need to do this first, to check vec lengths.
|
||||
for b.literal_ast_matchers.each |sel| {
|
||||
match (*sel)(match_expr(e)) { None => return None, _ => () }
|
||||
}
|
||||
let mut never_mind: bool = false;
|
||||
for b.real_binders.each |key, val| {
|
||||
match val(match_expr(e)) {
|
||||
None => never_mind = true,
|
||||
Some(ref mtc) => { res.insert(key, (*mtc)); }
|
||||
}
|
||||
};
|
||||
//HACK: `ret` doesn't work in `for each`
|
||||
if never_mind { return None; }
|
||||
return Some(res);
|
||||
}
|
||||
|
||||
/* use the bindings on the body to generate the expanded code */
|
||||
|
||||
fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
|
||||
let idx_path: @mut ~[uint] = @mut ~[];
|
||||
fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { return cx.next_id(); }
|
||||
fn new_span(cx: ext_ctxt, sp: span) -> span {
|
||||
/* this discards information in the case of macro-defining macros */
|
||||
return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
|
||||
}
|
||||
let afp = default_ast_fold();
|
||||
let f_pre =
|
||||
@{fold_ident: |x,y|transcribe_ident(cx, b, idx_path, x, y),
|
||||
fold_path: |x,y|transcribe_path(cx, b, idx_path, x, y),
|
||||
fold_expr: |x,y,z|
|
||||
transcribe_expr(cx, b, idx_path, x, y, z, afp.fold_expr)
|
||||
,
|
||||
fold_ty: |x,y,z|
|
||||
transcribe_type(cx, b, idx_path,
|
||||
x, y, z, afp.fold_ty)
|
||||
,
|
||||
fold_block: |x,y,z|
|
||||
transcribe_block(cx, b, idx_path, x, y, z, afp.fold_block)
|
||||
,
|
||||
map_exprs: |x,y|
|
||||
transcribe_exprs(cx, b, idx_path, x, y)
|
||||
,
|
||||
new_id: |x|new_id(x, cx),
|
||||
.. *afp};
|
||||
let f = make_fold(f_pre);
|
||||
let result = f.fold_expr(body);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/* helper: descend into a matcher */
|
||||
pure fn follow(m: arb_depth<matchable>, idx_path: &[uint]) ->
|
||||
arb_depth<matchable> {
|
||||
let mut res: arb_depth<matchable> = m;
|
||||
for vec::each(idx_path) |idx| {
|
||||
res = match res {
|
||||
leaf(_) => return res,/* end of the line */
|
||||
seq(new_ms, _) => new_ms[*idx]
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
fn follow_for_trans(cx: ext_ctxt, mmaybe: Option<arb_depth<matchable>>,
|
||||
idx_path: @mut ~[uint]) -> Option<matchable> {
|
||||
match mmaybe {
|
||||
None => return None,
|
||||
Some(ref m) => {
|
||||
return match follow((*m), *idx_path) {
|
||||
seq(_, sp) => {
|
||||
cx.span_fatal(sp,
|
||||
~"syntax matched under ... but not " +
|
||||
~"used that way.")
|
||||
}
|
||||
leaf(ref m) => return Some((*m))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
|
||||
fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
|
||||
let idents = HashMap();
|
||||
fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
|
||||
idents: HashMap<ident, ()>) -> ident {
|
||||
if b.contains_key(i) { idents.insert(i, ()); }
|
||||
return i;
|
||||
}
|
||||
// using fold is a hack: we want visit, but it doesn't hit idents ) :
|
||||
// solve this with macros
|
||||
let f_pre =
|
||||
@{fold_ident: |x,y|mark_ident(x, y, b, idents),
|
||||
.. *default_ast_fold()};
|
||||
let f = make_fold(f_pre);
|
||||
f.fold_expr(e); // ignore result
|
||||
for idents.each_key |x| { it(x); };
|
||||
}
|
||||
|
||||
fn wrong_occurs(cx: ext_ctxt, l: ident, l_c: uint, r: ident, r_c: uint)
|
||||
-> ~str {
|
||||
fmt!("'%s' occurs %u times, but '%s' occurs %u times",
|
||||
*cx.parse_sess().interner.get(l), l_c,
|
||||
*cx.parse_sess().interner.get(r), r_c)
|
||||
}
|
||||
|
||||
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
|
||||
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
recur: fn@(&&v: @expr) -> @expr,
|
||||
exprs: ~[@expr]) -> ~[@expr] {
|
||||
match elts_to_ell(cx, exprs) {
|
||||
{pre: pre, rep: repeat_me_maybe, post: post} => {
|
||||
let mut res = vec::map(pre, |x| recur(*x));
|
||||
match repeat_me_maybe {
|
||||
None => (),
|
||||
Some(repeat_me) => {
|
||||
let mut repeat: Option<{rep_count: uint, name: ident}> = None;
|
||||
/* we need to walk over all the free vars in lockstep, except for
|
||||
the leaves, which are just duplicated */
|
||||
do free_vars(b, repeat_me) |fv| {
|
||||
let fv_depth = b.get(fv);
|
||||
let cur_pos = follow(fv_depth, *idx_path);
|
||||
match cur_pos {
|
||||
leaf(_) => (),
|
||||
seq(ms, _) => {
|
||||
match repeat {
|
||||
None => {
|
||||
repeat = Some({rep_count: vec::len(*ms), name: fv});
|
||||
}
|
||||
Some({rep_count: old_len, name: old_name}) => {
|
||||
let len = vec::len(*ms);
|
||||
if old_len != len {
|
||||
let msg = wrong_occurs(cx, fv, len,
|
||||
old_name, old_len);
|
||||
cx.span_fatal(repeat_me.span, msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
match repeat {
|
||||
None => {
|
||||
cx.span_fatal(repeat_me.span,
|
||||
~"'...' surrounds an expression without any" +
|
||||
~" repeating syntax variables");
|
||||
}
|
||||
Some({rep_count: rc, _}) => {
|
||||
/* Whew, we now know how many times to repeat */
|
||||
let mut idx: uint = 0u;
|
||||
while idx < rc {
|
||||
idx_path.push(idx);
|
||||
res.push(recur(repeat_me)); // whew!
|
||||
idx_path.pop();
|
||||
idx += 1u;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
res = vec::append(res, vec::map(post, |x| recur(*x)));
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// substitute, in a position that's required to be an ident
|
||||
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
&&i: ident, _fld: ast_fold) -> ident {
|
||||
return match follow_for_trans(cx, b.find(i), idx_path) {
|
||||
Some(match_ident(a_id)) => a_id.node,
|
||||
Some(ref m) => match_error(cx, (*m), ~"an identifier"),
|
||||
None => i
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
p: path, _fld: ast_fold) -> path {
|
||||
// Don't substitute into qualified names.
|
||||
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { return p; }
|
||||
match follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
|
||||
Some(match_ident(id)) => {
|
||||
{span: id.span, global: false, idents: ~[id.node],
|
||||
rp: None, types: ~[]}
|
||||
}
|
||||
Some(match_path(a_pth)) => *a_pth,
|
||||
Some(ref m) => match_error(cx, (*m), ~"a path"),
|
||||
None => p
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
e: ast::expr_, s: span, fld: ast_fold,
|
||||
orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
|
||||
-> (ast::expr_, span)
|
||||
{
|
||||
return match e {
|
||||
expr_path(p) => {
|
||||
// Don't substitute into qualified names.
|
||||
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
|
||||
(e, s);
|
||||
}
|
||||
match follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
|
||||
Some(match_ident(id)) => {
|
||||
(expr_path(@{span: id.span,
|
||||
global: false,
|
||||
idents: ~[id.node],
|
||||
rp: None,
|
||||
types: ~[]}), id.span)
|
||||
}
|
||||
Some(match_path(a_pth)) => (expr_path(a_pth), s),
|
||||
Some(match_expr(a_exp)) => (a_exp.node, a_exp.span),
|
||||
Some(ref m) => match_error(cx, (*m), ~"an expression"),
|
||||
None => orig(e, s, fld)
|
||||
}
|
||||
}
|
||||
_ => orig(e, s, fld)
|
||||
}
|
||||
}
|
||||
|
||||
fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
t: ast::ty_, s: span, fld: ast_fold,
|
||||
orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
|
||||
-> (ast::ty_, span)
|
||||
{
|
||||
return match t {
|
||||
ast::ty_path(pth, _) => {
|
||||
match path_to_ident(pth) {
|
||||
Some(id) => {
|
||||
match follow_for_trans(cx, b.find(id), idx_path) {
|
||||
Some(match_ty(ty)) => (ty.node, ty.span),
|
||||
Some(ref m) => match_error(cx, (*m), ~"a type"),
|
||||
None => orig(t, s, fld)
|
||||
}
|
||||
}
|
||||
None => orig(t, s, fld)
|
||||
}
|
||||
}
|
||||
_ => orig(t, s, fld)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* for parsing reasons, syntax variables bound to blocks must be used like
|
||||
`{v}` */
|
||||
|
||||
fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
|
||||
blk: blk_, s: span, fld: ast_fold,
|
||||
orig: fn@(blk_, span, ast_fold) -> (blk_, span))
|
||||
-> (blk_, span)
|
||||
{
|
||||
return match block_to_ident(blk) {
|
||||
Some(id) => {
|
||||
match follow_for_trans(cx, b.find(id), idx_path) {
|
||||
Some(match_block(ref new_blk)) => {
|
||||
((*new_blk).node, (*new_blk).span)
|
||||
}
|
||||
|
||||
// possibly allow promotion of ident/path/expr to blocks?
|
||||
Some(ref m) => match_error(cx, (*m), ~"a block"),
|
||||
None => orig(blk, s, fld)
|
||||
}
|
||||
}
|
||||
None => orig(blk, s, fld)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* traverse the pattern, building instructions on how to bind the actual
|
||||
argument. ps accumulates instructions on navigating the tree.*/
|
||||
fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
|
||||
|
||||
//it might be possible to traverse only exprs, not matchables
|
||||
match m {
|
||||
match_expr(e) => {
|
||||
match e.node {
|
||||
expr_path(p_pth) => p_t_s_r_path(cx, p_pth, s, b),
|
||||
expr_vec(p_elts, _) => {
|
||||
match elts_to_ell(cx, p_elts) {
|
||||
{pre: pre, rep: Some(repeat_me), post: post} => {
|
||||
p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
|
||||
b);
|
||||
if vec::len(pre) > 0u {
|
||||
p_t_s_r_actual_vector(cx, pre, true, s, b);
|
||||
}
|
||||
p_t_s_r_ellipses(cx, repeat_me, vec::len(pre), s, b);
|
||||
|
||||
if vec::len(post) > 0u {
|
||||
cx.span_unimpl(e.span,
|
||||
~"matching after `...` not yet supported");
|
||||
}
|
||||
}
|
||||
{pre: pre, rep: None, post: post} => {
|
||||
if post.len() > 0 {
|
||||
cx.bug(~"elts_to_ell provided an invalid result");
|
||||
}
|
||||
p_t_s_r_length(cx, vec::len(pre), false, s, b);
|
||||
p_t_s_r_actual_vector(cx, pre, false, s, b);
|
||||
}
|
||||
}
|
||||
}
|
||||
/* FIXME (#2251): handle embedded types and blocks, at least */
|
||||
expr_mac(ref mac) => {
|
||||
p_t_s_r_mac(cx, (*mac), s, b);
|
||||
}
|
||||
_ => {
|
||||
fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
|
||||
match_result {
|
||||
return match m {
|
||||
match_expr(e) => {
|
||||
if managed::ptr_eq(e, pat) {
|
||||
// XXX: Is this right?
|
||||
Some(leaf(match_exact))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => cx.bug(~"broken traversal in p_t_s_r")
|
||||
}
|
||||
}
|
||||
b.literal_ast_matchers.push(|x| select(cx, x, e));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => cx.bug(~"undocumented invariant in p_t_s_rec")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* make a match more precise */
|
||||
fn specialize_match(m: matchable) -> matchable {
|
||||
return match m {
|
||||
match_expr(e) => {
|
||||
match e.node {
|
||||
expr_path(pth) => {
|
||||
match path_to_ident(pth) {
|
||||
Some(id) => match_ident(respan(pth.span, id)),
|
||||
None => match_path(pth)
|
||||
}
|
||||
}
|
||||
_ => m
|
||||
}
|
||||
}
|
||||
_ => m
|
||||
}
|
||||
}
|
||||
|
||||
/* pattern_to_selectors helper functions */
|
||||
fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
|
||||
match path_to_ident(p) {
|
||||
Some(p_id) => {
|
||||
fn select(cx: ext_ctxt, m: matchable) -> match_result {
|
||||
return match m {
|
||||
match_expr(*) => Some(leaf(specialize_match(m))),
|
||||
_ => cx.bug(~"broken traversal in p_t_s_r")
|
||||
}
|
||||
}
|
||||
if b.real_binders.contains_key(p_id) {
|
||||
cx.span_fatal(p.span, ~"duplicate binding identifier");
|
||||
}
|
||||
b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x)));
|
||||
}
|
||||
None => ()
|
||||
}
|
||||
}
|
||||
|
||||
fn block_to_ident(blk: blk_) -> Option<ident> {
|
||||
if vec::len(blk.stmts) != 0u { return None; }
|
||||
return match blk.expr {
|
||||
Some(expr) => match expr.node {
|
||||
expr_path(pth) => path_to_ident(pth),
|
||||
_ => None
|
||||
},
|
||||
None => None
|
||||
}
|
||||
}
|
||||
|
||||
fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) {
|
||||
fn select_pt_1(cx: ext_ctxt, m: matchable,
|
||||
fn_m: fn(ast::mac) -> match_result) -> match_result {
|
||||
return match m {
|
||||
match_expr(e) => match e.node {
|
||||
expr_mac(ref mac) => fn_m((*mac)),
|
||||
_ => None
|
||||
},
|
||||
_ => cx.bug(~"broken traversal in p_t_s_r")
|
||||
}
|
||||
}
|
||||
fn no_des(cx: ext_ctxt, sp: span, syn: ~str) -> ! {
|
||||
cx.span_fatal(sp, ~"destructuring " + syn + ~" is not yet supported");
|
||||
}
|
||||
match mac.node {
|
||||
ast::mac_ellipsis => cx.span_fatal(mac.span, ~"misused `...`"),
|
||||
ast::mac_invoc(_, _, _) => no_des(cx, mac.span, ~"macro calls"),
|
||||
ast::mac_invoc_tt(_, _) => no_des(cx, mac.span, ~"macro calls"),
|
||||
ast::mac_aq(_,_) => no_des(cx, mac.span, ~"antiquotes"),
|
||||
ast::mac_var(_) => no_des(cx, mac.span, ~"antiquote variables")
|
||||
}
|
||||
}
|
||||
|
||||
fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
|
||||
b: binders) {
|
||||
fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
|
||||
match_result {
|
||||
return match m {
|
||||
match_expr(e) => {
|
||||
match e.node {
|
||||
expr_vec(arg_elts, _) => {
|
||||
let mut elts = ~[];
|
||||
let mut idx = offset;
|
||||
while idx < vec::len(arg_elts) {
|
||||
elts.push(leaf(match_expr(arg_elts[idx])));
|
||||
idx += 1u;
|
||||
}
|
||||
|
||||
// using repeat_me.span is a little wacky, but the
|
||||
// error we want to report is one in the macro def
|
||||
Some(seq(@elts, repeat_me.span))
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
_ => cx.bug(~"broken traversal in p_t_s_r")
|
||||
}
|
||||
}
|
||||
p_t_s_rec(cx, match_expr(repeat_me),
|
||||
compose_sels(s, |x| select(cx, repeat_me, offset, x)), b);
|
||||
}
|
||||
|
||||
|
||||
fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
|
||||
b: binders) {
|
||||
fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
|
||||
match_result {
|
||||
return match m {
|
||||
match_expr(e) => {
|
||||
match e.node {
|
||||
expr_vec(arg_elts, _) => {
|
||||
let actual_len = vec::len(arg_elts);
|
||||
if at_least && actual_len >= len || actual_len == len {
|
||||
Some(leaf(match_exact))
|
||||
} else { None }
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
b.literal_ast_matchers.push(
|
||||
compose_sels(s, |x| len_select(cx, x, at_least, len)));
|
||||
}
|
||||
|
||||
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
|
||||
s: selector, b: binders) {
|
||||
let mut idx: uint = 0u;
|
||||
while idx < vec::len(elts) {
|
||||
fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
|
||||
return match m {
|
||||
match_expr(e) => {
|
||||
match e.node {
|
||||
expr_vec(arg_elts, _) => {
|
||||
Some(leaf(match_expr(arg_elts[idx])))
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
_ => cx.bug(~"broken traversal in p_t_s_r")
|
||||
}
|
||||
}
|
||||
p_t_s_rec(cx, match_expr(elts[idx]),
|
||||
compose_sels(s, |x, copy idx| select(cx, x, idx)), b);
|
||||
idx += 1u;
|
||||
}
|
||||
}
|
||||
|
||||
fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
|
||||
_body: ast::mac_body) -> base::macro_def {
|
||||
let args = get_mac_args_no_max(cx, sp, arg, 0u, ~"macro");
|
||||
|
||||
let mut macro_name: Option<~str> = None;
|
||||
let mut clauses: ~[@clause] = ~[];
|
||||
for args.each |arg| {
|
||||
match arg.node {
|
||||
expr_vec(elts, _) => {
|
||||
if vec::len(elts) != 2u {
|
||||
cx.span_fatal((*arg).span,
|
||||
~"extension clause must consist of ~[" +
|
||||
~"macro invocation, expansion body]");
|
||||
}
|
||||
|
||||
|
||||
match elts[0u].node {
|
||||
expr_mac(ref mac) => {
|
||||
match (*mac).node {
|
||||
mac_invoc(pth, invoc_arg, _) => {
|
||||
match path_to_ident(pth) {
|
||||
Some(id) => {
|
||||
let id_str = cx.str_of(id);
|
||||
match macro_name {
|
||||
None => macro_name = Some(id_str),
|
||||
Some(ref other_id) => if id_str != (*other_id) {
|
||||
cx.span_fatal(pth.span,
|
||||
~"macro name must be " +
|
||||
~"consistent");
|
||||
}
|
||||
}
|
||||
},
|
||||
None => cx.span_fatal(pth.span,
|
||||
~"macro name must not be a path")
|
||||
}
|
||||
let arg = match invoc_arg {
|
||||
Some(arg) => arg,
|
||||
None => cx.span_fatal((*mac).span,
|
||||
~"macro must have arguments")
|
||||
};
|
||||
clauses.push(@{params: pattern_to_selectors(cx, arg),
|
||||
body: elts[1u]});
|
||||
|
||||
// FIXME (#2251): check duplicates (or just simplify
|
||||
// the macro arg situation)
|
||||
}
|
||||
_ => {
|
||||
cx.span_bug((*mac).span, ~"undocumented invariant in \
|
||||
add_extension");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
cx.span_fatal(elts[0u].span,
|
||||
~"extension clause must" +
|
||||
~" start with a macro invocation.");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
cx.span_fatal((*arg).span,
|
||||
~"extension must be ~[clause, " + ~" ...]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses);
|
||||
|
||||
return {name:
|
||||
match macro_name {
|
||||
Some(ref id) => (*id),
|
||||
None => cx.span_fatal(sp, ~"macro definition must have " +
|
||||
~"at least one clause")
|
||||
},
|
||||
ext: normal({expander: ext, span: Some(arg.get().span)})};
|
||||
|
||||
fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
|
||||
_body: ast::mac_body,
|
||||
clauses: ~[@clause]) -> @expr {
|
||||
let arg = match arg {
|
||||
Some(arg) => arg,
|
||||
None => cx.span_fatal(sp, ~"macro must have arguments")
|
||||
};
|
||||
for clauses.each |c| {
|
||||
match use_selectors_to_bind(c.params, arg) {
|
||||
Some(bindings) => return transcribe(cx, bindings, c.body),
|
||||
None => loop
|
||||
}
|
||||
}
|
||||
cx.span_fatal(sp, ~"no clauses match macro invocation");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
//
|
||||
// Local Variables:
|
||||
// mode: rust
|
||||
// fill-column: 78;
|
||||
// indent-tabs-mode: nil
|
||||
// c-basic-offset: 4
|
||||
// buffer-file-coding-system: utf-8-unix
|
||||
// End:
|
||||
//
|
@ -120,14 +120,7 @@ fn fold_arg_(a: arg, fld: ast_fold) -> arg {
|
||||
fn fold_mac_(m: mac, fld: ast_fold) -> mac {
|
||||
return {node:
|
||||
match m.node {
|
||||
mac_invoc(pth, arg, body) => {
|
||||
mac_invoc(fld.fold_path(pth),
|
||||
option::map(&arg, |x| fld.fold_expr(*x)), body)
|
||||
}
|
||||
mac_invoc_tt(*) => m.node,
|
||||
mac_ellipsis => mac_ellipsis,
|
||||
mac_aq(_,_) => /* FIXME (#2543) */ copy m.node,
|
||||
mac_var(_) => /* FIXME (#2543) */ copy m.node,
|
||||
},
|
||||
span: fld.new_span(m.span)};
|
||||
}
|
||||
|
@ -12,16 +12,9 @@ use either::{Either, Left, Right};
|
||||
use ast_util::spanned;
|
||||
use common::*; //resolve bug?
|
||||
|
||||
export attr_or_ext;
|
||||
export parser_attr;
|
||||
|
||||
// A type to distinguish between the parsing of item attributes or syntax
// extensions, which both begin with token.POUND
type attr_or_ext = Option<Either<~[ast::attribute], @ast::expr>>;
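For illustration, not part of the patch itself: both forms the parser must distinguish here start with '#', which is why parse_outer_attrs_or_ext below peeks at the token after POUND. Examples drawn from elsewhere in this commit:

    #[auto_serialize]                    // an attribute: POUND, then LBRACKET and a meta item
    #fmt["victory robot %u", #line[]]    // an old-style syntax extension: POUND, then an expander name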
|
||||
|
||||
trait parser_attr {
|
||||
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
|
||||
-> attr_or_ext;
|
||||
fn parse_outer_attributes() -> ~[ast::attribute];
|
||||
fn parse_attribute(style: ast::attr_style) -> ast::attribute;
|
||||
fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
|
||||
@ -35,34 +28,6 @@ trait parser_attr {
|
||||
|
||||
impl Parser: parser_attr {
|
||||
|
||||
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
|
||||
-> attr_or_ext
|
||||
{
|
||||
let expect_item_next = vec::is_not_empty(first_item_attrs);
|
||||
match self.token {
|
||||
token::POUND => {
|
||||
let lo = self.span.lo;
|
||||
if self.look_ahead(1u) == token::LBRACKET {
|
||||
self.bump();
|
||||
let first_attr =
|
||||
self.parse_attribute_naked(ast::attr_outer, lo);
|
||||
return Some(Left(vec::append(~[first_attr],
|
||||
self.parse_outer_attributes())));
|
||||
} else if !(self.look_ahead(1u) == token::LT
|
||||
|| self.look_ahead(1u) == token::LBRACKET
|
||||
|| self.look_ahead(1u) == token::POUND
|
||||
|| expect_item_next) {
|
||||
self.bump();
|
||||
return Some(Right(self.parse_syntax_ext_naked(lo)));
|
||||
} else { return None; }
|
||||
}
|
||||
token::DOC_COMMENT(_) => {
|
||||
return Some(Left(self.parse_outer_attributes()));
|
||||
}
|
||||
_ => return None
|
||||
}
|
||||
}
|
||||
|
||||
// Parse attributes that appear before an item
|
||||
fn parse_outer_attributes() -> ~[ast::attribute] {
|
||||
let mut attrs: ~[ast::attribute] = ~[];
|
||||
|
@ -515,11 +515,6 @@ fn next_token_inner(rdr: string_reader) -> token::Token {
|
||||
bump(rdr);
|
||||
return token::DOTDOT;
|
||||
}
|
||||
if rdr.curr == '.' && nextch(rdr) == '.' {
|
||||
bump(rdr);
|
||||
bump(rdr);
|
||||
return token::ELLIPSIS;
|
||||
}
|
||||
return token::DOT;
|
||||
}
|
||||
'(' => { bump(rdr); return token::LPAREN; }
|
||||
|
@ -54,8 +54,8 @@ use ast::{_mod, add, arg, arm, attribute,
|
||||
item_foreign_mod, item_impl, item_mac, item_mod, item_trait,
|
||||
item_ty, lit, lit_, lit_bool, lit_float, lit_float_unsuffixed,
|
||||
lit_int, lit_int_unsuffixed, lit_nil, lit_str, lit_uint, local,
|
||||
m_const, m_imm, m_mutbl, mac_, mac_aq, mac_ellipsis, mac_invoc,
|
||||
mac_invoc_tt, mac_var, matcher, match_nonterminal, match_seq,
|
||||
m_const, m_imm, m_mutbl, mac_,
|
||||
mac_invoc_tt, matcher, match_nonterminal, match_seq,
|
||||
match_tok, method, mode, module_ns, mt, mul, mutability,
|
||||
named_field, neg, noreturn, not, pat, pat_box, pat_enum,
|
||||
pat_ident, pat_lit, pat_range, pat_rec, pat_region, pat_struct,
|
||||
@ -510,15 +510,6 @@ impl Parser {
|
||||
|
||||
let lo = self.span.lo;
|
||||
|
||||
match self.maybe_parse_dollar_mac() {
|
||||
Some(ref e) => {
|
||||
return @{id: self.get_id(),
|
||||
node: ty_mac(spanned(lo, self.span.hi, (*e))),
|
||||
span: mk_sp(lo, self.span.hi)};
|
||||
}
|
||||
None => ()
|
||||
}
|
||||
|
||||
let t = if self.token == token::LPAREN {
|
||||
self.bump();
|
||||
if self.token == token::RPAREN {
|
||||
@ -730,32 +721,6 @@ impl Parser {
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_parse_dollar_mac() -> Option<mac_> {
|
||||
match copy self.token {
|
||||
token::DOLLAR => {
|
||||
let lo = self.span.lo;
|
||||
self.bump();
|
||||
match copy self.token {
|
||||
token::LIT_INT_UNSUFFIXED(num) => {
|
||||
self.bump();
|
||||
Some(mac_var(num as uint))
|
||||
}
|
||||
token::LPAREN => {
|
||||
self.bump();
|
||||
let e = self.parse_expr();
|
||||
self.expect(token::RPAREN);
|
||||
let hi = self.last_span.hi;
|
||||
Some(mac_aq(mk_sp(lo,hi), e))
|
||||
}
|
||||
_ => {
|
||||
self.fatal(~"expected `(` or unsuffixed integer literal");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_parse_fixed_vstore_with_star() -> Option<uint> {
|
||||
if self.eat(token::BINOP(token::STAR)) {
|
||||
match copy self.token {
|
||||
@ -928,11 +893,6 @@ impl Parser {
|
||||
|
||||
let mut ex: expr_;
|
||||
|
||||
match self.maybe_parse_dollar_mac() {
|
||||
Some(ref x) => return self.mk_mac_expr(lo, self.span.hi, (*x)),
|
||||
_ => ()
|
||||
}
|
||||
|
||||
if self.token == token::LPAREN {
|
||||
self.bump();
|
||||
if self.token == token::RPAREN {
|
||||
@ -1022,13 +982,6 @@ impl Parser {
|
||||
}
|
||||
}
|
||||
hi = self.span.hi;
|
||||
} else if self.token == token::ELLIPSIS {
|
||||
self.bump();
|
||||
return self.mk_mac_expr(lo, self.span.hi, mac_ellipsis);
|
||||
} else if self.token == token::POUND {
|
||||
let ex_ext = self.parse_syntax_ext();
|
||||
hi = ex_ext.span.hi;
|
||||
ex = ex_ext.node;
|
||||
} else if self.eat_keyword(~"fail") {
|
||||
if can_begin_expr(self.token) {
|
||||
let e = self.parse_expr();
|
||||
@ -1141,54 +1094,6 @@ impl Parser {
|
||||
return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
|
||||
}
|
||||
|
||||
fn parse_syntax_ext() -> @expr {
|
||||
let lo = self.span.lo;
|
||||
self.expect(token::POUND);
|
||||
return self.parse_syntax_ext_naked(lo);
|
||||
}
|
||||
|
||||
fn parse_syntax_ext_naked(lo: BytePos) -> @expr {
|
||||
match self.token {
|
||||
token::IDENT(_, _) => (),
|
||||
_ => self.fatal(~"expected a syntax expander name")
|
||||
}
|
||||
let pth = self.parse_path_without_tps();
|
||||
//temporary for a backwards-compatible cycle:
|
||||
let sep = seq_sep_trailing_disallowed(token::COMMA);
|
||||
let mut e = None;
|
||||
if (self.token == token::LPAREN || self.token == token::LBRACKET) {
|
||||
let lo = self.span.lo;
|
||||
let es =
|
||||
if self.token == token::LPAREN {
|
||||
self.parse_unspanned_seq(token::LPAREN, token::RPAREN,
|
||||
sep, |p| p.parse_expr())
|
||||
} else {
|
||||
self.parse_unspanned_seq(token::LBRACKET, token::RBRACKET,
|
||||
sep, |p| p.parse_expr())
|
||||
};
|
||||
let hi = self.span.hi;
|
||||
e = Some(self.mk_expr(lo, hi, expr_vec(es, m_imm)));
|
||||
}
|
||||
let mut b = None;
|
||||
if self.token == token::LBRACE {
|
||||
self.bump();
|
||||
let lo = self.span.lo;
|
||||
let mut depth = 1u;
|
||||
while (depth > 0u) {
|
||||
match (self.token) {
|
||||
token::LBRACE => depth += 1u,
|
||||
token::RBRACE => depth -= 1u,
|
||||
token::EOF => self.fatal(~"unexpected EOF in macro body"),
|
||||
_ => ()
|
||||
}
|
||||
self.bump();
|
||||
}
|
||||
let hi = self.last_span.lo;
|
||||
b = Some({span: mk_sp(lo,hi)});
|
||||
}
|
||||
return self.mk_mac_expr(lo, self.span.hi, mac_invoc(pth, e, b));
|
||||
}
|
||||
|
||||
fn parse_dot_or_call_expr() -> @expr {
|
||||
let b = self.parse_bottom_expr();
|
||||
self.parse_dot_or_call_expr_with(b)
|
||||
@ -2253,17 +2158,8 @@ impl Parser {
|
||||
}
|
||||
|
||||
} else {
|
||||
let mut item_attrs;
|
||||
match self.parse_outer_attrs_or_ext(first_item_attrs) {
|
||||
None => item_attrs = ~[],
|
||||
Some(Left(ref attrs)) => item_attrs = (*attrs),
|
||||
Some(Right(ext)) => {
|
||||
return @spanned(lo, ext.span.hi,
|
||||
stmt_expr(ext, self.get_id()));
|
||||
}
|
||||
}
|
||||
|
||||
let item_attrs = vec::append(first_item_attrs, item_attrs);
|
||||
let item_attrs = vec::append(first_item_attrs,
|
||||
self.parse_outer_attributes());
|
||||
|
||||
match self.parse_item_or_view_item(item_attrs,
|
||||
true, false, false) {
|
||||
|
@ -49,7 +49,6 @@ enum Token {
|
||||
AT,
|
||||
DOT,
|
||||
DOTDOT,
|
||||
ELLIPSIS,
|
||||
COMMA,
|
||||
SEMI,
|
||||
COLON,
|
||||
@ -137,7 +136,6 @@ fn to_str(in: @ident_interner, t: Token) -> ~str {
|
||||
AT => ~"@",
|
||||
DOT => ~".",
|
||||
DOTDOT => ~"..",
|
||||
ELLIPSIS => ~"...",
|
||||
COMMA => ~",",
|
||||
SEMI => ~";",
|
||||
COLON => ~":",
|
||||
@ -578,12 +576,6 @@ impl Token : cmp::Eq {
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
ELLIPSIS => {
|
||||
match (*other) {
|
||||
ELLIPSIS => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
COMMA => {
|
||||
match (*other) {
|
||||
COMMA => true,
|
||||
|
@ -590,9 +590,6 @@ fn print_item(s: ps, &&item: @ast::item) {
|
||||
pclose(s);
|
||||
end(s);
|
||||
}
|
||||
ast::item_mac(_) => {
|
||||
fail ~"invalid item-position syntax bit"
|
||||
}
|
||||
}
|
||||
(s.ann.post)(ann_node);
|
||||
}
|
||||
@ -1000,16 +997,6 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
|
||||
|
||||
fn print_mac(s: ps, m: ast::mac) {
|
||||
match m.node {
|
||||
ast::mac_invoc(path, arg, _body) => {
|
||||
word(s.s, ~"#");
|
||||
print_path(s, path, false);
|
||||
match arg {
|
||||
Some(@{node: ast::expr_vec(_, _), _}) => (),
|
||||
_ => word(s.s, ~" ")
|
||||
}
|
||||
arg.iter(|a| print_expr(s, *a));
|
||||
// FIXME: extension 'body' (#2339)
|
||||
}
|
||||
ast::mac_invoc_tt(pth, ref tts) => {
|
||||
print_path(s, pth, false);
|
||||
word(s.s, ~"!");
|
||||
@ -1017,9 +1004,6 @@ fn print_mac(s: ps, m: ast::mac) {
|
||||
for (*tts).each() |tt| { print_tt(s, *tt); }
|
||||
pclose(s);
|
||||
}
|
||||
ast::mac_ellipsis => word(s.s, ~"..."),
|
||||
ast::mac_var(v) => word(s.s, fmt!("$%u", v)),
|
||||
_ => { /* fixme */ }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,9 +99,6 @@ mod ext {
|
||||
}
|
||||
|
||||
|
||||
#[legacy_exports]
|
||||
#[path = "ext/simplext.rs"]
|
||||
mod simplext;
|
||||
#[legacy_exports]
|
||||
#[path = "ext/fmt.rs"]
|
||||
mod fmt;
|
||||
|
@ -379,15 +379,8 @@ fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
|
||||
for exprs.each |ex| { (v.visit_expr)(*ex, e, v); }
|
||||
}
|
||||
|
||||
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
|
||||
match m.node {
|
||||
ast::mac_invoc(_, arg, _) => {
|
||||
option::map(&arg, |arg| (v.visit_expr)(*arg, e, v)); }
|
||||
ast::mac_invoc_tt(*) => { /* no user-serviceable parts inside */ }
|
||||
ast::mac_ellipsis => (),
|
||||
ast::mac_aq(*) => { /* FIXME: maybe visit (Issue #2340) */ }
|
||||
ast::mac_var(_) => ()
|
||||
}
|
||||
fn visit_mac<E>(_m: mac, _e: E, _v: vt<E>) {
|
||||
/* no user-serviceable parts inside */
|
||||
}
|
||||
|
||||
fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
|
||||
|
@ -1,17 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// Regression test for issues #1448 and #1386
|
||||
|
||||
fn main() {
|
||||
#macro[[#apply[f, [x, ...]], f(x, ...)]];
|
||||
fn add(a: int, b: int) -> int { return a + b; }
|
||||
assert (apply!(add, [y, 15]) == 16); //~ ERROR unresolved name: y
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//error-pattern:is an expr, expected a path
|
||||
fn main() {
|
||||
#macro[[#mylambda[x, body],
|
||||
{
|
||||
fn f(x: int) -> int { return body }
|
||||
f
|
||||
}]];
|
||||
|
||||
assert (mylambda!(y * 1, y * 2)(8) == 16);
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//error-pattern:no clauses match
|
||||
|
||||
fn main() {
|
||||
#macro[[#trivial[], 1 * 2 * 4 * 2 * 1]];
|
||||
|
||||
assert (trivial!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) ==
|
||||
16);
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// xfail-pretty - token trees can't pretty print
|
||||
|
||||
fn main() {
|
||||
#macro[[#trivial[], 1 * 2 * 4 * 2 * 1]];
|
||||
|
||||
assert (trivial!() == 16);
|
||||
|
||||
macro_rules! trivial_tt(
|
||||
() => {1*2*4*2*1}
|
||||
)
|
||||
assert(trivial_tt!() == 16);
|
||||
}
|
@ -1,24 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
|
||||
fn main() {
|
||||
#macro[[#apply[f, [x, ...]], f(x, ...)]];
|
||||
|
||||
macro_rules! apply_tt(
|
||||
($f:expr, ($($x:expr),*)) => {$f($($x),*)}
|
||||
)
|
||||
|
||||
fn add(a: int, b: int) -> int { return a + b; }
|
||||
|
||||
assert(apply!(add, [1, 15]) == 16);
|
||||
assert(apply!(add, [1, 15]) == 16);
|
||||
assert(apply_tt!(add, (1, 15)) == 16);
|
||||
}
|
@ -1,60 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// xfail-test
|
||||
// I can't for the life of me manage to untangle all of the brackets
|
||||
// in this test, so I am xfailing it...
|
||||
|
||||
fn main() {
|
||||
#macro[[#zip_or_unzip[[x, ...], [y, ...]], [[x, y], ...]],
|
||||
[#zip_or_unzip[[xx, yy], ...], [[xx, ...], [yy, ...]]]];
|
||||
|
||||
|
||||
assert (zip_or_unzip!([1, 2, 3, 4], [5, 6, 7, 8]) ==
|
||||
[[1, 5], [2, 6], [3, 7], [4, 8]]);
|
||||
assert (zip_or_unzip!([1, 5], [2, 6], [3, 7], [4, 8]) ==
|
||||
[[1, 2, 3, 4], [5, 6, 7, 8]]);
|
||||
|
||||
|
||||
#macro[[#nested[[[x, ...], ...], [[y, ...], ...]], [[[x, y], ...], ...]]];
|
||||
assert (nested!([[1, 2, 3, 4, 5], [7, 8, 9, 10, 11, 12]],
|
||||
[[-1, -2, -3, -4, -5], [-7, -8, -9, -10, -11, -12]]) ==
|
||||
[[[1, -1], [2, -2], [3, -3], [4, -4], [5, -5]],
|
||||
[[7, -7], [8, -8], [9, -9], [10, -10], [11, -11],
|
||||
[12, -12]]]);
|
||||
|
||||
#macro[[#dup[y, [x, ...]], [[y, x], ...]]];
|
||||
|
||||
assert (dup!(1, [1, 2, 3, 4]) == [[1, 1], [1, 2], [1, 3], [1, 4]]);
|
||||
|
||||
|
||||
#macro[[#lambda[x, #<t>, body, #<s>],
|
||||
{
|
||||
fn result(x: t) -> s { return body }
|
||||
result
|
||||
}]];
|
||||
|
||||
|
||||
assert (lambda!(i, #<uint>, i + 4u, #<uint>)(12u) == 16u);
|
||||
|
||||
#macro[[#sum[x, xs, ...], x + #sum[xs, ...]], [#sum[], 0]];
|
||||
|
||||
assert (sum!(1, 2, 3, 4) == 10);
|
||||
|
||||
|
||||
#macro[[#transcr_mixed[a, as, ...], #sum[6, as, ...] * a]];
|
||||
|
||||
assert (transcr_mixed!(10, 5, 4, 3, 2, 1) == 210);
|
||||
|
||||
#macro[[#surround[pre, [xs, ...], post], [pre, xs, ..., post]]];
|
||||
|
||||
assert (surround!(1, [2, 3, 4], 5) == [1, 2, 3, 4, 5]);
|
||||
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// xfail-pretty - token trees can't pretty print
|
||||
|
||||
fn main() {
|
||||
#macro[[#m1[a], a * 4]];
|
||||
assert (m1!(2) == 8);
|
||||
|
||||
macro_rules! m1tt (
|
||||
($a:expr) => {$a*4}
|
||||
);
|
||||
assert(m1tt!(2) == 8);
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
/* this is for run-pass/syntax-extension-source-utils.rs */
|
||||
|
||||
{
|
||||
assert(#file[].ends_with("includeme.fragment"));
|
||||
assert(#line[] == 5u);
|
||||
#fmt["victory robot %u", #line[]]
|
||||
assert(file!().ends_with("includeme.fragment"));
|
||||
assert(line!() == 5u);
|
||||
fmt!("victory robot %u", line!())
|
||||
}
|
||||
|