Plumbing and parsing for item-position macros.

This commit is contained in:
Eric Holk 2012-07-05 12:10:33 -07:00
parent a787f40013
commit 05cdda3a2c
19 changed files with 135 additions and 10 deletions

View File

@ -704,6 +704,7 @@ enum item_ {
item_trait(~[ty_param], region_param, ~[ty_method]),
item_impl(~[ty_param], region_param, option<@trait_ref> /* trait */,
@ty /* self */, ~[@method]),
item_mac(mac),
}
#[auto_serialize]

View File

@ -17,12 +17,18 @@ type item_decorator =
type syntax_expander_tt = {expander: syntax_expander_tt_, span: option<span>};
type syntax_expander_tt_ = fn@(ext_ctxt, span, ast::token_tree) -> @ast::expr;
type syntax_expander_tt_item
= {expander: syntax_expander_tt_item_, span: option<span>};
type syntax_expander_tt_item_
= fn@(ext_ctxt, span, ast::ident, ast::token_tree) -> @ast::item;
enum syntax_extension {
normal(syntax_expander),
macro_defining(macro_definer),
item_decorator(item_decorator),
normal_tt(syntax_expander_tt)
normal_tt(syntax_expander_tt),
item_tt(syntax_expander_tt_item),
}
// A temporary hard-coded map of methods for expanding syntax extension
@ -30,6 +36,9 @@ enum syntax_extension {
fn syntax_expander_table() -> hashmap<str, syntax_extension> {
fn builtin(f: syntax_expander_) -> syntax_extension
{normal({expander: f, span: none})}
// Wrap an item-position token-tree expander function in the `item_tt`
// extension variant.  `span: none` marks it as a compiler builtin with
// no user-written definition site.
fn builtin_item_tt(f: syntax_expander_tt_item_) -> syntax_extension {
item_tt({expander: f, span: none})
}
let syntax_expanders = str_hash::<syntax_extension>();
syntax_expanders.insert("fmt", builtin(ext::fmt::expand_syntax_ext));
syntax_expanders.insert("auto_serialize",
@ -61,6 +70,8 @@ fn syntax_expander_table() -> hashmap<str, syntax_extension> {
builtin(ext::source_util::expand_include_bin));
syntax_expanders.insert("mod",
builtin(ext::source_util::expand_mod));
syntax_expanders.insert("proto",
builtin_item_tt(ext::pipes::expand_proto));
ret syntax_expanders;
}

View File

@ -1,7 +1,7 @@
import std::map::hashmap;
import ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
tt_delim, tt_flat};
tt_delim, tt_flat, item_mac};
import fold::*;
import ext::base::*;
import ext::qquote::{qq_helper};
@ -52,6 +52,10 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
#fmt["this tt-style macro should be \
invoked '%s!{...}'", *extname])
}
some(item_tt(*)) {
cx.span_fatal(pth.span,
"cannot use item macros in this context");
}
}
}
mac_invoc_tt(pth, tt) {
@ -109,7 +113,7 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
};
alt exts.find(*mname) {
none | some(normal(_)) | some(macro_defining(_))
| some(normal_tt(_)) {
| some(normal_tt(_)) | some(item_tt(*)) {
items
}
@ -124,7 +128,8 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
}
/* record module we enter for `#mod` */
fn expand_item(cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
fn expand_item(exts: hashmap<str, syntax_extension>,
cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
orig: fn@(&&@ast::item, ast_fold) -> @ast::item)
-> @ast::item
{
@ -132,12 +137,41 @@ fn expand_item(cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
ast::item_mod(_) | ast::item_foreign_mod(_) {true}
_ {false}
};
let it = alt it.node {
ast::item_mac(*) {
expand_item_mac(exts, cx, it)
}
_ { it }
};
if is_mod { cx.mod_push(it.ident); }
let ret_val = orig(it, fld);
if is_mod { cx.mod_pop(); }
ret ret_val;
}
// Expand a single item-position macro invocation (`name! ident { ... }`).
//
// Looks up `name` (the first segment of the invocation path) in the
// syntax-extension table and, if it is bound to an `item_tt` expander,
// calls that expander to produce the replacement item.
//
// Reports a fatal span error when the macro is undefined or is bound to
// a non-item extension (expression macro, decorator, ...), and a
// compiler bug when called on an item that is not a macro invocation.
fn expand_item_mac(exts: hashmap<str, syntax_extension>,
                   cx: ext_ctxt, &&it: @ast::item) -> @ast::item {
    alt it.node {
      item_mac({node: mac_invoc_tt(pth, tt), span}) {
        let extname = pth.idents[0];
        alt exts.find(*extname) {
          none {
            cx.span_fatal(pth.span,
                          #fmt("macro undefined: '%s'", *extname))
          }
          some(item_tt(expand)) {
            expand.expander(cx, it.span, it.ident, tt)
          }
          // Bound, but not to an item-position expander.
          _ { cx.span_fatal(it.span,
                            #fmt("%s is not a legal item macro", *extname)) }
        }
      }
      _ {
        cx.span_bug(it.span, "invalid item macro invocation");
      }
    }
}
fn new_span(cx: ext_ctxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
@ -166,7 +200,7 @@ fn expand_crate(parse_sess: parse::parse_sess,
let f_pre =
@{fold_expr: |a,b,c| expand_expr(exts, cx, a, b, c, afp.fold_expr),
fold_mod: |a,b| expand_mod_items(exts, cx, a, b, afp.fold_mod),
fold_item: |a,b| expand_item(cx, a, b, afp.fold_item),
fold_item: |a,b| expand_item(exts, cx, a, b, afp.fold_item),
new_span: |a|new_span(cx, a)
with *afp};
let f = make_fold(f_pre);

View File

@ -0,0 +1,10 @@
import codemap::span;
import ext::base::ext_ctxt;
// Placeholder expander for the `proto!` item macro.  The protocol
// compiler does not exist yet, so any invocation reports an
// "unimplemented" span error instead of silently producing nothing.
fn expand_proto(cx: ext_ctxt, span: span, id: ast::ident, tt: ast::token_tree)
    -> @ast::item
{
    cx.span_unimpl(span, "Protocol compiler")
}

View File

@ -283,6 +283,10 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
rp,
/* FIXME (#2543) */ copy methods)
}
item_mac(m) {
// TODO: we might actually want to do something here.
item_mac(m)
}
};
}

View File

@ -12,6 +12,9 @@ export parse_crate_from_source_str;
export parse_expr_from_source_str, parse_item_from_source_str;
export parse_from_source_str;
// this used to be `import common::parser_common`, but it was causing
// unresolved import errors. Maybe resolve3 will fix it.
import common::*;
import parser::parser;
//import attr::parser_attr;
import attr::*; //resolve bug?
@ -20,8 +23,7 @@ import common::*; //resolve bug?
import ast::node_id;
import util::interner;
// FIXME (#1935): resolve badness
import lexer::{string_reader_as_reader, tt_reader_as_reader, reader,
string_reader, tt_reader};
import lexer::*;
import diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
type parse_sess = @{

View File

@ -1,7 +1,6 @@
import either::{either, left, right};
import ast_util::spanned;
import common::*; //resolve bug?
//import common::{parser_common, seq_sep_trailing_disallowed};
export attr_or_ext;
export parser_attr;

View File

@ -92,6 +92,15 @@ impl parser_common for parser {
self.token_is_keyword(word, self.token)
}
// True when `tok` is a plain (non-reserved, `false` flag) identifier
// whose interned string appears in the parser's keyword table.
// NOTE(review): introduced in this commit so item parsing can check
// "not a keyword, followed by `!`" before treating an identifier as an
// item-macro invocation — confirm against the parser change.
fn is_any_keyword(tok: token::token) -> bool {
alt tok {
token::IDENT(sid, false) {
self.keywords.contains_key(*self.get_str(sid))
}
_ { false }
}
}
fn eat_keyword(word: str) -> bool {
self.require_keyword(word);

View File

@ -9,7 +9,7 @@ import lexer::reader;
import prec::{as_prec, token_to_binop};
import attr::parser_attr;
import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
seq_sep_none, token_to_str, parser_common};
seq_sep_none, token_to_str};
import dvec::{dvec, extensions};
import vec::{push};
import ast::*;
@ -2595,6 +2595,21 @@ class parser {
self.parse_item_impl()
} else if self.eat_keyword("class") {
self.parse_item_class()
} else if !self.is_any_keyword(copy self.token)
&& self.look_ahead(1) == token::NOT
{
// item macro.
let pth = self.parse_path_without_tps();
#error("parsing invocation of %s", *pth.idents[0]);
self.expect(token::NOT);
let id = self.parse_ident();
let tt = self.parse_token_tree();
let m = ast::mac_invoc_tt(pth, tt);
let m: ast::mac = {node: m,
span: {lo: self.span.lo,
hi: self.span.hi,
expn_info: none}};
(id, item_mac(m), none)
} else { ret none; };
some(self.mk_item(lo, self.last_span.hi, ident, item_, vis,
alt extra_attrs {

View File

@ -589,6 +589,9 @@ fn print_item(s: ps, &&item: @ast::item) {
for methods.each |meth| { print_ty_method(s, meth); }
bclose(s, item.span);
}
ast::item_mac(_m) {
fail "item macros unimplemented"
}
}
s.ann.post(ann_node);
}

View File

@ -78,4 +78,6 @@ mod ext {
mod log_syntax;
mod auto_serialize;
mod source_util;
mod pipes;
}

View File

@ -163,6 +163,7 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
v.visit_ty(m.decl.output, e, v);
}
}
item_mac(_m) { fail "item macros unimplemented" }
}
}

View File

@ -217,6 +217,7 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt,
}
}
item_impl(*) {}
item_mac(*) { fail "item macros unimplemented" }
}
}
}
@ -749,6 +750,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_path(ebml_w, path, ast_map::path_name(item.ident));
ebml_w.end_tag();
}
item_mac(*) { fail "item macros unimplemented" }
}
}

View File

@ -1352,6 +1352,7 @@ fn found_def_item(i: @ast::item, ns: namespace) -> option<def> {
}
}
ast::item_impl(*) { /* ??? */ }
ast::item_mac(*) { fail "item macros unimplemented" }
}
ret none;
}
@ -1658,6 +1659,9 @@ fn index_mod(md: ast::_mod) -> mod_index {
// add the class name itself
add_to_index(index, it.ident, mie_item(it));
}
ast::item_mac(*) {
fail "item macros unimplemented"
}
}
}
ret index;

View File

@ -13,7 +13,7 @@ import syntax::ast::{expr_binary, expr_cast, expr_field, expr_fn};
import syntax::ast::{expr_fn_block, expr_index, expr_new, expr_path};
import syntax::ast::{expr_unary, fn_decl, foreign_item, foreign_item_fn};
import syntax::ast::{ident, trait_ref, impure_fn, instance_var, item};
import syntax::ast::{item_class, item_const, item_enum, item_fn};
import syntax::ast::{item_class, item_const, item_enum, item_fn, item_mac};
import syntax::ast::{item_foreign_mod, item_trait, item_impl, item_mod};
import syntax::ast::{item_ty, local, local_crate, method, node_id, pat};
import syntax::ast::{pat_enum, pat_ident, path, prim_ty, stmt_decl, ty};
@ -871,6 +871,10 @@ class Resolver {
(*name_bindings).define_type(def_ty(local_def(item.id)));
visit_item(item, new_parent, visitor);
}
item_mac(*) {
fail "item macros unimplemented"
}
}
}
@ -2854,6 +2858,10 @@ class Resolver {
item_const(*) {
visit_item(item, (), visitor);
}
item_mac(*) {
fail "item macros unimplemented"
}
}
self.xray_context = orig_xray_flag;

View File

@ -148,6 +148,7 @@ fn traverse_public_item(cx: ctx, item: @item) {
}
item_const(*) |
item_enum(*) | item_trait(*) {}
item_mac(*) { fail "item macros unimplemented" }
}
}

View File

@ -55,6 +55,7 @@ fn find_pre_post_item(ccx: crate_ctxt, i: item) {
item_impl(_, _, _, _, ms) {
for ms.each |m| { find_pre_post_method(ccx, m); }
}
item_mac(*) { fail "item macros unimplemented" }
}
}

View File

@ -537,6 +537,7 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
}
ast::item_impl(*) | ast::item_mod(_) |
ast::item_foreign_mod(_) { fail; }
ast::item_mac(*) { fail "item macros unimplemented" }
}
}

View File

@ -0,0 +1,17 @@
// xfail-test
// An example to make sure the protocol parsing syntax extension works.
proto! pingpong {
ping:send {
ping -> pong
}
pong:recv {
pong -> ping
}
}
fn main() {
// This test only exercises parsing/expansion of the `proto!` item
// macro above; there is nothing to run yet.
// TODO: do something with the protocol once the protocol compiler
// is implemented.
}