// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast;
use attr;
use codemap::{BytePos, Pos, span};
use ext::base::ext_ctxt;
use ext::base;
use ext::build;
use parse::token::*;
use parse::token;
use parse;

use core::prelude::*;
use core::str;

/**
*
* Quasiquoting works via token trees.
*
* This is registered as a set of expression syntax extensions called quote!
* that lift their argument token-tree to an AST representing the
* construction of the same token tree, with ast::tt_nonterminal nodes
* interpreted as antiquotes (splices).
*
*/
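
// For example (a sketch of intended use only; the exact set of quote_*
// macros and their invocation syntax may vary between compiler versions,
// and quotes only expand where an `ext_cx` binding is in scope):
//
//     let e  = quote_expr!( 1 + 2 );      // AST for the expression `1 + 2`
//     let t  = quote_ty!( int );          // AST for the type `int`
//     let e2 = quote_expr!( $e + $e );    // antiquote: splices `e` back in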

pub mod rt {
    use ast;
    use ext::base::ext_ctxt;
    use parse;
    use print::pprust;

    use core::str;

    pub use ast::*;
    pub use parse::token::*;
    pub use parse::new_parser_from_tts;
    pub use codemap::BytePos;
    pub use codemap::span;

    use print::pprust::{item_to_str, ty_to_str};

    trait ToTokens {
        pub fn to_tokens(_cx: ext_ctxt) -> ~[token_tree];
    }

    impl ToTokens for ~[token_tree] {
        pub fn to_tokens(_cx: ext_ctxt) -> ~[token_tree] {
            copy self
        }
    }

    /* Should be (when bugs in default methods are fixed):

    trait ToSource : ToTokens {
        // Takes a thing and generates a string containing rust code for it.
        pub fn to_source(cx: ext_ctxt) -> ~str;

        // If you can make source, you can definitely make tokens.
        pub fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    */

    trait ToSource {
        // Takes a thing and generates a string containing rust code for it.
        pub fn to_source(cx: ext_ctxt) -> ~str;
    }

    impl ToSource for ast::ident {
        fn to_source(cx: ext_ctxt) -> ~str {
            copy *cx.parse_sess().interner.get(self)
        }
    }

    impl ToSource for @ast::item {
        fn to_source(cx: ext_ctxt) -> ~str {
            item_to_str(self, cx.parse_sess().interner)
        }
    }

    impl ToSource for ~[@ast::item] {
        fn to_source(cx: ext_ctxt) -> ~str {
            str::connect(self.map(|i| i.to_source(cx)), ~"\n\n")
        }
    }

    impl ToSource for @ast::Ty {
        fn to_source(cx: ext_ctxt) -> ~str {
            ty_to_str(self, cx.parse_sess().interner)
        }
    }

    impl ToSource for ~[@ast::Ty] {
        fn to_source(cx: ext_ctxt) -> ~str {
            str::connect(self.map(|i| i.to_source(cx)), ~", ")
        }
    }

    impl ToSource for ~[ast::ty_param] {
        fn to_source(cx: ext_ctxt) -> ~str {
            pprust::typarams_to_str(self, cx.parse_sess().interner)
        }
    }

    impl ToSource for @ast::expr {
        fn to_source(cx: ext_ctxt) -> ~str {
            pprust::expr_to_str(self, cx.parse_sess().interner)
        }
    }

    // Alas ... we write these out instead. All redundant.

    impl ToTokens for ast::ident {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for @ast::item {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for ~[@ast::item] {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for @ast::Ty {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for ~[@ast::Ty] {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for ~[ast::ty_param] {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    impl ToTokens for @ast::expr {
        fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
            cx.parse_tts(self.to_source(cx))
        }
    }

    trait ExtParseUtils {
        fn parse_item(s: ~str) -> @ast::item;
        fn parse_expr(s: ~str) -> @ast::expr;
        fn parse_stmt(s: ~str) -> @ast::stmt;
        fn parse_tts(s: ~str) -> ~[ast::token_tree];
    }

    impl ExtParseUtils for ext_ctxt {

        fn parse_item(s: ~str) -> @ast::item {
            let res = parse::parse_item_from_source_str(
                ~"<quote expansion>",
                @(copy s),
                self.cfg(),
                ~[],
                self.parse_sess());
            match res {
                Some(ast) => ast,
                None => {
                    error!("Parse error with ```\n%s\n```", s);
                    fail!()
                }
            }
        }

        fn parse_stmt(s: ~str) -> @ast::stmt {
            parse::parse_stmt_from_source_str(
                ~"<quote expansion>",
                @(copy s),
                self.cfg(),
                ~[],
                self.parse_sess())
        }

        fn parse_expr(s: ~str) -> @ast::expr {
            parse::parse_expr_from_source_str(
                ~"<quote expansion>",
                @(copy s),
                self.cfg(),
                self.parse_sess())
        }

        fn parse_tts(s: ~str) -> ~[ast::token_tree] {
            parse::parse_tts_from_source_str(
                ~"<quote expansion>",
                @(copy s),
                self.cfg(),
                self.parse_sess())
        }
    }

}

pub fn expand_quote_tokens(cx: ext_ctxt,
                           sp: span,
                           tts: ~[ast::token_tree]) -> base::MacResult {
    base::MRExpr(expand_tts(cx, sp, tts))
}

pub fn expand_quote_expr(cx: ext_ctxt,
                         sp: span,
                         tts: ~[ast::token_tree]) -> base::MacResult {
    base::MRExpr(expand_parse_call(cx, sp, ~"parse_expr", ~[], tts))
}

pub fn expand_quote_item(cx: ext_ctxt,
                         sp: span,
                         tts: ~[ast::token_tree]) -> base::MacResult {
    let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]);
    base::MRExpr(expand_parse_call(cx, sp, ~"parse_item",
                                   ~[e_attrs], tts))
}

pub fn expand_quote_pat(cx: ext_ctxt,
                        sp: span,
                        tts: ~[ast::token_tree]) -> base::MacResult {
    let e_refutable = build::mk_lit(cx, sp, ast::lit_bool(true));
    base::MRExpr(expand_parse_call(cx, sp, ~"parse_pat",
                                   ~[e_refutable], tts))
}

pub fn expand_quote_ty(cx: ext_ctxt,
                       sp: span,
                       tts: ~[ast::token_tree]) -> base::MacResult {
    let e_param_colons = build::mk_lit(cx, sp, ast::lit_bool(false));
    base::MRExpr(expand_parse_call(cx, sp, ~"parse_ty",
                                   ~[e_param_colons], tts))
}

pub fn expand_quote_stmt(cx: ext_ctxt,
                         sp: span,
                         tts: ~[ast::token_tree]) -> base::MacResult {
    let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]);
    base::MRExpr(expand_parse_call(cx, sp, ~"parse_stmt",
                                   ~[e_attrs], tts))
}

fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
    strs.map(|str| cx.parse_sess().interner.intern(@*str))
}

fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident {
    cx.parse_sess().interner.intern(@str)
}

// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr {
    let e_meth = build::mk_access(cx, sp,
                                  ids_ext(cx, ~[~"ext_cx"]),
                                  id_ext(cx, ~"ident_of"));
    let e_str = build::mk_uniq_str(cx, sp, cx.str_of(ident));
    build::mk_call_(cx, sp, e_meth, ~[e_str])
}

fn mk_bytepos(cx: ext_ctxt, sp: span, bpos: BytePos) -> @ast::expr {
    let path = ids_ext(cx, ~[~"BytePos"]);
    let arg = build::mk_uint(cx, sp, bpos.to_uint());
    build::mk_call(cx, sp, path, ~[arg])
}

fn mk_binop(cx: ext_ctxt, sp: span, bop: token::binop) -> @ast::expr {
    let name = match bop {
        PLUS => "PLUS",
        MINUS => "MINUS",
        STAR => "STAR",
        SLASH => "SLASH",
        PERCENT => "PERCENT",
        CARET => "CARET",
        AND => "AND",
        OR => "OR",
        SHL => "SHL",
        SHR => "SHR"
    };
    build::mk_path(cx, sp,
                   ids_ext(cx, ~[name.to_owned()]))
}

fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {

    match tok {
        BINOP(binop) => {
            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"BINOP"]),
                                  ~[mk_binop(cx, sp, binop)]);
        }
        BINOPEQ(binop) => {
            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"BINOPEQ"]),
                                  ~[mk_binop(cx, sp, binop)]);
        }

        LIT_INT(i, ity) => {
            let s_ity = match ity {
                ast::ty_i => ~"ty_i",
                ast::ty_char => ~"ty_char",
                ast::ty_i8 => ~"ty_i8",
                ast::ty_i16 => ~"ty_i16",
                ast::ty_i32 => ~"ty_i32",
                ast::ty_i64 => ~"ty_i64"
            };
            let e_ity =
                build::mk_path(cx, sp,
                               ids_ext(cx, ~[s_ity]));

            let e_i64 = build::mk_lit(cx, sp, ast::lit_int(i, ast::ty_i64));

            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"LIT_INT"]),
                                  ~[e_i64, e_ity]);
        }

        LIT_UINT(u, uty) => {
            let s_uty = match uty {
                ast::ty_u => ~"ty_u",
                ast::ty_u8 => ~"ty_u8",
                ast::ty_u16 => ~"ty_u16",
                ast::ty_u32 => ~"ty_u32",
                ast::ty_u64 => ~"ty_u64"
            };
            let e_uty =
                build::mk_path(cx, sp,
                               ids_ext(cx, ~[s_uty]));

            let e_u64 = build::mk_lit(cx, sp, ast::lit_uint(u, ast::ty_u64));

            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"LIT_UINT"]),
                                  ~[e_u64, e_uty]);
        }

        LIT_INT_UNSUFFIXED(i) => {
            let e_i64 = build::mk_lit(cx, sp,
                                      ast::lit_int(i, ast::ty_i64));

            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"LIT_INT_UNSUFFIXED"]),
                                  ~[e_i64]);
        }

        LIT_FLOAT(fident, fty) => {
            let s_fty = match fty {
                ast::ty_f => ~"ty_f",
                ast::ty_f32 => ~"ty_f32",
                ast::ty_f64 => ~"ty_f64"
            };
            let e_fty =
                build::mk_path(cx, sp,
                               ids_ext(cx, ~[s_fty]));

            let e_fident = mk_ident(cx, sp, fident);

            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"LIT_FLOAT"]),
                                  ~[e_fident, e_fty]);
        }

        LIT_STR(ident) => {
            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"LIT_STR"]),
                                  ~[mk_ident(cx, sp, ident)]);
        }

        IDENT(ident, b) => {
            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"IDENT"]),
                                  ~[mk_ident(cx, sp, ident),
                                    build::mk_lit(cx, sp, ast::lit_bool(b))]);
        }

        DOC_COMMENT(ident) => {
            return build::mk_call(cx, sp,
                                  ids_ext(cx, ~[~"DOC_COMMENT"]),
                                  ~[mk_ident(cx, sp, ident)]);
        }

        INTERPOLATED(_) => fail!(~"quote! with interpolated token"),

        _ => ()
    }

    let name = match tok {
        EQ => "EQ",
        LT => "LT",
        LE => "LE",
        EQEQ => "EQEQ",
        NE => "NE",
        GE => "GE",
        GT => "GT",
        ANDAND => "ANDAND",
        OROR => "OROR",
        NOT => "NOT",
        TILDE => "TILDE",
        AT => "AT",
        DOT => "DOT",
        DOTDOT => "DOTDOT",
        COMMA => "COMMA",
        SEMI => "SEMI",
        COLON => "COLON",
        MOD_SEP => "MOD_SEP",
        RARROW => "RARROW",
        LARROW => "LARROW",
        DARROW => "DARROW",
        FAT_ARROW => "FAT_ARROW",
        LPAREN => "LPAREN",
        RPAREN => "RPAREN",
        LBRACKET => "LBRACKET",
        RBRACKET => "RBRACKET",
        LBRACE => "LBRACE",
        RBRACE => "RBRACE",
        POUND => "POUND",
        DOLLAR => "DOLLAR",
        UNDERSCORE => "UNDERSCORE",
        EOF => "EOF",
        _ => fail!()
    };
    build::mk_path(cx, sp,
                   ids_ext(cx, ~[name.to_owned()]))
}

fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree)
    -> ~[@ast::stmt] {

    match *tt {

        ast::tt_tok(sp, ref tok) => {
            let e_sp = build::mk_path(cx, sp,
                                      ids_ext(cx, ~[~"sp"]));
            let e_tok =
                build::mk_call(cx, sp,
                               ids_ext(cx, ~[~"tt_tok"]),
                               ~[e_sp, mk_token(cx, sp, *tok)]);
            let e_push =
                build::mk_call_(cx, sp,
                                build::mk_access(cx, sp,
                                                 ids_ext(cx, ~[~"tt"]),
                                                 id_ext(cx, ~"push")),
                                ~[e_tok]);
            ~[build::mk_stmt(cx, sp, e_push)]
        }

        ast::tt_delim(ref tts) => mk_tts(cx, sp, *tts),
        ast::tt_seq(*) => fail!(~"tt_seq in quote!"),

        ast::tt_nonterminal(sp, ident) => {

            // tt.push_all_move($ident.to_tokens(ext_cx))

            let e_to_toks =
                build::mk_call_(cx, sp,
                                build::mk_access
                                (cx, sp,
                                 ~[ident],
                                 id_ext(cx, ~"to_tokens")),
                                ~[build::mk_path(cx, sp,
                                                 ids_ext(cx, ~[~"ext_cx"]))]);

            let e_push =
                build::mk_call_(cx, sp,
                                build::mk_access
                                (cx, sp,
                                 ids_ext(cx, ~[~"tt"]),
                                 id_ext(cx, ~"push_all_move")),
                                ~[e_to_toks]);

            ~[build::mk_stmt(cx, sp, e_push)]
        }
    }
}

fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
    -> ~[@ast::stmt] {
    let mut ss = ~[];
    for tts.each |tt| {
        ss.push_all_move(mk_tt(cx, sp, tt));
    }
    ss
}

fn expand_tts(cx: ext_ctxt,
              sp: span,
              tts: ~[ast::token_tree]) -> @ast::expr {

    // NB: It appears that the main parser loses its mind if we consider
    // $foo as a tt_nonterminal during the main parse, so we have to re-parse
    // under quote_depth > 0. This is silly and should go away; the _guess_
    // is that it has to do with the transition away from supporting
    // old-style macros, so try removing it when enough of them are gone.

    let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts);
    p.quote_depth += 1u;
    let tts = p.parse_all_token_trees();
    p.abort_if_errors();

    // We want to emit a block expression that does a sequence of 'use's to
    // import the runtime module, followed by a tt-building expression.
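    //
    // Roughly, the emitted expression has this shape (a sketch only; the
    // helpers below are what actually construct it):
    //
    //     {
    //         use syntax::ext::quote::rt::*;
    //         let sp = ext_cx.call_site();
    //         let mut tt = ~[];
    //         tt.push(...);            // one statement per literal token
    //         tt.push_all_move(...);   // one per antiquoted $splice
    //         tt
    //     }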

    let uses = ~[ build::mk_glob_use(cx, sp, ids_ext(cx, ~[~"syntax",
                                                           ~"ext",
                                                           ~"quote",
                                                           ~"rt"])) ];

    // We also bind a single value, sp, to ext_cx.call_site().
    //
    // This causes every span in a token-tree quote to be attributed to the
    // call site of the extension using the quote. We can't really do much
    // better, since the source of the quote may well be in a library that
    // was not even parsed by this compilation run, and that the user has no
    // source code for (e.g. in libsyntax, which they're just _using_).
    //
    // The old quasiquoter had an elaborate mechanism for denoting input
    // file locations from which quotes originated; unfortunately this
    // relied on feeding the source string of the quote back into the
    // compiler (which we don't really want to do) and, in any case, only
    // pushed the problem a very small step further back: an error
    // resulting from a parse of the resulting quote is still attributed to
    // the site where the string literal occurred, which was in a source
    // file _other_ than the one the user has control over. For example, an
    // error in a quote from the protocol compiler, invoked in user code
    // using proto!, will be attributed to the pipec.rs file in libsyntax,
    // which the user might not even have source to (unless they happen to
    // have a compiler on hand). Overall, the phase distinction just makes
    // quotes "hard to attribute". Possibly this could be fixed by
    // recreating some of the original qq machinery in the tt regime
    // (pushing fake FileMaps onto the parser to account for the original
    // sites of quotes, for example), but at this point it seems unlikely
    // to be worth the hassle.

    let e_sp = build::mk_call_(cx, sp,
                               build::mk_access(cx, sp,
                                                ids_ext(cx, ~[~"ext_cx"]),
                                                id_ext(cx, ~"call_site")),
                               ~[]);

    let stmt_let_sp = build::mk_local(cx, sp, false,
                                      id_ext(cx, ~"sp"),
                                      e_sp);

    let stmt_let_tt = build::mk_local(cx, sp, true,
                                      id_ext(cx, ~"tt"),
                                      build::mk_uniq_vec_e(cx, sp, ~[]));

    build::mk_block(cx, sp, uses,
                    ~[stmt_let_sp,
                      stmt_let_tt] + mk_tts(cx, sp, tts),
                    Some(build::mk_path(cx, sp,
                                        ids_ext(cx, ~[~"tt"]))))
}

fn expand_parse_call(cx: ext_ctxt,
                     sp: span,
                     parse_method: ~str,
                     arg_exprs: ~[@ast::expr],
                     tts: ~[ast::token_tree]) -> @ast::expr {
    let tts_expr = expand_tts(cx, sp, tts);

    let cfg_call = || build::mk_call_(
        cx, sp, build::mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
                                 id_ext(cx, ~"cfg")), ~[]);

    let parse_sess_call = || build::mk_call_(
        cx, sp, build::mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]),
                                 id_ext(cx, ~"parse_sess")), ~[]);

    let new_parser_call =
        build::mk_call_global(cx, sp,
                              ids_ext(cx, ~[~"syntax",
                                            ~"ext",
                                            ~"quote",
                                            ~"rt",
                                            ~"new_parser_from_tts"]),
                              ~[parse_sess_call(),
                                cfg_call(),
                                tts_expr]);

    build::mk_call_(cx, sp,
                    build::mk_access_(cx, sp, new_parser_call,
                                      id_ext(cx, parse_method)),
                    arg_exprs)
}