// rust/src/libsyntax/parse/mod.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The main parser interface
use ast::node_id;
use ast;
use codemap::{span, CodeMap, FileMap, FileSubstr};
use codemap;
use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
use parse::attr::parser_attr;
use parse::lexer::reader;
use parse::parser::Parser;
use parse::token::{ident_interner, mk_ident_interner};
use core::io;
use core::option::{None, Option, Some};
use core::path::Path;
use core::result::{Err, Ok};
pub mod lexer;
pub mod parser;
pub mod token;
pub mod comments;
pub mod attr;
/// Common routines shared by parser mods
pub mod common;
/// Routines the parser uses to classify AST nodes
pub mod classify;
/// Reporting obsolete syntax
pub mod obsolete;
2012-11-28 18:20:41 -06:00
2013-03-08 12:19:19 -06:00
// info about a parsing session.
// This structure and the reader both have
// an interner associated with them. If they're
// not the same, bad things can happen.
pub struct ParseSess {
2013-03-08 12:19:19 -06:00
cm: @codemap::CodeMap, // better be the same as the one in the reader!
next_id: node_id,
2013-03-08 12:19:19 -06:00
span_diagnostic: @span_handler, // better be the same as the one in the reader!
2012-11-28 18:20:41 -06:00
interner: @ident_interner,
}
2012-11-28 18:20:41 -06:00
pub fn new_parse_sess(demitter: Option<Emitter>) -> @mut ParseSess {
2012-11-28 18:20:41 -06:00
let cm = @CodeMap::new();
@mut ParseSess {
cm: cm,
next_id: 1,
span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
interner: mk_ident_interner(),
}
2012-11-28 18:20:41 -06:00
}
/// Create a parse session around a caller-supplied span handler and
/// codemap (the two should agree with each other and with the reader).
pub fn new_parse_sess_special_handler(sh: @span_handler,
                                      cm: @codemap::CodeMap)
                                   -> @mut ParseSess {
    @mut ParseSess {
        interner: mk_ident_interner(),
        span_diagnostic: sh,
        next_id: 1,
        cm: cm,
    }
}
2013-02-11 15:36:24 -06:00
// a bunch of utility functions of the form parse_<thing>_from_<source>
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// source_str.
pub fn parse_crate_from_file(
input: &Path,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> @ast::crate {
2013-04-23 12:57:41 -05:00
new_parser_from_file(sess, /*bad*/ copy cfg, input).parse_crate_mod()
2013-02-11 15:36:24 -06:00
// why is there no p.abort_if_errors here?
2012-11-28 18:20:41 -06:00
}
/// Parse a whole crate out of an in-memory source string, aborting
/// compilation if the parser recorded any errors.
pub fn parse_crate_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    sess: @mut ParseSess
) -> @ast::crate {
    let parser = new_parser_from_source_str(sess,
                                            /*bad*/ copy cfg,
                                            /*bad*/ copy name,
                                            source);
    maybe_aborted(parser.parse_crate_mod(), parser)
}
/// Parse a single expression out of an in-memory source string,
/// aborting compilation if the parser recorded any errors.
pub fn parse_expr_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    sess: @mut ParseSess
) -> @ast::expr {
    let parser = new_parser_from_source_str(sess,
                                            cfg,
                                            /*bad*/ copy name,
                                            source);
    maybe_aborted(parser.parse_expr(), parser)
}
/// Parse a single item out of an in-memory source string, attaching
/// the given outer attributes; aborts on recorded parse errors.
pub fn parse_item_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    attrs: ~[ast::attribute],
    sess: @mut ParseSess
) -> Option<@ast::item> {
    let parser = new_parser_from_source_str(sess,
                                            cfg,
                                            /*bad*/ copy name,
                                            source);
    maybe_aborted(parser.parse_item(attrs), parser)
}
/// Parse a meta item (attribute contents) out of an in-memory source
/// string; aborts on recorded parse errors.
pub fn parse_meta_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    sess: @mut ParseSess
) -> @ast::meta_item {
    let parser = new_parser_from_source_str(sess,
                                            cfg,
                                            /*bad*/ copy name,
                                            source);
    maybe_aborted(parser.parse_meta_item(), parser)
}
/// Parse a single statement out of an in-memory source string,
/// attaching the given attributes; aborts on recorded parse errors.
pub fn parse_stmt_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    attrs: ~[ast::attribute],
    sess: @mut ParseSess
) -> @ast::stmt {
    let parser = new_parser_from_source_str(sess,
                                            cfg,
                                            /*bad*/ copy name,
                                            source);
    maybe_aborted(parser.parse_stmt(attrs), parser)
}
/// Parse an in-memory source string into a sequence of token trees;
/// aborts on recorded parse errors. The parser's quote depth is
/// bumped before parsing (presumably for quote/`$` handling — the
/// callers here don't show why; confirm against the lexer).
pub fn parse_tts_from_source_str(
    name: ~str,
    source: @~str,
    cfg: ast::crate_cfg,
    sess: @mut ParseSess
) -> ~[ast::token_tree] {
    let parser = new_parser_from_source_str(sess,
                                            cfg,
                                            /*bad*/ copy name,
                                            source);
    *parser.quote_depth += 1u;
    // right now this is re-creating the token trees from ... token trees.
    maybe_aborted(parser.parse_all_token_trees(), parser)
}
2013-04-23 12:57:41 -05:00
// given a function and parsing information (source str,
// filename, crate cfg, and sess), create a parser,
// apply the function, and check that the parser
// consumed all of the input before returning the function's
// result.
pub fn parse_from_source_str<T>(
f: &fn(&Parser) -> T,
name: ~str, ss: codemap::FileSubstr,
source: @~str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> T {
2013-04-23 12:57:41 -05:00
let p = new_parser_from_source_substr(
2013-02-26 08:35:36 -06:00
sess,
cfg,
name,
ss,
2013-02-26 08:35:36 -06:00
source
);
let r = f(&p);
2012-11-28 18:20:41 -06:00
if !p.reader.is_eof() {
p.reader.fatal(~"expected end-of-string");
}
maybe_aborted(r,p)
2012-11-28 18:20:41 -06:00
}
2013-04-23 12:57:41 -05:00
// return the next unused node id.
pub fn next_node_id(sess: @mut ParseSess) -> node_id {
2012-11-28 18:20:41 -06:00
let rv = sess.next_id;
sess.next_id += 1;
// ID 0 is reserved for the crate and doesn't actually exist in the AST
2013-03-28 20:39:09 -05:00
assert!(rv != 0);
2012-11-28 18:20:41 -06:00
return rv;
}
2013-04-23 12:57:41 -05:00
// Create a new parser from a source string
pub fn new_parser_from_source_str(sess: @mut ParseSess,
cfg: ast::crate_cfg,
name: ~str,
source: @~str)
-> Parser {
2013-04-23 12:57:41 -05:00
filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
2012-11-28 18:20:41 -06:00
}
2013-04-23 12:57:41 -05:00
// Create a new parser from a source string where the origin
// is specified as a substring of another file.
pub fn new_parser_from_source_substr(sess: @mut ParseSess,
cfg: ast::crate_cfg,
name: ~str,
ss: codemap::FileSubstr,
source: @~str)
-> Parser {
filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg)
2012-11-28 18:20:41 -06:00
}
/// Create a new parser, handling errors as appropriate
2012-11-28 18:20:41 -06:00
/// if the file doesn't exist
pub fn new_parser_from_file(
sess: @mut ParseSess,
cfg: ast::crate_cfg,
path: &Path
) -> Parser {
2013-04-23 12:57:41 -05:00
filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg)
2012-11-28 18:20:41 -06:00
}
2013-04-23 12:57:41 -05:00
/// Given a session, a crate config, a path, and a span, add
/// the file at the given path to the codemap, and return a parser.
/// On an error, use the given span as the source of the problem.
pub fn new_sub_parser_from_file(
sess: @mut ParseSess,
cfg: ast::crate_cfg,
path: &Path,
sp: span
) -> Parser {
2013-04-23 12:57:41 -05:00
filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg)
}
/// Given a filemap and config, return a parser
pub fn filemap_to_parser(sess: @mut ParseSess,
                         filemap: @FileMap,
                         cfg: ast::crate_cfg) -> Parser {
    let tts = filemap_to_tts(sess, filemap);
    tts_to_parser(sess, tts, cfg)
}
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
/// Build a parser directly from a sequence of token trees.
pub fn new_parser_from_tts(sess: @mut ParseSess,
                           cfg: ast::crate_cfg,
                           tts: ~[ast::token_tree]) -> Parser {
    tts_to_parser(sess, tts, cfg)
}
// base abstractions

/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's codemap and return the new filemap.
/// If reading the file fails, report fatally at `spanopt` when a span
/// was supplied, otherwise through the plain handler.
pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
    -> @FileMap {
    match io::read_whole_file_str(path) {
        Err(e) => {
            match spanopt {
                Some(span) => sess.span_diagnostic.span_fatal(span, e),
                None => sess.span_diagnostic.handler().fatal(e)
            }
        }
        Ok(src) => string_to_filemap(sess, @src, path.to_str()),
    }
}
2013-04-23 12:57:41 -05:00
// given a session and a string, add the string to
// the session's codemap and return the new filemap
pub fn string_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str)
-> @FileMap {
sess.cm.new_filemap(path, source)
}
/// Like `string_to_filemap`, but the new filemap additionally records
/// (via `filesubstr`) that the source originated as a substring of
/// another file.
pub fn substring_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str,
                            filesubstr: FileSubstr) -> @FileMap {
    sess.cm.new_filemap_w_substr(path, filesubstr, source)
}
// given a filemap, produce a sequence of token-trees
pub fn filemap_to_tts(sess: @mut ParseSess, filemap: @FileMap)
    -> ~[ast::token_tree] {
    // it appears to me that the cfg doesn't matter here... indeed,
    // parsing tt's probably shouldn't require a parser at all.
    let no_cfg = ~[];
    let string_reader = lexer::new_string_reader(copy sess.span_diagnostic,
                                                 filemap,
                                                 sess.interner);
    let parser = Parser(sess, no_cfg, string_reader as @reader);
    parser.parse_all_token_trees()
}
// given tts and cfg, produce a parser
pub fn tts_to_parser(sess: @mut ParseSess,
                     tts: ~[ast::token_tree],
                     cfg: ast::crate_cfg) -> Parser {
    let tt_reader = lexer::new_tt_reader(copy sess.span_diagnostic,
                                         sess.interner,
                                         None,
                                         tts);
    Parser(sess, cfg, tt_reader as @reader)
}
2013-01-30 11:56:33 -06:00
// abort if necessary
pub fn maybe_aborted<T>(result : T, p: Parser) -> T {
p.abort_if_errors();
result
}
#[cfg(test)]
mod test {
    //! Unit tests for the parser entry points above. Several tests
    //! assert exact AST node ids and interned identifier numbers, so
    //! they depend on the interner preload below and on the order in
    //! which the parser assigns node ids.
    use super::*;
    use std::serialize::Encodable;
    use std;
    use core::io;
    use core::option::Option;
    use core::option::Some;
    use core::option::None;
    use core::int;
    use core::num::NumCast;
    use core::path::Path;
    use codemap::{dummy_sp, CodeMap, span, BytePos, spanned};
    use opt_vec;
    use ast;
    use abi;
    use ast_util::mk_ident;
    use parse::parser::Parser;
    use parse::token::{ident_interner, mk_ident_interner, mk_fresh_ident_interner};
    use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};

    // add known names to interner for testing
    // (sequential interning after the fill: a -> 100, b -> 101,
    //  c -> 102, d -> 103, return -> 104; 'b' is asserted below)
    fn mk_testing_interner() -> @ident_interner {
        let i = mk_fresh_ident_interner();
        // baby hack; in order to put the identifiers
        // 'a' and 'b' at known locations, we're going
        // to fill up the interner to length 100. If
        // the # of preloaded items on the interner
        // ever gets larger than 100, we'll have to
        // adjust this number (say, to 200) and
        // change the numbers in the identifier
        // test cases below.
        assert!(i.len() < 100);
        for int::range(0,100-((i.len()).to_int())) |_dc| {
            i.gensym(@~"dontcare");
        }
        i.intern(@~"a");
        i.intern(@~"b");
        i.intern(@~"c");
        i.intern(@~"d");
        i.intern(@~"return");
        assert!(i.get(ast::ident{repr:101,ctxt:0}) == @~"b");
        i
    }

    // make a parse_sess that's closed over a
    // testing interner (where a -> 100, b -> 101)
    fn mk_testing_parse_sess() -> @mut ParseSess {
        let interner = mk_testing_interner();
        let cm = @CodeMap::new();
        @mut ParseSess {
            cm: cm,
            next_id: 1,
            span_diagnostic: mk_span_handler(mk_handler(None), cm),
            interner: interner,
        }
    }

    // map a string to tts, using a made-up filename: return both the token_trees
    // and the ParseSess
    fn string_to_tts_t (source_str : @~str) -> (~[ast::token_tree],@mut ParseSess) {
        let ps = mk_testing_parse_sess();
        (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
    }

    // map a string to tts, return the tt without its parsesess
    // (the session `ps` is simply dropped here)
    fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
        let (tts,ps) = string_to_tts_t(source_str);
        tts
    }

    // map string to parser (via tts)
    fn string_to_parser(source_str: @~str) -> Parser {
        let ps = mk_testing_parse_sess();
        new_parser_from_source_str(ps,~[],~"bogofile",source_str)
    }

    // encode any Encodable value to a JSON string; used below to
    // compare token trees structurally
    #[cfg(test)] fn to_json_str<E : Encodable<std::json::Encoder>>(val: @E) -> ~str {
        do io::with_str_writer |writer| {
            let mut encoder = std::json::Encoder(writer);
            val.encode(&mut encoder);
        }
    }

    // parse a string as a whole crate
    fn string_to_crate (source_str : @~str) -> @ast::crate {
        string_to_parser(source_str).parse_crate_mod()
    }

    // parse a string as a single expression
    fn string_to_expr (source_str : @~str) -> @ast::expr {
        string_to_parser(source_str).parse_expr()
    }

    // parse a string as a single item (with no attributes)
    fn string_to_item (source_str : @~str) -> Option<@ast::item> {
        string_to_parser(source_str).parse_item(~[])
    }

    // parse a string as a single statement (with no attributes)
    fn string_to_stmt (source_str : @~str) -> @ast::stmt {
        string_to_parser(source_str).parse_stmt(~[])
    }

    // produce a codemap::span
    fn sp (a: uint, b: uint) -> span {
        span{lo:BytePos(a),hi:BytePos(b),expn_info:None}
    }

    // convert a vector of uints to a vector of ast::idents
    fn ints_to_idents(ids: ~[uint]) -> ~[ast::ident] {
        ids.map(|u| mk_ident(*u))
    }

    #[test] fn path_exprs_1 () {
        // "a" parses to a one-segment path expression using ident 100
        assert_eq!(string_to_expr(@~"a"),
                   @ast::expr{id:1,
                              callee_id:2,
                              node:ast::expr_path(@ast::Path {span:sp(0,1),
                                                              global:false,
                                                              idents:~[mk_ident(100)],
                                                              rp:None,
                                                              types:~[]}),
                              span:sp(0,1)})
    }

    #[test] fn path_exprs_2 () {
        // leading "::" yields a global path
        assert_eq!(string_to_expr(@~"::a::b"),
                   @ast::expr{id:1,
                              callee_id:2,
                              node:ast::expr_path(@ast::Path {span:sp(0,6),
                                                              global:true,
                                                              idents:ints_to_idents(~[100,101]),
                                                              rp:None,
                                                              types:~[]}),
                              span:sp(0,6)})
    }

    // a keyword ("return") cannot appear as a path segment
    #[should_fail]
    #[test] fn bad_path_expr_1() {
        string_to_expr(@~"::abc::def::return");
    }

    #[test] fn string_to_tts_1 () {
        // compare the lexed token trees against their JSON encoding
        let (tts,ps) = string_to_tts_t(@~"fn a (b : int) { b; }");
        assert_eq!(to_json_str(@tts),
                   ~"[\
                   [\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
                   [\"tt_tok\",null,[\"IDENT\",\"a\",false]],\
                   [\
                       \"tt_delim\",\
                       [\
                           [\"tt_tok\",null,\"LPAREN\"],\
                           [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
                           [\"tt_tok\",null,\"COLON\"],\
                           [\"tt_tok\",null,[\"IDENT\",\"int\",false]],\
                           [\"tt_tok\",null,\"RPAREN\"]\
                       ]\
                   ],\
                   [\
                       \"tt_delim\",\
                       [\
                           [\"tt_tok\",null,\"LBRACE\"],\
                           [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
                           [\"tt_tok\",null,\"SEMI\"],\
                           [\"tt_tok\",null,\"RBRACE\"]\
                       ]\
                   ]\
                   ]"
                  );
    }

    #[test] fn ret_expr() {
        // "return d": the inner path expr gets ids 1/2, the ret expr 3/4
        assert_eq!(string_to_expr(@~"return d"),
                   @ast::expr{id:3,
                              callee_id:4,
                              node:ast::expr_ret(
                                  Some(@ast::expr{id:1,callee_id:2,
                                                  node:ast::expr_path(
                                                      @ast::Path{span:sp(7,8),
                                                                 global:false,
                                                                 idents:~[mk_ident(103)],
                                                                 rp:None,
                                                                 types:~[]
                                                                }),
                                                  span:sp(7,8)})),
                              span:sp(0,8)})
    }

    #[test] fn parse_stmt_1 () {
        assert_eq!(string_to_stmt(@~"b;"),
                   @spanned{
                       node: ast::stmt_expr(@ast::expr{
                           id: 1,
                           callee_id: 2,
                           node: ast::expr_path(
                               @ast::Path{
                                   span:sp(0,1),
                                   global:false,
                                   idents:~[mk_ident(101)],
                                   rp:None,
                                   types: ~[]}),
                           span: sp(0,1)},
                           3), // fixme
                       span: sp(0,1)})
    }

    // assert that the given parser has consumed all of its input
    fn parser_done(p: Parser){
        assert_eq!(*p.token,token::EOF);
    }

    #[test] fn parse_ident_pat () {
        let parser = string_to_parser(@~"b");
        assert_eq!(parser.parse_pat(false),
                   @ast::pat{id:1, // fixme
                             node: ast::pat_ident(ast::bind_by_copy,
                                                  @ast::Path{
                                                      span:sp(0,1),
                                                      global:false,
                                                      idents:~[mk_ident(101)],
                                                      rp: None,
                                                      types: ~[]},
                                                  None // no idea
                                                 ),
                             span: sp(0,1)});
        parser_done(parser);
    }

    #[test] fn parse_arg () {
        let parser = string_to_parser(@~"b : int");
        assert_eq!(parser.parse_arg_general(true),
                   ast::arg{
                       is_mutbl: false,
                       ty: @ast::Ty{id:3, // fixme
                                    node: ast::ty_path(@ast::Path{
                                        span:sp(4,4), // this is bizarre...
                                        // check this in the original parser?
                                        global:false,
                                        idents:~[mk_ident(105)],
                                        rp: None,
                                        types: ~[]},
                                        2),
                                    span:sp(4,7)},
                       pat: @ast::pat{id:1,
                                      node: ast::pat_ident(ast::bind_by_copy,
                                                           @ast::Path{
                                                               span:sp(0,1),
                                                               global:false,
                                                               idents:~[mk_ident(101)],
                                                               rp: None,
                                                               types: ~[]},
                                                           None // no idea
                                                          ),
                                      span: sp(0,1)},
                       id: 4 // fixme
                   })
    }

    // check the contents of the tt manually:
    #[test] fn parse_fundecl () {
        // this test depends on the intern order of "fn" and "int", and on the
        // assignment order of the node_ids.
        assert_eq!(string_to_item(@~"fn a (b : int) { b; }"),
                   Some(
                       @ast::item{ident:mk_ident(100),
                                  attrs:~[],
                                  id: 10, // fixme
                                  node: ast::item_fn(ast::fn_decl{
                                      inputs: ~[ast::arg{
                                          is_mutbl: false,
                                          ty: @ast::Ty{id:3, // fixme
                                                       node: ast::ty_path(@ast::Path{
                                                           span:sp(10,13),
                                                           global:false,
                                                           idents:~[mk_ident(106)],
                                                           rp: None,
                                                           types: ~[]},
                                                           2),
                                                       span:sp(10,13)},
                                          pat: @ast::pat{id:1, // fixme
                                                         node: ast::pat_ident(
                                                             ast::bind_by_copy,
                                                             @ast::Path{
                                                                 span:sp(6,7),
                                                                 global:false,
                                                                 idents:~[mk_ident(101)],
                                                                 rp: None,
                                                                 types: ~[]},
                                                             None // no idea
                                                         ),
                                                         span: sp(6,7)},
                                          id: 4 // fixme
                                      }],
                                      output: @ast::Ty{id:5, // fixme
                                                       node: ast::ty_nil,
                                                       span:sp(15,15)}, // not sure
                                      cf: ast::return_val
                                  },
                                  ast::impure_fn,
                                  abi::AbiSet::Rust(),
                                  ast::Generics{ // no idea on either of these:
                                      lifetimes: opt_vec::Empty,
                                      ty_params: opt_vec::Empty,
                                  },
                                  spanned{
                                      span: sp(15,21),
                                      node: ast::blk_{
                                          view_items: ~[],
                                          stmts: ~[@spanned{
                                              node: ast::stmt_semi(@ast::expr{
                                                  id: 6,
                                                  callee_id: 7,
                                                  node: ast::expr_path(
                                                      @ast::Path{
                                                          span:sp(17,18),
                                                          global:false,
                                                          idents:~[mk_ident(101)],
                                                          rp:None,
                                                          types: ~[]}),
                                                  span: sp(17,18)},
                                              8), // fixme
                                              span: sp(17,18)}],
                                          expr: None,
                                          id: 9, // fixme
                                          rules: ast::default_blk // no idea
                                      }}),
                                  vis: ast::inherited,
                                  span: sp(0,21)}));
    }

    #[test] fn parse_exprs () {
        // just make sure that they parse....
        string_to_expr(@~"3 + 4");
        string_to_expr(@~"a::z.froob(b,@(987+3))");
    }
}