2017-12-29 00:27:56 +03:00
|
|
|
extern crate file;
|
2017-12-29 00:56:36 +03:00
|
|
|
extern crate libsyntax2;
|
2018-01-07 14:56:08 +03:00
|
|
|
extern crate testutils;
|
2017-12-29 00:27:56 +03:00
|
|
|
|
2018-01-07 14:56:08 +03:00
|
|
|
use std::path::{Path};
|
2017-12-29 00:56:36 +03:00
|
|
|
use std::fmt::Write;
|
|
|
|
|
2017-12-31 17:54:33 +03:00
|
|
|
use libsyntax2::{Token, tokenize};
|
2018-01-07 14:56:08 +03:00
|
|
|
use testutils::{assert_equal_text, collect_tests};
|
2017-12-29 00:27:56 +03:00
|
|
|
|
|
|
|
#[test]
fn lexer_tests() {
    // Run every data-driven lexer case discovered under the "lexer" test directory.
    let cases = collect_tests(&["lexer"]);
    for case in cases {
        lexer_test_case(&case);
    }
}
|
|
|
|
|
|
|
|
fn lexer_test_case(path: &Path) {
|
|
|
|
let actual = {
|
|
|
|
let text = file::get_text(path).unwrap();
|
|
|
|
let tokens = tokenize(&text);
|
2017-12-30 15:29:09 +03:00
|
|
|
dump_tokens(&tokens, &text)
|
2017-12-29 00:27:56 +03:00
|
|
|
};
|
2018-01-07 14:56:08 +03:00
|
|
|
let path = path.with_extension("txt");
|
|
|
|
let expected = file::get_text(&path).unwrap();
|
2017-12-29 23:33:04 +03:00
|
|
|
let expected = expected.as_str();
|
|
|
|
let actual = actual.as_str();
|
2018-01-07 14:56:08 +03:00
|
|
|
assert_equal_text(expected, actual, &path)
|
2017-12-29 00:27:56 +03:00
|
|
|
}
|
|
|
|
|
2017-12-30 15:29:09 +03:00
|
|
|
fn dump_tokens(tokens: &[Token], text: &str) -> String {
|
2017-12-29 00:56:36 +03:00
|
|
|
let mut acc = String::new();
|
2017-12-30 15:29:09 +03:00
|
|
|
let mut offset = 0;
|
2017-12-29 00:56:36 +03:00
|
|
|
for token in tokens {
|
2017-12-30 15:29:09 +03:00
|
|
|
let len: u32 = token.len.into();
|
|
|
|
let len = len as usize;
|
|
|
|
let token_text = &text[offset..offset + len];
|
|
|
|
offset += len;
|
|
|
|
write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
|
2017-12-29 00:56:36 +03:00
|
|
|
}
|
|
|
|
acc
|
2017-12-29 00:27:56 +03:00
|
|
|
}
|