2019-05-07 11:38:26 -05:00
|
|
|
mod unescape;
|
|
|
|
|
2019-01-28 14:03:56 -06:00
|
|
|
mod block;
|
2019-04-05 15:34:45 -05:00
|
|
|
mod field_expr;
|
2018-11-08 08:42:00 -06:00
|
|
|
|
2019-01-07 07:15:47 -06:00
|
|
|
use crate::{
|
|
|
|
algo::visit::{visitor_ctx, VisitorCtx},
|
2019-07-04 15:05:17 -05:00
|
|
|
ast, AstNode, SourceFile, SyntaxError,
|
|
|
|
SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING},
|
|
|
|
SyntaxNode, TextUnit, T,
|
2019-01-07 07:15:47 -06:00
|
|
|
};
|
|
|
|
|
2019-05-07 11:38:26 -05:00
|
|
|
pub(crate) use unescape::EscapeError;
|
|
|
|
|
2019-01-07 07:15:47 -06:00
|
|
|
/// Walks the whole syntax tree of `file` and collects validation errors.
///
/// For every node in the tree, the kind-specific validators are tried in
/// turn (literals, blocks, field expressions); each validator pushes any
/// problems it finds into the shared `errors` accumulator, which is
/// returned at the end.
pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
    let mut errors = Vec::new();
    for node in file.syntax().descendants() {
        // `visitor_ctx` threads `&mut errors` through each registered
        // visitor; `accept` dispatches on the node's actual kind. The
        // result of `accept` (whether any visitor matched) is deliberately
        // ignored — nodes with no validator are simply skipped.
        let _ = visitor_ctx(&mut errors)
            .visit::<ast::Literal, _>(validate_literal)
            .visit::<ast::Block, _>(block::validate_block_node)
            .visit::<ast::FieldExpr, _>(field_expr::validate_field_expr_node)
            .accept(&node);
    }
    errors
}
|
2019-02-21 06:51:22 -06:00
|
|
|
|
2019-03-30 05:25:53 -05:00
|
|
|
// FIXME: kill duplication
|
2019-07-18 11:23:05 -05:00
|
|
|
fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
|
2019-05-07 11:38:26 -05:00
|
|
|
let token = literal.token();
|
|
|
|
let text = token.text().as_str();
|
|
|
|
match token.kind() {
|
|
|
|
BYTE => {
|
|
|
|
if let Some(end) = text.rfind('\'') {
|
|
|
|
if let Some(without_quotes) = text.get(2..end) {
|
|
|
|
if let Err((off, err)) = unescape::unescape_byte(without_quotes) {
|
|
|
|
let off = token.range().start() + TextUnit::from_usize(off + 2);
|
|
|
|
acc.push(SyntaxError::new(err.into(), off))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
CHAR => {
|
|
|
|
if let Some(end) = text.rfind('\'') {
|
|
|
|
if let Some(without_quotes) = text.get(1..end) {
|
|
|
|
if let Err((off, err)) = unescape::unescape_char(without_quotes) {
|
|
|
|
let off = token.range().start() + TextUnit::from_usize(off + 1);
|
|
|
|
acc.push(SyntaxError::new(err.into(), off))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
BYTE_STRING => {
|
|
|
|
if let Some(end) = text.rfind('\"') {
|
|
|
|
if let Some(without_quotes) = text.get(2..end) {
|
|
|
|
unescape::unescape_byte_str(without_quotes, &mut |range, char| {
|
|
|
|
if let Err(err) = char {
|
|
|
|
let off = range.start;
|
|
|
|
let off = token.range().start() + TextUnit::from_usize(off + 2);
|
|
|
|
acc.push(SyntaxError::new(err.into(), off))
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
STRING => {
|
|
|
|
if let Some(end) = text.rfind('\"') {
|
|
|
|
if let Some(without_quotes) = text.get(1..end) {
|
|
|
|
unescape::unescape_str(without_quotes, &mut |range, char| {
|
|
|
|
if let Err(err) = char {
|
|
|
|
let off = range.start;
|
|
|
|
let off = token.range().start() + TextUnit::from_usize(off + 1);
|
|
|
|
acc.push(SyntaxError::new(err.into(), off))
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-03-30 05:25:53 -05:00
|
|
|
_ => (),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-21 06:51:22 -06:00
|
|
|
pub(crate) fn validate_block_structure(root: &SyntaxNode) {
|
|
|
|
let mut stack = Vec::new();
|
|
|
|
for node in root.descendants() {
|
|
|
|
match node.kind() {
|
2019-05-15 07:35:47 -05:00
|
|
|
T!['{'] => stack.push(node),
|
|
|
|
T!['}'] => {
|
2019-02-21 06:51:22 -06:00
|
|
|
if let Some(pair) = stack.pop() {
|
|
|
|
assert_eq!(
|
|
|
|
node.parent(),
|
|
|
|
pair.parent(),
|
2019-07-20 04:48:24 -05:00
|
|
|
"\nunpaired curleys:\n{}\n{:#?}\n",
|
2019-02-21 06:51:22 -06:00
|
|
|
root.text(),
|
2019-07-20 04:48:24 -05:00
|
|
|
root,
|
2019-02-21 06:51:22 -06:00
|
|
|
);
|
|
|
|
assert!(
|
|
|
|
node.next_sibling().is_none() && pair.prev_sibling().is_none(),
|
|
|
|
"\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
|
|
|
|
node,
|
|
|
|
root.text(),
|
|
|
|
node.text(),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => (),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|