//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($a)`.
//! We first split everything before and after the separator `==>>`. Next, both the search pattern
//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
//! placeholders, which start with `$`. For replacement templates, this is the final form. For
//! search patterns, we go further and parse the pattern as each kind of thing that we can match,
//! e.g. expressions, type references, etc.
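//!
//! For example, the rule from the happy-case test at the bottom of this file breaks down as
//! follows (an illustrative sketch of the anatomy, not extra syntax):
//!
//! ```text
//! foo($a, $b) ==>> bar($b, $a)
//! ^^^^^^^^^^^      ^^^^^^^^^^^
//! search pattern   replacement template
//! ```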

use ide_db::{FxHashMap, FxHashSet};
use std::{fmt::Display, str::FromStr};
use syntax::{SmolStr, SyntaxKind, SyntaxNode, T};

use crate::errors::bail;
use crate::{fragments, SsrError, SsrPattern, SsrRule};

#[derive(Debug)]
pub(crate) struct ParsedRule {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    pub(crate) pattern: SyntaxNode,
    pub(crate) template: Option<SyntaxNode>,
}

#[derive(Debug)]
pub(crate) struct RawPattern {
    tokens: Vec<PatternElement>,
}

// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    Token(Token),
    Placeholder(Placeholder),
}

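/// A placeholder within a pattern. Placeholders are written either as `$name` or, when they carry
/// constraints, as `${name:constraint}`; e.g. `${a:not(kind(literal))}` binds `$a` with the
/// constraint that the matched node not be a literal (see `parse_placeholder` and
/// `parse_constraint` below).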
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder, e.g. for "$a" this would be "a".
    pub(crate) ident: Var,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
    pub(crate) constraints: Vec<Constraint>,
}

/// Represents a `$var` in an SSR query.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct Var(pub(crate) String);

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Constraint {
    Kind(NodeKind),
    Not(Box<Constraint>),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum NodeKind {
    Literal,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}

impl ParsedRule {
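    /// Tries to parse the search pattern (and the replacement template, if present) as each kind
    /// of fragment we can match against: expression, type, item, pattern and statement. Every
    /// combination that parses becomes its own `ParsedRule`; `build` errors out if none did.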
    fn new(
        pattern: &RawPattern,
        template: Option<&RawPattern>,
    ) -> Result<Vec<ParsedRule>, SsrError> {
        let raw_pattern = pattern.as_rust_code();
        let raw_template = template.map(|t| t.as_rust_code());
        let raw_template = raw_template.as_deref();
        let mut builder = RuleBuilder {
            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
            rules: Vec::new(),
        };

        let raw_template_stmt = raw_template.map(fragments::stmt);
        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(fragments::expr) {
            builder.try_add(fragments::expr(&raw_pattern), raw_template_expr);
        } else {
            builder.try_add(fragments::expr(&raw_pattern), raw_template_stmt.clone());
        }
        builder.try_add(fragments::ty(&raw_pattern), raw_template.map(fragments::ty));
        builder.try_add(fragments::item(&raw_pattern), raw_template.map(fragments::item));
        builder.try_add(fragments::pat(&raw_pattern), raw_template.map(fragments::pat));
        builder.try_add(fragments::stmt(&raw_pattern), raw_template_stmt);
        builder.build()
    }
}

struct RuleBuilder {
    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    rules: Vec<ParsedRule>,
}

impl RuleBuilder {
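    /// Records a rule for one fragment kind. The rule is only added if the pattern parsed and,
    /// when a template was supplied, the template parsed as well; failures are silently dropped,
    /// since another fragment kind may still succeed.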
    fn try_add(
        &mut self,
        pattern: Result<SyntaxNode, ()>,
        template: Option<Result<SyntaxNode, ()>>,
    ) {
        match (pattern, template) {
            (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern,
                template: Some(template),
            }),
            (Ok(pattern), None) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern,
                template: None,
            }),
            _ => {}
        }
    }

    fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
        if self.rules.is_empty() {
            bail!("Not a valid Rust expression, type, item, path or pattern");
        }
        // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
        // mix leads to strange semantics, since the path-based rules only match things where the
        // path refers to semantically the same thing, whereas the non-path-based rules could match
        // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
        // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
        // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
        // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
        // have to use the slow-scan search mechanism.
        if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
            let old_len = self.rules.len();
            self.rules.retain(|rule| contains_path(&rule.pattern));
            if self.rules.len() < old_len {
                cov_mark::hit!(pattern_is_a_single_segment_path);
            }
        }
        Ok(self.rules)
    }
}

/// Returns whether there are any paths in `node`.
fn contains_path(node: &SyntaxNode) -> bool {
    node.kind() == SyntaxKind::PATH
        || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
}

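/// Parses a complete rule of the form `search_pattern ==>> replacement_template`, for example
/// `let rule: SsrRule = "foo($a) ==>> bar($a)".parse()?;`. Both sides are parsed and the rule is
/// validated before being returned.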
impl FromStr for SsrRule {
    type Err = SsrError;

    fn from_str(query: &str) -> Result<SsrRule, SsrError> {
        let mut it = query.split("==>>");
        let pattern = it.next().expect("at least empty string").trim();
        let template = it
            .next()
            .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
            .trim()
            .to_string();
        if it.next().is_some() {
            return Err(SsrError("More than one delimiter found".into()));
        }
        let raw_pattern = pattern.parse()?;
        let raw_template = template.parse()?;
        let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
        let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
        validate_rule(&rule)?;
        Ok(rule)
    }
}

impl FromStr for RawPattern {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
        Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
    }
}

impl RawPattern {
    /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
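    /// e.g. the search pattern `foo($a)` would come back as `foo(__placeholder_a)`.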
    fn as_rust_code(&self) -> String {
        let mut res = String::new();
        for t in &self.tokens {
            res.push_str(match t {
                PatternElement::Token(token) => token.text.as_str(),
                PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
            });
        }
        res
    }

    pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
        let mut res = FxHashMap::default();
        for t in &self.tokens {
            if let PatternElement::Placeholder(placeholder) = t {
                res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
            }
        }
        res
    }
}

impl FromStr for SsrPattern {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
        let raw_pattern = pattern_str.parse()?;
        let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
        Ok(SsrPattern { parsed_rules })
    }
}

/// Returns `pattern_str`, parsed as a search or replace pattern.
fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
    let mut res = Vec::new();
    let mut placeholder_names = FxHashSet::default();
    let mut tokens = tokenize(pattern_str)?.into_iter();
    while let Some(token) = tokens.next() {
        if token.kind == T![$] {
            let placeholder = parse_placeholder(&mut tokens)?;
            if !placeholder_names.insert(placeholder.ident.clone()) {
                bail!("Placeholder `{}` repeats more than once", placeholder.ident);
            }
            res.push(PatternElement::Placeholder(placeholder));
        } else {
            res.push(PatternElement::Token(token));
        }
    }
    Ok(res)
}

/// Checks for errors in a rule, e.g. the replace pattern referencing placeholders that the search
/// pattern didn't define.
fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
    let mut defined_placeholders = FxHashSet::default();
    for p in &rule.pattern.tokens {
        if let PatternElement::Placeholder(placeholder) = p {
            defined_placeholders.insert(&placeholder.ident);
        }
    }
    let mut undefined = Vec::new();
    for p in &rule.template.tokens {
        if let PatternElement::Placeholder(placeholder) = p {
            if !defined_placeholders.contains(&placeholder.ident) {
                undefined.push(placeholder.ident.to_string());
            }
            if !placeholder.constraints.is_empty() {
                bail!("Replacement placeholders cannot have constraints");
            }
        }
    }
    if !undefined.is_empty() {
        bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
    }
    Ok(())
}

fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
    let lexed = parser::LexedStr::new(source);
    if let Some((_, first_error)) = lexed.errors().next() {
        bail!("Failed to parse pattern: {}", first_error);
    }
    let mut tokens: Vec<Token> = Vec::new();
    for i in 0..lexed.len() {
        tokens.push(Token { kind: lexed.kind(i), text: lexed.text(i).into() });
    }
    Ok(tokens)
}

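/// Parses a single placeholder. The caller is expected to have already consumed the `$` token.
/// Accepts either a bare name (`$a`) or the braced form with optional constraints
/// (`${a:kind(literal)}`).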
fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
    let mut name = None;
    let mut constraints = Vec::new();
    if let Some(token) = tokens.next() {
        match token.kind {
            SyntaxKind::IDENT => {
                name = Some(token.text);
            }
            T!['{'] => {
                let token =
                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
                if token.kind == SyntaxKind::IDENT {
                    name = Some(token.text);
                }
                loop {
                    let token = tokens
                        .next()
                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
                    match token.kind {
                        T![:] => {
                            constraints.push(parse_constraint(tokens)?);
                        }
                        T!['}'] => break,
                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
                    }
                }
            }
            _ => {
                bail!("Placeholders should either be $name or ${{name:constraints}}");
            }
        }
    }
    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
    Ok(Placeholder::new(name, constraints))
}

fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
    let constraint_type = tokens
        .next()
        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
        .text
        .to_string();
    match constraint_type.as_str() {
        "kind" => {
            expect_token(tokens, "(")?;
            let t = tokens.next().ok_or_else(|| {
                SsrError::new("Unexpected end of constraint while looking for kind")
            })?;
            if t.kind != SyntaxKind::IDENT {
                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
            }
            expect_token(tokens, ")")?;
            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
        }
        "not" => {
            expect_token(tokens, "(")?;
            let sub = parse_constraint(tokens)?;
            expect_token(tokens, ")")?;
            Ok(Constraint::Not(Box::new(sub)))
        }
        x => bail!("Unsupported constraint type '{}'", x),
    }
}

fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
    if let Some(t) = tokens.next() {
        if t.text == expected {
            return Ok(());
        }
        bail!("Expected {} found {}", expected, t.text);
    }
    bail!("Expected {} found end of stream", expected);
}

impl NodeKind {
    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
        Ok(match name.as_str() {
            "literal" => NodeKind::Literal,
            _ => bail!("Unknown node kind '{}'", name),
        })
    }
}

impl Placeholder {
    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
        Self {
            stand_in_name: format!("__placeholder_{name}"),
            constraints,
            ident: Var(name.to_string()),
        }
    }
}

impl Display for Var {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "${}", self.0)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parser_happy_case() {
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        assert_eq!(
            result.pattern.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(T!['('], "("),
                placeholder("a"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(T![')'], ")"),
            ]
        );
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(T!['('], "("),
                placeholder("b"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(T![')'], ")"),
            ]
        );
    }
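
    // A small companion to `parser_happy_case`: checks that a `kind(literal)` constraint written
    // in the braced placeholder form ends up attached to the parsed placeholder.
    #[test]
    fn parser_kind_constraint() {
        let rule: SsrRule = "${a:kind(literal)} ==>> $a".parse().unwrap();
        match &rule.pattern.tokens[0] {
            PatternElement::Placeholder(placeholder) => {
                assert_eq!(placeholder.constraints, vec![Constraint::Kind(NodeKind::Literal)]);
            }
            other => panic!("expected placeholder, got {other:?}"),
        }
    }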
}