//! This module is responsible for matching a search pattern against a node in the AST. In the
//! process of matching, placeholder values are recorded.
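//!
//! As an illustrative example (mirroring the test at the bottom of this file), matching the rule
//! `foo($x) ==>> bar($x)` against the code `foo(1+2)` succeeds, and the placeholder `$x` records
//! the range of `1+2` so that the replacement can reuse it.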

use crate::{
    parsing::{Constraint, NodeKind, Placeholder, Var},
    resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
    SsrMatches,
};
use hir::Semantics;
use ide_db::{base_db::FileRange, FxHashMap};
use std::{cell::Cell, iter::Peekable};
use syntax::{
    ast::{self, AstNode, AstToken},
    SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken,
};

// Creates a match error. If we're currently attempting to match some code that we thought we were
// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
macro_rules! match_error {
    ($e:expr) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!("{}", $e))
            } else {
                None
            }
        }
    }};
    ($fmt:expr, $($arg:tt)+) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!($fmt, $($arg)+))
            } else {
                None
            }
        }
    }};
}

// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
macro_rules! fail_match {
    ($($args:tt)*) => {return Err(match_error!($($args)*))};
}

/// Information about a match that was found.
#[derive(Debug)]
pub struct Match {
    pub(crate) range: FileRange,
    pub(crate) matched_node: SyntaxNode,
    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
    pub(crate) ignored_comments: Vec<ast::Comment>,
    pub(crate) rule_index: usize,
    /// The depth of matched_node.
    pub(crate) depth: usize,
    // Each path in the template rendered for the module in which the match was found.
    pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
}

/// Information about a placeholder bound in a match.
#[derive(Debug)]
pub(crate) struct PlaceholderMatch {
    pub(crate) range: FileRange,
    /// More matches, found within `node`.
    pub(crate) inner_matches: SsrMatches,
    /// How many times the code that the placeholder matched needed to be dereferenced. Will only be
    /// non-zero if the placeholder matched the receiver of a method call.
    pub(crate) autoderef_count: usize,
    pub(crate) autoref_kind: ast::SelfParamKind,
}

#[derive(Debug)]
pub(crate) struct MatchFailureReason {
    pub(crate) reason: String,
}

/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
#[derive(Clone)]
pub(crate) struct MatchFailed {
    /// The reason why we failed to match. Only present when `debug_active` is true in the call to
    /// `get_match`.
    pub(crate) reason: Option<String>,
}

/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
pub(crate) fn get_match(
    debug_active: bool,
    rule: &ResolvedRule,
    code: &SyntaxNode,
    restrict_range: &Option<FileRange>,
    sema: &Semantics<ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
    record_match_fails_reasons_scope(debug_active, || {
        Matcher::try_match(rule, code, restrict_range, sema)
    })
}

/// Checks if our search pattern matches a particular node of the AST.
struct Matcher<'db, 'sema> {
    sema: &'sema Semantics<'db, ide_db::RootDatabase>,
    /// If any placeholders come from anywhere outside of this range, then the match will be
    /// rejected.
    restrict_range: Option<FileRange>,
    rule: &'sema ResolvedRule,
}

/// Which phase of matching we're currently performing. We do two phases because most attempted
/// matches will fail, which lets us defer the more expensive checks to the second phase.
enum Phase<'a> {
    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
    First,
    /// On the second phase, we construct the `Match`. Things like what placeholders bind to are
    /// recorded.
    Second(&'a mut Match),
}

impl<'db, 'sema> Matcher<'db, 'sema> {
    fn try_match(
        rule: &ResolvedRule,
        code: &SyntaxNode,
        restrict_range: &Option<FileRange>,
        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
    ) -> Result<Match, MatchFailed> {
        let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
        // First pass at matching, where we check that node types and idents match.
        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
        match_state.validate_range(&sema.original_range(code))?;
        let mut the_match = Match {
            range: sema.original_range(code),
            matched_node: code.clone(),
            placeholder_values: FxHashMap::default(),
            ignored_comments: Vec::new(),
            rule_index: rule.index,
            depth: 0,
            rendered_template_paths: FxHashMap::default(),
        };
        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
        // any other more expensive checks that we didn't want to do on the first pass.
        match_state.attempt_match_node(
            &mut Phase::Second(&mut the_match),
            &rule.pattern.node,
            code,
        )?;
        the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
        if let Some(template) = &rule.template {
            the_match.render_template_paths(template, sema)?;
        }
        Ok(the_match)
    }

    /// Checks that `range` is within the permitted range if any. This is applicable when we're
    /// processing a macro expansion and we want to fail the match if we're working with a node that
    /// didn't originate from the token tree of the macro call.
    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
        if let Some(restrict_range) = &self.restrict_range {
            if restrict_range.file_id != range.file_id
                || !restrict_range.range.contains_range(range.range)
            {
                fail_match!("Node originated from a macro");
            }
        }
        Ok(())
    }

    fn attempt_match_node(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Handle placeholders.
        if let Some(placeholder) = self.get_placeholder_for_node(pattern) {
            for constraint in &placeholder.constraints {
                self.check_constraint(constraint, code)?;
            }
            if let Phase::Second(matches_out) = phase {
                let original_range = self.sema.original_range(code);
                // We validated the range for the node when we started the match, so the placeholder
                // probably can't fail range validation, but just to be safe...
                self.validate_range(&original_range)?;
                matches_out.placeholder_values.insert(
                    placeholder.ident.clone(),
                    PlaceholderMatch::from_range(original_range),
                );
            }
            return Ok(());
        }
        // We allow a UFCS call to match a method call, provided they resolve to the same function.
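        // For example (illustrative only): a pattern written as `Foo::bar($s, $x)` is allowed to
        // match code written as `s.bar(x)`, as long as both sides resolve to the same `bar`.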
        if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
            if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
                return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
            }
            if let Some(code) = ast::CallExpr::cast(code.clone()) {
                return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
            }
        }
        if pattern.kind() != code.kind() {
            fail_match!(
                "Pattern had `{}` ({:?}), code had `{}` ({:?})",
                pattern.text(),
                pattern.kind(),
                code.text(),
                code.kind()
            );
        }
        // Some kinds of nodes have special handling. For everything else, we fall back to default
        // matching.
        match code.kind() {
            SyntaxKind::RECORD_EXPR_FIELD_LIST => {
                self.attempt_match_record_field_list(phase, pattern, code)
            }
            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
            SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
            _ => self.attempt_match_node_children(phase, pattern, code),
        }
    }

    fn attempt_match_node_children(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        self.attempt_match_sequences(
            phase,
            PatternIterator::new(pattern),
            code.children_with_tokens(),
        )
    }

    fn attempt_match_sequences(
        &self,
        phase: &mut Phase,
        pattern_it: PatternIterator,
        mut code_it: SyntaxElementChildren,
    ) -> Result<(), MatchFailed> {
        let mut pattern_it = pattern_it.peekable();
        loop {
            match phase.next_non_trivial(&mut code_it) {
                None => {
                    if let Some(p) = pattern_it.next() {
                        fail_match!("Part of the pattern was unmatched: {:?}", p);
                    }
                    return Ok(());
                }
                Some(SyntaxElement::Token(c)) => {
                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
                }
                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_node(phase, &p, &c)?;
                    }
                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
                    None => fail_match!("Pattern reached end, code has {}", c.text()),
                },
            }
        }
    }

    fn attempt_match_token(
        &self,
        phase: &mut Phase,
        pattern: &mut Peekable<PatternIterator>,
        code: &syntax::SyntaxToken,
    ) -> Result<(), MatchFailed> {
        phase.record_ignored_comments(code);
        // Ignore whitespace and comments.
        if code.kind().is_trivia() {
            return Ok(());
        }
        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
            // If the code has a comma and the pattern is about to close something, then accept the
            // comma without advancing the pattern. i.e. ignore trailing commas.
            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
                return Ok(());
            }
            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
            // pattern and continue to match the code.
            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
                pattern.next();
            }
        }
        // Consume an element from the pattern and make sure it matches.
        match pattern.next() {
            Some(SyntaxElement::Token(p)) => {
                if p.kind() != code.kind() || p.text() != code.text() {
                    fail_match!(
                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
                        p.text(),
                        p.kind(),
                        code.text(),
                        code.kind()
                    )
                }
            }
            Some(SyntaxElement::Node(p)) => {
                // Not sure if this is actually reachable.
                fail_match!(
                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
                    p,
                    code.text(),
                    code.kind()
                );
            }
            None => {
                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
            }
        }
        Ok(())
    }

    fn check_constraint(
        &self,
        constraint: &Constraint,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        match constraint {
            Constraint::Kind(kind) => {
                kind.matches(code)?;
            }
            Constraint::Not(sub) => {
                if self.check_constraint(&*sub, code).is_ok() {
                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
                }
            }
        }
        Ok(())
    }

    /// Paths are matched based on whether they refer to the same thing, even if they're written
    /// differently.
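    /// For example (illustrative), a pattern path written as `std::collections::HashMap` can match
    /// code that just says `HashMap`, provided both resolve to the same item.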
    fn attempt_match_path(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
            let code_path = ast::Path::cast(code.clone()).unwrap();
            if let (Some(pattern_segment), Some(code_segment)) =
                (pattern_path.segment(), code_path.segment())
            {
                // Match everything within the segment except for the name-ref, which is handled
                // separately via comparing what the path resolves to below.
                self.attempt_match_opt(
                    phase,
                    pattern_segment.generic_arg_list(),
                    code_segment.generic_arg_list(),
                )?;
                self.attempt_match_opt(
                    phase,
                    pattern_segment.param_list(),
                    code_segment.param_list(),
                )?;
            }
            if matches!(phase, Phase::Second(_)) {
                let resolution = self
                    .sema
                    .resolve_path(&code_path)
                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
                if pattern_resolved.resolution != resolution {
                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
                }
            }
        } else {
            return self.attempt_match_node_children(phase, pattern, code);
        }
        Ok(())
    }

    fn attempt_match_opt<T: AstNode>(
        &self,
        phase: &mut Phase,
        pattern: Option<T>,
        code: Option<T>,
    ) -> Result<(), MatchFailed> {
        match (pattern, code) {
            (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
            (None, None) => Ok(()),
            (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
            (None, Some(c)) => {
                fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
            }
        }
    }

    /// We want to allow the records to match in any order, so we have special matching logic for
    /// them.
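    /// For example (illustrative), a pattern like `Foo { a: $a, b: $b }` is also intended to match
    /// `Foo { b: 2, a: 1 }`. When the field names themselves are placeholders we fall back to
    /// ordered matching below.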
    fn attempt_match_record_field_list(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Build a map keyed by field name.
        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
        for child in code.children() {
            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
                if let Some(name) = record.field_name() {
                    fields_by_name.insert(name.text().into(), child.clone());
                }
            }
        }
        for p in pattern.children_with_tokens() {
            if let SyntaxElement::Node(p) = p {
                if let Some(name_element) = p.first_child_or_token() {
                    if self.get_placeholder(&name_element).is_some() {
                        // If the pattern is using placeholders for field names then order
                        // independence doesn't make sense. Fall back to regular ordered
                        // matching.
                        return self.attempt_match_node_children(phase, pattern, code);
                    }
                    if let Some(ident) = only_ident(name_element) {
                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
                            match_error!(
                                "Placeholder has record field '{}', but code doesn't",
                                ident
                            )
                        })?;
                        self.attempt_match_node(phase, &p, &code_record)?;
                    }
                }
            }
        }
        if let Some(unmatched_fields) = fields_by_name.keys().next() {
            fail_match!(
                "{} field(s) of a record literal failed to match, starting with {}",
                fields_by_name.len(),
                unmatched_fields
            );
        }
        Ok(())
    }

    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
    /// tree it can match a sequence of tokens. Note that this code will only be used when the
    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
    /// expanded the macro.
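    /// For example (illustrative), a placeholder `$x` inside a pattern's token tree could bind the
    /// whole token sequence `1 + 2` at the corresponding position in the code's token tree.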
    fn attempt_match_token_tree(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &syntax::SyntaxNode,
    ) -> Result<(), MatchFailed> {
        let mut pattern = PatternIterator::new(pattern).peekable();
        let mut children = code.children_with_tokens();
        while let Some(child) = children.next() {
            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
                pattern.next();
                let next_pattern_token = pattern
                    .peek()
                    .and_then(|p| match p {
                        SyntaxElement::Token(t) => Some(t.clone()),
                        SyntaxElement::Node(n) => n.first_token(),
                    })
                    .map(|p| p.text().to_string());
                let first_matched_token = child.clone();
                let mut last_matched_token = child;
                // Read code tokens until we reach one equal to the next token from our pattern
                // or we reach the end of the token tree.
                for next in &mut children {
                    match &next {
                        SyntaxElement::Token(t) => {
                            if Some(t.to_string()) == next_pattern_token {
                                pattern.next();
                                break;
                            }
                        }
                        SyntaxElement::Node(n) => {
                            if let Some(first_token) = n.first_token() {
                                if Some(first_token.text()) == next_pattern_token.as_deref() {
                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                        // We have a subtree that starts with the next token in our pattern.
                                        self.attempt_match_token_tree(phase, &p, n)?;
                                        break;
                                    }
                                }
                            }
                        }
                    };
                    last_matched_token = next;
                }
                if let Phase::Second(match_out) = phase {
                    match_out.placeholder_values.insert(
                        placeholder.ident.clone(),
                        PlaceholderMatch::from_range(FileRange {
                            file_id: self.sema.original_range(code).file_id,
                            range: first_matched_token
                                .text_range()
                                .cover(last_matched_token.text_range()),
                        }),
                    );
                }
                continue;
            }
            // Match literal (non-placeholder) tokens.
            match child {
                SyntaxElement::Token(token) => {
                    self.attempt_match_token(phase, &mut pattern, &token)?;
                }
                SyntaxElement::Node(node) => match pattern.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_token_tree(phase, &p, &node)?;
                    }
                    Some(SyntaxElement::Token(p)) => fail_match!(
                        "Pattern has token '{}', code has subtree '{}'",
                        p.text(),
                        node.text()
                    ),
                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
                },
            }
        }
        if let Some(p) = pattern.next() {
            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
        }
        Ok(())
    }

    fn attempt_match_ufcs_to_method_call(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::MethodCallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::HasArgList;
        let code_resolved_function = self
            .sema
            .resolve_method_call(code)
            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
        if pattern_ufcs.function != code_resolved_function {
            fail_match!("Method call resolved to a different function");
        }
        // Check arguments.
        let mut pattern_args = pattern_ufcs
            .call_expr
            .arg_list()
            .ok_or_else(|| match_error!("Pattern function call has no args"))?
            .args();
        // If the function we're calling takes a self parameter, then we store additional
        // information on the placeholder match about autoderef and autoref. This allows us to use
        // the placeholder in a context where autoderef and autoref don't apply.
        if code_resolved_function.self_param(self.sema.db).is_some() {
            if let (Some(pattern_type), Some(expr)) =
                (&pattern_ufcs.qualifier_type, &code.receiver())
            {
                let deref_count = self.check_expr_type(pattern_type, expr)?;
                let pattern_receiver = pattern_args.next();
                self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?;
                if let Phase::Second(match_out) = phase {
                    if let Some(placeholder_value) = pattern_receiver
                        .and_then(|n| self.get_placeholder_for_node(n.syntax()))
                        .and_then(|placeholder| {
                            match_out.placeholder_values.get_mut(&placeholder.ident)
                        })
                    {
                        placeholder_value.autoderef_count = deref_count;
                        placeholder_value.autoref_kind = self
                            .sema
                            .resolve_method_call_as_callable(code)
                            .and_then(|callable| callable.receiver_param(self.sema.db))
                            .map(|self_param| self_param.kind())
                            .unwrap_or(ast::SelfParamKind::Owned);
                    }
                }
            }
        } else {
            self.attempt_match_opt(phase, pattern_args.next(), code.receiver())?;
        }
        let mut code_args =
            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
        loop {
            match (pattern_args.next(), code_args.next()) {
                (None, None) => return Ok(()),
                (p, c) => self.attempt_match_opt(phase, p, c)?,
            }
        }
    }

    fn attempt_match_ufcs_to_ufcs(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::CallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::HasArgList;
        // Check that the first argument is the expected type.
        if let (Some(pattern_type), Some(expr)) = (
            &pattern_ufcs.qualifier_type,
            &code.arg_list().and_then(|code_args| code_args.args().next()),
        ) {
            self.check_expr_type(pattern_type, expr)?;
        }
        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
    }

    /// Verifies that `expr` matches `pattern_type`, possibly after dereferencing some number of
    /// times. Returns the number of times it needed to be dereferenced.
    fn check_expr_type(
        &self,
        pattern_type: &hir::Type,
        expr: &ast::Expr,
    ) -> Result<usize, MatchFailed> {
        use hir::HirDisplay;
        let code_type = self
            .sema
            .type_of_expr(expr)
            .ok_or_else(|| {
                match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
            })?
            .original;
        // Temporary needed to make the borrow checker happy.
        let res = code_type
            .autoderef(self.sema.db)
            .enumerate()
            .find(|(_, deref_code_type)| pattern_type == deref_code_type)
            .map(|(count, _)| count)
            .ok_or_else(|| {
                match_error!(
                    "Pattern type `{}` didn't match code type `{}`",
                    pattern_type.display(self.sema.db),
                    code_type.display(self.sema.db)
                )
            });
        res
    }

    fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
        self.get_placeholder(&SyntaxElement::Node(node.clone()))
    }

    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
    }
}

impl Match {
    fn render_template_paths(
        &mut self,
        template: &ResolvedPattern,
        sema: &Semantics<ide_db::RootDatabase>,
    ) -> Result<(), MatchFailed> {
        let module = sema
            .scope(&self.matched_node)
            .ok_or_else(|| match_error!("Matched node isn't in a module"))?
            .module();
        for (path, resolved_path) in &template.resolved_paths {
            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
                    match_error!("Failed to render template path `{}` at match location")
                })?;
                self.rendered_template_paths.insert(path.clone(), mod_path);
            }
        }
        Ok(())
    }
}

impl Phase<'_> {
    fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
        loop {
            let c = code_it.next();
            if let Some(SyntaxElement::Token(t)) = &c {
                self.record_ignored_comments(t);
                if t.kind().is_trivia() {
                    continue;
                }
            }
            return c;
        }
    }

    fn record_ignored_comments(&mut self, token: &SyntaxToken) {
        if token.kind() == SyntaxKind::COMMENT {
            if let Phase::Second(match_out) = self {
                if let Some(comment) = ast::Comment::cast(token.clone()) {
                    match_out.ignored_comments.push(comment);
                }
            }
        }
    }
}

fn is_closing_token(kind: SyntaxKind) -> bool {
    kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
}

pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
where
    F: Fn() -> T,
{
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
    let res = f();
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
    res
}

// For performance reasons, we don't want to record the reason why every match fails, only the bit
// of code that the user indicated they thought would match. We use a thread local to indicate when
// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
// of code that can make the decision to not match.
thread_local! {
    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
}

fn recording_match_fail_reasons() -> bool {
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
}

impl PlaceholderMatch {
    fn from_range(range: FileRange) -> Self {
        Self {
            range,
            inner_matches: SsrMatches::default(),
            autoderef_count: 0,
            autoref_kind: ast::SelfParamKind::Owned,
        }
    }
}

impl NodeKind {
    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
        let ok = match self {
            Self::Literal => {
                cov_mark::hit!(literal_constraint);
                ast::Literal::can_cast(node.kind())
            }
        };
        if !ok {
            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
        }
        Ok(())
    }
}

// If `node` contains nothing but an ident then return it, otherwise return None.
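// For a simple path such as `foo`, this (roughly speaking) recurses through the single-child
// nodes (PATH -> PATH_SEGMENT -> NAME_REF) until it reaches the IDENT token.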
fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
    match element {
        SyntaxElement::Token(t) => {
            if t.kind() == SyntaxKind::IDENT {
                return Some(t);
            }
        }
        SyntaxElement::Node(n) => {
            let mut children = n.children_with_tokens();
            if let (Some(only_child), None) = (children.next(), children.next()) {
                return only_ident(only_child);
            }
        }
    }
    None
}

struct PatternIterator {
    iter: SyntaxElementChildren,
}

impl Iterator for PatternIterator {
    type Item = SyntaxElement;

    fn next(&mut self) -> Option<SyntaxElement> {
        for element in &mut self.iter {
            if !element.kind().is_trivia() {
                return Some(element);
            }
        }
        None
    }
}

impl PatternIterator {
    fn new(parent: &SyntaxNode) -> Self {
        Self { iter: parent.children_with_tokens() }
    }
}

#[cfg(test)]
|
|
|
|
mod tests {
|
2020-07-02 22:09:14 -05:00
|
|
|
use crate::{MatchFinder, SsrRule};
|
2020-06-17 01:53:51 -05:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn parse_match_replace() {
|
|
|
|
let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
|
2020-07-22 01:23:43 -05:00
|
|
|
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
|
2020-06-17 01:53:51 -05:00
|
|
|
|
2020-07-28 20:44:01 -05:00
|
|
|
let (db, position, selections) = crate::tests::single_file(input);
|
2022-03-31 04:12:08 -05:00
|
|
|
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
|
2020-07-22 01:46:29 -05:00
|
|
|
match_finder.add_rule(rule).unwrap();
|
2020-07-22 01:23:43 -05:00
|
|
|
let matches = match_finder.matches();
|
2020-06-17 01:53:51 -05:00
|
|
|
assert_eq!(matches.matches.len(), 1);
|
|
|
|
assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
|
|
|
|
assert_eq!(matches.matches[0].placeholder_values.len(), 1);
|
|
|
|
|
2020-07-22 01:23:43 -05:00
|
|
|
let edits = match_finder.edits();
|
|
|
|
assert_eq!(edits.len(), 1);
|
2021-01-14 15:43:36 -06:00
|
|
|
let edit = &edits[&position.file_id];
|
2020-06-17 01:53:51 -05:00
|
|
|
let mut after = input.to_string();
|
2021-01-14 11:35:22 -06:00
|
|
|
edit.apply(&mut after);
|
2020-07-22 01:23:43 -05:00
|
|
|
assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
|
2020-06-17 01:53:51 -05:00
|
|
|
}
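
    // An illustrative sketch, not an existing upstream test: `attempt_match_token` accepts a
    // trailing comma in the code when the pattern is about to close, so `foo(1+2,)` should still
    // match the pattern `foo($x)`. The input and assertions here mirror `parse_match_replace`.
    #[test]
    fn trailing_comma_ignored_when_matching() {
        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2,); }";
        let (db, position, selections) = crate::tests::single_file(input);
        let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
        match_finder.add_rule(rule).unwrap();
        let matches = match_finder.matches();
        assert_eq!(matches.matches.len(), 1);
        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2,)");
    }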
}