Merge #11201

11201: fix: Fix completions not considering ancestor items for attribute search r=Veykril a=Veykril

It turns out we never filled the `CompletionContext` with the attribute expansion of attributed impls and traits when typing inside their associated items, because we only checked the associated item itself for an attribute to expand.

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
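A minimal scenario of the kind this fixes (hypothetical names; `$0` marks the completion cursor, following rust-analyzer's test fixture convention):

#[custom_attr] // hypothetical attribute macro, sitting on the impl rather than the method
impl Foo {
    fn method(&self) {
        self.$0 // completion here previously found no attribute to expand,
                // because only `fn method` itself was searched, not its ancestor items
    }
}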
commit 735b542146
@@ -1,5 +1,7 @@
 //! See `CompletionContext` structure.
 
+use std::iter;
+
 use base_db::SourceDatabaseExt;
 use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
 use ide_db::{
@@ -431,12 +433,17 @@ fn expand_and_fill(
         mut fake_ident_token: SyntaxToken,
     ) {
         let _p = profile::span("CompletionContext::expand_and_fill");
-        loop {
-            // Expand attributes
-            if let (Some(actual_item), Some(item_with_fake_ident)) = (
-                find_node_at_offset::<ast::Item>(&original_file, offset),
-                find_node_at_offset::<ast::Item>(&speculative_file, offset),
-            ) {
+        'expansion: loop {
+            let parent_item =
+                |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+            let ancestor_items = iter::successors(
+                Option::zip(
+                    find_node_at_offset::<ast::Item>(&original_file, offset),
+                    find_node_at_offset::<ast::Item>(&speculative_file, offset),
+                ),
+                |(a, b)| parent_item(a).zip(parent_item(b)),
+            );
+            for (actual_item, item_with_fake_ident) in ancestor_items {
                 match (
                     self.sema.expand_attr_macro(&actual_item),
                     self.sema.speculative_expand_attr_macro(
@@ -445,19 +452,22 @@ fn expand_and_fill(
                         fake_ident_token.clone(),
                     ),
                 ) {
-                    (Some(actual_expansion), Some(speculative_expansion)) => {
-                        let new_offset = speculative_expansion.1.text_range().start();
+                    // maybe parent items have attributes
+                    (None, None) => (),
+                    // successful expansions
+                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+                        let new_offset = fake_mapped_token.text_range().start();
                         if new_offset > actual_expansion.text_range().end() {
-                            break;
+                            break 'expansion;
                         }
                         original_file = actual_expansion;
-                        speculative_file = speculative_expansion.0;
-                        fake_ident_token = speculative_expansion.1;
+                        speculative_file = fake_expansion;
+                        fake_ident_token = fake_mapped_token;
                         offset = new_offset;
-                        continue;
+                        continue 'expansion;
                     }
-                    (None, None) => (),
-                    _ => break,
+                    // exactly one expansion failed, inconsistent state so stop expanding completely
+                    _ => break 'expansion,
                 }
             }
 
@@ -477,30 +487,33 @@ fn expand_and_fill(
                     None => break,
                 };
 
-                if let (Some(actual_expansion), Some(speculative_expansion)) = (
+                match (
                     self.sema.expand(&actual_macro_call),
                     self.sema.speculative_expand(
                         &actual_macro_call,
                         &speculative_args,
-                        fake_ident_token,
+                        fake_ident_token.clone(),
                     ),
                 ) {
-                    let new_offset = speculative_expansion.1.text_range().start();
+                    // successful expansions
+                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+                        let new_offset = fake_mapped_token.text_range().start();
                         if new_offset > actual_expansion.text_range().end() {
                             break;
                         }
                         original_file = actual_expansion;
-                        speculative_file = speculative_expansion.0;
-                        fake_ident_token = speculative_expansion.1;
+                        speculative_file = fake_expansion;
+                        fake_ident_token = fake_mapped_token;
                         offset = new_offset;
-                } else {
-                    break;
+                        continue;
                     }
-            } else {
-                break;
+                    _ => break,
                 }
             }
+
+            break;
         }
 
         self.fill(&original_file, speculative_file, offset);
     }
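The heart of the change is the `ancestor_items` iterator, which walks outward from the item under the cursor through its ancestor items, keeping the real file and the speculative file in lockstep and stopping as soon as either side runs out of parents. A minimal standalone sketch of the `iter::successors`/`Option::zip` pattern it relies on (plain integers standing in for syntax nodes; not rust-analyzer code):

fn main() {
    // "Parent" of n is n / 2; 1 has no parent. Stands in for the ast::Item ancestor lookup.
    let parent = |n: &u32| if *n > 1 { Some(*n / 2) } else { None };
    // Seed with a zipped pair, then step both sides together; the walk ends
    // the moment either side's parent lookup returns None.
    let pairs = std::iter::successors(Some((8u32, 8u32)), |(a, b)| parent(a).zip(parent(b)));
    for (a, b) in pairs {
        println!("({a}, {b})"); // prints (8, 8), (4, 4), (2, 2), (1, 1)
    }
}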