Merge #10126
10126: feat: Speculatively expand attributes in completions r=Veykril a=Veykril

![j1OjBt5Nca](https://user-images.githubusercontent.com/3757771/133163858-91930072-1441-4ce4-9979-b0ad2727b47f.gif)

Fixes #9866

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
commit 6eecd84771
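For context on what the change enables: completion now works at a cursor inside an item annotated with an attribute proc macro, because the item is speculatively re-expanded with a fake identifier at the cursor position. A minimal sketch of the scenario, modelled directly on the new test fixtures further down in this diff (`proc_macros::identity` is the test-only attribute registered by the fixture, and `$0` is the fixture marker for the completion cursor, not real Rust syntax):

```rust
pub struct Foo;
impl Foo {
    fn foo(&self) {}
}

// The attribute forces the item below through attribute-macro expansion.
#[proc_macros::identity]
fn main() {
    // Before this PR, dot-completion here only saw the unexpanded item and
    // offered nothing; with speculative attribute expansion, `foo()` is offered.
    Foo.$0
}
```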
@@ -210,6 +210,7 @@ impl ChangeFixture {
         let proc_lib_file = file_id;
         file_id.0 += 1;

+        let (proc_macro, source) = test_proc_macros(&proc_macros);
         let mut fs = FileSet::default();
         fs.insert(
             proc_lib_file,
@@ -217,7 +218,7 @@ impl ChangeFixture {
         );
         roots.push(SourceRoot::new_library(fs));

-        change.change_file(proc_lib_file, Some(Arc::new(String::new())));
+        change.change_file(proc_lib_file, Some(Arc::new(String::from(source))));

         let all_crates = crate_graph.crates_in_topological_order();

@@ -228,7 +229,7 @@ impl ChangeFixture {
             CfgOptions::default(),
             CfgOptions::default(),
             Env::default(),
-            test_proc_macros(&proc_macros),
+            proc_macro,
         );

         for krate in all_crates {
@@ -250,14 +251,33 @@ impl ChangeFixture {
     }
 }

-fn test_proc_macros(proc_macros: &[String]) -> Vec<ProcMacro> {
-    std::array::IntoIter::new([ProcMacro {
-        name: "identity".into(),
-        kind: crate::ProcMacroKind::Attr,
-        expander: Arc::new(IdentityProcMacroExpander),
-    }])
+fn test_proc_macros(proc_macros: &[String]) -> (Vec<ProcMacro>, String) {
+    // The source here is only required so that paths to the macros exist and are resolvable.
+    let source = r#"
+#[proc_macro_attribute]
+pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    item
+}
+#[proc_macro_attribute]
+pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
+    attr
+}
+"#;
+    let proc_macros = std::array::IntoIter::new([
+        ProcMacro {
+            name: "identity".into(),
+            kind: crate::ProcMacroKind::Attr,
+            expander: Arc::new(IdentityProcMacroExpander),
+        },
+        ProcMacro {
+            name: "input_replace".into(),
+            kind: crate::ProcMacroKind::Attr,
+            expander: Arc::new(AttributeInputReplaceProcMacroExpander),
+        },
+    ])
     .filter(|pm| proc_macros.iter().any(|name| name == &pm.name))
-    .collect()
+    .collect();
+    (proc_macros, source.into())
 }

 #[derive(Debug, Clone, Copy)]
@@ -299,8 +319,9 @@ impl From<Fixture> for FileMeta {
     }
 }

+// Identity mapping
 #[derive(Debug)]
-pub struct IdentityProcMacroExpander;
+struct IdentityProcMacroExpander;
 impl ProcMacroExpander for IdentityProcMacroExpander {
     fn expand(
         &self,
@@ -311,3 +332,19 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
         Ok(subtree.clone())
     }
 }
+
+// Pastes the attribute input as its output
+#[derive(Debug)]
+struct AttributeInputReplaceProcMacroExpander;
+impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
+    fn expand(
+        &self,
+        _: &Subtree,
+        attrs: Option<&Subtree>,
+        _: &Env,
+    ) -> Result<Subtree, ProcMacroExpansionError> {
+        attrs
+            .cloned()
+            .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
+    }
+}
@@ -166,6 +166,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }

+    pub fn speculative_expand_attr_macro(
+        &self,
+        actual_macro_call: &ast::Item,
+        speculative_args: &ast::Item,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+    }
+
     // FIXME: Rename to descend_into_macros_single
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         self.imp.descend_into_macros(token).pop().unwrap()
@@ -452,7 +461,24 @@ impl<'db> SemanticsImpl<'db> {
         hir_expand::db::expand_speculative(
             self.db.upcast(),
             macro_call_id,
-            speculative_args,
+            speculative_args.syntax(),
+            token_to_map,
+        )
+    }
+
+    fn speculative_expand_attr(
+        &self,
+        actual_macro_call: &ast::Item,
+        speculative_args: &ast::Item,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call.clone());
+        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+        hir_expand::db::expand_speculative(
+            self.db.upcast(),
+            macro_call_id,
+            speculative_args.syntax(),
             token_to_map,
         )
     }
@@ -5,7 +5,7 @@ use std::sync::Arc;
 use base_db::{salsa, SourceDatabase};
 use itertools::Itertools;
 use limit::Limit;
-use mbe::{ExpandError, ExpandResult};
+use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
 use syntax::{
     algo::diff,
     ast::{self, AttrsOwner, NameOwner},
@@ -141,27 +141,72 @@ pub trait AstDatabase: SourceDatabase {
 pub fn expand_speculative(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    speculative_args: &ast::TokenTree,
+    speculative_args: &SyntaxNode,
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
-    let range =
-        token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
-    let token_id = tmap_1.token_by_range(range)?;
-
-    let macro_def = {
-        let loc: MacroCallLoc = db.lookup_intern_macro(actual_macro_call);
-        db.macro_def(loc.def)?
-    };
-
-    let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);
-
-    let expand_to = macro_expand_to(db, actual_macro_call);
-
-    let (node, tmap_2) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to).ok()?;
-
-    let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
+    let loc = db.lookup_intern_macro(actual_macro_call);
+    let macro_def = db.macro_def(loc.def)?;
+    let token_range = token_to_map.text_range();
+
+    // Build the subtree and token mapping for the speculative args
+    let censor = censor_for_macro_input(&loc, &speculative_args);
+    let (mut tt, spec_args_tmap) =
+        mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
+
+    let (attr_arg, token_id) = match loc.kind {
+        MacroCallKind::Attr { invoc_attr_index, .. } => {
+            // Attributes may have an input token tree, build the subtree and map for this as well
+            // then try finding a token id for our token if it is inside this input subtree.
+            let item = ast::Item::cast(speculative_args.clone())?;
+            let attr = item.attrs().nth(invoc_attr_index as usize)?;
+            match attr.token_tree() {
+                Some(token_tree) => {
+                    let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
+                    tree.delimiter = None;
+
+                    let shift = mbe::Shift::new(&tt);
+                    shift.shift_all(&mut tree);
+
+                    let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
+                        let attr_input_start =
+                            token_tree.left_delimiter_token()?.text_range().start();
+                        let range = token_range.checked_sub(attr_input_start)?;
+                        let token_id = shift.shift(map.token_by_range(range)?);
+                        Some(token_id)
+                    } else {
+                        None
+                    };
+                    (Some(tree), token_id)
+                }
+                _ => (None, None),
+            }
+        }
+        _ => (None, None),
+    };
+    let token_id = match token_id {
+        Some(token_id) => token_id,
+        // token wasn't inside an attribute input so it has to be in the general macro input
+        None => {
+            let range = token_range.checked_sub(speculative_args.text_range().start())?;
+            let token_id = spec_args_tmap.token_by_range(range)?;
+            macro_def.map_id_down(token_id)
+        }
+    };
+
+    // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
+    // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
+    let speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
+        tt.delimiter = None;
+        expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
+    } else {
+        macro_def.expand(db, actual_macro_call, &tt)
+    };
+
+    let expand_to = macro_expand_to(db, actual_macro_call);
+    let (node, rev_tmap) =
+        token_tree_to_syntax_node(&speculative_expansion.value, expand_to).ok()?;
+
+    let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
@@ -259,7 +304,19 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
     let loc = db.lookup_intern_macro(id);

     let node = SyntaxNode::new_root(arg);
-    let censor = match loc.kind {
+    let censor = censor_for_macro_input(&loc, &node);
+    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
+
+    if loc.def.is_proc_macro() {
+        // proc macros expect their inputs without parentheses, MBEs expect it with them included
+        tt.delimiter = None;
+    }
+
+    Some(Arc::new((tt, tmap)))
+}
+
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
+    match loc.kind {
         MacroCallKind::FnLike { .. } => None,
         MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
             Some(item) => item
@@ -275,15 +332,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
             }
             None => None,
         },
-    };
-
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
-
-    if loc.def.is_proc_macro() {
-        // proc macros expect their inputs without parentheses, MBEs expect it with them included
-        tt.delimiter = None;
-    }
-
-    Some(Arc::new((tt, tmap)))
+    }
 }

 fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
@@ -367,11 +416,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
         None => return ExpandResult::str_err("Failed to lower macro args to token tree".into()),
     };

-    let macro_rules = match db.macro_def(loc.def) {
+    let expander = match db.macro_def(loc.def) {
         Some(it) => it,
         None => return ExpandResult::str_err("Failed to find macro definition".into()),
     };
-    let ExpandResult { value: tt, err } = macro_rules.expand(db, id, &macro_arg.0);
+    let ExpandResult { value: tt, err } = expander.expand(db, id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();
     // XXX: Make ExpandResult a real error and use .map_err instead?
@@ -370,6 +370,7 @@ impl ExpansionInfo {
     ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
+            // check if we are mapping down in an attribute input
            let call_id = match self.expanded.file_id.0 {
                HirFileIdRepr::FileId(_) => return None,
                HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id,
@@ -175,48 +175,93 @@ impl<'a> CompletionContext<'a> {
             incomplete_let: false,
             no_completion_required: false,
         };
-        let mut original_file = original_file.syntax().clone();
-        let mut speculative_file = file_with_fake_ident.syntax().clone();
-        let mut offset = position.offset;
-        let mut fake_ident_token = fake_ident_token;
-
-        // Are we inside a macro call?
-        while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
-            find_node_at_offset::<ast::MacroCall>(&original_file, offset),
-            find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
-        ) {
-            if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
-                != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
-            {
-                break;
-            }
-            let speculative_args = match macro_call_with_fake_ident.token_tree() {
-                Some(tt) => tt,
-                None => break,
-            };
-            if let (Some(actual_expansion), Some(speculative_expansion)) = (
-                ctx.sema.expand(&actual_macro_call),
-                ctx.sema.speculative_expand(
-                    &actual_macro_call,
-                    &speculative_args,
-                    fake_ident_token,
-                ),
-            ) {
-                let new_offset = speculative_expansion.1.text_range().start();
-                if new_offset > actual_expansion.text_range().end() {
-                    break;
-                }
-                original_file = actual_expansion;
-                speculative_file = speculative_expansion.0;
-                fake_ident_token = speculative_expansion.1;
-                offset = new_offset;
-            } else {
-                break;
-            }
-        }
-        ctx.fill(&original_file, speculative_file, offset);
-        Some(ctx)
+        ctx.expand_and_fill(
+            original_file.syntax().clone(),
+            file_with_fake_ident.syntax().clone(),
+            position.offset,
+            fake_ident_token,
+        );
+        Some(ctx)
+    }
+
+    fn expand_and_fill(
+        &mut self,
+        mut original_file: SyntaxNode,
+        mut speculative_file: SyntaxNode,
+        mut offset: TextSize,
+        mut fake_ident_token: SyntaxToken,
+    ) {
+        loop {
+            // Expand attributes
+            if let (Some(actual_item), Some(item_with_fake_ident)) = (
+                find_node_at_offset::<ast::Item>(&original_file, offset),
+                find_node_at_offset::<ast::Item>(&speculative_file, offset),
+            ) {
+                match (
+                    self.sema.expand_attr_macro(&actual_item),
+                    self.sema.speculative_expand_attr_macro(
+                        &actual_item,
+                        &item_with_fake_ident,
+                        fake_ident_token.clone(),
+                    ),
+                ) {
+                    (Some(actual_expansion), Some(speculative_expansion)) => {
+                        let new_offset = speculative_expansion.1.text_range().start();
+                        if new_offset > actual_expansion.text_range().end() {
+                            break;
+                        }
+                        original_file = actual_expansion;
+                        speculative_file = speculative_expansion.0;
+                        fake_ident_token = speculative_expansion.1;
+                        offset = new_offset;
+                        continue;
+                    }
+                    (None, None) => (),
+                    _ => break,
+                }
+            }
+
+            // Expand fn-like macro calls
+            if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+                find_node_at_offset::<ast::MacroCall>(&original_file, offset),
+                find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
+            ) {
+                let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+                let mac_call_path1 =
+                    macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+                if mac_call_path0 != mac_call_path1 {
+                    break;
+                }
+                let speculative_args = match macro_call_with_fake_ident.token_tree() {
+                    Some(tt) => tt,
+                    None => break,
+                };
+
+                if let (Some(actual_expansion), Some(speculative_expansion)) = (
+                    self.sema.expand(&actual_macro_call),
+                    self.sema.speculative_expand(
+                        &actual_macro_call,
+                        &speculative_args,
+                        fake_ident_token,
+                    ),
+                ) {
+                    let new_offset = speculative_expansion.1.text_range().start();
+                    if new_offset > actual_expansion.text_range().end() {
+                        break;
+                    }
+                    original_file = actual_expansion;
+                    speculative_file = speculative_expansion.0;
+                    fake_ident_token = speculative_expansion.1;
+                    offset = new_offset;
+                } else {
+                    break;
+                }
+            } else {
+                break;
+            }
+        }
+
+        self.fill(&original_file, speculative_file, offset);
     }

     /// Checks whether completions in that particular case don't make much sense.
@@ -15,6 +15,7 @@ mod item_list;
 mod item;
 mod pattern;
 mod predicate;
+mod proc_macros;
 mod record;
 mod sourcegen;
 mod type_pos;
@@ -23,7 +24,7 @@ mod visibility;

 use std::mem;

-use hir::{PrefixKind, Semantics};
+use hir::{db::DefDatabase, PrefixKind, Semantics};
 use ide_db::{
     base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
     helpers::{
@@ -96,6 +97,7 @@ fn completion_list_with_config(config: CompletionConfig, ra_fixture: &str) -> St
 pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
     let mut database = RootDatabase::default();
+    database.set_enable_proc_attr_macros(true);
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
crates/ide_completion/src/tests/proc_macros.rs (new file, 145 lines)
@@ -0,0 +1,145 @@
+//! Completion tests for expressions.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+    let actual = completion_list(ra_fixture);
+    expect.assert_eq(&actual)
+}
+
+#[test]
+fn complete_dot_in_attr() {
+    check(
+        r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+    Foo.$0
+}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref   &expr
+            sn refm  &mut expr
+            sn match match expr {}
+            sn box   Box::new(expr)
+            sn ok    Ok(expr)
+            sn err   Err(expr)
+            sn some  Some(expr)
+            sn dbg   dbg!(expr)
+            sn dbgr  dbg!(&expr)
+            sn call  function(expr)
+            sn let   let
+            sn letm  let mut
+        "#]],
+    )
+}
+
+#[test]
+fn complete_dot_in_attr2() {
+    check(
+        r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+    Foo.f$0
+}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref   &expr
+            sn refm  &mut expr
+            sn match match expr {}
+            sn box   Box::new(expr)
+            sn ok    Ok(expr)
+            sn err   Err(expr)
+            sn some  Some(expr)
+            sn dbg   dbg!(expr)
+            sn dbgr  dbg!(&expr)
+            sn call  function(expr)
+            sn let   let
+            sn letm  let mut
+        "#]],
+    )
+}
+
+#[test]
+fn complete_dot_in_attr_input() {
+    check(
+        r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn suprise() {
+        Foo.$0
+    }
+)]
+fn main() {}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref   &expr
+            sn refm  &mut expr
+            sn match match expr {}
+            sn box   Box::new(expr)
+            sn ok    Ok(expr)
+            sn err   Err(expr)
+            sn some  Some(expr)
+            sn dbg   dbg!(expr)
+            sn dbgr  dbg!(&expr)
+            sn call  function(expr)
+            sn let   let
+            sn letm  let mut
+        "#]],
+    )
+}
+
+#[test]
+fn complete_dot_in_attr_input2() {
+    check(
+        r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn suprise() {
+        Foo.f$0
+    }
+)]
+fn main() {}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref   &expr
+            sn refm  &mut expr
+            sn match match expr {}
+            sn box   Box::new(expr)
+            sn ok    Ok(expr)
+            sn err   Err(expr)
+            sn some  Some(expr)
+            sn dbg   dbg!(expr)
+            sn dbgr  dbg!(&expr)
+            sn call  function(expr)
+            sn let   let
+            sn letm  let mut
+        "#]],
+    )
+}
@@ -309,6 +309,7 @@ fn check_dbg(path: &Path, text: &str) {
         "handlers/remove_dbg.rs",
         // We have .dbg postfix
         "ide_completion/src/completions/postfix.rs",
+        "ide_completion/src/tests/proc_macros.rs",
         // The documentation in string literals may contain anything for its own purposes
         "ide_completion/src/lib.rs",
         "ide_db/src/helpers/generated_lints.rs",