Expand into pseudo-derive attribute expansions in completions

Lukas Wirth 2022-03-10 20:53:50 +01:00
parent 533f178a52
commit a8b76b632c
13 changed files with 197 additions and 56 deletions
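
In short: completion inside a `#[derive(...)]` list now works by expanding the derive attribute as a pseudo attribute macro whose output re-emits every comma-separated derive path as its own attribute; the completion machinery then maps the cursor into that expansion and resolves the paths there. A rough sketch of the idea ($0 marks the cursor, following the fixture convention used in the tests below; the exact token output comes from pseudo_derive_attr_expansion further down and is not valid surface syntax):

// What the user is typing:
#[derive(Clone, Cl$0)] struct Test;

// Roughly what the pseudo-attribute expansion contains: each derive
// argument becomes a standalone attribute, so `Clone` and the partial
// `Cl` are ordinary attribute paths the IDE can resolve and complete.
// #[Clone] #[Cl] ...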

View File

@@ -151,6 +151,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.expand_attr_macro(item)
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
self.imp.expand_derive_as_pseudo_attr_macro(attr)
}
pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
self.imp.resolve_derive_macro(derive)
}
@@ -185,6 +189,19 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
}
pub fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.speculative_expand_derive_as_pseudo_attr_macro(
actual_macro_call,
speculative_args,
token_to_map,
)
}
/// Descend the token into macrocalls to its first mapped counterpart.
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
self.imp.descend_into_macros_single(token)
@@ -438,9 +455,16 @@ impl<'db> SemanticsImpl<'db> {
fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
let file_id = macro_call_id.as_file();
let node = self.parse_or_expand(file_id)?;
Some(node)
self.parse_or_expand(macro_call_id.as_file())
}
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(attr.clone());
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
self.parse_or_expand(call_id.as_file())
}
fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@@ -533,6 +557,25 @@ impl<'db> SemanticsImpl<'db> {
)
}
fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let attr = self.wrap_node_infile(actual_macro_call.clone());
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
let macro_call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_call_id,
speculative_args.syntax(),
token_to_map,
)
}
// This might not be the correct way to do this, but it works for now
fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
let mut res = smallvec![];
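
A minimal usage sketch of the new Semantics entry point added above, assuming the crate layout of the time (hir::Semantics over ide_db::RootDatabase; database setup and locating the ast::Attr are elided). Only expand_derive_as_pseudo_attr_macro is taken from this diff; the helper around it is hypothetical:

use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{ast, SyntaxNode};

// Hypothetical helper: given the `#[derive(...)]` attribute of an ADT,
// return the text of its pseudo-attribute expansion, if any.
fn derive_pseudo_expansion_text(
    sema: &Semantics<RootDatabase>,
    derive_attr: &ast::Attr,
) -> Option<String> {
    // Returns the expansion's syntax tree, or None if the attribute is not
    // a derive attached to an ADT.
    let expansion: SyntaxNode = sema.expand_derive_as_pseudo_attr_macro(derive_attr)?;
    Some(expansion.to_string())
}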

View File

@@ -1,7 +1,5 @@
//! Builtin attributes.
use itertools::Itertools;
use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
@@ -98,10 +96,16 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => &attr_args.0,
_ => return ExpandResult::ok(tt.clone()),
MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
_ => return ExpandResult::ok(Default::default()),
};
pseudo_derive_attr_expansion(tt, derives)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
@@ -111,21 +115,12 @@ fn derive_attr_expand(
};
let mut token_trees = Vec::new();
for (comma, group) in &derives
.token_trees
.iter()
.filter_map(|tt| match tt {
tt::TokenTree::Leaf(l) => Some(l),
tt::TokenTree::Subtree(_) => None,
})
.group_by(|l| matches!(l, tt::Leaf::Punct(tt::Punct { char: ',', .. })))
for tt in (&args.token_trees)
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
if comma {
continue;
}
token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('['));
token_trees.extend(group.cloned().map(tt::TokenTree::Leaf));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
token_trees.push(mk_leaf('('));
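
To make the new loop concrete, here is a standalone sketch of the same split-on-comma idiom applied to plain strings instead of tt::TokenTrees (illustration only, not rust-analyzer code; the tokens pushed after the final '(' fall outside this hunk):

// For `#[derive(Clone, serde::Serialize)]` the attribute arguments arrive as
// individual tokens; each comma-separated group is wrapped in `#[` ... `]`.
fn pseudo_derive_prefix(derive_args: &[&str]) -> String {
    let mut out = String::new();
    for group in derive_args.split(|tok| *tok == ",") {
        out.push_str("#[");
        out.push_str(&group.join(" "));
        out.push(']');
    }
    out.push('(');
    out
}

fn main() {
    let args = ["Clone", ",", "serde", "::", "Serialize"];
    // Prints: #[Clone]#[serde :: Serialize](
    println!("{}", pseudo_derive_prefix(&args));
}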

View File

@@ -14,10 +14,10 @@ use syntax::{
};
use crate::{
ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr,
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile,
ProcMacroExpander,
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -161,14 +161,16 @@ pub fn expand_speculative(
);
let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
let attr = item
.doc_comments_and_attrs()
.nth(invoc_attr_index as usize)
.and_then(Either::left)?;
MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
let attr = if is_derive {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone())
} else {
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left)
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
@@ -205,11 +207,15 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
tt.delimiter = None;
expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
} else {
macro_def.expand(db, actual_macro_call, &tt)
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = None;
expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
}
_ => macro_def.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);

View File

@@ -32,7 +32,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
}
let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
let input_expressions = mac_input.into_iter().group_by(|tok| tok.kind() == T![,]);
let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
let input_expressions = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))

View File

@@ -29,6 +29,8 @@ mod derive;
mod lint;
mod repr;
pub(crate) use self::derive::complete_derive;
/// Complete inputs to known builtin attributes as well as derive attributes
pub(crate) fn complete_known_attribute_input(
acc: &mut Completions,
@@ -46,7 +48,6 @@ pub(crate) fn complete_known_attribute_input(
match path.text().as_str() {
"repr" => repr::complete_repr(acc, ctx, tt),
"derive" => derive::complete_derive(acc, ctx, ctx.attr.as_ref()?),
"feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
"allow" | "warn" | "deny" | "forbid" => {
let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
@@ -62,9 +63,7 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, &existing_lints, &lints);
}
"cfg" => {
cfg::complete_cfg(acc, ctx);
}
"cfg" => cfg::complete_cfg(acc, ctx),
_ => (),
}
Some(())
@@ -347,7 +346,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
.children_with_tokens()
.skip(1)
.take_while(|it| it.as_token() != Some(&r_paren));
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
Some(
input_expressions
.into_iter()

View File

@@ -6,14 +6,21 @@ use ide_db::{
};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use syntax::{ast, SmolStr, SyntaxKind};
use syntax::{SmolStr, SyntaxKind};
use crate::{
completions::flyimport::compute_fuzzy_completion_order_key, context::CompletionContext,
item::CompletionItem, Completions, ImportEdit,
completions::flyimport::compute_fuzzy_completion_order_key,
context::{CompletionContext, PathCompletionCtx, PathKind},
item::CompletionItem,
Completions, ImportEdit,
};
pub(super) fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, attr: &ast::Attr) {
pub(crate) fn complete_derive(acc: &mut Completions, ctx: &CompletionContext) {
let attr = match (&ctx.path_context, ctx.attr.as_ref()) {
(Some(PathCompletionCtx { kind: Some(PathKind::Derive), .. }), Some(attr)) => attr,
_ => return,
};
let core = ctx.famous_defs().core();
let existing_derives: FxHashSet<_> =
ctx.sema.resolve_derive_macro(attr).into_iter().flatten().flatten().collect();

View File

@@ -153,7 +153,7 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext)
};
match (kind, import.original_item) {
// Aren't handled in flyimport
(PathKind::Vis { .. } | PathKind::Use, _) => false,
(PathKind::Vis { .. } | PathKind::Use | PathKind::Derive, _) => false,
// modules are always fair game
(_, ItemInNs::Types(hir::ModuleDef::Module(_))) => true,
// and so are macros(except for attributes)

View File

@@ -63,7 +63,13 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
}
match kind {
Some(PathKind::Pat | PathKind::Attr { .. } | PathKind::Vis { .. } | PathKind::Use) => {
Some(
PathKind::Pat
| PathKind::Attr { .. }
| PathKind::Vis { .. }
| PathKind::Use
| PathKind::Derive,
) => {
return;
}
_ => {

View File

@@ -19,10 +19,11 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
Some(PathCompletionCtx {
kind:
Some(
PathKind::Vis { .. }
| PathKind::Attr { .. }
PathKind::Attr { .. }
| PathKind::Derive
| PathKind::Pat
| PathKind::Use { .. }
| PathKind::Pat,
| PathKind::Vis { .. },
),
..
}) => return,

View File

@@ -43,11 +43,12 @@ pub(crate) enum Visible {
No,
}
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(super) enum PathKind {
Expr,
Type,
Attr { kind: AttrKind, annotated_item_kind: Option<SyntaxKind> },
Derive,
Mac,
Pat,
Vis { has_in_token: bool },
@@ -472,6 +473,8 @@ impl<'a> CompletionContext<'a> {
mut fake_ident_token: SyntaxToken,
) {
let _p = profile::span("CompletionContext::expand_and_fill");
let mut derive_ctx = None;
'expansion: loop {
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
@@ -509,11 +512,45 @@ impl<'a> CompletionContext<'a> {
_ => break 'expansion,
}
}
let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
Some(it) => it,
None => break,
};
let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
Some(it) => it,
None => break,
};
// Expand pseudo-derive expansion
if let (Some(orig_attr), Some(spec_attr)) = (
orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
) {
match (
self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
self.sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
&spec_attr,
fake_ident_token.clone(),
),
) {
// Clearly not a derive macro
(None, None) => (),
// successful expansions
(Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
let new_offset = fake_mapped_token.text_range().start();
derive_ctx = Some((actual_expansion, fake_expansion, new_offset));
break 'expansion;
}
// exactly one expansion failed, inconsistent state so stop expanding completely
_ => break 'expansion,
}
}
// Expand fn-like macro calls
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
find_node_at_offset::<ast::MacroCall>(&original_file, offset),
find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
@@ -553,7 +590,7 @@ impl<'a> CompletionContext<'a> {
break;
}
self.fill(&original_file, speculative_file, offset);
self.fill(&original_file, speculative_file, offset, derive_ctx);
}
fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
@@ -697,6 +734,7 @@ impl<'a> CompletionContext<'a> {
original_file: &SyntaxNode,
file_with_fake_ident: SyntaxNode,
offset: TextSize,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize)>,
) {
let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
let syntax_element = NodeOrToken::Token(fake_ident_token);
@@ -724,6 +762,21 @@ impl<'a> CompletionContext<'a> {
self.expected_type = expected_type;
self.expected_name = expected_name;
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset)) = derive_ctx {
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
if let Some((path_ctx, _)) =
Self::classify_name_ref(&self.sema, &original_file, name_ref)
{
self.path_context =
Some(PathCompletionCtx { kind: Some(PathKind::Derive), ..path_ctx });
}
}
return;
}
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
Some(it) => it,
None => return,
@@ -743,6 +796,7 @@ impl<'a> CompletionContext<'a> {
.token_ancestors_with_macros(self.token.clone())
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::Fn::cast);
match name_like {
ast::NameLike::Lifetime(lifetime) => {
self.lifetime_ctx =

View File

@@ -152,6 +152,7 @@ pub fn completions(
let mut acc = Completions::default();
completions::attribute::complete_attribute(&mut acc, &ctx);
completions::attribute::complete_derive(&mut acc, &ctx);
completions::attribute::complete_known_attribute_input(&mut acc, &ctx);
completions::dot::complete_dot(&mut acc, &ctx);
completions::extern_abi::complete_extern_abi(&mut acc, &ctx);

View File

@@ -760,6 +760,7 @@ mod derive {
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive
#[derive(der$0)] struct Test;
"#,
expect![[r#"
@@ -769,6 +770,7 @@ mod derive {
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive
use proc_macros::DeriveIdentity;
#[derive(der$0)] struct Test;
"#,
@@ -784,6 +786,7 @@ use proc_macros::DeriveIdentity;
"DeriveIdentity",
r#"
//- proc_macros: derive_identity
//- minicore: derive
#[derive(der$0)] struct Test;
"#,
r#"
@@ -793,6 +796,32 @@ use proc_macros::DeriveIdentity;
"#,
);
}
#[test]
fn qualified() {
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive, copy, clone
#[derive(proc_macros::$0)] struct Test;
"#,
expect![[r#"
de Clone, Copy
de Clone
"#]],
);
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive, copy, clone
#[derive(proc_macros::C$0)] struct Test;
"#,
expect![[r#"
de Clone, Copy
de Clone
"#]],
);
}
}
mod lint {

View File

@@ -443,7 +443,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
None => None,
Some(tok) => Some(tok),
});
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
let paths = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
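
For reference, a standalone sketch of the comma-grouping idiom these helpers share (assuming the itertools crate; not rust-analyzer code). It groups a token sequence by "is a comma" and keeps only the non-separator groups, mirroring the group_by/filter_map chain above:

use itertools::Itertools;

fn main() {
    // Stand-ins for the syntax tokens between the delimiters.
    let tokens = ["a", ",", "b", "c", ",", "d"];
    let groups = tokens.iter().group_by(|tok| **tok == ",");
    let non_separators: Vec<Vec<&str>> = groups
        .into_iter()
        .filter_map(|(is_sep, group)| (!is_sep).then(|| group.copied().collect()))
        .collect();
    // Prints: [["a"], ["b", "c"], ["d"]]
    println!("{:?}", non_separators);
}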