Expand into pseudo-derive attribute expansions in completions

This commit is contained in:
Lukas Wirth 2022-03-10 20:53:50 +01:00
parent 533f178a52
commit a8b76b632c
13 changed files with 197 additions and 56 deletions

View File

@ -151,6 +151,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.expand_attr_macro(item) self.imp.expand_attr_macro(item)
} }
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
self.imp.expand_derive_as_pseudo_attr_macro(attr)
}
pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> { pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
self.imp.resolve_derive_macro(derive) self.imp.resolve_derive_macro(derive)
} }
@ -185,6 +189,19 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map) self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
} }
pub fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.speculative_expand_derive_as_pseudo_attr_macro(
actual_macro_call,
speculative_args,
token_to_map,
)
}
/// Descend the token into macro calls to its first mapped counterpart. /// Descend the token into macro calls to its first mapped counterpart.
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken { pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
self.imp.descend_into_macros_single(token) self.imp.descend_into_macros_single(token)
@ -438,9 +455,16 @@ impl<'db> SemanticsImpl<'db> {
fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> { fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone()); let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?; let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
let file_id = macro_call_id.as_file(); self.parse_or_expand(macro_call_id.as_file())
let node = self.parse_or_expand(file_id)?; }
Some(node)
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(attr.clone());
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
})?;
self.parse_or_expand(call_id.as_file())
} }
fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> { fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@ -533,6 +557,25 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let attr = self.wrap_node_infile(actual_macro_call.clone());
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
let macro_call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),
macro_call_id,
speculative_args.syntax(),
token_to_map,
)
}
// This might not be the correct way to do this, but it works for now // This might not be the correct way to do this, but it works for now
fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> { fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
let mut res = smallvec![]; let mut res = smallvec![];

View File

@ -1,7 +1,5 @@
//! Builtin attributes. //! Builtin attributes.
use itertools::Itertools;
use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind}; use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin { macro_rules! register_builtin {
@ -98,10 +96,16 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind { let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => &attr_args.0, MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
_ => return ExpandResult::ok(tt.clone()), _ => return ExpandResult::ok(Default::default()),
}; };
pseudo_derive_attr_expansion(tt, derives)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| { let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char, char,
@ -111,21 +115,12 @@ fn derive_attr_expand(
}; };
let mut token_trees = Vec::new(); let mut token_trees = Vec::new();
for (comma, group) in &derives for tt in (&args.token_trees)
.token_trees .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
.iter()
.filter_map(|tt| match tt {
tt::TokenTree::Leaf(l) => Some(l),
tt::TokenTree::Subtree(_) => None,
})
.group_by(|l| matches!(l, tt::Leaf::Punct(tt::Punct { char: ',', .. })))
{ {
if comma {
continue;
}
token_trees.push(mk_leaf('#')); token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('[')); token_trees.push(mk_leaf('['));
token_trees.extend(group.cloned().map(tt::TokenTree::Leaf)); token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']')); token_trees.push(mk_leaf(']'));
} }
token_trees.push(mk_leaf('(')); token_trees.push(mk_leaf('('));

View File

@ -14,10 +14,10 @@ use syntax::{
}; };
use crate::{ use crate::{
ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
ProcMacroExpander, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
}; };
/// Total limit on the number of tokens produced by any macro invocation. /// Total limit on the number of tokens produced by any macro invocation.
@ -161,14 +161,16 @@ pub fn expand_speculative(
); );
let (attr_arg, token_id) = match loc.kind { let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => { MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
// Attributes may have an input token tree, build the subtree and map for this as well let attr = if is_derive {
// then try finding a token id for our token if it is inside this input subtree. // for pseudo-derive expansion we actually pass the attribute itself only
let item = ast::Item::cast(speculative_args.clone())?; ast::Attr::cast(speculative_args.clone())
let attr = item } else {
.doc_comments_and_attrs() // Attributes may have an input token tree, build the subtree and map for this as well
.nth(invoc_attr_index as usize) // then try finding a token id for our token if it is inside this input subtree.
.and_then(Either::left)?; let item = ast::Item::cast(speculative_args.clone())?;
item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left)
}?;
match attr.token_tree() { match attr.token_tree() {
Some(token_tree) => { Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
@ -205,11 +207,15 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind { let mut speculative_expansion = match loc.def.kind {
tt.delimiter = None; MacroDefKind::ProcMacro(expander, ..) => {
expander.expand(db, loc.krate, &tt, attr_arg.as_ref()) tt.delimiter = None;
} else { expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
macro_def.expand(db, actual_macro_call, &tt) }
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
}
_ => macro_def.expand(db, actual_macro_call, &tt),
}; };
let expand_to = macro_expand_to(db, actual_macro_call); let expand_to = macro_expand_to(db, actual_macro_call);

View File

@ -32,7 +32,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
} }
let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim); let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
let input_expressions = mac_input.into_iter().group_by(|tok| tok.kind() == T![,]); let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
let input_expressions = input_expressions let input_expressions = input_expressions
.into_iter() .into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group)) .filter_map(|(is_sep, group)| (!is_sep).then(|| group))

View File

@ -29,6 +29,8 @@ mod derive;
mod lint; mod lint;
mod repr; mod repr;
pub(crate) use self::derive::complete_derive;
/// Complete inputs to known builtin attributes as well as derive attributes /// Complete inputs to known builtin attributes as well as derive attributes
pub(crate) fn complete_known_attribute_input( pub(crate) fn complete_known_attribute_input(
acc: &mut Completions, acc: &mut Completions,
@ -46,7 +48,6 @@ pub(crate) fn complete_known_attribute_input(
match path.text().as_str() { match path.text().as_str() {
"repr" => repr::complete_repr(acc, ctx, tt), "repr" => repr::complete_repr(acc, ctx, tt),
"derive" => derive::complete_derive(acc, ctx, ctx.attr.as_ref()?),
"feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES), "feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
"allow" | "warn" | "deny" | "forbid" => { "allow" | "warn" | "deny" | "forbid" => {
let existing_lints = parse_tt_as_comma_sep_paths(tt)?; let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
@ -62,9 +63,7 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, &existing_lints, &lints); lint::complete_lint(acc, ctx, &existing_lints, &lints);
} }
"cfg" => { "cfg" => cfg::complete_cfg(acc, ctx),
cfg::complete_cfg(acc, ctx);
}
_ => (), _ => (),
} }
Some(()) Some(())
@ -347,7 +346,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
.children_with_tokens() .children_with_tokens()
.skip(1) .skip(1)
.take_while(|it| it.as_token() != Some(&r_paren)); .take_while(|it| it.as_token() != Some(&r_paren));
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]); let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
Some( Some(
input_expressions input_expressions
.into_iter() .into_iter()

View File

@ -6,14 +6,21 @@ use ide_db::{
}; };
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use syntax::{ast, SmolStr, SyntaxKind}; use syntax::{SmolStr, SyntaxKind};
use crate::{ use crate::{
completions::flyimport::compute_fuzzy_completion_order_key, context::CompletionContext, completions::flyimport::compute_fuzzy_completion_order_key,
item::CompletionItem, Completions, ImportEdit, context::{CompletionContext, PathCompletionCtx, PathKind},
item::CompletionItem,
Completions, ImportEdit,
}; };
pub(super) fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, attr: &ast::Attr) { pub(crate) fn complete_derive(acc: &mut Completions, ctx: &CompletionContext) {
let attr = match (&ctx.path_context, ctx.attr.as_ref()) {
(Some(PathCompletionCtx { kind: Some(PathKind::Derive), .. }), Some(attr)) => attr,
_ => return,
};
let core = ctx.famous_defs().core(); let core = ctx.famous_defs().core();
let existing_derives: FxHashSet<_> = let existing_derives: FxHashSet<_> =
ctx.sema.resolve_derive_macro(attr).into_iter().flatten().flatten().collect(); ctx.sema.resolve_derive_macro(attr).into_iter().flatten().flatten().collect();

View File

@ -153,7 +153,7 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext)
}; };
match (kind, import.original_item) { match (kind, import.original_item) {
// Aren't handled in flyimport // Aren't handled in flyimport
(PathKind::Vis { .. } | PathKind::Use, _) => false, (PathKind::Vis { .. } | PathKind::Use | PathKind::Derive, _) => false,
// modules are always fair game // modules are always fair game
(_, ItemInNs::Types(hir::ModuleDef::Module(_))) => true, (_, ItemInNs::Types(hir::ModuleDef::Module(_))) => true,
// and so are macros(except for attributes) // and so are macros(except for attributes)

View File

@ -63,7 +63,13 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
} }
match kind { match kind {
Some(PathKind::Pat | PathKind::Attr { .. } | PathKind::Vis { .. } | PathKind::Use) => { Some(
PathKind::Pat
| PathKind::Attr { .. }
| PathKind::Vis { .. }
| PathKind::Use
| PathKind::Derive,
) => {
return; return;
} }
_ => { _ => {

View File

@ -19,10 +19,11 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
Some(PathCompletionCtx { Some(PathCompletionCtx {
kind: kind:
Some( Some(
PathKind::Vis { .. } PathKind::Attr { .. }
| PathKind::Attr { .. } | PathKind::Derive
| PathKind::Pat
| PathKind::Use { .. } | PathKind::Use { .. }
| PathKind::Pat, | PathKind::Vis { .. },
), ),
.. ..
}) => return, }) => return,

View File

@ -43,11 +43,12 @@ pub(crate) enum Visible {
No, No,
} }
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(super) enum PathKind { pub(super) enum PathKind {
Expr, Expr,
Type, Type,
Attr { kind: AttrKind, annotated_item_kind: Option<SyntaxKind> }, Attr { kind: AttrKind, annotated_item_kind: Option<SyntaxKind> },
Derive,
Mac, Mac,
Pat, Pat,
Vis { has_in_token: bool }, Vis { has_in_token: bool },
@ -472,6 +473,8 @@ impl<'a> CompletionContext<'a> {
mut fake_ident_token: SyntaxToken, mut fake_ident_token: SyntaxToken,
) { ) {
let _p = profile::span("CompletionContext::expand_and_fill"); let _p = profile::span("CompletionContext::expand_and_fill");
let mut derive_ctx = None;
'expansion: loop { 'expansion: loop {
let parent_item = let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
@ -509,11 +512,45 @@ impl<'a> CompletionContext<'a> {
_ => break 'expansion, _ => break 'expansion,
} }
} }
let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
Some(it) => it,
None => break,
};
let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
Some(it) => it,
None => break,
};
// Expand pseudo-derive expansion
if let (Some(orig_attr), Some(spec_attr)) = (
orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
) {
match (
self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
self.sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
&spec_attr,
fake_ident_token.clone(),
),
) {
// Clearly not a derive macro
(None, None) => (),
// successful expansions
(Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
let new_offset = fake_mapped_token.text_range().start();
derive_ctx = Some((actual_expansion, fake_expansion, new_offset));
break 'expansion;
}
// exactly one expansion failed, inconsistent state so stop expanding completely
_ => break 'expansion,
}
}
// Expand fn-like macro calls // Expand fn-like macro calls
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
find_node_at_offset::<ast::MacroCall>(&original_file, offset), orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
find_node_at_offset::<ast::MacroCall>(&speculative_file, offset), spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
) { ) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 = let mac_call_path1 =
@ -553,7 +590,7 @@ impl<'a> CompletionContext<'a> {
break; break;
} }
self.fill(&original_file, speculative_file, offset); self.fill(&original_file, speculative_file, offset, derive_ctx);
} }
fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) { fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
@ -697,6 +734,7 @@ impl<'a> CompletionContext<'a> {
original_file: &SyntaxNode, original_file: &SyntaxNode,
file_with_fake_ident: SyntaxNode, file_with_fake_ident: SyntaxNode,
offset: TextSize, offset: TextSize,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize)>,
) { ) {
let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
let syntax_element = NodeOrToken::Token(fake_ident_token); let syntax_element = NodeOrToken::Token(fake_ident_token);
@ -724,6 +762,21 @@ impl<'a> CompletionContext<'a> {
self.expected_type = expected_type; self.expected_type = expected_type;
self.expected_name = expected_name; self.expected_name = expected_name;
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset)) = derive_ctx {
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
if let Some((path_ctx, _)) =
Self::classify_name_ref(&self.sema, &original_file, name_ref)
{
self.path_context =
Some(PathCompletionCtx { kind: Some(PathKind::Derive), ..path_ctx });
}
}
return;
}
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) { let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
Some(it) => it, Some(it) => it,
None => return, None => return,
@ -743,6 +796,7 @@ impl<'a> CompletionContext<'a> {
.token_ancestors_with_macros(self.token.clone()) .token_ancestors_with_macros(self.token.clone())
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::Fn::cast); .find_map(ast::Fn::cast);
match name_like { match name_like {
ast::NameLike::Lifetime(lifetime) => { ast::NameLike::Lifetime(lifetime) => {
self.lifetime_ctx = self.lifetime_ctx =

View File

@ -152,6 +152,7 @@ pub fn completions(
let mut acc = Completions::default(); let mut acc = Completions::default();
completions::attribute::complete_attribute(&mut acc, &ctx); completions::attribute::complete_attribute(&mut acc, &ctx);
completions::attribute::complete_derive(&mut acc, &ctx);
completions::attribute::complete_known_attribute_input(&mut acc, &ctx); completions::attribute::complete_known_attribute_input(&mut acc, &ctx);
completions::dot::complete_dot(&mut acc, &ctx); completions::dot::complete_dot(&mut acc, &ctx);
completions::extern_abi::complete_extern_abi(&mut acc, &ctx); completions::extern_abi::complete_extern_abi(&mut acc, &ctx);

View File

@ -760,6 +760,7 @@ mod derive {
check_derive( check_derive(
r#" r#"
//- proc_macros: derive_identity //- proc_macros: derive_identity
//- minicore: derive
#[derive(der$0)] struct Test; #[derive(der$0)] struct Test;
"#, "#,
expect![[r#" expect![[r#"
@ -769,6 +770,7 @@ mod derive {
check_derive( check_derive(
r#" r#"
//- proc_macros: derive_identity //- proc_macros: derive_identity
//- minicore: derive
use proc_macros::DeriveIdentity; use proc_macros::DeriveIdentity;
#[derive(der$0)] struct Test; #[derive(der$0)] struct Test;
"#, "#,
@ -784,6 +786,7 @@ use proc_macros::DeriveIdentity;
"DeriveIdentity", "DeriveIdentity",
r#" r#"
//- proc_macros: derive_identity //- proc_macros: derive_identity
//- minicore: derive
#[derive(der$0)] struct Test; #[derive(der$0)] struct Test;
"#, "#,
r#" r#"
@ -793,6 +796,32 @@ use proc_macros::DeriveIdentity;
"#, "#,
); );
} }
#[test]
fn qualified() {
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive, copy, clone
#[derive(proc_macros::$0)] struct Test;
"#,
expect![[r#"
de Clone, Copy
de Clone
"#]],
);
check_derive(
r#"
//- proc_macros: derive_identity
//- minicore: derive, copy, clone
#[derive(proc_macros::C$0)] struct Test;
"#,
expect![[r#"
de Clone, Copy
de Clone
"#]],
);
}
} }
mod lint { mod lint {

View File

@ -443,7 +443,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
None => None, None => None,
Some(tok) => Some(tok), Some(tok) => Some(tok),
}); });
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]); let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
let paths = input_expressions let paths = input_expressions
.into_iter() .into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group)) .filter_map(|(is_sep, group)| (!is_sep).then(|| group))