fix: Fix lint completions not working for unclosed attributes
parent 03f6d92ae4
commit 0001a42570
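
For orientation, here is a minimal self-contained sketch of the dispatch the first hunk below switches to. The `TokenTree` struct and the string return values are illustrative stand-ins, not rust-analyzer's real syntax API; only the shape of the match mirrors the diff: an attribute whose argument list has its opening `(` gets argument completions even if the closing `)` is still missing, and only an attribute with no token tree at all falls back to new-attribute completion.

// Stand-in for the attribute's argument token tree: the dispatch below only
// cares whether the opening `(` has been typed; a missing `)` is irrelevant.
struct TokenTree {
    has_l_paren: bool,
}

fn dispatch(path: Option<&str>, tt: Option<&TokenTree>) -> &'static str {
    match (path, tt) {
        // `#[allow(` is enough to offer argument completions, closed or not
        (Some(name), Some(tt)) if tt.has_l_paren => match name {
            "allow" | "warn" | "deny" | "forbid" => "complete lints",
            "derive" => "complete derives",
            "feature" => "complete features",
            "repr" => "complete repr",
            _ => "nothing",
        },
        // a token tree without `(` (or without a path): nothing sensible to offer
        (_, Some(_)) => "nothing",
        // no token tree at all: offer attribute names themselves
        (_, None) => "complete new attribute",
    }
}

fn main() {
    // `#[allow(dep` – unclosed, but the `(` is present, so lints are offered
    let unclosed = TokenTree { has_l_paren: true };
    assert_eq!(dispatch(Some("allow"), Some(&unclosed)), "complete lints");
    // `#[al` – no token tree yet, so attribute-name completion kicks in
    assert_eq!(dispatch(Some("al"), None), "complete new attribute");
}

Note that the old code entered the same arm for an unclosed attribute but then bailed out inside `parse_tt_as_comma_sep_paths`, which required a closing parenthesis (see the last hunk below).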
@@ -30,16 +30,12 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
         None => None,
     };
     match (name_ref, attribute.token_tree()) {
-        (Some(path), Some(token_tree)) => match path.text().as_str() {
-            "repr" => repr::complete_repr(acc, ctx, token_tree),
-            "derive" => {
-                derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?)
-            }
-            "feature" => {
-                lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?, FEATURES)
-            }
+        (Some(path), Some(tt)) if tt.l_paren_token().is_some() => match path.text().as_str() {
+            "repr" => repr::complete_repr(acc, ctx, tt),
+            "derive" => derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?),
+            "feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
             "allow" | "warn" | "deny" | "forbid" => {
-                let existing_lints = parse_tt_as_comma_sep_paths(token_tree)?;
+                let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
                 lint::complete_lint(acc, ctx, &existing_lints, DEFAULT_LINTS);
                 lint::complete_lint(acc, ctx, &existing_lints, CLIPPY_LINTS);
                 lint::complete_lint(acc, ctx, &existing_lints, RUSTDOC_LINTS);
@@ -49,8 +45,8 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
             }
             _ => (),
         },
-        (None, Some(_)) => (),
-        _ => complete_new_attribute(acc, ctx, attribute),
+        (_, Some(_)) => (),
+        (_, None) => complete_new_attribute(acc, ctx, attribute),
     }
     Some(())
 }

@@ -831,6 +831,20 @@ mod lint {
             r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
         );
     }
+
+    #[test]
+    fn lint_unclosed() {
+        check_edit(
+            "deprecated",
+            r#"#[allow(dep$0 struct Test;"#,
+            r#"#[allow(deprecated struct Test;"#,
+        );
+        check_edit(
+            "bare_urls",
+            r#"#[allow(rustdoc::$0 struct Test;"#,
+            r#"#[allow(rustdoc::bare_urls struct Test;"#,
+        );
+    }
 }
 
 mod repr {

@@ -305,20 +305,24 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
     }
 }
 
-/// Parses the input token tree as comma separated paths.
+/// Parses the input token tree as comma separated plain paths.
 pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
-    let r_paren = input.r_paren_token()?;
-    let tokens = input
-        .syntax()
-        .children_with_tokens()
-        .skip(1)
-        .take_while(|it| it.as_token() != Some(&r_paren));
+    let r_paren = input.r_paren_token();
+    let tokens =
+        input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+            // seeing a keyword means the attribute is unclosed so stop parsing here
+            Some(tok) if tok.kind().is_keyword() => None,
+            // don't include the right token tree parenthesis if it exists
+            tok @ Some(_) if tok == r_paren => None,
+            // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+            None => None,
+            Some(tok) => Some(tok),
+        });
     let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
-    Some(
-        input_expressions
-            .into_iter()
-            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-            .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
-            .collect::<Vec<ast::Path>>(),
-    )
+    let paths = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+        .collect();
+    Some(paths)
 }
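
The parsing change in the last hunk can be summarised with a small standalone sketch over plain `&str` tokens; rowan syntax tokens and `SyntaxKind::is_keyword` are replaced by hypothetical stand-ins (`is_keyword`, `parse_comma_sep_paths`). The strategy: take the tokens after the opening `(`, stop either at the closing `)` or at the first keyword, which only appears inside the token tree when the attribute was left unclosed and the following item (e.g. `struct Test;`) leaked into it, then split what remains on commas and treat each group as one path.

fn is_keyword(tok: &str) -> bool {
    // stand-in for SyntaxKind::is_keyword
    matches!(tok, "struct" | "enum" | "fn" | "impl" | "mod" | "use")
}

fn parse_comma_sep_paths(tokens_after_l_paren: &[&str]) -> Vec<String> {
    // Stop at `)` if the attribute is closed, or at the first keyword, which
    // only shows up when the attribute is unclosed and the next item was
    // absorbed into the token tree.
    let tokens: Vec<&str> = tokens_after_l_paren
        .iter()
        .copied()
        .take_while(|&tok| tok != ")" && !is_keyword(tok))
        .collect();
    // Split on commas; each group of remaining tokens forms one path.
    tokens
        .split(|&tok| tok == ",")
        .map(|group| group.concat())
        .filter(|path| !path.is_empty())
        .collect()
}

fn main() {
    // Closed attribute: #[allow(dead_code, rustdoc::bare_urls)]
    let closed = ["dead_code", ",", "rustdoc", "::", "bare_urls", ")"];
    assert_eq!(parse_comma_sep_paths(&closed), ["dead_code", "rustdoc::bare_urls"]);

    // Unclosed attribute: #[allow(dep  followed by `struct Test;`
    let unclosed = ["dep", "struct", "Test", ";"];
    assert_eq!(parse_comma_sep_paths(&unclosed), ["dep"]);
}

Because the closing parenthesis is now optional, the early `?` on `r_paren_token()` is gone, and an unclosed `#[allow(dep` still yields a usable list of already-written paths for `complete_lint` to filter against.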