5089: Disable auto-complete on comments r=matklad a=BGluth Resolves #4907 by disabling any auto-completion on comments. As flodiebold [pointed out](https://github.com/rust-analyzer/rust-analyzer/issues/4907#issuecomment-648439979), in the future we may want to support some form of auto-completion within doc comments, but for now it was suggested to just disable auto-completion on them entirely. The implementation involves adding a new field `is_comment` to `CompletionContext` and checking if the immediate token we auto-completed on is a comment. I couldn't see a case where we need to check any of the ancestors, but let me know if this is not sufficient. I also wasn't sure if it was necessary to add a new field to this struct, but I decided it's probably the best option if we want to potentially do auto-completion on doc comments in the future. Finally, the three tests I added should I think ideally not filter results by `CompletionKind::Keyword`, but if I want to get unfiltered results, I need access to a non-public function [get_all_completion_items](9a4d02faf9/crates/ra_ide/src/completion/test_utils.rs (L32-L39)
) which I don't know if I should make public just for this. 5161: SSR: Add initial support for placeholder constraints r=matklad a=davidlattimore 5184: Always install required nightly extension if current one is not nightly r=matklad a=Veetaha This is weird, but it appears that after switching back to stable by uninstalling the extension, vscode doesn't destroy the `PersistentState`, and thus changing to the `nightly` channel doesn't work because the last check for the nightly extension was less than 1 hour ago. The simple solution is to skip this check if we know that the current extension version is not nightly. 5185: Force showing extension activation error pop-up notification r=matklad a=Veetaha Fixes https://github.com/rust-analyzer/rust-analyzer/issues/5091 5186: fix: correct pd/ppd/tfn/tmod completion doc r=matklad a=fannheyward a33eefa3b2/crates/ra_ide/src/completion/complete_snippet.rs (L23-L24)
Co-authored-by: BGluth <gluthb@gmail.com> Co-authored-by: David Lattimore <dml@google.com> Co-authored-by: Veetaha <veetaha2@gmail.com> Co-authored-by: Heyward Fann <fannheyward@gmail.com>
This commit is contained in:
commit
57ed622ec4
1
Cargo.lock
generated
1
Cargo.lock
generated
@ -1248,6 +1248,7 @@ dependencies = [
|
||||
"ra_syntax",
|
||||
"ra_text_edit",
|
||||
"rustc-hash",
|
||||
"test_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -63,11 +63,11 @@
|
||||
// There also snippet completions:
|
||||
//
|
||||
// .Expressions
|
||||
// - `pd` -> `println!("{:?}")`
|
||||
// - `ppd` -> `println!("{:#?}")`
|
||||
// - `pd` -> `eprintln!(" = {:?}", );")`
|
||||
// - `ppd` -> `eprintln!(" = {:#?}", );`
|
||||
//
|
||||
// .Items
|
||||
// - `tfn` -> `#[test] fn f(){}`
|
||||
// - `tfn` -> `#[test] fn feature(){}`
|
||||
// - `tmod` ->
|
||||
// ```rust
|
||||
// #[cfg(test)]
|
||||
@ -75,7 +75,7 @@
|
||||
// use super::*;
|
||||
//
|
||||
// #[test]
|
||||
// fn test_fn() {}
|
||||
// fn test_name() {}
|
||||
// }
|
||||
// ```
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
//! FIXME: write short doc here
|
||||
|
||||
use ra_syntax::ast;
|
||||
use ra_syntax::{ast, SyntaxKind};
|
||||
|
||||
use crate::completion::{
|
||||
CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
|
||||
@ -37,6 +37,10 @@ pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
|
||||
}
|
||||
|
||||
pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
|
||||
if ctx.token.kind() == SyntaxKind::COMMENT {
|
||||
return;
|
||||
}
|
||||
|
||||
let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent;
|
||||
if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling {
|
||||
add_keyword(ctx, acc, "where", "where ");
|
||||
|
@ -1516,4 +1516,54 @@ fn go(world: &WorldSnapshot) {
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_keyword_autocompletion_on_line_comments() {
|
||||
assert_debug_snapshot!(
|
||||
do_completion(
|
||||
r"
|
||||
fn test() {
|
||||
let x = 2; // A comment<|>
|
||||
}
|
||||
",
|
||||
CompletionKind::Keyword
|
||||
),
|
||||
@r###"
|
||||
[]
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_keyword_autocompletion_on_multi_line_comments() {
|
||||
assert_debug_snapshot!(
|
||||
do_completion(
|
||||
r"
|
||||
/*
|
||||
Some multi-line comment<|>
|
||||
*/
|
||||
",
|
||||
CompletionKind::Keyword
|
||||
),
|
||||
@r###"
|
||||
[]
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_keyword_autocompletion_on_doc_comments() {
|
||||
assert_debug_snapshot!(
|
||||
do_completion(
|
||||
r"
|
||||
/// Some doc comment
|
||||
/// let test<|> = 1
|
||||
",
|
||||
CompletionKind::Keyword
|
||||
),
|
||||
@r###"
|
||||
[]
|
||||
"###
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -10,6 +10,18 @@
|
||||
// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
|
||||
// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
|
||||
// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
|
||||
//
|
||||
// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
|
||||
//
|
||||
// Supported constraints:
|
||||
//
|
||||
// |===
|
||||
// | Constraint | Restricts placeholder
|
||||
//
|
||||
// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
|
||||
// | not(a) | Negates the constraint `a`
|
||||
// |===
|
||||
//
|
||||
// Available via the command `rust-analyzer.ssr`.
|
||||
//
|
||||
// ```rust
|
||||
|
@ -17,3 +17,4 @@ ra_db = { path = "../ra_db" }
|
||||
ra_ide_db = { path = "../ra_ide_db" }
|
||||
hir = { path = "../ra_hir", package = "ra_hir" }
|
||||
rustc-hash = "1.1.0"
|
||||
test_utils = { path = "../test_utils" }
|
||||
|
@ -2,7 +2,7 @@
|
||||
//! process of matching, placeholder values are recorded.
|
||||
|
||||
use crate::{
|
||||
parsing::{Placeholder, SsrTemplate},
|
||||
parsing::{Constraint, NodeKind, Placeholder, SsrTemplate},
|
||||
SsrMatches, SsrPattern, SsrRule,
|
||||
};
|
||||
use hir::Semantics;
|
||||
@ -11,6 +11,7 @@
|
||||
use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::{cell::Cell, iter::Peekable};
|
||||
use test_utils::mark;
|
||||
|
||||
// Creates a match error. If we're currently attempting to match some code that we thought we were
|
||||
// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
|
||||
@ -169,6 +170,9 @@ fn attempt_match_node(
|
||||
if let Some(placeholder) =
|
||||
match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone()))
|
||||
{
|
||||
for constraint in &placeholder.constraints {
|
||||
self.check_constraint(constraint, code)?;
|
||||
}
|
||||
if self.match_out.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
@ -292,6 +296,24 @@ fn attempt_match_token(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_constraint(
|
||||
&self,
|
||||
constraint: &Constraint,
|
||||
code: &SyntaxNode,
|
||||
) -> Result<(), MatchFailed> {
|
||||
match constraint {
|
||||
Constraint::Kind(kind) => {
|
||||
kind.matches(code)?;
|
||||
}
|
||||
Constraint::Not(sub) => {
|
||||
if self.check_constraint(&*sub, code).is_ok() {
|
||||
fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// We want to allow the records to match in any order, so we have special matching logic for
|
||||
/// them.
|
||||
fn attempt_match_record_field_list(
|
||||
@ -515,6 +537,21 @@ pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, Matc
|
||||
}
|
||||
}
|
||||
|
||||
impl NodeKind {
|
||||
fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
|
||||
let ok = match self {
|
||||
Self::Literal => {
|
||||
mark::hit!(literal_constraint);
|
||||
ast::Literal::can_cast(node.kind())
|
||||
}
|
||||
};
|
||||
if !ok {
|
||||
fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// If `node` contains nothing but an ident then return it, otherwise return None.
|
||||
fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
|
||||
match element {
|
||||
|
@ -6,7 +6,7 @@
|
||||
//! e.g. expressions, type references etc.
|
||||
|
||||
use crate::{SsrError, SsrPattern, SsrRule};
|
||||
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind};
|
||||
use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use std::str::FromStr;
|
||||
|
||||
@ -39,6 +39,18 @@ pub(crate) struct Placeholder {
|
||||
pub(crate) ident: SmolStr,
|
||||
/// A unique name used in place of this placeholder when we parse the pattern as Rust code.
|
||||
stand_in_name: String,
|
||||
pub(crate) constraints: Vec<Constraint>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) enum Constraint {
|
||||
Kind(NodeKind),
|
||||
Not(Box<Constraint>),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) enum NodeKind {
|
||||
Literal,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@ -149,7 +161,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
|
||||
let mut placeholder_names = FxHashSet::default();
|
||||
let mut tokens = tokenize(pattern_str)?.into_iter();
|
||||
while let Some(token) = tokens.next() {
|
||||
if token.kind == SyntaxKind::DOLLAR {
|
||||
if token.kind == T![$] {
|
||||
let placeholder = parse_placeholder(&mut tokens)?;
|
||||
if !placeholder_names.insert(placeholder.ident.clone()) {
|
||||
bail!("Name `{}` repeats more than once", placeholder.ident);
|
||||
@ -177,6 +189,9 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
|
||||
if !defined_placeholders.contains(&placeholder.ident) {
|
||||
undefined.push(format!("${}", placeholder.ident));
|
||||
}
|
||||
if !placeholder.constraints.is_empty() {
|
||||
bail!("Replacement placeholders cannot have constraints");
|
||||
}
|
||||
}
|
||||
}
|
||||
if !undefined.is_empty() {
|
||||
@ -205,23 +220,90 @@ fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
|
||||
|
||||
fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
|
||||
let mut name = None;
|
||||
let mut constraints = Vec::new();
|
||||
if let Some(token) = tokens.next() {
|
||||
match token.kind {
|
||||
SyntaxKind::IDENT => {
|
||||
name = Some(token.text);
|
||||
}
|
||||
T!['{'] => {
|
||||
let token =
|
||||
tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
|
||||
if token.kind == SyntaxKind::IDENT {
|
||||
name = Some(token.text);
|
||||
}
|
||||
loop {
|
||||
let token = tokens
|
||||
.next()
|
||||
.ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
|
||||
match token.kind {
|
||||
T![:] => {
|
||||
constraints.push(parse_constraint(tokens)?);
|
||||
}
|
||||
T!['}'] => break,
|
||||
_ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
bail!("Placeholders should be $name");
|
||||
bail!("Placeholders should either be $name or ${name:constraints}");
|
||||
}
|
||||
}
|
||||
}
|
||||
let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
|
||||
Ok(Placeholder::new(name))
|
||||
Ok(Placeholder::new(name, constraints))
|
||||
}
|
||||
|
||||
fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
|
||||
let constraint_type = tokens
|
||||
.next()
|
||||
.ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
|
||||
.text
|
||||
.to_string();
|
||||
match constraint_type.as_str() {
|
||||
"kind" => {
|
||||
expect_token(tokens, "(")?;
|
||||
let t = tokens.next().ok_or_else(|| {
|
||||
SsrError::new("Unexpected end of constraint while looking for kind")
|
||||
})?;
|
||||
if t.kind != SyntaxKind::IDENT {
|
||||
bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
|
||||
}
|
||||
expect_token(tokens, ")")?;
|
||||
Ok(Constraint::Kind(NodeKind::from(&t.text)?))
|
||||
}
|
||||
"not" => {
|
||||
expect_token(tokens, "(")?;
|
||||
let sub = parse_constraint(tokens)?;
|
||||
expect_token(tokens, ")")?;
|
||||
Ok(Constraint::Not(Box::new(sub)))
|
||||
}
|
||||
x => bail!("Unsupported constraint type '{}'", x),
|
||||
}
|
||||
}
|
||||
|
||||
fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
|
||||
if let Some(t) = tokens.next() {
|
||||
if t.text == expected {
|
||||
return Ok(());
|
||||
}
|
||||
bail!("Expected {} found {}", expected, t.text);
|
||||
}
|
||||
bail!("Expected {} found end of stream");
|
||||
}
|
||||
|
||||
impl NodeKind {
|
||||
fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
|
||||
Ok(match name.as_str() {
|
||||
"literal" => NodeKind::Literal,
|
||||
_ => bail!("Unknown node kind '{}'", name),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Placeholder {
|
||||
fn new(name: SmolStr) -> Self {
|
||||
Self { stand_in_name: format!("__placeholder_{}", name), ident: name }
|
||||
fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
|
||||
Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
|
||||
}
|
||||
}
|
||||
|
||||
@ -241,31 +323,31 @@ fn token(kind: SyntaxKind, text: &str) -> PatternElement {
|
||||
PatternElement::Token(Token { kind, text: SmolStr::new(text) })
|
||||
}
|
||||
fn placeholder(name: &str) -> PatternElement {
|
||||
PatternElement::Placeholder(Placeholder::new(SmolStr::new(name)))
|
||||
PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
|
||||
}
|
||||
let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
|
||||
assert_eq!(
|
||||
result.pattern.raw.tokens,
|
||||
vec![
|
||||
token(SyntaxKind::IDENT, "foo"),
|
||||
token(SyntaxKind::L_PAREN, "("),
|
||||
token(T!['('], "("),
|
||||
placeholder("a"),
|
||||
token(SyntaxKind::COMMA, ","),
|
||||
token(T![,], ","),
|
||||
token(SyntaxKind::WHITESPACE, " "),
|
||||
placeholder("b"),
|
||||
token(SyntaxKind::R_PAREN, ")"),
|
||||
token(T![')'], ")"),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
result.template.tokens,
|
||||
vec![
|
||||
token(SyntaxKind::IDENT, "bar"),
|
||||
token(SyntaxKind::L_PAREN, "("),
|
||||
token(T!['('], "("),
|
||||
placeholder("b"),
|
||||
token(SyntaxKind::COMMA, ","),
|
||||
token(T![,], ","),
|
||||
token(SyntaxKind::WHITESPACE, " "),
|
||||
placeholder("a"),
|
||||
token(SyntaxKind::R_PAREN, ")"),
|
||||
token(T![')'], ")"),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
use crate::{MatchFinder, SsrRule};
|
||||
use ra_db::{FileId, SourceDatabaseExt};
|
||||
use test_utils::mark;
|
||||
|
||||
fn parse_error_text(query: &str) -> String {
|
||||
format!("{}", query.parse::<SsrRule>().unwrap_err())
|
||||
@ -301,6 +302,22 @@ fn match_pattern() {
|
||||
assert_matches("Some($a)", "fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_constraint() {
|
||||
mark::check!(literal_constraint);
|
||||
let code = r#"
|
||||
fn f1() {
|
||||
let x1 = Some(42);
|
||||
let x2 = Some("foo");
|
||||
let x3 = Some(x1);
|
||||
let x4 = Some(40 + 2);
|
||||
let x5 = Some(true);
|
||||
}
|
||||
"#;
|
||||
assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
|
||||
assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_reordered_struct_instantiation() {
|
||||
assert_matches(
|
||||
|
@ -19,6 +19,16 @@ let ctx: Ctx | undefined;
|
||||
const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
|
||||
|
||||
export async function activate(context: vscode.ExtensionContext) {
|
||||
// For some reason vscode not always shows pop-up error notifications
|
||||
// when an extension fails to activate, so we do it explicitly by ourselves.
|
||||
// FIXME: remove this bit of code once vscode fixes this issue: https://github.com/microsoft/vscode/issues/101242
|
||||
await tryActivate(context).catch(err => {
|
||||
void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`);
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
async function tryActivate(context: vscode.ExtensionContext) {
|
||||
// Register a "dumb" onEnter command for the case where server fails to
|
||||
// start.
|
||||
//
|
||||
@ -58,9 +68,7 @@ export async function activate(context: vscode.ExtensionContext) {
|
||||
|
||||
const workspaceFolder = vscode.workspace.workspaceFolders?.[0];
|
||||
if (workspaceFolder === undefined) {
|
||||
const err = "Cannot activate rust-analyzer when no folder is opened";
|
||||
void vscode.window.showErrorMessage(err);
|
||||
throw new Error(err);
|
||||
throw new Error("no folder is opened");
|
||||
}
|
||||
|
||||
// Note: we try to start the server before we activate type hints so that it
|
||||
@ -152,13 +160,17 @@ async function bootstrapExtension(config: Config, state: PersistentState): Promi
|
||||
return;
|
||||
};
|
||||
|
||||
const lastCheck = state.lastCheck;
|
||||
const now = Date.now();
|
||||
if (config.package.releaseTag === NIGHTLY_TAG) {
|
||||
// Check if we should poll github api for the new nightly version
|
||||
// if we haven't done it during the past hour
|
||||
const lastCheck = state.lastCheck;
|
||||
|
||||
const anHour = 60 * 60 * 1000;
|
||||
const shouldDownloadNightly = state.releaseId === undefined || (now - (lastCheck ?? 0)) > anHour;
|
||||
const anHour = 60 * 60 * 1000;
|
||||
const shouldCheckForNewNightly = state.releaseId === undefined || (now - (lastCheck ?? 0)) > anHour;
|
||||
|
||||
if (!shouldDownloadNightly) return;
|
||||
if (!shouldCheckForNewNightly) return;
|
||||
}
|
||||
|
||||
const release = await fetchRelease("nightly").catch((e) => {
|
||||
log.error(e);
|
||||
|
Loading…
Reference in New Issue
Block a user