remove unnecessary lazy evaluations
commit cc80c5bd07
parent 7530d76f00
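The changes below are mechanical: call sites that passed a closure to `bool::then`, `Option::ok_or_else`, or `Option::unwrap_or_else` only to return an already-cheap value are switched to the eager variants `then_some`, `ok_or`, and `unwrap_or`. A minimal standalone sketch of the difference (illustrative values, not rust-analyzer code):

```rust
fn main() {
    let flag = true;

    // Lazy: the closure runs only when `flag` is true. Worth it when the
    // value is expensive to build (allocation, I/O, heavy computation).
    let lazy: Option<String> = flag.then(|| "value".to_string());

    // Eager: the argument is evaluated unconditionally and wrapped in `Some`
    // only when `flag` is true. Simpler when the value is already cheap,
    // which is the case at every call site changed by this commit.
    let eager: Option<&str> = flag.then_some("value");

    assert_eq!(lazy.as_deref(), eager);
}
```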
@@ -159,15 +159,14 @@ pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
     pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
         let (def, mut iter) = match item {
             ItemInNs::Macros(def) => {
-                return self
-                    .macros
-                    .iter()
-                    .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
+                return self.macros.iter().find_map(|(name, &(other_def, vis))| {
+                    (other_def == def).then_some((name, vis))
+                });
             }
             ItemInNs::Types(def) => (def, self.types.iter()),
             ItemInNs::Values(def) => (def, self.values.iter()),
         };
-        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
+        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis)))
     }

     pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {

@@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     }
     let pp = pretty_print_macro_expansion(
         parse.syntax_node(),
-        show_token_ids.then(|| &*token_map),
+        show_token_ids.then_some(&*token_map),
     );
     let indent = IndentLevel::from_node(call.syntax());
     let pp = reindent(indent, pp);

@@ -208,7 +208,7 @@ fn eager_macro_recur(
     // Collect replacement
     for child in children {
         let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
-            Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
+            Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
             None => {
                 diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
                 continue;
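The `ok_or_else(|| UnresolvedMacro { path })` to `ok_or(UnresolvedMacro { path })` change above follows the same eager-vs-lazy rule for the `Option` to `Result` conversion; `unwrap_or_else`/`unwrap_or` (changed further down) behave analogously. A standalone illustration with made-up values:

```rust
fn main() {
    let value: Option<u32> = None;

    // Lazy: the error is constructed only on the `None` path.
    let lazy: Result<u32, String> = value.ok_or_else(|| "missing".to_string());

    // Eager: fine when the error is a cheap literal or an already-built
    // value, like the plain struct literal passed to `ok_or` in the hunk above.
    let eager: Result<u32, &str> = value.ok_or("missing");

    // The same reasoning applies to `unwrap_or` vs `unwrap_or_else`
    // (see the `after.len()` hunk later in this diff).
    let n = value.unwrap_or(0);
    let n_lazy = value.unwrap_or_else(|| 0);

    assert!(lazy.is_err() && eager.is_err());
    assert_eq!(n, n_lazy);
}
```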
@@ -37,7 +37,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
                 hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
                 hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
             };
-            (name == "Goal").then(|| x)
+            (name == "Goal").then_some(x)
         }
         _ => None,
     })

@@ -714,7 +714,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
     let impl_data = find_matching_impl(impls, table, trait_ref)?;
     impl_data.items.iter().find_map(|it| match it {
         AssocItemId::FunctionId(f) => {
-            (db.function_data(*f).name == *name).then(|| AssocItemId::FunctionId(*f))
+            (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f))
         }
         AssocItemId::ConstId(c) => db
             .const_data(*c)

@@ -61,7 +61,7 @@ pub fn traits_in_scope_from_clauses<'a>(
     ) -> impl Iterator<Item = TraitId> + 'a {
         self.traits_from_clauses
             .iter()
-            .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+            .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id))
     }
 }

@@ -1559,7 +1559,7 @@ pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
     }

     pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
-        self.has_self_param(db).then(|| SelfParam { func: self.id })
+        self.has_self_param(db).then_some(SelfParam { func: self.id })
     }

     pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {

@@ -795,7 +795,7 @@ fn descend_into_macros_impl(
                 // requeue the tokens we got from mapping our current token down
                 stack.extend(mapped_tokens);
                 // if the length changed we have found a mapping for the token
-                (stack.len() != len).then(|| ())
+                (stack.len() != len).then_some(())
             };

             // Remap the next token in the queue into a macro call its in, if it is not being remapped

@@ -1221,7 +1221,7 @@ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate>
         krate
             .dependencies(self.db)
             .into_iter()
-            .find_map(|dep| (dep.name == name).then(|| dep.krate))
+            .find_map(|dep| (dep.name == name).then_some(dep.krate))
     }

     fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {

@@ -987,7 +987,7 @@ fn resolve_hir_path_(
                     db,
                     def,
                     res.in_type_ns()?,
-                    |name, id| (name == unresolved.name).then(|| id),
+                    |name, id| (name == unresolved.name).then_some(id),
                 )
             })
             .map(TypeAlias::from)

@@ -326,7 +326,7 @@ fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
 fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
     sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
-        _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+        _ => ty.is_bool().then_some(ExtendedEnum::Bool),
     })
 }

@@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def(
             // For now we only handle expansion for a tuple of enums. Here
             // we map non-enum items to None and rely on `collect` to
             // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
-            _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+            _ => ty.is_bool().then_some(ExtendedEnum::Bool),
         })
     })
     .collect()

@@ -216,7 +216,7 @@ fn validate_method_call_expr(
     let krate = module.krate();

     let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
-    it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
+    it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver))
 }

 #[cfg(test)]

@@ -588,7 +588,7 @@ fn tail_expr(&self) -> Option<ast::Expr> {
             FunctionBody::Expr(expr) => Some(expr.clone()),
             FunctionBody::Span { parent, text_range } => {
                 let tail_expr = parent.tail_expr()?;
-                text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+                text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr)
             }
         }
     }

@@ -178,7 +178,7 @@ fn extract_generic_params(
             .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
     };

-    let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+    let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
     tagged_one.then(|| make::generic_param_list(generics))
 }
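Note that not every lazy call is converted: in the hunk above, `tag.then(|| param)` becomes `then_some(param)`, but `tagged_one.then(|| make::generic_param_list(generics))` keeps its closure because the argument does real work, and a later hunk likewise leaves `then(|| vec![...])` untouched. A small sketch of why an eager call would be wasteful in those cases (hypothetical values, not the commit's code):

```rust
fn main() {
    let ready = false;
    let items = [1, 2, 3];

    // Kept lazy: the Vec is only allocated when `ready` is true.
    let lazy: Option<Vec<i32>> = ready.then(|| items.iter().map(|x| x * 10).collect());

    // An eager `then_some(...)` here would build (and immediately drop) the
    // Vec even though `ready` is false.
    assert!(lazy.is_none());
}
```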
@@ -271,7 +271,7 @@ fn generate_getter_from_info(
         }}",
         vis,
         record_field_info.fn_name,
-        info.mutable.then(|| "mut ").unwrap_or_default(),
+        info.mutable.then_some("mut ").unwrap_or_default(),
         ty,
         body,
     );

@@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str
     let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
     let input_expressions = input_expressions
         .into_iter()
-        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
         .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
         .collect::<Option<Vec<ast::Expr>>>()?;

@@ -613,7 +613,7 @@ pub(crate) fn convert_reference_type(
 }

 fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
-    ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+    ty.is_copy(db).then_some(ReferenceConversionType::Copy)
 }

 fn handle_as_ref_str(

@@ -624,7 +624,7 @@ fn handle_as_ref_str(
     let str_type = hir::BuiltinType::str().ty(db);

     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
-        .then(|| ReferenceConversionType::AsRefStr)
+        .then_some(ReferenceConversionType::AsRefStr)
 }

 fn handle_as_ref_slice(

@@ -636,7 +636,7 @@ fn handle_as_ref_slice(
     let slice_type = hir::Type::new_slice(type_argument);

     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
-        .then(|| ReferenceConversionType::AsRefSlice)
+        .then_some(ReferenceConversionType::AsRefSlice)
 }

 fn handle_dereferenced(

@@ -647,7 +647,7 @@ fn handle_dereferenced(
     let type_argument = ty.type_arguments().next()?;

     ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
-        .then(|| ReferenceConversionType::Dereferenced)
+        .then_some(ReferenceConversionType::Dereferenced)
 }

 fn handle_option_as_ref(

@@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
     Some(
         input_expressions
             .into_iter()
-            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
             .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
             .collect::<Vec<ast::Expr>>(),
     )

@@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
         .flat_map(|list| list.use_trees())
         // We use Option here to early return from this function(this is not the
         // same as a `filter` op).
-        .map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
+        .map(|tree| merge.is_tree_allowed(&tree).then_some(tree))
         .collect::<Option<_>>()?;
     use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
     for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {

@@ -608,7 +608,7 @@ fn found_self_module_name_ref(
         let reference = FileReference {
             range,
             name: ast::NameLike::NameRef(name_ref.clone()),
-            category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import),
+            category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
         };
         sink(file_id, reference)
     }

@@ -787,7 +787,7 @@ impl ReferenceCategory {
     fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
         // Only Locals and Fields have accesses for now.
         if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
-            return is_name_ref_in_import(r).then(|| ReferenceCategory::Import);
+            return is_name_ref_in_import(r).then_some(ReferenceCategory::Import);
         }

         let mode = r.syntax().ancestors().find_map(|node| {

@@ -449,7 +449,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Pat
     let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
     let paths = input_expressions
         .into_iter()
-        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
         .filter_map(|mut tokens| {
             syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
                 ast::Expr::PathExpr(it) => it.path(),

@@ -273,7 +273,7 @@ pub(crate) fn get_definition_with_descend_at<T>(
     let (in_expansion_range, link, ns) =
         extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
             let mapped = doc_mapping.map(range)?;
-            (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
+            (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns))
         })?;
     // get the relative range to the doc/attribute in the expansion
     let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;

@@ -205,7 +205,7 @@ fn non_word_char(c: char) -> bool {
     }

     let start_idx = before.rfind(non_word_char)? as u32;
-    let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
+    let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;

     let from: TextSize = (start_idx + 1).into();
     let to: TextSize = (cursor_position + end_idx).into();
@@ -110,7 +110,7 @@ fn impls_for_trait_item(
         .filter_map(|imp| {
             let item = imp.items(sema.db).iter().find_map(|itm| {
                 let itm_name = itm.name(sema.db)?;
-                (itm_name == fun_name).then(|| *itm)
+                (itm_name == fun_name).then_some(*itm)
             })?;
             item.try_to_nav(sema.db)
         })

@@ -110,7 +110,7 @@ fn highlight_references(
         .and_then(|decl| decl.focus_range)
         .map(|range| {
             let category =
-                references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+                references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write);
             HighlightedRange { range, category }
         });
     if let Some(hl_range) = hl_range {

@@ -365,7 +365,7 @@ fn check_with_config(ra_fixture: &str, config: HighlightRelatedConfig) {

     let mut expected = annotations
         .into_iter()
-        .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+        .map(|(r, access)| (r.range, (!access.is_empty()).then_some(access)))
         .collect::<Vec<_>>();

     let mut actual = hls

@@ -167,7 +167,7 @@ fn is_named_constructor(
         ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
         _ => return None,
     };
-    (ctor_name == ty_name).then(|| ())
+    (ctor_name == ty_name).then_some(())
 }

 fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {

@@ -111,7 +111,7 @@ fn punctuation(
             let is_raw_ptr = (|| {
                 let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
                 let expr = prefix_expr.expr()?;
-                sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
+                sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(())
             })();
             if let Some(()) = is_raw_ptr {
                 HlTag::Operator(HlOperator::Other) | HlMod::Unsafe

@@ -140,7 +140,7 @@ fn max_id(subtree: &tt::Subtree) -> Option<u32> {
                 | tt::Leaf::Punct(tt::Punct { id, .. })
                 | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;

-                (id != tt::TokenId::unspecified()).then(|| id.0)
+                (id != tt::TokenId::unspecified()).then_some(id.0)
             }
         };
         subtree.token_trees.iter().filter_map(filter).max()

@@ -273,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
                 _ => return Err(ParseError::InvalidRepeat),
             },
         };
-        return Ok((has_sep.then(|| separator), repeat_kind));
+        return Ok((has_sep.then_some(separator), repeat_kind));
        }
     }
 }

@@ -228,7 +228,7 @@ fn completion_item(
     max_relevance: u32,
     item: CompletionItem,
 ) {
-    let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
+    let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
     let mut additional_text_edits = Vec::new();

     // LSP does not allow arbitrary edits in completion, so we have to do a

@@ -258,7 +258,7 @@ fn completion_item(
         text_edit.unwrap()
     };

-    let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
+    let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET);
     let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
     let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
         Some(command::trigger_parameter_hints())
@@ -3921,7 +3921,7 @@ pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
 impl AstNode for AnyHasArgList {
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasArgList { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4006,7 +4006,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         )
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasAttrs { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4043,7 +4043,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         )
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasDocComments { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4058,7 +4058,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION)
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4071,7 +4071,7 @@ pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
 impl AstNode for AnyHasLoopBody {
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasLoopBody { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4084,7 +4084,7 @@ pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
 impl AstNode for AnyHasModuleItem {
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasModuleItem { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4119,7 +4119,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         )
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasName { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4137,7 +4137,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         )
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasTypeBounds { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -4171,7 +4171,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
         )
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+        Self::can_cast(syntax.kind()).then_some(AnyHasVisibility { syntax })
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }

@@ -253,7 +253,7 @@ fn can_cast(kind: SyntaxKind) -> bool {
                 matches!(kind, #(#kinds)|*)
             }
             fn cast(syntax: SyntaxNode) -> Option<Self> {
-                Self::can_cast(syntax.kind()).then(|| #name { syntax })
+                Self::can_cast(syntax.kind()).then_some(#name { syntax })
             }
             fn syntax(&self) -> &SyntaxNode {
                 &self.syntax
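The generated `AnyHas*` impls above all change in lockstep because they come from the sourcegen template in this last hunk. A minimal sketch (assuming the `quote` crate and made-up node names, not the repository's actual sourcegen code) of how one template line fans out into many generated `cast` methods:

```rust
use quote::{format_ident, quote};

fn main() {
    for node in ["AnyHasArgList", "AnyHasAttrs", "AnyHasName"] {
        let name = format_ident!("{}", node);
        // `#name` is interpolated per node kind, so editing this single
        // template line rewrites every generated `AstNode::cast` impl.
        let tokens = quote! {
            fn cast(syntax: SyntaxNode) -> Option<Self> {
                Self::can_cast(syntax.kind()).then_some(#name { syntax })
            }
        };
        println!("{tokens}");
    }
}
```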