more clippy fixes
This commit is contained in:
parent
3c468ab2fc
commit
35e5daacc3
@@ -49,13 +49,13 @@ pub(crate) fn goto_definition(
     let parent = token.parent()?;
     if let Some(tt) = ast::TokenTree::cast(parent) {
         if let x @ Some(_) =
-            try_lookup_include_path(&sema, tt, token.clone(), position.file_id)
+            try_lookup_include_path(sema, tt, token.clone(), position.file_id)
         {
             return x;
         }
     }
     Some(
-        Definition::from_token(&sema, &token)
+        Definition::from_token(sema, &token)
             .into_iter()
             .flat_map(|def| {
                 try_find_trait_item_definition(sema.db, &def)
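Most hunks in this commit drop a redundant `&` from arguments that are already references, which looks like clippy's needless_borrow lint. A minimal standalone sketch of the pattern (the function and names below are made up, not from rust-analyzer):

// Hypothetical example of the needless_borrow pattern fixed above.
fn token_len(token: &str) -> usize {
    token.len()
}

fn main() {
    let token: &str = "fn";
    let _before = token_len(&token); // &token is a &&str; clippy flags the extra borrow
    let _after = token_len(token);   // pass the existing reference straight through
}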
@@ -145,7 +145,7 @@ mod tests {
     fn check(ra_fixture: &str) {
         let (analysis, position, expected) = fixture::annotations(ra_fixture);
         let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
-        if navs.len() == 0 {
+        if navs.is_empty() {
             panic!("unresolved reference")
         }

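The `navs.len() == 0` change appears to be clippy's len_zero fix; `is_empty()` says the same thing more directly. A tiny sketch with a plain Vec (hypothetical data):

fn main() {
    let navs: Vec<u32> = Vec::new();
    assert!(navs.len() == 0);  // what the code said before
    assert!(navs.is_empty());  // equivalent, and what clippy suggests
}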
@@ -353,7 +353,7 @@ fn check(ra_fixture: &str) {
     fn check_with_config(ra_fixture: &str, config: HighlightRelatedConfig) {
         let (analysis, pos, annotations) = fixture::annotations(ra_fixture);

-        let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or(Vec::default());
+        let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default();

         let mut expected = annotations
             .into_iter()
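`unwrap_or(Vec::default())` spells out and eagerly builds the default; `unwrap_or_default()` is the shorter equivalent clippy suggests. A sketch on a plain Option<Vec<_>>, not the real Analysis API:

fn main() {
    let hls: Option<Vec<u32>> = None;
    let before: Vec<u32> = hls.clone().unwrap_or(Vec::default());
    let after: Vec<u32> = hls.unwrap_or_default();
    assert_eq!(before, after);
}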
@@ -97,7 +97,7 @@ pub(crate) fn hover(
    let file = sema.parse(file_id).syntax().clone();

    if !range.is_empty() {
-        return hover_ranged(&file, range, &sema, config);
+        return hover_ranged(&file, range, sema, config);
    }
    let offset = range.start();

@@ -121,7 +121,7 @@ pub(crate) fn hover(
    // FIXME: Definition should include known lints and the like instead of having this special case here
    if let Some(res) = descended.iter().find_map(|token| {
        let attr = token.ancestors().find_map(ast::Attr::cast)?;
-        render::try_for_lint(&attr, &token)
+        render::try_for_lint(&attr, token)
    }) {
        return Some(RangeInfo::new(original_token.text_range(), res));
    }
@@ -164,7 +164,7 @@ pub(crate) fn hover_for_definition(
) -> Option<HoverResult> {
    let famous_defs = match &definition {
        Definition::ModuleDef(hir::ModuleDef::BuiltinType(_)) => {
-            Some(FamousDefs(&sema, sema.scope(&node).krate()))
+            Some(FamousDefs(sema, sema.scope(node).krate()))
        }
        _ => None,
    };
@@ -179,7 +179,7 @@ pub(crate) fn hover_for_definition(
        res.actions.push(action);
    }

-    if let Some(action) = runnable_action(&sema, definition, file_id) {
+    if let Some(action) = runnable_action(sema, definition, file_id) {
        res.actions.push(action);
    }

@@ -246,7 +246,7 @@ fn hover_type_fallback(
        }
    };

-    let res = render::type_info(&sema, config, &expr_or_pat)?;
+    let res = render::type_info(sema, config, &expr_or_pat)?;
    let range = sema.original_range(&node).range;
    Some(RangeInfo::new(range, res))
}
@@ -201,7 +201,7 @@ fn get_bind_pat_hints(
    let desc_pat = descended.as_ref().unwrap_or(pat);
    let ty = sema.type_of_pat(&desc_pat.clone().into())?.original;

-    if should_not_display_type_hint(sema, &pat, &ty) {
+    if should_not_display_type_hint(sema, pat, &ty) {
        return None;
    }

@@ -269,7 +269,7 @@ fn is_named_constructor(
        callable_kind
    {
        if let Some(ctor) = path.segment() {
-            return (&ctor.to_string() == ty_name).then(|| ());
+            return (ctor.to_string() == ty_name).then(|| ());
        }
    }

@@ -285,7 +285,7 @@ fn is_named_constructor(
        ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
        _ => return None,
    };
-    (&ctor_name == ty_name).then(|| ())
+    (ctor_name == ty_name).then(|| ())
}

/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
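The two comparison hunks above drop a borrow that only existed for the comparison, in the spirit of clippy's op_ref lint: a String can be compared against a &str directly. Illustrative sketch with made-up values:

fn main() {
    let ctor_name = String::from("Vec");
    let ty_name: &str = "Vec";
    // Before: (&ctor_name == ty_name).then(|| ());
    // After: compare the owned String against the &str without the extra borrow.
    let matched = (ctor_name == ty_name).then(|| ());
    assert!(matched.is_some());
}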
@@ -584,7 +584,7 @@ fn check_chains(ra_fixture: &str) {

    #[track_caller]
    fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
-        let (analysis, file_id) = fixture::file(&ra_fixture);
+        let (analysis, file_id) = fixture::file(ra_fixture);
        let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
        let inlay_hints = analysis.inlay_hints(&config, file_id).unwrap();
        let actual =
@@ -594,7 +594,7 @@ fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {

    #[track_caller]
    fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
-        let (analysis, file_id) = fixture::file(&ra_fixture);
+        let (analysis, file_id) = fixture::file(ra_fixture);
        let inlay_hints = analysis.inlay_hints(&config, file_id).unwrap();
        expect.assert_debug_eq(&inlay_hints)
    }
@@ -311,7 +311,7 @@ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<Expande
    pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
        self.with_db(|db| {
            let parse = db.parse(frange.file_id);
-            join_lines::join_lines(&config, &parse.tree(), frange.range)
+            join_lines::join_lines(config, &parse.tree(), frange.range)
        })
    }

@@ -120,13 +120,10 @@ fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>(
    range: TextRange,
    direction: Direction,
) -> Option<TextEdit> {
-    let list_lookup = list
-        .tuple_windows()
-        .filter(|(l, r)| match direction {
-            Direction::Up => r.syntax().text_range().contains_range(range),
-            Direction::Down => l.syntax().text_range().contains_range(range),
-        })
-        .next();
+    let list_lookup = list.tuple_windows().find(|(l, r)| match direction {
+        Direction::Up => r.syntax().text_range().contains_range(range),
+        Direction::Down => l.syntax().text_range().contains_range(range),
+    });

    if let Some((l, r)) = list_lookup {
        Some(replace_nodes(range, l.syntax(), r.syntax()))
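Rewriting `.filter(..).next()` as `.find(..)` looks like clippy's filter_next fix; both return the first matching element. A sketch over a plain slice (tuple_windows and the syntax-tree types are not reproduced here):

fn main() {
    let pairs = [(1, 2), (3, 4), (5, 6)];
    // Before: filter + next
    let a = pairs.iter().filter(|(l, _)| *l > 2).next();
    // After: find expresses "first match" directly
    let b = pairs.iter().find(|(l, _)| *l > 2);
    assert_eq!(a, b);
}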
@@ -107,14 +107,14 @@ fn find_definition(
        {
            bail!("Renaming aliases is currently unsupported")
        }
-        ast::NameLike::Name(name) => NameClass::classify(sema, &name).map(|class| match class {
+        ast::NameLike::Name(name) => NameClass::classify(sema, name).map(|class| match class {
            NameClass::Definition(it) | NameClass::ConstReference(it) => it,
            NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
                Definition::Local(local_def)
            }
        }),
        ast::NameLike::NameRef(name_ref) => {
-            if let Some(def) = NameRefClass::classify(sema, &name_ref).map(|class| match class {
+            if let Some(def) = NameRefClass::classify(sema, name_ref).map(|class| match class {
                NameRefClass::Definition(def) => def,
                NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
                    Definition::Local(local_ref)
@@ -129,13 +129,13 @@ fn find_definition(
                None
            }
        }
-        ast::NameLike::Lifetime(lifetime) => NameRefClass::classify_lifetime(sema, &lifetime)
+        ast::NameLike::Lifetime(lifetime) => NameRefClass::classify_lifetime(sema, lifetime)
            .and_then(|class| match class {
                NameRefClass::Definition(def) => Some(def),
                _ => None,
            })
            .or_else(|| {
-                NameClass::classify_lifetime(sema, &lifetime).and_then(|it| match it {
+                NameClass::classify_lifetime(sema, lifetime).and_then(|it| match it {
                    NameClass::Definition(it) => Some(it),
                    _ => None,
                })
@@ -305,7 +305,6 @@ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
                .skip("error:".len())
                .collect::<String>();
            assert_eq!(error_message.trim(), err.to_string());
-            return;
        } else {
            panic!("Rename to '{}' failed unexpectedly: {}", new_name, err)
        }
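Removing the trailing `return;` is the needless_return pattern: the branch ends right after it anyway. A small sketch:

fn describe(err: Option<&str>) {
    if let Some(msg) = err {
        println!("error: {}", msg);
        // Before the fix, a bare `return;` sat here even though the branch ends anyway.
    } else {
        println!("ok");
    }
}

fn main() {
    describe(Some("renaming aliases is unsupported"));
    describe(None);
}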
@@ -120,9 +120,11 @@ fn add_file(&mut self, file_id: FileId) {
        });
        let hover_config =
            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-        let tokens = tokens.filter(|token| match token.kind() {
-            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
-            _ => false,
+        let tokens = tokens.filter(|token| {
+            matches!(
+                token.kind(),
+                IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate]
+            )
        });
        let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
        for token in tokens {
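The token filter turns a `match` that only yields `true`/`false` into `matches!`, which is what clippy's match_like_matches_macro asks for. A sketch with a hypothetical `Kind` enum standing in for the real SyntaxKind constants:

enum Kind { Ident, IntNumber, Lifetime, Whitespace }

fn is_interesting(kind: Kind) -> bool {
    // Before:
    // match kind { Kind::Ident | Kind::IntNumber | Kind::Lifetime => true, _ => false }
    // After:
    matches!(kind, Kind::Ident | Kind::IntNumber | Kind::Lifetime)
}

fn main() {
    assert!(is_interesting(Kind::Ident));
    assert!(is_interesting(Kind::IntNumber));
    assert!(is_interesting(Kind::Lifetime));
    assert!(!is_interesting(Kind::Whitespace));
}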
@@ -158,7 +160,7 @@ fn add_file(&mut self, file_id: FileId) {
        self.files.push(result);
    }

-    pub fn compute<'a>(analysis: &'a Analysis) -> StaticIndex<'a> {
+    pub fn compute(analysis: &Analysis) -> StaticIndex {
        let db = &*analysis.db;
        let work = all_modules(db).into_iter().filter(|module| {
            let file_id = module.definition_source(db).file_id.original_file(db);
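Dropping the explicit `<'a>` from `compute` is the needless_lifetimes pattern: with a single input reference, elision already ties the output lifetime to it. A sketch with stand-in types (`Analysis` and `Index` here are hypothetical, not the rust-analyzer types):

struct Analysis { name: String }
struct Index<'a> { name: &'a str }

// Before: fn compute<'a>(analysis: &'a Analysis) -> Index<'a>
// After: the elided form means exactly the same thing.
fn compute(analysis: &Analysis) -> Index<'_> {
    Index { name: &analysis.name }
}

fn main() {
    let analysis = Analysis { name: "static index".to_string() };
    println!("{}", compute(&analysis).name);
}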
@@ -189,7 +191,7 @@ pub fn compute<'a>(analysis: &'a Analysis) -> StaticIndex<'a> {

fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
    for token in sema.descend_into_macros(token) {
-        let def = Definition::from_token(&sema, &token);
+        let def = Definition::from_token(sema, &token);
        if let [x] = def.as_slice() {
            return Some(*x);
        } else {
@@ -342,7 +342,7 @@ fn traverse(
            element_to_highlight.clone(),
        ) {
            if inside_attribute {
-                highlight = highlight | HlMod::Attribute;
+                highlight |= HlMod::Attribute
            }

            hl.add(HlRange { range, highlight, binding_hash });
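`highlight = highlight | HlMod::Attribute` becomes the compound assignment `|=` (clippy's assign_op_pattern). Illustrated with plain integer bit flags instead of the Highlight type:

fn main() {
    const ATTRIBUTE: u32 = 0b0001;
    let mut highlight: u32 = 0b0100;
    // Before: highlight = highlight | ATTRIBUTE;
    highlight |= ATTRIBUTE;
    assert_eq!(highlight, 0b0101);
}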
@@ -538,7 +538,7 @@ fn highlight_def(
        Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
    };

-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);
    let def_crate = def.module(db).map(hir::Module::krate).or_else(|| match def {
        Definition::ModuleDef(hir::ModuleDef::Module(module)) => Some(module.krate()),
        _ => None,
@@ -591,7 +591,7 @@ fn highlight_method_call(
        h |= HlMod::Trait;
    }

-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);
    let def_crate = func.module(sema.db).krate();
    let is_from_other_crate = Some(def_crate) != krate;
    let is_from_builtin_crate = famous_defs.builtin_crates().any(|it| def_crate == it);
@@ -26,7 +26,7 @@ pub(super) fn add(&mut self, hl_range: HlRange) {
        self.root.add(hl_range);
    }

-    pub(super) fn to_vec(self) -> Vec<HlRange> {
+    pub(super) fn to_vec(&self) -> Vec<HlRange> {
        let mut res = Vec::new();
        self.root.flatten(&mut res);
        res
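Changing `to_vec(self)` to `to_vec(&self)` matches clippy's wrong_self_convention: a `to_*` method is expected to borrow the receiver and produce a new value, not consume it. A sketch with a trivial wrapper type:

struct Ranges {
    items: Vec<u32>,
}

impl Ranges {
    // Before: fn to_vec(self) -> Vec<u32> consumed the receiver.
    fn to_vec(&self) -> Vec<u32> {
        self.items.clone()
    }
}

fn main() {
    let ranges = Ranges { items: vec![1, 2, 3] };
    let flat = ranges.to_vec();
    // The receiver is still usable because to_vec only borrows it now.
    assert_eq!(flat, ranges.items);
}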
@@ -227,9 +227,7 @@ fn on_arrow_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
    if file_text.char_at(after_arrow) != Some('{') {
        return None;
    }
-    if find_node_at_offset::<ast::RetType>(file.syntax(), offset).is_none() {
-        return None;
-    }
+    find_node_at_offset::<ast::RetType>(file.syntax(), offset)?;

    Some(TextEdit::insert(after_arrow, " ".to_string()))
}
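In a function returning `Option`, the `if expr.is_none() { return None; }` guard collapses into the `?` operator (clippy's question_mark lint). A minimal sketch with a made-up lookup:

fn find_brace(text: &str) -> Option<usize> {
    text.find('{')
}

fn on_typed(text: &str) -> Option<String> {
    // Before:
    // if find_brace(text).is_none() { return None; }
    // After: `?` discards the value but still early-returns on None.
    find_brace(text)?;
    Some(" ".to_string())
}

fn main() {
    assert_eq!(on_typed("fn f() -> i32 {"), Some(" ".to_string()));
    assert_eq!(on_typed("fn f() -> i32"), None);
}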
@@ -27,7 +27,7 @@ pub(crate) struct Logger {

impl Logger {
    pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger {
-        let filter = filter.map_or(EnvFilter::default(), |dirs| EnvFilter::new(dirs));
+        let filter = filter.map_or(EnvFilter::default(), EnvFilter::new);

        Logger { filter, file }
    }
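The closure `|dirs| EnvFilter::new(dirs)` only forwards its argument, so the function path can be passed directly (clippy's redundant_closure). Since EnvFilter lives in tracing-subscriber, the sketch below forwards `String::from` instead:

fn main() {
    let filter: Option<&str> = Some("info");
    // Before: let f = filter.map_or(String::default(), |dirs| String::from(dirs));
    let f = filter.map_or(String::default(), String::from);
    assert_eq!(f, "info");
}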
@@ -673,7 +673,7 @@ pub fn files(&self) -> FilesConfig {
        FilesConfig {
            watcher: match self.data.files_watcher.as_str() {
                "notify" => FilesWatcher::Notify,
-                "client" | _ => FilesWatcher::Client,
+                _ => FilesWatcher::Client,
            },
            exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
        }
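`"client" | _` is redundant because the wildcard already matches everything (clippy's wildcard_in_or_patterns). A sketch with a stand-in Watcher enum:

#[derive(Debug, PartialEq)]
enum Watcher { Notify, Client }

fn pick(name: &str) -> Watcher {
    match name {
        "notify" => Watcher::Notify,
        // Before: "client" | _ => Watcher::Client,
        _ => Watcher::Client,
    }
}

fn main() {
    assert_eq!(pick("notify"), Watcher::Notify);
    assert_eq!(pick("anything else"), Watcher::Client);
}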
@@ -367,9 +367,8 @@ fn flatten_document_symbol(
    let mut tags = Vec::new();

    #[allow(deprecated)]
-    match symbol.deprecated {
-        Some(true) => tags.push(SymbolTag::Deprecated),
-        _ => {}
+    if let Some(true) = symbol.deprecated {
+        tags.push(SymbolTag::Deprecated)
    }

    #[allow(deprecated)]
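A `match` with one interesting arm plus a `_ => {}` catch-all reads better as `if let` (clippy's single_match). A sketch with a hypothetical tag list:

fn main() {
    let deprecated: Option<bool> = Some(true);
    let mut tags: Vec<&str> = Vec::new();
    // Before:
    // match deprecated { Some(true) => tags.push("deprecated"), _ => {} }
    // After:
    if let Some(true) = deprecated {
        tags.push("deprecated")
    }
    assert_eq!(tags, ["deprecated"]);
}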
@@ -1094,7 +1093,7 @@ pub(crate) fn handle_code_action_resolve(
    let _p = profile::span("handle_code_action_resolve");
    let params = match code_action.data.take() {
        Some(it) => it,
-        None => return Err(invalid_params_error(format!("code action without data")).into()),
+        None => return Err(invalid_params_error("code action without data".to_string()).into()),
    };

    let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
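`format!` with no interpolation is just a roundabout `.to_string()`; this looks like clippy's useless_format fix. One-line sketch:

fn main() {
    let before = format!("code action without data");
    let after = "code action without data".to_string();
    assert_eq!(before, after);
}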
@@ -1153,7 +1152,7 @@ pub(crate) fn handle_code_action_resolve(
fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
    let id_parts = action_id.split(':').collect_vec();
    match id_parts.as_slice() {
-        &[assist_id_string, assist_kind_string, index_string] => {
+        [assist_id_string, assist_kind_string, index_string] => {
            let assist_kind: AssistKind = assist_kind_string.parse()?;
            let index: usize = match index_string.parse() {
                Ok(index) => index,
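Matching `[a, b, c]` instead of `&[a, b, c]` leans on match ergonomics; the explicit reference pattern is unnecessary when matching against a slice reference. A sketch with a plain string split (the id format below is made up):

fn parse(parts: &[&str]) -> Option<(String, String, String)> {
    match parts {
        // Before: &[id, kind, index] => ...
        [id, kind, index] => Some((id.to_string(), kind.to_string(), index.to_string())),
        _ => None,
    }
}

fn main() {
    let id_parts: Vec<&str> = "assists:quickfix:1".split(':').collect();
    assert!(parse(id_parts.as_slice()).is_some());
    assert!(parse(&["only", "two"]).is_none());
}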
@@ -77,7 +77,7 @@ pub(crate) fn report_progress(
        return;
    }
    let percentage = fraction.map(|f| {
-        assert!(0.0 <= f && f <= 1.0);
+        assert!((0.0..=1.0).contains(&f));
        (f * 100.0) as u32
    });
    let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title));
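The bounds check on the progress fraction becomes `RangeInclusive::contains` (clippy's manual_range_contains); the two asserts below are equivalent:

fn main() {
    let f: f64 = 0.42;
    assert!(0.0 <= f && f <= 1.0);      // before
    assert!((0.0..=1.0).contains(&f));  // after
}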
@@ -166,7 +166,7 @@ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
            self.handle_event(event)?
        }

-        Err("client exited without proper shutdown sequence")?
+        return Err("client exited without proper shutdown sequence".into());
    }

    fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
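`Err(..)?` funnels the error through the `?` conversion machinery just to return it; clippy's try_err prefers an explicit `return Err(..into())`. A sketch with a boxed error alias (the alias is an assumption, not copied from the source):

use std::error::Error;

type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;

fn run(events: Vec<&str>) -> Result<()> {
    for event in events {
        if event == "shutdown" {
            return Ok(());
        }
    }
    // Before: Err("client exited without proper shutdown sequence")?
    return Err("client exited without proper shutdown sequence".into());
}

fn main() {
    assert!(run(vec!["request", "shutdown"]).is_ok());
    assert!(run(vec!["request"]).is_err());
}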
@@ -769,7 +769,6 @@ fn update_diagnostics(&mut self) {
                    if !is_cancelled(&*err) {
                        tracing::error!("failed to compute diagnostics: {:?}", err);
                    }
-                    ()
                })
                .ok()
                .map(|diags| (file_id, diags))
@@ -211,7 +211,7 @@ fn eq_ignore_build_data<'a>(

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
-            if Arc::ptr_eq(&workspaces, &self.workspaces) {
+            if Arc::ptr_eq(workspaces, &self.workspaces) {
                let workspaces = workspaces
                    .iter()
                    .cloned()
@@ -417,7 +417,7 @@ fn reload_flycheck(&mut self) {
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
-                    root.to_path_buf().into(),
+                    root.to_path_buf(),
                )
            })
            .collect();
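`root.to_path_buf().into()` converts a PathBuf into the type it already is, so the `.into()` is a no-op (clippy's useless_conversion). Sketch with std path types only:

use std::path::{Path, PathBuf};

fn watch(root: PathBuf) -> PathBuf {
    root
}

fn main() {
    let root = Path::new("/tmp/project");
    // Before: watch(root.to_path_buf().into()), where .into() yields the same PathBuf.
    let watched = watch(root.to_path_buf());
    assert_eq!(watched, PathBuf::from("/tmp/project"));
}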
@@ -91,7 +91,7 @@ fn files_are_tidy() {
    tidy_marks.finish();
}

-fn check_cargo_toml(path: &Path, text: String) -> () {
+fn check_cargo_toml(path: &Path, text: String) {
    let mut section = None;
    for (line_no, text) in text.lines().enumerate() {
        let text = text.trim();
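The `()` removed from the diagnostics closure earlier and the `-> ()` dropped here are both the unused_unit pattern: the unit value and the unit return type are implicit. A sketch mirroring the closure case:

fn main() {
    let result: Result<u32, &str> = Err("failed to compute diagnostics");
    let diags = result
        .map_err(|err| {
            eprintln!("error: {}", err);
            // Before the fix an explicit `()` stood here; the block already evaluates to unit.
        })
        .ok();
    assert_eq!(diags, None);
}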