8063: couple clippy::complexity fixes r=matklad a=matthiaskrgr

avoid redundant `.into()` calls to convert T into identical T (`let x: String = String::from("hello").into();`)
use `if let Some(x)` instead of `.is_some()` + `.unwrap()`
don't clone Copy types
remove redundant wrapping around `?` expressions: `Some(Some(3)?)` can just be `Some(3)`
use `.map(|x| y)` instead of `.and_then(|x| Some(y))` on `Option`s

Co-authored-by: Matthias Krüger <matthias.krueger@famsik.de>
This commit is contained in:
bors[bot] 2021-03-17 08:12:34 +00:00 committed by GitHub
commit f7fbea509f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
40 changed files with 97 additions and 122 deletions

View File

@ -197,7 +197,7 @@ impl ChangeFixture {
change.change_file(file_id, Some(Arc::new(text)));
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path.into());
file_set.insert(file_id, path);
files.push(file_id);
file_id.0 += 1;
}

View File

@ -124,5 +124,5 @@ fn resolve_doc_path(
Some(Namespace::Macros) => return None,
None => resolved.iter_items().find_map(|it| it.as_module_def_id())?,
};
Some(def.into())
Some(def)
}

View File

@ -1335,7 +1335,7 @@ impl Local {
// FIXME: why is this an option? It shouldn't be?
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
let body = db.body(self.parent.into());
let body = db.body(self.parent);
match &body[self.pat_id] {
Pat::Bind { name, .. } => Some(name.clone()),
_ => None,
@ -1347,7 +1347,7 @@ impl Local {
}
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
let body = db.body(self.parent.into());
let body = db.body(self.parent);
matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
}
@ -1360,7 +1360,7 @@ impl Local {
}
pub fn ty(self, db: &dyn HirDatabase) -> Type {
let def = DefWithBodyId::from(self.parent);
let def = self.parent;
let infer = db.infer(def);
let ty = infer[self.pat_id].clone();
let krate = def.module(db.upcast()).krate();
@ -1368,7 +1368,7 @@ impl Local {
}
pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
let (_body, source_map) = db.body_with_source_map(self.parent.into());
let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
src.map(|ast| {
@ -1393,12 +1393,12 @@ impl Label {
}
pub fn name(self, db: &dyn HirDatabase) -> Name {
let body = db.body(self.parent.into());
let body = db.body(self.parent);
body[self.label_id].name.clone()
}
pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
let (_body, source_map) = db.body_with_source_map(self.parent.into());
let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.label_syntax(self.label_id);
let root = src.file_syntax(db.upcast());
src.map(|ast| ast.to_node(&root))

View File

@ -835,7 +835,7 @@ impl<'a> SemanticsScope<'a> {
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
resolver::ScopeDef::Local(pat_id) => {
let parent = resolver.body_owner().unwrap().into();
let parent = resolver.body_owner().unwrap();
ScopeDef::Local(Local { parent, pat_id })
}
};

View File

@ -484,7 +484,7 @@ fn resolve_hir_path_(
resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: body_owner?.into(), pat_id };
let var = Local { parent: body_owner?, pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),

View File

@ -325,7 +325,7 @@ impl Attrs {
if docs.is_empty() {
None
} else {
Some(Documentation(docs.into()))
Some(Documentation(docs))
}
}
}

View File

@ -23,7 +23,7 @@ pub struct Hygiene {
impl Hygiene {
pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id.clone())) }
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
}
pub fn new_unhygienic() -> Hygiene {
@ -129,10 +129,7 @@ impl HygieneInfo {
mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
mbe::Origin::Def => (
&self.macro_def.1,
self.def_start
.as_ref()
.expect("`Origin::Def` used with non-`macro_rules!` macro")
.clone(),
*self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"),
),
};

View File

@ -475,7 +475,7 @@ fn original_range_opt(
let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
== skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
Some(node.value.descendants().find_map(|it| {
node.value.descendants().find_map(|it| {
let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
let first = ascend_call_token(db, &expansion, node.with_value(first))?;
@ -487,7 +487,7 @@ fn original_range_opt(
}
Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
})?)
})
}
fn ascend_call_token(

View File

@ -203,7 +203,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: fn_src.file_id,
ident_type: IdentType::Function,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@ -261,7 +261,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: fn_src.file_id,
ident_type: IdentType::Argument,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: param_to_rename.expected_case,
ident_text: param_to_rename.current_name.to_string(),
suggested_text: param_to_rename.suggested_text,
@ -313,7 +313,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: source_ptr.file_id,
ident_type: IdentType::Variable,
ident: AstPtr::new(&name_ast).into(),
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@ -403,7 +403,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: struct_src.file_id,
ident_type: IdentType::Structure,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@ -448,7 +448,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: struct_src.file_id,
ident_type: IdentType::Field,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: field_to_rename.expected_case,
ident_text: field_to_rename.current_name.to_string(),
suggested_text: field_to_rename.suggested_text,
@ -527,7 +527,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: enum_src.file_id,
ident_type: IdentType::Enum,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@ -572,7 +572,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: enum_src.file_id,
ident_type: IdentType::Variant,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: variant_to_rename.expected_case,
ident_text: variant_to_rename.current_name.to_string(),
suggested_text: variant_to_rename.suggested_text,
@ -617,7 +617,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: const_src.file_id,
ident_type: IdentType::Constant,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@ -665,7 +665,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: static_src.file_id,
ident_type: IdentType::StaticVariable,
ident: AstPtr::new(&ast_ptr).into(),
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,

View File

@ -44,7 +44,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
self.check_for_filter_map_next(db);
let body = db.body(self.owner.into());
let body = db.body(self.owner);
for (id, expr) in body.exprs.iter() {
if let Some((variant_def, missed_fields, true)) =
@ -98,7 +98,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(id) {
let root = source_ptr.file_syntax(db.upcast());
@ -128,7 +128,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.pat_syntax(id) {
if let Some(expr) = source_ptr.value.as_ref().left() {
@ -175,7 +175,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
};
// Search function body for instances of .filter_map(..).next()
let body = db.body(self.owner.into());
let body = db.body(self.owner);
let mut prev = None;
for (id, expr) in body.exprs.iter() {
if let Expr::MethodCall { receiver, .. } = expr {
@ -192,7 +192,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
if function_id == *next_function_id {
if let Some(filter_map_id) = prev {
if *receiver == filter_map_id {
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(next_source_ptr) = source_map.expr_syntax(id) {
self.sink.push(ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
@ -262,7 +262,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let mut arg_count = args.len();
if arg_count != param_count {
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
if is_method_call {
param_count -= 1;
@ -287,7 +287,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
infer: Arc<InferenceResult>,
) {
let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
db.body_with_source_map(self.owner.into());
db.body_with_source_map(self.owner);
let match_expr_ty = if infer.type_of_expr[match_expr].is_unknown() {
return;
@ -393,7 +393,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
};
if params.len() > 0 && params[0] == mismatch.actual {
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(id) {
self.sink.push(MissingOkOrSomeInTailExpr {
@ -425,7 +425,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
return;
}
let (_, source_map) = db.body_with_source_map(self.owner.into());
let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(possible_tail_id) {
self.sink

View File

@ -29,7 +29,7 @@ impl<'a, 'b> UnsafeValidator<'a, 'b> {
}
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
let def = self.owner.into();
let def = self.owner;
let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
let is_unsafe = match self.owner {
DefWithBodyId::FunctionId(it) => db.function_data(it).qualifier.is_unsafe,

View File

@ -52,7 +52,7 @@ impl ToChalk for Ty {
TyKind::Tuple(cardinality, substs) => {
let substitution = substs.to_chalk(db);
chalk_ir::TyKind::Tuple(cardinality.into(), substitution).intern(&Interner)
chalk_ir::TyKind::Tuple(cardinality, substitution).intern(&Interner)
}
TyKind::Raw(mutability, ty) => {
let ty = ty.to_chalk(db);

View File

@ -180,7 +180,7 @@ fn missing_record_expr_field_fix(
let def_id = sema.resolve_variant(record_lit)?;
let module;
let def_file_id;
let record_fields = match VariantDef::from(def_id) {
let record_fields = match def_id {
VariantDef::Struct(s) => {
module = s.module(sema.db);
let source = s.source(sema.db)?;

View File

@ -93,7 +93,7 @@ pub(crate) fn remove_links(markdown: &str) -> String {
let mut cb = |_: BrokenLink| {
let empty = InlineStr::try_from("").unwrap();
Some((CowStr::Inlined(empty.clone()), CowStr::Inlined(empty)))
Some((CowStr::Inlined(empty), CowStr::Inlined(empty)))
};
let doc = Parser::new_with_broken_link_callback(markdown, opts, Some(&mut cb));
let doc = doc.filter_map(move |evt| match evt {
@ -147,7 +147,7 @@ fn get_doc_link(db: &RootDatabase, definition: Definition) -> Option<String> {
_ => return None,
};
let ns = ItemInNs::from(target_def.clone());
let ns = ItemInNs::from(target_def);
let module = definition.module(db)?;
let krate = module.krate();

View File

@ -102,7 +102,7 @@ fn extract_positioned_link_from_comment(
None => comment_range.end(),
}
})?;
Some((def_link.to_string(), ns.clone()))
Some((def_link.to_string(), *ns))
}
fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {

View File

@ -56,7 +56,7 @@ pub(super) fn ra_fixture(
for range in inj.map_range_up(hl_range.range) {
if let Some(range) = literal.map_range_up(range) {
hl_range.range = range;
hl.add(hl_range.clone());
hl.add(hl_range);
}
}
}

View File

@ -73,8 +73,8 @@ fn find_parent_and_path(
) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
return star.ancestors().find_map(|n| {
find_use_tree_list(n.clone())
.and_then(|(u, p)| Some((Either::Right(u), p)))
.or_else(|| find_use_tree(n).and_then(|(u, p)| Some((Either::Left(u), p))))
.map(|(u, p)| (Either::Right(u), p))
.or_else(|| find_use_tree(n).map(|(u, p)| (Either::Left(u), p)))
});
fn find_use_tree_list(n: SyntaxNode) -> Option<(ast::UseTreeList, ast::Path)> {

View File

@ -145,11 +145,8 @@ fn insert_import(
variant_hir_name: &Name,
) -> Option<()> {
let db = ctx.db();
let mod_path = module.find_use_path_prefixed(
db,
enum_module_def.clone(),
ctx.config.insert_use.prefix_kind,
);
let mod_path =
module.find_use_path_prefixed(db, *enum_module_def, ctx.config.insert_use.prefix_kind);
if let Some(mut mod_path) = mod_path {
mod_path.pop_segment();
mod_path.push_segment(variant_hir_name.clone());

View File

@ -56,7 +56,7 @@ impl Builder {
impl Completions {
pub(crate) fn add(&mut self, item: CompletionItem) {
self.buf.push(item.into())
self.buf.push(item)
}
pub(crate) fn add_all<I>(&mut self, items: I)

View File

@ -26,11 +26,11 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
let add_resolution = match &res {
hir::ScopeDef::ModuleDef(def) => match def {
hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => {
acc.add_struct_pat(ctx, strukt.clone(), Some(name.clone()));
acc.add_struct_pat(ctx, *strukt, Some(name.clone()));
true
}
hir::ModuleDef::Variant(variant) if !ctx.is_irrefutable_pat_binding => {
acc.add_variant_pat(ctx, variant.clone(), Some(name.clone()));
acc.add_variant_pat(ctx, *variant, Some(name.clone()));
true
}
hir::ModuleDef::Adt(hir::Adt::Enum(..))

View File

@ -89,7 +89,7 @@ enum State {
impl FormatStrParser {
pub(crate) fn new(input: String) -> Self {
Self {
input: input.into(),
input: input,
output: String::new(),
extracted_expressions: Vec::new(),
state: State::NotExpr,

View File

@ -81,7 +81,7 @@ impl<'a> RenderContext<'a> {
}
fn snippet_cap(&self) -> Option<SnippetCap> {
self.completion.config.snippet_cap.clone()
self.completion.config.snippet_cap
}
fn db(&self) -> &'a RootDatabase {

View File

@ -181,7 +181,7 @@ impl NameClass {
},
ast::SelfParam(it) => {
let def = sema.to_def(&it)?;
Some(NameClass::Definition(Definition::Local(def.into())))
Some(NameClass::Definition(Definition::Local(def)))
},
ast::RecordField(it) => {
let field: hir::Field = sema.to_def(&it)?;

View File

@ -80,7 +80,7 @@ impl ImportScope {
})
.last()
.map(|last_inner_element| {
(InsertPosition::After(last_inner_element.into()), AddBlankLine::BeforeTwice)
(InsertPosition::After(last_inner_element), AddBlankLine::BeforeTwice)
})
.unwrap_or_else(|| self.first_insert_pos())
}

View File

@ -161,15 +161,9 @@ impl Definition {
if let Definition::Local(var) = self {
let range = match var.parent(db) {
DefWithBody::Function(f) => {
f.source(db).and_then(|src| Some(src.value.syntax().text_range()))
}
DefWithBody::Const(c) => {
c.source(db).and_then(|src| Some(src.value.syntax().text_range()))
}
DefWithBody::Static(s) => {
s.source(db).and_then(|src| Some(src.value.syntax().text_range()))
}
DefWithBody::Function(f) => f.source(db).map(|src| src.value.syntax().text_range()),
DefWithBody::Const(c) => c.source(db).map(|src| src.value.syntax().text_range()),
DefWithBody::Static(s) => s.source(db).map(|src| src.value.syntax().text_range()),
};
let mut res = FxHashMap::default();
res.insert(file_id, range);
@ -179,33 +173,29 @@ impl Definition {
if let Definition::GenericParam(hir::GenericParam::LifetimeParam(param)) = self {
let range = match param.parent(db) {
hir::GenericDef::Function(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::GenericDef::Adt(it) => match it {
hir::Adt::Struct(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
}
hir::Adt::Union(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
}
hir::Adt::Enum(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::Adt::Union(it) => it.source(db).map(|src| src.value.syntax().text_range()),
hir::Adt::Enum(it) => it.source(db).map(|src| src.value.syntax().text_range()),
},
hir::GenericDef::Trait(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::GenericDef::TypeAlias(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::GenericDef::Impl(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::GenericDef::Variant(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
hir::GenericDef::Const(it) => {
it.source(db).and_then(|src| Some(src.value.syntax().text_range()))
it.source(db).map(|src| src.value.syntax().text_range())
}
};
let mut res = FxHashMap::default();

View File

@ -139,11 +139,8 @@ impl<'db> MatchFinder<'db> {
pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
use ide_db::base_db::SourceDatabaseExt;
use ide_db::symbol_index::SymbolsDatabase;
if let Some(first_file_id) = db
.local_roots()
.iter()
.next()
.and_then(|root| db.source_root(root.clone()).iter().next())
if let Some(first_file_id) =
db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
{
Ok(MatchFinder::in_context(
db,

View File

@ -127,7 +127,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
// First pass at matching, where we check that node types and idents match.
match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
match_state.validate_range(&sema.original_range(code))?;

View File

@ -120,7 +120,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
Some("pat") => parent.token_trees.push(make_ident("foo")),
Some("path") => parent.token_trees.push(make_ident("foo")),
Some("literal") => parent.token_trees.push(make_literal("1")),
Some("expr") => parent.token_trees.push(make_ident("foo").into()),
Some("expr") => parent.token_trees.push(make_ident("foo")),
Some("lifetime") => {
parent.token_trees.push(make_punct('\''));
parent.token_trees.push(make_ident("a"));
@ -157,17 +157,15 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
if i + 1 != cnt {
if let Some(sep) = separator {
match sep {
Separator::Literal(it) => parent
.token_trees
.push(tt::Leaf::Literal(it.clone().into()).into()),
Separator::Ident(it) => parent
.token_trees
.push(tt::Leaf::Ident(it.clone().into()).into()),
Separator::Literal(it) => {
parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
}
Separator::Ident(it) => {
parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
}
Separator::Puncts(puncts) => {
for it in puncts {
parent
.token_trees
.push(tt::Leaf::Punct(it.clone().into()).into())
parent.token_trees.push(tt::Leaf::Punct(*it).into())
}
}
};
@ -176,8 +174,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
}
}
Op::Subtree { tokens, delimiter } => {
let mut subtree =
tt::Subtree { delimiter: delimiter.clone(), token_trees: Vec::new() };
let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
tokens.iter().for_each(|it| {
collect_from_op(it, &mut subtree, seed);
});

View File

@ -540,7 +540,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
let mut src = TtIter::new(src);
let mut stack: SmallVec<[TtIter; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_reover_item = None;
let mut error_recover_item = None;
let mut bindings_builder = BindingsBuilder::default();
@ -579,9 +579,9 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
stdx::always!(cur_items.is_empty());
if error_items.len() > 0 {
error_reover_item = error_items.pop().map(|it| it.bindings);
error_recover_item = error_items.pop().map(|it| it.bindings);
} else if eof_items.len() > 0 {
error_reover_item = Some(eof_items[0].bindings.clone());
error_recover_item = Some(eof_items[0].bindings.clone());
}
// We need to do some post processing after the `match_loop_inner`.
@ -594,8 +594,8 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
res.bindings = bindings_builder.build(&eof_items[0].bindings);
} else {
// Error recovery
if error_reover_item.is_some() {
res.bindings = bindings_builder.build(&error_reover_item.unwrap());
if let Some(item) = error_recover_item {
res.bindings = bindings_builder.build(&item);
}
res.add_err(ExpandError::UnexpectedToken);
}
@ -618,7 +618,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
}
res.add_err(err!("leftover tokens"));
if let Some(error_reover_item) = error_reover_item {
if let Some(error_reover_item) = error_recover_item {
res.bindings = bindings_builder.build(&error_reover_item);
}
return res;
@ -722,7 +722,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
input
.expect_literal()
.map(|literal| {
let lit = tt::Leaf::from(literal.clone());
let lit = literal.clone();
match neg {
None => Some(lit.into()),
Some(neg) => Some(tt::TokenTree::Subtree(tt::Subtree {

View File

@ -262,7 +262,7 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Par
if puncts.len() == 3 {
return Err(ParseError::InvalidRepeat);
}
puncts.push(punct.clone())
puncts.push(*punct)
}
_ => return Err(ParseError::InvalidRepeat),
}

View File

@ -130,7 +130,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res.push(match expanded.value {
None => break,
Some(tt @ tt::TokenTree::Leaf(_)) => {
tt::Subtree { delimiter: None, token_trees: vec![tt.into()] }
tt::Subtree { delimiter: None, token_trees: vec![tt] }
}
Some(tt::TokenTree::Subtree(tt)) => tt,
});
@ -727,7 +727,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
// Note: We always assume the semi-colon would be the last token in
// other parts of RA such that we don't add whitespace here.
if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
self.inner.token(WHITESPACE, " ".into());
self.inner.token(WHITESPACE, " ");
self.text_pos += TextSize::of(' ');
}
}

View File

@ -35,7 +35,7 @@ mod rule_parsing {
fn test_invalid_arms() {
fn check(macro_body: &str, err: ParseError) {
let m = parse_macro_arm(macro_body);
assert_eq!(m, Err(err.into()));
assert_eq!(m, Err(err));
}
check("invalid", ParseError::Expected("expected subtree".into()));

View File

@ -154,5 +154,5 @@ fn send_request(
req: Request,
) -> io::Result<Option<Response>> {
req.write(&mut writer)?;
Ok(Response::read(&mut reader)?)
Response::read(&mut reader)
}

View File

@ -236,13 +236,10 @@ mod tests {
subtree
.token_trees
.push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
subtree.token_trees.push(TokenTree::Subtree(
Subtree {
delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
token_trees: vec![],
}
.into(),
));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
token_trees: vec![],
}));
subtree
}

View File

@ -161,7 +161,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
return Vec::new();
}
let severity = diagnostic_severity(config, rd.level.clone(), rd.code.clone());
let severity = diagnostic_severity(config, rd.level, rd.code.clone());
let mut source = String::from("rustc");
let mut code = rd.code.as_ref().map(|c| c.code.clone());

View File

@ -1134,7 +1134,7 @@ pub(crate) fn handle_code_lens_resolve(
) -> Result<CodeLens> {
let annotation = from_proto::annotation(&snap, code_lens)?;
Ok(to_proto::code_lens(&snap, snap.analysis.resolve_annotation(annotation)?)?)
to_proto::code_lens(&snap, snap.analysis.resolve_annotation(annotation)?)
}
pub(crate) fn handle_document_highlight(

View File

@ -36,7 +36,7 @@ impl Progress {
impl GlobalState {
pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
let message = message.into();
let message = message;
self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams { typ, message },
)

View File

@ -287,7 +287,7 @@ pub(crate) fn signature_help(
let params = call_info
.parameter_ranges()
.iter()
.map(|it| [u32::from(it.start()).into(), u32::from(it.end()).into()])
.map(|it| [u32::from(it.start()), u32::from(it.end())])
.map(|label_offsets| lsp_types::ParameterInformation {
label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
documentation: None,

View File

@ -479,7 +479,7 @@ impl ast::MatchArmList {
Some(t) => t,
None => return self.clone(),
};
let position = InsertPosition::Before(r_curly.into());
let position = InsertPosition::Before(r_curly);
let arm_ws = tokens::WsBuilder::new(" ");
let match_indent = &leading_indent(self.syntax()).unwrap_or_default();
let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent));

View File

@ -122,5 +122,5 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken
if right.kind() == T![;] || right.kind() == T![,] {
return None;
}
Some(make::tokens::single_space().into())
Some(make::tokens::single_space())
}