don't clone types that are copy (clippy::clone_on_copy)

This commit is contained in:
Matthias Krüger 2021-03-17 01:56:31 +01:00
parent c5d654d513
commit 048dad8c2e
12 changed files with 18 additions and 28 deletions

View File

@@ -23,7 +23,7 @@ pub struct Hygiene {
 impl Hygiene {
     pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
-        Hygiene { frames: Some(HygieneFrames::new(db, file_id.clone())) }
+        Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
     }
     pub fn new_unhygienic() -> Hygiene {
@@ -129,10 +129,7 @@ fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)>
             mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
             mbe::Origin::Def => (
                 &self.macro_def.1,
-                self.def_start
-                    .as_ref()
-                    .expect("`Origin::Def` used with non-`macro_rules!` macro")
-                    .clone(),
+                *self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"),
             ),
         };

View File

@@ -93,7 +93,7 @@ pub(crate) fn remove_links(markdown: &str) -> String {
     let mut cb = |_: BrokenLink| {
         let empty = InlineStr::try_from("").unwrap();
-        Some((CowStr::Inlined(empty.clone()), CowStr::Inlined(empty)))
+        Some((CowStr::Inlined(empty), CowStr::Inlined(empty)))
     };
     let doc = Parser::new_with_broken_link_callback(markdown, opts, Some(&mut cb));
     let doc = doc.filter_map(move |evt| match evt {
@@ -147,7 +147,7 @@ fn get_doc_link(db: &RootDatabase, definition: Definition) -> Option<String> {
         _ => return None,
     };
-    let ns = ItemInNs::from(target_def.clone());
+    let ns = ItemInNs::from(target_def);
     let module = definition.module(db)?;
     let krate = module.krate();

View File

@@ -102,7 +102,7 @@ fn extract_positioned_link_from_comment(
            None => comment_range.end(),
        }
    })?;
-    Some((def_link.to_string(), ns.clone()))
+    Some((def_link.to_string(), *ns))
 }
 fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {

View File

@@ -56,7 +56,7 @@ pub(super) fn ra_fixture(
        for range in inj.map_range_up(hl_range.range) {
            if let Some(range) = literal.map_range_up(range) {
                hl_range.range = range;
-                hl.add(hl_range.clone());
+                hl.add(hl_range);
            }
        }
    }

View File

@@ -145,11 +145,8 @@ fn insert_import(
     variant_hir_name: &Name,
 ) -> Option<()> {
     let db = ctx.db();
-    let mod_path = module.find_use_path_prefixed(
-        db,
-        enum_module_def.clone(),
-        ctx.config.insert_use.prefix_kind,
-    );
+    let mod_path =
+        module.find_use_path_prefixed(db, *enum_module_def, ctx.config.insert_use.prefix_kind);
     if let Some(mut mod_path) = mod_path {
         mod_path.pop_segment();
         mod_path.push_segment(variant_hir_name.clone());

View File

@@ -26,11 +26,11 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
    let add_resolution = match &res {
        hir::ScopeDef::ModuleDef(def) => match def {
            hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => {
-                acc.add_struct_pat(ctx, strukt.clone(), Some(name.clone()));
+                acc.add_struct_pat(ctx, *strukt, Some(name.clone()));
                true
            }
            hir::ModuleDef::Variant(variant) if !ctx.is_irrefutable_pat_binding => {
-                acc.add_variant_pat(ctx, variant.clone(), Some(name.clone()));
+                acc.add_variant_pat(ctx, *variant, Some(name.clone()));
                true
            }
            hir::ModuleDef::Adt(hir::Adt::Enum(..))

View File

@@ -81,7 +81,7 @@ pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> {
    }
    fn snippet_cap(&self) -> Option<SnippetCap> {
-        self.completion.config.snippet_cap.clone()
+        self.completion.config.snippet_cap
    }
    fn db(&self) -> &'a RootDatabase {

View File

@@ -139,11 +139,8 @@ pub fn in_context(
    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
        use ide_db::base_db::SourceDatabaseExt;
        use ide_db::symbol_index::SymbolsDatabase;
-        if let Some(first_file_id) = db
-            .local_roots()
-            .iter()
-            .next()
-            .and_then(|root| db.source_root(root.clone()).iter().next())
+        if let Some(first_file_id) =
+            db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
        {
            Ok(MatchFinder::in_context(
                db,

View File

@@ -127,7 +127,7 @@ fn try_match(
        restrict_range: &Option<FileRange>,
        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
    ) -> Result<Match, MatchFailed> {
-        let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
+        let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
        // First pass at matching, where we check that node types and idents match.
        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
        match_state.validate_range(&sema.original_range(code))?;

View File

@@ -165,7 +165,7 @@ fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
                }
                Separator::Puncts(puncts) => {
                    for it in puncts {
-                        parent.token_trees.push(tt::Leaf::Punct(it.clone()).into())
+                        parent.token_trees.push(tt::Leaf::Punct(*it).into())
                    }
                }
            };
@@ -174,8 +174,7 @@ fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
            }
        }
        Op::Subtree { tokens, delimiter } => {
-            let mut subtree =
-                tt::Subtree { delimiter: delimiter.clone(), token_trees: Vec::new() };
+            let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
            tokens.iter().for_each(|it| {
                collect_from_op(it, &mut subtree, seed);
            });

View File

@@ -262,7 +262,7 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Par
                if puncts.len() == 3 {
                    return Err(ParseError::InvalidRepeat);
                }
-                puncts.push(punct.clone())
+                puncts.push(*punct)
            }
            _ => return Err(ParseError::InvalidRepeat),
        }

View File

@@ -161,7 +161,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
        return Vec::new();
    }
-    let severity = diagnostic_severity(config, rd.level.clone(), rd.code.clone());
+    let severity = diagnostic_severity(config, rd.level, rd.code.clone());
    let mut source = String::from("rustc");
    let mut code = rd.code.as_ref().map(|c| c.code.clone());