Clippy trivially_copy_pass_by_ref

Author: Jeremy Kolb
Date:   2019-07-05 12:02:32 -04:00
Parent: ec6f71576a
Commit: 001e34e6e3
11 changed files with 30 additions and 30 deletions
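
For context: `trivially_copy_pass_by_ref` is the Clippy lint that fires when a small `Copy` type is taken by reference even though taking it by value is at least as cheap (the size threshold is configurable). Every hunk below applies the same fix: `&self` becomes `self`, `&T` parameters become `T`, and callers drop a `&` or add a `*` to match. A minimal sketch of the pattern, using a hypothetical `Id` type that is not from this repository:

#[derive(Clone, Copy)]
struct Id(u32); // a small Copy type; &Id is no cheaper to pass than Id itself

// Before: warns with clippy::trivially_copy_pass_by_ref
// fn describe(id: &Id) -> String {
//     format!("id {}", id.0)
// }

// After: take the value directly; callers pass `id` instead of `&id`,
// or dereference with `*` when they only hold a reference.
fn describe(id: Id) -> String {
    format!("id {}", id.0)
}

fn main() {
    println!("{}", describe(Id(42)));
}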

View File

@@ -34,8 +34,8 @@ impl AdtDef {
 }
 impl Struct {
-    pub(crate) fn variant_data(&self, db: &impl DefDatabase) -> Arc<VariantData> {
-        db.struct_data(*self).variant_data.clone()
+    pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
+        db.struct_data(self).variant_data.clone()
     }
 }
@@ -69,7 +69,7 @@ fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant>
 impl EnumVariant {
     pub(crate) fn source_impl(
-        &self,
+        self,
         db: &(impl DefDatabase + AstDatabase),
     ) -> Source<TreeArc<ast::EnumVariant>> {
         let src = self.parent.source(db);
@@ -81,7 +81,7 @@ impl EnumVariant {
             .to_owned();
         Source { file_id: src.file_id, ast }
     }
-    pub(crate) fn variant_data(&self, db: &impl DefDatabase) -> Arc<VariantData> {
+    pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
         db.enum_data(self.parent).variants[self.id].variant_data.clone()
     }
 }

View File

@@ -31,7 +31,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         let body = self.func.body(db);
         for e in body.exprs() {
             if let (id, Expr::StructLit { path, fields, spread }) = e {
-                self.validate_struct_literal(id, path, fields, spread, db);
+                self.validate_struct_literal(id, path, fields, *spread, db);
             }
         }
     }
@@ -41,7 +41,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         id: ExprId,
         _path: &Option<Path>,
         fields: &[StructLitField],
-        spread: &Option<ExprId>,
+        spread: Option<ExprId>,
         db: &impl HirDatabase,
     ) {
         if spread.is_some() {

View File

@@ -359,8 +359,8 @@ impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
 }
 impl MacroCallId {
-    pub fn debug_dump(&self, db: &impl AstDatabase) -> String {
-        let loc = self.clone().loc(db);
+    pub fn debug_dump(self, db: &impl AstDatabase) -> String {
+        let loc = self.loc(db);
         let node = loc.ast_id.to_node(db);
         let syntax_str = node.syntax().text().chunks().collect::<Vec<_>>().join(" ");

View File

@@ -85,7 +85,7 @@ enum BindingMode {
 }
 impl BindingMode {
-    pub fn convert(annotation: &BindingAnnotation) -> BindingMode {
+    pub fn convert(annotation: BindingAnnotation) -> BindingMode {
         match annotation {
             BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
             BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
@@ -778,7 +778,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 let mode = if mode == &BindingAnnotation::Unannotated {
                     default_bm
                 } else {
-                    BindingMode::convert(mode)
+                    BindingMode::convert(*mode)
                 };
                 let inner_ty = if let Some(subpat) = subpat {
                     self.infer_pat(*subpat, expected, default_bm)

View File

@@ -58,7 +58,7 @@ impl CrateImplBlocks {
     pub fn lookup_impl_blocks_for_trait<'a>(
         &'a self,
-        tr: &Trait,
+        tr: Trait,
     ) -> impl Iterator<Item = ImplBlock> + 'a {
         self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map(
             move |(module_id, impl_id)| {
@@ -68,8 +68,8 @@ impl CrateImplBlocks {
         )
     }
-    fn collect_recursive(&mut self, db: &impl HirDatabase, module: &Module) {
-        let module_impl_blocks = db.impls_in_module(module.clone());
+    fn collect_recursive(&mut self, db: &impl HirDatabase, module: Module) {
+        let module_impl_blocks = db.impls_in_module(module);
         for (impl_id, _) in module_impl_blocks.impls.iter() {
             let impl_block = ImplBlock::from_id(module_impl_blocks.module, impl_id);
@@ -94,7 +94,7 @@ impl CrateImplBlocks {
         }
         for child in module.children(db) {
-            self.collect_recursive(db, &child);
+            self.collect_recursive(db, child);
         }
     }
@@ -108,7 +108,7 @@ impl CrateImplBlocks {
            impls_by_trait: FxHashMap::default(),
        };
        if let Some(module) = krate.root_module(db) {
-            crate_impl_blocks.collect_recursive(db, &module);
+            crate_impl_blocks.collect_recursive(db, module);
        }
        Arc::new(crate_impl_blocks)
    }

View File

@@ -131,7 +131,7 @@ impl IntTy {
         IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }
     }
-    pub(crate) fn ty_to_string(&self) -> &'static str {
+    pub(crate) fn ty_to_string(self) -> &'static str {
         match (self.signedness, self.bitness) {
             (Signedness::Signed, IntBitness::Xsize) => "isize",
             (Signedness::Signed, IntBitness::X8) => "i8",

View File

@@ -50,7 +50,7 @@ pub(crate) fn impls_for_trait_query(
         impls.extend(db.impls_for_trait(dep.krate, trait_).iter());
     }
     let crate_impl_blocks = db.impls_in_crate(krate);
-    impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(&trait_));
+    impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_));
     impls.into_iter().collect::<Vec<_>>().into()
 }

View File

@@ -65,7 +65,7 @@ fn impls_for_trait(
     Some(
         impls
-            .lookup_impl_blocks_for_trait(&tr)
+            .lookup_impl_blocks_for_trait(tr)
             .map(|imp| NavigationTarget::from_impl_block(db, imp))
             .collect(),
     )

View File

@@ -213,7 +213,7 @@ impl RunningLineCol {
         self.col_adjust = TextUnit::from(0);
     }
-    fn adjust_col(&mut self, range: &TextRange) {
+    fn adjust_col(&mut self, range: TextRange) {
         self.col_adjust += range.len() - TextUnit::from(1);
     }
 }
@@ -244,7 +244,7 @@ pub fn translate_offset_with_edit(
                 let clamp = offset.min(x.start());
                 return res.to_line_col(clamp);
             } else {
-                res.adjust_col(x);
+                res.adjust_col(*x);
             }
         }
     }

View File

@@ -145,7 +145,7 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
     TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
 }
-fn convert_punct(p: &tt::Punct) -> TtToken {
+fn convert_punct(p: tt::Punct) -> TtToken {
     let kind = match p.char {
         // lexer may produce compound tokens for these ones
         '.' => T![.],
@@ -167,6 +167,6 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
     match leaf {
         tt::Leaf::Literal(l) => convert_literal(l),
         tt::Leaf::Ident(ident) => convert_ident(ident),
-        tt::Leaf::Punct(punct) => convert_punct(punct),
+        tt::Leaf::Punct(punct) => convert_punct(*punct),
     }
 }

View File

@@ -170,20 +170,20 @@ impl<'a> TtCursor<'a> {
         }
     }
-    fn eat_punct3(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
+    fn eat_punct3(&mut self, p: tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
         let sec = *self.eat_punct()?;
         let third = *self.eat_punct()?;
-        Some(smallvec![*p, sec, third])
+        Some(smallvec![p, sec, third])
     }
-    fn eat_punct2(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
+    fn eat_punct2(&mut self, p: tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
         let sec = *self.eat_punct()?;
-        Some(smallvec![*p, sec])
+        Some(smallvec![p, sec])
     }
     fn eat_multi_char_punct<'b, I>(
         &mut self,
-        p: &tt::Punct,
+        p: tt::Punct,
         iter: &mut TokenPeek<'b, I>,
     ) -> Option<SmallVec<[tt::Punct; 3]>>
     where
@@ -250,7 +250,7 @@ impl<'a> TtCursor<'a> {
                 // But at this phase, some punct still is jointed.
                 // So we by pass that check here.
                 let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter());
-                let puncts = self.eat_multi_char_punct(punct, &mut peekable);
+                let puncts = self.eat_multi_char_punct(*punct, &mut peekable);
                 let puncts = puncts.unwrap_or_else(|| smallvec![*punct]);
                 Some(crate::Separator::Puncts(puncts))
@@ -292,7 +292,7 @@ where
         TokenPeek { iter: itertools::multipeek(iter) }
     }
-    pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
+    pub fn current_punct2(&mut self, p: tt::Punct) -> Option<((char, char), bool)> {
         if p.spacing != tt::Spacing::Joint {
             return None;
         }
@@ -302,7 +302,7 @@ where
         Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
     }
-    pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
+    pub fn current_punct3(&mut self, p: tt::Punct) -> Option<((char, char, char), bool)> {
         self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
             if !last_joint {
                 None