rust/clippy_lints/src/bytecount.rs


use crate::utils::{
    contains_name, get_pat_name, match_type, paths, single_segment_path, snippet_with_applicability, span_lint_and_sugg,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, UintTy};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
use rustc_span::Symbol;

declare_clippy_lint! {
    /// **What it does:** Checks for naive byte counts
    ///
    /// **Why is this bad?** The [`bytecount`](https://crates.io/crates/bytecount)
    /// crate has methods to count your bytes faster, especially for large slices.
    ///
    /// **Known problems:** If you have predominantly small slices, the
    /// `bytecount::count(..)` method may actually be slower. However, if you can
    /// ensure that fewer than 2³²-1 matches arise, `naive_count_32(..)` can be
    /// faster in those cases.
    ///
    /// **Example:**
    ///
    /// ```rust
    /// # let vec = vec![1_u8];
    /// vec.iter().filter(|x| **x == 0u8).count(); // use bytecount::count instead
    /// ```
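    ///
    /// A sketch of the kind of replacement the lint suggests (this assumes the
    /// `bytecount` crate has been added as a dependency):
    ///
    /// ```rust,ignore
    /// # let vec = vec![1_u8];
    /// bytecount::count(&vec, 0u8);
    /// ```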
    pub NAIVE_BYTECOUNT,
    perf,
    "use of naive `<slice>.filter(|&x| x == y).count()` to count byte values"
}

declare_lint_pass!(ByteCount => [NAIVE_BYTECOUNT]);

impl<'tcx> LateLintPass<'tcx> for ByteCount {
    fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
        if_chain! {
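            // Only fire on `<receiver>.filter(<closure>).count()`: a `count()` call with
            // no extra arguments whose receiver is a `filter()` call taking a closure.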
            if let ExprKind::MethodCall(ref count, _, ref count_args, _) = expr.kind;
            if count.ident.name == sym!(count);
            if count_args.len() == 1;
            if let ExprKind::MethodCall(ref filter, _, ref filter_args, _) = count_args[0].kind;
            if filter.ident.name == sym!(filter);
            if filter_args.len() == 2;
            if let ExprKind::Closure(_, _, body_id, _, _) = filter_args[1].kind;
            then {
                let body = cx.tcx.hir().body(body_id);
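                // The closure must take a single argument, its body must be an equality
                // test, and the receiver of `filter` must be a slice iterator.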
                if_chain! {
                    if body.params.len() == 1;
                    if let Some(argname) = get_pat_name(&body.params[0].pat);
                    if let ExprKind::Binary(ref op, ref l, ref r) = body.value.kind;
                    if op.node == BinOpKind::Eq;
                    if match_type(cx,
                                  cx.typeck_results().expr_ty(&filter_args[0]).peel_refs(),
                                  &paths::SLICE_ITER);
                    then {
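                        // Whichever side of the `==` is not the closure argument is the
                        // needle; give up unless it is a `u8`.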
                        let needle = match get_path_name(l) {
                            Some(name) if check_arg(name, argname, r) => r,
                            _ => match get_path_name(r) {
                                Some(name) if check_arg(name, argname, l) => l,
                                _ => { return; }
                            }
                        };
                        if ty::Uint(UintTy::U8) != *cx.typeck_results().expr_ty(needle).peel_refs().kind() {
                            return;
                        }
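                        // If the receiver of `filter` is `<slice>.iter()` or `<slice>.iter_mut()`,
                        // count over the slice itself; otherwise keep the receiver as the haystack.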
                        let haystack = if let ExprKind::MethodCall(ref path, _, ref args, _) =
                                filter_args[0].kind {
                            let p = path.ident.name;
                            if (p == sym::iter || p == sym!(iter_mut)) && args.len() == 1 {
                                &args[0]
                            } else {
                                &filter_args[0]
                            }
                        } else {
                            &filter_args[0]
                        };
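                        // Suggest `bytecount::count(<haystack>, <needle>)` as a replacement
                        // for the whole expression.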
                        let mut applicability = Applicability::MaybeIncorrect;
                        span_lint_and_sugg(
                            cx,
                            NAIVE_BYTECOUNT,
                            expr.span,
                            "you appear to be counting bytes the naive way",
                            "consider using the bytecount crate",
                            format!("bytecount::count({}, {})",
                                    snippet_with_applicability(cx, haystack.span, "..", &mut applicability),
                                    snippet_with_applicability(cx, needle.span, "..", &mut applicability)),
                            applicability,
                        );
                    }
                };
            }
        };
    }
}
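
/// Returns `true` if `name` (the path found on one side of the `==`) is the closure
/// argument `arg` and the candidate needle on the other side does not mention it.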
fn check_arg(name: Symbol, arg: Symbol, needle: &Expr<'_>) -> bool {
    name == arg && !contains_name(name, needle)
}
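
/// Resolves `expr` to the name of a single-segment path, looking through borrows,
/// dereferences, boxes, and blocks that consist of nothing but a trailing expression.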
fn get_path_name(expr: &Expr<'_>) -> Option<Symbol> {
    match expr.kind {
        ExprKind::Box(ref e) | ExprKind::AddrOf(BorrowKind::Ref, _, ref e) | ExprKind::Unary(UnOp::Deref, ref e) => {
            get_path_name(e)
        },
        ExprKind::Block(ref b, _) => {
            if b.stmts.is_empty() {
                b.expr.as_ref().and_then(|p| get_path_name(p))
            } else {
                None
            }
        },
        ExprKind::Path(ref qpath) => single_segment_path(qpath).map(|ps| ps.ident.name),
        _ => None,
    }
}