2015-08-16 01:54:43 -05:00
|
|
|
use rustc::lint::*;
|
2015-09-03 09:42:17 -05:00
|
|
|
use rustc_front::hir::*;
|
|
|
|
use reexport::*;
|
|
|
|
use rustc_front::util::{is_comparison_binop, binop_to_string};
|
2015-08-21 15:53:47 -05:00
|
|
|
use syntax::codemap::Span;
|
2015-09-03 09:42:17 -05:00
|
|
|
use rustc_front::visit::{FnKind, Visitor, walk_ty};
|
2015-08-12 04:31:09 -05:00
|
|
|
use rustc::middle::ty;
|
2015-09-16 19:01:41 -05:00
|
|
|
use syntax::ast::IntTy::*;
|
|
|
|
use syntax::ast::UintTy::*;
|
|
|
|
use syntax::ast::FloatTy::*;
|
2014-11-19 02:57:34 -06:00
|
|
|
|
2015-10-11 16:12:21 -05:00
|
|
|
use utils::{match_type, snippet, span_lint, span_help_and_lint};
|
|
|
|
use utils::{is_from_for_desugar, in_macro, in_external_macro};
|
2015-08-21 11:48:36 -05:00
|
|
|
use utils::{LL_PATH, VEC_PATH};
|
2015-07-26 09:53:11 -05:00
|
|
|
|
2014-11-19 03:02:47 -06:00
|
|
|
/// Handles all the linting of funky types
///
/// Lints emitted: `BOX_VEC`, `LINKEDLIST` (see `get_lints`).
#[allow(missing_copy_implementations)]
pub struct TypePass;
|
|
|
|
|
2014-12-25 17:54:44 -06:00
|
|
|
// Warn on `Box<Vec<T>>`: the `Vec` already keeps its elements on the heap,
// so the outer `Box` is a needless extra indirection/allocation.
declare_lint!(pub BOX_VEC, Warn,
              "usage of `Box<Vec<T>>`, vector elements are already on the heap");
// Warn on any use of `LinkedList`; a `Vec` or `VecDeque` is usually the
// better choice.
declare_lint!(pub LINKEDLIST, Warn,
              "usage of LinkedList, usually a vector is faster, or a more specialized data \
               structure like a VecDeque");
|
2014-11-19 02:57:34 -06:00
|
|
|
|
|
|
|
impl LintPass for TypePass {
    /// Registers the lints this pass can emit.
    fn get_lints(&self) -> LintArray {
        lint_array!(BOX_VEC, LINKEDLIST)
    }
}
|
2014-11-19 02:57:34 -06:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
impl LateLintPass for TypePass {
|
|
|
|
fn check_ty(&mut self, cx: &LateContext, ast_ty: &Ty) {
|
2015-08-21 11:40:36 -05:00
|
|
|
if let Some(ty) = cx.tcx.ast_ty_to_ty_cache.borrow().get(&ast_ty.id) {
|
|
|
|
if let ty::TyBox(ref inner) = ty.sty {
|
2015-08-21 12:00:33 -05:00
|
|
|
if match_type(cx, inner, &VEC_PATH) {
|
2015-08-21 11:40:36 -05:00
|
|
|
span_help_and_lint(
|
2015-08-21 12:00:33 -05:00
|
|
|
cx, BOX_VEC, ast_ty.span,
|
2015-09-02 09:46:12 -05:00
|
|
|
"you seem to be trying to use `Box<Vec<T>>`. Consider using just `Vec<T>`",
|
2015-08-27 00:39:40 -05:00
|
|
|
"`Vec<T>` is already on the heap, `Box<Vec<T>>` makes an extra allocation.");
|
2015-08-21 11:40:36 -05:00
|
|
|
}
|
2014-11-20 01:07:37 -06:00
|
|
|
}
|
2015-08-21 12:00:33 -05:00
|
|
|
else if match_type(cx, ty, &LL_PATH) {
|
|
|
|
span_help_and_lint(
|
|
|
|
cx, LINKEDLIST, ast_ty.span,
|
|
|
|
"I see you're using a LinkedList! Perhaps you meant some other data structure?",
|
2015-10-05 15:02:05 -05:00
|
|
|
"a VecDeque might work");
|
2014-11-20 01:07:37 -06:00
|
|
|
}
|
|
|
|
}
|
2014-11-19 02:57:34 -06:00
|
|
|
}
|
2014-12-04 06:35:49 -06:00
|
|
|
}
|
2015-08-12 04:31:09 -05:00
|
|
|
|
|
|
|
/// Lints `let` bindings whose initializer has unit type; see `check_let_unit`.
#[allow(missing_copy_implementations)]
pub struct LetPass;

declare_lint!(pub LET_UNIT_VALUE, Warn,
              "creating a let binding to a value of unit type, which usually can't be used afterwards");
|
2015-08-12 04:31:09 -05:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
fn check_let_unit(cx: &LateContext, decl: &Decl) {
|
2015-08-12 04:31:09 -05:00
|
|
|
if let DeclLocal(ref local) = decl.node {
|
2015-08-25 07:41:35 -05:00
|
|
|
let bindtype = &cx.tcx.pat_ty(&local.pat).sty;
|
2015-08-12 04:31:09 -05:00
|
|
|
if *bindtype == ty::TyTuple(vec![]) {
|
2015-09-10 01:51:14 -05:00
|
|
|
if in_external_macro(cx, decl.span) ||
|
|
|
|
in_macro(cx, local.pat.span) { return; }
|
2015-10-11 16:12:21 -05:00
|
|
|
if is_from_for_desugar(decl) { return; }
|
|
|
|
span_lint(cx, LET_UNIT_VALUE, decl.span, &format!(
|
|
|
|
"this let-binding has unit value. Consider omitting `let {} =`",
|
|
|
|
snippet(cx, local.pat.span, "..")));
|
2015-08-12 04:31:09 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl LintPass for LetPass {
    /// Registers the lints this pass can emit.
    fn get_lints(&self) -> LintArray {
        lint_array!(LET_UNIT_VALUE)
    }
}
|
2015-08-12 04:31:09 -05:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
impl LateLintPass for LetPass {
    /// Delegates every declaration to the free function `check_let_unit`.
    fn check_decl(&mut self, cx: &LateContext, decl: &Decl) {
        check_let_unit(cx, decl)
    }
}
|
2015-08-18 23:54:20 -05:00
|
|
|
|
|
|
|
declare_lint!(pub UNIT_CMP, Warn,
              "comparing unit values (which is always `true` or `false`, respectively)");

/// Lints comparison operators applied to unit values, e.g. `() == ()`.
#[allow(missing_copy_implementations)]
pub struct UnitCmp;
|
|
|
|
|
|
|
|
impl LintPass for UnitCmp {
    /// Registers the lints this pass can emit.
    fn get_lints(&self) -> LintArray {
        lint_array!(UNIT_CMP)
    }
}
|
2015-08-18 23:54:20 -05:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
impl LateLintPass for UnitCmp {
|
|
|
|
fn check_expr(&mut self, cx: &LateContext, expr: &Expr) {
|
2015-09-06 12:44:54 -05:00
|
|
|
if in_macro(cx, expr.span) { return; }
|
2015-08-18 23:54:20 -05:00
|
|
|
if let ExprBinary(ref cmp, ref left, _) = expr.node {
|
|
|
|
let op = cmp.node;
|
|
|
|
let sty = &cx.tcx.expr_ty(left).sty;
|
|
|
|
if *sty == ty::TyTuple(vec![]) && is_comparison_binop(op) {
|
|
|
|
let result = match op {
|
|
|
|
BiEq | BiLe | BiGe => "true",
|
|
|
|
_ => "false"
|
|
|
|
};
|
|
|
|
span_lint(cx, UNIT_CMP, expr.span, &format!(
|
|
|
|
"{}-comparison of unit values detected. This will always be {}",
|
|
|
|
binop_to_string(op), result));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-08-19 17:04:01 -05:00
|
|
|
|
|
|
|
/// Lints numeric `as` casts that can silently lose information.
pub struct CastPass;

declare_lint!(pub CAST_PRECISION_LOSS, Allow,
              "casts that cause loss of precision, e.g `x as f32` where `x: u64`");
declare_lint!(pub CAST_SIGN_LOSS, Allow,
              "casts from signed types to unsigned types, e.g `x as u32` where `x: i32`");
declare_lint!(pub CAST_POSSIBLE_TRUNCATION, Allow,
              "casts that may cause truncation of the value, e.g `x as u8` where `x: u32`, or `x as i32` where `x: f32`");
declare_lint!(pub CAST_POSSIBLE_WRAP, Allow,
              "casts that may cause wrapping around the value, e.g `x as i32` where `x: u32` and `x > i32::MAX`");
|
2015-08-19 17:04:01 -05:00
|
|
|
|
2015-08-20 20:03:37 -05:00
|
|
|
/// Returns the size in bits of an integral type.
|
|
|
|
/// Will return 0 if the type is not an int or uint variant
|
|
|
|
fn int_ty_to_nbits(typ: &ty::TyS) -> usize {
|
2015-08-21 13:44:48 -05:00
|
|
|
let n = match typ.sty {
|
|
|
|
ty::TyInt(i) => 4 << (i as usize),
|
|
|
|
ty::TyUint(u) => 4 << (u as usize),
|
2015-08-21 19:44:05 -05:00
|
|
|
_ => 0
|
2015-08-20 20:03:37 -05:00
|
|
|
};
|
|
|
|
// n == 4 is the usize/isize case
|
|
|
|
if n == 4 { ::std::usize::BITS } else { n }
|
|
|
|
}
|
|
|
|
|
2015-08-21 19:44:05 -05:00
|
|
|
/// Tests whether the given type is `isize` or `usize`.
fn is_isize_or_usize(typ: &ty::TyS) -> bool {
    match typ.sty {
        ty::TyInt(TyIs) => true,
        ty::TyUint(TyUs) => true,
        _ => false,
    }
}
|
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
fn span_precision_loss_lint(cx: &LateContext, expr: &Expr, cast_from: &ty::TyS, cast_to_f64: bool) {
|
2015-08-22 18:06:31 -05:00
|
|
|
let mantissa_nbits = if cast_to_f64 {52} else {23};
|
|
|
|
let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
|
|
|
|
let arch_dependent_str = "on targets with 64-bit wide pointers ";
|
|
|
|
let from_nbits_str = if arch_dependent {"64".to_owned()}
|
|
|
|
else if is_isize_or_usize(cast_from) {"32 or 64".to_owned()}
|
|
|
|
else {int_ty_to_nbits(cast_from).to_string()};
|
|
|
|
span_lint(cx, CAST_PRECISION_LOSS, expr.span,
|
2015-08-26 07:26:43 -05:00
|
|
|
&format!("casting {0} to {1} causes a loss of precision {2}\
|
|
|
|
({0} is {3} bits wide, but {1}'s mantissa is only {4} bits wide)",
|
|
|
|
cast_from, if cast_to_f64 {"f64"} else {"f32"},
|
|
|
|
if arch_dependent {arch_dependent_str} else {""},
|
2015-08-27 00:39:40 -05:00
|
|
|
from_nbits_str, mantissa_nbits));
|
2015-08-22 18:06:31 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Which pointer-width qualifier (if any) to append to a cast lint message.
enum ArchSuffix {
    _32, _64, None
}
|
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
/// Emits `CAST_POSSIBLE_TRUNCATION` and/or `CAST_POSSIBLE_WRAP` for an
/// integer-to-integer cast from `cast_from` to `cast_to`, qualifying the
/// message with a pointer-width suffix when the verdict depends on the target.
fn check_truncation_and_wrapping(cx: &LateContext, expr: &Expr, cast_from: &ty::TyS, cast_to: &ty::TyS) {
    let arch_64_suffix = " on targets with 64-bit wide pointers";
    let arch_32_suffix = " on targets with 32-bit wide pointers";
    let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
    let (from_nbits, to_nbits) = (int_ty_to_nbits(cast_from), int_ty_to_nbits(cast_to));
    // For each of truncation and wrapping, decide: does it apply, and does
    // the answer depend on the target's pointer width?
    let (span_truncation, suffix_truncation, span_wrap, suffix_wrap) =
        match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
            // both pointer-sized, or both fixed-width: widths compare directly
            (true, true) | (false, false) => (
                to_nbits < from_nbits,
                ArchSuffix::None,
                to_nbits == from_nbits && cast_unsigned_to_signed,
                ArchSuffix::None
            ),
            // pointer-sized -> fixed-width: targets of <= 32 bits may truncate
            // (exactly 32 only on 64-bit-pointer targets)
            (true, false) => (
                to_nbits <= 32,
                if to_nbits == 32 {ArchSuffix::_64} else {ArchSuffix::None},
                to_nbits <= 32 && cast_unsigned_to_signed,
                ArchSuffix::_32
            ),
            // fixed-width -> pointer-sized: a 64-bit source may not fit on
            // 32-bit-pointer targets
            (false, true) => (
                from_nbits == 64,
                ArchSuffix::_32,
                cast_unsigned_to_signed,
                if from_nbits == 64 {ArchSuffix::_64} else {ArchSuffix::_32}
            ),
        };
    if span_truncation {
        span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span,
                  &format!("casting {} to {} may truncate the value{}",
                           cast_from, cast_to,
                           match suffix_truncation {
                               ArchSuffix::_32 => arch_32_suffix,
                               ArchSuffix::_64 => arch_64_suffix,
                               ArchSuffix::None => "" }));
    }
    if span_wrap {
        span_lint(cx, CAST_POSSIBLE_WRAP, expr.span,
                  &format!("casting {} to {} may wrap around the value{}",
                           cast_from, cast_to,
                           match suffix_wrap {
                               ArchSuffix::_32 => arch_32_suffix,
                               ArchSuffix::_64 => arch_64_suffix,
                               ArchSuffix::None => "" }));
    }
}
|
|
|
|
|
2015-08-19 17:04:01 -05:00
|
|
|
impl LintPass for CastPass {
    /// Registers the lints this pass can emit.
    fn get_lints(&self) -> LintArray {
        lint_array!(CAST_PRECISION_LOSS,
                    CAST_SIGN_LOSS,
                    CAST_POSSIBLE_TRUNCATION,
                    CAST_POSSIBLE_WRAP)
    }
}
|
2015-08-19 17:04:01 -05:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
impl LateLintPass for CastPass {
    /// Examines every `as` cast between numeric types and reports possible
    /// precision loss, truncation, wrapping, or sign loss.
    fn check_expr(&mut self, cx: &LateContext, expr: &Expr) {
        if let ExprCast(ref ex, _) = expr.node {
            let (cast_from, cast_to) = (cx.tcx.expr_ty(ex), cx.tcx.expr_ty(expr));
            if cast_from.is_numeric() && cast_to.is_numeric() && !in_external_macro(cx, expr.span) {
                match (cast_from.is_integral(), cast_to.is_integral()) {
                    // int -> float: precision loss when the integer is at least
                    // as wide as the float (or is pointer-sized)
                    (true, false) => {
                        let from_nbits = int_ty_to_nbits(cast_from);
                        let to_nbits = if let ty::TyFloat(TyF32) = cast_to.sty {32} else {64};
                        if is_isize_or_usize(cast_from) || from_nbits >= to_nbits {
                            span_precision_loss_lint(cx, expr, cast_from, to_nbits == 64);
                        }
                    },
                    // float -> int: always a possible truncation; additionally
                    // a possible sign loss when the target is unsigned
                    (false, true) => {
                        span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span,
                                  &format!("casting {} to {} may truncate the value",
                                           cast_from, cast_to));
                        if !cast_to.is_signed() {
                            span_lint(cx, CAST_SIGN_LOSS, expr.span,
                                      &format!("casting {} to {} may lose the sign of the value",
                                               cast_from, cast_to));
                        }
                    },
                    // int -> int: check sign loss here, truncation/wrapping in
                    // the dedicated helper
                    (true, true) => {
                        if cast_from.is_signed() && !cast_to.is_signed() {
                            span_lint(cx, CAST_SIGN_LOSS, expr.span,
                                      &format!("casting {} to {} may lose the sign of the value",
                                               cast_from, cast_to));
                        }
                        check_truncation_and_wrapping(cx, expr, cast_from, cast_to);
                    }
                    // float -> float: only f64 -> f32 can truncate
                    (false, false) => {
                        if let (&ty::TyFloat(TyF64),
                                &ty::TyFloat(TyF32)) = (&cast_from.sty, &cast_to.sty) {
                            span_lint(cx, CAST_POSSIBLE_TRUNCATION,
                                      expr.span,
                                      "casting f64 to f32 may truncate the value");
                        }
                    }
                }
            }
        }
    }
}
|
2015-08-21 15:53:47 -05:00
|
|
|
|
|
|
|
declare_lint!(pub TYPE_COMPLEXITY, Warn,
              "usage of very complex types; recommends factoring out parts into `type` definitions");

/// Lints types whose structural complexity score (computed by
/// `TypeComplexityVisitor`) exceeds a fixed threshold.
#[allow(missing_copy_implementations)]
pub struct TypeComplexityPass;
|
|
|
|
|
|
|
|
impl LintPass for TypeComplexityPass {
    /// Registers the lints this pass can emit.
    fn get_lints(&self) -> LintArray {
        lint_array!(TYPE_COMPLEXITY)
    }
}
|
2015-08-21 15:53:47 -05:00
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
impl LateLintPass for TypeComplexityPass {
    // Every place a type can be written is visited and handed to
    // `check_type` (directly, or via `check_fndecl` for signatures).

    fn check_fn(&mut self, cx: &LateContext, _: FnKind, decl: &FnDecl, _: &Block, _: Span, _: NodeId) {
        check_fndecl(cx, decl);
    }

    fn check_struct_field(&mut self, cx: &LateContext, field: &StructField) {
        check_type(cx, &field.node.ty);
    }

    fn check_variant(&mut self, cx: &LateContext, var: &Variant, _: &Generics) {
        // StructVariant is covered by check_struct_field
        if let VariantData::Tuple(ref args, _) = *var.node.data {
            for arg in args {
                check_type(cx, &arg.node.ty);
            }
        }
    }

    fn check_item(&mut self, cx: &LateContext, item: &Item) {
        match item.node {
            ItemStatic(ref ty, _, _) |
            ItemConst(ref ty, _) => check_type(cx, ty),
            // functions, enums, structs, impls and traits are covered
            _ => ()
        }
    }

    fn check_trait_item(&mut self, cx: &LateContext, item: &TraitItem) {
        match item.node {
            ConstTraitItem(ref ty, _) |
            TypeTraitItem(_, Some(ref ty)) => check_type(cx, ty),
            MethodTraitItem(MethodSig { ref decl, .. }, None) => check_fndecl(cx, decl),
            // methods with default impl are covered by check_fn
            _ => ()
        }
    }

    fn check_impl_item(&mut self, cx: &LateContext, item: &ImplItem) {
        match item.node {
            ConstImplItem(ref ty, _) |
            TypeImplItem(ref ty) => check_type(cx, ty),
            // methods are covered by check_fn
            _ => ()
        }
    }

    fn check_local(&mut self, cx: &LateContext, local: &Local) {
        // only `let` bindings with an explicit type annotation
        if let Some(ref ty) = local.ty {
            check_type(cx, ty);
        }
    }
}
|
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
fn check_fndecl(cx: &LateContext, decl: &FnDecl) {
|
2015-08-21 15:53:47 -05:00
|
|
|
for arg in &decl.inputs {
|
2015-08-25 07:41:35 -05:00
|
|
|
check_type(cx, &arg.ty);
|
2015-08-21 15:53:47 -05:00
|
|
|
}
|
|
|
|
if let Return(ref ty) = decl.output {
|
|
|
|
check_type(cx, ty);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-09-18 21:53:04 -05:00
|
|
|
fn check_type(cx: &LateContext, ty: &Ty) {
|
2015-09-06 03:53:55 -05:00
|
|
|
if in_macro(cx, ty.span) { return; }
|
2015-08-21 15:53:47 -05:00
|
|
|
let score = {
|
|
|
|
let mut visitor = TypeComplexityVisitor { score: 0, nest: 1 };
|
|
|
|
visitor.visit_ty(ty);
|
|
|
|
visitor.score
|
|
|
|
};
|
|
|
|
// println!("{:?} --> {}", ty, score);
|
|
|
|
if score > 250 {
|
|
|
|
span_lint(cx, TYPE_COMPLEXITY, ty.span, &format!(
|
|
|
|
"very complex type used. Consider factoring parts into `type` definitions"));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Walks a type and assigns a complexity score to it.
///
/// Each visited type node adds to `score` an amount weighted by the current
/// `nest` level (see the `Visitor` impl), so deeply nested types score higher.
struct TypeComplexityVisitor {
    /// total complexity score of the type
    score: u32,
    /// current nesting level
    nest: u32,
}
|
|
|
|
|
|
|
|
impl<'v> Visitor<'v> for TypeComplexityVisitor {
|
2015-09-03 09:42:17 -05:00
|
|
|
fn visit_ty(&mut self, ty: &'v Ty) {
|
2015-08-21 15:53:47 -05:00
|
|
|
let (add_score, sub_nest) = match ty.node {
|
|
|
|
// _, &x and *x have only small overhead; don't mess with nesting level
|
|
|
|
TyInfer |
|
|
|
|
TyPtr(..) |
|
|
|
|
TyRptr(..) => (1, 0),
|
|
|
|
|
|
|
|
// the "normal" components of a type: named types, arrays/tuples
|
|
|
|
TyPath(..) |
|
|
|
|
TyVec(..) |
|
|
|
|
TyTup(..) |
|
|
|
|
TyFixedLengthVec(..) => (10 * self.nest, 1),
|
|
|
|
|
|
|
|
// "Sum" of trait bounds
|
|
|
|
TyObjectSum(..) => (20 * self.nest, 0),
|
|
|
|
|
|
|
|
// function types and "for<...>" bring a lot of overhead
|
|
|
|
TyBareFn(..) |
|
|
|
|
TyPolyTraitRef(..) => (50 * self.nest, 1),
|
|
|
|
|
|
|
|
_ => (0, 0)
|
|
|
|
};
|
|
|
|
self.score += add_score;
|
|
|
|
self.nest += sub_nest;
|
|
|
|
walk_ty(self, ty);
|
|
|
|
self.nest -= sub_nest;
|
|
|
|
}
|
|
|
|
}
|