// clippy_lints/src/escape.rs
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::contains_ty;
use rustc_hir::intravisit;
use rustc_hir::{self, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, TraitRef, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::symbol::kw;
use rustc_target::spec::abi::Abi;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
/// Lint-pass state for [`BOXED_LOCAL`]: carries the configured size threshold
/// used to decide when a type is "too large for the stack".
#[derive(Copy, Clone)]
pub struct BoxedLocal {
    /// Boxed types whose pointee size (in bytes) exceeds this threshold are
    /// left alone, since keeping them on the stack could overflow it.
    pub too_large_for_stack: u64,
}
declare_clippy_lint! {
/// ### What it does
/// Checks for usage of `Box<T>` where an unboxed `T` would
/// work fine.
///
/// ### Why is this bad?
/// This is an unnecessary allocation, and bad for
/// performance. It is only necessary to allocate if you wish to move the box
/// into something.
///
/// ### Example
/// ```rust
/// # fn foo(bar: usize) {}
/// // Bad
/// let x = Box::new(1);
/// foo(*x);
/// println!("{}", *x);
///
/// // Good
/// let x = 1;
/// foo(x);
/// println!("{}", x);
/// ```
pub BOXED_LOCAL,
2018-03-28 08:24:26 -05:00
perf,
"using `Box<T>` where unnecessary"
}
2015-12-04 04:12:53 -06:00
2018-07-23 06:01:12 -05:00
fn is_non_trait_box(ty: Ty<'_>) -> bool {
2017-02-03 04:52:13 -06:00
ty.is_box() && !ty.boxed_ty().is_trait()
2015-12-28 08:12:57 -06:00
}
struct EscapeDelegate<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
2019-03-01 06:26:06 -06:00
set: HirIdSet,
2021-07-17 09:43:23 -05:00
trait_self_ty: Option<Ty<'tcx>>,
2016-07-10 08:23:50 -05:00
too_large_for_stack: u64,
2015-12-04 04:12:53 -06:00
}
2019-04-08 15:43:55 -05:00
impl_lint_pass!(BoxedLocal => [BOXED_LOCAL]);
impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
fn_kind: intravisit::FnKind<'tcx>,
2019-12-29 22:02:10 -06:00
_: &'tcx FnDecl<'_>,
2019-12-22 08:42:41 -06:00
body: &'tcx Body<'_>,
_: Span,
2019-02-20 04:11:11 -06:00
hir_id: HirId,
) {
if let Some(header) = fn_kind.header() {
if header.abi != Abi::Rust {
return;
}
}
2019-02-20 04:11:11 -06:00
let parent_id = cx.tcx.hir().get_parent_item(hir_id);
2019-06-25 16:34:07 -05:00
let parent_node = cx.tcx.hir().find(parent_id);
let mut trait_self_ty = None;
if let Some(Node::Item(item)) = parent_node {
// If the method is an impl for a trait, don't warn.
if let ItemKind::Impl(Impl { of_trait: Some(_), .. }) = item.kind {
return;
}
// find `self` ty for this trait if relevant
if let ItemKind::Trait(_, _, _, _, items) = item.kind {
for trait_item in items {
if trait_item.id.hir_id() == hir_id {
// be sure we have `self` parameter in this function
if let AssocItemKind::Fn { has_self: true } = trait_item.kind {
trait_self_ty =
2021-09-15 23:12:56 -05:00
Some(TraitRef::identity(cx.tcx, trait_item.id.def_id.to_def_id()).self_ty().skip_binder());
}
}
}
}
}
2015-12-04 04:12:53 -06:00
let mut v = EscapeDelegate {
cx,
2019-03-01 06:26:06 -06:00
set: HirIdSet::default(),
trait_self_ty,
2016-07-10 08:23:50 -05:00
too_large_for_stack: self.too_large_for_stack,
2015-12-04 04:12:53 -06:00
};
let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
2019-11-29 04:12:19 -06:00
cx.tcx.infer_ctxt().enter(|infcx| {
2020-07-17 03:47:04 -05:00
ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body);
2019-11-29 04:12:19 -06:00
});
for node in v.set {
2017-08-09 02:30:56 -05:00
span_lint(
cx,
BOXED_LOCAL,
cx.tcx.hir().span(node),
2017-08-09 02:30:56 -05:00
"local variable doesn't need to be boxed here",
);
2015-12-04 04:12:53 -06:00
}
}
}
// TODO: Replace with Map::is_argument(..) when it's fixed
/// Returns `true` when `id` is a binding whose parent node is a function
/// parameter, i.e. the binding introduced by an argument pattern.
fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
    if !matches!(map.find(id), Some(Node::Binding(_))) {
        return false;
    }
    matches!(map.find(map.get_parent_node(id)), Some(Node::Param(_)))
}
impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
fn consume(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
if let PlaceBase::Local(lid) = cmt.place.base {
self.set.remove(&lid);
let map = &self.cx.tcx.hir();
if let Some(Node::Binding(_)) = map.find(cmt.hir_id) {
if self.set.contains(&lid) {
// let y = x where x is known
// remove x, insert y
self.set.insert(cmt.hir_id);
self.set.remove(&lid);
}
}
2015-12-04 04:12:53 -06:00
}
}
}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {
if cmt.place.projections.is_empty() {
if let PlaceBase::Local(lid) = cmt.place.base {
self.set.remove(&lid);
}
2015-12-04 04:12:53 -06:00
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
let map = &self.cx.tcx.hir();
if is_argument(*map, cmt.hir_id) {
// Skip closure arguments
let parent_id = map.get_parent_node(cmt.hir_id);
if let Some(Node::Expr(..)) = map.find(map.get_parent_node(parent_id)) {
return;
}
2019-10-06 07:49:26 -05:00
// skip if there is a `self` parameter binding to a type
// that contains `Self` (i.e.: `self: Box<Self>`), see #4804
if let Some(trait_self_ty) = self.trait_self_ty {
if map.name(cmt.hir_id) == kw::SelfLower && contains_ty(self.cx.tcx, cmt.place.ty(), trait_self_ty)
{
return;
}
}
if is_non_trait_box(cmt.place.ty()) && !self.is_large_box(cmt.place.ty()) {
self.set.insert(cmt.hir_id);
}
2019-10-06 07:49:26 -05:00
}
}
}
2021-02-25 14:33:18 -06:00
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
2015-12-04 04:12:53 -06:00
}
2016-07-10 08:23:50 -05:00
impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
2019-01-30 19:15:29 -06:00
// Large types need to be boxed to avoid stack overflows.
2017-02-03 04:52:13 -06:00
if ty.is_box() {
2019-11-26 08:14:28 -06:00
self.cx.layout_of(ty.boxed_ty()).map_or(0, |l| l.size.bytes()) > self.too_large_for_stack
} else {
false
2016-07-10 08:23:50 -05:00
}
}
}