2021-10-14 05:16:35 -05:00
|
|
|
|
use clippy_utils::attrs::is_doc_hidden;
|
2021-07-01 11:17:38 -05:00
|
|
|
|
use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_note};
|
|
|
|
|
use clippy_utils::source::first_line_of_span;
|
2021-03-25 13:29:11 -05:00
|
|
|
|
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
|
|
|
|
|
use clippy_utils::{is_entrypoint_fn, is_expn_of, match_panic_def_id, method_chain_args, return_ty};
|
2020-02-17 05:18:00 -06:00
|
|
|
|
use if_chain::if_chain;
|
2018-11-27 14:14:15 -06:00
|
|
|
|
use itertools::Itertools;
|
2021-01-29 01:31:08 -06:00
|
|
|
|
use rustc_ast::ast::{Async, AttrKind, Attribute, FnKind, FnRetTy, ItemKind};
|
2020-07-22 09:59:17 -05:00
|
|
|
|
use rustc_ast::token::CommentKind;
|
2018-12-29 11:07:10 -06:00
|
|
|
|
use rustc_data_structures::fx::FxHashSet;
|
2020-08-28 09:10:16 -05:00
|
|
|
|
use rustc_data_structures::sync::Lrc;
|
|
|
|
|
use rustc_errors::emitter::EmitterWriter;
|
|
|
|
|
use rustc_errors::Handler;
|
2020-01-06 10:39:50 -06:00
|
|
|
|
use rustc_hir as hir;
|
2021-02-02 22:43:30 -06:00
|
|
|
|
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
|
2021-04-08 10:50:13 -05:00
|
|
|
|
use rustc_hir::{AnonConst, Expr, ExprKind, QPath};
|
2020-01-12 00:08:41 -06:00
|
|
|
|
use rustc_lint::{LateContext, LateLintPass};
|
2021-02-02 22:43:30 -06:00
|
|
|
|
use rustc_middle::hir::map::Map;
|
2020-03-30 04:02:14 -05:00
|
|
|
|
use rustc_middle::lint::in_external_macro;
|
|
|
|
|
use rustc_middle::ty;
|
2020-08-28 09:10:16 -05:00
|
|
|
|
use rustc_parse::maybe_new_parser_from_source_str;
|
2021-01-18 15:47:37 -06:00
|
|
|
|
use rustc_parse::parser::ForceCollect;
|
2020-08-28 09:10:16 -05:00
|
|
|
|
use rustc_session::parse::ParseSess;
|
2020-01-11 05:37:08 -06:00
|
|
|
|
use rustc_session::{declare_tool_lint, impl_lint_pass};
|
2021-07-28 17:07:32 -05:00
|
|
|
|
use rustc_span::def_id::LocalDefId;
|
2020-12-20 10:19:49 -06:00
|
|
|
|
use rustc_span::edition::Edition;
|
2020-08-28 09:10:16 -05:00
|
|
|
|
use rustc_span::source_map::{BytePos, FilePathMapping, MultiSpan, SourceMap, Span};
|
2020-11-05 07:29:48 -06:00
|
|
|
|
use rustc_span::{sym, FileName, Pos};
|
2020-08-28 09:10:16 -05:00
|
|
|
|
use std::io;
|
2019-04-24 00:47:01 -05:00
|
|
|
|
use std::ops::Range;
|
2021-05-05 14:31:25 -05:00
|
|
|
|
use std::thread;
|
2017-06-19 14:23:50 -05:00
|
|
|
|
use url::Url;
|
2016-03-19 11:59:12 -05:00
|
|
|
|
|
2018-03-28 08:24:26 -05:00
|
|
|
|
declare_clippy_lint! {
    /// ### What it does
    /// Checks for the presence of `_`, `::` or camel-case words
    /// outside ticks in documentation.
    ///
    /// ### Why is this bad?
    /// *Rustdoc* supports markdown formatting, `_`, `::` and
    /// camel-case probably indicates some code which should be included between
    /// ticks. `_` can also be used for emphasis in markdown, this lint tries to
    /// consider that.
    ///
    /// ### Known problems
    /// Lots of bad docs won’t be fixed, what the lint checks
    /// for is limited, and there are still false positives. HTML elements and their
    /// content are not linted.
    ///
    /// In addition, when writing documentation comments, including `[]` brackets
    /// inside a link text would trip the parser. Therefore, documenting link with
    /// `[`SmallVec<[T; INLINE_CAPACITY]>`]` and then [`SmallVec<[T; INLINE_CAPACITY]>`]: SmallVec
    /// would fail.
    ///
    /// ### Examples
    /// ```rust
    /// /// Do something with the foo_bar parameter. See also
    /// /// that::other::module::foo.
    /// // ^ `foo_bar` and `that::other::module::foo` should be ticked.
    /// fn doit(foo_bar: usize) {}
    /// ```
    ///
    /// ```rust
    /// // Link text with `[]` brackets should be written as following:
    /// /// Consume the array and return the inner
    /// /// [`SmallVec<[T; INLINE_CAPACITY]>`][SmallVec].
    /// /// [SmallVec]: SmallVec
    /// fn main() {}
    /// ```
    pub DOC_MARKDOWN,
    pedantic,
    "presence of `_`, `::` or camel-case outside backticks in documentation"
}
|
|
|
|
|
|
2019-09-11 11:39:02 -05:00
|
|
|
|
declare_clippy_lint! {
    /// ### What it does
    /// Checks for the doc comments of publicly visible
    /// unsafe functions and warns if there is no `# Safety` section.
    ///
    /// ### Why is this bad?
    /// Unsafe functions should document their safety
    /// preconditions, so that users can be sure they are using them safely.
    ///
    /// ### Examples
    /// ```rust
    ///# type Universe = ();
    /// /// This function should really be documented
    /// pub unsafe fn start_apocalypse(u: &mut Universe) {
    ///     unimplemented!();
    /// }
    /// ```
    ///
    /// At least write a line about safety:
    ///
    /// ```rust
    ///# type Universe = ();
    /// /// # Safety
    /// ///
    /// /// This function should not be called before the horsemen are ready.
    /// pub unsafe fn start_apocalypse(u: &mut Universe) {
    ///     unimplemented!();
    /// }
    /// ```
    pub MISSING_SAFETY_DOC,
    style,
    "`pub unsafe fn` without `# Safety` docs"
}
|
|
|
|
|
|
2019-12-05 18:00:23 -06:00
|
|
|
|
declare_clippy_lint! {
    /// ### What it does
    /// Checks the doc comments of publicly visible functions that
    /// return a `Result` type and warns if there is no `# Errors` section.
    ///
    /// ### Why is this bad?
    /// Documenting the type of errors that can be returned from a
    /// function can help callers write code to handle the errors appropriately.
    ///
    /// ### Examples
    /// Since the following function returns a `Result` it has an `# Errors` section in
    /// its doc comment:
    ///
    /// ```rust
    ///# use std::io;
    /// /// # Errors
    /// ///
    /// /// Will return `Err` if `filename` does not exist or the user does not have
    /// /// permission to read it.
    /// pub fn read(filename: String) -> io::Result<String> {
    ///     unimplemented!();
    /// }
    /// ```
    pub MISSING_ERRORS_DOC,
    pedantic,
    "`pub fn` returns `Result` without `# Errors` in doc comment"
}
|
|
|
|
|
|
2021-02-02 22:43:30 -06:00
|
|
|
|
declare_clippy_lint! {
    /// ### What it does
    /// Checks the doc comments of publicly visible functions that
    /// may panic and warns if there is no `# Panics` section.
    ///
    /// ### Why is this bad?
    /// Documenting the scenarios in which panicking occurs
    /// can help callers who do not want to panic to avoid those situations.
    ///
    /// ### Examples
    /// Since the following function may panic it has a `# Panics` section in
    /// its doc comment:
    ///
    /// ```rust
    /// /// # Panics
    /// ///
    /// /// Will panic if y is 0
    /// pub fn divide_by(x: i32, y: i32) -> i32 {
    ///     if y == 0 {
    ///         panic!("Cannot divide by 0")
    ///     } else {
    ///         x / y
    ///     }
    /// }
    /// ```
    pub MISSING_PANICS_DOC,
    pedantic,
    "`pub fn` may panic without `# Panics` in doc comment"
}
|
|
|
|
|
|
2019-09-30 17:10:24 -05:00
|
|
|
|
declare_clippy_lint! {
    /// ### What it does
    /// Checks for `fn main() { .. }` in doctests
    ///
    /// ### Why is this bad?
    /// The test can be shorter (and likely more readable)
    /// if the `fn main()` is left implicit.
    ///
    /// ### Examples
    /// ``````rust
    /// /// An example of a doctest with a `main()` function
    /// ///
    /// /// # Examples
    /// ///
    /// /// ```
    /// /// fn main() {
    /// ///     // this needs not be in an `fn`
    /// /// }
    /// /// ```
    /// fn needless_main() {
    ///     unimplemented!();
    /// }
    /// ``````
    pub NEEDLESS_DOCTEST_MAIN,
    style,
    "presence of `fn main() {` in code examples"
}
|
|
|
|
|
|
2019-04-08 15:43:55 -05:00
|
|
|
|
/// Lint pass backing [`DOC_MARKDOWN`], [`MISSING_SAFETY_DOC`],
/// [`MISSING_ERRORS_DOC`], [`MISSING_PANICS_DOC`] and [`NEEDLESS_DOCTEST_MAIN`].
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
pub struct DocMarkdown {
    // Identifiers (from configuration) that are allowed to appear
    // un-backticked in doc comments.
    valid_idents: FxHashSet<String>,
    // True while visiting the items of a trait `impl`; those items are
    // skipped because the docs belong on the trait definition.
    in_trait_impl: bool,
}

impl DocMarkdown {
    /// Creates the pass with the configured set of allowed identifiers.
    pub fn new(valid_idents: FxHashSet<String>) -> Self {
        Self {
            valid_idents,
            in_trait_impl: false,
        }
    }
}

impl_lint_pass!(DocMarkdown =>
    [DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, NEEDLESS_DOCTEST_MAIN]
);
|
2016-03-19 11:59:12 -05:00
|
|
|
|
|
2020-06-25 15:41:36 -05:00
|
|
|
|
impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
    /// Checks the crate-level (`//!`) doc comment.
    fn check_crate(&mut self, cx: &LateContext<'tcx>) {
        let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
        check_attrs(cx, &self.valid_idents, attrs);
    }

    /// Checks free items: fns get the full header analysis, impls toggle the
    /// trait-impl flag, and unsafe traits are checked for a `# Safety` section.
    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
        let attrs = cx.tcx.hir().attrs(item.hir_id());
        let headers = check_attrs(cx, &self.valid_idents, attrs);
        match item.kind {
            hir::ItemKind::Fn(ref sig, _, body_id) => {
                // The crate entrypoint and macro-expanded items are exempt.
                if !(is_entrypoint_fn(cx, item.def_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
                    let body = cx.tcx.hir().body(body_id);
                    // Walk the body looking for a possible panic site, so a
                    // missing `# Panics` section can be reported with a note.
                    let mut fpu = FindPanicUnwrap {
                        cx,
                        typeck_results: cx.tcx.typeck(item.def_id),
                        panic_span: None,
                    };
                    fpu.visit_expr(&body.value);
                    lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, Some(body_id), fpu.panic_span);
                }
            },
            hir::ItemKind::Impl(ref impl_) => {
                // Items inside a trait impl inherit docs from the trait
                // definition; remember that so `check_impl_item` can skip them.
                self.in_trait_impl = impl_.of_trait.is_some();
            },
            hir::ItemKind::Trait(_, unsafety, ..) => {
                // An `unsafe trait` needs a `# Safety` section just like an
                // `unsafe fn`.
                if !headers.safety && unsafety == hir::Unsafety::Unsafe {
                    span_lint(
                        cx,
                        MISSING_SAFETY_DOC,
                        item.span,
                        "docs for unsafe trait missing `# Safety` section",
                    );
                }
            },
            _ => (),
        }
    }

    /// Resets the trait-impl flag when leaving an `impl` block.
    fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
        if let hir::ItemKind::Impl { .. } = item.kind {
            self.in_trait_impl = false;
        }
    }

    /// Checks trait-method declarations. No body is available here, so no
    /// panic scan is performed (`body_id`/`panic_span` are `None`).
    fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
        let attrs = cx.tcx.hir().attrs(item.hir_id());
        let headers = check_attrs(cx, &self.valid_idents, attrs);
        if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
            if !in_external_macro(cx.tcx.sess, item.span) {
                lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, None, None);
            }
        }
    }

    /// Checks inherent-impl methods; trait-impl methods are skipped because
    /// their docs live on the trait definition.
    fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
        let attrs = cx.tcx.hir().attrs(item.hir_id());
        let headers = check_attrs(cx, &self.valid_idents, attrs);
        if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) {
            return;
        }
        if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind {
            let body = cx.tcx.hir().body(body_id);
            // Same panic scan as for free functions in `check_item`.
            let mut fpu = FindPanicUnwrap {
                cx,
                typeck_results: cx.tcx.typeck(item.def_id),
                panic_span: None,
            };
            fpu.visit_expr(&body.value);
            lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, Some(body_id), fpu.panic_span);
        }
    }
}
|
|
|
|
|
|
2020-06-25 15:41:36 -05:00
|
|
|
|
/// Emits the `MISSING_SAFETY_DOC`, `MISSING_PANICS_DOC` and
/// `MISSING_ERRORS_DOC` lints for a function whose doc headers have already
/// been collected into `headers`.
///
/// `body_id` is `Some` for functions with a body (used to detect `async fn`s
/// returning `Result`); `panic_span` points at the first possible panic found
/// in the body, if any.
fn lint_for_missing_headers<'tcx>(
    cx: &LateContext<'tcx>,
    def_id: LocalDefId,
    span: impl Into<MultiSpan> + Copy,
    sig: &hir::FnSig<'_>,
    headers: DocHeaders,
    body_id: Option<hir::BodyId>,
    panic_span: Option<Span>,
) {
    if !cx.access_levels.is_exported(def_id) {
        return; // Private functions do not require doc comments
    }

    // do not lint if any parent has `#[doc(hidden)]` attribute (#7347)
    if cx
        .tcx
        .hir()
        .parent_iter(cx.tcx.hir().local_def_id_to_hir_id(def_id))
        .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id)))
    {
        return;
    }

    if !headers.safety && sig.header.unsafety == hir::Unsafety::Unsafe {
        span_lint(
            cx,
            MISSING_SAFETY_DOC,
            span,
            "unsafe function's docs miss `# Safety` section",
        );
    }
    if !headers.panics && panic_span.is_some() {
        span_lint_and_note(
            cx,
            MISSING_PANICS_DOC,
            span,
            "docs for function which may panic missing `# Panics` section",
            panic_span,
            "first possible panic found here",
        );
    }
    if !headers.errors {
        let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
        if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) {
            // Plain `fn ... -> Result<..>`.
            span_lint(
                cx,
                MISSING_ERRORS_DOC,
                span,
                "docs for function returning `Result` missing `# Errors` section",
            );
        } else {
            // `async fn`s desugar to `impl Future<Output = ..>`; dig through
            // the opaque type and its generator to find the real return type.
            if_chain! {
                if let Some(body_id) = body_id;
                if let Some(future) = cx.tcx.lang_items().future_trait();
                let typeck = cx.tcx.typeck_body(body_id);
                let body = cx.tcx.hir().body(body_id);
                let ret_ty = typeck.expr_ty(&body.value);
                if implements_trait(cx, ret_ty, future, &[]);
                if let ty::Opaque(_, subs) = ret_ty.kind();
                if let Some(gen) = subs.types().next();
                if let ty::Generator(_, subs, _) = gen.kind();
                if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym::Result);
                then {
                    span_lint(
                        cx,
                        MISSING_ERRORS_DOC,
                        span,
                        "docs for function returning `Result` missing `# Errors` section",
                    );
                }
            }
        }
    }
}
|
|
|
|
|
|
2020-07-22 09:59:17 -05:00
|
|
|
|
/// Cleanup documentation decoration.
///
/// We can't use `rustc_ast::attr::AttributeMethods::with_desugared_doc` or
/// `rustc_ast::parse::lexer::comments::strip_doc_comment_decoration` because we
/// need to keep track of
/// the spans but this function is inspired from the latter.
///
/// Returns the stripped doc text together with, for each source line, its
/// length in the stripped text and the span it came from.
#[allow(clippy::cast_possible_truncation)]
#[must_use]
pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: Span) -> (String, Vec<(usize, Span)>) {
    // one-line comments lose their prefix
    if comment_kind == CommentKind::Line {
        let mut doc = doc.to_owned();
        doc.push('\n');
        let len = doc.len();
        // +3 skips the opening delimiter
        return (doc, vec![(len, span.with_lo(span.lo() + BytePos(3)))]);
    }

    // Block comments (`/** .. */`): record one (length, span) entry per line.
    let mut sizes = vec![];
    let mut contains_initial_stars = false;
    for line in doc.lines() {
        // Byte offset of this line within `doc` (pointer arithmetic because
        // `lines()` yields subslices of `doc`).
        let offset = line.as_ptr() as usize - doc.as_ptr() as usize;
        debug_assert_eq!(offset as u32 as usize, offset);
        contains_initial_stars |= line.trim_start().starts_with('*');
        // +1 adds the newline, +3 skips the opening delimiter
        sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(3 + offset as u32))));
    }
    if !contains_initial_stars {
        return (doc.to_string(), sizes);
    }
    // remove the initial '*'s if any
    let mut no_stars = String::with_capacity(doc.len());
    for line in doc.lines() {
        let mut chars = line.chars();
        for c in &mut chars {
            if c.is_whitespace() {
                // Preserve leading whitespace so spans keep lining up.
                no_stars.push(c);
            } else {
                // Replace a leading '*' with a space (keeps byte offsets
                // stable); any other first character is kept as-is.
                no_stars.push(if c == '*' { ' ' } else { c });
                break;
            }
        }
        no_stars.push_str(chars.as_str());
        no_stars.push('\n');
    }

    (no_stars, sizes)
}
|
|
|
|
|
|
2019-12-05 18:00:23 -06:00
|
|
|
|
/// Which of the required doc headers were found while scanning a doc comment.
#[derive(Copy, Clone)]
struct DocHeaders {
    // `# Safety` (or an "Implementation safety" variant) heading seen.
    safety: bool,
    // `# Errors` heading seen.
    errors: bool,
    // `# Panics` heading seen.
    panics: bool,
}
|
|
|
|
|
|
2020-06-25 15:41:36 -05:00
|
|
|
|
/// Collects all doc comments from `attrs`, parses them as markdown and runs
/// the doc checks, returning which of the required headers were found.
fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &'a [Attribute]) -> DocHeaders {
    use pulldown_cmark::{BrokenLink, CowStr, Options};
    /// We don't want the parser to choke on intra doc links. Since we don't
    /// actually care about rendering them, just pretend that all broken links
    /// point to a fake address.
    #[allow(clippy::unnecessary_wraps)] // we're following a type signature
    fn fake_broken_link_callback<'a>(_: BrokenLink<'_>) -> Option<(CowStr<'a>, CowStr<'a>)> {
        Some(("fake".into(), "fake".into()))
    }

    let mut doc = String::new();
    let mut spans = vec![];

    for attr in attrs {
        if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
            let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span);
            spans.extend_from_slice(&current_spans);
            doc.push_str(&comment);
        } else if attr.has_name(sym::doc) {
            // ignore mix of sugared and non-sugared doc
            // don't trigger the safety or errors check
            return DocHeaders {
                safety: true,
                errors: true,
                panics: true,
            };
        }
    }

    // Convert per-line lengths into cumulative byte offsets into `doc`.
    let mut current = 0;
    for &mut (ref mut offset, _) in &mut spans {
        let offset_copy = *offset;
        *offset = current;
        current += offset_copy;
    }

    if doc.is_empty() {
        return DocHeaders {
            safety: false,
            errors: false,
            panics: false,
        };
    }

    let mut cb = fake_broken_link_callback;

    let parser =
        pulldown_cmark::Parser::new_with_broken_link_callback(&doc, Options::empty(), Some(&mut cb)).into_offset_iter();
    // Iterate over all `Events` and combine consecutive events into one
    let events = parser.coalesce(|previous, current| {
        use pulldown_cmark::Event::Text;

        let previous_range = previous.1;
        let current_range = current.1;

        match (previous.0, current.0) {
            (Text(previous), Text(current)) => {
                let mut previous = previous.to_string();
                previous.push_str(&current);
                Ok((Text(previous.into()), previous_range))
            },
            (previous, current) => Err(((previous, previous_range), (current, current_range))),
        }
    });
    check_doc(cx, valid_idents, events, &spans)
}
|
2016-05-26 15:53:38 -05:00
|
|
|
|
|
2020-12-20 10:19:49 -06:00
|
|
|
|
// Fenced code-block language tags that mark a doctest as Rust code.
const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"];
|
2020-03-10 14:19:37 -05:00
|
|
|
|
|
2019-04-24 00:47:01 -05:00
|
|
|
|
/// Walks the markdown event stream of a doc comment, linting text and Rust
/// code blocks, and returns which of the required headers were found.
fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
    cx: &LateContext<'_>,
    valid_idents: &FxHashSet<String>,
    events: Events,
    spans: &[(usize, Span)],
) -> DocHeaders {
    use pulldown_cmark::Event::{
        Code, End, FootnoteReference, HardBreak, Html, Rule, SoftBreak, Start, TaskListMarker, Text,
    };
    use pulldown_cmark::Tag::{CodeBlock, Heading, Item, Link, Paragraph};
    use pulldown_cmark::{CodeBlockKind, CowStr};

    let mut headers = DocHeaders {
        safety: false,
        errors: false,
        panics: false,
    };
    // Parser state tracked across events:
    let mut in_code = false;       // inside a fenced/indented code block
    let mut in_link = None;        // URL of the link currently being traversed
    let mut in_heading = false;    // inside a heading (where `# Safety` etc. live)
    let mut is_rust = false;       // current code block should be treated as Rust
    let mut edition = None;        // edition from the code fence, e.g. ```edition2018
    let mut ticks_unbalanced = false;
    // Text events buffered until the end of the paragraph/heading/item, so the
    // whole unit can be skipped when its backticks are unbalanced.
    let mut text_to_check: Vec<(CowStr<'_>, Span)> = Vec::new();
    let mut paragraph_span = spans.get(0).expect("function isn't called if doc comment is empty").1;
    for (event, range) in events {
        match event {
            Start(CodeBlock(ref kind)) => {
                in_code = true;
                if let CodeBlockKind::Fenced(lang) = kind {
                    // The fence info string is a comma-separated tag list,
                    // e.g. ```rust,no_run or ```edition2018.
                    for item in lang.split(',') {
                        if item == "ignore" {
                            is_rust = false;
                            break;
                        }
                        if let Some(stripped) = item.strip_prefix("edition") {
                            is_rust = true;
                            edition = stripped.parse::<Edition>().ok();
                        } else if item.is_empty() || RUST_CODE.contains(&item) {
                            is_rust = true;
                        }
                    }
                }
            },
            End(CodeBlock(_)) => {
                in_code = false;
                is_rust = false;
            },
            Start(Link(_, url, _)) => in_link = Some(url),
            End(Link(..)) => in_link = None,
            Start(Heading(_) | Paragraph | Item) => {
                if let Start(Heading(_)) = event {
                    in_heading = true;
                }
                // Backtick balance is tracked per paragraph/heading/item.
                ticks_unbalanced = false;
                let (_, span) = get_current_span(spans, range.start);
                paragraph_span = first_line_of_span(cx, span);
            },
            End(Heading(_) | Paragraph | Item) => {
                if let End(Heading(_)) = event {
                    in_heading = false;
                }
                if ticks_unbalanced {
                    span_lint_and_help(
                        cx,
                        DOC_MARKDOWN,
                        paragraph_span,
                        "backticks are unbalanced",
                        None,
                        "a backtick may be missing a pair",
                    );
                } else {
                    // Balanced: run the markdown checks on the buffered text.
                    for (text, span) in text_to_check {
                        check_text(cx, valid_idents, &text, span);
                    }
                }
                text_to_check = Vec::new();
            },
            Start(_tag) | End(_tag) => (), // We don't care about other tags
            Html(_html) => (),             // HTML is weird, just ignore it
            SoftBreak | HardBreak | TaskListMarker(_) | Code(_) | Rule => (),
            FootnoteReference(text) | Text(text) => {
                let (begin, span) = get_current_span(spans, range.start);
                paragraph_span = paragraph_span.with_hi(span.hi());
                ticks_unbalanced |= text.contains('`') && !in_code;
                if Some(&text) == in_link.as_ref() || ticks_unbalanced {
                    // Probably a link of the form `<http://example.com>`
                    // Which are represented as a link to "http://example.com" with
                    // text "http://example.com" by pulldown-cmark
                    continue;
                }
                let trimmed_text = text.trim();
                headers.safety |= in_heading && trimmed_text == "Safety";
                headers.safety |= in_heading && trimmed_text == "Implementation safety";
                headers.safety |= in_heading && trimmed_text == "Implementation Safety";
                headers.errors |= in_heading && trimmed_text == "Errors";
                headers.panics |= in_heading && trimmed_text == "Panics";
                if in_code {
                    if is_rust {
                        let edition = edition.unwrap_or_else(|| cx.tcx.sess.edition());
                        check_code(cx, &text, edition, span);
                    }
                } else {
                    // Adjust for the beginning of the current `Event`
                    let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin));
                    text_to_check.push((text, span));
                }
            },
        }
    }
    headers
}
|
2016-05-02 07:36:48 -05:00
|
|
|
|
|
2021-07-01 11:17:38 -05:00
|
|
|
|
/// Finds the `(byte offset, span)` entry covering position `idx` in the
/// concatenated doc text: the entry with the greatest offset that is `<= idx`.
fn get_current_span(spans: &[(usize, Span)], idx: usize) -> (usize, Span) {
    // On a miss, `binary_search_by_key` yields the insertion point, so the
    // covering entry is the one just before it.
    let index = spans
        .binary_search_by_key(&idx, |&(offset, _)| offset)
        .unwrap_or_else(|insertion_point| insertion_point - 1);
    spans[index]
}
|
|
|
|
|
|
2020-12-20 10:19:49 -06:00
|
|
|
|
/// Lints [`NEEDLESS_DOCTEST_MAIN`]: a doctest whose body is just a plain
/// `fn main() { ... }` wrapper that rustdoc would have inserted anyway.
///
/// `text` is the doctest source, `edition` the edition the doctest is compiled
/// under, and `span` the location of the doctest in the documentation comment.
fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) {
    /// Parses `code` with a throwaway parser session and reports whether the
    /// only meaningful item is a lintable `fn main`.
    ///
    /// Returns `false` (i.e. "don't lint") as soon as anything is found that
    /// makes the `main` wrapper necessary or the test non-trivial.
    fn has_needless_main(code: String, edition: Edition) -> bool {
        rustc_driver::catch_fatal_errors(|| {
            rustc_span::create_session_globals_then(edition, || {
                let filename = FileName::anon_source_code(&code);

                // Build a parser session that swallows all diagnostics
                // (`io::sink`): we only probe the structure, never report.
                let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
                let emitter = EmitterWriter::new(Box::new(io::sink()), None, false, false, false, None, false);
                let handler = Handler::with_emitter(false, None, Box::new(emitter));
                let sess = ParseSess::with_span_handler(handler, sm);

                // Cancel any lexer-level errors so they are not emitted later.
                let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
                    Ok(p) => p,
                    Err(errs) => {
                        for mut err in errs {
                            err.cancel();
                        }
                        return false;
                    },
                };

                let mut relevant_main_found = false;
                loop {
                    match parser.parse_item(ForceCollect::No) {
                        Ok(Some(item)) => match &item.kind {
                            // Tests with one of these items are ignored
                            ItemKind::Static(..)
                            | ItemKind::Const(..)
                            | ItemKind::ExternCrate(..)
                            | ItemKind::ForeignMod(..) => return false,
                            // We found a main function ...
                            ItemKind::Fn(box FnKind(_, sig, _, Some(block))) if item.ident.name == sym::main => {
                                let is_async = matches!(sig.header.asyncness, Async::Yes { .. });
                                // `fn main()` or `fn main() -> ()` — anything
                                // returning a real type (e.g. `Result`) needs
                                // the explicit wrapper and must not be linted.
                                let returns_nothing = match &sig.decl.output {
                                    FnRetTy::Default(..) => true,
                                    FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
                                    FnRetTy::Ty(_) => false,
                                };

                                if returns_nothing && !is_async && !block.stmts.is_empty() {
                                    // This main function should be linted, but only if there are no other functions
                                    relevant_main_found = true;
                                } else {
                                    // This main function should not be linted, we're done
                                    return false;
                                }
                            },
                            // Another function was found; this case is ignored too
                            ItemKind::Fn(..) => return false,
                            _ => {},
                        },
                        Ok(None) => break,
                        Err(mut e) => {
                            // Unparseable doctest: cancel the diagnostic and bail.
                            e.cancel();
                            return false;
                        },
                    }
                }

                relevant_main_found
            })
        })
        // A fatal error means we couldn't decide — treat as "don't lint".
        .ok()
        .unwrap_or_default()
    }

    // Because of the global session, we need to create a new session in a different thread with
    // the edition we need.
    let text = text.to_owned();
    if thread::spawn(move || has_needless_main(text, edition))
        .join()
        .expect("thread::spawn failed")
    {
        span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
    }
}
|
|
|
|
|
|
2020-06-25 15:41:36 -05:00
|
|
|
|
fn check_text(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
|
2018-12-07 04:48:06 -06:00
|
|
|
|
for word in text.split(|c: char| c.is_whitespace() || c == '\'') {
|
2017-05-28 17:12:43 -05:00
|
|
|
|
// Trim punctuation as in `some comment (see foo::bar).`
|
|
|
|
|
// ^^
|
|
|
|
|
// Or even as in `_foo bar_` which is emphasized.
|
|
|
|
|
let word = word.trim_matches(|c: char| !c.is_alphanumeric());
|
2016-05-05 14:42:59 -05:00
|
|
|
|
|
2018-12-29 11:07:10 -06:00
|
|
|
|
if valid_idents.contains(word) {
|
2017-05-28 17:12:43 -05:00
|
|
|
|
continue;
|
2016-03-19 11:59:12 -05:00
|
|
|
|
}
|
2016-05-26 15:53:38 -05:00
|
|
|
|
|
2017-05-29 17:11:08 -05:00
|
|
|
|
// Adjust for the current word
|
|
|
|
|
let offset = word.as_ptr() as usize - text.as_ptr() as usize;
|
2017-08-31 07:47:45 -05:00
|
|
|
|
let span = Span::new(
|
|
|
|
|
span.lo() + BytePos::from_usize(offset),
|
|
|
|
|
span.lo() + BytePos::from_usize(offset + word.len()),
|
|
|
|
|
span.ctxt(),
|
2021-04-18 07:27:04 -05:00
|
|
|
|
span.parent(),
|
2017-08-31 07:47:45 -05:00
|
|
|
|
);
|
2017-05-29 17:11:08 -05:00
|
|
|
|
|
2017-05-28 17:12:43 -05:00
|
|
|
|
check_word(cx, word, span);
|
|
|
|
|
}
|
2016-03-19 11:59:12 -05:00
|
|
|
|
}
|
|
|
|
|
|
2020-06-25 15:41:36 -05:00
|
|
|
|
fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
|
2019-01-30 19:15:29 -06:00
|
|
|
|
/// Checks if a string is camel-case, i.e., contains at least two uppercase
|
2019-03-10 12:19:47 -05:00
|
|
|
|
/// letters (`Clippy` is ok) and one lower-case letter (`NASA` is ok).
|
|
|
|
|
/// Plurals are also excluded (`IDs` is ok).
|
2016-03-19 11:59:12 -05:00
|
|
|
|
fn is_camel_case(s: &str) -> bool {
|
2016-04-04 13:18:17 -05:00
|
|
|
|
if s.starts_with(|c: char| c.is_digit(10)) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
2020-09-24 07:49:22 -05:00
|
|
|
|
let s = s.strip_suffix('s').unwrap_or(s);
|
2016-03-19 11:59:12 -05:00
|
|
|
|
|
2018-11-27 14:14:15 -06:00
|
|
|
|
s.chars().all(char::is_alphanumeric)
|
|
|
|
|
&& s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
|
2017-11-04 14:55:56 -05:00
|
|
|
|
&& s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
|
2016-03-19 11:59:12 -05:00
|
|
|
|
}
|
|
|
|
|
|
2016-03-28 11:00:24 -05:00
|
|
|
|
fn has_underscore(s: &str) -> bool {
|
|
|
|
|
s != "_" && !s.contains("\\_") && s.contains('_')
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-07 15:38:45 -06:00
|
|
|
|
fn has_hyphen(s: &str) -> bool {
|
|
|
|
|
s != "-" && s.contains('-')
|
|
|
|
|
}
|
|
|
|
|
|
2017-06-19 14:23:50 -05:00
|
|
|
|
if let Ok(url) = Url::parse(word) {
|
|
|
|
|
// try to get around the fact that `foo::bar` parses as a valid URL
|
|
|
|
|
if !url.cannot_be_a_base() {
|
2017-11-04 14:55:56 -05:00
|
|
|
|
span_lint(
|
|
|
|
|
cx,
|
|
|
|
|
DOC_MARKDOWN,
|
|
|
|
|
span,
|
|
|
|
|
"you should put bare URLs between `<`/`>` or make a proper Markdown link",
|
|
|
|
|
);
|
2017-06-19 14:23:50 -05:00
|
|
|
|
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-07 15:38:45 -06:00
|
|
|
|
// We assume that mixed-case words are not meant to be put inside bacticks. (Issue #2343)
|
|
|
|
|
if has_underscore(word) && has_hyphen(word) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-28 11:00:24 -05:00
|
|
|
|
if has_underscore(word) || word.contains("::") || is_camel_case(word) {
|
2017-08-09 02:30:56 -05:00
|
|
|
|
span_lint(
|
|
|
|
|
cx,
|
|
|
|
|
DOC_MARKDOWN,
|
|
|
|
|
span,
|
|
|
|
|
&format!("you should put `{}` between ticks in the documentation", word),
|
|
|
|
|
);
|
2016-03-19 11:59:12 -05:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-02-02 22:43:30 -06:00
|
|
|
|
|
|
|
|
|
/// HIR visitor that records the span of the first expression it sees that can
/// panic at runtime (panic-family calls, `assert_eq!`/`assert_ne!`, or
/// `.unwrap()` on `Option`/`Result`) — see its `Visitor` impl below.
struct FindPanicUnwrap<'a, 'tcx> {
    // Lint context, used for def-id and type lookups during the walk.
    cx: &'a LateContext<'tcx>,
    // Span of the first potentially-panicking expression found, if any;
    // once set, the visitor stops inspecting further expressions.
    panic_span: Option<Span>,
    // Typeck results for the body being walked; needed to resolve the
    // receiver type of `.unwrap()` calls.
    typeck_results: &'tcx ty::TypeckResults<'tcx>,
}
|
|
|
|
|
|
|
|
|
|
impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
|
|
|
|
|
type Map = Map<'tcx>;
|
|
|
|
|
|
|
|
|
|
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
|
|
|
|
|
if self.panic_span.is_some() {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// check for `begin_panic`
|
|
|
|
|
if_chain! {
|
2021-04-08 10:50:13 -05:00
|
|
|
|
if let ExprKind::Call(func_expr, _) = expr.kind;
|
|
|
|
|
if let ExprKind::Path(QPath::Resolved(_, path)) = func_expr.kind;
|
2021-02-02 22:43:30 -06:00
|
|
|
|
if let Some(path_def_id) = path.res.opt_def_id();
|
|
|
|
|
if match_panic_def_id(self.cx, path_def_id);
|
2021-02-25 04:25:22 -06:00
|
|
|
|
if is_expn_of(expr.span, "unreachable").is_none();
|
2021-04-08 10:50:13 -05:00
|
|
|
|
if !is_expn_of_debug_assertions(expr.span);
|
2021-02-02 22:43:30 -06:00
|
|
|
|
then {
|
|
|
|
|
self.panic_span = Some(expr.span);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-08 10:50:13 -05:00
|
|
|
|
// check for `assert_eq` or `assert_ne`
|
|
|
|
|
if is_expn_of(expr.span, "assert_eq").is_some() || is_expn_of(expr.span, "assert_ne").is_some() {
|
|
|
|
|
self.panic_span = Some(expr.span);
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-02 22:43:30 -06:00
|
|
|
|
// check for `unwrap`
|
|
|
|
|
if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
|
|
|
|
|
let reciever_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
|
2021-10-02 18:51:01 -05:00
|
|
|
|
if is_type_diagnostic_item(self.cx, reciever_ty, sym::Option)
|
|
|
|
|
|| is_type_diagnostic_item(self.cx, reciever_ty, sym::Result)
|
2021-02-02 22:43:30 -06:00
|
|
|
|
{
|
|
|
|
|
self.panic_span = Some(expr.span);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// and check sub-expressions
|
|
|
|
|
intravisit::walk_expr(self, expr);
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-08 10:50:13 -05:00
|
|
|
|
// Panics in const blocks will cause compilation to fail.
|
|
|
|
|
fn visit_anon_const(&mut self, _: &'tcx AnonConst) {}
|
|
|
|
|
|
2021-02-02 22:43:30 -06:00
|
|
|
|
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
|
|
|
|
|
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-04-08 10:50:13 -05:00
|
|
|
|
|
|
|
|
|
fn is_expn_of_debug_assertions(span: Span) -> bool {
|
|
|
|
|
const MACRO_NAMES: &[&str] = &["debug_assert", "debug_assert_eq", "debug_assert_ne"];
|
|
|
|
|
MACRO_NAMES.iter().any(|name| is_expn_of(span, name).is_some())
|
|
|
|
|
}
|