Auto merge of #5878 - flip1995:rustup, r=flip1995

Rustup

r? @ghost

changelog: none
bors 2020-08-08 17:28:34 +00:00
commit 3899d6001c
9 changed files with 52 additions and 81 deletions


@@ -2,6 +2,7 @@
 use if_chain::if_chain;
 use itertools::Itertools;
 use rustc_ast::ast::{AttrKind, Attribute};
+use rustc_ast::token::CommentKind;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_lint::{LateContext, LateLintPass};
@@ -249,7 +250,7 @@ fn lint_for_missing_headers<'tcx>(
     }
 }
 
-/// Cleanup documentation decoration (`///` and such).
+/// Cleanup documentation decoration.
 ///
 /// We can't use `rustc_ast::attr::AttributeMethods::with_desugared_doc` or
 /// `rustc_ast::parse::lexer::comments::strip_doc_comment_decoration` because we
@@ -257,54 +258,45 @@ fn lint_for_missing_headers<'tcx>(
 /// the spans but this function is inspired from the later.
 #[allow(clippy::cast_possible_truncation)]
 #[must_use]
-pub fn strip_doc_comment_decoration(comment: &str, span: Span) -> (String, Vec<(usize, Span)>) {
+pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: Span) -> (String, Vec<(usize, Span)>) {
     // one-line comments lose their prefix
-    const ONELINERS: &[&str] = &["///!", "///", "//!", "//"];
-    for prefix in ONELINERS {
-        if comment.starts_with(*prefix) {
-            let doc = &comment[prefix.len()..];
-            let mut doc = doc.to_owned();
-            doc.push('\n');
-            return (
-                doc.to_owned(),
-                vec![(doc.len(), span.with_lo(span.lo() + BytePos(prefix.len() as u32)))],
-            );
-        }
+    if comment_kind == CommentKind::Line {
+        let mut doc = doc.to_owned();
+        doc.push('\n');
+        let len = doc.len();
+        // +3 skips the opening delimiter
+        return (doc, vec![(len, span.with_lo(span.lo() + BytePos(3)))]);
     }
 
-    if comment.starts_with("/*") {
-        let doc = &comment[3..comment.len() - 2];
-        let mut sizes = vec![];
-        let mut contains_initial_stars = false;
-        for line in doc.lines() {
-            let offset = line.as_ptr() as usize - comment.as_ptr() as usize;
-            debug_assert_eq!(offset as u32 as usize, offset);
-            contains_initial_stars |= line.trim_start().starts_with('*');
-            // +1 for the newline
-            sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(offset as u32))));
-        }
-        if !contains_initial_stars {
-            return (doc.to_string(), sizes);
-        }
-        // remove the initial '*'s if any
-        let mut no_stars = String::with_capacity(doc.len());
-        for line in doc.lines() {
-            let mut chars = line.chars();
-            while let Some(c) = chars.next() {
-                if c.is_whitespace() {
-                    no_stars.push(c);
-                } else {
-                    no_stars.push(if c == '*' { ' ' } else { c });
-                    break;
-                }
-            }
-            no_stars.push_str(chars.as_str());
-            no_stars.push('\n');
-        }
-        return (no_stars, sizes);
-    }
-
-    panic!("not a doc-comment: {}", comment);
+    let mut sizes = vec![];
+    let mut contains_initial_stars = false;
+    for line in doc.lines() {
+        let offset = line.as_ptr() as usize - doc.as_ptr() as usize;
+        debug_assert_eq!(offset as u32 as usize, offset);
+        contains_initial_stars |= line.trim_start().starts_with('*');
+        // +1 adds the newline, +3 skips the opening delimiter
+        sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(3 + offset as u32))));
+    }
+    if !contains_initial_stars {
+        return (doc.to_string(), sizes);
+    }
+    // remove the initial '*'s if any
+    let mut no_stars = String::with_capacity(doc.len());
+    for line in doc.lines() {
+        let mut chars = line.chars();
+        while let Some(c) = chars.next() {
+            if c.is_whitespace() {
+                no_stars.push(c);
+            } else {
+                no_stars.push(if c == '*' { ' ' } else { c });
+                break;
+            }
+        }
+        no_stars.push_str(chars.as_str());
+        no_stars.push('\n');
+    }
+
+    (no_stars, sizes)
 }
 
 #[derive(Copy, Clone)]
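Side note on the hunk above: Clippy keeps its own block-comment cleanup (rather than rustc's) so it can track spans, as the doc comment explains. For reference only, here is a minimal standalone sketch in plain Rust (no rustc types or spans, hypothetical function name) of the star-stripping behaviour the new code preserves: a line's leading `*` decoration becomes a space, so column offsets inside the line stay valid.

    // Standalone sketch of the `no_stars` loop above; not part of the diff.
    fn strip_stars(doc: &str) -> String {
        let mut no_stars = String::with_capacity(doc.len());
        for line in doc.lines() {
            let mut chars = line.chars();
            while let Some(c) = chars.next() {
                if c.is_whitespace() {
                    no_stars.push(c);
                } else {
                    // a decorative '*' becomes a space; any other character ends the scan
                    no_stars.push(if c == '*' { ' ' } else { c });
                    break;
                }
            }
            no_stars.push_str(chars.as_str());
            no_stars.push('\n');
        }
        no_stars
    }

    fn main() {
        let body = "\n * First line\n * Second line\n ";
        assert_eq!(strip_stars(body), "\n   First line\n   Second line\n \n");
    }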
@@ -318,9 +310,8 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs
     let mut spans = vec![];
 
     for attr in attrs {
-        if let AttrKind::DocComment(ref comment) = attr.kind {
-            let comment = comment.to_string();
-            let (comment, current_spans) = strip_doc_comment_decoration(&comment, attr.span);
+        if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
+            let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span);
             spans.extend_from_slice(&current_spans);
             doc.push_str(&comment);
         } else if attr.has_name(sym!(doc)) {


@@ -239,7 +239,7 @@ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
                 return;
             }
             if cx.access_levels.is_exported(item.hir_id)
-                && !is_proc_macro(&item.attrs)
+                && !is_proc_macro(cx.sess(), &item.attrs)
                 && attr_by_name(&item.attrs, "no_mangle").is_none()
             {
                 check_must_use_candidate(
@@ -262,7 +262,7 @@ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<
             let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
             check_needless_must_use(cx, &sig.decl, item.hir_id, item.span, fn_header_span, attr);
         } else if cx.access_levels.is_exported(item.hir_id)
-            && !is_proc_macro(&item.attrs)
+            && !is_proc_macro(cx.sess(), &item.attrs)
             && trait_ref_of_method(cx, item.hir_id).is_none()
         {
             check_must_use_candidate(
@@ -294,7 +294,8 @@ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitIte
                 let body = cx.tcx.hir().body(eid);
                 Self::check_raw_ptr(cx, sig.header.unsafety, &sig.decl, body, item.hir_id);
 
-                if attr.is_none() && cx.access_levels.is_exported(item.hir_id) && !is_proc_macro(&item.attrs) {
+                if attr.is_none() && cx.access_levels.is_exported(item.hir_id) && !is_proc_macro(cx.sess(), &item.attrs)
+                {
                     check_must_use_candidate(
                         cx,
                         &sig.decl,


@@ -102,7 +102,7 @@ fn is_doc_hidden(attr: &Attribute) -> bool {
                 "this seems like a manual implementation of the non-exhaustive pattern",
                 |diag| {
                     if_chain! {
-                        if !attr::contains_name(&item.attrs, sym!(non_exhaustive));
+                        if !item.attrs.iter().any(|attr| attr.has_name(sym!(non_exhaustive)));
                         let header_span = cx.sess.source_map().span_until_char(item.span, '{');
                         if let Some(snippet) = snippet_opt(cx, header_span);
                         then {
@@ -154,7 +154,7 @@ fn find_header_span(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) -> S
                 "this seems like a manual implementation of the non-exhaustive pattern",
                 |diag| {
                     if_chain! {
-                        if !attr::contains_name(&item.attrs, sym!(non_exhaustive));
+                        if !item.attrs.iter().any(|attr| attr.has_name(sym!(non_exhaustive)));
                         let header_span = find_header_span(cx, item, data);
                         if let Some(snippet) = snippet_opt(cx, header_span);
                         then {


@@ -2,7 +2,6 @@
 use rustc_ast::ast::{
     Arm, AssocItem, AssocItemKind, Attribute, Block, FnDecl, Item, ItemKind, Local, MacCall, Pat, PatKind,
 };
-use rustc_ast::attr;
 use rustc_ast::visit::{walk_block, walk_expr, walk_pat, Visitor};
 use rustc_lint::{EarlyContext, EarlyLintPass};
 use rustc_middle::lint::in_external_macro;
@@ -385,7 +384,7 @@ fn check_impl_item(&mut self, cx: &EarlyContext<'_>, item: &AssocItem) {
 }
 
 fn do_check(lint: &mut NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) {
-    if !attr::contains_name(attrs, sym!(test)) {
+    if !attrs.iter().any(|attr| attr.has_name(sym!(test))) {
         let mut visitor = SimilarNamesLocalVisitor {
             names: Vec::new(),
             cx,


@@ -60,13 +60,14 @@
 impl TabsInDocComments {
     fn warn_if_tabs_in_doc(cx: &EarlyContext<'_>, attr: &ast::Attribute) {
-        if let ast::AttrKind::DocComment(comment) = attr.kind {
+        if let ast::AttrKind::DocComment(_, comment) = attr.kind {
             let comment = comment.as_str();
 
             for (lo, hi) in get_chunks_of_tabs(&comment) {
+                // +3 skips the opening delimiter
                 let new_span = Span::new(
-                    attr.span.lo() + BytePos(lo),
-                    attr.span.lo() + BytePos(hi),
+                    attr.span.lo() + BytePos(3 + lo),
+                    attr.span.lo() + BytePos(3 + hi),
                     attr.span.ctxt(),
                 );
 
                 span_lint_and_sugg(
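Side note on the `+3` offsets above and in doc.rs: after this rustup, a `DocComment` attribute's symbol holds only the text after the 3-byte opening delimiter (`///`, `//!`, `/**`), so any offset found inside that text must be shifted by 3 before being mapped back to a source position. A standalone illustration with plain strings (no rustc types):

    fn main() {
        let source_line = "/// a\tb"; // what the source file contains
        let symbol_text = &source_line[3..]; // what the DocComment symbol now stores
        let tab_in_symbol = symbol_text.find('\t').unwrap(); // 2
        let tab_in_source = 3 + tab_in_symbol; // 5: shift by the stripped `///`
        assert_eq!(source_line.as_bytes()[tab_in_source], b'\t');
    }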


@@ -506,7 +506,7 @@ pub fn eq_attr(l: &Attribute, r: &Attribute) -> bool {
     use AttrKind::*;
     l.style == r.style
         && match (&l.kind, &r.kind) {
-            (DocComment(l), DocComment(r)) => l == r,
+            (DocComment(l1, l2), DocComment(r1, r2)) => l1 == r1 && l2 == r2,
             (Normal(l), Normal(r)) => eq_path(&l.path, &r.path) && eq_mac_args(&l.args, &r.args),
             _ => false,
         }


@@ -1,5 +1,4 @@
 use rustc_ast::ast;
-use rustc_ast::expand::is_proc_macro_attr;
 use rustc_errors::Applicability;
 use rustc_session::Session;
 use std::str::FromStr;
@@ -126,6 +125,6 @@ fn parse_attrs<F: FnMut(u64)>(sess: &Session, attrs: &[ast::Attribute], name: &'
 
 /// Return true if the attributes contain any of `proc_macro`,
 /// `proc_macro_derive` or `proc_macro_attribute`, false otherwise
-pub fn is_proc_macro(attrs: &[ast::Attribute]) -> bool {
-    attrs.iter().any(is_proc_macro_attr)
+pub fn is_proc_macro(sess: &Session, attrs: &[ast::Attribute]) -> bool {
+    attrs.iter().any(|attr| sess.is_proc_macro_attr(attr))
 }
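Side note: the helper now takes a `&Session` because the check goes through `sess.is_proc_macro_attr`, as shown above; callers in functions.rs pass `cx.sess()`. What it answers is unchanged. A standalone sketch of that check with plain strings and a hypothetical helper name:

    // Hypothetical model of the check: does any attribute name mark a proc-macro definition?
    fn looks_like_proc_macro(attr_names: &[&str]) -> bool {
        attr_names
            .iter()
            .any(|name| matches!(*name, "proc_macro" | "proc_macro_derive" | "proc_macro_attribute"))
    }

    fn main() {
        assert!(looks_like_proc_macro(&["inline", "proc_macro_derive"]));
        assert!(!looks_like_proc_macro(&["inline", "must_use"]));
    }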


@@ -931,7 +931,7 @@ fn are_refutable<'a, I: Iterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>, mut
 /// Checks for the `#[automatically_derived]` attribute all `#[derive]`d
 /// implementations have.
 pub fn is_automatically_derived(attrs: &[ast::Attribute]) -> bool {
-    attr::contains_name(attrs, sym!(automatically_derived))
+    attrs.iter().any(|attr| attr.has_name(sym!(automatically_derived)))
 }
 
 /// Remove blocks around an expression.


@@ -1,23 +1,3 @@
-error: this operation will panic at runtime
-  --> $DIR/indexing_slicing_index.rs:11:5
-   |
-LL |     x[4]; // Ok, let rustc's `const_err` lint handle `usize` indexing on arrays.
-   |     ^^^^ index out of bounds: the len is 4 but the index is 4
-   |
-   = note: `#[deny(unconditional_panic)]` on by default
-
-error: this operation will panic at runtime
-  --> $DIR/indexing_slicing_index.rs:12:5
-   |
-LL |     x[1 << 3]; // Ok, let rustc's `const_err` lint handle `usize` indexing on arrays.
-   |     ^^^^^^^^^ index out of bounds: the len is 4 but the index is 8
-
-error: this operation will panic at runtime
-  --> $DIR/indexing_slicing_index.rs:27:5
-   |
-LL |     x[N]; // Ok, let rustc's `const_err` lint handle `usize` indexing on arrays.
-   |     ^^^^ index out of bounds: the len is 4 but the index is 15
-
 error: indexing may panic.
   --> $DIR/indexing_slicing_index.rs:10:5
    |
@@ -75,5 +55,5 @@ LL |     v[M];
    |
    = help: Consider using `.get(n)` or `.get_mut(n)` instead
 
-error: aborting due to 10 previous errors
+error: aborting due to 7 previous errors
 