9124: Apply a few clippy suggestions r=lnicola a=clemenswasser



Co-authored-by: Clemens Wasser <clemens.wasser@gmail.com>
This commit is contained in:
bors[bot] 2021-06-03 12:09:10 +00:00 committed by GitHub
commit 48ea50bf04
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 43 additions and 60 deletions

View File

@@ -187,7 +187,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
let a = 1664525;
let c = 1013904223;
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
return *seed;
*seed
}
fn make_ident(ident: &str) -> tt::TokenTree {
tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })

View File

@@ -219,7 +219,7 @@ impl BindingsBuilder {
bindings
}
fn build_inner(&self, bindings: &mut Bindings, link_nodes: &Vec<LinkNode<Rc<BindingKind>>>) {
fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
let mut nodes = Vec::new();
self.collect_nodes(&link_nodes, &mut nodes);
@@ -301,7 +301,7 @@ impl BindingsBuilder {
fn collect_nodes<'a>(
&'a self,
link_nodes: &'a Vec<LinkNode<Rc<BindingKind>>>,
link_nodes: &'a [LinkNode<Rc<BindingKind>>],
nodes: &mut Vec<&'a Rc<BindingKind>>,
) {
link_nodes.iter().for_each(|it| match it {
@@ -494,15 +494,8 @@ fn match_loop_inner<'t>(
}
Some(err) => {
res.add_err(err);
match match_res.value {
Some(fragment) => {
bindings_builder.push_fragment(
&mut item.bindings,
&name,
fragment,
);
}
_ => {}
if let Some(fragment) = match_res.value {
bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
}
item.is_error = true;
error_items.push(item);
@@ -578,9 +571,9 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
);
stdx::always!(cur_items.is_empty());
if error_items.len() > 0 {
if !error_items.is_empty() {
error_recover_item = error_items.pop().map(|it| it.bindings);
} else if eof_items.len() > 0 {
} else if !eof_items.is_empty() {
error_recover_item = Some(eof_items[0].bindings.clone());
}
@@ -793,7 +786,7 @@ impl<'a> TtIter<'a> {
_ => (),
}
let tt = self.next().ok_or_else(|| ())?.clone();
let tt = self.next().ok_or(())?.clone();
let punct = match tt {
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
punct

View File

@@ -295,8 +295,8 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
// Checks that no repetition which could match an empty token
// https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
if separator.is_none() {
if subtree.iter().all(|child_op| {
if separator.is_none()
&& subtree.iter().all(|child_op| {
match child_op {
Op::Var { kind, .. } => {
// vis is optional
@@ -314,9 +314,9 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
Op::Subtree { .. } => {}
}
false
}) {
return Err(ParseError::RepetitionEmptyTokenTree);
}
})
{
return Err(ParseError::RepetitionEmptyTokenTree);
}
validate(subtree)?
}

View File

@@ -213,7 +213,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
Ok(res)
}
fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
if let Mode::Pattern = mode {
src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;

View File

@@ -243,8 +243,7 @@ trait TokenConvertor {
type Token: SrcToken;
fn go(&mut self) -> tt::Subtree {
let mut subtree = tt::Subtree::default();
subtree.delimiter = None;
let mut subtree = tt::Subtree { delimiter: None, ..Default::default() };
while self.peek().is_some() {
self.collect_leaf(&mut subtree.token_trees);
}
@@ -506,7 +505,7 @@ impl TokenConvertor for Convertor {
fn peek(&self) -> Option<Self::Token> {
if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset = offset + TextSize::of('.');
offset += TextSize::of('.');
if usize::from(offset) < punct.text().len() {
return Some(SynToken::Punch(punct, offset));
}

View File

@@ -138,7 +138,7 @@ impl<'a> TtIter<'a> {
}
}
self.inner = self.inner.as_slice()[res.len()..].iter();
if res.len() == 0 && err.is_none() {
if res.is_empty() && err.is_none() {
err = Some(err!("no tokens consumed"));
}
let res = match res.len() {

View File

@@ -252,12 +252,10 @@ fn closure_expr(p: &mut Parser) -> CompletedMarker {
// test lambda_ret_block
// fn main() { || -> i32 { 92 }(); }
block_expr(p);
} else if p.at_ts(EXPR_FIRST) {
expr(p);
} else {
if p.at_ts(EXPR_FIRST) {
expr(p);
} else {
p.error("expected expression");
}
p.error("expected expression");
}
m.complete(p, CLOSURE_EXPR)
}

View File

@@ -36,8 +36,8 @@ pub(crate) fn incremental_reparse(
None
}
fn reparse_token<'node>(
root: &'node SyntaxNode,
fn reparse_token(
root: &SyntaxNode,
edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let prev_token = root.covering_element(edit.delete).as_token()?.clone();
@@ -84,8 +84,8 @@ fn reparse_token<'node>(
}
}
fn reparse_block<'node>(
root: &'node SyntaxNode,
fn reparse_block(
root: &SyntaxNode,
edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?;

View File

@@ -236,7 +236,7 @@ where
}
});
dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
if let Ok(_) = f(text) {
if f(text).is_ok() {
panic!("'{:?}' successfully parsed when it should have errored", path);
} else {
"ERROR\n".to_owned()

View File

@@ -238,14 +238,9 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
let mut res = Vec::new();
let mut offset: TextSize = 0.into();
let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
loop {
match line.find(marker) {
Some(idx) => {
offset += TextSize::try_from(idx).unwrap();
line = &line[idx..];
}
None => break,
};
while let Some(idx) = line.find(marker) {
offset += TextSize::try_from(idx).unwrap();
line = &line[idx..];
let mut len = line.chars().take_while(|&it| it == '^').count();
let mut continuation = false;

View File

@@ -94,18 +94,16 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
support::children(&self.syntax)
}
}
} else {
if let Some(token_kind) = field.token_kind() {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::token(&self.syntax, #token_kind)
}
} else if let Some(token_kind) = field.token_kind() {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::token(&self.syntax, #token_kind)
}
} else {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::child(&self.syntax)
}
}
} else {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::child(&self.syntax)
}
}
}

View File

@@ -132,7 +132,7 @@ fn parse_changelog_line(s: &str) -> Option<PrInfo> {
return Some(PrInfo { kind, message: Some(message) });
}
};
let res = PrInfo { kind, message };
let res = PrInfo { message, kind };
Some(res)
}
@@ -152,7 +152,7 @@ fn parse_title_line(s: &str) -> PrInfo {
PrKind::Skip => None,
_ => Some(s[prefix.len()..].to_string()),
};
return PrInfo { kind, message };
return PrInfo { message, kind };
}
}
PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }

View File

@@ -33,7 +33,7 @@ fn check_code_formatting() {
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
crate::ensure_rustfmt().unwrap();
let res = cmd!("cargo fmt -- --check").run();
if !res.is_ok() {
if res.is_err() {
let _ = cmd!("cargo fmt").run();
}
res.unwrap()
@@ -244,19 +244,19 @@ Zlib OR Apache-2.0 OR MIT
.map(|it| it.trim())
.map(|it| it[r#""license":"#.len()..].trim_matches('"'))
.collect::<Vec<_>>();
licenses.sort();
licenses.sort_unstable();
licenses.dedup();
if licenses != expected {
let mut diff = String::new();
diff += &format!("New Licenses:\n");
diff.push_str("New Licenses:\n");
for &l in licenses.iter() {
if !expected.contains(&l) {
diff += &format!(" {}\n", l)
}
}
diff += &format!("\nMissing Licenses:\n");
diff.push_str("\nMissing Licenses:\n");
for &l in expected.iter() {
if !licenses.contains(&l) {
diff += &format!(" {}\n", l)