Rollup merge of #56516 - frewsxcv:frewsxcv-eq, r=Mark-Simulacrum

Replace usages of `..i + 1` ranges with `..=i`.

Before this change, these loops and slices spelled an inclusive upper bound as an exclusive range with a manual `+ 1` (e.g. `0..len + 1`). After this change they use inclusive ranges (e.g. `0..=len`), which express the same bounds directly.
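A minimal standalone sketch (not part of the diff itself) of the equivalence this rewrite relies on: an exclusive range whose upper bound carries a manual `+ 1` yields exactly the same items as an inclusive `..=` range, and the same holds for slice indexing. The names below (`len`, `buf`, `i`) are illustrative only.

```rust
fn main() {
    let len = 5;

    // Old style: exclusive range with a manual `+ 1` on the upper bound.
    let exclusive: Vec<usize> = (0..len + 1).collect();
    // New style: inclusive range ending at `len`.
    let inclusive: Vec<usize> = (0..=len).collect();

    assert_eq!(exclusive, inclusive);
    assert_eq!(inclusive, vec![0, 1, 2, 3, 4, 5]);

    // The same rewrite applies to slicing: `&buf[..i + 1]` and `&buf[..=i]`
    // both take the prefix up to and including index `i`.
    let buf = [10, 20, 30, 40];
    let i = 2;
    assert_eq!(&buf[..i + 1], &buf[..=i]);
}
```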
Commit 0e41ef13aa by kennytm, 2018-12-07 12:42:32 +08:00 (committed by GitHub).
19 changed files with 36 additions and 36 deletions

View File

@@ -2795,7 +2795,7 @@ fn test_remove() {
// 0, 1, 2, .., len - 1
let expected = (0..).take(len).collect::<VecDeque<_>>();
for tail_pos in 0..cap {
- for to_remove in 0..len + 1 {
+ for to_remove in 0..=len {
tester.tail = tail_pos;
tester.head = tail_pos;
for i in 0..len {
@@ -2821,10 +2821,10 @@ fn test_drain() {
let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
let cap = tester.capacity();
- for len in 0..cap + 1 {
- for tail in 0..cap + 1 {
- for drain_start in 0..len + 1 {
- for drain_end in drain_start..len + 1 {
+ for len in 0..=cap {
+ for tail in 0..=cap {
+ for drain_start in 0..=len {
+ for drain_end in drain_start..=len {
tester.tail = tail;
tester.head = tail;
for i in 0..len {
@@ -2866,10 +2866,10 @@ fn test_shrink_to_fit() {
tester.reserve(63);
let max_cap = tester.capacity();
- for len in 0..cap + 1 {
+ for len in 0..=cap {
// 0, 1, 2, .., len - 1
let expected = (0..).take(len).collect::<VecDeque<_>>();
- for tail_pos in 0..max_cap + 1 {
+ for tail_pos in 0..=max_cap {
tester.tail = tail_pos;
tester.head = tail_pos;
tester.reserve(63);
@@ -2899,7 +2899,7 @@ fn test_split_off() {
// len is the length *before* splitting
for len in 0..cap {
// index to split at
- for at in 0..len + 1 {
+ for at in 0..=len {
// 0, 1, 2, .., at - 1 (may be empty)
let expected_self = (0..).take(at).collect::<VecDeque<_>>();
// at, at + 1, .., len - 1 (may be empty)
@@ -2927,7 +2927,7 @@ fn test_split_off() {
fn test_from_vec() {
use vec::Vec;
for cap in 0..35 {
- for len in 0..cap + 1 {
+ for len in 0..=cap {
let mut vec = Vec::with_capacity(cap);
vec.extend(0..len);

View File

@@ -318,11 +318,11 @@ fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
const NTEST: usize = 10;
// don't use 0 in the data -- we want to catch the zeroed-out case.
- let data = (1..DATASZ + 1).collect::<Vec<_>>();
+ let data = (1..=DATASZ).collect::<Vec<_>>();
// since it's a fuzzy test, run several tries.
for _ in 0..NTEST {
- for i in 1..DATASZ + 1 {
+ for i in 1..=DATASZ {
DROP_COUNTER.store(0, Ordering::SeqCst);
let mut panic_ords: Vec<_> = data.iter()

View File

@@ -302,7 +302,7 @@ fn test_range() {
for i in 0..size {
for j in i..size {
let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
- let mut pairs = (i..j + 1).map(|i| (i, i));
+ let mut pairs = (i..=j).map(|i| (i, i));
for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
assert_eq!(kv, pair);
@@ -321,7 +321,7 @@ fn test_range_mut() {
for i in 0..size {
for j in i..size {
let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
- let mut pairs = (i..j + 1).map(|i| (i, i));
+ let mut pairs = (i..=j).map(|i| (i, i));
for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
assert_eq!(kv, pair);

View File

@@ -1378,7 +1378,7 @@ fn test_bool_from_str() {
fn check_contains_all_substrings(s: &str) {
assert!(s.contains(""));
for i in 0..s.len() {
- for j in i+1..s.len() + 1 {
+ for j in i+1..=s.len() {
assert!(s.contains(&s[i..j]));
}
}

View File

@@ -861,7 +861,7 @@ fn test_as_slices() {
ring.push_back(i);
let (left, right) = ring.as_slices();
- let expected: Vec<_> = (0..i + 1).collect();
+ let expected: Vec<_> = (0..=i).collect();
assert_eq!(left, &expected[..]);
assert_eq!(right, []);
}
@@ -869,7 +869,7 @@ fn test_as_slices() {
for j in -last..0 {
ring.push_front(j);
let (left, right) = ring.as_slices();
- let expected_left: Vec<_> = (-last..j + 1).rev().collect();
+ let expected_left: Vec<_> = (-last..=j).rev().collect();
let expected_right: Vec<_> = (0..first).collect();
assert_eq!(left, &expected_left[..]);
assert_eq!(right, &expected_right[..]);
@@ -889,7 +889,7 @@ fn test_as_mut_slices() {
ring.push_back(i);
let (left, right) = ring.as_mut_slices();
- let expected: Vec<_> = (0..i + 1).collect();
+ let expected: Vec<_> = (0..=i).collect();
assert_eq!(left, &expected[..]);
assert_eq!(right, []);
}
@@ -897,7 +897,7 @@ fn test_as_mut_slices() {
for j in -last..0 {
ring.push_front(j);
let (left, right) = ring.as_mut_slices();
- let expected_left: Vec<_> = (-last..j + 1).rev().collect();
+ let expected_left: Vec<_> = (-last..=j).rev().collect();
let expected_right: Vec<_> = (0..first).collect();
assert_eq!(left, &expected_left[..]);
assert_eq!(right, &expected_right[..]);

View File

@@ -100,7 +100,7 @@ fn check<F: FnOnce(&mut HirIdValidator<'a, 'hir>)>(&mut self,
if max != self.hir_ids_seen.len() - 1 {
// Collect the missing ItemLocalIds
- let missing: Vec<_> = (0 .. max as u32 + 1)
+ let missing: Vec<_> = (0 ..= max as u32)
.filter(|&i| !self.hir_ids_seen.contains_key(&ItemLocalId::from_u32(i)))
.collect();

View File

@@ -339,7 +339,7 @@ pub fn mut_vars_and_args_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a
#[inline]
pub fn args_iter(&self) -> impl Iterator<Item = Local> {
let arg_count = self.arg_count;
- (1..arg_count + 1).map(Local::new)
+ (1..=arg_count).map(Local::new)
}
/// Returns an iterator over all user-defined variables and compiler-generated temporaries (all

View File

@@ -571,7 +571,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
// Fill with zeros up to precision.
if !truncate_zero && precision > digits - 1 {
- for _ in 0..precision - digits + 1 {
+ for _ in 0..=precision - digits {
f.write_char('0')?;
}
}
@@ -1969,7 +1969,7 @@ fn from_decimal_string(s: &str, round: Round) -> Result<StatusAnd<Self>, ParseEr
// in a Limb. When this would overflow do we do a single
// bignum multiplication, and then revert again to multiplication
// in a Limb.
- let mut chars = s[first_sig_digit..last_sig_digit + 1].chars();
+ let mut chars = s[first_sig_digit..=last_sig_digit].chars();
loop {
let mut val = 0;
let mut multiplier = 1;

View File

@@ -549,7 +549,7 @@ fn render_source_line(&self,
// 3 |
// 4 | }
// |
- for pos in 0..line_len + 1 {
+ for pos in 0..=line_len {
draw_col_separator(buffer, line_offset + pos + 1, width_offset - 2);
buffer.putc(line_offset + pos + 1,
width_offset - 2,
@@ -617,7 +617,7 @@ fn render_source_line(&self,
let pos = pos + 1;
if pos > 1 && (annotation.has_label() || annotation.takes_space()) {
- for p in line_offset + 1..line_offset + pos + 1 {
+ for p in line_offset + 1..=line_offset + pos {
buffer.putc(p,
code_offset + annotation.start_col,
'|',
@@ -634,7 +634,7 @@ fn render_source_line(&self,
}
}
AnnotationType::MultilineEnd(depth) => {
- for p in line_offset..line_offset + pos + 1 {
+ for p in line_offset..=line_offset + pos {
buffer.putc(p,
width_offset + depth - 1,
'|',

View File

@@ -354,7 +354,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) {
}
// State: "s-{timestamp}-{random-number}-"
- let mut new_sub_dir_name = String::from(&old_sub_dir_name[.. dash_indices[2] + 1]);
+ let mut new_sub_dir_name = String::from(&old_sub_dir_name[..= dash_indices[2]]);
// Append the svh
base_n::push_str(svh.as_u64() as u128, INT_ENCODE_BASE, &mut new_sub_dir_name);

View File

@@ -48,7 +48,7 @@ impl RegionValueElements {
let mut basic_blocks = IndexVec::with_capacity(num_points);
for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
- basic_blocks.extend((0..bb_data.statements.len() + 1).map(|_| bb));
+ basic_blocks.extend((0..=bb_data.statements.len()).map(|_| bb));
}
Self {

View File

@@ -101,7 +101,7 @@ pub fn match_expr(
// create binding start block for link them by false edges
let candidate_count = arms.iter().fold(0, |ac, c| ac + c.patterns.len());
- let pre_binding_blocks: Vec<_> = (0..candidate_count + 1)
+ let pre_binding_blocks: Vec<_> = (0..=candidate_count)
.map(|_| self.cfg.start_new_block())
.collect();

View File

@@ -3617,7 +3617,7 @@ fn resolve_qpath(&mut self,
let res = self.smart_resolve_path_fragment(
id,
None,
- &path[..qself.position + 1],
+ &path[..=qself.position],
span,
PathSource::TraitItem(ns),
CrateLint::QPathTrait {

View File

@@ -4798,7 +4798,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
tmp /= 10;
}
write!(fmt, "<pre class=\"line-numbers\">")?;
- for i in 1..lines + 1 {
+ for i in 1..=lines {
write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", i, cols)?;
}
write!(fmt, "</pre>")?;

View File

@@ -3610,7 +3610,7 @@ fn test_lots_of_insertions() {
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
- for j in 1..i + 1 {
+ for j in 1..=i {
let r = m.get(&j);
assert_eq!(r, Some(&j));
}
@@ -3629,7 +3629,7 @@ fn test_lots_of_insertions() {
for i in 1..1001 {
assert!(m.remove(&i).is_some());
- for j in 1..i + 1 {
+ for j in 1..=i {
assert!(!m.contains_key(&j));
}

View File

@@ -918,7 +918,7 @@ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// some data then we *must* report that we wrote that data, so future
// errors are ignored. We set our internal `need_flush` flag, though, in
// case flushing fails and we need to try it first next time.
- let n = self.inner.write(&buf[..i + 1])?;
+ let n = self.inner.write(&buf[..=i])?;
self.need_flush = true;
if self.flush().is_err() || n != i + 1 {
return Ok(n)

View File

@@ -1261,7 +1261,7 @@ fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>)
};
match memchr::memchr(delim, available) {
Some(i) => {
- buf.extend_from_slice(&available[..i + 1]);
+ buf.extend_from_slice(&available[..=i]);
(true, i + 1)
}
None => {

View File

@@ -487,7 +487,7 @@ fn append_arg(cmd: &mut Vec<u16>, arg: &OsStr, force_quotes: bool) -> io::Result
} else {
if x == '"' as u16 {
// Add n+1 backslashes to total 2n+1 before internal '"'.
- cmd.extend((0..(backslashes + 1)).map(|_| '\\' as u16));
+ cmd.extend((0..=backslashes).map(|_| '\\' as u16));
}
backslashes = 0;
}

View File

@@ -20,7 +20,7 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
return a.chars().count();
}
- let mut dcol: Vec<_> = (0..b.len() + 1).collect();
+ let mut dcol: Vec<_> = (0..=b.len()).collect();
let mut t_last = 0;
for (i, sc) in a.chars().enumerate() {