auto merge of #19677 : japaric/rust/deprecate-tupleN, r=alexcrichton
r? @alexcrichton or anyone else

closes #18006

Commit 10ac5b72f1
@@ -348,7 +348,6 @@ def emit_conversions_module(f, lowerupper, upperlower):
     f.write("""
 use core::cmp::Ordering::{Equal, Less, Greater};
 use core::slice::SlicePrelude;
-use core::tuple::Tuple2;
 use core::option::Option;
 use core::option::Option::{Some, None};
 use core::slice;

@@ -356,14 +355,14 @@ def emit_conversions_module(f, lowerupper, upperlower):
 pub fn to_lower(c: char) -> char {
     match bsearch_case_table(c, LuLl_table) {
         None => c,
-        Some(index) => LuLl_table[index].val1()
+        Some(index) => LuLl_table[index].1
     }
 }

 pub fn to_upper(c: char) -> char {
     match bsearch_case_table(c, LlLu_table) {
         None => c,
-        Some(index) => LlLu_table[index].val1()
+        Some(index) => LlLu_table[index].1
     }
 }
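The generator now emits plain tuple indexing instead of the Tuple2::val1() accessor. The lookup pattern in isolation, as a minimal sketch with an illustrative three-entry table (the real LuLl_table/LlLu_table are generated, not these names):

// Illustrative stand-in for the generated case table.
static CASE_TABLE: &'static [(char, char)] = &[('A', 'a'), ('B', 'b'), ('C', 'c')];

fn to_lower(c: char) -> char {
    match CASE_TABLE.iter().position(|&(upper, _)| upper == c) {
        None => c,
        // `.1` picks the second element of the matched pair, replacing
        // the deprecated `.val1()` accessor removed by this patch.
        Some(index) => CASE_TABLE[index].1,
    }
}

fn main() {
    assert_eq!(to_lower('B'), 'b');
    assert_eq!(to_lower('x'), 'x');
}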
@@ -62,7 +62,7 @@ use core::iter::AdditiveIterator;
 use core::kinds::Sized;
 use core::prelude::{Char, Clone, Eq, Equiv};
 use core::prelude::{Iterator, IteratorExt, SlicePrelude, None, Option, Ord, Ordering};
-use core::prelude::{PartialEq, PartialOrd, Result, AsSlice, Some, Tuple2};
+use core::prelude::{PartialEq, PartialOrd, Result, AsSlice, Some};
 use core::prelude::{range};

 use hash;

@@ -185,8 +185,8 @@ fn canonical_sort(comb: &mut [(char, u8)]) {
     for i in range(0, len) {
         let mut swapped = false;
         for j in range(1, len-i) {
-            let class_a = *comb[j-1].ref1();
-            let class_b = *comb[j].ref1();
+            let class_a = comb[j-1].1;
+            let class_b = comb[j].1;
             if class_a != 0 && class_b != 0 && class_a > class_b {
                 comb.swap(j-1, j);
                 swapped = true;
@@ -38,9 +38,12 @@
 //! Using methods:
 //!
 //! ```
+//! #[allow(deprecated)]
+//! # fn main() {
 //! let pair = ("pi", 3.14f64);
 //! assert_eq!(pair.val0(), "pi");
 //! assert_eq!(pair.val1(), 3.14f64);
+//! # }
 //! ```
 //!
 //! Using traits implemented for tuples:
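The doctest above keeps the deprecated accessors on purpose (hence the added #[allow(deprecated)]); what the deprecation message points at instead is plain indexing or destructuring. A minimal sketch of the post-deprecation form:

fn main() {
    let pair = ("pi", 3.14f64);

    // Tuple indexing replaces the deprecated val0()/val1() accessors.
    assert_eq!(pair.0, "pi");
    assert_eq!(pair.1, 3.14f64);

    // Destructuring is often clearer when both elements are needed.
    let (name, value) = pair;
    assert_eq!(name, "pi");
    assert_eq!(value, 3.14f64);
}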
@@ -72,25 +75,28 @@ use default::Default;
 use option::Option;
 use option::Option::Some;

+// FIXME(#19630) Remove this work-around
+macro_rules! e {
+    ($e:expr) => { $e }
+}
+
 // macro for implementing n-ary tuple functions and operations
 macro_rules! tuple_impls {
     ($(
         $Tuple:ident {
-            $(($valN:ident, $refN:ident, $mutN:ident) -> $T:ident {
-                ($($x:ident),+) => $ret:expr
-            })+
+            $(($valN:ident, $refN:ident, $mutN:ident, $idx:tt) -> $T:ident)+
         }
     )+) => {
         $(
             #[allow(missing_docs)]
             #[stable]
+            #[deprecated]
             pub trait $Tuple<$($T),+> {
                 $(
                     #[unstable = "may rename pending accessor naming conventions"]
+                    #[deprecated = "use tuple indexing: `tuple.N`"]
                     fn $valN(self) -> $T;
                     #[unstable = "may rename pending accessor naming conventions"]
+                    #[deprecated = "use tuple indexing: `&tuple.N`"]
                     fn $refN<'a>(&'a self) -> &'a $T;
                     #[unstable = "may rename pending accessor naming conventions"]
+                    #[deprecated = "use tuple indexing: `&mut tuple.N`"]
                     fn $mutN<'a>(&'a mut self) -> &'a mut $T;
                 )+
             }
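The e! macro is only a parser work-around: substituting a tt fragment straight into a field-access position ran into issue #19630 on the compiler of the time, so the access is routed through a trivial expression macro and re-parsed. A reduced, self-contained sketch of the same trick (the demo macro and function names below are made up for illustration):

// Re-parse the substituted tokens as one expression
// (the FIXME(#19630) work-around shown above).
macro_rules! e {
    ($e:expr) => { $e }
}

// Hypothetical demo: generate a getter from a literal tuple index.
macro_rules! first_of_pair {
    ($idx:tt) => {
        fn first(pair: (i32, i32)) -> i32 {
            // Without the e!(...) wrapper, `pair.$idx` with a `tt`
            // fragment tripped the parser bug the work-around exists for.
            e!(pair.$idx)
        }
    }
}

first_of_pair!(0);

fn main() {
    assert_eq!(first((7, 8)), 7);
}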
@@ -99,23 +105,23 @@ macro_rules! tuple_impls {
         $(
             #[inline]
             #[allow(unused_variables)]
             #[unstable = "may rename pending accessor naming conventions"]
+            #[deprecated = "use tuple indexing: `tuple.N`"]
             fn $valN(self) -> $T {
-                let ($($x,)+) = self; $ret
+                e!(self.$idx)
             }

             #[inline]
             #[allow(unused_variables)]
             #[unstable = "may rename pending accessor naming conventions"]
+            #[deprecated = "use tuple indexing: `&tuple.N`"]
             fn $refN<'a>(&'a self) -> &'a $T {
-                let ($(ref $x,)+) = *self; $ret
+                e!(&self.$idx)
             }

             #[inline]
             #[allow(unused_variables)]
             #[unstable = "may rename pending accessor naming conventions"]
+            #[deprecated = "use tuple indexing: &mut tuple.N"]
             fn $mutN<'a>(&'a mut self) -> &'a mut $T {
-                let ($(ref mut $x,)+) = *self; $ret
+                e!(&mut self.$idx)
             }
         )+
     }
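After expansion the deprecated accessors are now thin wrappers around indexing. Hand-expanded for a pair, the generated methods amount to roughly the following free functions (a sketch of the shape, not the literal macro output):

// Hand-expanded shape of the generated accessors for a pair (A, B):
fn val0<A, B>(t: (A, B)) -> A { t.0 }
fn ref1<'a, A, B>(t: &'a (A, B)) -> &'a B { &t.1 }
fn mut1<'a, A, B>(t: &'a mut (A, B)) -> &'a mut B { &mut t.1 }

fn main() {
    let mut pair = (1u32, 2u32);
    assert_eq!(val0(pair), 1);
    *mut1(&mut pair) = 5;
    assert_eq!(*ref1(&pair), 5);
}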
@@ -123,7 +129,7 @@ macro_rules! tuple_impls {
         #[unstable = "waiting for Clone to stabilize"]
         impl<$($T:Clone),+> Clone for ($($T,)+) {
             fn clone(&self) -> ($($T,)+) {
-                ($(self.$refN().clone(),)+)
+                ($(e!(self.$idx.clone()),)+)
             }
         }

@@ -131,11 +137,11 @@ macro_rules! tuple_impls {
         impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
             #[inline]
             fn eq(&self, other: &($($T,)+)) -> bool {
-                $(*self.$refN() == *other.$refN())&&+
+                e!($(self.$idx == other.$idx)&&+)
             }
             #[inline]
             fn ne(&self, other: &($($T,)+)) -> bool {
-                $(*self.$refN() != *other.$refN())||+
+                e!($(self.$idx != other.$idx)||+)
             }
         }
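For a pair, the PartialEq impl the macro now emits boils down to field-by-field comparison through indexing. Written out by hand as a free function (the real impl is on the tuple type itself, so a standalone helper is used here for illustration):

// Hand-expanded shape of the generated comparison for a pair:
fn pair_eq<A: PartialEq, B: PartialEq>(a: &(A, B), b: &(A, B)) -> bool {
    a.0 == b.0 && a.1 == b.1
}

fn main() {
    assert!(pair_eq(&(1, "x"), &(1, "x")));
    assert!(!pair_eq(&(1, "x"), &(2, "x")));
}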
@@ -146,23 +152,23 @@ macro_rules! tuple_impls {
         impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
             #[inline]
             fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
-                lexical_partial_cmp!($(self.$refN(), other.$refN()),+)
+                lexical_partial_cmp!($(self.$idx, other.$idx),+)
             }
             #[inline]
             fn lt(&self, other: &($($T,)+)) -> bool {
-                lexical_ord!(lt, $(self.$refN(), other.$refN()),+)
+                lexical_ord!(lt, $(self.$idx, other.$idx),+)
             }
             #[inline]
             fn le(&self, other: &($($T,)+)) -> bool {
-                lexical_ord!(le, $(self.$refN(), other.$refN()),+)
+                lexical_ord!(le, $(self.$idx, other.$idx),+)
             }
             #[inline]
             fn ge(&self, other: &($($T,)+)) -> bool {
-                lexical_ord!(ge, $(self.$refN(), other.$refN()),+)
+                lexical_ord!(ge, $(self.$idx, other.$idx),+)
             }
             #[inline]
             fn gt(&self, other: &($($T,)+)) -> bool {
-                lexical_ord!(gt, $(self.$refN(), other.$refN()),+)
+                lexical_ord!(gt, $(self.$idx, other.$idx),+)
             }
         }

@@ -170,7 +176,7 @@ macro_rules! tuple_impls {
         impl<$($T:Ord),+> Ord for ($($T,)+) {
             #[inline]
             fn cmp(&self, other: &($($T,)+)) -> Ordering {
-                lexical_cmp!($(self.$refN(), other.$refN()),+)
+                lexical_cmp!($(self.$idx, other.$idx),+)
             }
         }
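The PartialOrd/Ord impls keep their lexicographic semantics; only the way elements are reached changes. For reference, the observable behaviour:

fn main() {
    // Tuples still compare element by element, left to right.
    assert!((1, 2) < (1, 3));
    assert!((2, 0) > (1, 9));
    assert_eq!((1, 2).cmp(&(1, 2)), std::cmp::Ordering::Equal);
}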
@@ -191,134 +197,134 @@ macro_rules! tuple_impls {
 // a3, b3)` (and similarly for `lexical_cmp`)
 macro_rules! lexical_ord {
     ($rel: ident, $a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
-        if *$a != *$b { lexical_ord!($rel, $a, $b) }
+        if $a != $b { lexical_ord!($rel, $a, $b) }
         else { lexical_ord!($rel, $($rest_a, $rest_b),+) }
     };
-    ($rel: ident, $a:expr, $b:expr) => { (*$a) . $rel ($b) };
+    ($rel: ident, $a:expr, $b:expr) => { ($a) . $rel (& $b) };
 }

 macro_rules! lexical_partial_cmp {
     ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
-        match ($a).partial_cmp($b) {
+        match ($a).partial_cmp(&$b) {
             Some(Equal) => lexical_partial_cmp!($($rest_a, $rest_b),+),
             ordering => ordering
         }
     };
-    ($a:expr, $b:expr) => { ($a).partial_cmp($b) };
+    ($a:expr, $b:expr) => { ($a).partial_cmp(&$b) };
 }

 macro_rules! lexical_cmp {
     ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
-        match ($a).cmp($b) {
+        match ($a).cmp(&$b) {
             Equal => lexical_cmp!($($rest_a, $rest_b),+),
             ordering => ordering
         }
     };
-    ($a:expr, $b:expr) => { ($a).cmp($b) };
+    ($a:expr, $b:expr) => { ($a).cmp(&$b) };
 }
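Because the macros now receive values rather than the references returned by the refN() accessors, the base cases take the address themselves (partial_cmp(&$b), cmp(&$b)). For a pair, lexical_partial_cmp!(self.0, other.0, self.1, other.1) expands to roughly the following (hand-expanded sketch, written as a free function):

use std::cmp::Ordering;

// Hand-expanded form of the pair case of lexical_partial_cmp!.
fn pair_partial_cmp<A: PartialOrd, B: PartialOrd>(a: &(A, B), b: &(A, B)) -> Option<Ordering> {
    match (a.0).partial_cmp(&b.0) {
        Some(Ordering::Equal) => (a.1).partial_cmp(&b.1),
        ordering => ordering,
    }
}

fn main() {
    assert_eq!(pair_partial_cmp(&(1, 2), &(1, 3)), Some(Ordering::Less));
}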
 tuple_impls! {
     Tuple1 {
-        (val0, ref0, mut0) -> A { (a) => a }
+        (val0, ref0, mut0, 0) -> A
     }
     Tuple2 {
-        (val0, ref0, mut0) -> A { (a, b) => a }
-        (val1, ref1, mut1) -> B { (a, b) => b }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
     }
     Tuple3 {
-        (val0, ref0, mut0) -> A { (a, b, c) => a }
-        (val1, ref1, mut1) -> B { (a, b, c) => b }
-        (val2, ref2, mut2) -> C { (a, b, c) => c }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
     }
     Tuple4 {
-        (val0, ref0, mut0) -> A { (a, b, c, d) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d) => d }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
     }
     Tuple5 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e) => e }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
     }
     Tuple6 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f) => f }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
     }
     Tuple7 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g) => g }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
     }
     Tuple8 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h) => g }
-        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h) => h }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
+        (val7, ref7, mut7, 7) -> H
     }
     Tuple9 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i) => g }
-        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i) => h }
-        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i) => i }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
+        (val7, ref7, mut7, 7) -> H
+        (val8, ref8, mut8, 8) -> I
     }
     Tuple10 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i, j) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i, j) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i, j) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i, j) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i, j) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i, j) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i, j) => g }
-        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i, j) => h }
-        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i, j) => i }
-        (val9, ref9, mut9) -> J { (a, b, c, d, e, f, g, h, i, j) => j }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
+        (val7, ref7, mut7, 7) -> H
+        (val8, ref8, mut8, 8) -> I
+        (val9, ref9, mut9, 9) -> J
     }
     Tuple11 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i, j, k) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i, j, k) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i, j, k) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i, j, k) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i, j, k) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i, j, k) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i, j, k) => g }
-        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i, j, k) => h }
-        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i, j, k) => i }
-        (val9, ref9, mut9) -> J { (a, b, c, d, e, f, g, h, i, j, k) => j }
-        (val10, ref10, mut10) -> K { (a, b, c, d, e, f, g, h, i, j, k) => k }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
+        (val7, ref7, mut7, 7) -> H
+        (val8, ref8, mut8, 8) -> I
+        (val9, ref9, mut9, 9) -> J
+        (val10, ref10, mut10, 10) -> K
     }
     Tuple12 {
-        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i, j, k, l) => a }
-        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i, j, k, l) => b }
-        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i, j, k, l) => c }
-        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i, j, k, l) => d }
-        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i, j, k, l) => e }
-        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i, j, k, l) => f }
-        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i, j, k, l) => g }
-        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i, j, k, l) => h }
-        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i, j, k, l) => i }
-        (val9, ref9, mut9) -> J { (a, b, c, d, e, f, g, h, i, j, k, l) => j }
-        (val10, ref10, mut10) -> K { (a, b, c, d, e, f, g, h, i, j, k, l) => k }
-        (val11, ref11, mut11) -> L { (a, b, c, d, e, f, g, h, i, j, k, l) => l }
+        (val0, ref0, mut0, 0) -> A
+        (val1, ref1, mut1, 1) -> B
+        (val2, ref2, mut2, 2) -> C
+        (val3, ref3, mut3, 3) -> D
+        (val4, ref4, mut4, 4) -> E
+        (val5, ref5, mut5, 5) -> F
+        (val6, ref6, mut6, 6) -> G
+        (val7, ref7, mut7, 7) -> H
+        (val8, ref8, mut8, 8) -> I
+        (val9, ref9, mut9, 9) -> J
+        (val10, ref10, mut10, 10) -> K
+        (val11, ref11, mut11, 11) -> L
     }
 }
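For Tuple2, the new invocation line (val1, ref1, mut1, 1) -> B therefore generates accessors that simply forward to .1, and callers can drop the trait entirely. A small sketch of what the call sites look like after the change:

fn main() {
    let mut pair = (10u32, 20u32);

    // What used to be pair.val1() / *pair.ref1():
    assert_eq!(pair.1, 20);
    {
        let second: &u32 = &pair.1;
        assert_eq!(*second, 20);
    }
    // What used to be *pair.mut1() = 99:
    pair.1 = 99;
    assert_eq!(pair, (10u32, 99u32));
}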
@@ -334,7 +334,7 @@ fn test_iterator_size_hint() {
     assert_eq!(vi.size_hint(), (10, Some(10)));

     assert_eq!(c.take(5).size_hint(), (5, Some(5)));
-    assert_eq!(c.skip(5).size_hint().val1(), None);
+    assert_eq!(c.skip(5).size_hint().1, None);
     assert_eq!(c.take_while(|_| false).size_hint(), (0, None));
     assert_eq!(c.skip_while(|_| false).size_hint(), (0, None));
     assert_eq!(c.enumerate().size_hint(), (uint::MAX, None));
@@ -218,7 +218,7 @@
 //! impl<'a> dot::Labeller<'a, Nd<'a>, Ed<'a>> for Graph {
 //!     fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new("example3").unwrap() }
 //!     fn node_id(&'a self, n: &Nd<'a>) -> dot::Id<'a> {
-//!         dot::Id::new(format!("N{}", n.val0())).unwrap()
+//!         dot::Id::new(format!("N{}", n.0)).unwrap()
 //!     }
 //!     fn node_label<'a>(&'a self, n: &Nd<'a>) -> dot::LabelText<'a> {
 //!         let &(i, _) = n;
@@ -1031,7 +1031,7 @@ fn is_valid_cap(c: char) -> bool {

 fn find_class(classes: NamedClasses, name: &str) -> Option<Vec<(char, char)>> {
     match classes.binary_search(|&(s, _)| s.cmp(name)) {
-        BinarySearchResult::Found(i) => Some(classes[i].val1().to_vec()),
+        BinarySearchResult::Found(i) => Some(classes[i].1.to_vec()),
         BinarySearchResult::NotFound(_) => None,
     }
 }
@@ -34,7 +34,6 @@ use lint::builtin;
 use util::nodemap::FnvHashMap;

 use std::cell::RefCell;
-use std::tuple::Tuple2;
 use std::mem;
 use syntax::ast_util::IdVisitingOperation;
 use syntax::attr::AttrMetaMethods;

@@ -87,7 +86,7 @@ impl LintStore {
     }

     fn set_level(&mut self, lint: LintId, lvlsrc: LevelSource) {
-        if lvlsrc.val0() == Allow {
+        if lvlsrc.0 == Allow {
             self.levels.remove(&lint);
         } else {
             self.levels.insert(lint, lvlsrc);

@@ -110,8 +109,8 @@ impl LintStore {

     pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
         self.lint_groups.iter().map(|(k, v)| (*k,
-                                              v.ref0().clone(),
-                                              *v.ref1())).collect()
+                                              v.0.clone(),
+                                              v.1)).collect()
     }

     pub fn register_pass(&mut self, sess: Option<&Session>,

@@ -275,7 +274,7 @@ impl LintStore {
                 match self.find_lint(lint_name.as_slice(), sess, None) {
                     Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
                     None => {
-                        match self.lint_groups.iter().map(|(&x, pair)| (x, pair.ref0().clone()))
+                        match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
                                               .collect::<FnvHashMap<&'static str,
                                                                     Vec<LintId>>>()
                                               .get(lint_name.as_slice()) {

@@ -443,7 +442,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
             None => return,
             Some(&(Warn, src)) => {
                 let lint_id = LintId::of(builtin::WARNINGS);
-                (self.lints.get_level_source(lint_id).val0(), src)
+                (self.lints.get_level_source(lint_id).0, src)
             }
             Some(&pair) => pair,
         };

@@ -503,7 +502,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
        };

        for (lint_id, level, span) in v.into_iter() {
-            let now = self.lints.get_level_source(lint_id).val0();
+            let now = self.lints.get_level_source(lint_id).0;
            if now == Forbid && level != Forbid {
                let lint_name = lint_id.as_str();
                self.tcx.sess.span_err(span,

@@ -511,7 +510,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
                               level.as_str(), lint_name,
                               lint_name).as_slice());
            } else if now != level {
-                let src = self.lints.get_level_source(lint_id).val1();
+                let src = self.lints.get_level_source(lint_id).1;
                self.level_stack.push((lint_id, (now, src)));
                pushed += 1;
                self.lints.set_level(lint_id, (level, Node(span)));
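Throughout the lint code the stored values are (level, source) pairs, so the change is mechanical: .0/.1 instead of val0()/val1(). A reduced sketch of the same access pattern on a map of pair values (the types below are simplified stand-ins, not rustc's Level/LintSource):

use std::collections::HashMap;

// Simplified stand-in for rustc's (Level, LintSource) pair.
type LevelSource = (u32, &'static str);

fn main() {
    let mut levels: HashMap<&'static str, LevelSource> = HashMap::new();
    levels.insert("unused_variables", (1, "default"));

    // Pull the pieces out of each stored pair by index.
    let sources: Vec<&'static str> = levels.values().map(|v| v.1).collect();
    let max_level = levels.values().map(|v| v.0).max();

    assert_eq!(sources, vec!["default"]);
    assert_eq!(max_level, Some(1));
}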
@@ -364,7 +364,7 @@ impl<'a> Context<'a> {
         let dypair = self.dylibname();

         // want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
-        let dylib_prefix = format!("{}{}", dypair.ref0(), self.crate_name);
+        let dylib_prefix = format!("{}{}", dypair.0, self.crate_name);
         let rlib_prefix = format!("lib{}", self.crate_name);

         let mut candidates = HashMap::new();

@@ -392,8 +392,8 @@ impl<'a> Context<'a> {
                 (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()),
                  true)
             } else if file.starts_with(dylib_prefix.as_slice()) &&
-                      file.ends_with(dypair.ref1().as_slice()) {
-                (file.slice(dylib_prefix.len(), file.len() - dypair.ref1().len()),
+                      file.ends_with(dypair.1.as_slice()) {
+                (file.slice(dylib_prefix.len(), file.len() - dypair.1.len()),
                  false)
             } else {
                 return FileDoesntMatch
@@ -220,7 +220,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
             let matrix: Matrix = inlined_arms
                 .iter()
                 .filter(|&&(_, guard)| guard.is_none())
-                .flat_map(|arm| arm.ref0().iter())
+                .flat_map(|arm| arm.0.iter())
                 .map(|pat| vec![&**pat])
                 .collect();
             check_exhaustive(cx, ex.span, &matrix);
@@ -53,7 +53,7 @@ pub struct DataflowLabeller<'a, 'tcx: 'a> {

 impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
     fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String {
-        let id = n.val1().data.id;
+        let id = n.1.data.id;
         debug!("dataflow_for({}, id={}) {}", e, id, self.variants);
         let mut sets = "".to_string();
         let mut seen_one = false;

@@ -67,7 +67,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
     }

     fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node, v: Variant) -> String {
-        let cfgidx = n.val0();
+        let cfgidx = n.0;
         match v {
             Loans => self.dataflow_loans_for(e, cfgidx),
             Moves => self.dataflow_moves_for(e, cfgidx),
@@ -91,7 +91,7 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr,
     let vec_ty = ty::expr_ty(cx.tcx(), e);
     let unit_ty = ty::sequence_element_type(cx.tcx(), vec_ty);
     let llunitty = type_of::type_of(cx, unit_ty);
-    let vs = es.iter().map(|e| const_expr(cx, &**e).val0())
+    let vs = es.iter().map(|e| const_expr(cx, &**e).0)
                .collect::<Vec<_>>();
     // If the vector contains enums, an LLVM array won't work.
     let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {

@@ -302,7 +302,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
 // if it's assigned to a static.
 fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
     let map_list = |exprs: &[P<ast::Expr>]| {
-        exprs.iter().map(|e| const_expr(cx, &**e).val0())
+        exprs.iter().map(|e| const_expr(cx, &**e).0)
              .fold(Vec::new(), |mut l, val| { l.push(val); l })
     };
     unsafe {

@@ -575,7 +575,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
             let cs = field_tys.iter().enumerate()
                               .map(|(ix, &field_ty)| {
                 match fs.iter().find(|f| field_ty.name == f.ident.node.name) {
-                    Some(ref f) => const_expr(cx, &*f.expr).val0(),
+                    Some(ref f) => const_expr(cx, &*f.expr).0,
                     None => {
                         match base_val {
                             Some((bv, _)) => {

@@ -594,7 +594,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
             })
         }
         ast::ExprVec(ref es) => {
-            const_vec(cx, e, es.as_slice()).val0()
+            const_vec(cx, e, es.as_slice()).0
         }
         ast::ExprRepeat(ref elem, ref count) => {
             let vec_ty = ty::expr_ty(cx.tcx(), e);

@@ -605,7 +605,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
                 const_eval::const_uint(i) => i as uint,
                 _ => cx.sess().span_bug(count.span, "count must be integral const expression.")
             };
-            let vs = Vec::from_elem(n, const_expr(cx, &**elem).val0());
+            let vs = Vec::from_elem(n, const_expr(cx, &**elem).0);
             if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
                 C_struct(cx, vs.as_slice(), false)
             } else {

@@ -673,10 +673,10 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
             _ => cx.sess().span_bug(e.span, "expected a struct or variant def")
             }
         }
-        ast::ExprParen(ref e) => const_expr(cx, &**e).val0(),
+        ast::ExprParen(ref e) => const_expr(cx, &**e).0,
         ast::ExprBlock(ref block) => {
             match block.expr {
-                Some(ref expr) => const_expr(cx, &**expr).val0(),
+                Some(ref expr) => const_expr(cx, &**expr).0,
                 None => C_nil(cx)
             }
         }
@@ -346,7 +346,7 @@ fn primitive_link(f: &mut fmt::Formatter,
         Some(root) => {
             try!(write!(f, "<a href='{}{}/primitive.{}.html'>",
                         root,
-                        path.ref0().head().unwrap(),
+                        path.0.head().unwrap(),
                         prim.to_url_str()));
             needs_termination = true;
         }

@@ -323,7 +323,7 @@ pub fn run(mut krate: clean::Crate,
     }).unwrap_or(HashMap::new());
     let mut cache = Cache {
         impls: HashMap::new(),
-        external_paths: paths.iter().map(|(&k, v)| (k, v.ref0().clone()))
+        external_paths: paths.iter().map(|(&k, v)| (k, v.0.clone()))
                              .collect(),
         paths: paths,
         implementors: HashMap::new(),
@@ -400,7 +400,7 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche
                           .position(|&(p, _, _)| {
                               p == *pass
                           }) {
-                Some(i) => PASSES[i].val1(),
+                Some(i) => PASSES[i].1,
                 None => {
                     error!("unknown pass {}, skipping", *pass);
                     continue
@@ -151,14 +151,14 @@ fn summarize_item(item: &Item) -> (Counts, Option<ModuleSummary>) {
         ImplItem(Impl { items: ref subitems, trait_: None, .. }) => {
             let subcounts = subitems.iter().filter(|i| visible(*i))
                                     .map(summarize_item)
-                                    .map(|s| s.val0())
+                                    .map(|s| s.0)
                                     .fold(Counts::zero(), |acc, x| acc + x);
             (subcounts, None)
         }
         // `pub` automatically
         EnumItem(Enum { variants: ref subitems, .. }) => {
             let subcounts = subitems.iter().map(summarize_item)
-                                    .map(|s| s.val0())
+                                    .map(|s| s.0)
                                     .fold(Counts::zero(), |acc, x| acc + x);
             (item_counts + subcounts, None)
         }

@@ -176,7 +176,7 @@ fn summarize_item(item: &Item) -> (Counts, Option<ModuleSummary>) {
             let subcounts = trait_items.iter()
                                        .map(extract_item)
                                        .map(summarize_item)
-                                       .map(|s| s.val0())
+                                       .map(|s| s.0)
                                        .fold(Counts::zero(), |acc, x| acc + x);
             (item_counts + subcounts, None)
         }

@@ -212,7 +212,7 @@ pub fn build(krate: &Crate) -> ModuleSummary {
             submodules: Vec::new(),
         },
         Some(ref item) => ModuleSummary {
-            name: krate.name.clone(), .. summarize_item(item).val1().unwrap()
+            name: krate.name.clone(), .. summarize_item(item).1.unwrap()
         }
     }
 }
@@ -379,8 +379,8 @@ impl Process {
         // wait indefinitely for a message to arrive.
         //
         // FIXME: sure would be nice to not have to scan the entire array
-        let min = active.iter().map(|a| *a.ref2()).enumerate().min_by(|p| {
-            p.val1()
+        let min = active.iter().map(|a| a.2).enumerate().min_by(|p| {
+            p.1
         });
         let (p, idx) = match min {
             Some((idx, deadline)) => {
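enumerate() yields (index, value) pairs, so the deadline scan reads both halves of the pair by position. The same shape in isolation, written with today's min_by_key spelling (the patched code uses the min_by adaptor of that era):

fn main() {
    let deadlines = [30u64, 10, 20];

    // enumerate() yields (index, deadline) pairs; pick the pair with the
    // smallest deadline (field .1) and keep its index (field .0).
    let min = deadlines.iter().cloned().enumerate().min_by_key(|p| p.1);

    assert_eq!(min, Some((1, 10)));
    assert_eq!(min.map(|p| p.0), Some(1));
}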
@@ -564,9 +564,9 @@ fn with_envp<K, V, T>(env: Option<&collections::HashMap<K, V>>,

             for pair in env.iter() {
                 let mut kv = Vec::new();
-                kv.push_all(pair.ref0().container_as_bytes());
+                kv.push_all(pair.0.container_as_bytes());
                 kv.push('=' as u8);
-                kv.push_all(pair.ref1().container_as_bytes());
+                kv.push_all(pair.1.container_as_bytes());
                 kv.push(0); // terminating null
                 tmps.push(kv);
             }

@@ -430,8 +430,8 @@ fn with_envp<K, V, T, F>(env: Option<&collections::HashMap<K, V>>, cb: F) -> T w

             for pair in env.iter() {
                 let kv = format!("{}={}",
-                                 pair.ref0().container_as_str().unwrap(),
-                                 pair.ref1().container_as_str().unwrap());
+                                 pair.0.container_as_str().unwrap(),
+                                 pair.1.container_as_str().unwrap());
                 blk.extend(kv.utf16_units());
                 blk.push(0);
             }
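Both with_envp variants walk a HashMap of environment pairs; without the accessors this is plain .0/.1 on each (key, value) reference, and destructuring in the closure is an equally valid spelling. A minimal sketch:

use std::collections::HashMap;

fn main() {
    let mut env: HashMap<String, String> = HashMap::new();
    env.insert("HOME".to_string(), "/root".to_string());

    // Field access on the (key, value) pair, as in the patch:
    let mut joined: Vec<String> = env.iter()
                                     .map(|pair| format!("{}={}", pair.0, pair.1))
                                     .collect();

    // Equivalent, destructuring the pair in the closure instead:
    let mut joined2: Vec<String> = env.iter()
                                      .map(|(k, v)| format!("{}={}", k, v))
                                      .collect();

    joined.sort();
    joined2.sort();
    assert_eq!(joined, joined2);
}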
@@ -431,7 +431,7 @@ pub fn str_lit(lit: &str) -> String {
     /// Eat everything up to a non-whitespace
     fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharOffsets<'a>>) {
         loop {
-            match it.peek().map(|x| x.val1()) {
+            match it.peek().map(|x| x.1) {
                 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
                     it.next();
                 },

@@ -448,7 +448,7 @@ pub fn str_lit(lit: &str) -> String {
             '\\' => {
                 let ch = chars.peek().unwrap_or_else(|| {
                     panic!("{}", error(i).as_slice())
-                }).val1();
+                }).1;

                 if ch == '\n' {
                     eat(&mut chars);

@@ -456,7 +456,7 @@ pub fn str_lit(lit: &str) -> String {
                     chars.next();
                     let ch = chars.peek().unwrap_or_else(|| {
                         panic!("{}", error(i).as_slice())
-                    }).val1();
+                    }).1;

                     if ch != '\n' {
                         panic!("lexer accepted bare CR");

@@ -474,7 +474,7 @@ pub fn str_lit(lit: &str) -> String {
             '\r' => {
                 let ch = chars.peek().unwrap_or_else(|| {
                     panic!("{}", error(i).as_slice())
-                }).val1();
+                }).1;

                 if ch != '\n' {
                     panic!("lexer accepted bare CR");

@@ -600,7 +600,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
     /// Eat everything up to a non-whitespace
     fn eat<'a, I: Iterator<(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
         loop {
-            match it.peek().map(|x| x.val1()) {
+            match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
                     it.next();
                 },

@@ -615,11 +615,11 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
         match chars.next() {
             Some((i, b'\\')) => {
                 let em = error(i);
-                match chars.peek().expect(em.as_slice()).val1() {
+                match chars.peek().expect(em.as_slice()).1 {
                     b'\n' => eat(&mut chars),
                     b'\r' => {
                         chars.next();
-                        if chars.peek().expect(em.as_slice()).val1() != b'\n' {
+                        if chars.peek().expect(em.as_slice()).1 != b'\n' {
                             panic!("lexer accepted bare CR");
                         }
                         eat(&mut chars);

@@ -637,7 +637,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
             },
             Some((i, b'\r')) => {
                 let em = error(i);
-                if chars.peek().expect(em.as_slice()).val1() != b'\n' {
+                if chars.peek().expect(em.as_slice()).1 != b'\n' {
                     panic!("lexer accepted bare CR");
                 }
                 chars.next();

@@ -1740,8 +1740,8 @@ impl<'a> Parser<'a> {
             }
             token::Literal(lit, suf) => {
                 let (suffix_illegal, out) = match lit {
-                    token::Byte(i) => (true, LitByte(parse::byte_lit(i.as_str()).val0())),
-                    token::Char(i) => (true, LitChar(parse::char_lit(i.as_str()).val0())),
+                    token::Byte(i) => (true, LitByte(parse::byte_lit(i.as_str()).0)),
+                    token::Char(i) => (true, LitChar(parse::char_lit(i.as_str()).0)),

                    // there are some valid suffixes for integer and
                    // float literals, so all the handling is done
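The literal-unescaping helpers iterate (byte offset, char) pairs and only ever want the second half after a peek(). The same access in miniature:

fn main() {
    let lit = "   pi";
    let mut it = lit.char_indices().peekable();

    // peek() yields a &(byte_offset, char) pair; `.1` is the character,
    // which is all the whitespace-eating loop in str_lit() cares about.
    while it.peek().map(|x| x.1) == Some(' ') {
        it.next();
    }
    assert_eq!(it.next(), Some((3, 'p')));
}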
@@ -6959,7 +6959,6 @@ pub mod normalization {
 pub mod conversions {
     use core::cmp::Ordering::{Equal, Less, Greater};
     use core::slice::SlicePrelude;
-    use core::tuple::Tuple2;
     use core::option::Option;
     use core::option::Option::{Some, None};
     use core::slice;

@@ -6967,14 +6966,14 @@ pub mod conversions {
     pub fn to_lower(c: char) -> char {
         match bsearch_case_table(c, LuLl_table) {
             None => c,
-            Some(index) => LuLl_table[index].val1()
+            Some(index) => LuLl_table[index].1
         }
     }

     pub fn to_upper(c: char) -> char {
         match bsearch_case_table(c, LlLu_table) {
             None => c,
-            Some(index) => LlLu_table[index].val1()
+            Some(index) => LlLu_table[index].1
         }
     }
@@ -78,7 +78,7 @@ impl<'a> Iterator<u8> for AAGen<'a> {
     fn next(&mut self) -> Option<u8> {
         let r = self.rng.gen();
         self.data.iter()
-            .skip_while(|pc| pc.val0() < r)
+            .skip_while(|pc| pc.0 < r)
             .map(|&(_, c)| c)
             .next()
     }