add size hints: 167ns vs 290ns

commit 6d13ee2ec8 (parent db242fed53)

de.rs (45 changed lines)
@@ -24,7 +24,7 @@ pub enum Token {
     Char(char),
     Str(&'static str),
     StrBuf(StrBuf),
-    CollectionStart,
+    CollectionStart(uint),
     CollectionSep,
     CollectionEnd,
 }
@@ -84,7 +84,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
     fn expect_null(&mut self, token: Token) -> Result<(), E> {
         decode_primitive!(
             Null => Ok(()),
-            CollectionStart => {
+            CollectionStart(_) => {
                 let token = try!(self.expect_token());
                 self.expect_collection_end(token)
             }
@@ -189,7 +189,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
         T: Deserializable<E, Self>,
         C: FromIterator<T>
     >(&mut self, token: Token) -> Result<C, E> {
-        try!(self.expect_collection_start(token));
+        let len = try!(self.expect_collection_start(token));

         let iter = self.by_ref().batch(|d| {
             let d = d.iter();
@@ -216,13 +216,13 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
             }
         });

-        result::collect(iter)
+        result::collect_with_capacity(iter, len)
     }

     #[inline]
-    fn expect_collection_start(&mut self, token: Token) -> Result<(), E> {
+    fn expect_collection_start(&mut self, token: Token) -> Result<uint, E> {
         match token {
-            CollectionStart => Ok(()),
+            CollectionStart(len) => Ok(len),
             _ => Err(self.syntax_error()),
         }
     }
@@ -489,7 +489,8 @@ mod tests {
             match self.state {
                 Start => {
                     self.state = Sep;
-                    Some(Ok(CollectionStart))
+                    let (lower, _) = self.iter.size_hint();
+                    Some(Ok(CollectionStart(lower)))
                 }
                 Sep => {
                     match self.iter.next() {
@@ -669,7 +670,7 @@ mod tests {
     #[test]
     fn test_tokens_tuple_empty() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(0),
             CollectionEnd,
         );

@@ -682,7 +683,7 @@ mod tests {
     #[test]
     fn test_tokens_tuple() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
             Int(5),

@@ -700,13 +701,13 @@ mod tests {
     #[test]
     fn test_tokens_tuple_compound() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
-            CollectionStart,
+            CollectionStart(0),
             CollectionEnd,

             CollectionSep,
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
             Int(5),
             CollectionSep,
@@ -724,7 +725,7 @@ mod tests {
     #[test]
     fn test_tokens_vec_empty() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(0),
            CollectionEnd,
         );

@@ -737,7 +738,7 @@ mod tests {
     #[test]
     fn test_tokens_vec() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(3),
             CollectionSep,
             Int(5),

@@ -758,15 +759,15 @@ mod tests {
     #[test]
     fn test_tokens_vec_compound() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(0),
             CollectionSep,
-            CollectionStart,
+            CollectionStart(1),
             CollectionSep,
             Int(1),
             CollectionEnd,

             CollectionSep,
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
             Int(2),

@@ -775,7 +776,7 @@ mod tests {
             CollectionEnd,

             CollectionSep,
-            CollectionStart,
+            CollectionStart(3),
             CollectionSep,
             Int(4),

@@ -797,9 +798,9 @@ mod tests {
     #[test]
     fn test_tokens_hashmap() {
         let tokens = vec!(
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
             Int(5),

@@ -808,7 +809,7 @@ mod tests {
             CollectionEnd,

             CollectionSep,
-            CollectionStart,
+            CollectionStart(2),
             CollectionSep,
             Int(6),

@@ -832,7 +833,7 @@ mod tests {
     fn bench_dummy_deserializer(b: &mut Bencher) {
         b.iter(|| {
             let tokens = vec!(
-                CollectionStart,
+                CollectionStart(3),
                 CollectionSep,
                 Int(5),

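The change above follows one pattern: the token producer reports how many elements a collection will contain (the test deserializer takes the lower bound of its inner iterator's size_hint()), and expect_collection forwards that number to result::collect_with_capacity so the target container can allocate once up front. The 167ns vs 290ns in the commit message presumably comes from running bench_dummy_deserializer with and without the hint. A minimal sketch of the same idea in present-day Rust; collect_with_capacity here is a hypothetical standalone helper written for illustration, not this crate's result::collect_with_capacity:

// Sketch only: shows why a size hint speeds up collection.
fn collect_with_capacity<I, T>(iter: I, len: usize) -> Vec<T>
where
    I: Iterator<Item = T>,
{
    let mut out = Vec::with_capacity(len); // one allocation up front
    out.extend(iter);                      // no reallocation if `len` was accurate
    out
}

fn main() {
    let tokens = 0..1_000;
    // Same trick as the test deserializer: use the iterator's lower size bound.
    let (lower, _) = tokens.size_hint();
    let values = collect_with_capacity(tokens, lower);
    assert_eq!(values.len(), 1_000);
}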
json.rs (4 changed lines)
@@ -1172,9 +1172,9 @@ impl<T: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<T> {
         }

         match self.parse() {
-            ObjectStart => Some(Ok(de::CollectionStart)),
+            ObjectStart => Some(Ok(de::CollectionStart(0))),
             ObjectEnd => Some(Ok(de::CollectionEnd)),
-            ListStart => Some(Ok(de::CollectionStart)),
+            ListStart => Some(Ok(de::CollectionStart(0))),
             ListEnd => Some(Ok(de::CollectionEnd)),
             NullValue => Some(Ok(de::Null)),
             BooleanValue(value) => Some(Ok(de::Bool(value))),
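Note: unlike the in-memory test deserializer, the streaming JSON parser cannot know how many elements follow an ObjectStart or ListStart before it has parsed them, so it reports a conservative hint of 0; a zero hint simply falls back to the collection's normal growth behaviour.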