Fix handling of option and newtype in IgnoredAny

This commit is contained in:
David Tolnay 2017-04-05 09:19:22 -07:00
parent b2377d4c0b
commit c3d9b42cdf
No known key found for this signature in database
GPG Key ID: F9BA143B95FF6D82
3 changed files with 15 additions and 13 deletions

View File

@@ -53,17 +53,17 @@ impl<'de> Deserialize<'de> for IgnoredAny {
}
#[inline]
fn visit_some<D>(self, _: D) -> Result<IgnoredAny, D::Error>
fn visit_some<D>(self, deserializer: D) -> Result<IgnoredAny, D::Error>
where D: Deserializer<'de>
{
Ok(IgnoredAny)
IgnoredAny::deserialize(deserializer)
}
#[inline]
fn visit_newtype_struct<D>(self, _: D) -> Result<IgnoredAny, D::Error>
fn visit_newtype_struct<D>(self, deserializer: D) -> Result<IgnoredAny, D::Error>
where D: Deserializer<'de>
{
Ok(IgnoredAny)
IgnoredAny::deserialize(deserializer)
}
#[inline]

View File

@@ -111,6 +111,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
Some(Token::Option(true)) => visitor.visit_some(self),
Some(Token::Unit) => visitor.visit_unit(),
Some(Token::UnitStruct(_name)) => visitor.visit_unit(),
Some(Token::StructNewType(_name)) => visitor.visit_newtype_struct(self),
Some(Token::SeqStart(len)) => {
self.visit_seq(len, Token::SeqSep, Token::SeqEnd, visitor)
}
@@ -182,7 +183,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
self.next_token();
visitor.visit_some(self)
}
Some(_) => visitor.visit_some(self),
Some(_) => self.deserialize(visitor),
None => Err(Error::EndOfTokens),
}
}

View File

@@ -35,6 +35,9 @@ mod macros;
#[derive(Copy, Clone, PartialEq, Debug, Deserialize)]
struct UnitStruct;
#[derive(PartialEq, Debug, Deserialize)]
struct NewtypeStruct(i32);
#[derive(PartialEq, Debug, Deserialize)]
struct TupleStruct(i32, i32, i32);
@@ -166,13 +169,6 @@ fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) {
let mut de = serde_test::Deserializer::new(&concated_tokens);
let v: Result<IgnoreBase, Error> = Deserialize::deserialize(&mut de);
// We run this test on every token stream for convenience, but
// some token streams don't make sense embedded as a map value,
// so we ignore those. SyntaxError is the real sign of trouble.
if let Err(Error::UnexpectedToken(_)) = v {
return;
}
assert_eq!(v.as_ref(), Ok(&expected));
assert_eq!(de.next_token(), None);
}
@@ -225,7 +221,6 @@ declare_tests! {
test_option {
None::<i32> => &[Token::Unit],
None::<i32> => &[Token::Option(false)],
Some(1) => &[Token::I32(1)],
Some(1) => &[
Token::Option(true),
Token::I32(1),
@@ -260,6 +255,12 @@ declare_tests! {
Token::SeqEnd,
],
}
test_newtype_struct {
NewtypeStruct(1) => &[
Token::StructNewType("NewtypeStruct"),
Token::I32(1),
],
}
test_tuple_struct {
TupleStruct(1, 2, 3) => &[
Token::SeqStart(Some(3)),