Merge pull request #843 from serde-rs/ignore

Stop ignoring error in assert_de_tokens_ignore
This commit is contained in:
David Tolnay 2017-04-05 09:36:22 -07:00 committed by GitHub
commit 9b51be4ba6
3 changed files with 74 additions and 18 deletions

View File

@ -53,17 +53,17 @@ impl<'de> Deserialize<'de> for IgnoredAny {
}
#[inline]
fn visit_some<D>(self, _: D) -> Result<IgnoredAny, D::Error>
fn visit_some<D>(self, deserializer: D) -> Result<IgnoredAny, D::Error>
where D: Deserializer<'de>
{
Ok(IgnoredAny)
IgnoredAny::deserialize(deserializer)
}
#[inline]
fn visit_newtype_struct<D>(self, _: D) -> Result<IgnoredAny, D::Error>
fn visit_newtype_struct<D>(self, deserializer: D) -> Result<IgnoredAny, D::Error>
where D: Deserializer<'de>
{
Ok(IgnoredAny)
IgnoredAny::deserialize(deserializer)
}
#[inline]

View File

@ -111,6 +111,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
Some(Token::Option(true)) => visitor.visit_some(self),
Some(Token::Unit) => visitor.visit_unit(),
Some(Token::UnitStruct(_name)) => visitor.visit_unit(),
Some(Token::StructNewType(_name)) => visitor.visit_newtype_struct(self),
Some(Token::SeqStart(len)) => {
self.visit_seq(len, Token::SeqSep, Token::SeqEnd, visitor)
}
@ -132,12 +133,35 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
Some(Token::StructStart(_, len)) => {
self.visit_map(Some(len), Token::StructSep, Token::StructEnd, visitor)
}
Some(Token::EnumStart(_)) => {
let variant = self.next_token().ok_or(Error::EndOfTokens)?;
let next = *self.tokens.first().ok_or(Error::EndOfTokens)?;
match (variant, next) {
(Token::Str(variant), Token::Unit) => {
self.next_token();
visitor.visit_str(variant)
}
(Token::Bytes(variant), Token::Unit) => {
self.next_token();
visitor.visit_bytes(variant)
}
(Token::U32(variant), Token::Unit) => {
self.next_token();
visitor.visit_u32(variant)
}
(variant, Token::Unit) => {
Err(Error::UnexpectedToken(variant))
}
(variant, _) => {
visitor.visit_map(EnumMapVisitor::new(self, variant))
}
}
}
Some(Token::EnumUnit(_, variant)) => visitor.visit_str(variant),
Some(Token::EnumStart(variant)) |
Some(Token::EnumNewType(_, variant)) |
Some(Token::EnumSeqStart(_, variant, _)) |
Some(Token::EnumMapStart(_, variant, _)) => {
visitor.visit_map(EnumMapVisitor::new(self, variant))
visitor.visit_map(EnumMapVisitor::new(self, Token::Str(variant)))
}
Some(token) => Err(Error::UnexpectedToken(token)),
None => Err(Error::EndOfTokens),
@ -159,7 +183,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
self.next_token();
visitor.visit_some(self)
}
Some(_) => visitor.visit_some(self),
Some(_) => self.deserialize(visitor),
None => Err(Error::EndOfTokens),
}
}
@ -542,11 +566,11 @@ impl<'de, 'a> VariantVisitor<'de> for DeserializerEnumVisitor<'a, 'de> {
struct EnumMapVisitor<'a, 'de: 'a> {
de: &'a mut Deserializer<'de>,
variant: Option<&'a str>,
variant: Option<Token>,
}
impl<'a, 'de> EnumMapVisitor<'a, 'de> {
fn new(de: &'a mut Deserializer<'de>, variant: &'a str) -> Self {
fn new(de: &'a mut Deserializer<'de>, variant: Token) -> Self {
EnumMapVisitor {
de: de,
variant: Some(variant),
@ -561,7 +585,18 @@ impl<'de, 'a> MapVisitor<'de> for EnumMapVisitor<'a, 'de> {
where K: DeserializeSeed<'de>
{
match self.variant.take() {
Some(variant) => seed.deserialize(variant.into_deserializer()).map(Some),
Some(Token::Str(variant)) => {
seed.deserialize(variant.into_deserializer()).map(Some)
}
Some(Token::Bytes(variant)) => {
seed.deserialize(BytesDeserializer { value: variant }).map(Some)
}
Some(Token::U32(variant)) => {
seed.deserialize(variant.into_deserializer()).map(Some)
}
Some(other) => {
Err(Error::UnexpectedToken(other))
}
None => Ok(None),
}
}
@ -600,3 +635,23 @@ impl<'de, 'a> MapVisitor<'de> for EnumMapVisitor<'a, 'de> {
}
}
}
// A minimal `Deserializer` that produces exactly one `&'static [u8]` value.
// Used by `EnumMapVisitor::visit_key` to deserialize an enum variant key that
// was supplied as `Token::Bytes(..)` (strings and u32 keys go through
// `into_deserializer()` instead).
struct BytesDeserializer {
value: &'static [u8],
}
impl<'de> de::Deserializer<'de> for BytesDeserializer {
type Error = Error;
// Hand the stored byte slice straight to the visitor, regardless of the
// type the caller hinted at.
fn deserialize<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where V: de::Visitor<'de>
{
visitor.visit_bytes(self.value)
}
// Every typed entry point (bool, u8, str, enum, ...) funnels into the
// `deserialize` method above.
forward_to_deserialize! {
bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit option
seq seq_fixed_size bytes map unit_struct newtype_struct tuple_struct
struct struct_field tuple enum ignored_any byte_buf
}
}

View File

@ -35,6 +35,9 @@ mod macros;
// Simple target types for the deserialization tests below.
#[derive(Copy, Clone, PartialEq, Debug, Deserialize)]
struct UnitStruct;
// Single-field newtype struct; exercised by `test_newtype_struct`
// via `Token::StructNewType`.
#[derive(PartialEq, Debug, Deserialize)]
struct NewtypeStruct(i32);
// Three-field tuple struct; exercised by `test_tuple_struct` via a
// `Token::SeqStart(Some(3))` token stream.
#[derive(PartialEq, Debug, Deserialize)]
struct TupleStruct(i32, i32, i32);
@ -166,13 +169,6 @@ fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) {
let mut de = serde_test::Deserializer::new(&concated_tokens);
let v: Result<IgnoreBase, Error> = Deserialize::deserialize(&mut de);
// We run this test on every token stream for convenience, but
// some token streams don't make sense embedded as a map value,
// so we ignore those. SyntaxError is the real sign of trouble.
if let Err(Error::UnexpectedToken(_)) = v {
return;
}
assert_eq!(v.as_ref(), Ok(&expected));
assert_eq!(de.next_token(), None);
}
@ -225,7 +221,6 @@ declare_tests! {
test_option {
None::<i32> => &[Token::Unit],
None::<i32> => &[Token::Option(false)],
Some(1) => &[Token::I32(1)],
Some(1) => &[
Token::Option(true),
Token::I32(1),
@ -260,6 +255,12 @@ declare_tests! {
Token::SeqEnd,
],
}
test_newtype_struct {
NewtypeStruct(1) => &[
Token::StructNewType("NewtypeStruct"),
Token::I32(1),
],
}
test_tuple_struct {
TupleStruct(1, 2, 3) => &[
Token::SeqStart(Some(3)),