Remove error from serde_test public API
parent 974c8434e9, commit 26a6ba177c
@@ -9,7 +9,6 @@
use serde::{Serialize, Deserialize};

use de::Deserializer;
use error::Error;
use ser::Serializer;
use token::Token;

@@ -41,14 +40,53 @@ where
}

/// Asserts that `value` serializes to the given `tokens`, and then yields `error`.
pub fn assert_ser_tokens_error<T>(value: &T, tokens: &[Token], expected: Error)
///
/// ```rust
/// # #[macro_use]
/// # extern crate serde_derive;
/// #
/// # extern crate serde_test;
/// #
/// # fn main() {
/// use std::sync::{Arc, Mutex};
/// use std::thread;
///
/// use serde_test::{assert_ser_tokens_error, Token, Error};
///
/// #[derive(Serialize)]
/// struct Example {
/// lock: Arc<Mutex<u32>>,
/// }
///
/// let example = Example { lock: Arc::new(Mutex::new(0)) };
/// let lock = example.lock.clone();
///
/// let _ = thread::spawn(move || {
/// // This thread will acquire the mutex first, unwrapping the result
/// // of `lock` because the lock has not been poisoned.
/// let _guard = lock.lock().unwrap();
///
/// // This panic while holding the lock (`_guard` is in scope) will
/// // poison the mutex.
/// panic!()
/// }).join();
///
/// let expected = &[
/// Token::Struct("Example", 1),
/// Token::Str("lock"),
/// ];
/// let error = Error::Message("lock poison error while serializing".to_owned());
/// assert_ser_tokens_error(&example, expected, error);
/// # }
/// ```
pub fn assert_ser_tokens_error<T>(value: &T, tokens: &[Token], error: &str)
where
T: Serialize,
{
let mut ser = Serializer::new(tokens);
match value.serialize(&mut ser) {
Ok(_) => panic!("value serialized successfully"),
Err(err) => assert_eq!(err, expected),
Err(e) => assert_eq!(e, *error),
}

if ser.remaining() > 0 {
@@ -64,7 +102,7 @@ where
let mut de = Deserializer::new(tokens);
match T::deserialize(&mut de) {
Ok(v) => assert_eq!(v, *value),
Err(err) => panic!("tokens failed to deserialize: {}", err),
Err(e) => panic!("tokens failed to deserialize: {}", e),
}

if de.remaining() > 0 {
@@ -72,19 +110,19 @@ where
}
}

/// Asserts that the given `tokens` yield `expected` error when deserializing.
pub fn assert_de_tokens_error<'de, T>(tokens: &'de [Token], expected: Error)
/// Asserts that the given `tokens` yield `error` when deserializing.
pub fn assert_de_tokens_error<'de, T>(tokens: &'de [Token], error: &str)
where
T: Deserialize<'de>,
{
let mut de = Deserializer::new(tokens);
match T::deserialize(&mut de) {
Ok(_) => panic!("tokens deserialized successfully"),
Err(err) => assert_eq!(err, expected),
Err(e) => assert_eq!(e, *error),
}

// There may be one token left if a peek caused the error
de.next_token();
de.next_token_opt();

if de.remaining() > 0 {
panic!("{} remaining tokens", de.remaining());
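Taken together, the assert.rs hunks change the error-asserting helpers to take the expected message as a plain `&str` instead of a `serde_test::Error` value. A minimal, hedged sketch of the caller-side migration, reusing the bool/string case that appears in the test changes further down in this commit:

```rust
// Sketch only: the bool example and its message are borrowed from the
// declare_error_tests! changes later in this diff.
extern crate serde_test;
use serde_test::{assert_de_tokens_error, Token};

fn main() {
    // Old style (no longer compiles): the message was wrapped as
    //     Error::Message("invalid type: string \"false\", expected a boolean".to_owned())
    // New style: the expected error is just a string slice.
    assert_de_tokens_error::<bool>(
        &[Token::Str("false")],
        "invalid type: string \"false\", expected a boolean",
    );
}
```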
@@ -13,39 +13,72 @@ use serde::de::value::{MapAccessDeserializer, SeqAccessDeserializer};
use error::Error;
use token::Token;

/// A `Deserializer` that reads from a list of tokens.
#[derive(Debug)]
pub struct Deserializer<'de> {
tokens: &'de [Token],
}

macro_rules! assert_next_token {
($de:expr, $expected:expr) => {
match $de.next_token_opt() {
Some(token) if token == $expected => {}
Some(other) => {
panic!("expected Token::{} but deserialization wants Token::{}",
other, $expected)
}
None => {
panic!("end of tokens but deserialization wants Token::{}",
$expected)
}
}
}
}

macro_rules! unexpected {
($token:expr) => {
panic!("deserialization did not expect this token: {}", $token)
}
}

macro_rules! end_of_tokens {
() => {
panic!("ran out of tokens to deserialize")
}
}

impl<'de> Deserializer<'de> {
/// Creates the deserializer.
pub fn new(tokens: &'de [Token]) -> Self {
Deserializer { tokens: tokens }
}

/// Pulls the next token off of the deserializer, ignoring it.
pub fn next_token(&mut self) -> Option<Token> {
if let Some((&first, rest)) = self.tokens.split_first() {
self.tokens = rest;
Some(first)
} else {
None
fn peek_token_opt(&self) -> Option<Token> {
self.tokens.first().cloned()
}

fn peek_token(&self) -> Token {
match self.peek_token_opt() {
Some(token) => token,
None => end_of_tokens!(),
}
}

/// Pulls the next token off of the deserializer and checks if it matches an expected token.
pub fn expect_token(&mut self, expected: Token) -> Result<(), Error> {
match self.next_token() {
Some(token) => {
if expected == token {
Ok(())
} else {
Err(Error::UnexpectedToken(token))
}
pub fn next_token_opt(&mut self) -> Option<Token> {
match self.tokens.split_first() {
Some((&first, rest)) => {
self.tokens = rest;
Some(first)
}
None => Err(Error::EndOfTokens),
None => None,
}
}

fn next_token(&mut self) -> Token {
match self.tokens.split_first() {
Some((&first, rest)) => {
self.tokens = rest;
first
}
None => end_of_tokens!(),
}
}

@@ -71,7 +104,7 @@ impl<'de> Deserializer<'de> {
},
)
);
try!(self.expect_token(end));
assert_next_token!(self, end);
Ok(value)
}

@@ -93,7 +126,7 @@ impl<'de> Deserializer<'de> {
},
)
);
try!(self.expect_token(end));
assert_next_token!(self, end);
Ok(value)
}
}
@@ -110,7 +143,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
let token = self.next_token().ok_or(Error::EndOfTokens)?;
let token = self.next_token();
match token {
Token::Bool(v) => visitor.visit_bool(v),
Token::I8(v) => visitor.visit_i8(v),
@@ -141,8 +174,8 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
Token::Map(len) => self.visit_map(len, Token::MapEnd, visitor),
Token::Struct(_, len) => self.visit_map(Some(len), Token::StructEnd, visitor),
Token::Enum(_) => {
let variant = self.next_token().ok_or(Error::EndOfTokens)?;
let next = *self.tokens.first().ok_or(Error::EndOfTokens)?;
let variant = self.next_token();
let next = self.peek_token();
match (variant, next) {
(Token::Str(variant), Token::Unit) => {
self.next_token();
@@ -156,7 +189,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
self.next_token();
visitor.visit_u32(variant)
}
(variant, Token::Unit) => Err(Error::UnexpectedToken(variant)),
(variant, Token::Unit) => unexpected!(variant),
(variant, _) => {
visitor.visit_map(EnumMapVisitor::new(self, variant, EnumFormat::Any))
}
@@ -174,94 +207,79 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
}
Token::SeqEnd | Token::TupleEnd | Token::TupleStructEnd | Token::MapEnd |
Token::StructEnd | Token::TupleVariantEnd | Token::StructVariantEnd => {
Err(Error::UnexpectedToken(token))
unexpected!(token);
}
}
}

/// Hook into `Option` deserializing so we can treat `Unit` as a
/// `None`, or a regular value as `Some(value)`.
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::Unit) |
Some(&Token::None) => {
match self.peek_token() {
Token::Unit |
Token::None => {
self.next_token();
visitor.visit_none()
}
Some(&Token::Some) => {
Token::Some => {
self.next_token();
visitor.visit_some(self)
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}

fn deserialize_enum<V>(
self,
name: &str,
name: &'static str,
_variants: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::Enum(n)) if name == n => {
match self.peek_token() {
Token::Enum(n) if name == n => {
self.next_token();

visitor.visit_enum(DeserializerEnumVisitor { de: self })
}
Some(&Token::UnitVariant(n, _)) |
Some(&Token::NewtypeVariant(n, _)) |
Some(&Token::TupleVariant(n, _, _)) |
Some(&Token::StructVariant(n, _, _)) if name == n => {
Token::UnitVariant(n, _) |
Token::NewtypeVariant(n, _) |
Token::TupleVariant(n, _, _) |
Token::StructVariant(n, _, _) if name == n => {
visitor.visit_enum(DeserializerEnumVisitor { de: self })
}
Some(_) => {
let token = self.next_token().unwrap();
Err(Error::UnexpectedToken(token))
_ => {
unexpected!(self.next_token());
}
None => Err(Error::EndOfTokens),
}
}

fn deserialize_unit_struct<V>(self, name: &str, visitor: V) -> Result<V::Value, Error>
fn deserialize_unit_struct<V>(self, name: &'static str, visitor: V) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::UnitStruct(n)) => {
self.next_token();
if name == n {
visitor.visit_unit()
} else {
Err(Error::InvalidName(n))
}
match self.peek_token() {
Token::UnitStruct(_) => {
assert_next_token!(self, Token::UnitStruct(name));
visitor.visit_unit()
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}

fn deserialize_newtype_struct<V>(self, name: &str, visitor: V) -> Result<V::Value, Error>
fn deserialize_newtype_struct<V>(self, name: &'static str, visitor: V) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::NewtypeStruct(n)) => {
self.next_token();
if name == n {
visitor.visit_newtype_struct(self)
} else {
Err(Error::InvalidName(n))
}
match self.peek_token() {
Token::NewtypeStruct(_) => {
assert_next_token!(self, Token::NewtypeStruct(name));
visitor.visit_newtype_struct(self)
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}
@@ -269,96 +287,81 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::Unit) |
Some(&Token::UnitStruct(_)) => {
match self.peek_token() {
Token::Unit |
Token::UnitStruct(_) => {
self.next_token();
visitor.visit_unit()
}
Some(&Token::Seq(_)) => {
Token::Seq(_) => {
self.next_token();
self.visit_seq(Some(len), Token::SeqEnd, visitor)
}
Some(&Token::Tuple(_)) => {
Token::Tuple(_) => {
self.next_token();
self.visit_seq(Some(len), Token::TupleEnd, visitor)
}
Some(&Token::TupleStruct(_, _)) => {
Token::TupleStruct(_, _) => {
self.next_token();
self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}

fn deserialize_tuple_struct<V>(
self,
name: &str,
name: &'static str,
len: usize,
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::Unit) => {
match self.peek_token() {
Token::Unit => {
self.next_token();
visitor.visit_unit()
}
Some(&Token::UnitStruct(n)) => {
self.next_token();
if name == n {
visitor.visit_unit()
} else {
Err(Error::InvalidName(n))
}
Token::UnitStruct(_) => {
assert_next_token!(self, Token::UnitStruct(name));
visitor.visit_unit()
}
Some(&Token::Seq(_)) => {
Token::Seq(_) => {
self.next_token();
self.visit_seq(Some(len), Token::SeqEnd, visitor)
}
Some(&Token::Tuple(_)) => {
Token::Tuple(_) => {
self.next_token();
self.visit_seq(Some(len), Token::TupleEnd, visitor)
}
Some(&Token::TupleStruct(n, _)) => {
self.next_token();
if name == n {
self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
} else {
Err(Error::InvalidName(n))
}
Token::TupleStruct(_, n) => {
assert_next_token!(self, Token::TupleStruct(name, n));
self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}

fn deserialize_struct<V>(
self,
name: &str,
name: &'static str,
fields: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
match self.tokens.first() {
Some(&Token::Struct(n, _)) => {
self.next_token();
if name == n {
self.visit_map(Some(fields.len()), Token::StructEnd, visitor)
} else {
Err(Error::InvalidName(n))
}
match self.peek_token() {
Token::Struct(_, n) => {
assert_next_token!(self, Token::Struct(name, n));
self.visit_map(Some(fields.len()), Token::StructEnd, visitor)
}
Some(&Token::Map(_)) => {
Token::Map(_) => {
self.next_token();
self.visit_map(Some(fields.len()), Token::MapEnd, visitor)
}
Some(_) => self.deserialize_any(visitor),
None => Err(Error::EndOfTokens),
_ => self.deserialize_any(visitor),
}
}
}
@@ -378,7 +381,7 @@ impl<'de, 'a> SeqAccess<'de> for DeserializerSeqVisitor<'a, 'de> {
where
T: DeserializeSeed<'de>,
{
if self.de.tokens.first() == Some(&self.end) {
if self.de.peek_token_opt() == Some(self.end) {
return Ok(None);
}
self.len = self.len.map(|len| len.saturating_sub(1));
@@ -405,7 +408,7 @@ impl<'de, 'a> MapAccess<'de> for DeserializerMapVisitor<'a, 'de> {
where
K: DeserializeSeed<'de>,
{
if self.de.tokens.first() == Some(&self.end) {
if self.de.peek_token_opt() == Some(self.end) {
return Ok(None);
}
self.len = self.len.map(|len| len.saturating_sub(1));
@@ -438,20 +441,19 @@ impl<'de, 'a> EnumAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: DeserializeSeed<'de>,
{
match self.de.tokens.first() {
Some(&Token::UnitVariant(_, v)) |
Some(&Token::NewtypeVariant(_, v)) |
Some(&Token::TupleVariant(_, v, _)) |
Some(&Token::StructVariant(_, v, _)) => {
match self.de.peek_token() {
Token::UnitVariant(_, v) |
Token::NewtypeVariant(_, v) |
Token::TupleVariant(_, v, _) |
Token::StructVariant(_, v, _) => {
let de = v.into_deserializer();
let value = try!(seed.deserialize(de));
Ok((value, self))
}
Some(_) => {
_ => {
let value = try!(seed.deserialize(&mut *self.de));
Ok((value, self))
}
None => Err(Error::EndOfTokens),
}
}
}
@@ -460,13 +462,12 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
type Error = Error;

fn unit_variant(self) -> Result<(), Error> {
match self.de.tokens.first() {
Some(&Token::UnitVariant(_, _)) => {
match self.de.peek_token() {
Token::UnitVariant(_, _) => {
self.de.next_token();
Ok(())
}
Some(_) => Deserialize::deserialize(self.de),
None => Err(Error::EndOfTokens),
_ => Deserialize::deserialize(self.de),
}
}

@@ -474,13 +475,12 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
T: DeserializeSeed<'de>,
{
match self.de.tokens.first() {
Some(&Token::NewtypeVariant(_, _)) => {
match self.de.peek_token() {
Token::NewtypeVariant(_, _) => {
self.de.next_token();
seed.deserialize(self.de)
}
Some(_) => seed.deserialize(self.de),
None => Err(Error::EndOfTokens),
_ => seed.deserialize(self.de),
}
}

@@ -488,28 +488,27 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: Visitor<'de>,
{
match self.de.tokens.first() {
Some(&Token::TupleVariant(_, _, enum_len)) => {
let token = self.de.next_token().unwrap();
match self.de.peek_token() {
Token::TupleVariant(_, _, enum_len) => {
let token = self.de.next_token();

if len == enum_len {
self.de
.visit_seq(Some(len), Token::TupleVariantEnd, visitor)
} else {
Err(Error::UnexpectedToken(token))
unexpected!(token);
}
}
Some(&Token::Seq(Some(enum_len))) => {
let token = self.de.next_token().unwrap();
Token::Seq(Some(enum_len)) => {
let token = self.de.next_token();

if len == enum_len {
self.de.visit_seq(Some(len), Token::SeqEnd, visitor)
} else {
Err(Error::UnexpectedToken(token))
unexpected!(token);
}
}
Some(_) => de::Deserializer::deserialize_any(self.de, visitor),
None => Err(Error::EndOfTokens),
_ => de::Deserializer::deserialize_any(self.de, visitor),
}
}

@@ -517,29 +516,28 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: Visitor<'de>,
{
match self.de.tokens.first() {
Some(&Token::StructVariant(_, _, enum_len)) => {
let token = self.de.next_token().unwrap();
match self.de.peek_token() {
Token::StructVariant(_, _, enum_len) => {
let token = self.de.next_token();

if fields.len() == enum_len {
self.de
.visit_map(Some(fields.len()), Token::StructVariantEnd, visitor)
} else {
Err(Error::UnexpectedToken(token))
unexpected!(token);
}
}
Some(&Token::Map(Some(enum_len))) => {
let token = self.de.next_token().unwrap();
Token::Map(Some(enum_len)) => {
let token = self.de.next_token();

if fields.len() == enum_len {
self.de
.visit_map(Some(fields.len()), Token::MapEnd, visitor)
} else {
Err(Error::UnexpectedToken(token))
unexpected!(token);
}
}
Some(_) => de::Deserializer::deserialize_any(self.de, visitor),
None => Err(Error::EndOfTokens),
_ => de::Deserializer::deserialize_any(self.de, visitor),
}
}
}
@@ -582,7 +580,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
.map(Some)
}
Some(Token::U32(variant)) => seed.deserialize(variant.into_deserializer()).map(Some),
Some(other) => Err(Error::UnexpectedToken(other)),
Some(other) => unexpected!(other),
None => Ok(None),
}
}
@@ -601,7 +599,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
};
try!(seed.deserialize(SeqAccessDeserializer::new(visitor)))
};
try!(self.de.expect_token(Token::TupleVariantEnd));
assert_next_token!(self.de, Token::TupleVariantEnd);
Ok(value)
}
EnumFormat::Map => {
@@ -613,7 +611,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
};
try!(seed.deserialize(MapAccessDeserializer::new(visitor)))
};
try!(self.de.expect_token(Token::StructVariantEnd));
assert_next_token!(self.de, Token::StructVariantEnd);
Ok(value)
}
EnumFormat::Any => seed.deserialize(&mut *self.de),
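These de.rs hunks replace the `Err(Error::UnexpectedToken(..))` and `Err(Error::EndOfTokens)` returns with the `assert_next_token!`, `unexpected!` and `end_of_tokens!` panics, so a malformed token stream now aborts the test instead of surfacing as an `Error` value. An illustrative sketch (not part of the diff) of how that looks from a test, using a deliberately truncated tuple stream:

```rust
// Hedged sketch: the (u8, u8) value and the truncated token stream are chosen
// only to trigger the new panic-based failure path.
extern crate serde_test;
use serde_test::{assert_de_tokens, Token};

fn main() {
    let result = std::panic::catch_unwind(|| {
        // The stream ends before the second tuple element, so the deserializer
        // hits end_of_tokens!() and panics rather than returning
        // Err(Error::EndOfTokens) as it did before this commit.
        assert_de_tokens(&(1u8, 2u8), &[Token::Tuple(2), Token::U8(1)]);
    });
    assert!(result.is_err(), "a malformed token stream should now panic");
}
```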
@@ -11,93 +11,37 @@ use std::fmt::{self, Display};

use serde::{ser, de};

use token::Token;

/// Error expected in `assert_ser_tokens_error` and `assert_de_tokens_error`.
#[derive(Clone, PartialEq, Debug)]
pub enum Error {
/// A custom error.
///
/// ```rust
/// # #[macro_use]
/// # extern crate serde_derive;
/// #
/// # extern crate serde_test;
/// #
/// # fn main() {
/// use std::sync::{Arc, Mutex};
/// use std::thread;
///
/// use serde_test::{assert_ser_tokens_error, Token, Error};
///
/// #[derive(Serialize)]
/// struct Example {
/// lock: Arc<Mutex<u32>>,
/// }
///
/// let example = Example { lock: Arc::new(Mutex::new(0)) };
/// let lock = example.lock.clone();
///
/// let _ = thread::spawn(move || {
/// // This thread will acquire the mutex first, unwrapping the result
/// // of `lock` because the lock has not been poisoned.
/// let _guard = lock.lock().unwrap();
///
/// // This panic while holding the lock (`_guard` is in scope) will
/// // poison the mutex.
/// panic!()
/// }).join();
///
/// let expected = &[
/// Token::Struct("Example", 1),
/// Token::Str("lock"),
/// ];
/// let error = Error::Message("lock poison error while serializing".to_owned());
/// assert_ser_tokens_error(&example, expected, error);
/// # }
/// ```
Message(String),

/// `Deserialize` was expecting a struct of one name, and another was found.
InvalidName(&'static str),

/// `Serialize` generated a token that didn't match the test.
UnexpectedToken(Token),

/// The expected token list was too short.
EndOfTokens,
#[derive(Clone, Debug)]
pub struct Error {
msg: String,
}

impl ser::Error for Error {
fn custom<T: Display>(msg: T) -> Error {
Error::Message(msg.to_string())
Error { msg: msg.to_string() }
}
}

impl de::Error for Error {
fn custom<T: Display>(msg: T) -> Error {
Error::Message(msg.to_string())
Error { msg: msg.to_string() }
}
}

impl fmt::Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Message(ref msg) => formatter.write_str(msg),
Error::InvalidName(name) => write!(formatter, "invalid name `{}`", name),
Error::UnexpectedToken(_) => formatter.write_str("unexpected token"),
Error::EndOfTokens => formatter.write_str("end of tokens"),
}
formatter.write_str(&self.msg)
}
}

impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Message(ref msg) => msg,
Error::InvalidName(_) => "invalid name",
Error::UnexpectedToken(_) => "unexpected token",
Error::EndOfTokens => "end of tokens",
}
&self.msg
}
}

impl PartialEq<str> for Error {
fn eq(&self, other: &str) -> bool {
self.msg == other
}
}
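With this hunk `Error` becomes an opaque wrapper around a message: it can only be constructed through `ser::Error::custom` / `de::Error::custom`, and the new `PartialEq<str>` impl is what lets the assert helpers compare it directly against a `&str`. A small sketch of that comparison (the message string is reused from the doc example above):

```rust
extern crate serde;
extern crate serde_test;

use serde::de::Error as DeError;
use serde_test::Error;

fn main() {
    // The only way to build an Error now is through the `custom` constructors.
    let err = <Error as DeError>::custom("lock poison error while serializing");
    // PartialEq<str> compares the wrapped message against a plain string.
    assert_eq!(err, *"lock poison error while serializing");
}
```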
@@ -11,15 +11,17 @@
#[macro_use]
extern crate serde;

mod ser;
mod de;
mod error;

mod token;
mod assert;

pub use token::Token;
pub use assert::{assert_tokens, assert_ser_tokens, assert_ser_tokens_error, assert_de_tokens,
assert_de_tokens_error};

mod ser;
mod de;

mod token;
pub use token::Token;

mod error;
pub use error::Error;
// Not public API.
#[doc(hidden)]
pub use de::Deserializer;
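After the lib.rs reshuffle the public surface is `Token`, the assert helpers, the now-opaque `Error`, and a `#[doc(hidden)]` `Deserializer`. A trivial sketch of what a downstream test needs after this commit:

```rust
// Minimal round-trip through the public API; Error no longer needs importing
// for the common case because expected errors are plain string slices.
extern crate serde_test;
use serde_test::{assert_tokens, Token};

fn main() {
    assert_tokens(&1u8, &[Token::U8(1)]);
}
```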
@@ -6,6 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::{self, Debug, Display};

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Token {
/// A serialized `bool`.
@@ -146,3 +148,9 @@ pub enum Token {
/// An indicator of the end of a struct, similar to `StructEnd`.
StructVariantEnd,
}

impl Display for Token {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(self, formatter)
}
}
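The new `Display` impl on `Token` simply delegates to `Debug`, which is what lets the panic messages in de.rs interpolate tokens with `{}`. A quick check of that equivalence:

```rust
extern crate serde_test;
use serde_test::Token;

fn main() {
    let token = Token::Str("lock");
    // Display and Debug render identically because Display forwards to Debug.
    assert_eq!(format!("{}", token), format!("{:?}", token));
}
```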
@@ -13,7 +13,7 @@ extern crate serde;
use self::serde::{Serialize, Serializer, Deserialize, Deserializer};

extern crate serde_test;
use self::serde_test::{Error, Token, assert_tokens, assert_ser_tokens, assert_de_tokens,
use self::serde_test::{Token, assert_tokens, assert_ser_tokens, assert_de_tokens,
assert_de_tokens_error};

trait MyDefault: Sized {
@@ -363,7 +363,7 @@ fn test_ignore_unknown() {

Token::Str("whoops"),
],
Error::Message("unknown field `whoops`, expected `a1`".to_owned()),
"unknown field `whoops`, expected `a1`",
);
}

@@ -875,7 +875,7 @@ fn test_missing_renamed_field_struct() {

Token::StructEnd,
],
Error::Message("missing field `a3`".to_owned()),
"missing field `a3`",
);

assert_de_tokens_error::<RenameStructSerializeDeserialize>(
@@ -887,7 +887,7 @@ fn test_missing_renamed_field_struct() {

Token::StructEnd,
],
Error::Message("missing field `a5`".to_owned()),
"missing field `a5`",
);
}

@@ -899,7 +899,7 @@ fn test_missing_renamed_field_enum() {

Token::StructVariantEnd,
],
Error::Message("missing field `b`".to_owned()),
"missing field `b`",
);

assert_de_tokens_error::<RenameEnumSerializeDeserialize<i8>>(
@@ -911,7 +911,7 @@ fn test_missing_renamed_field_enum() {

Token::StructVariantEnd,
],
Error::Message("missing field `d`".to_owned()),
"missing field `d`",
);
}

@@ -934,7 +934,7 @@ fn test_invalid_length_enum() {
Token::I32(1),
Token::TupleVariantEnd,
],
Error::Message("invalid length 1, expected tuple of 3 elements".to_owned()),
"invalid length 1, expected tuple of 3 elements",
);
assert_de_tokens_error::<InvalidLengthEnum>(
&[
@@ -942,7 +942,7 @@ fn test_invalid_length_enum() {
Token::I32(1),
Token::TupleVariantEnd,
],
Error::Message("invalid length 1, expected tuple of 2 elements".to_owned()),
"invalid length 1, expected tuple of 2 elements",
);
}
@@ -13,7 +13,7 @@ extern crate serde;
use serde::{Deserialize, Deserializer};

extern crate serde_test;
use serde_test::{Error, Token, assert_de_tokens, assert_de_tokens_error};
use serde_test::{Token, assert_de_tokens, assert_de_tokens_error};

use std::borrow::Cow;

@@ -26,7 +26,7 @@ fn test_borrowed_str() {
fn test_borrowed_str_from_string() {
assert_de_tokens_error::<&str>(
&[Token::String("borrowed")],
Error::Message("invalid type: string \"borrowed\", expected a borrowed string".to_owned(),),
"invalid type: string \"borrowed\", expected a borrowed string",
);
}

@@ -34,7 +34,7 @@ fn test_borrowed_str_from_string() {
fn test_borrowed_str_from_str() {
assert_de_tokens_error::<&str>(
&[Token::Str("borrowed")],
Error::Message("invalid type: string \"borrowed\", expected a borrowed string".to_owned(),),
"invalid type: string \"borrowed\", expected a borrowed string",
);
}

@@ -52,7 +52,7 @@ fn test_borrowed_bytes() {
fn test_borrowed_bytes_from_bytebuf() {
assert_de_tokens_error::<&[u8]>(
&[Token::ByteBuf(b"borrowed")],
Error::Message("invalid type: byte array, expected a borrowed byte array".to_owned(),),
"invalid type: byte array, expected a borrowed byte array",
);
}

@@ -60,7 +60,7 @@ fn test_borrowed_bytes_from_bytebuf() {
fn test_borrowed_bytes_from_bytes() {
assert_de_tokens_error::<&[u8]>(
&[Token::Bytes(b"borrowed")],
Error::Message("invalid type: byte array, expected a borrowed byte array".to_owned(),),
"invalid type: byte array, expected a borrowed byte array",
);
}

@@ -128,7 +128,6 @@ fn test_cow() {

let mut de = serde_test::Deserializer::new(tokens);
let cows = Cows::deserialize(&mut de).unwrap();
assert_eq!(de.next_token(), None);

match cows.copied {
Cow::Owned(ref s) if s == "copied" => {}
@@ -28,7 +28,7 @@ extern crate fnv;
use self::fnv::FnvHasher;

extern crate serde_test;
use self::serde_test::{Error, Token, assert_de_tokens, assert_de_tokens_error};
use self::serde_test::{Token, assert_de_tokens, assert_de_tokens_error};

#[macro_use]
mod macros;
@@ -149,8 +149,6 @@ fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) {
a: i32,
}

let expected = IgnoreBase { a: 1 };

// Embed the tokens to be ignored in the normal token
// stream for an IgnoreBase type
let concated_tokens: Vec<Token> = vec![
@@ -166,10 +164,8 @@ fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) {
.collect();

let mut de = serde_test::Deserializer::new(&concated_tokens);
let v: Result<IgnoreBase, Error> = Deserialize::deserialize(&mut de);

assert_eq!(v.as_ref(), Ok(&expected));
assert_eq!(de.next_token(), None);
let base = IgnoreBase::deserialize(&mut de).unwrap();
assert_eq!(base, IgnoreBase { a: 1 });
}

//////////////////////////////////////////////////////////////////////////
@@ -789,7 +785,7 @@ fn test_net_ipaddr() {
fn test_cstr_internal_null() {
assert_de_tokens_error::<Box<CStr>>(
&[Token::Bytes(b"a\0c")],
Error::Message("nul byte found in provided data at position: 1".into()),
"nul byte found in provided data at position: 1",
);
}

@@ -798,7 +794,7 @@ fn test_cstr_internal_null() {
fn test_cstr_internal_null_end() {
assert_de_tokens_error::<Box<CStr>>(
&[Token::Bytes(b"ac\0")],
Error::Message("nul byte found in provided data at position: 2".into()),
"nul byte found in provided data at position: 2",
);
}

@@ -811,48 +807,39 @@ declare_error_tests! {

Token::Str("d"),
],
Error::Message("unknown field `d`, expected `a`".to_owned()),
"unknown field `d`, expected `a`",
}
test_skipped_field_is_unknown<StructDenyUnknown> {
&[
Token::Struct("StructDenyUnknown", 2),
Token::Str("b"),
],
Error::Message("unknown field `b`, expected `a`".to_owned()),
"unknown field `b`, expected `a`",
}
test_skip_all_deny_unknown<StructSkipAllDenyUnknown> {
&[
Token::Struct("StructSkipAllDenyUnknown", 1),
Token::Str("a"),
],
Error::Message("unknown field `a`, there are no fields".to_owned()),
"unknown field `a`, there are no fields",
}
test_unknown_variant<Enum> {
&[
Token::UnitVariant("Enum", "Foo"),
],
Error::Message("unknown variant `Foo`, expected one of `Unit`, `Simple`, `Seq`, `Map`".to_owned()),
"unknown variant `Foo`, expected one of `Unit`, `Simple`, `Seq`, `Map`",
}
test_enum_skipped_variant<Enum> {
&[
Token::UnitVariant("Enum", "Skipped"),
],
Error::Message("unknown variant `Skipped`, expected one of `Unit`, `Simple`, `Seq`, `Map`".to_owned()),
"unknown variant `Skipped`, expected one of `Unit`, `Simple`, `Seq`, `Map`",
}
test_enum_skip_all<EnumSkipAll> {
&[
Token::UnitVariant("EnumSkipAll", "Skipped"),
],
Error::Message("unknown variant `Skipped`, there are no variants".to_owned()),
}
test_struct_seq_too_long<Struct> {
&[
Token::Seq(Some(4)),
Token::I32(1),
Token::I32(2),
Token::I32(3),
],
Error::UnexpectedToken(Token::I32(3)),
"unknown variant `Skipped`, there are no variants",
}
test_duplicate_field_struct<Struct> {
&[
@@ -862,7 +849,7 @@ declare_error_tests! {

Token::Str("a"),
],
Error::Message("duplicate field `a`".to_owned()),
"duplicate field `a`",
}
test_duplicate_field_enum<Enum> {
&[
@@ -872,7 +859,7 @@ declare_error_tests! {

Token::Str("a"),
],
Error::Message("duplicate field `a`".to_owned()),
"duplicate field `a`",
}
test_enum_out_of_range<Enum> {
&[
@@ -880,7 +867,7 @@ declare_error_tests! {
Token::U32(4),
Token::Unit,
],
Error::Message("invalid value: integer `4`, expected variant index 0 <= i < 4".into()),
"invalid value: integer `4`, expected variant index 0 <= i < 4",
}
test_short_tuple<(u8, u8, u8)> {
&[
@@ -888,7 +875,7 @@ declare_error_tests! {
Token::U8(1),
Token::TupleEnd,
],
Error::Message("invalid length 1, expected a tuple of size 3".into()),
"invalid length 1, expected a tuple of size 3",
}
test_short_array<[u8; 3]> {
&[
@@ -896,142 +883,142 @@ declare_error_tests! {
Token::U8(1),
Token::SeqEnd,
],
Error::Message("invalid length 1, expected an array of length 3".into()),
"invalid length 1, expected an array of length 3",
}
test_cstring_internal_null<CString> {
&[
Token::Bytes(b"a\0c"),
],
Error::Message("nul byte found in provided data at position: 1".into()),
"nul byte found in provided data at position: 1",
}
test_cstring_internal_null_end<CString> {
&[
Token::Bytes(b"ac\0"),
],
Error::Message("nul byte found in provided data at position: 2".into()),
"nul byte found in provided data at position: 2",
}
test_unit_from_empty_seq<()> {
&[
Token::Seq(Some(0)),
Token::SeqEnd,
],
Error::Message("invalid type: sequence, expected unit".into()),
"invalid type: sequence, expected unit",
}
test_unit_from_empty_seq_without_len<()> {
&[
Token::Seq(None),
Token::SeqEnd,
],
Error::Message("invalid type: sequence, expected unit".into()),
"invalid type: sequence, expected unit",
}
test_unit_from_tuple_struct<()> {
&[
Token::TupleStruct("Anything", 0),
Token::TupleStructEnd,
],
Error::Message("invalid type: sequence, expected unit".into()),
"invalid type: sequence, expected unit",
}
test_string_from_unit<String> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a string".into()),
"invalid type: unit value, expected a string",
}
test_btreeset_from_unit<BTreeSet<isize>> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_btreeset_from_unit_struct<BTreeSet<isize>> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_hashset_from_unit<HashSet<isize>> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_hashset_from_unit_struct<HashSet<isize>> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_vec_from_unit<Vec<isize>> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_vec_from_unit_struct<Vec<isize>> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected a sequence".into()),
"invalid type: unit value, expected a sequence",
}
test_zero_array_from_unit<[isize; 0]> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected an empty array".into()),
"invalid type: unit value, expected an empty array",
}
test_zero_array_from_unit_struct<[isize; 0]> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected an empty array".into()),
"invalid type: unit value, expected an empty array",
}
test_btreemap_from_unit<BTreeMap<isize, isize>> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a map".into()),
"invalid type: unit value, expected a map",
}
test_btreemap_from_unit_struct<BTreeMap<isize, isize>> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected a map".into()),
"invalid type: unit value, expected a map",
}
test_hashmap_from_unit<HashMap<isize, isize>> {
&[
Token::Unit,
],
Error::Message("invalid type: unit value, expected a map".into()),
"invalid type: unit value, expected a map",
}
test_hashmap_from_unit_struct<HashMap<isize, isize>> {
&[
Token::UnitStruct("Anything"),
],
Error::Message("invalid type: unit value, expected a map".into()),
"invalid type: unit value, expected a map",
}
test_bool_from_string<bool> {
&[
Token::Str("false"),
],
Error::Message("invalid type: string \"false\", expected a boolean".into()),
"invalid type: string \"false\", expected a boolean",
}
test_number_from_string<isize> {
&[
Token::Str("1"),
],
Error::Message("invalid type: string \"1\", expected isize".into()),
"invalid type: string \"1\", expected isize",
}
test_integer_from_float<isize> {
&[
Token::F32(0.0),
],
Error::Message("invalid type: floating point `0`, expected isize".into()),
"invalid type: floating point `0`, expected isize",
}
test_unit_struct_from_seq<UnitStruct> {
&[
Token::Seq(Some(0)),
Token::SeqEnd,
],
Error::Message("invalid type: sequence, expected unit struct UnitStruct".into()),
"invalid type: sequence, expected unit struct UnitStruct",
}
}
@@ -10,7 +10,7 @@
extern crate serde_derive;

extern crate serde_test;
use self::serde_test::{Error, Token, assert_tokens, assert_ser_tokens, assert_de_tokens,
use self::serde_test::{Token, assert_tokens, assert_ser_tokens, assert_de_tokens,
assert_de_tokens_error};

use std::collections::BTreeMap;
@@ -555,12 +555,12 @@ fn test_untagged_enum() {

assert_de_tokens_error::<Untagged>(
&[Token::None],
Error::Message("data did not match any variant of untagged enum Untagged".to_owned(),),
"data did not match any variant of untagged enum Untagged",
);

assert_de_tokens_error::<Untagged>(
&[Token::Tuple(1), Token::U8(1), Token::TupleEnd],
Error::Message("data did not match any variant of untagged enum Untagged".to_owned(),),
"data did not match any variant of untagged enum Untagged",
);

assert_de_tokens_error::<Untagged>(
@@ -571,7 +571,7 @@ fn test_untagged_enum() {
Token::U8(3),
Token::TupleEnd,
],
Error::Message("data did not match any variant of untagged enum Untagged".to_owned(),),
"data did not match any variant of untagged enum Untagged",
);
}

@@ -679,7 +679,7 @@ fn test_internally_tagged_enum() {

assert_de_tokens_error::<InternallyTagged>(
&[Token::Map(Some(0)), Token::MapEnd],
Error::Message("missing field `type`".to_owned()),
"missing field `type`",
);

assert_de_tokens_error::<InternallyTagged>(
@@ -691,7 +691,7 @@ fn test_internally_tagged_enum() {

Token::MapEnd,
],
Error::Message("unknown variant `Z`, expected one of `A`, `B`, `C`, `D`, `E`, `F`".to_owned(),),
"unknown variant `Z`, expected one of `A`, `B`, `C`, `D`, `E`, `F`",
);
}
@@ -21,7 +21,7 @@ use std::str;
extern crate serde;

extern crate serde_test;
use self::serde_test::{Error, Token, assert_ser_tokens, assert_ser_tokens_error};
use self::serde_test::{Token, assert_ser_tokens, assert_ser_tokens_error};

extern crate fnv;
use self::fnv::FnvHasher;
@@ -379,7 +379,7 @@ fn test_cannot_serialize_paths() {
assert_ser_tokens_error(
&Path::new(path),
&[],
Error::Message("path contains invalid UTF-8 characters".to_owned()),
"path contains invalid UTF-8 characters",
);

let mut path_buf = PathBuf::new();
@@ -388,7 +388,7 @@ fn test_cannot_serialize_paths() {
assert_ser_tokens_error(
&path_buf,
&[],
Error::Message("path contains invalid UTF-8 characters".to_owned()),
"path contains invalid UTF-8 characters",
);
}

@@ -397,21 +397,21 @@ fn test_enum_skipped() {
assert_ser_tokens_error(
&Enum::SkippedUnit,
&[],
Error::Message("the enum variant Enum::SkippedUnit cannot be serialized".to_owned(),),
"the enum variant Enum::SkippedUnit cannot be serialized",
);
assert_ser_tokens_error(
&Enum::SkippedOne(42),
&[],
Error::Message("the enum variant Enum::SkippedOne cannot be serialized".to_owned(),),
"the enum variant Enum::SkippedOne cannot be serialized",
);
assert_ser_tokens_error(
&Enum::SkippedSeq(1, 2),
&[],
Error::Message("the enum variant Enum::SkippedSeq cannot be serialized".to_owned(),),
"the enum variant Enum::SkippedSeq cannot be serialized",
);
assert_ser_tokens_error(
&Enum::SkippedMap { _a: 1, _b: 2 },
&[],
Error::Message("the enum variant Enum::SkippedMap cannot be serialized".to_owned(),),
"the enum variant Enum::SkippedMap cannot be serialized",
);
}
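The serialization side follows the same pattern: `assert_ser_tokens_error` now takes the expected message as `&str`, and the token slice can be empty when serialization fails before emitting anything. A hedged, Unix-only sketch modelled on the path tests above (the `OsString::from_vec` construction is an assumption here; the expected message comes from the diff):

```rust
extern crate serde_test;
use serde_test::assert_ser_tokens_error;

use std::ffi::OsString;
use std::os::unix::ffi::OsStringExt; // Unix-only way to build a non-UTF-8 path
use std::path::PathBuf;

fn main() {
    // A path containing invalid UTF-8 cannot be serialized as a string, so no
    // tokens are expected and only the error message is checked.
    let path = PathBuf::from(OsString::from_vec(b"Hello \xfe world".to_vec()));
    assert_ser_tokens_error(&path, &[], "path contains invalid UTF-8 characters");
}
```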