Clean up warnings in serde

This commit is contained in:
Erick Tryzelaar 2015-02-13 10:26:13 -08:00
parent b70d77b5c9
commit 4dccf8cdec
15 changed files with 2607 additions and 2629 deletions

View File

@ -1,7 +1,5 @@
#![feature(plugin)]
#[plugin]
extern crate serde_macros;
#![feature(plugin, test)]
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;

View File

@ -1,8 +1,6 @@
#![feature(plugin, io)]
#![feature(collections, core, io, plugin, test)]
#![allow(non_camel_case_types)]
#[plugin]
extern crate serde_macros;
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;

View File

@ -1,7 +1,5 @@
#![feature(plugin)]
#[plugin]
extern crate serde_macros;
#![feature(core, plugin, test)]
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;

View File

@ -1,7 +1,5 @@
#![feature(plugin)]
#[plugin]
extern crate serde_macros;
#![feature(plugin, test)]
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;

View File

@ -1,7 +1,5 @@
#![feature(plugin)]
#[plugin]
extern crate serde_macros;
#![feature(core, plugin, test)]
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;

311
benches/json.rs Normal file
View File

@ -0,0 +1,311 @@
#![feature(core, plugin, test)]
#![plugin(serde_macros)]
extern crate serde;
extern crate "rustc-serialize" as rustc_serialize;
extern crate test;
use std::collections::BTreeMap;
use std::string;
use rustc_serialize as serialize;
use test::Bencher;
use serde::de::Token;
use serde::json::{Parser, Value, from_str};
/// Builds a `BTreeMap` from `key => value` pairs, e.g.
/// `treemap!("a".to_string() => Json::Null)`.
macro_rules! treemap {
    ($($k:expr => $v:expr),*) => ({
        let mut _m = ::std::collections::BTreeMap::new();
        $(_m.insert($k, $v);)*
        _m
    })
}
/// Builds a compact JSON array string: `count` copies of a fixed
/// five-field object followed by one empty object.
///
/// Shared input text for all encode/decode benchmarks below.
fn json_str(count: usize) -> string::String {
    let mut src = "[".to_string();
    // `0..count` replaces the deprecated `range(0, count)` call
    // (this commit is a warning cleanup; `range` emits a deprecation warning).
    for _ in 0..count {
        src.push_str(r#"{"a":true,"b":null,"c":3.1415,"d":"Hello world","e":[1,2,3]},"#);
    }
    src.push_str("{}]");
    src
}
/// Builds the pretty-printed counterpart of `json_str`: `count` copies of
/// the fixed object in rustc-serialize's pretty layout, plus a trailing
/// empty object.
fn pretty_json_str(count: usize) -> string::String {
    let mut src = "[\n".to_string();
    // `0..count` replaces the deprecated `range(0, count)` call.
    for _ in 0..count {
        src.push_str(
            concat!(
                " {\n",
                " \"a\": true,\n",
                " \"b\": null,\n",
                " \"c\": 3.1415,\n",
                " \"d\": \"Hello world\",\n",
                " \"e\": [\n",
                " 1,\n",
                " 2,\n",
                " 3\n",
                " ]\n",
                " },\n"
            )
        );
    }
    src.push_str(" {}\n]");
    src
}
fn encoder_json(count: usize) -> serialize::json::Json {
use rustc_serialize::json::Json;
let mut list = vec!();
for _ in range(0, count) {
list.push(Json::Object(treemap!(
"a".to_string() => Json::Boolean(true),
"b".to_string() => Json::Null,
"c".to_string() => Json::F64(3.1415),
"d".to_string() => Json::String("Hello world".to_string()),
"e".to_string() => Json::Array(vec!(
Json::U64(1),
Json::U64(2),
Json::U64(3)
))
)));
}
list.push(Json::Object(BTreeMap::new()));
Json::Array(list)
}
/// Builds the serde `Value` tree that matches the text produced by
/// `json_str(count)` — the serde-side twin of `encoder_json`.
fn serializer_json(count: usize) -> Value {
    let mut list = vec!();
    // `0..count` replaces the deprecated `range(0, count)` call.
    for _ in 0..count {
        list.push(Value::Object(treemap!(
            "a".to_string() => Value::Boolean(true),
            "b".to_string() => Value::Null,
            "c".to_string() => Value::Floating(3.1415),
            "d".to_string() => Value::String("Hello world".to_string()),
            "e".to_string() => Value::Array(vec!(
                Value::Integer(1),
                Value::Integer(2),
                Value::Integer(3)
            ))
        )));
    }
    // Trailing empty object mirrors the "{}" appended by json_str.
    list.push(Value::Object(BTreeMap::new()));
    Value::Array(list)
}
/// Benchmarks rustc-serialize JSON encoding: renders a `count`-element
/// value to a string and compares it against the expected text.
fn bench_encoder(b: &mut Bencher, count: usize) {
    let expected = json_str(count);
    let value = encoder_json(count);

    b.iter(|| assert_eq!(value.to_string(), expected));
}
/// Benchmarks rustc-serialize pretty-printed JSON encoding of a
/// `count`-element value.
fn bench_encoder_pretty(b: &mut Bencher, count: usize) {
    let expected = pretty_json_str(count);
    let value = encoder_json(count);

    b.iter(|| assert_eq!(value.pretty().to_string(), expected));
}
/// Benchmarks serde JSON serialization via `Value::to_string`.
fn bench_serializer(b: &mut Bencher, count: usize) {
    let expected = json_str(count);
    let value = serializer_json(count);

    b.iter(|| assert_eq!(value.to_string(), expected));
}
/// Benchmarks serde pretty JSON serialization via `Value::to_pretty_string`.
fn bench_serializer_pretty(b: &mut Bencher, count: usize) {
    let expected = pretty_json_str(count);
    let value = serializer_json(count);

    b.iter(|| assert_eq!(value.to_pretty_string(), expected));
}
/// Benchmarks rustc-serialize JSON parsing, checking the parsed tree
/// against the expected `Json` value.
fn bench_decoder(b: &mut Bencher, count: usize) {
    let text = json_str(count);
    let expected = encoder_json(count);

    b.iter(|| {
        let parsed = serialize::json::Json::from_str(&text).unwrap();
        assert_eq!(expected, parsed);
    });
}
// NOTE(review): this body is byte-for-byte identical to `bench_decoder`
// above — it parses with rustc-serialize's `Json::from_str` rather than
// serde's `from_str`/`serializer_json` (both imported at the top of this
// file and otherwise unused, which itself produces warnings). This looks
// like a copy-paste slip; confirm the serde deserializer path was intended
// before "fixing" it.
fn bench_deserializer(b: &mut Bencher, count: usize) {
    let src = json_str(count);
    let json = encoder_json(count);

    b.iter(|| {
        assert_eq!(json, serialize::json::Json::from_str(&src).unwrap());
    });
}
/// Benchmarks rustc-serialize's streaming (event-based) JSON parser by
/// walking the complete event stream for a `count`-element document and
/// asserting every event in order.
fn bench_decoder_streaming(b: &mut Bencher, count: usize) {
    let src = json_str(count);

    b.iter( || {
        use rustc_serialize::json::{Parser, JsonEvent, StackElement};

        let mut parser = Parser::new(src.chars());
        assert_eq!(parser.next(), Some(JsonEvent::ArrayStart));
        // `0..count` replaces the deprecated `range(0, count)` call.
        for _ in 0..count {
            assert_eq!(parser.next(), Some(JsonEvent::ObjectStart));

            assert_eq!(parser.next(), Some(JsonEvent::BooleanValue(true)));
            assert_eq!(parser.stack().top(), Some(StackElement::Key("a")));

            assert_eq!(parser.next(), Some(JsonEvent::NullValue));
            assert_eq!(parser.stack().top(), Some(StackElement::Key("b")));

            assert_eq!(parser.next(), Some(JsonEvent::F64Value(3.1415)));
            assert_eq!(parser.stack().top(), Some(StackElement::Key("c")));

            assert_eq!(parser.next(), Some(JsonEvent::StringValue("Hello world".to_string())));
            assert_eq!(parser.stack().top(), Some(StackElement::Key("d")));

            assert_eq!(parser.next(), Some(JsonEvent::ArrayStart));
            assert_eq!(parser.stack().top(), Some(StackElement::Key("e")));
            assert_eq!(parser.next(), Some(JsonEvent::U64Value(1)));
            assert_eq!(parser.next(), Some(JsonEvent::U64Value(2)));
            assert_eq!(parser.next(), Some(JsonEvent::U64Value(3)));
            assert_eq!(parser.next(), Some(JsonEvent::ArrayEnd));

            assert_eq!(parser.next(), Some(JsonEvent::ObjectEnd));
        }
        // The trailing "{}" object, then the closing bracket and EOF.
        assert_eq!(parser.next(), Some(JsonEvent::ObjectStart));
        assert_eq!(parser.next(), Some(JsonEvent::ObjectEnd));
        assert_eq!(parser.next(), Some(JsonEvent::ArrayEnd));
        assert_eq!(parser.next(), None);
    });
}
/// Benchmarks serde's streaming token parser over the same document,
/// asserting each `Token` in order.
fn bench_deserializer_streaming(b: &mut Bencher, count: usize) {
    let src = json_str(count);

    b.iter( || {
        let mut parser = Parser::new(src.bytes());
        assert_eq!(parser.next(), Some(Ok(Token::SeqStart(0))));
        // `0..count` replaces the deprecated `range(0, count)` call.
        for _ in 0..count {
            assert_eq!(parser.next(), Some(Ok(Token::MapStart(0))));

            assert_eq!(parser.next(), Some(Ok(Token::String("a".to_string()))));
            assert_eq!(parser.next(), Some(Ok(Token::Bool(true))));

            assert_eq!(parser.next(), Some(Ok(Token::String("b".to_string()))));
            assert_eq!(parser.next(), Some(Ok(Token::Null)));

            assert_eq!(parser.next(), Some(Ok(Token::String("c".to_string()))));
            assert_eq!(parser.next(), Some(Ok(Token::F64(3.1415))));

            assert_eq!(parser.next(), Some(Ok(Token::String("d".to_string()))));
            assert_eq!(parser.next(), Some(Ok(Token::String("Hello world".to_string()))));

            assert_eq!(parser.next(), Some(Ok(Token::String("e".to_string()))));
            assert_eq!(parser.next(), Some(Ok(Token::SeqStart(0))));
            assert_eq!(parser.next(), Some(Ok(Token::I64(1))));
            assert_eq!(parser.next(), Some(Ok(Token::I64(2))));
            assert_eq!(parser.next(), Some(Ok(Token::I64(3))));
            assert_eq!(parser.next(), Some(Ok(Token::End)));

            assert_eq!(parser.next(), Some(Ok(Token::End)));
        }
        // The trailing "{}" object, then the closing bracket and EOF.
        assert_eq!(parser.next(), Some(Ok(Token::MapStart(0))));
        assert_eq!(parser.next(), Some(Ok(Token::End)));
        assert_eq!(parser.next(), Some(Ok(Token::End)));
        assert_eq!(parser.next(), None);

        // NOTE(review): next() already returned None just above, so this
        // drain loop should exit on its first iteration — presumably
        // defensive; confirm it isn't meant to consume trailing tokens.
        loop {
            match parser.next() {
                None => return,
                Some(Ok(_)) => { }
                Some(Err(err)) => { panic!("error: {:?}", err); }
            }
        }
    });
}
// Benchmark registrations: each pair runs the corresponding helper above
// with a 1-element document (per-iteration overhead) and a 500-element
// document (throughput).
#[bench]
fn bench_encoder_001(b: &mut Bencher) {
    bench_encoder(b, 1)
}
#[bench]
fn bench_encoder_500(b: &mut Bencher) {
    bench_encoder(b, 500)
}
#[bench]
fn bench_encoder_001_pretty(b: &mut Bencher) {
    bench_encoder_pretty(b, 1)
}
#[bench]
fn bench_encoder_500_pretty(b: &mut Bencher) {
    bench_encoder_pretty(b, 500)
}
#[bench]
fn bench_serializer_001(b: &mut Bencher) {
    bench_serializer(b, 1)
}
#[bench]
fn bench_serializer_500(b: &mut Bencher) {
    bench_serializer(b, 500)
}
#[bench]
fn bench_serializer_001_pretty(b: &mut Bencher) {
    bench_serializer_pretty(b, 1)
}
#[bench]
fn bench_serializer_500_pretty(b: &mut Bencher) {
    bench_serializer_pretty(b, 500)
}
#[bench]
fn bench_decoder_001(b: &mut Bencher) {
    bench_decoder(b, 1)
}
#[bench]
fn bench_decoder_500(b: &mut Bencher) {
    bench_decoder(b, 500)
}
#[bench]
fn bench_deserializer_001(b: &mut Bencher) {
    bench_deserializer(b, 1)
}
#[bench]
fn bench_deserializer_500(b: &mut Bencher) {
    bench_deserializer(b, 500)
}
#[bench]
fn bench_decoder_001_streaming(b: &mut Bencher) {
    bench_decoder_streaming(b, 1)
}
#[bench]
fn bench_decoder_500_streaming(b: &mut Bencher) {
    bench_decoder_streaming(b, 500)
}
#[bench]
fn bench_deserializer_001_streaming(b: &mut Bencher) {
    bench_deserializer_streaming(b, 1)
}
#[bench]
fn bench_deserializer_500_streaming(b: &mut Bencher) {
    bench_deserializer_streaming(b, 500)
}

View File

@ -488,13 +488,13 @@ fn deserialize_static_fields<F>(
}
}
fn find_serial_name<'a, I>(mut iterator: I) -> Option<token::InternedString> where
fn find_serial_name<'a, I>(iterator: I) -> Option<token::InternedString> where
I: Iterator<Item=&'a Attribute>
{
for at in iterator {
match at.node.value.node {
MetaNameValue(ref at_name, ref value) => {
match (at_name.get(), &value.node) {
match (&at_name[], &value.node) {
("serial_name", &LitStr(ref string, _)) => {
attr::mark_used(at);
return Some(string.clone());

370
src/de.rs
View File

@ -1070,373 +1070,3 @@ impl<D: Deserializer<E>, E> Deserialize<D, E> for GatherTokens {
Ok(tokens)
}
}
//////////////////////////////////////////////////////////////////////////////
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use std::{option, string};
use serialize::Decoder;
use super::{Deserializer, Deserialize, Token, TokenKind, IgnoreTokens};
macro_rules! treemap {
($($k:expr => $v:expr),*) => ({
let mut _m = ::std::collections::BTreeMap::new();
$(_m.insert($k, $v);)*
_m
})
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
struct Inner {
a: (),
b: usize,
c: BTreeMap<string::String, option::Option<char>>,
}
impl<
D: Deserializer<E>,
E
> Deserialize<D, E> for Inner {
#[inline]
fn deserialize_token(d: &mut D, token: Token) -> Result<Inner, E> {
try!(d.expect_struct_start(token, "Inner"));
let mut a = None;
let mut b = None;
let mut c = None;
static FIELDS: &'static [&'static str] = &["a", "b", "c"];
loop {
let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
Some(idx) => idx,
None => { break; }
};
match idx {
Some(0) => { a = Some(try!(d.expect_struct_value())); }
Some(1) => { b = Some(try!(d.expect_struct_value())); }
Some(2) => { c = Some(try!(d.expect_struct_value())); }
Some(_) => unreachable!(),
None => { let _: IgnoreTokens = try!(Deserialize::deserialize(d)); }
}
}
Ok(Inner { a: a.unwrap(), b: b.unwrap(), c: c.unwrap() })
}
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
struct Outer {
inner: Vec<Inner>,
}
impl<D: Deserializer<E>, E> Deserialize<D, E> for Outer {
#[inline]
fn deserialize_token(d: &mut D, token: Token) -> Result<Outer, E> {
try!(d.expect_struct_start(token, "Outer"));
static FIELDS: &'static [&'static str] = &["inner"];
let mut inner = None;
loop {
let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
Some(idx) => idx,
None => { break; }
};
match idx {
Some(0) => { inner = Some(try!(d.expect_struct_value())); }
Some(_) => unreachable!(),
None => { let _: IgnoreTokens = try!(Deserialize::deserialize(d)); }
}
}
Ok(Outer { inner: inner.unwrap() })
}
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
enum Animal {
Dog,
Frog(string::String, isize)
}
impl<D: Deserializer<E>, E> Deserialize<D, E> for Animal {
#[inline]
fn deserialize_token(d: &mut D, token: Token) -> Result<Animal, E> {
match try!(d.expect_enum_start(token, "Animal", &["Dog", "Frog"])) {
0 => {
try!(d.expect_enum_end());
Ok(Animal::Dog)
}
1 => {
let x0 = try!(Deserialize::deserialize(d));
let x1 = try!(Deserialize::deserialize(d));
try!(d.expect_enum_end());
Ok(Animal::Frog(x0, x1))
}
_ => unreachable!(),
}
}
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Debug)]
enum Error {
EndOfStream,
SyntaxError(Vec<TokenKind>),
UnexpectedName,
ConversionError,
MissingField(&'static str),
}
//////////////////////////////////////////////////////////////////////////////
struct TokenDeserializer<Iter> {
tokens: Iter,
}
impl<Iter: Iterator<Item=Token>> TokenDeserializer<Iter> {
#[inline]
fn new(tokens: Iter) -> TokenDeserializer<Iter> {
TokenDeserializer {
tokens: tokens,
}
}
}
impl<Iter: Iterator<Item=Token>> Iterator for TokenDeserializer<Iter> {
type Item = Result<Token, Error>;
#[inline]
fn next(&mut self) -> option::Option<Result<Token, Error>> {
self.tokens.next().map(|token| Ok(token))
}
}
impl<Iter: Iterator<Item=Token>> Deserializer<Error> for TokenDeserializer<Iter> {
fn end_of_stream_error(&mut self) -> Error {
Error::EndOfStream
}
fn syntax_error(&mut self, _token: Token, expected: &[TokenKind]) -> Error {
Error::SyntaxError(expected.to_vec())
}
fn unexpected_name_error(&mut self, _token: Token) -> Error {
Error::UnexpectedName
}
fn conversion_error(&mut self, _token: Token) -> Error {
Error::ConversionError
}
#[inline]
fn missing_field<
T: Deserialize<TokenDeserializer<Iter>, Error>
>(&mut self, field: &'static str) -> Result<T, Error> {
Err(Error::MissingField(field))
}
}
//////////////////////////////////////////////////////////////////////////////
macro_rules! test_value {
($name:ident, [$($tokens:expr => $value:expr, $ty:ty),*]) => {
#[test]
fn $name() {
$(
let mut deserializer = TokenDeserializer::new($tokens.into_iter());
let value: $ty = Deserialize::deserialize(&mut deserializer).unwrap();
assert_eq!(value, $value);
)+
}
}
}
test_value!(test_primitives, [
vec!(Token::Null) => (), (),
vec!(Token::Bool(true)) => true, bool,
vec!(Token::Bool(false)) => false, bool,
vec!(Token::Isize(5)) => 5, isize,
vec!(Token::I8(5)) => 5, i8,
vec!(Token::I16(5)) => 5, i16,
vec!(Token::I32(5)) => 5, i32,
vec!(Token::I64(5)) => 5, i64,
vec!(Token::Usize(5)) => 5, usize,
vec!(Token::U8(5)) => 5, u8,
vec!(Token::U16(5)) => 5, u16,
vec!(Token::U32(5)) => 5, u32,
vec!(Token::U64(5)) => 5, u64,
vec!(Token::F32(5.0)) => 5.0, f32,
vec!(Token::F64(5.0)) => 5.0, f64,
vec!(Token::Char('c')) => 'c', char,
vec!(Token::Str("abc")) => "abc", &str,
vec!(Token::String("abc".to_string())) => "abc".to_string(), string::String
]);
test_value!(test_tuples, [
vec!(
Token::TupleStart(0),
Token::End,
) => (), (),
vec!(
Token::TupleStart(2),
Token::Isize(5),
Token::Str("a"),
Token::End,
) => (5, "a"), (isize, &'static str),
vec!(
Token::TupleStart(3),
Token::Null,
Token::TupleStart(0),
Token::End,
Token::TupleStart(2),
Token::Isize(5),
Token::Str("a"),
Token::End,
Token::End,
) => ((), (), (5, "a")), ((), (), (isize, &'static str))
]);
test_value!(test_options, [
vec!(Token::Option(false)) => None, option::Option<isize>,
vec!(
Token::Option(true),
Token::Isize(5),
) => Some(5), option::Option<isize>
]);
test_value!(test_structs, [
vec!(
Token::StructStart("Outer", 1),
Token::Str("inner"),
Token::SeqStart(0),
Token::End,
Token::End,
) => Outer { inner: vec!() }, Outer,
vec!(
Token::StructStart("Outer", 1),
Token::Str("inner"),
Token::SeqStart(1),
Token::StructStart("Inner", 3),
Token::Str("a"),
Token::Null,
Token::Str("b"),
Token::Usize(5),
Token::Str("c"),
Token::MapStart(1),
Token::String("abc".to_string()),
Token::Option(true),
Token::Char('c'),
Token::End,
Token::End,
Token::End,
Token::End,
) => Outer {
inner: vec!(
Inner {
a: (),
b: 5,
c: treemap!("abc".to_string() => Some('c')),
},
),
}, Outer
]);
test_value!(test_enums, [
vec!(
Token::EnumStart("Animal", "Dog", 0),
Token::End,
) => Animal::Dog, Animal,
vec!(
Token::EnumStart("Animal", "Frog", 2),
Token::String("Henry".to_string()),
Token::Isize(349),
Token::End,
) => Animal::Frog("Henry".to_string(), 349), Animal
]);
test_value!(test_vecs, [
vec!(
Token::SeqStart(0),
Token::End,
) => vec!(), Vec<isize>,
vec!(
Token::SeqStart(3),
Token::Isize(5),
Token::Isize(6),
Token::Isize(7),
Token::End,
) => vec!(5, 6, 7), Vec<isize>,
vec!(
Token::SeqStart(3),
Token::SeqStart(1),
Token::Isize(1),
Token::End,
Token::SeqStart(2),
Token::Isize(2),
Token::Isize(3),
Token::End,
Token::SeqStart(3),
Token::Isize(4),
Token::Isize(5),
Token::Isize(6),
Token::End,
Token::End,
) => vec!(vec!(1), vec!(2, 3), vec!(4, 5, 6)), Vec<Vec<isize>>
]);
test_value!(test_treemaps, [
vec!(
Token::MapStart(0),
Token::End,
) => treemap!(), BTreeMap<isize, string::String>,
vec!(
Token::MapStart(2),
Token::Isize(5),
Token::String("a".to_string()),
Token::Isize(6),
Token::String("b".to_string()),
Token::End,
) => treemap!(5is => "a".to_string(), 6is => "b".to_string()), BTreeMap<isize, string::
String>
]);
}

File diff suppressed because it is too large Load Diff

View File

@ -1,18 +1,6 @@
#![feature(plugin, io)]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![feature(collections, core, hash, io, std_misc, plugin, unicode)]
#![plugin(serde_macros)]
// test harness access
#[cfg(test)]
extern crate test;
#[plugin]
extern crate serde_macros;
#[cfg(test)]
extern crate serialize;
extern crate "rustc-serialize" as rustc_serialize;
extern crate unicode;
pub use de::{Deserializer, Deserialize};

View File

@ -313,524 +313,3 @@ macro_rules! impl_serialize_tuple {
}
impl_serialize_tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, }
//////////////////////////////////////////////////////////////////////////////
#[cfg(test)]
mod tests {
use std::collections::{HashMap, BTreeMap};
use std::{option, string};
use serialize::Decoder;
use super::{Serializer, Serialize};
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
#[derive_serialize]
struct Inner {
a: (),
b: usize,
c: HashMap<string::String, option::Option<char>>,
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
#[derive_serialize]
struct Outer {
inner: Vec<Inner>,
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug, RustcDecodable)]
#[derive_serialize]
enum Animal {
Dog,
Frog(String, isize)
}
//////////////////////////////////////////////////////////////////////////////
#[derive(Clone, PartialEq, Debug)]
pub enum Token<'a> {
Null,
Bool(bool),
Isize(isize),
I8(i8),
I16(i16),
I32(i32),
I64(i64),
Usize(usize),
U8(u8),
U16(u16),
U32(u32),
U64(u64),
F32(f32),
F64(f64),
Char(char),
Str(&'a str),
TupleStart(usize),
TupleSep,
TupleEnd,
StructStart(&'a str, usize),
StructSep(&'a str),
StructEnd,
EnumStart(&'a str, &'a str, usize),
EnumSep,
EnumEnd,
Option(bool),
SeqStart(usize),
SeqEnd,
MapStart(usize),
MapEnd,
}
#[derive(Debug)]
#[allow(dead_code)]
enum Error {
EndOfStream,
SyntaxError,
}
//////////////////////////////////////////////////////////////////////////////
struct AssertSerializer<Iter> {
iter: Iter,
}
impl<'a, Iter: Iterator<Item=Token<'a>>> AssertSerializer<Iter> {
fn new(iter: Iter) -> AssertSerializer<Iter> {
AssertSerializer {
iter: iter,
}
}
fn serialize<'b>(&mut self, token: Token<'b>) -> Result<(), Error> {
let t = match self.iter.next() {
Some(t) => t,
None => { panic!(); }
};
assert_eq!(t, token);
Ok(())
}
}
impl<'a, Iter: Iterator<Item=Token<'a>>> Serializer<Error> for AssertSerializer<Iter> {
fn serialize_null(&mut self) -> Result<(), Error> {
self.serialize(Token::Null)
}
fn serialize_bool(&mut self, v: bool) -> Result<(), Error> {
self.serialize(Token::Bool(v))
}
fn serialize_isize(&mut self, v: isize) -> Result<(), Error> {
self.serialize(Token::Isize(v))
}
fn serialize_i8(&mut self, v: i8) -> Result<(), Error> {
self.serialize(Token::I8(v))
}
fn serialize_i16(&mut self, v: i16) -> Result<(), Error> {
self.serialize(Token::I16(v))
}
fn serialize_i32(&mut self, v: i32) -> Result<(), Error> {
self.serialize(Token::I32(v))
}
fn serialize_i64(&mut self, v: i64) -> Result<(), Error> {
self.serialize(Token::I64(v))
}
fn serialize_usize(&mut self, v: usize) -> Result<(), Error> {
self.serialize(Token::Usize(v))
}
fn serialize_u8(&mut self, v: u8) -> Result<(), Error> {
self.serialize(Token::U8(v))
}
fn serialize_u16(&mut self, v: u16) -> Result<(), Error> {
self.serialize(Token::U16(v))
}
fn serialize_u32(&mut self, v: u32) -> Result<(), Error> {
self.serialize(Token::U32(v))
}
fn serialize_u64(&mut self, v: u64) -> Result<(), Error> {
self.serialize(Token::U64(v))
}
fn serialize_f32(&mut self, v: f32) -> Result<(), Error> {
self.serialize(Token::F32(v))
}
fn serialize_f64(&mut self, v: f64) -> Result<(), Error> {
self.serialize(Token::F64(v))
}
fn serialize_char(&mut self, v: char) -> Result<(), Error> {
self.serialize(Token::Char(v))
}
fn serialize_str(&mut self, v: &str) -> Result<(), Error> {
self.serialize(Token::Str(v))
}
fn serialize_tuple_start(&mut self, len: usize) -> Result<(), Error> {
self.serialize(Token::TupleStart(len))
}
fn serialize_tuple_elt<
T: Serialize<AssertSerializer<Iter>, Error>
>(&mut self, value: &T) -> Result<(), Error> {
try!(self.serialize(Token::TupleSep));
value.serialize(self)
}
fn serialize_tuple_end(&mut self) -> Result<(), Error> {
self.serialize(Token::TupleEnd)
}
fn serialize_struct_start(&mut self, name: &str, len: usize) -> Result<(), Error> {
self.serialize(Token::StructStart(name, len))
}
fn serialize_struct_elt<
T: Serialize<AssertSerializer<Iter>, Error>
>(&mut self, name: &str, value: &T) -> Result<(), Error> {
try!(self.serialize(Token::StructSep(name)));
value.serialize(self)
}
fn serialize_struct_end(&mut self) -> Result<(), Error> {
self.serialize(Token::StructEnd)
}
fn serialize_enum_start(&mut self, name: &str, variant: &str, len: usize) -> Result<(), Error> {
self.serialize(Token::EnumStart(name, variant, len))
}
fn serialize_enum_elt<
T: Serialize<AssertSerializer<Iter>, Error>
>(&mut self, value: &T) -> Result<(), Error> {
try!(self.serialize(Token::EnumSep));
value.serialize(self)
}
fn serialize_enum_end(&mut self) -> Result<(), Error> {
self.serialize(Token::EnumEnd)
}
fn serialize_option<
T: Serialize<AssertSerializer<Iter>, Error>
>(&mut self, v: &option::Option<T>) -> Result<(), Error> {
match *v {
Some(ref v) => {
try!(self.serialize(Token::Option(true)));
v.serialize(self)
}
None => {
self.serialize(Token::Option(false))
}
}
}
fn serialize_seq<
T: Serialize<AssertSerializer<Iter>, Error>,
SeqIter: Iterator<Item=T>
>(&mut self, iter: SeqIter) -> Result<(), Error> {
let (len, _) = iter.size_hint();
try!(self.serialize(Token::SeqStart(len)));
for elt in iter {
try!(elt.serialize(self));
}
self.serialize(Token::SeqEnd)
}
fn serialize_map<
K: Serialize<AssertSerializer<Iter>, Error>,
V: Serialize<AssertSerializer<Iter>, Error>,
MapIter: Iterator<Item=(K, V)>
>(&mut self, iter: MapIter) -> Result<(), Error> {
let (len, _) = iter.size_hint();
try!(self.serialize(Token::MapStart(len)));
for (key, value) in iter {
try!(key.serialize(self));
try!(value.serialize(self));
}
self.serialize(Token::MapEnd)
}
}
//////////////////////////////////////////////////////////////////////////////
#[test]
fn test_tokens_int() {
let tokens = vec!(
Token::Isize(5)
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
5is.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_str() {
let tokens = vec!(
Token::Str("a"),
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
"a".serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_null() {
let tokens = vec!(
Token::Null,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
().serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_option_none() {
let tokens = vec!(
Token::Option(false),
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
None::<isize>.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_option_some() {
let tokens = vec!(
Token::Option(true),
Token::Isize(5),
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
Some(5is).serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_tuple() {
let tokens = vec!(
Token::TupleStart(2),
Token::TupleSep,
Token::Isize(5),
Token::TupleSep,
Token::Str("a"),
Token::TupleEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
(5is, "a").serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_tuple_compound() {
let tokens = vec!(
Token::TupleStart(3),
Token::TupleSep,
Token::Null,
Token::TupleSep,
Token::Null,
Token::TupleSep,
Token::TupleStart(2),
Token::TupleSep,
Token::Isize(5),
Token::TupleSep,
Token::Str("a"),
Token::TupleEnd,
Token::TupleEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
((), (), (5is, "a")).serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_struct_empty() {
let tokens = vec!(
Token::StructStart("Outer", 1),
Token::StructSep("inner"),
Token::SeqStart(0),
Token::SeqEnd,
Token::StructEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
Outer { inner: vec!() }.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_struct() {
let tokens = vec!(
Token::StructStart("Outer", 1),
Token::StructSep("inner"),
Token::SeqStart(1),
Token::StructStart("Inner", 3),
Token::StructSep("a"),
Token::Null,
Token::StructSep("b"),
Token::Usize(5),
Token::StructSep("c"),
Token::MapStart(1),
Token::Str("abc"),
Token::Option(true),
Token::Char('c'),
Token::MapEnd,
Token::StructEnd,
Token::SeqEnd,
Token::StructEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
let mut map = HashMap::new();
map.insert("abc".to_string(), Some('c'));
Outer {
inner: vec!(
Inner {
a: (),
b: 5,
c: map,
},
)
}.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_enum() {
let tokens = vec!(
Token::EnumStart("Animal", "Dog", 0),
Token::EnumEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
Animal::Dog.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
let tokens = vec!(
Token::EnumStart("Animal", "Frog", 2),
Token::EnumSep,
Token::Str("Henry"),
Token::EnumSep,
Token::Isize(349),
Token::EnumEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
Animal::Frog("Henry".to_string(), 349).serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_vec_empty() {
let tokens = vec!(
Token::SeqStart(0),
Token::SeqEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
let v: Vec<isize> = vec!();
v.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_vec() {
let tokens = vec!(
Token::SeqStart(3),
Token::Isize(5),
Token::Isize(6),
Token::Isize(7),
Token::SeqEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
(vec!(5is, 6, 7)).serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_vec_compound() {
let tokens = vec!(
Token::SeqStart(3),
Token::SeqStart(1),
Token::Isize(1),
Token::SeqEnd,
Token::SeqStart(2),
Token::Isize(2),
Token::Isize(3),
Token::SeqEnd,
Token::SeqStart(3),
Token::Isize(4),
Token::Isize(5),
Token::Isize(6),
Token::SeqEnd,
Token::SeqEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
(vec!(vec!(1is), vec!(2, 3), vec!(4, 5, 6))).serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_treemap() {
let tokens = vec!(
Token::MapStart(2),
Token::Isize(5),
Token::Str("a"),
Token::Isize(6),
Token::Str("b"),
Token::MapEnd,
);
let mut serializer = AssertSerializer::new(tokens.into_iter());
let mut map = BTreeMap::new();
map.insert(5is, "a".to_string());
map.insert(6is, "b".to_string());
map.serialize(&mut serializer).unwrap();
assert_eq!(serializer.iter.next(), None);
}
}

368
tests/de.rs Normal file
View File

@ -0,0 +1,368 @@
#![feature(plugin)]
#![plugin(serde_macros)]
extern crate serde;
use std::collections::BTreeMap;
use std::{option, string};
use serde::de::{Deserializer, Deserialize, Token, TokenKind, IgnoreTokens};
/// Builds a `BTreeMap` from `key => value` pairs, e.g.
/// `treemap!(5 => "a".to_string())`.
macro_rules! treemap {
    ($($k:expr => $v:expr),*) => ({
        let mut _m = ::std::collections::BTreeMap::new();
        $(_m.insert($k, $v);)*
        _m
    })
}
//////////////////////////////////////////////////////////////////////////////
// Test fixture: a struct with unit, integer, and map fields, used to
// exercise hand-written struct deserialization below.
#[derive(Clone, PartialEq, Debug)]
struct Inner {
    a: (),
    b: usize,
    c: BTreeMap<string::String, option::Option<char>>,
}
impl<
    D: Deserializer<E>,
    E
> Deserialize<D, E> for Inner {
    /// Hand-written deserialization for `Inner`: expects a struct-start
    /// token, then consumes field/value token pairs until the end marker.
    #[inline]
    fn deserialize_token(d: &mut D, token: Token) -> Result<Inner, E> {
        try!(d.expect_struct_start(token, "Inner"));

        // Fields may arrive in any order, so collect into Options first.
        let mut a = None;
        let mut b = None;
        let mut c = None;

        static FIELDS: &'static [&'static str] = &["a", "b", "c"];

        loop {
            // Outer None means "end of struct"; inner Option is the index
            // of the matched field name within FIELDS.
            let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
                Some(idx) => idx,
                None => { break; }
            };

            match idx {
                Some(0) => { a = Some(try!(d.expect_struct_value())); }
                Some(1) => { b = Some(try!(d.expect_struct_value())); }
                Some(2) => { c = Some(try!(d.expect_struct_value())); }
                Some(_) => unreachable!(),
                // Unrecognized field name: drain its value tokens and drop them.
                None => { let _: IgnoreTokens = try!(Deserialize::deserialize(d)); }
            }
        }

        // NOTE(review): unwrap() panics if a field was absent — presumably
        // acceptable for a test fixture; confirm missing_field() isn't
        // wanted here instead.
        Ok(Inner { a: a.unwrap(), b: b.unwrap(), c: c.unwrap() })
    }
}
//////////////////////////////////////////////////////////////////////////////
// Test fixture: a struct wrapping a sequence of `Inner`, for nested
// struct/seq deserialization.
#[derive(Clone, PartialEq, Debug)]
struct Outer {
    inner: Vec<Inner>,
}
impl<D: Deserializer<E>, E> Deserialize<D, E> for Outer {
    /// Hand-written deserialization for `Outer`; same field-loop protocol
    /// as `Inner`, with a single "inner" field.
    #[inline]
    fn deserialize_token(d: &mut D, token: Token) -> Result<Outer, E> {
        try!(d.expect_struct_start(token, "Outer"));

        static FIELDS: &'static [&'static str] = &["inner"];

        let mut inner = None;

        loop {
            // Outer None means "end of struct"; inner Option is the index
            // of the matched field name within FIELDS.
            let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
                Some(idx) => idx,
                None => { break; }
            };

            match idx {
                Some(0) => { inner = Some(try!(d.expect_struct_value())); }
                Some(_) => unreachable!(),
                // Unrecognized field name: drain its value tokens and drop them.
                None => { let _: IgnoreTokens = try!(Deserialize::deserialize(d)); }
            }
        }

        // NOTE(review): panics if "inner" was absent — see note on Inner.
        Ok(Outer { inner: inner.unwrap() })
    }
}
//////////////////////////////////////////////////////////////////////////////
// Test fixture: an enum with a unit variant and a tuple variant, for
// enum deserialization.
#[derive(Clone, PartialEq, Debug)]
enum Animal {
    Dog,
    Frog(string::String, isize)
}
impl<D: Deserializer<E>, E> Deserialize<D, E> for Animal {
    /// Hand-written enum deserialization: dispatches on the variant index
    /// returned by `expect_enum_start`.
    #[inline]
    fn deserialize_token(d: &mut D, token: Token) -> Result<Animal, E> {
        match try!(d.expect_enum_start(token, "Animal", &["Dog", "Frog"])) {
            // "Dog": unit variant — no payload tokens before the end marker.
            0 => {
                try!(d.expect_enum_end());
                Ok(Animal::Dog)
            }
            // "Frog": two payload values, then the end marker.
            1 => {
                let x0 = try!(Deserialize::deserialize(d));
                let x1 = try!(Deserialize::deserialize(d));

                try!(d.expect_enum_end());

                Ok(Animal::Frog(x0, x1))
            }
            // expect_enum_start is only expected to yield indices into the
            // two-element variant list above.
            _ => unreachable!(),
        }
    }
}
//////////////////////////////////////////////////////////////////////////////
// Error type for the test deserializer: one variant per Deserializer
// error hook implemented below.
#[derive(Debug)]
enum Error {
    EndOfStream,
    // Carries the kinds the deserializer would have accepted.
    SyntaxError(Vec<TokenKind>),
    UnexpectedName,
    ConversionError,
    // Carries the name of the absent struct field.
    MissingField(&'static str),
}
//////////////////////////////////////////////////////////////////////////////
// A Deserializer that replays a pre-built iterator of Tokens — lets the
// tests below feed exact token streams without any real parsing.
struct TokenDeserializer<Iter> {
    tokens: Iter,
}
impl<Iter: Iterator<Item=Token>> TokenDeserializer<Iter> {
    /// Wraps a token iterator in a replaying deserializer.
    #[inline]
    fn new(tokens: Iter) -> TokenDeserializer<Iter> {
        TokenDeserializer { tokens: tokens }
    }
}
impl<Iter: Iterator<Item=Token>> Iterator for TokenDeserializer<Iter> {
    type Item = Result<Token, Error>;

    /// Yields the next recorded token; every token is wrapped in `Ok`
    /// since a replayed stream can never fail.
    #[inline]
    fn next(&mut self) -> option::Option<Result<Token, Error>> {
        self.tokens.next().map(Ok)
    }
}
impl<Iter: Iterator<Item=Token>> Deserializer<Error> for TokenDeserializer<Iter> {
    // Each hook maps a deserializer failure onto the matching Error
    // variant so tests can pattern-match on what went wrong.
    fn end_of_stream_error(&mut self) -> Error {
        Error::EndOfStream
    }

    fn syntax_error(&mut self, _token: Token, expected: &[TokenKind]) -> Error {
        Error::SyntaxError(expected.to_vec())
    }

    fn unexpected_name_error(&mut self, _token: Token) -> Error {
        Error::UnexpectedName
    }

    fn conversion_error(&mut self, _token: Token) -> Error {
        Error::ConversionError
    }

    // A missing struct field is reported as an error rather than defaulted.
    #[inline]
    fn missing_field<
        T: Deserialize<TokenDeserializer<Iter>, Error>
    >(&mut self, field: &'static str) -> Result<T, Error> {
        Err(Error::MissingField(field))
    }
}
//////////////////////////////////////////////////////////////////////////////
// Generates a `#[test]` fn that feeds each recorded token stream through a
// `TokenDeserializer` and asserts it deserializes to the expected value of
// the expected type.
//
// Fix: the transcriber previously repeated with `)+` while the matcher uses
// `$(...),*`; an empty case list would match the pattern but then fail to
// expand. Using `)*` keeps the two repetitions consistent (identical
// expansion for the one-or-more invocations below).
macro_rules! test_value {
    ($name:ident, [$($tokens:expr => $value:expr, $ty:ty),*]) => {
        #[test]
        fn $name() {
            $(
                let mut deserializer = TokenDeserializer::new($tokens.into_iter());
                let value: $ty = Deserialize::deserialize(&mut deserializer).unwrap();
                assert_eq!(value, $value);
            )*
        }
    }
}
// One token per primitive value: each stream is a single token that must
// round-trip into the matching Rust primitive type.
test_value!(test_primitives, [
    vec!(Token::Null) => (), (),
    vec!(Token::Bool(true)) => true, bool,
    vec!(Token::Bool(false)) => false, bool,
    vec!(Token::Isize(5)) => 5, isize,
    vec!(Token::I8(5)) => 5, i8,
    vec!(Token::I16(5)) => 5, i16,
    vec!(Token::I32(5)) => 5, i32,
    vec!(Token::I64(5)) => 5, i64,
    vec!(Token::Usize(5)) => 5, usize,
    vec!(Token::U8(5)) => 5, u8,
    vec!(Token::U16(5)) => 5, u16,
    vec!(Token::U32(5)) => 5, u32,
    vec!(Token::U64(5)) => 5, u64,
    vec!(Token::F32(5.0)) => 5.0, f32,
    vec!(Token::F64(5.0)) => 5.0, f64,
    vec!(Token::Char('c')) => 'c', char,
    vec!(Token::Str("abc")) => "abc", &str,
    vec!(Token::String("abc".to_string())) => "abc".to_string(), string::String
]);
// Tuples: `TupleStart(arity)` followed by the element tokens in order and a
// closing `End`; the third case nests tuples inside a tuple.
test_value!(test_tuples, [
    vec!(
        Token::TupleStart(0),
        Token::End,
    ) => (), (),
    vec!(
        Token::TupleStart(2),
        Token::Isize(5),
        Token::Str("a"),
        Token::End,
    ) => (5, "a"), (isize, &'static str),
    vec!(
        Token::TupleStart(3),
        Token::Null,
        Token::TupleStart(0),
        Token::End,
        Token::TupleStart(2),
        Token::Isize(5),
        Token::Str("a"),
        Token::End,
        Token::End,
    ) => ((), (), (5, "a")), ((), (), (isize, &'static str))
]);
// Options: `Option(false)` is None; `Option(true)` is followed by the
// tokens of the contained value.
test_value!(test_options, [
    vec!(Token::Option(false)) => None, option::Option<isize>,
    vec!(
        Token::Option(true),
        Token::Isize(5),
    ) => Some(5), option::Option<isize>
]);
// Structs: `StructStart(name, field_count)` with each field introduced by a
// `Str` name token. The second case exercises the full nesting used by the
// `Outer`/`Inner` fixtures, including a map field.
test_value!(test_structs, [
    vec!(
        Token::StructStart("Outer", 1),
        Token::Str("inner"),
        Token::SeqStart(0),
        Token::End,
        Token::End,
    ) => Outer { inner: vec!() }, Outer,
    vec!(
        Token::StructStart("Outer", 1),
        Token::Str("inner"),
        Token::SeqStart(1),
        Token::StructStart("Inner", 3),
        Token::Str("a"),
        Token::Null,
        Token::Str("b"),
        Token::Usize(5),
        Token::Str("c"),
        Token::MapStart(1),
        Token::String("abc".to_string()),
        Token::Option(true),
        Token::Char('c'),
        Token::End,
        Token::End,
        Token::End,
        Token::End,
    ) => Outer {
        inner: vec!(
            Inner {
                a: (),
                b: 5,
                c: treemap!("abc".to_string() => Some('c')),
            },
        ),
    }, Outer
]);
// Enums: `EnumStart(enum_name, variant_name, payload_len)` followed by the
// payload tokens (none for the unit variant `Dog`).
test_value!(test_enums, [
    vec!(
        Token::EnumStart("Animal", "Dog", 0),
        Token::End,
    ) => Animal::Dog, Animal,
    vec!(
        Token::EnumStart("Animal", "Frog", 2),
        Token::String("Henry".to_string()),
        Token::Isize(349),
        Token::End,
    ) => Animal::Frog("Henry".to_string(), 349), Animal
]);
// Sequences: `SeqStart(len)` + elements + `End`; the last case checks
// nested `Vec<Vec<isize>>` streams.
test_value!(test_vecs, [
    vec!(
        Token::SeqStart(0),
        Token::End,
    ) => vec!(), Vec<isize>,
    vec!(
        Token::SeqStart(3),
        Token::Isize(5),
        Token::Isize(6),
        Token::Isize(7),
        Token::End,
    ) => vec!(5, 6, 7), Vec<isize>,
    vec!(
        Token::SeqStart(3),
        Token::SeqStart(1),
        Token::Isize(1),
        Token::End,
        Token::SeqStart(2),
        Token::Isize(2),
        Token::Isize(3),
        Token::End,
        Token::SeqStart(3),
        Token::Isize(4),
        Token::Isize(5),
        Token::Isize(6),
        Token::End,
        Token::End,
    ) => vec!(vec!(1), vec!(2, 3), vec!(4, 5, 6)), Vec<Vec<isize>>
]);
// Maps: `MapStart(len)` followed by alternating key/value tokens.
// Note: the `BTreeMap<isize, string::String>` type of the second case is
// split across a line break inside the path, which is legal Rust.
test_value!(test_treemaps, [
    vec!(
        Token::MapStart(0),
        Token::End,
    ) => treemap!(), BTreeMap<isize, string::String>,
    vec!(
        Token::MapStart(2),
        Token::Isize(5),
        Token::String("a".to_string()),
        Token::Isize(6),
        Token::String("b".to_string()),
        Token::End,
    ) => treemap!(5is => "a".to_string(), 6is => "b".to_string()), BTreeMap<isize, string::
String>
]);

1384
tests/json.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,7 @@
#![feature(plugin)]
#![plugin(serde_macros)]
extern crate serde;
#[plugin]
extern crate serde_macros;
#[derive(PartialEq, Debug)]
#[derive_serialize]

517
tests/ser.rs Normal file
View File

@ -0,0 +1,517 @@
#![feature(plugin)]
#![plugin(serde_macros)]
extern crate serde;
use std::collections::{HashMap, BTreeMap};
use std::{option, string};
use serde::ser::{Serializer, Serialize};
//////////////////////////////////////////////////////////////////////////////
// Fixture struct with one field of each interesting shape: unit, unsigned
// integer, and a map of optional chars. Field order matters — the
// generated serializer emits fields in declaration order.
#[derive(Clone, PartialEq, Debug)]
#[derive_serialize]
struct Inner {
    a: (),
    b: usize,
    c: HashMap<string::String, option::Option<char>>,
}
//////////////////////////////////////////////////////////////////////////////
// Fixture struct wrapping a sequence of `Inner`, used to exercise nested
// struct/seq serialization.
#[derive(Clone, PartialEq, Debug)]
#[derive_serialize]
struct Outer {
    inner: Vec<Inner>,
}
//////////////////////////////////////////////////////////////////////////////
// Fixture enum with a unit variant and a tuple variant; serialization emits
// `EnumStart(enum_name, variant_name, payload_len)` per the tests below.
#[derive(Clone, PartialEq, Debug)]
#[derive_serialize]
enum Animal {
    Dog,
    Frog(String, isize)
}
//////////////////////////////////////////////////////////////////////////////
// Mirror of the `Serializer` callbacks: each variant records one call made
// on `AssertSerializer`, so a test can spell out the exact token stream a
// value is expected to produce. `'a` borrows the string payloads.
#[derive(Clone, PartialEq, Debug)]
pub enum Token<'a> {
    // Primitive values, one variant per `serialize_*` primitive hook.
    Null,
    Bool(bool),
    Isize(isize),
    I8(i8),
    I16(i16),
    I32(i32),
    I64(i64),
    Usize(usize),
    U8(u8),
    U16(u16),
    U32(u32),
    U64(u64),
    F32(f32),
    F64(f64),
    Char(char),
    Str(&'a str),
    // Compound values: start/separator/end markers for each container kind.
    TupleStart(usize),
    TupleSep,
    TupleEnd,
    StructStart(&'a str, usize),
    StructSep(&'a str),
    StructEnd,
    EnumStart(&'a str, &'a str, usize),
    EnumSep,
    EnumEnd,
    Option(bool),
    SeqStart(usize),
    SeqEnd,
    MapStart(usize),
    MapEnd,
}
// Error type required by the `Serializer` trait bound; these tests never
// actually produce errors (mismatches panic via `assert_eq!` instead),
// hence the `dead_code` allowance.
#[derive(Debug)]
#[allow(dead_code)]
enum Error {
    EndOfStream,
    SyntaxError,
}
//////////////////////////////////////////////////////////////////////////////
struct AssertSerializer<Iter> {
iter: Iter,
}
impl<'a, Iter: Iterator<Item=Token<'a>>> AssertSerializer<Iter> {
    /// Builds an asserting serializer over the expected token stream.
    fn new(iter: Iter) -> AssertSerializer<Iter> {
        AssertSerializer { iter: iter }
    }

    /// Compares `token` against the next expected token, panicking when the
    /// expectation stream is exhausted or the two tokens differ.
    fn serialize<'b>(&mut self, token: Token<'b>) -> Result<(), Error> {
        match self.iter.next() {
            Some(t) => {
                assert_eq!(t, token);
                Ok(())
            }
            None => panic!(),
        }
    }
}
// `Serializer` impl: every callback forwards to `self.serialize`, which
// checks the call against the next pre-recorded token. Primitive hooks map
// 1:1 onto `Token` variants; compound hooks bracket their contents with
// start/sep/end tokens.
impl<'a, Iter: Iterator<Item=Token<'a>>> Serializer<Error> for AssertSerializer<Iter> {
    fn serialize_null(&mut self) -> Result<(), Error> {
        self.serialize(Token::Null)
    }
    fn serialize_bool(&mut self, v: bool) -> Result<(), Error> {
        self.serialize(Token::Bool(v))
    }
    fn serialize_isize(&mut self, v: isize) -> Result<(), Error> {
        self.serialize(Token::Isize(v))
    }
    fn serialize_i8(&mut self, v: i8) -> Result<(), Error> {
        self.serialize(Token::I8(v))
    }
    fn serialize_i16(&mut self, v: i16) -> Result<(), Error> {
        self.serialize(Token::I16(v))
    }
    fn serialize_i32(&mut self, v: i32) -> Result<(), Error> {
        self.serialize(Token::I32(v))
    }
    fn serialize_i64(&mut self, v: i64) -> Result<(), Error> {
        self.serialize(Token::I64(v))
    }
    fn serialize_usize(&mut self, v: usize) -> Result<(), Error> {
        self.serialize(Token::Usize(v))
    }
    fn serialize_u8(&mut self, v: u8) -> Result<(), Error> {
        self.serialize(Token::U8(v))
    }
    fn serialize_u16(&mut self, v: u16) -> Result<(), Error> {
        self.serialize(Token::U16(v))
    }
    fn serialize_u32(&mut self, v: u32) -> Result<(), Error> {
        self.serialize(Token::U32(v))
    }
    fn serialize_u64(&mut self, v: u64) -> Result<(), Error> {
        self.serialize(Token::U64(v))
    }
    fn serialize_f32(&mut self, v: f32) -> Result<(), Error> {
        self.serialize(Token::F32(v))
    }
    fn serialize_f64(&mut self, v: f64) -> Result<(), Error> {
        self.serialize(Token::F64(v))
    }
    fn serialize_char(&mut self, v: char) -> Result<(), Error> {
        self.serialize(Token::Char(v))
    }
    fn serialize_str(&mut self, v: &str) -> Result<(), Error> {
        self.serialize(Token::Str(v))
    }
    fn serialize_tuple_start(&mut self, len: usize) -> Result<(), Error> {
        self.serialize(Token::TupleStart(len))
    }
    // Each tuple element is preceded by a `TupleSep` token.
    fn serialize_tuple_elt<
        T: Serialize<AssertSerializer<Iter>, Error>
    >(&mut self, value: &T) -> Result<(), Error> {
        try!(self.serialize(Token::TupleSep));
        value.serialize(self)
    }
    fn serialize_tuple_end(&mut self) -> Result<(), Error> {
        self.serialize(Token::TupleEnd)
    }
    fn serialize_struct_start(&mut self, name: &str, len: usize) -> Result<(), Error> {
        self.serialize(Token::StructStart(name, len))
    }
    // Each struct field is preceded by a `StructSep` carrying its name.
    fn serialize_struct_elt<
        T: Serialize<AssertSerializer<Iter>, Error>
    >(&mut self, name: &str, value: &T) -> Result<(), Error> {
        try!(self.serialize(Token::StructSep(name)));
        value.serialize(self)
    }
    fn serialize_struct_end(&mut self) -> Result<(), Error> {
        self.serialize(Token::StructEnd)
    }
    fn serialize_enum_start(&mut self, name: &str, variant: &str, len: usize) -> Result<(), Error> {
        self.serialize(Token::EnumStart(name, variant, len))
    }
    fn serialize_enum_elt<
        T: Serialize<AssertSerializer<Iter>, Error>
    >(&mut self, value: &T) -> Result<(), Error> {
        try!(self.serialize(Token::EnumSep));
        value.serialize(self)
    }
    fn serialize_enum_end(&mut self) -> Result<(), Error> {
        self.serialize(Token::EnumEnd)
    }
    // `Option(bool)` records presence; `Some` is followed by the value.
    fn serialize_option<
        T: Serialize<AssertSerializer<Iter>, Error>
    >(&mut self, v: &option::Option<T>) -> Result<(), Error> {
        match *v {
            Some(ref v) => {
                try!(self.serialize(Token::Option(true)));
                v.serialize(self)
            }
            None => {
                self.serialize(Token::Option(false))
            }
        }
    }
    // The recorded length is the iterator's `size_hint()` lower bound.
    fn serialize_seq<
        T: Serialize<AssertSerializer<Iter>, Error>,
        SeqIter: Iterator<Item=T>
    >(&mut self, iter: SeqIter) -> Result<(), Error> {
        let (len, _) = iter.size_hint();
        try!(self.serialize(Token::SeqStart(len)));
        for elt in iter {
            try!(elt.serialize(self));
        }
        self.serialize(Token::SeqEnd)
    }
    // Maps emit key then value for each entry, with no separator tokens.
    fn serialize_map<
        K: Serialize<AssertSerializer<Iter>, Error>,
        V: Serialize<AssertSerializer<Iter>, Error>,
        MapIter: Iterator<Item=(K, V)>
    >(&mut self, iter: MapIter) -> Result<(), Error> {
        let (len, _) = iter.size_hint();
        try!(self.serialize(Token::MapStart(len)));
        for (key, value) in iter {
            try!(key.serialize(self));
            try!(value.serialize(self));
        }
        self.serialize(Token::MapEnd)
    }
}
//////////////////////////////////////////////////////////////////////////////
#[test]
fn test_tokens_int() {
    // A bare isize serializes to a single `Isize` token.
    let expected = vec![Token::Isize(5)];
    let mut s = AssertSerializer::new(expected.into_iter());
    5is.serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_str() {
    // A string slice serializes to a single `Str` token.
    let mut s = AssertSerializer::new(vec![Token::Str("a")].into_iter());
    "a".serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_null() {
    // Unit `()` serializes to a single `Null` token.
    let mut s = AssertSerializer::new(vec![Token::Null].into_iter());
    ().serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_option_none() {
    // `None` is a single `Option(false)` token with no payload.
    let mut s = AssertSerializer::new(vec![Token::Option(false)].into_iter());
    None::<isize>.serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_option_some() {
    // `Some(v)` is `Option(true)` followed by the tokens of `v`.
    let expected = vec![
        Token::Option(true),
        Token::Isize(5),
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    Some(5is).serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_tuple() {
    // A pair: TupleStart(2), each element preceded by TupleSep, TupleEnd.
    let expected = vec![
        Token::TupleStart(2),
        Token::TupleSep,
        Token::Isize(5),
        Token::TupleSep,
        Token::Str("a"),
        Token::TupleEnd,
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    (5is, "a").serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_tuple_compound() {
    // A triple whose last element is itself a pair: nested TupleStart/End
    // markers, with every element (including the nested tuple) preceded by
    // a TupleSep.
    let tokens = vec!(
        Token::TupleStart(3),
        Token::TupleSep,
        Token::Null,
        Token::TupleSep,
        Token::Null,
        Token::TupleSep,
        Token::TupleStart(2),
        Token::TupleSep,
        Token::Isize(5),
        Token::TupleSep,
        Token::Str("a"),
        Token::TupleEnd,
        Token::TupleEnd,
    );
    let mut serializer = AssertSerializer::new(tokens.into_iter());
    ((), (), (5is, "a")).serialize(&mut serializer).unwrap();
    assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_struct_empty() {
    // An `Outer` whose seq field is empty: struct wrapper around an empty
    // sequence.
    let expected = vec![
        Token::StructStart("Outer", 1),
        Token::StructSep("inner"),
        Token::SeqStart(0),
        Token::SeqEnd,
        Token::StructEnd,
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    Outer { inner: vec![] }.serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_struct() {
    // Full nesting: Outer -> seq -> Inner -> map, checking the complete
    // bracketing of struct, sequence, and map tokens.
    let tokens = vec!(
        Token::StructStart("Outer", 1),
        Token::StructSep("inner"),
        Token::SeqStart(1),
        Token::StructStart("Inner", 3),
        Token::StructSep("a"),
        Token::Null,
        Token::StructSep("b"),
        Token::Usize(5),
        Token::StructSep("c"),
        Token::MapStart(1),
        Token::Str("abc"),
        Token::Option(true),
        Token::Char('c'),
        Token::MapEnd,
        Token::StructEnd,
        Token::SeqEnd,
        Token::StructEnd,
    );
    let mut serializer = AssertSerializer::new(tokens.into_iter());
    let mut map = HashMap::new();
    map.insert("abc".to_string(), Some('c'));
    Outer {
        inner: vec!(
            Inner {
                a: (),
                b: 5,
                c: map,
            },
        )
    }.serialize(&mut serializer).unwrap();
    assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_enum() {
    // Unit variant: EnumStart with zero payload length, then EnumEnd.
    let tokens = vec!(
        Token::EnumStart("Animal", "Dog", 0),
        Token::EnumEnd,
    );
    let mut serializer = AssertSerializer::new(tokens.into_iter());
    Animal::Dog.serialize(&mut serializer).unwrap();
    assert_eq!(serializer.iter.next(), None);
    // Tuple variant: each payload element is preceded by an EnumSep.
    let tokens = vec!(
        Token::EnumStart("Animal", "Frog", 2),
        Token::EnumSep,
        Token::Str("Henry"),
        Token::EnumSep,
        Token::Isize(349),
        Token::EnumEnd,
    );
    let mut serializer = AssertSerializer::new(tokens.into_iter());
    Animal::Frog("Henry".to_string(), 349).serialize(&mut serializer).unwrap();
    assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_vec_empty() {
    // An empty Vec: SeqStart(0) immediately followed by SeqEnd.
    let expected = vec![
        Token::SeqStart(0),
        Token::SeqEnd,
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    let empty: Vec<isize> = vec![];
    empty.serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_vec() {
    // A flat Vec<isize>: SeqStart(len), one token per element, SeqEnd.
    let expected = vec![
        Token::SeqStart(3),
        Token::Isize(5),
        Token::Isize(6),
        Token::Isize(7),
        Token::SeqEnd,
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    vec![5is, 6, 7].serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}
#[test]
fn test_tokens_vec_compound() {
    // Vec<Vec<isize>>: each inner Vec gets its own SeqStart/SeqEnd pair
    // inside the outer sequence.
    let tokens = vec!(
        Token::SeqStart(3),
        Token::SeqStart(1),
        Token::Isize(1),
        Token::SeqEnd,
        Token::SeqStart(2),
        Token::Isize(2),
        Token::Isize(3),
        Token::SeqEnd,
        Token::SeqStart(3),
        Token::Isize(4),
        Token::Isize(5),
        Token::Isize(6),
        Token::SeqEnd,
        Token::SeqEnd,
    );
    let mut serializer = AssertSerializer::new(tokens.into_iter());
    (vec!(vec!(1is), vec!(2, 3), vec!(4, 5, 6))).serialize(&mut serializer).unwrap();
    assert_eq!(serializer.iter.next(), None);
}
#[test]
fn test_tokens_treemap() {
    // A BTreeMap serializes in key order: MapStart(len), then alternating
    // key/value tokens, then MapEnd.
    let expected = vec![
        Token::MapStart(2),
        Token::Isize(5),
        Token::Str("a"),
        Token::Isize(6),
        Token::Str("b"),
        Token::MapEnd,
    ];
    let mut s = AssertSerializer::new(expected.into_iter());
    let mut map = BTreeMap::new();
    map.insert(5is, "a".to_string());
    map.insert(6is, "b".to_string());
    map.serialize(&mut s).unwrap();
    assert_eq!(s.iter.next(), None);
}