Change the json enum serialization to {"variant":["fields", ...]}
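For concreteness, this is the effect on the `Animal` enum used throughout the test suite (a sketch assembled from the updated test expectations further below; `with_str_writer` and `Encoder` are the helpers those tests already use):

// The test enum is roughly: enum Animal { Dog, Frog(String, int) }
let animal = Frog("Henry".to_string(), 349);
let s = with_str_writer(|wr| {
    let mut encoder = Encoder::new(wr);
    animal.encode(&mut encoder).unwrap();
});

// Before this commit: Dog                            => "Dog"
//                     Frog("Henry".to_string(), 349) => {"variant":"Frog","fields":["Henry",349]}
// After this commit:  Dog                            => {"Dog":[]}
//                     Frog("Henry".to_string(), 349) => {"Frog":["Henry",349]}
assert_eq!(s, "{\"Frog\":[\"Henry\",349]}".to_string());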
test bench_enum::bench_decoder_dog ... bench: 112 ns/iter (+/- 15)
test bench_enum::bench_decoder_frog ... bench: 336 ns/iter (+/- 6)
test bench_enum::bench_deserializer_dog ... bench: 120 ns/iter (+/- 4)
test bench_enum::bench_deserializer_frog ... bench: 317 ns/iter (+/- 15)
test bench_map::bench_decoder_000 ... bench: 528 ns/iter (+/- 13)
test bench_map::bench_decoder_003 ... bench: 2459 ns/iter (+/- 74)
test bench_map::bench_decoder_100 ... bench: 70756 ns/iter (+/- 1979)
test bench_map::bench_deserializer_000 ... bench: 753 ns/iter (+/- 30)
test bench_map::bench_deserializer_003 ... bench: 2574 ns/iter (+/- 111)
test bench_map::bench_deserializer_100 ... bench: 62374 ns/iter (+/- 1714)
test bench_struct::bench_decoder_0_0 ... bench: 609 ns/iter (+/- 14)
test bench_struct::bench_decoder_1_0 ... bench: 1620 ns/iter (+/- 44)
test bench_struct::bench_decoder_1_5 ... bench: 4393 ns/iter (+/- 88)
test bench_struct::bench_deserializer_0_0 ... bench: 699 ns/iter (+/- 10)
test bench_struct::bench_deserializer_1_0 ... bench: 2160 ns/iter (+/- 53)
test bench_struct::bench_deserializer_1_5 ... bench: 4987 ns/iter (+/- 87)
test bench_vec::bench_decoder_int_000 ... bench: 20 ns/iter (+/- 1)
test bench_vec::bench_decoder_int_003 ... bench: 148 ns/iter (+/- 3)
test bench_vec::bench_decoder_int_100 ... bench: 1009 ns/iter (+/- 44)
test bench_vec::bench_decoder_u8_000 ... bench: 16 ns/iter (+/- 0)
test bench_vec::bench_decoder_u8_003 ... bench: 152 ns/iter (+/- 12)
test bench_vec::bench_decoder_u8_100 ... bench: 1457 ns/iter (+/- 95)
test bench_vec::bench_deserializer_int_000 ... bench: 16 ns/iter (+/- 0)
test bench_vec::bench_deserializer_int_003 ... bench: 153 ns/iter (+/- 9)
test bench_vec::bench_deserializer_int_100 ... bench: 1015 ns/iter (+/- 38)
test bench_vec::bench_deserializer_u8_000 ... bench: 16 ns/iter (+/- 1)
test bench_vec::bench_deserializer_u8_003 ... bench: 160 ns/iter (+/- 130)
test bench_vec::bench_deserializer_u8_100 ... bench: 1225 ns/iter (+/- 112)
test json::tests::bench_decoder_large ... bench: 1979093 ns/iter (+/- 67769)
test json::tests::bench_decoder_small ... bench: 4644 ns/iter (+/- 208)
test json::tests::bench_decoder_streaming_large ... bench: 848383 ns/iter (+/- 96301)
test json::tests::bench_decoder_streaming_small ... bench: 1834 ns/iter (+/- 120)
test json::tests::bench_deserializer_large ... bench: 1882598 ns/iter (+/- 137262)
test json::tests::bench_deserializer_small ... bench: 3945 ns/iter (+/- 161)
test json::tests::bench_deserializer_streaming_large ... bench: 990086 ns/iter (+/- 157794)
test json::tests::bench_deserializer_streaming_small ... bench: 2135 ns/iter (+/- 211)
parent ee3dee8893
commit d8fb2abd03
@@ -17,14 +17,14 @@ impl<E, D: Deserializer<E>> Deserializable<E, D> for Animal {
fn deserialize_token(d: &mut D, token: Token) -> Result<Animal, E> {
match try!(d.expect_enum_start(token, "Animal", ["Dog", "Frog"])) {
0 => {
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Dog)
}
1 => {
let x0 = try!(Deserializable::deserialize(d));
let x1 = try!(Deserializable::deserialize(d));
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Frog(x0, x1))
}
de.rs (47 changed lines)
@@ -159,6 +159,14 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
}
}
#[inline]
fn expect_tuple_end(&mut self) -> Result<(), E> {
match try!(self.expect_token()) {
End => Ok(()),
_ => self.syntax_error(),
}
}
#[inline]
fn expect_struct_start(&mut self, token: Token, name: &str) -> Result<(), E> {
match token {
@@ -194,6 +202,14 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
Deserializable::deserialize(self)
}
#[inline]
fn expect_struct_end(&mut self) -> Result<(), E> {
match try!(self.expect_token()) {
End => Ok(()),
_ => self.syntax_error(),
}
}
#[inline]
fn expect_enum_start(&mut self, token: Token, name: &str, variants: &[&str]) -> Result<uint, E> {
match token {
@@ -211,6 +227,14 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
}
}
#[inline]
fn expect_enum_end(&mut self) -> Result<(), E> {
match try!(self.expect_token()) {
End => Ok(()),
_ => self.syntax_error(),
}
}
/*
#[inline]
fn expect_collection<
@@ -246,14 +270,6 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
_ => self.syntax_error(),
}
}
#[inline]
fn expect_end(&mut self) -> Result<(), E> {
match try!(self.expect_token()) {
End => Ok(()),
_ => self.syntax_error(),
}
}
}
//////////////////////////////////////////////////////////////////////////////
@@ -473,10 +489,9 @@ macro_rules! impl_deserialize_tuple {
$name
},)*);
match try!(d.expect_token()) {
End => Ok(result),
_ => d.syntax_error(),
}
try!(d.expect_tuple_end());
Ok(result)
}
}
peel!($($name,)*)
@@ -714,7 +729,7 @@ mod tests {
let a = try!(d.expect_struct_field("a"));
let b = try!(d.expect_struct_field("b"));
let c = try!(d.expect_struct_field("c"));
try!(d.expect_end());
try!(d.expect_struct_end());
Ok(Inner { a: a, b: b, c: c })
}
}
@@ -731,7 +746,7 @@ mod tests {
fn deserialize_token(d: &mut D, token: Token) -> Result<Outer, E> {
try!(d.expect_struct_start(token, "Outer"));
let inner = try!(d.expect_struct_field("inner"));
try!(d.expect_end());
try!(d.expect_struct_end());
Ok(Outer { inner: inner })
}
}
@@ -749,13 +764,13 @@ mod tests {
fn deserialize_token(d: &mut D, token: Token) -> Result<Animal, E> {
match try!(d.expect_enum_start(token, "Animal", ["Dog", "Frog"])) {
0 => {
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Dog)
}
1 => {
let x0 = try!(Deserializable::deserialize(d));
let x1 = try!(Deserializable::deserialize(d));
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Frog(x0, x1))
}
_ => unreachable!(),
json.rs (250 changed lines)
@@ -228,7 +228,7 @@ fn main() {
*/
use std::char;
use std::collections::{Deque, HashMap, RingBuf, TreeMap};
use std::collections::{HashMap, TreeMap};
use std::collections::treemap;
use std::f64;
use std::fmt;
@@ -293,14 +293,9 @@ impl<E, D: de::Deserializer<E>> de::Deserializable<E, D> for Json {
de::EnumStart(_, name, len) => {
let token = de::SeqStart(len);
let fields: Vec<Json> = try!(de::Deserializable::deserialize_token(d, token));
if fields.is_empty() {
Ok(String(name.to_string()))
} else {
let mut object = TreeMap::new();
object.insert("variant".to_string(), String(name.to_string()));
object.insert("fields".to_string(), List(fields));
Ok(Object(object))
}
let mut object = TreeMap::new();
object.insert(name.to_string(), List(fields));
Ok(Object(object))
}
de::End => d.syntax_error(),
}
@@ -415,10 +410,6 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
_name: &str,
variants: &[&str]) -> Result<uint, ParserError> {
let variant = match token {
de::String(variant) => {
self.stack.push(JsonDeserializerEndState);
variant
}
de::MapStart(_) => {
let state = match self.stack.pop() {
Some(state) => state,
@@ -430,39 +421,22 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
_ => { fail!("state machine error, expected an object"); }
};
let mut variant = None;
let mut fields = None;
for (key, value) in iter {
if key.equiv(&"variant") {
match value {
String(v) => { variant = Some(v); }
value => {
return Err(ExpectedError("String".to_string(),
format!("{}", value)))
}
}
} else if key.equiv(&"fields") {
match value {
List(v) => { fields = Some(v); }
value => {
return Err(ExpectedError("List".to_string(),
format!("{}", value)))
}
}
}
}
let (variant, fields) = match (variant, fields) {
(Some(variant), Some(fields)) => (variant, fields),
(None, _) => {
return Err(MissingFieldError("variant".to_string()))
}
(_, None) => {
return Err(MissingFieldError("fields".to_string()))
let (variant, fields) = match iter.next() {
Some((variant, List(fields))) => (variant, fields),
Some((key, value)) => {
return Err(ExpectedError("List".to_string(), format!("{} => {}", key, value)));
}
None => { return Err(MissingFieldError("<variant-name>".to_string())); }
};
// Error out if there are other fields in the enum.
match iter.next() {
Some((key, value)) => {
return Err(ExpectedError("None".to_string(), format!("{} => {}", key, value)));
}
None => { }
}
self.stack.push(JsonDeserializerEndState);
for field in fields.move_iter().rev() {
@@ -673,20 +647,15 @@ impl<'a> serialize::Encoder<io::IoError> for Encoder<'a> {
fn emit_enum_variant(&mut self,
name: &str,
_id: uint,
cnt: uint,
_cnt: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
// enums are encoded as strings or objects
// Bunny => "Bunny"
// Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]}
if cnt == 0 {
write!(self.wr, "{}", escape_str(name))
} else {
try!(write!(self.wr, "\\{\"variant\":"));
try!(write!(self.wr, "{}", escape_str(name)));
try!(write!(self.wr, ",\"fields\":["));
try!(f(self));
write!(self.wr, "]\\}")
}
// enums are encoded as objects
// Kangaroo(34,"William") => {"Kangaroo": [34,"William"]}
try!(write!(self.wr, "\\{"));
try!(write!(self.wr, "{}", escape_str(name)));
try!(write!(self.wr, ":["));
try!(f(self));
write!(self.wr, "]\\}")
}
fn emit_enum_variant_arg(&mut self,
@@ -872,15 +841,20 @@ impl<'a> serialize::Encoder<io::IoError> for PrettyEncoder<'a> {
_: uint,
cnt: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
// enums are encoded as objects
// Kangaroo(34,"William") => {"Kangaroo": [34,"William"]}
if cnt == 0 {
write!(self.wr, "{}", escape_str(name))
write!(self.wr, "\\{{}: []\\}", escape_str(name))
} else {
self.indent += 2;
try!(write!(self.wr, "[\n{}{},\n", spaces(self.indent),
try!(write!(self.wr, "\\{\n{}{}: [\n", spaces(self.indent),
escape_str(name)));
self.indent += 2;
try!(f(self));
self.indent -= 2;
write!(self.wr, "\n{}]", spaces(self.indent))
try!(write!(self.wr, "\n{}]", spaces(self.indent)));
self.indent -= 2;
write!(self.wr, "\n{}\\}", spaces(self.indent))
}
}
@@ -1394,20 +1368,13 @@ pub struct Parser<T> {
line: uint,
col: uint,
// A state machine is kept to make it possible to interupt and resume parsing.
state: Vec<ParserState>,
tokens: RingBuf<de::Token>,
state_stack: Vec<ParserState>,
}
impl<T: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<T> {
#[inline]
fn next(&mut self) -> Option<Result<de::Token, ParserError>> {
// If we've cached any tokens return them now.
match self.tokens.pop_front() {
Some(token) => { return Some(Ok(token)); }
None => { }
}
let state = match self.state.pop() {
let state = match self.state_stack.pop() {
Some(state) => state,
None => {
// If we have no state left, then we're expecting the structure
@@ -1443,8 +1410,7 @@ impl<T: Iterator<char>> Parser<T> {
ch: Some('\x00'),
line: 1,
col: 0,
state: vec!(ParseValue),
tokens: RingBuf::new(),
state_stack: vec!(ParseValue),
};
p.bump();
return p;
@@ -1704,7 +1670,7 @@ impl<T: Iterator<char>> Parser<T> {
self.bump();
Ok(de::End)
} else {
self.state.push(ParseListCommaOrEnd);
self.state_stack.push(ParseListCommaOrEnd);
self.parse_value()
}
}
@@ -1714,7 +1680,7 @@ impl<T: Iterator<char>> Parser<T> {
if self.ch_is(',') {
self.bump();
self.state.push(ParseListCommaOrEnd);
self.state_stack.push(ParseListCommaOrEnd);
self.parse_value()
} else if self.ch_is(']') {
self.bump();
@@ -1756,7 +1722,7 @@ impl<T: Iterator<char>> Parser<T> {
fn parse_object_key(&mut self) -> Result<de::Token, ParserError> {
self.parse_whitespace();
self.state.push(ParseObjectValue);
self.state_stack.push(ParseObjectValue);
if self.eof() {
return self.error_event(EOFWhileParsingString);
@@ -1776,7 +1742,7 @@ impl<T: Iterator<char>> Parser<T> {
if self.ch_is(':') {
self.bump();
self.state.push(ParseObjectCommaOrEnd);
self.state_stack.push(ParseObjectCommaOrEnd);
self.parse_value()
} else if self.eof() {
self.error_event(EOFWhileParsingObject)
@@ -1806,12 +1772,12 @@ impl<T: Iterator<char>> Parser<T> {
}
'[' => {
self.bump();
self.state.push(ParseListStart);
self.state_stack.push(ParseListStart);
Ok(de::SeqStart(0))
}
'{' => {
self.bump();
self.state.push(ParseObjectStart);
self.state_stack.push(ParseObjectStart);
Ok(de::MapStart(0))
}
_ => {
@@ -1820,17 +1786,17 @@ impl<T: Iterator<char>> Parser<T> {
}
}
fn parse_ident(&mut self, ident: &str, value: de::Token) -> Result<de::Token, ParserError> {
fn parse_ident(&mut self, ident: &str, token: de::Token) -> Result<de::Token, ParserError> {
if ident.chars().all(|c| Some(c) == self.next_char()) {
self.bump();
Ok(value)
Ok(token)
} else {
Err(SyntaxError(InvalidSyntax, self.line, self.col))
self.error_event(InvalidSyntax)
}
}
fn error_event(&mut self, reason: ErrorCode) -> Result<de::Token, ParserError> {
self.state.clear();
self.state_stack.clear();
Err(SyntaxError(reason, self.line, self.col))
}
}
@@ -1858,75 +1824,47 @@ impl<T: Iterator<char>> de::Deserializer<ParserError> for Parser<T> {
}
}
// Special case treating enums as a String or a `{"variant": "...", "fields": [...]}`.
// Special case treating enums as a `{"<variant-name>": [<fields>]}`.
#[inline]
fn expect_enum_start(&mut self,
token: de::Token,
_name: &str,
variants: &[&str]) -> Result<uint, ParserError> {
// It's a little tricky to deserialize enums. Strings are simple to
// parse, but objects require us to preparse the entire object because
// we can't guarantee the order of the map.
let variant = match token {
de::String(variant) => {
// Make sure to terminate the enum.
self.tokens.push_front(de::End);
variant
}
de::MapStart(_len) => {
let mut variant = None;
let mut fields = None;
// Extract all the fields.
loop {
let field = match try!(self.expect_token()) {
de::End => { break; }
de::String(field) => field,
_ => { return self.error(InvalidSyntax); }
};
match field.as_slice() {
"variant" => {
let v: String = try!(de::Deserializable::deserialize(self));
variant = Some(v);
}
"fields" => {
let f: de::GatherTokens = try!(de::Deserializable::deserialize(self));
fields = Some(f.unwrap());
}
_ => { }
}
}
let (variant, fields) = match (variant, fields) {
(Some(variant), Some(fields)) => (variant, fields),
_ => { return self.error(MissingField); }
};
// Add all the field's tokens to our buffer. We need to skip
// over the `SeqStart` because we're pretending we're an
// `EnumStart`.
let mut iter = fields.move_iter();
match iter.next() {
Some(de::SeqStart(_)) => { }
_ => { return self.error(InvalidSyntax); }
}
self.tokens.extend(iter);
variant
}
match token {
de::MapStart(_) => { }
_ => { return self.error(InvalidSyntax); }
};
// Enums only have one field in them, which is the variant name.
let variant = match try!(self.expect_token()) {
de::String(variant) => variant,
_ => { return self.error(InvalidSyntax); }
};
// The variant's field is a list of the values.
match try!(self.expect_token()) {
de::SeqStart(_) => { }
_ => { return self.error(InvalidSyntax); }
}
match variants.iter().position(|v| *v == variant.as_slice()) {
Some(idx) => {
Ok(idx)
}
Some(idx) => Ok(idx),
None => self.error(UnknownVariant),
}
}
fn expect_enum_end(&mut self) -> Result<(), ParserError> {
// There will be one `End` for the list, and one for the object.
match try!(self.expect_token()) {
de::End => {
match try!(self.expect_token()) {
de::End => Ok(()),
_ => self.error(InvalidSyntax),
}
}
_ => self.error(InvalidSyntax),
}
}
}
/*
@@ -2582,14 +2520,14 @@ mod tests {
fn deserialize_token(d: &mut D, token: de::Token) -> Result<Animal, E> {
match try!(d.expect_enum_start(token, "Animal", ["Dog", "Frog"])) {
0 => {
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Dog)
}
1 => {
let x0 = try!(de::Deserializable::deserialize(d));
let x1 = try!(de::Deserializable::deserialize(d));
try!(d.expect_end());
try!(d.expect_enum_end());
Ok(Frog(x0, x1))
}
@@ -2601,12 +2539,17 @@ mod tests {
impl ToJson for Animal {
fn to_json(&self) -> Json {
match *self {
Dog => String("Dog".to_string()),
Dog => {
Object(
treemap!(
"Dog".to_string() => List(vec!())
)
)
}
Frog(ref x0, x1) => {
Object(
treemap!(
"variant".to_string() => "Frog".to_json(),
"fields".to_string() => List(vec!(x0.to_json(), x1.to_json()))
"Frog".to_string() => List(vec!(x0.to_json(), x1.to_json()))
)
)
}
@@ -2905,14 +2848,14 @@ mod tests {
let mut encoder = Encoder::new(wr);
animal.encode(&mut encoder).unwrap();
}),
"\"Dog\"".to_string()
"{\"Dog\":[]}".to_string()
);
assert_eq!(
with_str_writer(|wr| {
let mut encoder = PrettyEncoder::new(wr);
animal.encode(&mut encoder).unwrap();
}),
"\"Dog\"".to_string()
"{\"Dog\": []}".to_string()
);
let animal = Frog("Henry".to_string(), 349);
@@ -2921,7 +2864,7 @@ mod tests {
let mut encoder = Encoder::new(wr);
animal.encode(&mut encoder).unwrap();
}),
"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}".to_string()
"{\"Frog\":[\"Henry\",349]}".to_string()
);
assert_eq!(
with_str_writer(|wr| {
@@ -2929,11 +2872,12 @@ mod tests {
animal.encode(&mut encoder).unwrap();
}),
"\
[\n \
\"Frog\",\n \
\"Henry\",\n \
349\n\
]".to_string()
{\n \
\"Frog\": [\n \
\"Henry\",\n \
349\n \
]\n\
}".to_string()
);
}
@@ -3293,16 +3237,16 @@ mod tests {
#[test]
fn test_parse_enum() {
test_parse_ok([
("\"Dog\"", Dog),
("{\"Dog\": []}", Dog),
(
"{\"variant\": \"Frog\", \"fields\": [\"Henry\", 349]}",
"{\"Frog\": [\"Henry\", 349]}",
Frog("Henry".to_string(), 349),
),
]);
test_parse_ok([
(
"{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\"fields\":[\"Henry\", 349]}}",
"{\"a\": {\"Dog\": []}, \"b\": {\"Frog\":[\"Henry\", 349]}}",
treemap!(
"a".to_string() => Dog,
"b".to_string() => Frog("Henry".to_string(), 349)