Merge pull request #5 from kvark/name

`serial_name` attribute
Erick Tryzelaar 2014-09-07 11:18:22 -07:00
commit 906e9db9d5
4 changed files with 197 additions and 64 deletions

View File

@@ -10,14 +10,23 @@ extern crate rustc;
 use std::gc::Gc;
 use syntax::ast::{
+    Attribute,
     Ident,
     MetaItem,
+    MetaNameValue,
     Item,
+    ItemEnum,
+    ItemStruct,
     Expr,
     MutMutable,
     LitNil,
+    LitStr,
+    P,
+    StructField,
+    Variant,
 };
 use syntax::ast;
+use syntax::attr;
 use syntax::codemap::Span;
 use syntax::ext::base::{ExtCtxt, ItemDecorator};
 use syntax::ext::build::AstBuilder;
@@ -103,7 +112,7 @@ fn expand_deriving_serializable(cx: &mut ExtCtxt,
            ),
            attributes: attrs,
            combine_substructure: combine_substructure(|a, b, c| {
-               serializable_substructure(a, b, c)
+               serializable_substructure(a, b, c, item)
            }),
        })
    };
@@ -111,12 +120,15 @@ fn expand_deriving_serializable(cx: &mut ExtCtxt,
    trait_def.expand(cx, mitem, item, push)
}
-fn serializable_substructure(cx: &ExtCtxt, span: Span,
-                             substr: &Substructure) -> Gc<Expr> {
+fn serializable_substructure(cx: &ExtCtxt,
+                             span: Span,
+                             substr: &Substructure,
+                             item: Gc<Item>
+                             ) -> Gc<Expr> {
     let serializer = substr.nonself_args[0];
-    return match *substr.fields {
-        Struct(ref fields) => {
+    match (&item.deref().node, substr.fields) {
+        (&ItemStruct(ref definition, _), &Struct(ref fields)) => {
             if fields.is_empty() {
                 // unit structs have no fields and need to return `Ok()`
                 quote_expr!(cx, Ok(()))
@@ -127,12 +139,15 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
             );
             let len = fields.len();
-            let mut stmts: Vec<Gc<ast::Stmt>> = fields.iter()
+            let mut stmts: Vec<Gc<ast::Stmt>> = definition.fields.iter()
+                .zip(fields.iter())
                 .enumerate()
-                .map(|(i, &FieldInfo { name, self_, span, .. })| {
-                    let name = match name {
-                        Some(id) => token::get_ident(id),
-                        None => token::intern_and_get_ident(format!("_field{}", i).as_slice()),
+                .map(|(i, (def, &FieldInfo { name, self_, span, .. }))| {
+                    let serial_name = find_serial_name(def.node.attrs.iter());
+                    let name = match (serial_name, name) {
+                        (Some(serial), _) => serial.clone(),
+                        (None, Some(id)) => token::get_ident(id),
+                        (None, None) => token::intern_and_get_ident(format!("_field{}", i).as_slice()),
                     };
                     let name = cx.expr_str(span, name);
@@ -152,7 +167,7 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
             }
         }
-        EnumMatching(_idx, variant, ref fields) => {
+        (&ItemEnum(ref definition, _), &EnumMatching(_idx, variant, ref fields)) => {
             let type_name = cx.expr_str(
                 span,
                 token::get_ident(substr.type_ident)
@@ -163,8 +178,10 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
             );
             let len = fields.len();
-            let stmts: Vec<Gc<ast::Stmt>> = fields.iter()
-                .map(|&FieldInfo { self_, span, .. }| {
+            let stmts: Vec<Gc<ast::Stmt>> = definition.variants.iter()
+                .zip(fields.iter())
+                .map(|(def, &FieldInfo { self_, span, .. })| {
+                    let _serial_name = find_serial_name(def.node.attrs.iter());
                     quote_stmt!(
                         cx,
                         try!($serializer.serialize_enum_elt(&$self_))
@@ -241,20 +258,22 @@ fn deserializable_substructure(cx: &mut ExtCtxt, span: Span,
     let token = substr.nonself_args[1];
     match *substr.fields {
-        StaticStruct(_, ref fields) => {
+        StaticStruct(ref definition, ref fields) => {
             deserialize_struct(
                 cx,
                 span,
                 substr.type_ident,
+                definition.fields.as_slice(),
                 fields,
                 deserializer,
                 token)
         }
-        StaticEnum(_, ref fields) => {
+        StaticEnum(ref definition, ref fields) => {
             deserialize_enum(
                 cx,
                 span,
                 substr.type_ident,
+                definition.variants.as_slice(),
                 fields.as_slice(),
                 deserializer,
                 token)
@@ -267,14 +286,21 @@ fn deserialize_struct(
     cx: &ExtCtxt,
     span: Span,
     type_ident: Ident,
+    definitions: &[StructField],
     fields: &StaticFields,
     deserializer: Gc<ast::Expr>,
     token: Gc<ast::Expr>
 ) -> Gc<ast::Expr> {
+    let serial_names: Vec<Option<token::InternedString>> =
+        definitions.iter().map(|def|
+            find_serial_name(def.node.attrs.iter())
+        ).collect();
     let struct_block = deserialize_struct_from_struct(
         cx,
         span,
         type_ident,
+        serial_names.as_slice(),
         fields,
         deserializer
     );
@@ -283,6 +309,7 @@ fn deserialize_struct(
         cx,
         span,
         type_ident,
+        serial_names.as_slice(),
         fields,
         deserializer
     );
@@ -307,6 +334,7 @@ fn deserialize_struct_from_struct(
     cx: &ExtCtxt,
     span: Span,
     type_ident: Ident,
+    serial_names: &[Option<token::InternedString>],
     fields: &StaticFields,
     deserializer: Gc<ast::Expr>
 ) -> Gc<ast::Expr> {
@@ -316,6 +344,7 @@ fn deserialize_struct_from_struct(
         cx,
         span,
         type_ident,
+        serial_names.as_slice(),
         fields,
         |cx, span, name| {
             let name = cx.expr_str(span, name);
@@ -337,6 +366,7 @@ fn deserialize_struct_from_map(
     cx: &ExtCtxt,
     span: Span,
     type_ident: Ident,
+    serial_names: &[Option<token::InternedString>],
     fields: &StaticFields,
     deserializer: Gc<ast::Expr>
 ) -> Gc<ast::Expr> {
@@ -353,9 +383,14 @@ fn deserialize_struct_from_map(
         .collect();
     // Declare key arms.
-    let key_arms: Vec<ast::Arm> = fields.iter()
-        .map(|&(name, span)| {
-            let s = cx.expr_str(span, token::get_ident(name));
+    let key_arms: Vec<ast::Arm> = serial_names.iter()
+        .zip(fields.iter())
+        .map(|(serial, &(name, span))| {
+            let serial_name = match serial {
+                &Some(ref string) => string.clone(),
+                &None => token::get_ident(name),
+            };
+            let s = cx.expr_str(span, serial_name);
             quote_arm!(cx,
                 $s => {
                     $name = Some(
@@ -366,9 +401,14 @@ fn deserialize_struct_from_map(
         })
         .collect();
-    let extract_fields: Vec<Gc<ast::Stmt>> = fields.iter()
-        .map(|&(name, span)| {
-            let name_str = cx.expr_str(span, token::get_ident(name));
+    let extract_fields: Vec<Gc<ast::Stmt>> = serial_names.iter()
+        .zip(fields.iter())
+        .map(|(serial, &(name, span))| {
+            let serial_name = match serial {
+                &Some(ref string) => string.clone(),
+                &None => token::get_ident(name),
+            };
+            let name_str = cx.expr_str(span, serial_name);
             quote_stmt!(cx,
                 let $name = match $name {
                     Some($name) => $name,
@@ -428,12 +468,17 @@ fn deserialize_enum(
     cx: &ExtCtxt,
     span: Span,
     type_ident: Ident,
+    definitions: &[P<Variant>],
     fields: &[(Ident, Span, StaticFields)],
     deserializer: Gc<ast::Expr>,
     token: Gc<ast::Expr>
 ) -> Gc<ast::Expr> {
     let type_name = cx.expr_str(span, token::get_ident(type_ident));
+    let serial_names = definitions.iter().map(|def|
+        find_serial_name(def.node.attrs.iter())
+    ).collect::<Vec<Option<token::InternedString>>>();
     let variants = fields.iter()
         .map(|&(name, span, _)| {
             cx.expr_str(span, token::get_ident(name))
@@ -449,6 +494,7 @@ fn deserialize_enum(
                 cx,
                 span,
                 name,
+                serial_names.as_slice(),
                 parts,
                 |cx, span, _| {
                     quote_expr!(cx, try!($deserializer.expect_enum_elt()))
@@ -480,6 +526,7 @@ fn deserializable_static_fields(
     cx: &ExtCtxt,
     span: Span,
     outer_pat_ident: Ident,
+    serial_names: &[Option<token::InternedString>],
     fields: &StaticFields,
     getarg: |&ExtCtxt, Span, token::InternedString| -> Gc<Expr>
 ) -> Gc<Expr> {
@@ -501,11 +548,15 @@ fn deserializable_static_fields(
         }
         Named(ref fields) => {
             // use the field's span to get nicer error messages.
-            let fields = fields.iter().map(|&(name, span)| {
+            let fields = serial_names.iter()
+                .zip(fields.iter()).map(|(serial_name, &(name, span))| {
+                let effective_name = serial_name.clone().unwrap_or(
+                    token::get_ident(name)
+                );
                 let arg = getarg(
                     cx,
                     span,
-                    token::get_ident(name)
+                    effective_name
                 );
                 cx.field_imm(span, name, arg)
             }).collect();
@@ -514,3 +565,22 @@ fn deserializable_static_fields(
         }
     }
 }
+
+fn find_serial_name<'a, I: Iterator<&'a Attribute>>(mut iterator: I)
+                    -> Option<token::InternedString> {
+    for at in iterator {
+        match at.node.value.node {
+            MetaNameValue(ref at_name, ref value) => {
+                match (at_name.get(), &value.node) {
+                    ("serial_name", &LitStr(ref string, _)) => {
+                        attr::mark_used(at);
+                        return Some(string.clone());
+                    },
+                    _ => ()
+                }
+            },
+            _ => ()
+        }
+    }
+    None
+}
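
As a usage illustration (not part of the diff; the struct name here is hypothetical, mirroring the new test at the end of this commit), `find_serial_name` picks up a `#[serial_name = "..."]` attribute on a field and the generated impls then use that string as the key instead of the Rust identifier:

    // Hypothetical example: the generated code emits the key "$schema" for the
    // first field and falls back to the Rust name "title" for the second.
    #[deriving_serializable]
    #[deriving_deserializable]
    struct Schema {
        #[serial_name = "$schema"]
        schema: String,
        title: String,
    }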

View File

@@ -1100,8 +1100,11 @@ mod tests {
    #[deriving(Show)]
    enum Error {
        EndOfStream,
-        SyntaxError,
+        SyntaxError(Vec<TokenKind>),
+        UnexpectedName,
+        ConversionError,
        IncompleteValue,
+        MissingField(&'static str),
    }
    //////////////////////////////////////////////////////////////////////////////
@@ -1134,23 +1137,23 @@ mod tests {
            EndOfStream
        }
-        fn syntax_error(&mut self, _token: Token, _expected: &[TokenKind]) -> Error {
-            SyntaxError
+        fn syntax_error(&mut self, _token: Token, expected: &[TokenKind]) -> Error {
+            SyntaxError(expected.to_vec())
        }
        fn unexpected_name_error(&mut self, _token: Token) -> Error {
-            SyntaxError
+            UnexpectedName
        }
        fn conversion_error(&mut self, _token: Token) -> Error {
-            SyntaxError
+            ConversionError
        }
        #[inline]
        fn missing_field<
            T: Deserializable<TokenDeserializer<Iter>, Error>
-        >(&mut self, _field: &'static str) -> Result<T, Error> {
-            Err(SyntaxError)
+        >(&mut self, field: &'static str) -> Result<T, Error> {
+            Err(MissingField(field))
        }
    }
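
A minimal sketch (illustrative only, not in the diff; the `result` binding is an assumption) of what the richer test errors let a caller distinguish, instead of collapsing everything into a bare `SyntaxError`:

    // Hypothetical handling of the new Error variants in test code.
    match result {
        Ok(_) => { /* use the deserialized value */ }
        Err(MissingField(name)) => fail!("missing field: {}", name),
        Err(SyntaxError(expected)) => fail!("expected one of {} token kinds", expected.len()),
        Err(err) => fail!("deserialization failed: {}", err),
    }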

View File

@@ -645,16 +645,16 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
        SyntaxError(EOFWhileParsingValue, 0, 0)
    }
-    fn syntax_error(&mut self, _token: de::Token, _expected: &[de::TokenKind]) -> ParserError {
-        SyntaxError(InvalidSyntax, 0, 0)
+    fn syntax_error(&mut self, token: de::Token, expected: &[de::TokenKind]) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectTokens(expected.to_vec())), 0, 0)
    }
-    fn unexpected_name_error(&mut self, _token: de::Token) -> ParserError {
-        SyntaxError(InvalidSyntax, 0, 0)
+    fn unexpected_name_error(&mut self, token: de::Token) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectName), 0, 0)
    }
-    fn conversion_error(&mut self, _token: de::Token) -> ParserError {
-        SyntaxError(InvalidSyntax, 0, 0)
+    fn conversion_error(&mut self, token: de::Token) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectConversion), 0, 0)
    }
    #[inline]
@@ -735,9 +735,32 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
    }
}
+/// The failed expectation of InvalidSyntax
+#[deriving(Clone, PartialEq, Show)]
+pub enum SyntaxExpectation {
+    ListCommaOrEnd,
+    ObjectCommaOrEnd,
+    SomeValue,
+    SomeIdent,
+    EnumMapStart,
+    EnumVariantString,
+    EnumToken,
+    EnumEndToken,
+    EnumEnd,
+}
+/// JSON deserializer expectations
+#[deriving(Clone, PartialEq, Show)]
+pub enum DeserializerExpectation {
+    ExpectTokens(Vec<de::TokenKind>),
+    ExpectName,
+    ExpectConversion,
+}
/// The errors that can arise while parsing a JSON stream.
#[deriving(Clone, PartialEq)]
pub enum ErrorCode {
+    DeserializerError(de::Token, DeserializerExpectation),
    EOFWhileParsingList,
    EOFWhileParsingObject,
    EOFWhileParsingString,
@@ -745,7 +768,7 @@ pub enum ErrorCode {
    ExpectedColon,
    InvalidEscape,
    InvalidNumber,
-    InvalidSyntax,
+    InvalidSyntax(SyntaxExpectation),
    InvalidUnicodeCodePoint,
    KeyMustBeAString,
    LoneLeadingSurrogateInHexEscape,
@@ -784,6 +807,8 @@ pub enum DecoderError {
impl fmt::Show for ErrorCode {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
+            DeserializerError(ref token, ref expect) => write!(f,
+                "deserializer found {} when {}", token, expect),
            EOFWhileParsingList => "EOF While parsing list".fmt(f),
            EOFWhileParsingObject => "EOF While parsing object".fmt(f),
            EOFWhileParsingString => "EOF While parsing string".fmt(f),
@@ -791,7 +816,7 @@ impl fmt::Show for ErrorCode {
            ExpectedColon => "expected `:`".fmt(f),
            InvalidEscape => "invalid escape".fmt(f),
            InvalidNumber => "invalid number".fmt(f),
-            InvalidSyntax => "invalid syntax".fmt(f),
+            InvalidSyntax(expect) => write!(f, "invalid syntax, expected: {}", expect),
            InvalidUnicodeCodePoint => "invalid unicode code point".fmt(f),
            KeyMustBeAString => "key must be a string".fmt(f),
            LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape".fmt(f),
@@ -1925,7 +1950,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
        } else if self.eof() {
            self.error_event(EOFWhileParsingList)
        } else {
-            self.error_event(InvalidSyntax)
+            self.error_event(InvalidSyntax(ListCommaOrEnd))
        }
    }
@@ -1952,7 +1977,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
        } else if self.eof() {
            self.error_event(EOFWhileParsingObject)
        } else {
-            self.error_event(InvalidSyntax)
+            self.error_event(InvalidSyntax(ObjectCommaOrEnd))
        }
    }
@@ -2015,7 +2040,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
                Ok(de::MapStart(0))
            }
            _ => {
-                self.error_event(InvalidSyntax)
+                self.error_event(InvalidSyntax(SomeValue))
            }
        }
    }
@@ -2025,7 +2050,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
            self.bump();
            Ok(token)
        } else {
-            self.error_event(InvalidSyntax)
+            self.error_event(InvalidSyntax(SomeIdent))
        }
    }
@@ -2040,16 +2065,16 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
        SyntaxError(EOFWhileParsingValue, self.line, self.col)
    }
-    fn syntax_error(&mut self, _token: de::Token, _expected: &[de::TokenKind]) -> ParserError {
-        SyntaxError(InvalidSyntax, self.line, self.col)
+    fn syntax_error(&mut self, token: de::Token, expected: &[de::TokenKind]) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectTokens(expected.to_vec())), self.line, self.col)
    }
-    fn unexpected_name_error(&mut self, _token: de::Token) -> ParserError {
-        SyntaxError(InvalidSyntax, self.line, self.col)
+    fn unexpected_name_error(&mut self, token: de::Token) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectName), self.line, self.col)
    }
-    fn conversion_error(&mut self, _token: de::Token) -> ParserError {
-        SyntaxError(InvalidSyntax, self.line, self.col)
+    fn conversion_error(&mut self, token: de::Token) -> ParserError {
+        SyntaxError(DeserializerError(token, ExpectConversion), self.line, self.col)
    }
    #[inline]
@@ -2083,19 +2108,19 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
                           variants: &[&str]) -> Result<uint, ParserError> {
        match token {
            de::MapStart(_) => { }
-            _ => { return self.error(InvalidSyntax); }
+            _ => { return self.error(InvalidSyntax(EnumMapStart)); }
        };
        // Enums only have one field in them, which is the variant name.
        let variant = match try!(self.expect_token()) {
            de::String(variant) => variant,
-            _ => { return self.error(InvalidSyntax); }
+            _ => { return self.error(InvalidSyntax(EnumVariantString)); }
        };
        // The variant's field is a list of the values.
        match try!(self.expect_token()) {
            de::SeqStart(_) => { }
-            _ => { return self.error(InvalidSyntax); }
+            _ => { return self.error(InvalidSyntax(EnumToken)); }
        }
        match variants.iter().position(|v| *v == variant.as_slice()) {
@@ -2110,10 +2135,10 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
            de::End => {
                match try!(self.expect_token()) {
                    de::End => Ok(()),
-                    _ => self.error(InvalidSyntax),
+                    _ => self.error(InvalidSyntax(EnumEndToken)),
                }
            }
-            _ => self.error(InvalidSyntax),
+            _ => self.error(InvalidSyntax(EnumEnd)),
        }
    }
}
@@ -2331,6 +2356,10 @@ mod tests {
        KeyMustBeAString,
        TrailingCharacters,
        SyntaxError,
+        SomeIdent,
+        SomeValue,
+        ObjectCommaOrEnd,
+        ListCommaOrEnd,
    };
    use de;
    use ser::{Serializable, Serializer};
@@ -2800,8 +2829,8 @@ mod tests {
    #[test]
    fn test_parse_null() {
        test_parse_err::<()>([
-            ("n", SyntaxError(InvalidSyntax, 1, 2)),
-            ("nul", SyntaxError(InvalidSyntax, 1, 4)),
+            ("n", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
+            ("nul", SyntaxError(InvalidSyntax(SomeIdent), 1, 4)),
            ("nulla", SyntaxError(TrailingCharacters, 1, 5)),
        ]);
@@ -2820,10 +2849,10 @@ mod tests {
    #[test]
    fn test_parse_bool() {
        test_parse_err::<bool>([
-            ("t", SyntaxError(InvalidSyntax, 1, 2)),
-            ("truz", SyntaxError(InvalidSyntax, 1, 4)),
-            ("f", SyntaxError(InvalidSyntax, 1, 2)),
-            ("faz", SyntaxError(InvalidSyntax, 1, 3)),
+            ("t", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
+            ("truz", SyntaxError(InvalidSyntax(SomeIdent), 1, 4)),
+            ("f", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
+            ("faz", SyntaxError(InvalidSyntax(SomeIdent), 1, 3)),
            ("truea", SyntaxError(TrailingCharacters, 1, 5)),
            ("falsea", SyntaxError(TrailingCharacters, 1, 6)),
        ]);
@@ -2845,8 +2874,8 @@ mod tests {
    #[test]
    fn test_parse_number_errors() {
        test_parse_err::<f64>([
-            ("+", SyntaxError(InvalidSyntax, 1, 1)),
-            (".", SyntaxError(InvalidSyntax, 1, 1)),
+            ("+", SyntaxError(InvalidSyntax(SomeValue), 1, 1)),
+            (".", SyntaxError(InvalidSyntax(SomeValue), 1, 1)),
            ("-", SyntaxError(InvalidNumber, 1, 2)),
            ("00", SyntaxError(InvalidNumber, 1, 2)),
            ("1.", SyntaxError(InvalidNumber, 1, 3)),
@@ -2934,8 +2963,8 @@ mod tests {
            ("[ ", SyntaxError(EOFWhileParsingValue, 1, 3)),
            ("[1", SyntaxError(EOFWhileParsingList, 1, 3)),
            ("[1,", SyntaxError(EOFWhileParsingValue, 1, 4)),
-            ("[1,]", SyntaxError(InvalidSyntax, 1, 4)),
-            ("[1 2]", SyntaxError(InvalidSyntax, 1, 4)),
+            ("[1,]", SyntaxError(InvalidSyntax(SomeValue), 1, 4)),
+            ("[1 2]", SyntaxError(InvalidSyntax(ListCommaOrEnd), 1, 4)),
            ("[]a", SyntaxError(TrailingCharacters, 1, 3)),
        ]);
@@ -2999,7 +3028,7 @@ mod tests {
            ("{\"a\" 1", SyntaxError(ExpectedColon, 1, 6)),
            ("{\"a\":", SyntaxError(EOFWhileParsingValue, 1, 6)),
            ("{\"a\":1", SyntaxError(EOFWhileParsingObject, 1, 7)),
-            ("{\"a\":1 1", SyntaxError(InvalidSyntax, 1, 8)),
+            ("{\"a\":1 1", SyntaxError(InvalidSyntax(ObjectCommaOrEnd), 1, 8)),
            ("{\"a\":1,", SyntaxError(EOFWhileParsingString, 1, 8)),
            ("{}a", SyntaxError(TrailingCharacters, 1, 3)),
        ]);
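
For illustration (not part of the diff), the extra context carried by `InvalidSyntax` and `DeserializerError` can be matched on directly by callers; the snippet below is a sketch and assumes the relevant variants are imported from `serde::json`:

    // Hypothetical caller-side match on the richer ParserError values.
    match serde::json::from_str::<bool>("truz") {
        Err(SyntaxError(InvalidSyntax(SomeIdent), line, col)) =>
            println!("expected an identifier at {}:{}", line, col),
        Err(SyntaxError(code, line, col)) =>
            println!("parse error `{}` at {}:{}", code, line, col),
        Err(_) => println!("other error"),
        Ok(value) => println!("parsed: {}", value),
    }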

tests/json_struct.rs (new file, 31 lines)
View File

@@ -0,0 +1,31 @@
#![feature(phase)]

extern crate serde;
#[phase(plugin)]
extern crate serde_macros;

#[deriving(PartialEq, Show)]
#[deriving_serializable]
#[deriving_deserializable]
struct Test {
    #[serial_name = "$schema"]
    schema: String,
    title: String,
    #[serial_name = "type"]
    ty: int
}

#[test]
fn test_json_struct() {
    let input = Test {
        schema: "a".to_string(),
        title: "b".to_string(),
        ty: 3,
    };

    let s = serde::json::to_string(&input).unwrap();
    assert_eq!(s.as_slice(), r#"{"$schema":"a","title":"b","type":3}"#);

    let output: Test = serde::json::from_str(s.as_slice()).unwrap();
    assert_eq!(input, output);
}
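
As a possible follow-up check (not included in the commit), the renamed keys can also be consumed directly from a JSON literal; these lines are a sketch that would sit at the end of `test_json_struct`:

    // Hypothetical extra assertion: parse JSON that already uses the renamed
    // keys and compare against the original value.
    let from_json: Test = serde::json::from_str(
        r#"{"$schema":"a","title":"b","type":3}"#
    ).unwrap();
    assert_eq!(from_json, input);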