commit 906e9db9d5

@@ -10,14 +10,23 @@ extern crate rustc;
use std::gc::Gc;

use syntax::ast::{
Attribute,
Ident,
MetaItem,
MetaNameValue,
Item,
ItemEnum,
ItemStruct,
Expr,
MutMutable,
LitNil,
LitStr,
P,
StructField,
Variant,
};

use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, ItemDecorator};
use syntax::ext::build::AstBuilder;
@@ -103,7 +112,7 @@ fn expand_deriving_serializable(cx: &mut ExtCtxt,
),
attributes: attrs,
combine_substructure: combine_substructure(|a, b, c| {
serializable_substructure(a, b, c)
serializable_substructure(a, b, c, item)
}),
})
};
@@ -111,12 +120,15 @@ fn expand_deriving_serializable(cx: &mut ExtCtxt,
trait_def.expand(cx, mitem, item, push)
}

fn serializable_substructure(cx: &ExtCtxt, span: Span,
substr: &Substructure) -> Gc<Expr> {
fn serializable_substructure(cx: &ExtCtxt,
span: Span,
substr: &Substructure,
item: Gc<Item>
) -> Gc<Expr> {
let serializer = substr.nonself_args[0];

return match *substr.fields {
Struct(ref fields) => {
match (&item.deref().node, substr.fields) {
(&ItemStruct(ref definition, _), &Struct(ref fields)) => {
if fields.is_empty() {
// unit structs have no fields and need to return `Ok()`
quote_expr!(cx, Ok(()))
@@ -127,12 +139,15 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
);
let len = fields.len();

let mut stmts: Vec<Gc<ast::Stmt>> = fields.iter()
let mut stmts: Vec<Gc<ast::Stmt>> = definition.fields.iter()
.zip(fields.iter())
.enumerate()
.map(|(i, &FieldInfo { name, self_, span, .. })| {
let name = match name {
Some(id) => token::get_ident(id),
None => token::intern_and_get_ident(format!("_field{}", i).as_slice()),
.map(|(i, (def, &FieldInfo { name, self_, span, .. }))| {
let serial_name = find_serial_name(def.node.attrs.iter());
let name = match (serial_name, name) {
(Some(serial), _) => serial.clone(),
(None, Some(id)) => token::get_ident(id),
(None, None) => token::intern_and_get_ident(format!("_field{}", i).as_slice()),
};

let name = cx.expr_str(span, name);
@@ -152,7 +167,7 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
}
}

EnumMatching(_idx, variant, ref fields) => {
(&ItemEnum(ref definition, _), &EnumMatching(_idx, variant, ref fields)) => {
let type_name = cx.expr_str(
span,
token::get_ident(substr.type_ident)
@@ -163,8 +178,10 @@ fn serializable_substructure(cx: &ExtCtxt, span: Span,
);
let len = fields.len();

let stmts: Vec<Gc<ast::Stmt>> = fields.iter()
.map(|&FieldInfo { self_, span, .. }| {
let stmts: Vec<Gc<ast::Stmt>> = definition.variants.iter()
.zip(fields.iter())
.map(|(def, &FieldInfo { self_, span, .. })| {
let _serial_name = find_serial_name(def.node.attrs.iter());
quote_stmt!(
cx,
try!($serializer.serialize_enum_elt(&$self_))
@@ -241,20 +258,22 @@ fn deserializable_substructure(cx: &mut ExtCtxt, span: Span,
let token = substr.nonself_args[1];

match *substr.fields {
StaticStruct(_, ref fields) => {
StaticStruct(ref definition, ref fields) => {
deserialize_struct(
cx,
span,
substr.type_ident,
definition.fields.as_slice(),
fields,
deserializer,
token)
}
StaticEnum(_, ref fields) => {
StaticEnum(ref definition, ref fields) => {
deserialize_enum(
cx,
span,
substr.type_ident,
definition.variants.as_slice(),
fields.as_slice(),
deserializer,
token)
@@ -267,14 +286,21 @@ fn deserialize_struct(
cx: &ExtCtxt,
span: Span,
type_ident: Ident,
definitions: &[StructField],
fields: &StaticFields,
deserializer: Gc<ast::Expr>,
token: Gc<ast::Expr>
) -> Gc<ast::Expr> {
let serial_names: Vec<Option<token::InternedString>> =
definitions.iter().map(|def|
find_serial_name(def.node.attrs.iter())
).collect();

let struct_block = deserialize_struct_from_struct(
cx,
span,
type_ident,
serial_names.as_slice(),
fields,
deserializer
);
@@ -283,6 +309,7 @@ fn deserialize_struct(
cx,
span,
type_ident,
serial_names.as_slice(),
fields,
deserializer
);
@@ -307,6 +334,7 @@ fn deserialize_struct_from_struct(
cx: &ExtCtxt,
span: Span,
type_ident: Ident,
serial_names: &[Option<token::InternedString>],
fields: &StaticFields,
deserializer: Gc<ast::Expr>
) -> Gc<ast::Expr> {
@@ -316,6 +344,7 @@ fn deserialize_struct_from_struct(
cx,
span,
type_ident,
serial_names.as_slice(),
fields,
|cx, span, name| {
let name = cx.expr_str(span, name);
@@ -337,6 +366,7 @@ fn deserialize_struct_from_map(
cx: &ExtCtxt,
span: Span,
type_ident: Ident,
serial_names: &[Option<token::InternedString>],
fields: &StaticFields,
deserializer: Gc<ast::Expr>
) -> Gc<ast::Expr> {
@@ -353,9 +383,14 @@ fn deserialize_struct_from_map(
.collect();

// Declare key arms.
let key_arms: Vec<ast::Arm> = fields.iter()
.map(|&(name, span)| {
let s = cx.expr_str(span, token::get_ident(name));
let key_arms: Vec<ast::Arm> = serial_names.iter()
.zip(fields.iter())
.map(|(serial, &(name, span))| {
let serial_name = match serial {
&Some(ref string) => string.clone(),
&None => token::get_ident(name),
};
let s = cx.expr_str(span, serial_name);
quote_arm!(cx,
$s => {
$name = Some(
@@ -366,9 +401,14 @@ fn deserialize_struct_from_map(
})
.collect();

let extract_fields: Vec<Gc<ast::Stmt>> = fields.iter()
.map(|&(name, span)| {
let name_str = cx.expr_str(span, token::get_ident(name));
let extract_fields: Vec<Gc<ast::Stmt>> = serial_names.iter()
.zip(fields.iter())
.map(|(serial, &(name, span))| {
let serial_name = match serial {
&Some(ref string) => string.clone(),
&None => token::get_ident(name),
};
let name_str = cx.expr_str(span, serial_name);
quote_stmt!(cx,
let $name = match $name {
Some($name) => $name,
@@ -428,12 +468,17 @@ fn deserialize_enum(
cx: &ExtCtxt,
span: Span,
type_ident: Ident,
definitions: &[P<Variant>],
fields: &[(Ident, Span, StaticFields)],
deserializer: Gc<ast::Expr>,
token: Gc<ast::Expr>
) -> Gc<ast::Expr> {
let type_name = cx.expr_str(span, token::get_ident(type_ident));

let serial_names = definitions.iter().map(|def|
find_serial_name(def.node.attrs.iter())
).collect::<Vec<Option<token::InternedString>>>();

let variants = fields.iter()
.map(|&(name, span, _)| {
cx.expr_str(span, token::get_ident(name))
@@ -449,6 +494,7 @@ fn deserialize_enum(
cx,
span,
name,
serial_names.as_slice(),
parts,
|cx, span, _| {
quote_expr!(cx, try!($deserializer.expect_enum_elt()))
@@ -480,6 +526,7 @@ fn deserializable_static_fields(
cx: &ExtCtxt,
span: Span,
outer_pat_ident: Ident,
serial_names: &[Option<token::InternedString>],
fields: &StaticFields,
getarg: |&ExtCtxt, Span, token::InternedString| -> Gc<Expr>
) -> Gc<Expr> {
@@ -501,11 +548,15 @@ fn deserializable_static_fields(
}
Named(ref fields) => {
// use the field's span to get nicer error messages.
let fields = fields.iter().map(|&(name, span)| {
let fields = serial_names.iter()
.zip(fields.iter()).map(|(serial_name, &(name, span))| {
let effective_name = serial_name.clone().unwrap_or(
token::get_ident(name)
);
let arg = getarg(
cx,
span,
token::get_ident(name)
effective_name
);
cx.field_imm(span, name, arg)
}).collect();
@@ -514,3 +565,22 @@ fn deserializable_static_fields(
}
}
}

fn find_serial_name<'a, I: Iterator<&'a Attribute>>(mut iterator: I)
-> Option<token::InternedString> {
for at in iterator {
match at.node.value.node {
MetaNameValue(ref at_name, ref value) => {
match (at_name.get(), &value.node) {
("serial_name", &LitStr(ref string, _)) => {
attr::mark_used(at);
return Some(string.clone());
},
_ => ()
}
},
_ => ()
}
}
None
}
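
Taken together, the macro changes above thread an optional #[serial_name = "..."] attribute from the item's AST definition into the generated code, so a struct field can be serialized and deserialized under a custom key, with find_serial_name doing the attribute lookup and the field identifier (or a positional placeholder) as the fallback. The helper below is a hypothetical restatement of that selection rule, written only to summarize the zip/map logic above; it is not part of the patch, and it assumes the same syntax-crate APIs the file already uses.

    extern crate syntax;

    use syntax::ast::Ident;
    use syntax::parse::token;

    /// Pick the key a field is (de)serialized under: an explicit
    /// #[serial_name = "..."] wins, then the field's own identifier,
    /// then a positional "_fieldN" placeholder for unnamed fields.
    fn serialized_key(serial_name: Option<token::InternedString>,
                      ident: Option<Ident>,
                      index: uint) -> token::InternedString {
        match (serial_name, ident) {
            (Some(serial), _) => serial,
            (None, Some(id)) => token::get_ident(id),
            (None, None) => token::intern_and_get_ident(
                format!("_field{}", index).as_slice()),
        }
    }
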
src/de.rs | 17

@@ -1100,8 +1100,11 @@ mod tests {
#[deriving(Show)]
enum Error {
EndOfStream,
SyntaxError,
SyntaxError(Vec<TokenKind>),
UnexpectedName,
ConversionError,
IncompleteValue,
MissingField(&'static str),
}

//////////////////////////////////////////////////////////////////////////////
@@ -1134,23 +1137,23 @@ mod tests {
EndOfStream
}

fn syntax_error(&mut self, _token: Token, _expected: &[TokenKind]) -> Error {
SyntaxError
fn syntax_error(&mut self, _token: Token, expected: &[TokenKind]) -> Error {
SyntaxError(expected.to_vec())
}

fn unexpected_name_error(&mut self, _token: Token) -> Error {
SyntaxError
UnexpectedName
}

fn conversion_error(&mut self, _token: Token) -> Error {
SyntaxError
ConversionError
}

#[inline]
fn missing_field<
T: Deserializable<TokenDeserializer<Iter>, Error>
>(&mut self, _field: &'static str) -> Result<T, Error> {
Err(SyntaxError)
>(&mut self, field: &'static str) -> Result<T, Error> {
Err(MissingField(field))
}
}

@@ -645,16 +645,16 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
SyntaxError(EOFWhileParsingValue, 0, 0)
}

fn syntax_error(&mut self, _token: de::Token, _expected: &[de::TokenKind]) -> ParserError {
SyntaxError(InvalidSyntax, 0, 0)
fn syntax_error(&mut self, token: de::Token, expected: &[de::TokenKind]) -> ParserError {
SyntaxError(DeserializerError(token, ExpectTokens(expected.to_vec())), 0, 0)
}

fn unexpected_name_error(&mut self, _token: de::Token) -> ParserError {
SyntaxError(InvalidSyntax, 0, 0)
fn unexpected_name_error(&mut self, token: de::Token) -> ParserError {
SyntaxError(DeserializerError(token, ExpectName), 0, 0)
}

fn conversion_error(&mut self, _token: de::Token) -> ParserError {
SyntaxError(InvalidSyntax, 0, 0)
fn conversion_error(&mut self, token: de::Token) -> ParserError {
SyntaxError(DeserializerError(token, ExpectConversion), 0, 0)
}

#[inline]
@@ -735,9 +735,32 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
}
}

/// The failed expectation of InvalidSyntax
#[deriving(Clone, PartialEq, Show)]
pub enum SyntaxExpectation {
ListCommaOrEnd,
ObjectCommaOrEnd,
SomeValue,
SomeIdent,
EnumMapStart,
EnumVariantString,
EnumToken,
EnumEndToken,
EnumEnd,
}

/// JSON deserializer expectations
#[deriving(Clone, PartialEq, Show)]
pub enum DeserializerExpectation {
ExpectTokens(Vec<de::TokenKind>),
ExpectName,
ExpectConversion,
}

/// The errors that can arise while parsing a JSON stream.
#[deriving(Clone, PartialEq)]
pub enum ErrorCode {
DeserializerError(de::Token, DeserializerExpectation),
EOFWhileParsingList,
EOFWhileParsingObject,
EOFWhileParsingString,
@@ -745,7 +768,7 @@ pub enum ErrorCode {
ExpectedColon,
InvalidEscape,
InvalidNumber,
InvalidSyntax,
InvalidSyntax(SyntaxExpectation),
InvalidUnicodeCodePoint,
KeyMustBeAString,
LoneLeadingSurrogateInHexEscape,
@@ -784,6 +807,8 @@ pub enum DecoderError {
impl fmt::Show for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
DeserializerError(ref token, ref expect) => write!(f,
"deserializer found {} when {}", token, expect),
EOFWhileParsingList => "EOF While parsing list".fmt(f),
EOFWhileParsingObject => "EOF While parsing object".fmt(f),
EOFWhileParsingString => "EOF While parsing string".fmt(f),
@@ -791,7 +816,7 @@ impl fmt::Show for ErrorCode {
ExpectedColon => "expected `:`".fmt(f),
InvalidEscape => "invalid escape".fmt(f),
InvalidNumber => "invalid number".fmt(f),
InvalidSyntax => "invalid syntax".fmt(f),
InvalidSyntax(expect) => write!(f, "invalid syntax, expected: {}", expect),
InvalidUnicodeCodePoint => "invalid unicode code point".fmt(f),
KeyMustBeAString => "key must be a string".fmt(f),
LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape".fmt(f),
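
With the hunks above, a failed parse now records what was expected: InvalidSyntax carries a SyntaxExpectation, and the new DeserializerError code pairs the offending token with a DeserializerExpectation. Below is a sketch of how a caller might inspect one of the richer errors; the serde::json import paths, the ParserError name, and its availability from from_str are assumptions drawn from the surrounding code and the updated tests, not something this hunk shows directly.

    extern crate serde;

    use serde::json;
    use serde::json::{ParserError, SyntaxError, InvalidSyntax, ListCommaOrEnd};

    fn main() {
        // "[1 2]" is missing a comma, so the parser reports which expectation
        // failed instead of a bare InvalidSyntax.
        let result: Result<Vec<int>, ParserError> = json::from_str("[1 2]");
        match result {
            Err(SyntaxError(InvalidSyntax(ListCommaOrEnd), line, col)) => {
                println!("expected `,` or `]` at line {}, column {}", line, col);
            }
            _ => fail!("expected a ListCommaOrEnd syntax error"),
        }
    }
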
@@ -1925,7 +1950,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
} else if self.eof() {
self.error_event(EOFWhileParsingList)
} else {
self.error_event(InvalidSyntax)
self.error_event(InvalidSyntax(ListCommaOrEnd))
}
}

@@ -1952,7 +1977,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
} else if self.eof() {
self.error_event(EOFWhileParsingObject)
} else {
self.error_event(InvalidSyntax)
self.error_event(InvalidSyntax(ObjectCommaOrEnd))
}
}

@@ -2015,7 +2040,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
Ok(de::MapStart(0))
}
_ => {
self.error_event(InvalidSyntax)
self.error_event(InvalidSyntax(SomeValue))
}
}
}
@@ -2025,7 +2050,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
self.bump();
Ok(token)
} else {
self.error_event(InvalidSyntax)
self.error_event(InvalidSyntax(SomeIdent))
}
}

@@ -2040,16 +2065,16 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
SyntaxError(EOFWhileParsingValue, self.line, self.col)
}

fn syntax_error(&mut self, _token: de::Token, _expected: &[de::TokenKind]) -> ParserError {
SyntaxError(InvalidSyntax, self.line, self.col)
fn syntax_error(&mut self, token: de::Token, expected: &[de::TokenKind]) -> ParserError {
SyntaxError(DeserializerError(token, ExpectTokens(expected.to_vec())), self.line, self.col)
}

fn unexpected_name_error(&mut self, _token: de::Token) -> ParserError {
SyntaxError(InvalidSyntax, self.line, self.col)
fn unexpected_name_error(&mut self, token: de::Token) -> ParserError {
SyntaxError(DeserializerError(token, ExpectName), self.line, self.col)
}

fn conversion_error(&mut self, _token: de::Token) -> ParserError {
SyntaxError(InvalidSyntax, self.line, self.col)
fn conversion_error(&mut self, token: de::Token) -> ParserError {
SyntaxError(DeserializerError(token, ExpectConversion), self.line, self.col)
}

#[inline]
@@ -2083,19 +2108,19 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
variants: &[&str]) -> Result<uint, ParserError> {
match token {
de::MapStart(_) => { }
_ => { return self.error(InvalidSyntax); }
_ => { return self.error(InvalidSyntax(EnumMapStart)); }
};

// Enums only have one field in them, which is the variant name.
let variant = match try!(self.expect_token()) {
de::String(variant) => variant,
_ => { return self.error(InvalidSyntax); }
_ => { return self.error(InvalidSyntax(EnumVariantString)); }
};

// The variant's field is a list of the values.
match try!(self.expect_token()) {
de::SeqStart(_) => { }
_ => { return self.error(InvalidSyntax); }
_ => { return self.error(InvalidSyntax(EnumToken)); }
}

match variants.iter().position(|v| *v == variant.as_slice()) {
@@ -2110,10 +2135,10 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
de::End => {
match try!(self.expect_token()) {
de::End => Ok(()),
_ => self.error(InvalidSyntax),
_ => self.error(InvalidSyntax(EnumEndToken)),
}
}
_ => self.error(InvalidSyntax),
_ => self.error(InvalidSyntax(EnumEnd)),
}
}
}
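
The enum-specific expectations introduced here (EnumMapStart, EnumVariantString, EnumToken, EnumEndToken, EnumEnd) name the stages of the wire format the parser walks through: a one-entry JSON object whose key is the variant name and whose value is a list of the variant's fields. The sketch below illustrates that shape with a hypothetical enum, reusing the crate setup of the new test file at the end of this diff; the exact JSON text in the comment is indicative only.

    #![feature(phase)]

    extern crate serde;
    #[phase(plugin)]
    extern crate serde_macros;

    // Hypothetical type, for illustration; not part of the patch.
    #[deriving(PartialEq, Show)]
    #[deriving_serializable]
    #[deriving_deserializable]
    enum Animal {
        Frog(String, int),
    }

    #[test]
    fn test_enum_round_trip() {
        // Expected shape on the wire is roughly r#"{"Frog":["Henry",349]}"#:
        // MapStart, the variant name as a string, a SeqStart holding the
        // variant's fields, then the two matching Ends.
        let frog = Frog("Henry".to_string(), 349);
        let s = serde::json::to_string(&frog).unwrap();

        let back: Animal = serde::json::from_str(s.as_slice()).unwrap();
        assert_eq!(frog, back);
    }
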
@@ -2331,6 +2356,10 @@ mod tests {
KeyMustBeAString,
TrailingCharacters,
SyntaxError,
SomeIdent,
SomeValue,
ObjectCommaOrEnd,
ListCommaOrEnd,
};
use de;
use ser::{Serializable, Serializer};
@@ -2800,8 +2829,8 @@ mod tests {
#[test]
fn test_parse_null() {
test_parse_err::<()>([
("n", SyntaxError(InvalidSyntax, 1, 2)),
("nul", SyntaxError(InvalidSyntax, 1, 4)),
("n", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
("nul", SyntaxError(InvalidSyntax(SomeIdent), 1, 4)),
("nulla", SyntaxError(TrailingCharacters, 1, 5)),
]);

@@ -2820,10 +2849,10 @@ mod tests {
#[test]
fn test_parse_bool() {
test_parse_err::<bool>([
("t", SyntaxError(InvalidSyntax, 1, 2)),
("truz", SyntaxError(InvalidSyntax, 1, 4)),
("f", SyntaxError(InvalidSyntax, 1, 2)),
("faz", SyntaxError(InvalidSyntax, 1, 3)),
("t", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
("truz", SyntaxError(InvalidSyntax(SomeIdent), 1, 4)),
("f", SyntaxError(InvalidSyntax(SomeIdent), 1, 2)),
("faz", SyntaxError(InvalidSyntax(SomeIdent), 1, 3)),
("truea", SyntaxError(TrailingCharacters, 1, 5)),
("falsea", SyntaxError(TrailingCharacters, 1, 6)),
]);
@@ -2845,8 +2874,8 @@ mod tests {
#[test]
fn test_parse_number_errors() {
test_parse_err::<f64>([
("+", SyntaxError(InvalidSyntax, 1, 1)),
(".", SyntaxError(InvalidSyntax, 1, 1)),
("+", SyntaxError(InvalidSyntax(SomeValue), 1, 1)),
(".", SyntaxError(InvalidSyntax(SomeValue), 1, 1)),
("-", SyntaxError(InvalidNumber, 1, 2)),
("00", SyntaxError(InvalidNumber, 1, 2)),
("1.", SyntaxError(InvalidNumber, 1, 3)),
@@ -2934,8 +2963,8 @@ mod tests {
("[ ", SyntaxError(EOFWhileParsingValue, 1, 3)),
("[1", SyntaxError(EOFWhileParsingList, 1, 3)),
("[1,", SyntaxError(EOFWhileParsingValue, 1, 4)),
("[1,]", SyntaxError(InvalidSyntax, 1, 4)),
("[1 2]", SyntaxError(InvalidSyntax, 1, 4)),
("[1,]", SyntaxError(InvalidSyntax(SomeValue), 1, 4)),
("[1 2]", SyntaxError(InvalidSyntax(ListCommaOrEnd), 1, 4)),
("[]a", SyntaxError(TrailingCharacters, 1, 3)),
]);

@@ -2999,7 +3028,7 @@ mod tests {
("{\"a\" 1", SyntaxError(ExpectedColon, 1, 6)),
("{\"a\":", SyntaxError(EOFWhileParsingValue, 1, 6)),
("{\"a\":1", SyntaxError(EOFWhileParsingObject, 1, 7)),
("{\"a\":1 1", SyntaxError(InvalidSyntax, 1, 8)),
("{\"a\":1 1", SyntaxError(InvalidSyntax(ObjectCommaOrEnd), 1, 8)),
("{\"a\":1,", SyntaxError(EOFWhileParsingString, 1, 8)),
("{}a", SyntaxError(TrailingCharacters, 1, 3)),
]);

tests/json_struct.rs | 31 (new file)

@@ -0,0 +1,31 @@
#![feature(phase)]

extern crate serde;
#[phase(plugin)]
extern crate serde_macros;

#[deriving(PartialEq, Show)]
#[deriving_serializable]
#[deriving_deserializable]
struct Test {
#[serial_name = "$schema"]
schema: String,
title: String,
#[serial_name = "type"]
ty: int
}

#[test]
fn test_json_struct() {
let input = Test {
schema: "a".to_string(),
title: "b".to_string(),
ty: 3,
};

let s = serde::json::to_string(&input).unwrap();
assert_eq!(s.as_slice(), r#"{"$schema":"a","title":"b","type":3}"#);

let output: Test = serde::json::from_str(s.as_slice()).unwrap();
assert_eq!(input, output);
}
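
The new test exercises both halves of the change: serialization must emit the #[serial_name] keys ("$schema" and "type") in place of the field identifiers, and deserialization must match those same keys in its generated key arms, falling back to the plain identifier for the un-renamed title field.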