Serialize to binary if the serde format is not human readable
This implements the KISS approach suggested in https://github.com/serde-rs/serde/issues/790. It is possible that one of the other approaches may be better but this seemed like the simplest one to reignite some discussion. Personally I find the original suggestion of adding two traits perhaps slightly cleaner in theory but I think it ends up more complicated in the end since the added traits also need to be duplicated to the `Seed` traits. Closes #790
This commit is contained in:
parent
d4042872f5
commit
0dccbb1f11
@ -1011,6 +1011,12 @@ pub trait Deserializer<'de>: Sized {
|
|||||||
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||||
where
|
where
|
||||||
V: Visitor<'de>;
|
V: Visitor<'de>;
|
||||||
|
|
||||||
|
/// Returns whether the serialized data is human readable or not.
|
||||||
|
///
|
||||||
|
/// Some formats are not intended to be human readable. For these formats
|
||||||
|
/// a type being serialized may opt to serialize into a more compact form.
|
||||||
|
fn is_human_readable(&self) -> bool { true }
|
||||||
}
|
}
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
@ -1363,6 +1363,12 @@ pub trait Serializer: Sized {
|
|||||||
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
fn collect_str<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
|
||||||
where
|
where
|
||||||
T: Display;
|
T: Display;
|
||||||
|
|
||||||
|
/// Returns whether the data format is human readable or not.
|
||||||
|
///
|
||||||
|
/// Some formats are not intended to be human readable. For these formats
|
||||||
|
/// a type being serialized may opt to serialize into a more compact form.
|
||||||
|
fn is_human_readable(&self) -> bool { true }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returned from `Serializer::serialize_seq`.
|
/// Returned from `Serializer::serialize_seq`.
|
||||||
|
@ -84,7 +84,17 @@ pub fn assert_ser_tokens<T>(value: &T, tokens: &[Token])
|
|||||||
where
|
where
|
||||||
T: Serialize,
|
T: Serialize,
|
||||||
{
|
{
|
||||||
let mut ser = Serializer::new(tokens);
|
assert_ser_tokens_readable(value, tokens, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Asserts that `value` serializes to the given `tokens`.
|
||||||
|
///
|
||||||
|
/// See: `assert_ser_tokens`
|
||||||
|
pub fn assert_ser_tokens_readable<T>(value: &T, tokens: &[Token], human_readable: bool)
|
||||||
|
where
|
||||||
|
T: Serialize,
|
||||||
|
{
|
||||||
|
let mut ser = Serializer::readable(tokens, human_readable);
|
||||||
match value.serialize(&mut ser) {
|
match value.serialize(&mut ser) {
|
||||||
Ok(_) => {}
|
Ok(_) => {}
|
||||||
Err(err) => panic!("value failed to serialize: {}", err),
|
Err(err) => panic!("value failed to serialize: {}", err),
|
||||||
@ -183,7 +193,14 @@ pub fn assert_de_tokens<'de, T>(value: &T, tokens: &'de [Token])
|
|||||||
where
|
where
|
||||||
T: Deserialize<'de> + PartialEq + Debug,
|
T: Deserialize<'de> + PartialEq + Debug,
|
||||||
{
|
{
|
||||||
let mut de = Deserializer::new(tokens);
|
assert_de_tokens_readable(value, tokens, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn assert_de_tokens_readable<'de, T>(value: &T, tokens: &'de [Token], human_readable: bool)
|
||||||
|
where
|
||||||
|
T: Deserialize<'de> + PartialEq + Debug,
|
||||||
|
{
|
||||||
|
let mut de = Deserializer::readable(tokens, human_readable);
|
||||||
match T::deserialize(&mut de) {
|
match T::deserialize(&mut de) {
|
||||||
Ok(v) => assert_eq!(v, *value),
|
Ok(v) => assert_eq!(v, *value),
|
||||||
Err(e) => panic!("tokens failed to deserialize: {}", e),
|
Err(e) => panic!("tokens failed to deserialize: {}", e),
|
||||||
|
@ -16,6 +16,7 @@ use token::Token;
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Deserializer<'de> {
|
pub struct Deserializer<'de> {
|
||||||
tokens: &'de [Token],
|
tokens: &'de [Token],
|
||||||
|
is_human_readable: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_next_token {
|
macro_rules! assert_next_token {
|
||||||
@ -48,7 +49,11 @@ macro_rules! end_of_tokens {
|
|||||||
|
|
||||||
impl<'de> Deserializer<'de> {
|
impl<'de> Deserializer<'de> {
|
||||||
pub fn new(tokens: &'de [Token]) -> Self {
|
pub fn new(tokens: &'de [Token]) -> Self {
|
||||||
Deserializer { tokens: tokens }
|
Deserializer::readable(tokens, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn readable(tokens: &'de [Token], is_human_readable: bool) -> Self {
|
||||||
|
Deserializer { tokens: tokens, is_human_readable: is_human_readable }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn peek_token_opt(&self) -> Option<Token> {
|
fn peek_token_opt(&self) -> Option<Token> {
|
||||||
@ -364,6 +369,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
|
|||||||
_ => self.deserialize_any(visitor),
|
_ => self.deserialize_any(visitor),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_human_readable(&self) -> bool {
|
||||||
|
self.is_human_readable
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//////////////////////////////////////////////////////////////////////////
|
//////////////////////////////////////////////////////////////////////////
|
||||||
|
@ -168,8 +168,8 @@ mod token;
|
|||||||
mod assert;
|
mod assert;
|
||||||
|
|
||||||
pub use token::Token;
|
pub use token::Token;
|
||||||
pub use assert::{assert_tokens, assert_ser_tokens, assert_ser_tokens_error, assert_de_tokens,
|
pub use assert::{assert_tokens, assert_ser_tokens, assert_ser_tokens_error, assert_ser_tokens_readable,
|
||||||
assert_de_tokens_error};
|
assert_de_tokens, assert_de_tokens_error, assert_de_tokens_readable};
|
||||||
|
|
||||||
// Not public API.
|
// Not public API.
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
|
@ -15,12 +15,17 @@ use token::Token;
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Serializer<'a> {
|
pub struct Serializer<'a> {
|
||||||
tokens: &'a [Token],
|
tokens: &'a [Token],
|
||||||
|
is_human_readable: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Serializer<'a> {
|
impl<'a> Serializer<'a> {
|
||||||
/// Creates the serializer.
|
/// Creates the serializer.
|
||||||
pub fn new(tokens: &'a [Token]) -> Self {
|
pub fn new(tokens: &'a [Token]) -> Self {
|
||||||
Serializer { tokens: tokens }
|
Serializer::readable(tokens, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn readable(tokens: &'a [Token], is_human_readable: bool) -> Self {
|
||||||
|
Serializer { tokens: tokens, is_human_readable: is_human_readable }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Pulls the next token off of the serializer, ignoring it.
|
/// Pulls the next token off of the serializer, ignoring it.
|
||||||
@ -282,6 +287,10 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
|
|||||||
Ok(Variant { ser: self, end: Token::StructVariantEnd })
|
Ok(Variant { ser: self, end: Token::StructVariantEnd })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_human_readable(&self) -> bool {
|
||||||
|
self.is_human_readable
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Variant<'s, 'a: 's> {
|
pub struct Variant<'s, 'a: 's> {
|
||||||
|
@ -28,7 +28,7 @@ extern crate fnv;
|
|||||||
use self::fnv::FnvHasher;
|
use self::fnv::FnvHasher;
|
||||||
|
|
||||||
extern crate serde_test;
|
extern crate serde_test;
|
||||||
use self::serde_test::{Token, assert_de_tokens, assert_de_tokens_error};
|
use self::serde_test::{Token, assert_de_tokens, assert_de_tokens_error, assert_de_tokens_readable};
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
mod macros;
|
mod macros;
|
||||||
@ -1078,3 +1078,39 @@ declare_error_tests! {
|
|||||||
"invalid type: sequence, expected unit struct UnitStruct",
|
"invalid type: sequence, expected unit struct UnitStruct",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq)]
|
||||||
|
struct CompactBinary((u8, u8));
|
||||||
|
|
||||||
|
impl<'de> serde::Deserialize<'de> for CompactBinary {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<CompactBinary, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
if deserializer.is_human_readable() {
|
||||||
|
<(u8, u8)>::deserialize(deserializer).map(CompactBinary)
|
||||||
|
} else {
|
||||||
|
<&[u8]>::deserialize(deserializer).map(|bytes| {
|
||||||
|
CompactBinary((bytes[0], bytes[1]))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_human_readable() {
|
||||||
|
assert_de_tokens(
|
||||||
|
&CompactBinary((1, 2)),
|
||||||
|
&[
|
||||||
|
Token::Tuple { len: 2},
|
||||||
|
Token::U8(1),
|
||||||
|
Token::U8(2),
|
||||||
|
Token::TupleEnd,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
assert_de_tokens_readable(
|
||||||
|
&CompactBinary((1, 2)),
|
||||||
|
&[Token::BorrowedBytes(&[1, 2])],
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
@ -23,7 +23,8 @@ use std::str;
|
|||||||
extern crate serde;
|
extern crate serde;
|
||||||
|
|
||||||
extern crate serde_test;
|
extern crate serde_test;
|
||||||
use self::serde_test::{Token, assert_ser_tokens, assert_ser_tokens_error};
|
use self::serde_test::{Token, assert_ser_tokens, assert_ser_tokens_error,
|
||||||
|
assert_ser_tokens_readable};
|
||||||
|
|
||||||
extern crate fnv;
|
extern crate fnv;
|
||||||
use self::fnv::FnvHasher;
|
use self::fnv::FnvHasher;
|
||||||
@ -474,3 +475,26 @@ fn test_enum_skipped() {
|
|||||||
"the enum variant Enum::SkippedMap cannot be serialized",
|
"the enum variant Enum::SkippedMap cannot be serialized",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct CompactBinary(String);
|
||||||
|
|
||||||
|
impl serde::Serialize for CompactBinary {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::Serializer
|
||||||
|
{
|
||||||
|
if serializer.is_human_readable() {
|
||||||
|
serializer.serialize_str(&self.0)
|
||||||
|
} else {
|
||||||
|
serializer.serialize_bytes(self.0.as_bytes())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_human_readable() {
|
||||||
|
let value = CompactBinary("test".to_string());
|
||||||
|
assert_ser_tokens(&value, &[Token::String("test")]);
|
||||||
|
|
||||||
|
assert_ser_tokens_readable(&value, &[Token::Bytes(b"test")], false);
|
||||||
|
}
|
||||||
|
Loading…
x
Reference in New Issue
Block a user