Format with rustfmt 0.4.1
parent 00178ba795
commit 9f38ca032e
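The diff below is mechanical reformatting only; no behavior changes. A minimal sketch of how a formatting pass like this is typically reproduced (the exact invocation is not recorded in the commit, and the `rustfmt-nightly` crate name and version pin are assumptions based on the commit title):

    # Install the rustfmt release matching the one named in the commit message
    # (assumption: published as the `rustfmt-nightly` crate), then reformat
    # every crate in the workspace in place.
    cargo install rustfmt-nightly --version 0.4.1 --force
    cargo fmt --all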
@@ -1276,10 +1276,7 @@ parse_socket_impl!(net::SocketAddrV4, net::SocketAddrV4::new);

#[cfg(feature = "std")]
parse_socket_impl!(net::SocketAddrV6, |ip, port| net::SocketAddrV6::new(
ip,
port,
0,
0
ip, port, 0, 0
));

////////////////////////////////////////////////////////////////////////////////
@@ -90,10 +90,13 @@
#![cfg_attr(feature = "alloc", feature(alloc))]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Whitelisted clippy lints
#![cfg_attr(feature = "cargo-clippy",
allow(cast_lossless, const_static_lifetime, doc_markdown, linkedlist,
needless_pass_by_value, redundant_field_names, type_complexity,
unreadable_literal, zero_prefixed_literal))]
#![cfg_attr(
feature = "cargo-clippy",
allow(
cast_lossless, const_static_lifetime, doc_markdown, linkedlist, needless_pass_by_value,
redundant_field_names, type_complexity, unreadable_literal, zero_prefixed_literal
)
)]
// Whitelisted clippy_pedantic lints
#![cfg_attr(feature = "cargo-clippy", allow(
// integer and float ser/de requires these sorts of casts
@@ -143,8 +146,8 @@ mod lib {

pub use self::core::{cmp, iter, mem, ops, slice, str};
pub use self::core::{f32, f64};
pub use self::core::{isize, i16, i32, i64, i8};
pub use self::core::{usize, u16, u32, u64, u8};
pub use self::core::{i16, i32, i64, i8, isize};
pub use self::core::{u16, u32, u64, u8, usize};

pub use self::core::cell::{Cell, RefCell};
pub use self::core::clone::{self, Clone};
@@ -9,10 +9,10 @@
use std::collections::HashSet;

use syn;
use syn::punctuated::{Punctuated, Pair};
use syn::punctuated::{Pair, Punctuated};
use syn::visit::{self, Visit};

use internals::ast::{Data, Container};
use internals::ast::{Container, Data};
use internals::attr;

use proc_macro2::Span;
@@ -26,13 +26,11 @@ pub fn without_defaults(generics: &syn::Generics) -> syn::Generics {
.params
.iter()
.map(|param| match *param {
syn::GenericParam::Type(ref param) => {
syn::GenericParam::Type(syn::TypeParam {
eq_token: None,
default: None,
..param.clone()
})
}
syn::GenericParam::Type(ref param) => syn::GenericParam::Type(syn::TypeParam {
eq_token: None,
default: None,
..param.clone()
}),
_ => param.clone(),
})
.collect(),
@@ -45,7 +43,8 @@ pub fn with_where_predicates(
predicates: &[syn::WherePredicate],
) -> syn::Generics {
let mut generics = generics.clone();
generics.make_where_clause()
generics
.make_where_clause()
.predicates
.extend(predicates.into_iter().cloned());
generics
@@ -62,9 +61,7 @@ pub fn with_where_predicates_from_fields(
.flat_map(|predicates| predicates.to_vec());

let mut generics = generics.clone();
generics.make_where_clause()
.predicates
.extend(predicates);
generics.make_where_clause().predicates.extend(predicates);
generics
}

@@ -137,9 +134,7 @@ pub fn with_bound(
fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
}

let all_type_params = generics.type_params()
.map(|param| param.ident)
.collect();
let all_type_params = generics.type_params().map(|param| param.ident).collect();

let mut visitor = FindTyParams {
all_type_params: all_type_params,
@@ -165,10 +160,14 @@ pub fn with_bound(

let relevant_type_params = visitor.relevant_type_params;
let associated_type_usage = visitor.associated_type_usage;
let new_predicates = generics.type_params()
let new_predicates = generics
.type_params()
.map(|param| param.ident)
.filter(|id| relevant_type_params.contains(id))
.map(|id| syn::TypePath { qself: None, path: id.into() })
.map(|id| syn::TypePath {
qself: None,
path: id.into(),
})
.chain(associated_type_usage.into_iter().cloned())
.map(|bounded_ty| {
syn::WherePredicate::Type(syn::PredicateType {
@@ -177,19 +176,19 @@ pub fn with_bound(
bounded_ty: syn::Type::Path(bounded_ty),
colon_token: Default::default(),
// the bound e.g. Serialize
bounds: vec![
syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
}),
].into_iter().collect(),
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
})].into_iter()
.collect(),
})
});

let mut generics = generics.clone();
generics.make_where_clause()
generics
.make_where_clause()
.predicates
.extend(new_predicates);
generics
@@ -201,7 +200,8 @@ pub fn with_self_bound(
bound: &syn::Path,
) -> syn::Generics {
let mut generics = generics.clone();
generics.make_where_clause()
generics
.make_where_clause()
.predicates
.push(syn::WherePredicate::Type(syn::PredicateType {
lifetimes: None,
@@ -209,14 +209,13 @@ pub fn with_self_bound(
bounded_ty: type_of_item(cont),
colon_token: Default::default(),
// the bound e.g. Default
bounds: vec![
syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
}),
].into_iter().collect(),
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
})].into_iter()
.collect(),
}));
generics
}
@@ -232,21 +231,18 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen

let params = Some(syn::GenericParam::Lifetime(def))
.into_iter()
.chain(generics.params
.iter()
.cloned()
.map(|mut param| {
match param {
syn::GenericParam::Lifetime(ref mut param) => {
param.bounds.push(bound);
}
syn::GenericParam::Type(ref mut param) => {
param.bounds.push(syn::TypeParamBound::Lifetime(bound));
}
syn::GenericParam::Const(_) => {}
.chain(generics.params.iter().cloned().map(|mut param| {
match param {
syn::GenericParam::Lifetime(ref mut param) => {
param.bounds.push(bound);
}
param
}))
syn::GenericParam::Type(ref mut param) => {
param.bounds.push(syn::TypeParamBound::Lifetime(bound));
}
syn::GenericParam::Const(_) => {}
}
param
}))
.collect();

syn::Generics {
@@ -260,36 +256,35 @@ fn type_of_item(cont: &Container) -> syn::Type {
qself: None,
path: syn::Path {
leading_colon: None,
segments: vec![
syn::PathSegment {
ident: cont.ident,
arguments: syn::PathArguments::AngleBracketed(
syn::AngleBracketedGenericArguments {
colon2_token: None,
lt_token: Default::default(),
args: cont.generics
.params
.iter()
.map(|param| match *param {
syn::GenericParam::Type(ref param) => {
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
qself: None,
path: param.ident.into(),
}))
}
syn::GenericParam::Lifetime(ref param) => {
syn::GenericArgument::Lifetime(param.lifetime)
}
syn::GenericParam::Const(_) => {
panic!("Serde does not support const generics yet");
}
})
.collect(),
gt_token: Default::default(),
},
),
},
].into_iter().collect(),
segments: vec![syn::PathSegment {
ident: cont.ident,
arguments: syn::PathArguments::AngleBracketed(
syn::AngleBracketedGenericArguments {
colon2_token: None,
lt_token: Default::default(),
args: cont.generics
.params
.iter()
.map(|param| match *param {
syn::GenericParam::Type(ref param) => {
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
qself: None,
path: param.ident.into(),
}))
}
syn::GenericParam::Lifetime(ref param) => {
syn::GenericArgument::Lifetime(param.lifetime)
}
syn::GenericParam::Const(_) => {
panic!("Serde does not support const generics yet");
}
})
.collect(),
gt_token: Default::default(),
},
),
}].into_iter()
.collect(),
},
})
}
@@ -6,15 +6,15 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use syn::{self, Ident, Index, Member};
use proc_macro2::{Literal, Span};
use quote::{ToTokens, Tokens};
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use quote::{ToTokens, Tokens};
use proc_macro2::{Literal, Span};
use syn::{self, Ident, Index, Member};

use bound;
use fragment::{Expr, Fragment, Match, Stmts};
use internals::ast::{Data, Container, Field, Style, Variant};
use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{self, attr};

use std::collections::BTreeSet;
@@ -27,7 +27,10 @@ pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<Tokens, Str
let ident = &cont.ident;
let params = Parameters::new(&cont);
let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(&params);
let dummy_const = Ident::new(&format!("_IMPL_DESERIALIZE_FOR_{}", ident), Span::call_site());
let dummy_const = Ident::new(
&format!("_IMPL_DESERIALIZE_FOR_{}", ident),
Span::call_site(),
);
let body = Stmts(deserialize_body(&cont, &params));
let delife = params.borrowed.de_lifetime();

@@ -268,7 +271,8 @@ fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option<St

let code = match cont.data {
Data::Struct(Style::Struct, ref fields) => {
if let Some(code) = deserialize_struct_in_place(None, params, fields, &cont.attrs, None) {
if let Some(code) = deserialize_struct_in_place(None, params, fields, &cont.attrs, None)
{
code
} else {
return None;
@@ -550,7 +554,8 @@ fn deserialize_seq(
None => {
let field_ty = &field.ty;
let span = field.original.span();
let func = quote_spanned!(span=> _serde::de::SeqAccess::next_element::<#field_ty>);
let func =
quote_spanned!(span=> _serde::de::SeqAccess::next_element::<#field_ty>);
quote!(try!(#func(&mut __seq)))
}
Some(path) => {
@@ -637,13 +642,12 @@ fn deserialize_seq_in_place(
.enumerate()
.map(|(field_index, (_, field))| {
// If there's no field name, assume we're a tuple-struct and use a numeric index
let field_name = field
.ident
.map(Member::Named)
.unwrap_or_else(|| Member::Unnamed(Index {
let field_name = field.ident.map(Member::Named).unwrap_or_else(|| {
Member::Unnamed(Index {
index: field_index as u32,
span: Span::call_site(),
}));
})
});

let dot = quote_spanned!(Span::call_site()=> .);
if field.attrs.skip_deserializing() {
@@ -919,8 +923,8 @@ fn deserialize_struct_in_place(

let visit_seq = Stmts(deserialize_seq_in_place(params, fields, cattrs));

let (field_visitor, fields_stmt, visit_map) = deserialize_struct_as_struct_in_place_visitor(
params, fields, cattrs);
let (field_visitor, fields_stmt, visit_map) =
deserialize_struct_as_struct_in_place_visitor(params, fields, cattrs);

let field_visitor = Stmts(field_visitor);
let fields_stmt = Stmts(fields_stmt);
@@ -1059,9 +1063,7 @@ fn deserialize_externally_tagged_enum(
let variant_name = field_i(i);

let block = Match(deserialize_externally_tagged_variant(
params,
variant,
cattrs,
params, variant, cattrs,
));

quote! {
@@ -1161,9 +1163,9 @@ fn deserialize_internally_tagged_enum(
params,
variant,
cattrs,
quote!(
quote! {
_serde::private::de::ContentDeserializer::<__D::Error>::new(__tagged.content)
),
},
));

quote! {
@@ -1525,8 +1527,7 @@ fn deserialize_externally_tagged_variant(
cattrs: &attr::Container,
) -> Fragment {
if let Some(path) = variant.attrs.deserialize_with() {
let (wrapper, wrapper_ty, unwrap_fn) =
wrap_deserialize_variant_with(params, variant, path);
let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path);
return quote_block! {
#wrapper
_serde::export::Result::map(
@@ -1608,8 +1609,7 @@ fn deserialize_untagged_variant(
deserializer: Tokens,
) -> Fragment {
if let Some(path) = variant.attrs.deserialize_with() {
let (wrapper, wrapper_ty, unwrap_fn) =
wrap_deserialize_variant_with(params, variant, path);
let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path);
return quote_block! {
#wrapper
_serde::export::Result::map(
@@ -1715,7 +1715,7 @@ fn deserialize_untagged_newtype_variant(
fn deserialize_generated_identifier(
fields: &[(String, Ident)],
cattrs: &attr::Container,
is_variant: bool
is_variant: bool,
) -> Fragment {
let this = quote!(__Field);
let field_idents: &Vec<_> = &fields.iter().map(|&(_, ref ident)| ident).collect();
@@ -1810,12 +1810,7 @@ fn deserialize_custom_identifier(

let names_idents: Vec<_> = ordinary
.iter()
.map(|variant| {
(
variant.attrs.name().deserialize_name(),
variant.ident,
)
})
.map(|variant| (variant.attrs.name().deserialize_name(), variant.ident))
.collect();

let names = names_idents.iter().map(|&(ref name, _)| name);
@@ -1872,12 +1867,16 @@ fn deserialize_identifier(
fields: &[(String, Ident)],
is_variant: bool,
fallthrough: Option<Tokens>,
collect_other_fields: bool
collect_other_fields: bool,
) -> Fragment {
let field_strs = fields.iter().map(|&(ref name, _)| name);
let field_borrowed_strs = fields.iter().map(|&(ref name, _)| name);
let field_bytes = fields.iter().map(|&(ref name, _)| Literal::byte_string(name.as_bytes()));
let field_borrowed_bytes = fields.iter().map(|&(ref name, _)| Literal::byte_string(name.as_bytes()));
let field_bytes = fields
.iter()
.map(|&(ref name, _)| Literal::byte_string(name.as_bytes()));
let field_borrowed_bytes = fields
.iter()
.map(|&(ref name, _)| Literal::byte_string(name.as_bytes()));

let constructors: &Vec<_> = &fields
.iter()
@@ -1999,8 +1998,12 @@ fn deserialize_identifier(
})
};

let (value_as_str_content, value_as_borrowed_str_content,
value_as_bytes_content, value_as_borrowed_bytes_content) = if !collect_other_fields {
let (
value_as_str_content,
value_as_borrowed_str_content,
value_as_bytes_content,
value_as_borrowed_bytes_content,
) = if !collect_other_fields {
(None, None, None, None)
} else {
(
@@ -2015,7 +2018,7 @@ fn deserialize_identifier(
}),
Some(quote! {
let __value = _serde::private::de::Content::Bytes(__value);
})
}),
)
};

@@ -2194,7 +2197,8 @@ fn deserialize_map(
None => {
let field_ty = &field.ty;
let span = field.original.span();
let func = quote_spanned!(span=> _serde::de::MapAccess::next_value::<#field_ty>);
let func =
quote_spanned!(span=> _serde::de::MapAccess::next_value::<#field_ty>);
quote! {
try!(#func(&mut __map))
}
@@ -2473,7 +2477,12 @@ fn deserialize_map_in_place(
// If missing_expr unconditionally returns an error, don't try
// to assign its value to self.place. Maybe this could be handled
// more elegantly.
if missing_expr.as_ref().into_tokens().to_string().starts_with("return ") {
if missing_expr
.as_ref()
.into_tokens()
.to_string()
.starts_with("return ")
{
let missing_expr = Stmts(missing_expr);
quote! {
if !#name {
@@ -2582,10 +2591,12 @@ fn wrap_deserialize_variant_with(
let (wrapper, wrapper_ty) =
wrap_deserialize_with(params, &quote!((#(#field_tys),*)), deserialize_with);

let field_access = (0..variant.fields.len()).map(|n| Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
}));
let field_access = (0..variant.fields.len()).map(|n| {
Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
})
});
let unwrap_fn = match variant.style {
Style::Struct => {
let field_idents = variant
@@ -2683,7 +2694,8 @@ impl<'a> ToTokens for InPlaceImplGenerics<'a> {
param.bounds.push(place_lifetime.lifetime);
}
syn::GenericParam::Type(ref mut param) => {
param.bounds
param
.bounds
.push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime));
}
syn::GenericParam::Const(_) => {}
@@ -23,8 +23,10 @@
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html

#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.37")]
#![cfg_attr(feature = "cargo-clippy", allow(enum_variant_names, redundant_field_names,
too_many_arguments, used_underscore_binding))]
#![cfg_attr(
feature = "cargo-clippy",
allow(enum_variant_names, redundant_field_names, too_many_arguments, used_underscore_binding)
)]
// The `quote!` macro requires deep recursion.
#![recursion_limit = "512"]

@@ -54,8 +56,8 @@ mod bound;
#[macro_use]
mod fragment;

mod ser;
mod de;
mod ser;

#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
@@ -6,14 +6,14 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use syn::{self, Ident, Index, Member};
use syn::spanned::Spanned;
use quote::Tokens;
use proc_macro2::Span;
use quote::Tokens;
use syn::spanned::Spanned;
use syn::{self, Ident, Index, Member};

use bound;
use fragment::{Fragment, Match, Stmts};
use internals::ast::{Data, Container, Field, Style, Variant};
use internals::ast::{Container, Data, Field, Style, Variant};
use internals::{attr, Ctxt};

use std::u32;
@@ -204,10 +204,14 @@ fn serialize_newtype_struct(
) -> Fragment {
let type_name = cattrs.name().serialize_name();

let mut field_expr = get_member(params, field, &Member::Unnamed(Index {
index: 0,
span: Span::call_site(),
}));
let mut field_expr = get_member(
params,
field,
&Member::Unnamed(Index {
index: 0,
span: Span::call_site(),
}),
);
if let Some(path) = field.attrs.serialize_with() {
field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr);
}
@@ -224,12 +228,8 @@ fn serialize_tuple_struct(
fields: &[Field],
cattrs: &attr::Container,
) -> Fragment {
let serialize_stmts = serialize_tuple_struct_visitor(
fields,
params,
false,
&TupleTrait::SerializeTupleStruct,
);
let serialize_stmts =
serialize_tuple_struct_visitor(fields, params, false, &TupleTrait::SerializeTupleStruct);

let type_name = cattrs.name().serialize_name();
let len = serialize_stmts.len();
@@ -252,13 +252,13 @@ fn serialize_struct(params: &Parameters, fields: &[Field], cattrs: &attr::Contai
}
}

fn serialize_struct_as_struct(params: &Parameters, fields: &[Field], cattrs: &attr::Container) -> Fragment {
let serialize_fields = serialize_struct_visitor(
fields,
params,
false,
&StructTrait::SerializeStruct,
);
fn serialize_struct_as_struct(
params: &Parameters,
fields: &[Field],
cattrs: &attr::Container,
) -> Fragment {
let serialize_fields =
serialize_struct_visitor(fields, params, false, &StructTrait::SerializeStruct);

let type_name = cattrs.name().serialize_name();

@@ -287,13 +287,13 @@ fn serialize_struct_as_struct(params: &Parameters, fields: &[Field], cattrs: &at
}
}

fn serialize_struct_as_map(params: &Parameters, fields: &[Field], cattrs: &attr::Container) -> Fragment {
let serialize_fields = serialize_struct_visitor(
fields,
params,
false,
&StructTrait::SerializeMap,
);
fn serialize_struct_as_map(
params: &Parameters,
fields: &[Field],
cattrs: &attr::Container,
) -> Fragment {
let serialize_fields =
serialize_struct_visitor(fields, params, false, &StructTrait::SerializeMap);

let mut serialized_fields = fields
.iter()
@@ -385,8 +385,8 @@ fn serialize_variant(
}
}
Style::Tuple => {
let field_names =
(0..variant.fields.len()).map(|i| Ident::new(&format!("__field{}", i), Span::call_site()));
let field_names = (0..variant.fields.len())
.map(|i| Ident::new(&format!("__field{}", i), Span::call_site()));
quote! {
#this::#variant_ident(#(ref #field_names),*)
}
@@ -776,12 +776,10 @@ fn serialize_struct_variant<'a>(
name: &str,
) -> Fragment {
let struct_trait = match context {
StructVariant::ExternallyTagged { .. } => (
StructTrait::SerializeStructVariant
),
StructVariant::InternallyTagged { .. } | StructVariant::Untagged => (
StructTrait::SerializeStruct
),
StructVariant::ExternallyTagged { .. } => (StructTrait::SerializeStructVariant),
StructVariant::InternallyTagged { .. } | StructVariant::Untagged => {
(StructTrait::SerializeStruct)
}
};

let serialize_fields = serialize_struct_visitor(fields, params, true, &struct_trait);
@@ -865,10 +863,14 @@ fn serialize_tuple_struct_visitor(
let id = Ident::new(&format!("__field{}", i), Span::call_site());
quote!(#id)
} else {
get_member(params, field, &Member::Unnamed(Index {
index: i as u32,
span: Span::call_site(),
}))
get_member(
params,
field,
&Member::Unnamed(Index {
index: i as u32,
span: Span::call_site(),
}),
)
};

let skip = field
@@ -1009,10 +1011,12 @@ fn wrap_serialize_with(
};
let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl();

let field_access = (0..field_exprs.len()).map(|n| Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
}));
let field_access = (0..field_exprs.len()).map(|n| {
Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
})
});

quote!({
struct __SerializeWith #wrapper_impl_generics #where_clause {
@@ -1052,9 +1056,7 @@ fn mut_if(is_mut: bool) -> Option<Tokens> {
fn get_member(params: &Parameters, field: &Field, member: &Member) -> Tokens {
let self_var = &params.self_var;
match (params.is_remote, field.attrs.getter()) {
(false, None) => {
quote_spanned!(Span::call_site()=> &#self_var.#member)
}
(false, None) => quote_spanned!(Span::call_site()=> &#self_var.#member),
(true, None) => {
let inner = quote_spanned!(Span::call_site()=> &#self_var.#member);
let ty = field.ty;
@@ -1094,12 +1096,12 @@ impl StructTrait {
fn skip_field(&self, span: Span) -> Option<Tokens> {
match *self {
StructTrait::SerializeMap => None,
StructTrait::SerializeStruct => Some({
quote_spanned!(span=> _serde::ser::SerializeStruct::skip_field)
}),
StructTrait::SerializeStructVariant => Some({
quote_spanned!(span=> _serde::ser::SerializeStructVariant::skip_field)
})
StructTrait::SerializeStruct => {
Some(quote_spanned!(span=> _serde::ser::SerializeStruct::skip_field))
}
StructTrait::SerializeStructVariant => {
Some(quote_spanned!(span=> _serde::ser::SerializeStructVariant::skip_field))
}
}
}
}
@@ -6,11 +6,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use syn;
use attr;
use check;
use Ctxt;
use syn;
use syn::punctuated::Punctuated;
use Ctxt;

pub struct Container<'a> {
pub ident: syn::Ident,
@@ -6,16 +6,16 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use Ctxt;
use proc_macro2::{Group, Span, TokenStream, TokenTree};
use std::collections::BTreeSet;
use std::str::FromStr;
use syn;
use syn::punctuated::Punctuated;
use syn::synom::{ParseError, Synom};
use syn::Ident;
use syn::Meta::{List, NameValue, Word};
use syn::NestedMeta::{Literal, Meta};
use syn::punctuated::Punctuated;
use syn::synom::{Synom, ParseError};
use std::collections::BTreeSet;
use std::str::FromStr;
use proc_macro2::{Span, TokenStream, TokenTree, Group};
use Ctxt;

// This module handles parsing of `#[serde(...)]` attributes. The entrypoints
// are `attr::Container::from_ast`, `attr::Variant::from_ast`, and
@@ -234,7 +234,10 @@ impl Container {

// Parse `#[serde(default)]`
Meta(Word(word)) if word == "default" => match item.data {
syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(_), .. }) => {
syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(_),
..
}) => {
default.set(Default::Default);
}
_ => cx.error(
@@ -247,7 +250,10 @@ impl Container {
Meta(NameValue(ref m)) if m.ident == "default" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
match item.data {
syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(_), .. }) => {
syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(_),
..
}) => {
default.set(Default::Path(path));
}
_ => cx.error(
@@ -503,13 +509,11 @@ fn decide_identifier(
}
(&syn::Data::Enum(_), true, false) => Identifier::Field,
(&syn::Data::Enum(_), false, true) => Identifier::Variant,
(&syn::Data::Struct(_), true, false)
| (&syn::Data::Union(_), true, false) => {
(&syn::Data::Struct(_), true, false) | (&syn::Data::Union(_), true, false) => {
cx.error("`field_identifier` can only be used on an enum");
Identifier::No
}
(&syn::Data::Struct(_), false, true)
| (&syn::Data::Union(_), false, true) => {
(&syn::Data::Struct(_), false, true) | (&syn::Data::Union(_), false, true) => {
cx.error("`variant_identifier` can only be used on an enum");
Identifier::No
}
@@ -594,10 +598,16 @@ impl Variant {
Meta(NameValue(ref m)) if m.ident == "with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
let mut ser_path = path.clone();
ser_path.path.segments.push(Ident::new("serialize", Span::call_site()).into());
ser_path
.path
.segments
.push(Ident::new("serialize", Span::call_site()).into());
serialize_with.set(ser_path);
let mut de_path = path;
de_path.path.segments.push(Ident::new("deserialize", Span::call_site()).into());
de_path
.path
.segments
.push(Ident::new("deserialize", Span::call_site()).into());
deserialize_with.set(de_path);
}
}
@@ -838,10 +848,16 @@ impl Field {
Meta(NameValue(ref m)) if m.ident == "with" => {
if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
let mut ser_path = path.clone();
ser_path.path.segments.push(Ident::new("serialize", Span::call_site()).into());
ser_path
.path
.segments
.push(Ident::new("serialize", Span::call_site()).into());
serialize_with.set(ser_path);
let mut de_path = path;
de_path.path.segments.push(Ident::new("deserialize", Span::call_site()).into());
de_path
.path
.segments
.push(Ident::new("deserialize", Span::call_site()).into());
deserialize_with.set(de_path);
}
}
@@ -873,7 +889,9 @@ impl Field {

// Parse `#[serde(borrow = "'a + 'b")]`
Meta(NameValue(ref m)) if m.ident == "borrow" => {
if let Ok(lifetimes) = parse_lit_into_lifetimes(cx, m.ident.as_ref(), &m.lit) {
if let Ok(lifetimes) =
parse_lit_into_lifetimes(cx, m.ident.as_ref(), &m.lit)
{
if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) {
for lifetime in &lifetimes {
if !borrowable.contains(lifetime) {
@@ -939,10 +957,14 @@ impl Field {
leading_colon: None,
segments: Punctuated::new(),
};
path.segments.push(Ident::new("_serde", Span::call_site()).into());
path.segments.push(Ident::new("private", Span::call_site()).into());
path.segments.push(Ident::new("de", Span::call_site()).into());
path.segments.push(Ident::new("borrow_cow_str", Span::call_site()).into());
path.segments
.push(Ident::new("_serde", Span::call_site()).into());
path.segments
.push(Ident::new("private", Span::call_site()).into());
path.segments
.push(Ident::new("de", Span::call_site()).into());
path.segments
.push(Ident::new("borrow_cow_str", Span::call_site()).into());
let expr = syn::ExprPath {
attrs: Vec::new(),
qself: None,
@@ -954,10 +976,14 @@ impl Field {
leading_colon: None,
segments: Punctuated::new(),
};
path.segments.push(Ident::new("_serde", Span::call_site()).into());
path.segments.push(Ident::new("private", Span::call_site()).into());
path.segments.push(Ident::new("de", Span::call_site()).into());
path.segments.push(Ident::new("borrow_cow_bytes", Span::call_site()).into());
path.segments
.push(Ident::new("_serde", Span::call_site()).into());
path.segments
.push(Ident::new("private", Span::call_site()).into());
path.segments
.push(Ident::new("de", Span::call_site()).into());
path.segments
.push(Ident::new("borrow_cow_bytes", Span::call_site()).into());
let expr = syn::ExprPath {
attrs: Vec::new(),
qself: None,
@@ -1097,7 +1123,10 @@ where
Ok((ser_meta.get(), de_meta.get()))
}

fn get_renames<'a>(cx: &Ctxt, items: &'a Punctuated<syn::NestedMeta, Token![,]>) -> Result<SerAndDe<&'a syn::LitStr>, ()> {
fn get_renames<'a>(
cx: &Ctxt,
items: &'a Punctuated<syn::NestedMeta, Token![,]>,
) -> Result<SerAndDe<&'a syn::LitStr>, ()> {
get_ser_and_de(cx, "rename", items, get_lit_str)
}

@@ -1141,12 +1170,18 @@ fn get_lit_str<'a>(

fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::Path, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
parse_lit_str(string).map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
parse_lit_str(string)
.map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
}

fn parse_lit_into_expr_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::ExprPath, ()> {
fn parse_lit_into_expr_path(
cx: &Ctxt,
attr_name: &str,
lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> {
let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
parse_lit_str(string).map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
parse_lit_str(string)
.map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
}

fn parse_lit_into_where(
@@ -1173,7 +1208,8 @@ fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::
parse_lit_str(string).map_err(|_| {
cx.error(format!(
"failed to parse type: {} = {:?}",
attr_name, string.value()
attr_name,
string.value()
))
})
}
@@ -1210,7 +1246,10 @@ fn parse_lit_into_lifetimes(
return Ok(set);
}

cx.error(format!("failed to parse borrowed lifetimes: {:?}", string.value()));
cx.error(format!(
"failed to parse borrowed lifetimes: {:?}",
string.value()
));
Err(())
}

@@ -1255,14 +1294,10 @@ fn is_cow(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
return false;
}
};
seg.ident == "Cow"
&& args.len() == 2
&& match (&args[0], &args[1]) {
(&syn::GenericArgument::Lifetime(_), &syn::GenericArgument::Type(ref arg)) => {
elem(arg)
}
_ => false,
}
seg.ident == "Cow" && args.len() == 2 && match (&args[0], &args[1]) {
(&syn::GenericArgument::Lifetime(_), &syn::GenericArgument::Type(ref arg)) => elem(arg),
_ => false,
}
}

// Whether the type looks like it might be `&T` where elem="T". This can have
@@ -1287,9 +1322,7 @@ fn is_cow(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
// }
fn is_rptr(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
match *ty {
syn::Type::Reference(ref ty) => {
ty.mutability.is_none() && elem(&ty.elem)
}
syn::Type::Reference(ref ty) => ty.mutability.is_none() && elem(&ty.elem),
_ => false,
}
}
@@ -1307,17 +1340,13 @@ fn is_slice_u8(ty: &syn::Type) -> bool {

fn is_primitive_type(ty: &syn::Type, primitive: &str) -> bool {
match *ty {
syn::Type::Path(ref ty) => {
ty.qself.is_none() && is_primitive_path(&ty.path, primitive)
}
syn::Type::Path(ref ty) => ty.qself.is_none() && is_primitive_path(&ty.path, primitive),
_ => false,
}
}

fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool {
path.leading_colon.is_none()
&& path.segments.len() == 1
&& path.segments[0].ident == primitive
path.leading_colon.is_none() && path.segments.len() == 1 && path.segments[0].ident == primitive
&& path.segments[0].arguments.is_empty()
}

@@ -1414,7 +1443,10 @@ fn spanned_tokens(s: &syn::LitStr) -> Result<TokenStream, ParseError> {
}

fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
stream
.into_iter()
.map(|token| respan_token_tree(token, span))
.collect()
}

fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
@@ -113,14 +113,7 @@ impl FromStr for RenameRule {
fn rename_variants() {
for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
(
"Outcome",
"outcome",
"OUTCOME",
"outcome",
"outcome",
"OUTCOME",
"outcome",
"OUTCOME",
"Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
(
"VeryTasty",
@@ -154,13 +147,7 @@ fn rename_variants() {
fn rename_fields() {
for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
(
"outcome",
"OUTCOME",
"Outcome",
"outcome",
"OUTCOME",
"outcome",
"OUTCOME",
"outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
(
"very_tasty",
@@ -6,8 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast::{Data, Container, Style};
use attr::{Identifier, EnumTag};
use ast::{Container, Data, Style};
use attr::{EnumTag, Identifier};
use Ctxt;

/// Cross-cutting checks that require looking at more than a single attrs
@@ -66,17 +66,17 @@ fn check_flatten(cx: &Ctxt, cont: &Container) {
if field.attrs.skip_serializing() {
cx.error(
"#[serde(flatten] can not be combined with \
#[serde(skip_serializing)]"
#[serde(skip_serializing)]",
);
} else if field.attrs.skip_serializing_if().is_some() {
cx.error(
"#[serde(flatten] can not be combined with \
#[serde(skip_serializing_if = \"...\")]"
#[serde(skip_serializing_if = \"...\")]",
);
} else if field.attrs.skip_deserializing() {
cx.error(
"#[serde(flatten] can not be combined with \
#[serde(skip_deserializing)]"
#[serde(skip_deserializing)]",
);
}
}
@@ -220,10 +220,7 @@ fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
/// the same as either one of its fields, as this would result in
/// duplicate keys in the serialized output and/or ambiguity in
/// the to-be-deserialized input.
fn check_internal_tag_field_name_conflict(
cx: &Ctxt,
cont: &Container,
) {
fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
let variants = match cont.data {
Data::Enum(ref variants) => variants,
Data::Struct(_, _) => return,
@@ -235,10 +232,7 @@ fn check_internal_tag_field_name_conflict(
};

let diagnose_conflict = || {
let message = format!(
"variant field name `{}` conflicts with internal tag",
tag
);
let message = format!("variant field name `{}` conflicts with internal tag", tag);
cx.error(message);
};

@@ -257,8 +251,8 @@ fn check_internal_tag_field_name_conflict(
return;
}
}
},
Style::Unit | Style::Newtype | Style::Tuple => {},
}
Style::Unit | Style::Newtype | Style::Tuple => {}
}
}
}
@@ -267,7 +261,10 @@ fn check_internal_tag_field_name_conflict(
/// contents tag must differ, for the same reason.
fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
let (type_tag, content_tag) = match *cont.attrs.tag() {
EnumTag::Adjacent { ref tag, ref content } => (tag, content),
EnumTag::Adjacent {
ref tag,
ref content,
} => (tag, content),
EnumTag::Internal { .. } | EnumTag::External | EnumTag::None => return,
};
@@ -6,8 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::Display;
use std::cell::RefCell;
use std::fmt::Display;
use std::thread;

#[derive(Default)]
@@ -7,8 +7,10 @@
// except according to those terms.

#![doc(html_root_url = "https://docs.rs/serde_derive_internals/0.23.0")]
#![cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity, doc_markdown, match_same_arms,
redundant_field_names))]
#![cfg_attr(
feature = "cargo-clippy",
allow(cyclomatic_complexity, doc_markdown, match_same_arms, redundant_field_names)
)]

#[macro_use]
extern crate syn;
File diff suppressed because it is too large
@@ -6,9 +6,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use serde::de::value::{MapAccessDeserializer, SeqAccessDeserializer};
use serde::de::{self, Deserialize, DeserializeSeed, EnumAccess, IntoDeserializer, MapAccess,
SeqAccess, VariantAccess, Visitor};
use serde::de::value::{MapAccessDeserializer, SeqAccessDeserializer};

use error::Error;
use token::Token;
@@ -22,28 +22,28 @@ macro_rules! assert_next_token {
($de:expr, $expected:expr) => {
match $de.next_token_opt() {
Some(token) if token == $expected => {}
Some(other) => {
panic!("expected Token::{} but deserialization wants Token::{}",
other, $expected)
}
None => {
panic!("end of tokens but deserialization wants Token::{}",
$expected)
}
Some(other) => panic!(
"expected Token::{} but deserialization wants Token::{}",
other, $expected
),
None => panic!(
"end of tokens but deserialization wants Token::{}",
$expected
),
}
}
};
}

macro_rules! unexpected {
($token:expr) => {
panic!("deserialization did not expect this token: {}", $token)
}
};
}

macro_rules! end_of_tokens {
() => {
panic!("ran out of tokens to deserialize")
}
};
}

impl<'de> Deserializer<'de> {
@@ -160,24 +160,28 @@
// Whitelisted clippy lints
#![cfg_attr(feature = "cargo-clippy", allow(float_cmp))]
// Whitelisted clippy_pedantic lints
#![cfg_attr(feature = "cargo-clippy",
allow(empty_line_after_outer_attr, missing_docs_in_private_items,
redundant_field_names, stutter, use_debug, use_self))]
#![cfg_attr(
feature = "cargo-clippy",
allow(
empty_line_after_outer_attr, missing_docs_in_private_items, redundant_field_names, stutter,
use_debug, use_self
)
)]

#[macro_use]
extern crate serde;

mod ser;
mod de;
mod error;
mod ser;

mod assert;
mod configure;
mod token;
mod assert;

pub use token::Token;
pub use assert::{assert_de_tokens, assert_de_tokens_error, assert_ser_tokens,
assert_ser_tokens_error, assert_tokens};
pub use token::Token;

pub use configure::{Compact, Configure, Readable};