Fix a cfg

Following consensus on #2580 (review), this change phases out the precompiled serde_derive binary, per the final consensus reached in #2580.

parent 151b45ae36
commit 360606b9a6
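Roughly, the mechanism being removed worked like this (a simplified sketch; the actual removed sources appear in full in the diff below, and the function and variable names here are illustrative only): on x86_64 Linux the published serde_derive crate bundled a prebuilt `serde_derive-x86_64-unknown-linux-gnu` executable, and the proc-macro entry points serialized each derive input, piped it to that child process over stdin, and read the expanded tokens back from its stdout.

use std::io::Write;
use std::process::{Command, Stdio};

// Hypothetical illustration of the removed expansion path: serialize the
// macro input, pipe it through the bundled binary, read the expansion back.
fn expand_via_subprocess(exe: &std::path::Path, input_bytes: &[u8]) -> std::io::Result<Vec<u8>> {
    let mut child = Command::new(exe)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;
    child.stdin.take().unwrap().write_all(input_bytes)?; // send linearized tokens
    let output = child.wait_with_output()?;              // expanded tokens come back on stdout
    Ok(output.stdout)
}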
.github/workflows/ci.yml (24 lines changed)
@@ -98,7 +98,6 @@ jobs:
       - run: cd serde && cargo check --no-default-features
       - run: cd serde && cargo check
       - run: cd serde_derive && cargo check
-      - run: cd precompiled/serde_derive && cargo check

   alloc:
     name: Rust 1.36.0
@@ -109,27 +108,6 @@ jobs:
       - uses: dtolnay/rust-toolchain@1.36.0
       - run: cd serde && cargo build --no-default-features --features alloc

-  precompiled:
-    name: Precompiled
-    runs-on: ubuntu-latest
-    timeout-minutes: 45
-    steps:
-      - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@nightly
-        with:
-          components: rust-src
-          targets: x86_64-unknown-linux-musl
-      - run: precompiled/build.sh
-      - name: replace serde_derive dependency with precompiled
-        run: |
-          cargo rm serde_derive --package serde
-          cargo rm serde_derive --package serde --dev
-          sed -i '/"serde_derive"/d' Cargo.toml
-          sed -i '/\[workspace\]/d' precompiled/serde_derive/Cargo.toml
-          cargo add --dev serde_derive --path precompiled/serde_derive --package serde_test_suite
-          git diff
-      - run: cd test_suite && cargo test --features unstable -- --skip ui --exact
-
   macos:
     name: macOS
     runs-on: macos-latest
@@ -137,7 +115,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - uses: dtolnay/rust-toolchain@stable
-      - run: cd precompiled/serde_derive && cargo check
+      - run: cd serde_derive && cargo check

   minimal:
     name: Minimal versions
precompiled/.gitignore (1 line changed)
@@ -1 +0,0 @@
/serde_derive/serde_derive-x86_64-unknown-linux-gnu
precompiled/Cargo.toml
@@ -1,14 +0,0 @@
[workspace]
members = ["bin", "proc-macro2"]
resolver = "2"

[patch.crates-io]
proc-macro2 = { path = "proc-macro2" }

[profile.precompiled]
inherits = "release"
codegen-units = 1
lto = true
opt-level = "z"
panic = "abort"
strip = true
precompiled/bin/Cargo.toml
@@ -1,17 +0,0 @@
[package]
name = "serde_derive"
version = "1.0.184"
authors = ["David Tolnay <dtolnay@gmail.com>"]
publish = false

[lib]
doctest = false

[[bin]]
name = "serde_derive"
path = "main.rs"

[dependencies]
proc-macro2 = "1"
quote = { version = "1", default-features = false }
syn = { version = "2.0.28", default-features = false, features = ["clone-impls", "derive", "full", "parsing", "printing"] }
precompiled/bin/build.rs
@@ -1,4 +0,0 @@
fn main() {
    println!("cargo:rustc-cfg=precompiled");
    println!("cargo:rustc-cfg=feature=\"deserialize_in_place\"");
}
precompiled/bin/main.rs
@@ -1,45 +0,0 @@
extern crate proc_macro2;

use proc_macro2::watt;
use proc_macro2::watt::buffer::InputBuffer;
use std::alloc::{GlobalAlloc, Layout, System};
use std::io::{self, Read, Write};
use std::sync::atomic::Ordering;

struct MonotonicAllocator;

#[global_allocator]
static ALLOCATOR: MonotonicAllocator = MonotonicAllocator;

unsafe impl GlobalAlloc for MonotonicAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // Leak: this cuts 3% of code size from the precompiled macro binary.
        // There is no way that serde_derive would fill up all memory on the
        // host. When the subprocess exits, the operating system will clean this up.
    }
}

fn main() {
    let mut buf = Vec::new();
    io::stdin().read_to_end(&mut buf).unwrap();

    let mut buf = InputBuffer::new(&buf);
    let derive = match buf.read_u8() {
        0 => serde_derive::derive_serialize,
        1 => serde_derive::derive_deserialize,
        2 => {
            serde_derive::DESERIALIZE_IN_PLACE.store(true, Ordering::Relaxed);
            serde_derive::derive_deserialize
        }
        _ => unreachable!(),
    };

    let input = watt::load(&mut buf);
    let output = derive(input);
    let bytes = watt::linearize(output);
    io::stdout().write_all(&bytes).unwrap();
}
precompiled/bin/src (symlink)
@@ -1 +0,0 @@
../../serde_derive/src
precompiled/build.sh
@@ -1,19 +0,0 @@
#!/usr/bin/env bash

cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null
set -e -x

# TODO: Sanitize host filesystem paths. https://github.com/rust-lang/cargo/issues/12137

cargo +nightly build \
    --manifest-path bin/Cargo.toml \
    --bin serde_derive \
    --profile precompiled \
    -Z unstable-options \
    -Z build-std=std,panic_abort \
    -Z build-std-features=panic_immediate_abort \
    --target x86_64-unknown-linux-musl \
    --out-dir serde_derive

rm -f serde_derive/serde_derive-x86_64-unknown-linux-gnu
mv serde_derive/serde_derive{,-x86_64-unknown-linux-gnu}
precompiled/proc-macro2/Cargo.toml
@@ -1,8 +0,0 @@
[package]
name = "proc-macro2"
version = "1.0.66"
edition = "2021"
publish = false

[dependencies]
proc-macro2 = { package = "proc-macro2-fallback", version = "1" }
precompiled/proc-macro2/src/lib.rs
@@ -1,815 +0,0 @@
#[doc(hidden)]
|
||||
pub mod watt;
|
||||
|
||||
use crate::extra::DelimSpan;
|
||||
use crate::watt::Identity;
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::RangeBounds;
|
||||
use std::str::FromStr;
|
||||
|
||||
pub use proc_macro2::{Delimiter, Spacing};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct Span {
|
||||
lo: u32,
|
||||
hi: u32,
|
||||
}
|
||||
|
||||
impl Span {
|
||||
pub fn call_site() -> Self {
|
||||
Span { lo: 0, hi: 0 }
|
||||
}
|
||||
|
||||
pub fn join(&self, other: Self) -> Option<Self> {
|
||||
Some(Span {
|
||||
lo: self.lo,
|
||||
hi: other.hi,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum TokenTree {
|
||||
Group(Group),
|
||||
Ident(Ident),
|
||||
Punct(Punct),
|
||||
Literal(Literal),
|
||||
}
|
||||
|
||||
impl TokenTree {
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
TokenTree::Group(group) => group.span(),
|
||||
TokenTree::Ident(ident) => ident.span(),
|
||||
TokenTree::Punct(punct) => punct.span(),
|
||||
TokenTree::Literal(literal) => literal.span(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
match self {
|
||||
TokenTree::Group(group) => group.set_span(span),
|
||||
TokenTree::Ident(ident) => ident.set_span(span),
|
||||
TokenTree::Punct(punct) => punct.set_span(span),
|
||||
TokenTree::Literal(literal) => literal.set_span(span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Group> for TokenTree {
|
||||
fn from(group: Group) -> Self {
|
||||
TokenTree::Group(group)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Ident> for TokenTree {
|
||||
fn from(ident: Ident) -> Self {
|
||||
TokenTree::Ident(ident)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Punct> for TokenTree {
|
||||
fn from(punct: Punct) -> Self {
|
||||
TokenTree::Punct(punct)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Literal> for TokenTree {
|
||||
fn from(literal: Literal) -> Self {
|
||||
TokenTree::Literal(literal)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for TokenTree {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TokenTree::Group(group) => Debug::fmt(group, formatter),
|
||||
TokenTree::Ident(ident) => {
|
||||
let mut debug = formatter.debug_struct("Ident");
|
||||
debug.field("sym", &format_args!("{}", ident));
|
||||
debug.finish()
|
||||
}
|
||||
TokenTree::Punct(punct) => Debug::fmt(punct, formatter),
|
||||
TokenTree::Literal(literal) => Debug::fmt(literal, formatter),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Group {
|
||||
delimiter: Delimiter,
|
||||
stream: Vec<TokenTree>,
|
||||
span: Span,
|
||||
span_open: Span,
|
||||
span_close: Span,
|
||||
identity: u32,
|
||||
}
|
||||
|
||||
impl Group {
|
||||
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
|
||||
Group {
|
||||
delimiter,
|
||||
stream: stream.content,
|
||||
span: Span::call_site(),
|
||||
span_open: Span::call_site(),
|
||||
span_close: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stream(&self) -> TokenStream {
|
||||
TokenStream {
|
||||
content: self.stream.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delimiter(&self) -> Delimiter {
|
||||
self.delimiter
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub fn span_open(&self) -> Span {
|
||||
self.span_open
|
||||
}
|
||||
|
||||
pub fn span_close(&self) -> Span {
|
||||
self.span_close
|
||||
}
|
||||
|
||||
pub fn delim_span(&self) -> DelimSpan {
|
||||
DelimSpan {
|
||||
join: self.span,
|
||||
open: self.span_open,
|
||||
close: self.span_close,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
self.identity |= Identity::RESPANNED;
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Group {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
let (open, close) = match self.delimiter {
|
||||
Delimiter::Parenthesis => ("(", ")"),
|
||||
Delimiter::Brace => ("{ ", "}"),
|
||||
Delimiter::Bracket => ("[", "]"),
|
||||
Delimiter::None => ("", ""),
|
||||
};
|
||||
|
||||
formatter.write_str(open)?;
|
||||
display_tokens(&self.stream, formatter)?;
|
||||
if self.delimiter == Delimiter::Brace && !self.stream.is_empty() {
|
||||
formatter.write_str(" ")?;
|
||||
}
|
||||
formatter.write_str(close)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Group {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut debug = formatter.debug_struct("Group");
|
||||
debug.field("delimiter", &self.delimiter);
|
||||
debug.field("stream", &self.stream);
|
||||
debug.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Ident {
|
||||
fallback: proc_macro2::Ident,
|
||||
span: Span,
|
||||
identity: u32,
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
pub fn new(string: &str, span: Span) -> Self {
|
||||
Ident {
|
||||
fallback: proc_macro2::Ident::new(string, proc_macro2::Span::call_site()),
|
||||
span,
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_raw(string: &str, span: Span) -> Self {
|
||||
Ident {
|
||||
fallback: proc_macro2::Ident::new_raw(string, proc_macro2::Span::call_site()),
|
||||
span,
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
self.identity |= Identity::RESPANNED;
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Ident {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Display::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Ident {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Ident {}
|
||||
|
||||
impl PartialEq for Ident {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
PartialEq::eq(&self.fallback, &other.fallback)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PartialEq<T> for Ident
|
||||
where
|
||||
T: ?Sized + AsRef<str>,
|
||||
{
|
||||
fn eq(&self, other: &T) -> bool {
|
||||
PartialEq::eq(&self.fallback, other)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Ident {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
Ord::cmp(&self.fallback, &other.fallback)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Ident {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
PartialOrd::partial_cmp(&self.fallback, &other.fallback)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Ident {
|
||||
fn hash<H: Hasher>(&self, hasher: &mut H) {
|
||||
Hash::hash(&self.fallback, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Punct {
|
||||
fallback: proc_macro2::Punct,
|
||||
span: Span,
|
||||
identity: u32,
|
||||
}
|
||||
|
||||
impl Punct {
|
||||
pub fn new(ch: char, spacing: Spacing) -> Self {
|
||||
Punct {
|
||||
fallback: proc_macro2::Punct::new(ch, spacing),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_char(&self) -> char {
|
||||
self.fallback.as_char()
|
||||
}
|
||||
|
||||
pub fn spacing(&self) -> Spacing {
|
||||
self.fallback.spacing()
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
self.identity |= Identity::RESPANNED;
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Punct {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Display::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Punct {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Literal {
|
||||
fallback: proc_macro2::Literal,
|
||||
span: Span,
|
||||
identity: u32,
|
||||
}
|
||||
|
||||
impl Literal {
|
||||
pub fn u8_suffixed(n: u8) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u8_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u16_suffixed(n: u16) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u16_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u32_suffixed(n: u32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u32_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u64_suffixed(n: u64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u64_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u128_suffixed(n: u128) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u128_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn usize_suffixed(n: usize) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::usize_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i8_suffixed(n: i8) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i8_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i16_suffixed(n: i16) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i16_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i32_suffixed(n: i32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i32_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i64_suffixed(n: i64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i64_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i128_suffixed(n: i128) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i128_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isize_suffixed(n: isize) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::isize_suffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u8_unsuffixed(n: u8) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u8_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u16_unsuffixed(n: u16) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u16_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u32_unsuffixed(n: u32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u32_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u64_unsuffixed(n: u64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u64_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn u128_unsuffixed(n: u128) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::u128_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn usize_unsuffixed(n: usize) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::usize_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i8_unsuffixed(n: i8) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i8_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i16_unsuffixed(n: i16) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i16_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i32_unsuffixed(n: i32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i32_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i64_unsuffixed(n: i64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i64_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn i128_unsuffixed(n: i128) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::i128_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isize_unsuffixed(n: isize) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::isize_unsuffixed(n),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn f64_unsuffixed(f: f64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::f64_unsuffixed(f),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn f64_suffixed(f: f64) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::f64_suffixed(f),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::f32_unsuffixed(f),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn f32_suffixed(f: f32) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::f32_suffixed(f),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string(string: &str) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::string(string),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn character(ch: char) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::character(ch),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn byte_string(s: &[u8]) -> Self {
|
||||
Literal {
|
||||
fallback: proc_macro2::Literal::byte_string(s),
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
self.identity |= Identity::RESPANNED;
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
let _ = range;
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Literal {
|
||||
type Err = LexError;
|
||||
|
||||
fn from_str(repr: &str) -> Result<Self, Self::Err> {
|
||||
let fallback = match proc_macro2::Literal::from_str(repr) {
|
||||
Ok(literal) => literal,
|
||||
Err(error) => {
|
||||
return Err(LexError {
|
||||
fallback: error,
|
||||
span: Span::call_site(),
|
||||
});
|
||||
}
|
||||
};
|
||||
Ok(Literal {
|
||||
fallback,
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Literal {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Display::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Literal {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TokenStream {
|
||||
content: Vec<TokenTree>,
|
||||
}
|
||||
|
||||
impl TokenStream {
|
||||
pub fn new() -> Self {
|
||||
TokenStream {
|
||||
content: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.content.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for TokenStream {
|
||||
type Item = TokenTree;
|
||||
type IntoIter = token_stream::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
token_stream::IntoIter {
|
||||
iter: self.content.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenStream> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
|
||||
self.content.extend(streams.into_iter().flatten());
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
||||
self.content.extend(streams);
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<TokenStream> for TokenStream {
|
||||
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
||||
let content = streams.into_iter().flatten().collect();
|
||||
TokenStream { content }
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<TokenTree> for TokenStream {
|
||||
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
|
||||
let content = streams.into_iter().collect();
|
||||
TokenStream { content }
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for TokenStream {
|
||||
type Err = LexError;
|
||||
|
||||
fn from_str(string: &str) -> Result<Self, Self::Err> {
|
||||
let fallback = match proc_macro2::TokenStream::from_str(string) {
|
||||
Ok(token_stream) => token_stream,
|
||||
Err(error) => {
|
||||
return Err(LexError {
|
||||
fallback: error,
|
||||
span: Span::call_site(),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
fn convert_token_stream(stream: proc_macro2::TokenStream) -> TokenStream {
|
||||
TokenStream {
|
||||
content: stream.into_iter().map(convert_token_tree).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_token_tree(token: proc_macro2::TokenTree) -> TokenTree {
|
||||
match token {
|
||||
proc_macro2::TokenTree::Group(group) => TokenTree::Group(Group::new(
|
||||
group.delimiter(),
|
||||
convert_token_stream(group.stream()),
|
||||
)),
|
||||
proc_macro2::TokenTree::Ident(ident) => TokenTree::Ident(Ident {
|
||||
fallback: ident,
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}),
|
||||
proc_macro2::TokenTree::Punct(punct) => TokenTree::Punct(Punct {
|
||||
fallback: punct,
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}),
|
||||
proc_macro2::TokenTree::Literal(literal) => TokenTree::Literal(Literal {
|
||||
fallback: literal,
|
||||
span: Span::call_site(),
|
||||
identity: Identity::NOVEL,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(convert_token_stream(fallback))
|
||||
}
|
||||
}
|
||||
|
||||
fn display_tokens(tokens: &[TokenTree], formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut joint = false;
|
||||
for (i, token) in tokens.iter().enumerate() {
|
||||
if i != 0 && !joint {
|
||||
write!(formatter, " ")?;
|
||||
}
|
||||
joint = false;
|
||||
match token {
|
||||
TokenTree::Group(group) => Display::fmt(group, formatter),
|
||||
TokenTree::Ident(ident) => Display::fmt(ident, formatter),
|
||||
TokenTree::Punct(punct) => {
|
||||
joint = punct.spacing() == Spacing::Joint;
|
||||
Display::fmt(punct, formatter)
|
||||
}
|
||||
TokenTree::Literal(literal) => Display::fmt(literal, formatter),
|
||||
}?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl Display for TokenStream {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
display_tokens(&self.content, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for TokenStream {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("TokenStream ")?;
|
||||
formatter.debug_list().entries(&self.content).finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LexError {
|
||||
fallback: proc_macro2::LexError,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl LexError {
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for LexError {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.fallback, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
pub mod token_stream {
|
||||
use super::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct IntoIter {
|
||||
pub(crate) iter: <Vec<TokenTree> as IntoIterator>::IntoIter,
|
||||
}
|
||||
|
||||
impl Iterator for IntoIter {
|
||||
type Item = TokenTree;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.iter.next()
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.iter.size_hint()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub mod extra {
|
||||
use crate::Span;
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DelimSpan {
|
||||
pub(crate) join: Span,
|
||||
pub(crate) open: Span,
|
||||
pub(crate) close: Span,
|
||||
}
|
||||
|
||||
impl DelimSpan {
|
||||
pub fn join(&self) -> Span {
|
||||
self.join
|
||||
}
|
||||
|
||||
pub fn open(&self) -> Span {
|
||||
self.open
|
||||
}
|
||||
|
||||
pub fn close(&self) -> Span {
|
||||
self.close
|
||||
}
|
||||
}
|
||||
}
|
precompiled/proc-macro2/src/watt/buffer.rs
@@ -1,69 +0,0 @@
use std::str;

pub struct OutputBuffer {
    bytes: Vec<u8>,
}

impl OutputBuffer {
    pub fn new() -> Self {
        OutputBuffer { bytes: Vec::new() }
    }

    pub fn write_u8(&mut self, value: u8) {
        self.bytes.push(value);
    }

    pub fn write_u16(&mut self, value: u16) {
        self.bytes.extend_from_slice(&value.to_le_bytes());
    }

    pub fn write_u32(&mut self, value: u32) {
        self.bytes.extend_from_slice(&value.to_le_bytes());
    }

    pub fn write_str(&mut self, value: &str) {
        self.bytes.extend_from_slice(value.as_bytes());
    }

    pub fn into_bytes(self) -> Vec<u8> {
        self.bytes
    }
}

pub struct InputBuffer<'a> {
    bytes: &'a [u8],
}

impl<'a> InputBuffer<'a> {
    pub fn new(bytes: &'a [u8]) -> Self {
        InputBuffer { bytes }
    }

    pub fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    pub fn read_u8(&mut self) -> u8 {
        let (first, rest) = self.bytes.split_first().unwrap();
        self.bytes = rest;
        *first
    }

    pub fn read_u16(&mut self) -> u16 {
        let (value, rest) = self.bytes.split_at(2);
        self.bytes = rest;
        u16::from_le_bytes([value[0], value[1]])
    }

    pub fn read_u32(&mut self) -> u32 {
        let (value, rest) = self.bytes.split_at(4);
        self.bytes = rest;
        u32::from_le_bytes([value[0], value[1], value[2], value[3]])
    }

    pub fn read_str(&mut self, len: usize) -> &'a str {
        let (string, rest) = self.bytes.split_at(len);
        self.bytes = rest;
        str::from_utf8(string).unwrap()
    }
}
precompiled/proc-macro2/src/watt/bytecode.rs
@@ -1,17 +0,0 @@
pub enum Bytecode {}

impl Bytecode {
    pub const GROUP_PARENTHESIS: u8 = 0;
    pub const GROUP_BRACE: u8 = 1;
    pub const GROUP_BRACKET: u8 = 2;
    pub const GROUP_NONE: u8 = 3;
    pub const IDENT: u8 = 4;
    pub const PUNCT_ALONE: u8 = 5;
    pub const PUNCT_JOINT: u8 = 6;
    pub const LITERAL: u8 = 7;
    pub const LOAD_GROUP: u8 = 8;
    pub const LOAD_IDENT: u8 = 9;
    pub const LOAD_PUNCT: u8 = 10;
    pub const LOAD_LITERAL: u8 = 11;
    pub const SET_SPAN: u8 = 12;
}
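As a rough illustration of the wire format these opcodes define (assuming the little-endian integer encoding from the buffer module above; the bytes are hypothetical, not captured from a real expansion): a derive request for a token stream such as `a + b` would start with a selector byte choosing the derive, followed by each token in depth-first post-order, children before their enclosing group.

// Hypothetical request encoding for the tokens `a + b`, per the opcodes above.
const REQUEST: &[u8] = &[
    0,             // selector: 0 = derive(Serialize)
    4, 1, 0, b'a', // IDENT, u16 little-endian length = 1, "a"
    5, b'+',       // PUNCT_ALONE, the punctuation character
    4, 1, 0, b'b', // IDENT, u16 little-endian length = 1, "b"
];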
precompiled/proc-macro2/src/watt/mod.rs
@@ -1,205 +0,0 @@
pub mod buffer;
pub mod bytecode;

use crate::watt::buffer::{InputBuffer, OutputBuffer};
use crate::watt::bytecode::Bytecode;
use crate::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::str::FromStr;

pub enum Kind {
    Group(Delimiter),
    Ident,
    Punct(Spacing),
    Literal,
}

pub enum Identity {}

impl Identity {
    pub const RESPANNED: u32 = 1 << 31;
    pub const NOVEL: u32 = u32::MAX;
}

impl Span {
    fn is_call_site(&self) -> bool {
        self.lo == 0 && self.hi == 0
    }
}

fn post_increment(counter: &mut u32) -> impl FnMut() -> u32 + '_ {
    || {
        let value = *counter;
        *counter += 1;
        value
    }
}

pub fn load(buf: &mut InputBuffer) -> TokenStream {
    let mut span_counter = 1;
    let mut next_span = post_increment(&mut span_counter);
    let mut next_span = || {
        let next = next_span();
        Span { lo: next, hi: next }
    };

    let [mut group_counter, mut ident_counter, mut punct_counter, mut literal_counter] = [0; 4];
    let mut next_group = post_increment(&mut group_counter);
    let mut next_ident = post_increment(&mut ident_counter);
    let mut next_punct = post_increment(&mut punct_counter);
    let mut next_literal = post_increment(&mut literal_counter);

    let mut trees = Vec::new();
    while !buf.is_empty() {
        match match buf.read_u8() {
            Bytecode::GROUP_PARENTHESIS => Kind::Group(Delimiter::Parenthesis),
            Bytecode::GROUP_BRACE => Kind::Group(Delimiter::Brace),
            Bytecode::GROUP_BRACKET => Kind::Group(Delimiter::Bracket),
            Bytecode::GROUP_NONE => Kind::Group(Delimiter::None),
            Bytecode::IDENT => Kind::Ident,
            Bytecode::PUNCT_ALONE => Kind::Punct(Spacing::Alone),
            Bytecode::PUNCT_JOINT => Kind::Punct(Spacing::Joint),
            Bytecode::LITERAL => Kind::Literal,
            _ => unreachable!(),
        } {
            Kind::Group(delimiter) => {
                let len = buf.read_u32();
                let stream = trees.drain(trees.len() - len as usize..).collect();
                trees.push(TokenTree::Group(Group {
                    delimiter,
                    stream,
                    span: next_span(),
                    span_open: next_span(),
                    span_close: next_span(),
                    identity: next_group(),
                }));
            }
            Kind::Ident => {
                let len = buf.read_u16();
                let repr = buf.read_str(len as usize);
                let ident = if let Some(repr) = repr.strip_prefix("r#") {
                    proc_macro2::Ident::new_raw(repr, proc_macro2::Span::call_site())
                } else if repr == "$crate" {
                    proc_macro2::Ident::new("crate", proc_macro2::Span::call_site())
                } else {
                    proc_macro2::Ident::new(repr, proc_macro2::Span::call_site())
                };
                trees.push(TokenTree::Ident(Ident {
                    fallback: ident,
                    span: next_span(),
                    identity: next_ident(),
                }));
            }
            Kind::Punct(spacing) => {
                let ch = buf.read_u8();
                assert!(ch.is_ascii());
                let punct = proc_macro2::Punct::new(ch as char, spacing);
                trees.push(TokenTree::Punct(Punct {
                    fallback: punct,
                    span: next_span(),
                    identity: next_punct(),
                }));
            }
            Kind::Literal => {
                let len = buf.read_u16();
                let repr = buf.read_str(len as usize);
                let literal = proc_macro2::Literal::from_str(repr).unwrap();
                trees.push(TokenTree::Literal(Literal {
                    fallback: literal,
                    span: next_span(),
                    identity: next_literal(),
                }));
            }
        }
    }

    TokenStream { content: trees }
}

pub fn linearize(tokens: TokenStream) -> Vec<u8> {
    let mut buf = OutputBuffer::new();
    for token in &tokens.content {
        linearize_token(token, &mut buf);
    }
    buf.into_bytes()
}

fn linearize_token(token: &TokenTree, buf: &mut OutputBuffer) {
    let needs_span;
    match token {
        TokenTree::Group(group) => {
            if group.identity < Identity::NOVEL {
                buf.write_u8(Bytecode::LOAD_GROUP);
                buf.write_u32(group.identity & !Identity::RESPANNED);
                needs_span = group.identity >= Identity::RESPANNED;
            } else {
                let len = group.stream.len();
                assert!(len <= u32::MAX as usize);
                for token in &group.stream {
                    linearize_token(token, buf);
                }
                buf.write_u8(match group.delimiter {
                    Delimiter::Parenthesis => Bytecode::GROUP_PARENTHESIS,
                    Delimiter::Brace => Bytecode::GROUP_BRACE,
                    Delimiter::Bracket => Bytecode::GROUP_BRACKET,
                    Delimiter::None => Bytecode::GROUP_NONE,
                });
                buf.write_u32(len as u32);
                needs_span = !group.span.is_call_site();
            }
        }
        TokenTree::Ident(ident) => {
            if ident.identity < Identity::NOVEL {
                buf.write_u8(Bytecode::LOAD_IDENT);
                buf.write_u32(ident.identity & !Identity::RESPANNED);
                needs_span = ident.identity >= Identity::RESPANNED;
            } else {
                buf.write_u8(Bytecode::IDENT);
                let repr = ident.to_string();
                assert!(repr.len() <= u16::MAX as usize);
                buf.write_u16(repr.len() as u16);
                buf.write_str(&repr);
                linearize_span(ident.span, buf);
                needs_span = false;
            }
        }
        TokenTree::Punct(punct) => {
            if punct.identity < Identity::NOVEL {
                buf.write_u8(Bytecode::LOAD_PUNCT);
                buf.write_u32(punct.identity & !Identity::RESPANNED);
                needs_span = punct.identity >= Identity::RESPANNED;
            } else {
                buf.write_u8(match punct.spacing() {
                    Spacing::Alone => Bytecode::PUNCT_ALONE,
                    Spacing::Joint => Bytecode::PUNCT_JOINT,
                });
                let ch = punct.as_char();
                assert!(ch.is_ascii());
                buf.write_u8(ch as u8);
                needs_span = !punct.span.is_call_site();
            }
        }
        TokenTree::Literal(literal) => {
            if literal.identity < Identity::NOVEL {
                buf.write_u8(Bytecode::LOAD_LITERAL);
                buf.write_u32(literal.identity & !Identity::RESPANNED);
                needs_span = literal.identity >= Identity::RESPANNED;
            } else {
                buf.write_u8(Bytecode::LITERAL);
                let repr = literal.to_string();
                assert!(repr.len() <= u16::MAX as usize);
                buf.write_u16(repr.len() as u16);
                buf.write_str(&repr);
                needs_span = !literal.span.is_call_site();
            }
        }
    }
    if needs_span {
        buf.write_u8(Bytecode::SET_SPAN);
        linearize_span(token.span(), buf);
    }
}

fn linearize_span(span: Span, buf: &mut OutputBuffer) {
    buf.write_u32(span.lo);
    buf.write_u32(span.hi);
}
precompiled/serde_derive/Cargo.toml
@@ -1,36 +0,0 @@
[package]
name = "serde_derive"
version = "1.0.184"
authors = ["David Tolnay <dtolnay@gmail.com>"]
categories = ["no-std", "no-std::no-alloc"]
description = "Implementation of #[derive(Serialize, Deserialize)]"
documentation = "https://serde.rs/derive.html"
edition = "2015"
homepage = "https://serde.rs"
include = ["serde_derive-x86_64-unknown-linux-gnu", "src", "LICENSE-APACHE", "LICENSE-MIT"]
keywords = ["serde", "serialization", "no_std", "derive"]
license = "MIT OR Apache-2.0"
readme = "crates-io.md"
repository = "https://github.com/serde-rs/serde"
rust-version = "1.56"

[features]
default = []
deserialize_in_place = []

[lib]
proc-macro = true

[target.'cfg(not(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu")))'.dependencies]
proc-macro2 = "1"
quote = "1"
syn = "2.0.28"

[dev-dependencies]
serde = { version = "1", path = "../../serde" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
rustdoc-args = ["--generate-link-to-definition"]

[workspace]
precompiled/serde_derive/LICENSE-APACHE (symlink)
@@ -1 +0,0 @@
../../LICENSE-APACHE

precompiled/serde_derive/LICENSE-MIT (symlink)
@@ -1 +0,0 @@
../../LICENSE-MIT

precompiled/serde_derive/README.md (symlink)
@@ -1 +0,0 @@
../../README.md

precompiled/serde_derive/crates-io.md (symlink)
@@ -1 +0,0 @@
../../crates-io.md

precompiled/serde_derive/src/bound.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/bound.rs

precompiled/serde_derive/src/buffer.rs (symlink)
@@ -1 +0,0 @@
../../proc-macro2/src/watt/buffer.rs

precompiled/serde_derive/src/bytecode.rs (symlink)
@@ -1 +0,0 @@
../../proc-macro2/src/watt/bytecode.rs

precompiled/serde_derive/src/de.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/de.rs

precompiled/serde_derive/src/dummy.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/dummy.rs

precompiled/serde_derive/src/fragment.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/fragment.rs

precompiled/serde_derive/src/internals (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/internals/
precompiled/serde_derive/src/lib.rs
@@ -1,22 +0,0 @@
//! This crate provides Serde's two derive macros.
//!
//! ```edition2021
//! # use serde_derive::{Deserialize, Serialize};
//! #
//! #[derive(Serialize, Deserialize)]
//! # struct S;
//! #
//! # fn main() {}
//! ```
//!
//! Please refer to [https://serde.rs/derive.html] for how to set this up.
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html

#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.184")]

#[cfg(not(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu")))]
include!("lib_from_source.rs");

#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))]
include!("lib_precompiled.rs");
precompiled/serde_derive/src/lib_from_source.rs
@@ -1,35 +0,0 @@
extern crate proc_macro;
extern crate proc_macro2;
extern crate quote;
extern crate syn;

#[macro_use]
mod bound;
#[macro_use]
mod fragment;

mod de;
mod dummy;
mod internals;
mod pretend;
mod ser;
mod this;

use proc_macro::TokenStream;
use syn::{parse_macro_input, DeriveInput};

#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(input as DeriveInput);
    ser::expand_derive_serialize(&mut input)
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(input as DeriveInput);
    de::expand_derive_deserialize(&mut input)
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}
precompiled/serde_derive/src/lib_precompiled.rs
@@ -1,234 +0,0 @@
extern crate proc_macro;

mod buffer;
mod bytecode;

use crate::buffer::{InputBuffer, OutputBuffer};
use crate::bytecode::Bytecode;
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::io::{ErrorKind, Read, Write};
use std::iter::FromIterator;
use std::path::Path;
use std::process::{Command, ExitStatus, Stdio};
use std::str::FromStr;

#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
    derive(0, input)
}

#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
    derive(1 + cfg!(feature = "deserialize_in_place") as u8, input)
}

fn derive(select: u8, input: TokenStream) -> TokenStream {
    let mut memory = TokenMemory::default();
    let mut buf = OutputBuffer::new();
    buf.write_u8(select);

    memory.spans.push(Span::call_site());
    for token in input {
        memory.linearize_token(token, &mut buf);
    }

    let exe_path = Path::new(concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/serde_derive-x86_64-unknown-linux-gnu",
    ));
    let mut child = match Command::new(exe_path)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
    {
        Ok(child) => child,
        Err(io_error) => {
            if io_error.kind() == ErrorKind::NotFound {
                panic!(
                    "file missing from serde_derive manifest directory during macro expansion: {}",
                    exe_path.display(),
                );
            } else {
                panic!("failed to spawn process: {}", io_error);
            }
        }
    };

    let mut stdin = child.stdin.take().unwrap();
    let mut buf = buf.into_bytes();
    stdin.write_all(&buf).unwrap();
    drop(stdin);

    let mut stdout = child.stdout.take().unwrap();
    buf.clear();
    stdout.read_to_end(&mut buf).unwrap();

    let success = child.wait().as_ref().map_or(true, ExitStatus::success);
    if !success || buf.is_empty() {
        panic!();
    }

    let mut buf = InputBuffer::new(&buf);
    memory.receive(&mut buf)
}

#[derive(Default)]
struct TokenMemory {
    spans: Vec<Span>,
    groups: Vec<Group>,
    idents: Vec<Ident>,
    puncts: Vec<Punct>,
    literals: Vec<Literal>,
}

enum Kind {
    Group(Delimiter),
    Ident,
    Punct(Spacing),
    Literal,
}

impl TokenMemory {
    // Depth-first post-order traversal.
    fn linearize_token(&mut self, token: TokenTree, buf: &mut OutputBuffer) {
        match token {
            TokenTree::Group(group) => {
                let mut len = 0usize;
                for token in group.stream() {
                    self.linearize_token(token, buf);
                    len += 1;
                }
                assert!(len <= u32::MAX as usize);
                buf.write_u8(match group.delimiter() {
                    Delimiter::Parenthesis => Bytecode::GROUP_PARENTHESIS,
                    Delimiter::Brace => Bytecode::GROUP_BRACE,
                    Delimiter::Bracket => Bytecode::GROUP_BRACKET,
                    Delimiter::None => Bytecode::GROUP_NONE,
                });
                buf.write_u32(len as u32);
                self.spans
                    .extend([group.span(), group.span_open(), group.span_close()]);
                self.groups.push(group);
            }
            TokenTree::Ident(ident) => {
                buf.write_u8(Bytecode::IDENT);
                let repr = ident.to_string();
                assert!(repr.len() <= u16::MAX as usize);
                buf.write_u16(repr.len() as u16);
                buf.write_str(&repr);
                self.spans.push(ident.span());
                self.idents.push(ident);
            }
            TokenTree::Punct(punct) => {
                buf.write_u8(match punct.spacing() {
                    Spacing::Alone => Bytecode::PUNCT_ALONE,
                    Spacing::Joint => Bytecode::PUNCT_JOINT,
                });
                let ch = punct.as_char();
                assert!(ch.is_ascii());
                buf.write_u8(ch as u8);
                self.spans.push(punct.span());
                self.puncts.push(punct);
            }
            TokenTree::Literal(literal) => {
                buf.write_u8(Bytecode::LITERAL);
                let repr = literal.to_string();
                assert!(repr.len() <= u16::MAX as usize);
                buf.write_u16(repr.len() as u16);
                buf.write_str(&repr);
                self.spans.push(literal.span());
                self.literals.push(literal);
            }
        }
    }

    fn receive(&self, buf: &mut InputBuffer) -> TokenStream {
        let mut trees = Vec::new();
        while !buf.is_empty() {
            match match buf.read_u8() {
                Bytecode::GROUP_PARENTHESIS => Kind::Group(Delimiter::Parenthesis),
                Bytecode::GROUP_BRACE => Kind::Group(Delimiter::Brace),
                Bytecode::GROUP_BRACKET => Kind::Group(Delimiter::Bracket),
                Bytecode::GROUP_NONE => Kind::Group(Delimiter::None),
                Bytecode::IDENT => Kind::Ident,
                Bytecode::PUNCT_ALONE => Kind::Punct(Spacing::Alone),
                Bytecode::PUNCT_JOINT => Kind::Punct(Spacing::Joint),
                Bytecode::LITERAL => Kind::Literal,
                Bytecode::LOAD_GROUP => {
                    let identity = buf.read_u32();
                    let group = self.groups[identity as usize].clone();
                    trees.push(TokenTree::Group(group));
                    continue;
                }
                Bytecode::LOAD_IDENT => {
                    let identity = buf.read_u32();
                    let ident = self.idents[identity as usize].clone();
                    trees.push(TokenTree::Ident(ident));
                    continue;
                }
                Bytecode::LOAD_PUNCT => {
                    let identity = buf.read_u32();
                    let punct = self.puncts[identity as usize].clone();
                    trees.push(TokenTree::Punct(punct));
                    continue;
                }
                Bytecode::LOAD_LITERAL => {
                    let identity = buf.read_u32();
                    let literal = self.literals[identity as usize].clone();
                    trees.push(TokenTree::Literal(literal));
                    continue;
                }
                Bytecode::SET_SPAN => {
                    trees.last_mut().unwrap().set_span(self.read_span(buf));
                    continue;
                }
                _ => unreachable!(),
            } {
                Kind::Group(delimiter) => {
                    let len = buf.read_u32();
                    let stream = trees.drain(trees.len() - len as usize..).collect();
                    let group = Group::new(delimiter, stream);
                    trees.push(TokenTree::Group(group));
                }
                Kind::Ident => {
                    let len = buf.read_u16();
                    let repr = buf.read_str(len as usize);
                    let span = self.read_span(buf);
                    let ident = if let Some(repr) = repr.strip_prefix("r#") {
                        Ident::new_raw(repr, span)
                    } else {
                        Ident::new(repr, span)
                    };
                    trees.push(TokenTree::Ident(ident));
                }
                Kind::Punct(spacing) => {
                    let ch = buf.read_u8();
                    assert!(ch.is_ascii());
                    let punct = Punct::new(ch as char, spacing);
                    trees.push(TokenTree::Punct(punct));
                }
                Kind::Literal => {
                    let len = buf.read_u16();
                    let repr = buf.read_str(len as usize);
                    let literal = Literal::from_str(repr).unwrap();
                    trees.push(TokenTree::Literal(literal));
                }
            }
        }

        TokenStream::from_iter(trees)
    }

    fn read_span(&self, buf: &mut InputBuffer) -> Span {
        let lo = buf.read_u32();
        let hi = buf.read_u32();
        let span = self.spans[lo as usize];
        if lo == hi {
            span
        } else {
            #[cfg(any())] // FIXME
            return span.join(self.spans[hi as usize]).unwrap_or(span);
            span
        }
    }
}
precompiled/serde_derive/src/pretend.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/pretend.rs

precompiled/serde_derive/src/ser.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/ser.rs

precompiled/serde_derive/src/this.rs (symlink)
@@ -1 +0,0 @@
../../../serde_derive/src/this.rs
serde_derive/src/de.rs
@@ -6,8 +6,6 @@ use proc_macro2::{Literal, Span, TokenStream};
 use quote::{quote, quote_spanned, ToTokens};
 use std::collections::BTreeSet;
 use std::ptr;
-#[cfg(precompiled)]
-use std::sync::atomic::Ordering;
 use syn::punctuated::Punctuated;
 use syn::spanned::Spanned;
 use syn::{parse_quote, Ident, Index, Member};
@@ -301,11 +299,6 @@ fn deserialize_body(cont: &Container, params: &Parameters) -> Fragment {

 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option<Stmts> {
-    #[cfg(precompiled)]
-    if !crate::DESERIALIZE_IN_PLACE.load(Ordering::Relaxed) {
-        return None;
-    }
-
     // Only remote derives have getters, and we do not generate
     // deserialize_in_place for remote derives.
     assert!(!params.has_getter);
serde_derive/src/lib.rs
@@ -66,17 +66,11 @@ extern crate proc_macro2;
 extern crate quote;
 extern crate syn;

-#[cfg(not(precompiled))]
 extern crate proc_macro;
-#[cfg(precompiled)]
-extern crate proc_macro2 as proc_macro;

 mod internals;

 use proc_macro::TokenStream;
-#[cfg(precompiled)]
-use std::sync::atomic::AtomicBool;
-#[cfg(not(precompiled))]
 use syn::parse_macro_input;
 use syn::DeriveInput;

@@ -91,20 +85,7 @@ mod pretend;
 mod ser;
 mod this;

-#[cfg(precompiled)]
-macro_rules! parse_macro_input {
-    ($tokenstream:ident as $ty:ty) => {
-        match syn::parse2::<$ty>($tokenstream) {
-            Ok(data) => data,
-            Err(err) => return err.to_compile_error(),
-        }
-    };
-}
-
-#[cfg(precompiled)]
-pub static DESERIALIZE_IN_PLACE: AtomicBool = AtomicBool::new(false);
-
-#[cfg_attr(not(precompiled), proc_macro_derive(Serialize, attributes(serde)))]
+#[proc_macro_derive(Serialize, attributes(serde))]
 pub fn derive_serialize(input: TokenStream) -> TokenStream {
     let mut input = parse_macro_input!(input as DeriveInput);
     ser::expand_derive_serialize(&mut input)
@@ -112,7 +93,7 @@ pub fn derive_serialize(input: TokenStream) -> TokenStream {
         .into()
 }

-#[cfg_attr(not(precompiled), proc_macro_derive(Deserialize, attributes(serde)))]
+#[proc_macro_derive(Deserialize, attributes(serde))]
 pub fn derive_deserialize(input: TokenStream) -> TokenStream {
     let mut input = parse_macro_input!(input as DeriveInput);
     de::expand_derive_deserialize(&mut input)