Format code

pjht 2024-06-18 16:45:30 -05:00
parent 5f82526318
commit 93682cb310
Signed by: pjht
GPG Key ID: 7B5F6AFBEC7EE78E
13 changed files with 119 additions and 294 deletions

View File

@@ -234,7 +234,8 @@ pub fn open(path: &Path, _opts: &OpenOptions) -> io::Result<File> {
     pub fn file_attr(&self) -> io::Result<FileAttr> {
         let size_res: Option<u64> = postcard::from_bytes(
-            &rpc::send_call(self.fs_pid, 1, 3, &postcard::to_allocvec(&self.fd).unwrap()).get_return(),
+            &rpc::send_call(self.fs_pid, 1, 3, &postcard::to_allocvec(&self.fd).unwrap())
+                .get_return(),
         )
         .unwrap();
         let size = size_res.unwrap_or(0);
@@ -377,13 +378,7 @@ pub fn set_times(&self, _times: FileTimes) -> io::Result<()> {

 impl Drop for File {
     fn drop(&mut self) {
-        let _ = &rpc::send_call(
-            self.fs_pid,
-            1,
-            2,
-            &postcard::to_allocvec(&self.fd)
-                .unwrap(),
-        )
-        .get_return();
+        let _ = &rpc::send_call(self.fs_pid, 1, 2, &postcard::to_allocvec(&self.fd).unwrap())
+            .get_return();
     }
 }

View File

@@ -31,8 +31,7 @@ pub enum DecoderState {
 }

 fn add(to: &mut [u8], idx: usize, data: u8) -> Result<(), ()> {
-    *to.get_mut(idx)
-        .ok_or_else(|| ())? = data;
+    *to.get_mut(idx).ok_or_else(|| ())? = data;
     Ok(())
 }
@@ -65,8 +64,8 @@ impl DecoderState {
     /// NOTE: Sentinel value must be included in the input to this function for the
     /// decoding to complete
     pub fn feed(&mut self, data: u8) -> Result<DecodeResult, ()> {
-        use DecoderState::*;
         use DecodeResult::*;
+        use DecoderState::*;
         let (ret, state) = match (&self, data) {
             // Currently Idle, received a terminator, ignore, stay idle
             (Idle, 0x00) => (Ok(NoData), Idle),
@@ -88,36 +87,26 @@ pub fn feed(&mut self, data: u8) -> Result<DecodeResult, ()> {
             // We have reached the end of a data run indicated by an overhead
             // byte, and the next segment of 254 bytes will have no modified
             // sentinel bytes
-            (Grab(0), 0xFF) => {
-                (Ok(DataContinue(0)), GrabChain(0xFE))
-            },
+            (Grab(0), 0xFF) => (Ok(DataContinue(0)), GrabChain(0xFE)),

             // We have reached the end of a data run indicated by an overhead
             // byte, and we will treat this byte as a modified sentinel byte.
             // place the sentinel byte in the output, and begin processing the
             // next non-sentinel sequence
-            (Grab(0), n) => {
-                (Ok(DataContinue(0)), Grab(n - 1))
-            },
+            (Grab(0), n) => (Ok(DataContinue(0)), Grab(n - 1)),

             // We were not expecting the sequence to terminate, but here we are.
             // Report an error due to early terminated message
-            (Grab(_), 0) => {
-                (Err(()), Idle)
-            }
+            (Grab(_), 0) => (Err(()), Idle),

             // We have not yet reached the end of a data run, decrement the run
             // counter, and place the byte into the decoded output
-            (Grab(i), n) => {
-                (Ok(DataContinue(n)), Grab(*i - 1))
-            },
+            (Grab(i), n) => (Ok(DataContinue(n)), Grab(*i - 1)),

             // We have reached the end of a data run indicated by an overhead
             // byte, AND we have recieved the message terminator. This was a
             // well framed message!
-            (GrabChain(0), 0x00) => {
-                (Ok(DataComplete), Idle)
-            }
+            (GrabChain(0), 0x00) => (Ok(DataComplete), Idle),

             // We have reached the end of a data run, and we will begin another
             // data run with an overhead byte expected at the end
@@ -129,15 +118,11 @@ pub fn feed(&mut self, data: u8) -> Result<DecodeResult, ()> {
             // We were not expecting the sequence to terminate, but here we are.
             // Report an error due to early terminated message
-            (GrabChain(_), 0) => {
-                (Err(()), Idle)
-            }
+            (GrabChain(_), 0) => (Err(()), Idle),

             // We have not yet reached the end of a data run, decrement the run
             // counter, and place the byte into the decoded output
-            (GrabChain(i), n) => {
-                (Ok(DataContinue(n)), GrabChain(*i - 1))
-            },
+            (GrabChain(i), n) => (Ok(DataContinue(n)), GrabChain(*i - 1)),
         };

         *self = state;
@@ -146,15 +131,10 @@ pub fn feed(&mut self, data: u8) -> Result<DecodeResult, ()> {
 }

 impl<'a> CobsDecoder<'a> {
     /// Create a new streaming Cobs Decoder. Provide the output buffer
     /// for the decoded message to be placed in
     pub fn new(dest: &'a mut [u8]) -> CobsDecoder<'a> {
-        CobsDecoder {
-            dest,
-            dest_idx: 0,
-            state: DecoderState::Idle,
-        }
+        CobsDecoder { dest, dest_idx: 0, state: DecoderState::Idle }
     }

     /// Push a single byte into the streaming CobsDecoder. Return values mean:
@@ -174,9 +154,7 @@ pub fn feed(&mut self, data: u8) -> Result<Option<usize>, usize> {
                 self.dest_idx += 1;
                 Ok(None)
             }
-            Ok(DecodeResult::DataComplete) => {
-                Ok(Some(self.dest_idx))
-            }
+            Ok(DecodeResult::DataComplete) => Ok(Some(self.dest_idx)),
         }
     }
@@ -252,13 +230,13 @@ macro_rules! decode_raw (
 ///
 /// This will return `Err(())` if there was a decoding error. Otherwise,
 /// it will return `Ok(n)` where `n` is the length of the decoded message.
-pub fn decode(source: &[u8], dest: &mut[u8]) -> Result<usize, ()> {
+pub fn decode(source: &[u8], dest: &mut [u8]) -> Result<usize, ()> {
     let mut dec = CobsDecoder::new(dest);

     // Did we decode a message, using some or all of the buffer?
     match dec.push(source).or(Err(()))? {
         Some((d_used, _s_used)) => return Ok(d_used),
-        None => {},
+        None => {}
     }

     // If we consumed the entire buffer, but did NOT get a message,
@@ -267,7 +245,7 @@ pub fn decode(source: &[u8], dest: &mut[u8]) -> Result<usize, ()> {
     if source.last() != Some(&0) {
         // Explicitly push sentinel of zero
         if let Some((d_used, _s_used)) = dec.push(&[0]).or(Err(()))? {
-            return Ok(d_used)
+            return Ok(d_used);
         }
     }
@@ -292,7 +270,7 @@ pub struct DecodeReport {
 /// This is the same function as `decode_in_place`, but provides a report
 /// of both the number of source bytes consumed as well as the size of the
 /// destination used.
-pub fn decode_in_place_report(buff: &mut[u8]) -> Result<DecodeReport, ()> {
+pub fn decode_in_place_report(buff: &mut [u8]) -> Result<DecodeReport, ()> {
     Ok(decode_raw!(buff, buff))
 }
@@ -303,7 +281,7 @@ pub fn decode_in_place_report(buff: &mut[u8]) -> Result<DecodeReport, ()> {
 ///
 /// The returned `usize` is the number of bytes used for the DECODED value,
 /// NOT the number of source bytes consumed during decoding.
-pub fn decode_in_place(buff: &mut[u8]) -> Result<usize, ()> {
+pub fn decode_in_place(buff: &mut [u8]) -> Result<usize, ()> {
     Ok(decode_raw!(buff, buff).dst_used)
 }
@@ -315,7 +293,7 @@ pub fn decode_in_place(buff: &mut[u8]) -> Result<usize, ()> {
 ///
 /// The returned `usize` is the number of bytes used for the DECODED value,
 /// NOT the number of source bytes consumed during decoding.
-pub fn decode_with_sentinel(source: &[u8], dest: &mut[u8], sentinel: u8) -> Result<usize, ()> {
+pub fn decode_with_sentinel(source: &[u8], dest: &mut [u8], sentinel: u8) -> Result<usize, ()> {
     for (x, y) in source.iter().zip(dest.iter_mut()) {
         *y = *x ^ sentinel;
     }
@@ -326,7 +304,7 @@ pub fn decode_with_sentinel(source: &[u8], dest: &mut[u8], sentinel: u8) -> Resu
 ///
 /// The returned `usize` is the number of bytes used for the DECODED value,
 /// NOT the number of source bytes consumed during decoding.
-pub fn decode_in_place_with_sentinel(buff: &mut[u8], sentinel: u8) -> Result<usize, ()> {
+pub fn decode_in_place_with_sentinel(buff: &mut [u8], sentinel: u8) -> Result<usize, ()> {
     for x in buff.iter_mut() {
         *x ^= sentinel;
     }
@@ -341,7 +319,7 @@ pub fn decode_vec(source: &[u8]) -> Result<Vec<u8>, ()> {
         Ok(n) => {
             decoded.truncate(n);
             Ok(decoded)
-        },
+        }
         Err(()) => Err(()),
     }
 }
@@ -354,7 +332,7 @@ pub fn decode_vec_with_sentinel(source: &[u8], sentinel: u8) -> Result<Vec<u8>,
         Ok(n) => {
             decoded.truncate(n);
             Ok(decoded)
-        },
+        }
         Err(()) => Err(()),
     }
 }
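
For reference, a minimal round trip through the `encode`/`decode` pair reformatted above (a sketch only, not part of this commit; buffer sizing via `max_encoding_length` as used in the crate's own tests):

fn cobs_roundtrip_sketch() -> Result<(), ()> {
    let source = [0x11u8, 0x00, 0x22, 0x33];
    // Worst-case encoded size for a 4-byte frame.
    let mut encoded = vec![0u8; cobs::max_encoding_length(source.len())];
    let enc_len = cobs::encode(&source, &mut encoded);
    // `decode` returns Ok(n), where n is the length of the decoded message.
    let mut decoded = [0u8; 16];
    let dec_len = cobs::decode(&encoded[..enc_len], &mut decoded)?;
    assert_eq!(&decoded[..dec_len], &source[..]);
    Ok(())
}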

View File

@@ -44,17 +44,13 @@ pub enum PushResult {
     /// Then, the last u8 in this tuple should be inserted at the end of the
     /// current output buffer. Finally, a placeholder byte should be inserted at
     /// the current end of the output buffer to be later modified
-    ModifyFromStartAndPushAndSkip((usize, u8, u8))
+    ModifyFromStartAndPushAndSkip((usize, u8, u8)),
 }

 impl Default for EncoderState {
     /// Create a default initial state representation for a COBS encoder
     fn default() -> Self {
-        Self {
-            code_idx: 0,
-            num_bt_sent: 1,
-            offset_idx: 1,
-        }
+        Self { code_idx: 0, num_bt_sent: 1, offset_idx: 1 }
     }
 }
@@ -72,7 +68,11 @@ pub fn push(&mut self, data: u8) -> PushResult {
         self.offset_idx += 1;

         if 0xFF == self.num_bt_sent {
-            let ret = PushResult::ModifyFromStartAndPushAndSkip((self.code_idx, self.num_bt_sent, data));
+            let ret = PushResult::ModifyFromStartAndPushAndSkip((
+                self.code_idx,
+                self.num_bt_sent,
+                data,
+            ));
             self.num_bt_sent = 1;
             self.code_idx += usize::from(self.offset_idx);
             self.offset_idx = 1;
@@ -93,14 +93,9 @@ pub fn finalize(self) -> (usize, u8) {
 }

 impl<'a> CobsEncoder<'a> {
     /// Create a new streaming Cobs Encoder
     pub fn new(out_buf: &'a mut [u8]) -> CobsEncoder<'a> {
-        CobsEncoder {
-            dest: out_buf,
-            dest_idx: 1,
-            state: EncoderState::default(),
-        }
+        CobsEncoder { dest: out_buf, dest_idx: 1, state: EncoderState::default() }
     }

     /// Push a slice of data to be encoded
@@ -111,18 +106,14 @@ pub fn push(&mut self, data: &[u8]) -> Result<(), ()> {
             use PushResult::*;
             match self.state.push(*x) {
                 AddSingle(y) => {
-                    *self.dest.get_mut(self.dest_idx)
-                        .ok_or_else(|| ())? = y;
+                    *self.dest.get_mut(self.dest_idx).ok_or_else(|| ())? = y;
                 }
                 ModifyFromStartAndSkip((idx, mval)) => {
-                    *self.dest.get_mut(idx)
-                        .ok_or_else(|| ())? = mval;
+                    *self.dest.get_mut(idx).ok_or_else(|| ())? = mval;
                 }
                 ModifyFromStartAndPushAndSkip((idx, mval, nval1)) => {
-                    *self.dest.get_mut(idx)
-                        .ok_or_else(|| ())? = mval;
-                    *self.dest.get_mut(self.dest_idx)
-                        .ok_or_else(|| ())? = nval1;
+                    *self.dest.get_mut(idx).ok_or_else(|| ())? = mval;
+                    *self.dest.get_mut(self.dest_idx).ok_or_else(|| ())? = nval1;
                     self.dest_idx += 1;
                 }
             }
@@ -164,7 +155,7 @@ pub fn finalize(self) -> Result<usize, ()> {
 /// This function will panic if the `dest` buffer is not large enough for the
 /// encoded message. You can calculate the size the `dest` buffer needs to be with
 /// the `max_encoding_length` function.
-pub fn encode(source: &[u8], dest: &mut[u8]) -> usize {
+pub fn encode(source: &[u8], dest: &mut [u8]) -> usize {
     let mut enc = CobsEncoder::new(dest);
     enc.push(source).unwrap();
     enc.finalize().unwrap()
@@ -176,7 +167,7 @@ pub fn encode(source: &[u8], dest: &mut[u8]) -> usize {
 /// written to in the `dest` buffer.
 ///
 /// If the destination buffer does not have enough room, an error will be returned
-pub fn try_encode(source: &[u8], dest: &mut[u8]) -> Result<usize, ()> {
+pub fn try_encode(source: &[u8], dest: &mut [u8]) -> Result<usize, ()> {
     let mut enc = CobsEncoder::new(dest);
     enc.push(source)?;
     enc.finalize()
@@ -189,7 +180,7 @@ pub fn try_encode(source: &[u8], dest: &mut[u8]) -> Result<usize, ()> {
 /// of 0, then XOR-ing each byte of the encoded message with the chosen sentinel
 /// value. This will ensure that the sentinel value doesn't show up in the encoded
 /// message. See the paper "Consistent Overhead Byte Stuffing" for details.
-pub fn encode_with_sentinel(source: &[u8], dest: &mut[u8], sentinel: u8) -> usize {
+pub fn encode_with_sentinel(source: &[u8], dest: &mut [u8], sentinel: u8) -> usize {
     let encoded_size = encode(source, dest);
     for x in &mut dest[..encoded_size] {
         *x ^= sentinel;
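
The `encode_with_sentinel` doc above describes encoding against a sentinel of 0 and then XOR-ing every encoded byte with the chosen sentinel. A small round-trip sketch (illustrative only, not part of this commit; buffer sizes are arbitrary):

fn sentinel_roundtrip_sketch() -> Result<(), ()> {
    let source = b"hi there";
    let mut encoded = [0u8; 32];
    // Encode as if the sentinel were 0, then XOR every byte with b'a'.
    let len = cobs::encode_with_sentinel(source, &mut encoded, b'a');
    let mut decoded = [0u8; 32];
    // Decoding with the same sentinel undoes the XOR and the byte stuffing.
    let n = cobs::decode_with_sentinel(&encoded[..len], &mut decoded, b'a')?;
    assert_eq!(&decoded[..n], &source[..]);
    Ok(())
}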

View File

@@ -1,10 +1,10 @@
 extern crate cobs;
 extern crate quickcheck;

+use cobs::{decode, decode_vec, encode, encode_vec, max_encoding_length};
+use cobs::{decode_vec_with_sentinel, encode_vec_with_sentinel};
+use cobs::{CobsDecoder, CobsEncoder};
 use quickcheck::{quickcheck, TestResult};
-use cobs::{max_encoding_length, encode, decode, encode_vec, decode_vec};
-use cobs::{encode_vec_with_sentinel, decode_vec_with_sentinel};
-use cobs::{CobsEncoder, CobsDecoder};

 fn test_pair(source: Vec<u8>, encoded: Vec<u8>) {
     let mut test_encoded = encoded.clone();
@@ -30,9 +30,12 @@ fn test_roundtrip(source: Vec<u8>) {
 #[test]
 fn decode_malforemd() {
-    let malformed_buf: [u8;32] = [68, 69, 65, 68, 66, 69, 69, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
-    let mut dest_buf : [u8;32] = [0;32];
-    if let Err(()) = decode(&malformed_buf, &mut dest_buf){
+    let malformed_buf: [u8; 32] = [
+        68, 69, 65, 68, 66, 69, 69, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        0, 0, 0, 0,
+    ];
+    let mut dest_buf: [u8; 32] = [0; 32];
+    if let Err(()) = decode(&malformed_buf, &mut dest_buf) {
         return;
     } else {
         assert!(false, "invalid test result.");
@@ -42,9 +45,7 @@ fn decode_malforemd() {
 #[test]
 fn stream_roundtrip() {
     for ct in 1..=1000 {
-        let source: Vec<u8> = (ct..2*ct)
-            .map(|x: usize| (x & 0xFF) as u8)
-            .collect();
+        let source: Vec<u8> = (ct..2 * ct).map(|x: usize| (x & 0xFF) as u8).collect();

         let mut dest = vec![0u8; max_encoding_length(source.len())];
@@ -73,7 +74,6 @@ fn stream_roundtrip() {
         assert_eq!(sz_de, source.len());
         assert_eq!(source, decoded);
     }
-
 }

 #[test]
@@ -107,14 +107,14 @@ fn test_encode_4() {
 #[test]
 fn test_roundtrip_1() {
-    test_roundtrip(vec![1,2,3]);
+    test_roundtrip(vec![1, 2, 3]);
 }

 #[test]
 fn test_roundtrip_2() {
     for i in 0..5usize {
         let mut v = Vec::new();
-        for j in 0..252+i {
+        for j in 0..252 + i {
             v.push(j as u8);
         }
         test_roundtrip(v);
@@ -246,18 +246,12 @@ fn issue_15() {
     assert!(max_len < 128);
     let mut buf = [0u8; 128];

-    let len = cobs::encode_with_sentinel(
-        my_string_buf,
-        &mut buf,
-        b'\x00');
+    let len = cobs::encode_with_sentinel(my_string_buf, &mut buf, b'\x00');

     let cobs_buf = &buf[0..len];

     let mut decoded_dest_buf = [0u8; 128];
-    let new_len = cobs::decode_with_sentinel(
-        cobs_buf,
-        &mut decoded_dest_buf,
-        b'\x00').unwrap();
+    let new_len = cobs::decode_with_sentinel(cobs_buf, &mut decoded_dest_buf, b'\x00').unwrap();
     let decoded_buf = &decoded_dest_buf[0..new_len];

     println!("{:?} {:?} {:?}", my_string_buf, cobs_buf, decoded_buf);

View File

@@ -93,10 +93,7 @@ pub enum FeedResult<'a, T> {
 impl<const N: usize> CobsAccumulator<N> {
     /// Create a new accumulator.
     pub const fn new() -> Self {
-        CobsAccumulator {
-            buf: [0; N],
-            idx: 0,
-        }
+        CobsAccumulator { buf: [0; N], idx: 0 }
     }

     /// Appends data to the internal buffer and attempts to deserialize the accumulated data into
@@ -137,10 +134,7 @@ pub fn feed_ref<'de, 'a, T>(&'de mut self, input: &'a [u8]) -> FeedResult<'a, T>
                 self.extend_unchecked(take);

                 let retval = match crate::from_bytes_cobs::<T>(&mut self.buf[..self.idx]) {
-                    Ok(t) => FeedResult::Success {
-                        data: t,
-                        remaining: release,
-                    },
+                    Ok(t) => FeedResult::Success { data: t, remaining: release },
                     Err(_) => FeedResult::DeserError(release),
                 };
                 self.idx = 0;
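
The `feed_ref` hunk above hands back `FeedResult::Success { data, remaining }` once a whole COBS frame deserializes. A usage sketch, not part of this commit: the `Packet` type and the 256-byte window are made up, and the `postcard::accumulator` module path plus serde's derive feature are assumed as in upstream postcard.

use postcard::accumulator::{CobsAccumulator, FeedResult};
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Packet {
    id: u8,
    value: u32,
}

fn drain_sketch(serial_bytes: &[u8]) {
    let mut acc: CobsAccumulator<256> = CobsAccumulator::new();
    let mut window = serial_bytes;
    while !window.is_empty() {
        window = match acc.feed_ref::<Packet>(window) {
            // A whole frame deserialized; keep going with the unread tail.
            FeedResult::Success { data, remaining } => {
                println!("got {:?}", data);
                remaining
            }
            // A frame ended but did not deserialize; skip it and continue.
            FeedResult::DeserError(remaining) => remaining,
            // Input fully consumed (or buffer overfull): stop for this sketch.
            _ => break,
        };
    }
}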

View File

@@ -20,10 +20,7 @@ impl<'de, F> Deserializer<'de, F>
 {
     /// Obtain a Deserializer from a slice of bytes
     pub fn from_flavor(flavor: F) -> Self {
-        Deserializer {
-            flavor,
-            _plt: PhantomData,
-        }
+        Deserializer { flavor, _plt: PhantomData }
     }

     /// Return the remaining (unused) bytes in the Deserializer along with any
@@ -36,10 +33,7 @@ pub fn finalize(self) -> Result<F::Remainder> {
 impl<'de> Deserializer<'de, Slice<'de>> {
     /// Obtain a Deserializer from a slice of bytes
     pub fn from_bytes(input: &'de [u8]) -> Self {
-        Deserializer {
-            flavor: Slice::new(input),
-            _plt: PhantomData,
-        }
+        Deserializer { flavor: Slice::new(input), _plt: PhantomData }
     }
 }
@@ -152,10 +146,7 @@ impl<'a, 'b: 'a, F: Flavor<'b>> serde::de::SeqAccess<'b> for SeqAccess<'a, 'b, F
     fn next_element_seed<V: DeserializeSeed<'b>>(&mut self, seed: V) -> Result<Option<V::Value>> {
         if self.len > 0 {
             self.len -= 1;
-            Ok(Some(DeserializeSeed::deserialize(
-                seed,
-                &mut *self.deserializer,
-            )?))
+            Ok(Some(DeserializeSeed::deserialize(seed, &mut *self.deserializer)?))
         } else {
             Ok(None)
         }
@@ -179,10 +170,7 @@ impl<'a, 'b: 'a, F: Flavor<'b>> serde::de::MapAccess<'b> for MapAccess<'a, 'b, F
     fn next_key_seed<K: DeserializeSeed<'b>>(&mut self, seed: K) -> Result<Option<K::Value>> {
         if self.len > 0 {
             self.len -= 1;
-            Ok(Some(DeserializeSeed::deserialize(
-                seed,
-                &mut *self.deserializer,
-            )?))
+            Ok(Some(DeserializeSeed::deserialize(seed, &mut *self.deserializer)?))
         } else {
             Ok(None)
         }
@@ -448,10 +436,7 @@ fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value>
     {
         let len = self.try_take_varint_usize()?;
-        visitor.visit_seq(SeqAccess {
-            deserializer: self,
-            len,
-        })
+        visitor.visit_seq(SeqAccess { deserializer: self, len })
     }

     #[inline]
@@ -459,10 +444,7 @@ fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value>
     where
         V: Visitor<'de>,
     {
-        visitor.visit_seq(SeqAccess {
-            deserializer: self,
-            len,
-        })
+        visitor.visit_seq(SeqAccess { deserializer: self, len })
     }

     #[inline]
@@ -485,10 +467,7 @@ fn deserialize_map<V>(self, visitor: V) -> Result<V::Value>
     {
         let len = self.try_take_varint_usize()?;
-        visitor.visit_map(MapAccess {
-            deserializer: self,
-            len,
-        })
+        visitor.visit_map(MapAccess { deserializer: self, len })
     }

     #[inline]

View File

@@ -117,11 +117,7 @@ pub struct Slice<'de> {
 impl<'de> Slice<'de> {
     /// Create a new [Slice] from the given buffer
     pub fn new(sli: &'de [u8]) -> Self {
-        Self {
-            cursor: sli.as_ptr(),
-            end: unsafe { sli.as_ptr().add(sli.len()) },
-            _pl: PhantomData,
-        }
+        Self { cursor: sli.as_ptr(), end: unsafe { sli.as_ptr().add(sli.len()) }, _pl: PhantomData }
     }
 }
@@ -227,10 +223,7 @@ impl<'de, T> EIOReader<'de, T>
     T: embedded_io::blocking::Read,
 {
     pub(crate) fn new(reader: T, buff: &'de mut [u8]) -> Self {
-        Self {
-            reader,
-            buff: SlidingBuffer::new(buff),
-        }
+        Self { reader, buff: SlidingBuffer::new(buff) }
     }
 }
@@ -244,18 +237,14 @@ impl<'de, T> Flavor<'de> for EIOReader<'de, T>
     #[inline]
     fn pop(&mut self) -> Result<u8> {
         let mut val = [0; 1];
-        self.reader
-            .read(&mut val)
-            .map_err(|_| Error::DeserializeUnexpectedEnd)?;
+        self.reader.read(&mut val).map_err(|_| Error::DeserializeUnexpectedEnd)?;
         Ok(val[0])
     }

     #[inline]
     fn try_take_n(&mut self, ct: usize) -> Result<&'de [u8]> {
         let buff = self.buff.take_n(ct)?;
-        self.reader
-            .read_exact(buff)
-            .map_err(|_| Error::DeserializeUnexpectedEnd)?;
+        self.reader.read_exact(buff).map_err(|_| Error::DeserializeUnexpectedEnd)?;
         Ok(buff)
     }
@@ -288,10 +277,7 @@ impl<'de, T> IOReader<'de, T>
     T: std::io::Read,
 {
     pub(crate) fn new(reader: T, buff: &'de mut [u8]) -> Self {
-        Self {
-            reader,
-            buff: SlidingBuffer::new(buff),
-        }
+        Self { reader, buff: SlidingBuffer::new(buff) }
     }
 }
@@ -305,18 +291,14 @@ impl<'de, T> Flavor<'de> for IOReader<'de, T>
     #[inline]
     fn pop(&mut self) -> Result<u8> {
         let mut val = [0; 1];
-        self.reader
-            .read(&mut val)
-            .map_err(|_| Error::DeserializeUnexpectedEnd)?;
+        self.reader.read(&mut val).map_err(|_| Error::DeserializeUnexpectedEnd)?;
         Ok(val[0])
     }

     #[inline]
     fn try_take_n(&mut self, ct: usize) -> Result<&'de [u8]> {
         let buff = self.buff.take_n(ct)?;
-        self.reader
-            .read_exact(buff)
-            .map_err(|_| Error::DeserializeUnexpectedEnd)?;
+        self.reader.read_exact(buff).map_err(|_| Error::DeserializeUnexpectedEnd)?;
         Ok(buff)
     }

View File

@@ -76,7 +76,6 @@
 // pub use postcard_derive::Schema;
 // }
 // }
-
 pub use de::deserializer::Deserializer;
 pub use de::flavors as de_flavors;
 pub use de::{from_bytes, from_bytes_cobs, take_from_bytes, take_from_bytes_cobs};
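
The re-exports above (`from_bytes`, `from_bytes_cobs`, `take_from_bytes`, ...) pair with the `to_allocvec` call seen in the std `File` code earlier in this commit. A round-trip sketch, not part of the commit: the `Ping` type is a made-up illustration and the `alloc`/serde-derive features are assumed.

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Ping {
    seq: u16,
    payload: [u8; 4],
}

fn postcard_roundtrip_sketch() -> Result<(), postcard::Error> {
    let ping = Ping { seq: 7, payload: [1, 2, 3, 4] };
    // Varint-compressed, non-self-describing wire format.
    let bytes = postcard::to_allocvec(&ping)?;
    let back: Ping = postcard::from_bytes(&bytes)?;
    assert_eq!(ping, back);
    Ok(())
}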

View File

@@ -225,9 +225,5 @@ const fn varint_size(max_n: usize) -> usize {
 }

 const fn max(lhs: usize, rhs: usize) -> usize {
-    if lhs > rhs {
-        lhs
-    } else {
-        rhs
-    }
+    if lhs > rhs { lhs } else { rhs }
 }

View File

@@ -189,23 +189,15 @@ impl<$($generic: Schema),*> Schema for ($($generic,)*) {
 ]);

 impl<T: Schema> Schema for Option<T> {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "Option<T>",
-        ty: &SdmTy::Option(T::SCHEMA),
-    };
+    const SCHEMA: &'static NamedType =
+        &NamedType { name: "Option<T>", ty: &SdmTy::Option(T::SCHEMA) };
 }

 impl<T: Schema, E: Schema> Schema for Result<T, E> {
     const SCHEMA: &'static NamedType = &NamedType {
         name: "Result<T, E>",
         ty: &SdmTy::Enum(&[
-            &NamedVariant {
-                name: "Ok",
-                ty: &SdmTy::TupleVariant(&[T::SCHEMA]),
-            },
-            &NamedVariant {
-                name: "Err",
-                ty: &SdmTy::TupleVariant(&[E::SCHEMA]),
-            },
+            &NamedVariant { name: "Ok", ty: &SdmTy::TupleVariant(&[T::SCHEMA]) },
+            &NamedVariant { name: "Err", ty: &SdmTy::TupleVariant(&[E::SCHEMA]) },
         ]),
     };
 }
@@ -215,51 +207,36 @@ impl<T: Schema> Schema for &'_ T {
 }

 impl<T: Schema> Schema for [T] {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "&[T]",
-        ty: &SdmTy::Seq(T::SCHEMA),
-    };
+    const SCHEMA: &'static NamedType = &NamedType { name: "&[T]", ty: &SdmTy::Seq(T::SCHEMA) };
 }

 impl<T: Schema, const N: usize> Schema for [T; N] {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "[T; N]",
-        ty: &SdmTy::Tuple(&[T::SCHEMA; N]),
-    };
+    const SCHEMA: &'static NamedType =
+        &NamedType { name: "[T; N]", ty: &SdmTy::Tuple(&[T::SCHEMA; N]) };
 }

 #[cfg(feature = "heapless")]
 #[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))]
 impl<T: Schema, const N: usize> Schema for heapless::Vec<T, N> {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "heapless::Vec<T, N>",
-        ty: &SdmTy::Seq(T::SCHEMA),
-    };
+    const SCHEMA: &'static NamedType =
+        &NamedType { name: "heapless::Vec<T, N>", ty: &SdmTy::Seq(T::SCHEMA) };
 }

 #[cfg(feature = "heapless")]
 #[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))]
 impl<const N: usize> Schema for heapless::String<N> {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "heapless::String<N>",
-        ty: &SdmTy::String,
-    };
+    const SCHEMA: &'static NamedType =
+        &NamedType { name: "heapless::String<N>", ty: &SdmTy::String };
 }

 #[cfg(feature = "use-std")]
 #[cfg_attr(doc_cfg, doc(cfg(any(feature = "alloc", feature = "use-std"))))]
 impl<T: Schema> Schema for std::vec::Vec<T> {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "Vec<T>",
-        ty: &SdmTy::Seq(T::SCHEMA),
-    };
+    const SCHEMA: &'static NamedType = &NamedType { name: "Vec<T>", ty: &SdmTy::Seq(T::SCHEMA) };
 }

 #[cfg(feature = "use-std")]
 #[cfg_attr(doc_cfg, doc(cfg(any(feature = "alloc", feature = "use-std"))))]
 impl Schema for std::string::String {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "String",
-        ty: &SdmTy::String,
-    };
+    const SCHEMA: &'static NamedType = &NamedType { name: "String", ty: &SdmTy::String };
 }

 #[cfg(all(not(feature = "use-std"), feature = "alloc"))]
@@ -267,16 +244,10 @@ impl Schema for std::string::String {
 #[cfg(all(not(feature = "use-std"), feature = "alloc"))]
 impl<T: Schema> Schema for alloc::vec::Vec<T> {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "Vec<T>",
-        ty: &SdmTy::Seq(T::SCHEMA),
-    };
+    const SCHEMA: &'static NamedType = &NamedType { name: "Vec<T>", ty: &SdmTy::Seq(T::SCHEMA) };
 }

 #[cfg(all(not(feature = "use-std"), feature = "alloc"))]
 impl Schema for alloc::string::String {
-    const SCHEMA: &'static NamedType = &NamedType {
-        name: "String",
-        ty: &SdmTy::String,
-    };
+    const SCHEMA: &'static NamedType = &NamedType { name: "String", ty: &SdmTy::String };
 }

View File

@@ -147,12 +147,7 @@ impl<'a> Slice<'a> {
     /// Create a new `Slice` flavor from a given backing buffer
     pub fn new(buf: &'a mut [u8]) -> Self {
         let ptr = buf.as_mut_ptr();
-        Slice {
-            start: ptr,
-            cursor: ptr,
-            end: unsafe { ptr.add(buf.len()) },
-            _pl: PhantomData,
-        }
+        Slice { start: ptr, cursor: ptr, end: unsafe { ptr.add(buf.len()) }, _pl: PhantomData }
     }
 }
@@ -280,24 +275,18 @@ impl<T> Flavor for WriteFlavor<T>
     #[inline(always)]
     fn try_push(&mut self, data: u8) -> Result<()> {
-        self.writer
-            .write_all(&[data])
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.write_all(&[data]).map_err(|_| Error::SerializeBufferFull)?;
         Ok(())
     }

     #[inline(always)]
     fn try_extend(&mut self, b: &[u8]) -> Result<()> {
-        self.writer
-            .write_all(b)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.write_all(b).map_err(|_| Error::SerializeBufferFull)?;
         Ok(())
     }

     fn finalize(mut self) -> Result<Self::Output> {
-        self.writer
-            .flush()
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.flush().map_err(|_| Error::SerializeBufferFull)?;
         Ok(self.writer)
     }
 }
@@ -333,24 +322,18 @@ impl<T> Flavor for WriteFlavor<T>
     #[inline(always)]
     fn try_push(&mut self, data: u8) -> Result<()> {
-        self.writer
-            .write_all(&[data])
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.write_all(&[data]).map_err(|_| Error::SerializeBufferFull)?;
         Ok(())
     }

     #[inline(always)]
     fn try_extend(&mut self, b: &[u8]) -> Result<()> {
-        self.writer
-            .write_all(b)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.write_all(b).map_err(|_| Error::SerializeBufferFull)?;
         Ok(())
     }

     fn finalize(mut self) -> Result<Self::Output> {
-        self.writer
-            .flush()
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.writer.flush().map_err(|_| Error::SerializeBufferFull)?;
         Ok(self.writer)
     }
 }
@@ -389,9 +372,7 @@ impl<const B: usize> Flavor for HVec<B> {
     #[inline(always)]
     fn try_extend(&mut self, data: &[u8]) -> Result<()> {
-        self.vec
-            .extend_from_slice(data)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.vec.extend_from_slice(data).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline(always)]
@@ -430,11 +411,11 @@ mod std_vec {
 #[cfg(feature = "alloc")]
 mod alloc_vec {
     // extern crate alloc;
-    use alloc;
     use super::Flavor;
     use super::Index;
     use super::IndexMut;
     use crate::Result;
+    use alloc;
     use alloc::vec::Vec;

     /// The `AllocVec` flavor is a wrapper type around an [alloc::vec::Vec].
@@ -522,10 +503,7 @@ impl<B> Cobs<B>
     /// to push the leading header byte, the method will return an Error
     pub fn try_new(mut bee: B) -> Result<Self> {
         bee.try_push(0).map_err(|_| Error::SerializeBufferFull)?;
-        Ok(Self {
-            flav: bee,
-            cobs: EncoderState::default(),
-        })
+        Ok(Self { flav: bee, cobs: EncoderState::default() })
     }
 }

View File

@@ -481,10 +481,7 @@ pub fn serialize_with_flavor<T, S, O>(value: &T, storage: S) -> Result<O>
 {
     let mut serializer = Serializer { output: storage };
     value.serialize(&mut serializer)?;
-    serializer
-        .output
-        .finalize()
-        .map_err(|_| Error::SerializeBufferFull)
+    serializer.output.finalize().map_err(|_| Error::SerializeBufferFull)
 }

 // /// Compute the size of the postcard serialization of `T`.

View File

@@ -101,76 +101,62 @@ fn serialize_i8(self, v: i8) -> Result<()> {
     #[inline]
     fn serialize_i16(self, v: i16) -> Result<()> {
         let zzv = zig_zag_i16(v);
-        self.try_push_varint_u16(zzv)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u16(zzv).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_i32(self, v: i32) -> Result<()> {
         let zzv = zig_zag_i32(v);
-        self.try_push_varint_u32(zzv)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u32(zzv).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_i64(self, v: i64) -> Result<()> {
         let zzv = zig_zag_i64(v);
-        self.try_push_varint_u64(zzv)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u64(zzv).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_i128(self, v: i128) -> Result<()> {
         let zzv = zig_zag_i128(v);
-        self.try_push_varint_u128(zzv)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u128(zzv).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_u8(self, v: u8) -> Result<()> {
-        self.output
-            .try_push(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.output.try_push(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_u16(self, v: u16) -> Result<()> {
-        self.try_push_varint_u16(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u16(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_u32(self, v: u32) -> Result<()> {
-        self.try_push_varint_u32(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u32(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_u64(self, v: u64) -> Result<()> {
-        self.try_push_varint_u64(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u64(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_u128(self, v: u128) -> Result<()> {
-        self.try_push_varint_u128(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u128(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_f32(self, v: f32) -> Result<()> {
         let buf = v.to_bits().to_le_bytes();
-        self.output
-            .try_extend(&buf)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.output.try_extend(&buf).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
     fn serialize_f64(self, v: f64) -> Result<()> {
         let buf = v.to_bits().to_le_bytes();
-        self.output
-            .try_extend(&buf)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.output.try_extend(&buf).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
@@ -182,21 +168,15 @@ fn serialize_char(self, v: char) -> Result<()> {
     #[inline]
     fn serialize_str(self, v: &str) -> Result<()> {
-        self.try_push_varint_usize(v.len())
-            .map_err(|_| Error::SerializeBufferFull)?;
-        self.output
-            .try_extend(v.as_bytes())
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.try_push_varint_usize(v.len()).map_err(|_| Error::SerializeBufferFull)?;
+        self.output.try_extend(v.as_bytes()).map_err(|_| Error::SerializeBufferFull)?;
         Ok(())
     }

     #[inline]
     fn serialize_bytes(self, v: &[u8]) -> Result<()> {
-        self.try_push_varint_usize(v.len())
-            .map_err(|_| Error::SerializeBufferFull)?;
-        self.output
-            .try_extend(v)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_usize(v.len()).map_err(|_| Error::SerializeBufferFull)?;
+        self.output.try_extend(v).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
@@ -230,8 +210,7 @@ fn serialize_unit_variant(
         variant_index: u32,
         _variant: &'static str,
     ) -> Result<()> {
-        self.try_push_varint_u32(variant_index)
-            .map_err(|_| Error::SerializeBufferFull)
+        self.try_push_varint_u32(variant_index).map_err(|_| Error::SerializeBufferFull)
     }

     #[inline]
@@ -253,8 +232,7 @@ fn serialize_newtype_variant<T>(
     where
         T: ?Sized + Serialize,
     {
-        self.try_push_varint_u32(variant_index)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.try_push_varint_u32(variant_index).map_err(|_| Error::SerializeBufferFull)?;
         value.serialize(self)
     }
@@ -287,8 +265,7 @@ fn serialize_tuple_variant(
         _variant: &'static str,
         _len: usize,
     ) -> Result<Self::SerializeTupleVariant> {
-        self.try_push_varint_u32(variant_index)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.try_push_varint_u32(variant_index).map_err(|_| Error::SerializeBufferFull)?;
         Ok(self)
     }
@@ -312,8 +289,7 @@ fn serialize_struct_variant(
         _variant: &'static str,
         _len: usize,
     ) -> Result<Self::SerializeStructVariant> {
-        self.try_push_varint_u32(variant_index)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.try_push_varint_u32(variant_index).map_err(|_| Error::SerializeBufferFull)?;
         Ok(self)
     }
@@ -358,8 +334,7 @@ fn write_str(&mut self, s: &str) -> core::result::Result<(), core::fmt::Error> {
         // data that we are given
         write!(&mut ctr, "{}", value).map_err(|_| Error::CollectStrError)?;
         let len = ctr.ct;
-        self.try_push_varint_usize(len)
-            .map_err(|_| Error::SerializeBufferFull)?;
+        self.try_push_varint_usize(len).map_err(|_| Error::SerializeBufferFull)?;

         struct FmtWriter<'a, IF>
         where
@@ -372,16 +347,12 @@ impl<'a, IF> Write for FmtWriter<'a, IF>
             IF: Flavor,
         {
             fn write_str(&mut self, s: &str) -> core::result::Result<(), core::fmt::Error> {
-                self.output
-                    .try_extend(s.as_bytes())
-                    .map_err(|_| core::fmt::Error::default())
+                self.output.try_extend(s.as_bytes()).map_err(|_| core::fmt::Error::default())
             }
         }

         // This second pass actually inserts the data.
-        let mut fw = FmtWriter {
-            output: &mut self.output,
-        };
+        let mut fw = FmtWriter { output: &mut self.output };
         write!(&mut fw, "{}", value).map_err(|_| Error::CollectStrError)?;

         Ok(())
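
The `serialize_i16`/`i32`/`i64`/`i128` bodies above zig-zag the value before varint encoding. The helper bodies are not part of this diff; the conventional mapping, shown here as an illustrative sketch for `i16`, sends small magnitudes (positive or negative) to small unsigned values so they take few varint bytes.

// Conventional zig-zag mapping: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
fn zig_zag_i16_sketch(n: i16) -> u16 {
    ((n << 1) ^ (n >> 15)) as u16
}

fn zig_zag_examples() {
    assert_eq!(zig_zag_i16_sketch(0), 0);
    assert_eq!(zig_zag_i16_sketch(-1), 1);
    assert_eq!(zig_zag_i16_sketch(1), 2);
    assert_eq!(zig_zag_i16_sketch(-2), 3);
}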