Format code

pjht 2024-06-12 10:22:46 -05:00
parent dd38365992
commit 0f099f3223
Signed by: pjht
GPG Key ID: 7B5F6AFBEC7EE78E
100 changed files with 1642 additions and 4158 deletions
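
Note on the hunks below: they are consistent with a mechanical formatting pass (presumably rustfmt, e.g. via `cargo fmt` or the rust-lang `./x.py fmt` wrapper; the exact command is not recorded in this commit). The recurring transformations are: `use` lists merged and sorted alphabetically, short struct literals and expressions collapsed onto one line, and over-long signatures broken with one parameter per line. The single-line struct literals suggest a `use_small_heuristics = "Max"` setting like the one rust-lang/rust ships in its rustfmt.toml, though that is an assumption. A minimal sketch with a hypothetical struct (not taken from this diff) showing the before/after shape:

// Hand-formatted input:
//
//     fn empty() -> Hole {
//         Hole {
//             size: 0,
//             next: None,
//         }
//     }
//
// What rustfmt produces with "Max" small-size heuristics (assumed config):

#[derive(Debug)]
struct Hole {
    size: usize,
    next: Option<usize>,
}

fn empty() -> Hole {
    // Collapsed onto one line, matching the Heap::empty()/HoleList::empty()
    // hunks later in this diff; the default rustfmt configuration would keep
    // the three-line form above.
    Hole { size: 0, next: None }
}

fn main() {
    println!("{:?}", empty());
}
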

View File

@ -1,5 +1,5 @@
use crate::spec::{LinkerFlavor, TargetOptions, RelocModel, Cc, Lld, StackProbeType};
use crate::spec::crt_objects;
use crate::spec::{Cc, LinkerFlavor, Lld, RelocModel, StackProbeType, TargetOptions};
pub fn opts() -> TargetOptions {
TargetOptions {

View File

@ -126,7 +126,6 @@ pub(super) fn pre_mikros() -> CrtObjects {
all("/home/pterpstra/projects/os-rust/crt0.o")
}
pub(super) fn post_mikros() -> CrtObjects {
new(&[])
}

View File

@ -1,4 +1,4 @@
use crate::spec::{Target, PanicStrategy, TargetMetadata, base};
use crate::spec::{base, PanicStrategy, Target, TargetMetadata};
pub fn target() -> Target {
let mut base = base::mikros::opts();
@ -10,7 +10,8 @@ pub fn target() -> Target {
Target {
llvm_target: "x86_64-unknown-none".into(),
pointer_width: 64,
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128".into(),
data_layout:
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128".into(),
arch: "x86_64".into(),
options: base,
metadata: TargetMetadata {
@ -18,6 +19,6 @@ pub fn target() -> Target {
tier: Some(3),
host_tools: None,
std: Some(true),
}
},
}
}

View File

@ -1,17 +1,16 @@
#![stable(feature = "mikros", since = "1.80.0")]
#![allow(exported_private_dependencies)]
use crate::sys::{
buffers::KernelBufferAllocator,
syscalls::{copy_to, drop_space, map_assert_unused, map_only_unused, map_free, new_space, adsp_zero},
};
use alloc::vec::Vec;
use crate::mem;
use crate::sync::Mutex;
use x86_64::{
structures::paging::PageTableFlags,
VirtAddr,
use crate::sys::{
buffers::KernelBufferAllocator,
syscalls::{
adsp_zero, copy_to, drop_space, map_assert_unused, map_free, map_only_unused, new_space,
},
};
use alloc::vec::Vec;
use x86_64::{structures::paging::PageTableFlags, VirtAddr};
#[stable(feature = "mikros", since = "1.80.0")]
pub use x86_64::structures::paging::Page;

View File

@ -5,7 +5,8 @@
use crate::sync::{LazyLock, RwLock};
use crate::sys::{
buffers::KernelBufferAllocator, syscalls::{get_pid, ipc_recv, ipc_send, wait_for_ipc_message}
buffers::KernelBufferAllocator,
syscalls::{get_pid, ipc_recv, ipc_send, wait_for_ipc_message},
};
use crate::collections::HashMap;

View File

@ -2,7 +2,7 @@
use core::sync::atomic::{AtomicU64, Ordering};
use crate::sync::{Mutex, RwLock, LazyLock};
use crate::sync::{LazyLock, Mutex, RwLock};
use crate::collections::HashMap;
@ -11,7 +11,8 @@
static CALLBACKS: LazyLock<RwLock<HashMap<u16, MessageCallback>>> =
LazyLock::new(|| RwLock::new(HashMap::new()));
static RETURNS: LazyLock<Mutex<HashMap<CallId, Vec<u8>>>> = LazyLock::new(|| Mutex::new(HashMap::new()));
static RETURNS: LazyLock<Mutex<HashMap<CallId, Vec<u8>>>> =
LazyLock::new(|| Mutex::new(HashMap::new()));
#[stable(feature = "mikros", since = "1.80.0")]
#[derive(Copy, Clone, Eq, Hash, PartialEq)]
@ -31,13 +32,8 @@ pub struct IncomingCall {
impl IncomingCall {
#[stable(feature = "mikros", since = "1.80.0")]
pub fn send_return(self, data: &[u8]) {
let msg = Message {
typ: MessageType::Return,
call_id: self.call_id,
proto: 0,
func: 0,
data,
};
let msg =
Message { typ: MessageType::Return, call_id: self.call_id, proto: 0, func: 0, data };
let mut buf = Vec::new();
msg.serialize(&mut buf);
super::send_msg(self.from, 0, &buf);
@ -75,7 +71,7 @@ fn try_from(val: u8) -> Result<Self, Self::Error> {
match val {
0 => Ok(MessageType::Call),
1 => Ok(MessageType::Return),
_ => Err(val)
_ => Err(val),
}
}
}
@ -121,13 +117,7 @@ pub fn unregister_callback(proto: u16) {
#[stable(feature = "mikros", since = "1.80.0")]
pub fn send_call(dst: u64, proto: u16, func: u16, data: &[u8]) -> CallId {
let call_id = CallId(NEXT_ID.fetch_add(1, Ordering::Relaxed));
let msg = Message {
typ: MessageType::Call,
call_id,
proto,
func,
data,
};
let msg = Message { typ: MessageType::Call, call_id, proto, func, data };
let mut buf = Vec::new();
msg.serialize(&mut buf);
super::send_msg(dst, 0, &buf);

View File

@ -1,6 +1,13 @@
#![stable(feature = "mikros", since = "1.80.0")]
use elf::{abi::{PT_DYNAMIC, PT_GNU_EH_FRAME, PT_GNU_RELRO, PT_GNU_STACK, PT_LOAD, PT_NULL, PT_PHDR, R_X86_64_RELATIVE, SHT_REL, SHT_RELA}, endian::AnyEndian, ElfBytes};
use elf::{
abi::{
PT_DYNAMIC, PT_GNU_EH_FRAME, PT_GNU_RELRO, PT_GNU_STACK, PT_LOAD, PT_NULL, PT_PHDR,
R_X86_64_RELATIVE, SHT_REL, SHT_RELA,
},
endian::AnyEndian,
ElfBytes,
};
use x86_64::{structures::paging::Page, VirtAddr};
use super::address_space::AddressSpace;
@ -18,14 +25,14 @@ pub fn load(binary_raw: &[u8]) -> (AddressSpace, *const ()) {
for mut pheader in binary.segments().unwrap().iter() {
match pheader.p_type {
PT_NULL => (),
PT_LOAD => {
PT_LOAD => {
if pheader.p_vaddr < 0x1000 {
if pheader.p_memsz < 0x1000 {
continue;
}
pheader.p_offset += 0x1000-pheader.p_vaddr;
pheader.p_memsz -= 0x1000-pheader.p_vaddr;
pheader.p_filesz -= 0x1000-pheader.p_vaddr;
pheader.p_offset += 0x1000 - pheader.p_vaddr;
pheader.p_memsz -= 0x1000 - pheader.p_vaddr;
pheader.p_filesz -= 0x1000 - pheader.p_vaddr;
pheader.p_vaddr = 0x1000;
}
let start_page = Page::containing_address(VirtAddr::new(pheader.p_vaddr));
@ -40,15 +47,30 @@ pub fn load(binary_raw: &[u8]) -> (AddressSpace, *const ()) {
addr_space
.map_only_unused(start_page, num_pages)
.expect("Unable to map region");
addr_space.copy_to(ptr::with_exposed_provenance_mut(pheader.p_vaddr as usize), &binary_raw[(pheader.p_offset as usize)..((pheader.p_offset+pheader.p_filesz) as usize)]).unwrap();
addr_space.zero(ptr::with_exposed_provenance_mut((pheader.p_vaddr + pheader.p_filesz) as usize), (pheader.p_memsz - pheader.p_filesz) as usize).unwrap();
addr_space
.copy_to(
ptr::with_exposed_provenance_mut(pheader.p_vaddr as usize),
&binary_raw[(pheader.p_offset as usize)
..((pheader.p_offset + pheader.p_filesz) as usize)],
)
.unwrap();
addr_space
.zero(
ptr::with_exposed_provenance_mut(
(pheader.p_vaddr + pheader.p_filesz) as usize,
),
(pheader.p_memsz - pheader.p_filesz) as usize,
)
.unwrap();
}
PT_GNU_RELRO => (),
PT_GNU_EH_FRAME => (),
PT_GNU_STACK => (),
PT_DYNAMIC => (),
PT_PHDR => (),
_ => println!("Warning: Unimplemented ELF program header type {:#x}", pheader.p_type),
_ => {
println!("Warning: Unimplemented ELF program header type {:#x}", pheader.p_type)
}
}
}
for section in binary.section_headers().unwrap().iter() {
@ -63,7 +85,12 @@ pub fn load(binary_raw: &[u8]) -> (AddressSpace, *const ()) {
for rela in binary.section_data_as_relas(&section).unwrap() {
match rela.r_type {
R_X86_64_RELATIVE => {
addr_space.copy_to(ptr::with_exposed_provenance_mut(rela.r_offset as usize), (rela.r_addend as u64).to_ne_bytes().as_ref()).unwrap();
addr_space
.copy_to(
ptr::with_exposed_provenance_mut(rela.r_offset as usize),
(rela.r_addend as u64).to_ne_bytes().as_ref(),
)
.unwrap();
}
_ => unimplemented!("ELF relocation type {}", rela.r_type),
}

View File

@ -2,11 +2,11 @@
#![stable(feature = "mikros", since = "1.80.0")]
pub mod ipc;
pub mod address_space;
pub mod ffi;
pub mod ipc;
pub mod loader;
pub mod syscalls;
pub mod ffi;
/// A prelude for conveniently writing platform-specific code.
///

View File

@ -1,8 +1,8 @@
#![stable(feature = "mikros", since = "1.80.0")]
#![allow(clippy::result_unit_err)]
use crate::sys::syscalls;
use crate::os::mikros::address_space::AddressSpace;
use crate::sys::syscalls;
#[must_use]
#[stable(feature = "mikros", since = "1.80.0")]
@ -12,7 +12,7 @@ pub fn get_initrd() -> &'static [u8] {
#[stable(feature = "mikros", since = "1.80.0")]
pub fn new_process(entry_point: u64, space: AddressSpace) -> Result<u64, ()> {
syscalls::new_process(entry_point, space)
syscalls::new_process(entry_point, space)
}
#[stable(feature = "mikros", since = "1.80.0")]
@ -23,7 +23,7 @@ pub fn register(typ: u64) {
#[stable(feature = "mikros", since = "1.80.0")]
#[must_use]
pub fn try_get_registered(typ: u64) -> Option<u64> {
syscalls::try_get_registered(typ)
syscalls::try_get_registered(typ)
}
#[stable(feature = "mikros", since = "1.80.0")]

View File

@ -2212,7 +2212,9 @@ pub fn is_absolute(&self) -> bool {
// FIXME: Allow Redox prefixes
self.has_root() || has_redox_scheme(self.as_u8_slice())
} else {
self.has_root() && (cfg!(any(unix, target_os = "wasi", target_os = "mikros")) || self.prefix().is_some())
self.has_root()
&& (cfg!(any(unix, target_os = "wasi", target_os = "mikros"))
|| self.prefix().is_some())
}
}

View File

@ -1,12 +1,12 @@
mod linked_list_allocator;
use crate::os::mikros::address_space;
use crate::{
alloc::{GlobalAlloc, Layout, System},
ptr::NonNull,
sync::Mutex,
ptr::NonNull
};
use linked_list_allocator::hole::HoleList;
use crate::os::mikros::address_space;
struct Wrap(Mutex<HoleList>);
@ -50,10 +50,7 @@ unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
#[inline]
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
unsafe {
let _ = HEAP.0
.lock()
.unwrap()
.deallocate(NonNull::new_unchecked(ptr), layout);
let _ = HEAP.0.lock().unwrap().deallocate(NonNull::new_unchecked(ptr), layout);
}
}
}

View File

@ -13,16 +13,12 @@ pub struct Heap {
holes: HoleList,
}
unsafe impl Send for Heap {}
impl Heap {
/// Creates an empty heap. All allocate calls will return `None`.
pub const fn empty() -> Heap {
Heap {
used: 0,
holes: HoleList::empty(),
}
Heap { used: 0, holes: HoleList::empty() }
}
/// Initializes an empty heap
@ -82,10 +78,7 @@ pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
/// store the required metadata. Depending on the alignment of the slice, the minimum
/// size is between `2 * size_of::<usize>` and `3 * size_of::<usize>`.
pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
assert!(
self.bottom().is_null(),
"The heap has already been initialized."
);
assert!(self.bottom().is_null(), "The heap has already been initialized.");
let size = mem.len();
let address = mem.as_mut_ptr().cast();
// SAFETY: All initialization requires the bottom address to be valid, which implies it
@ -120,12 +113,7 @@ pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
///
/// The provided memory range must be valid for the `'static` lifetime.
pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
unsafe {
Heap {
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
}
}
unsafe { Heap { used: 0, holes: HoleList::new(heap_bottom, heap_size) } }
}
/// Creates a new heap from a slice of raw memory.
@ -238,7 +226,6 @@ pub unsafe fn extend(&mut self, by: usize) {
}
}
/// Align downwards. Returns the greatest x with alignment `align`
/// so that x <= addr. The alignment must be a power of 2.
pub fn align_down_size(size: usize, align: usize) -> usize {

View File

@ -4,7 +4,7 @@
use core::ptr::null_mut;
use core::ptr::NonNull;
use super::{align_up, align_down_size, align_up_size};
use super::{align_down_size, align_up, align_up_size};
/// A sorted list of holes. It uses the holes themselves to store its nodes.
pub struct HoleList {
@ -134,10 +134,7 @@ fn split_current(self, required_layout: Layout) -> Result<(*mut u8, usize), Self
// Will the proposed new back padding actually fit in the old hole slot?
if back_padding_end <= hole_end {
// Yes, it does! Place a back padding node
Some(HoleInfo {
addr: back_padding_start,
size: back_padding_size,
})
Some(HoleInfo { addr: back_padding_start, size: back_padding_size })
} else {
// No, it does not. We don't want to leak any heap bytes, so we
// consider this hole unsuitable for the requested allocation.
@ -149,9 +146,7 @@ fn split_current(self, required_layout: Layout) -> Result<(*mut u8, usize), Self
////////////////////////////////////////////////////////////////////////////
// This is where we actually perform surgery on the linked list.
////////////////////////////////////////////////////////////////////////////
let Cursor {
mut prev, mut hole, ..
} = self;
let Cursor { mut prev, mut hole, .. } = self;
// Remove the current location from the previous node
unsafe {
prev.as_mut().next = None;
@ -257,10 +252,7 @@ impl HoleList {
/// Creates an empty `HoleList`.
pub const fn empty() -> HoleList {
HoleList {
first: Hole {
size: 0,
next: None,
},
first: Hole { size: 0, next: None },
bottom: null_mut(),
top: null_mut(),
pending_extend: 0,
@ -269,11 +261,7 @@ pub const fn empty() -> HoleList {
pub(crate) fn cursor(&mut self) -> Option<Cursor> {
if let Some(hole) = self.first.next {
Some(Cursor {
hole,
prev: NonNull::new(&mut self.first)?,
top: self.top,
})
Some(Cursor { hole, prev: NonNull::new(&mut self.first)?, top: self.top })
} else {
None
}
@ -305,15 +293,13 @@ pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
assert!(hole_size >= size_of::<Hole>());
let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
let requested_hole_size = hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
let requested_hole_size =
hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
let aligned_hole_size = align_down_size(requested_hole_size, align_of::<Hole>());
assert!(aligned_hole_size >= size_of::<Hole>());
let ptr = aligned_hole_addr as *mut Hole;
ptr.write(Hole {
size: aligned_hole_size,
next: None,
});
ptr.write(Hole { size: aligned_hole_size, next: None });
assert_eq!(
hole_addr.wrapping_add(hole_size),
@ -321,10 +307,7 @@ pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
);
HoleList {
first: Hole {
size: 0,
next: Some(NonNull::new_unchecked(ptr)),
},
first: Hole { size: 0, next: Some(NonNull::new_unchecked(ptr)) },
bottom: aligned_hole_addr,
top: aligned_hole_addr.wrapping_add(aligned_hole_size),
pending_extend: (requested_hole_size - aligned_hole_size) as u8,
@ -448,11 +431,7 @@ pub(crate) unsafe fn extend(&mut self, by: usize) {
unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull<Hole> {
unsafe {
let hole_addr = addr.cast::<Hole>();
debug_assert_eq!(
addr as usize % align_of::<Hole>(),
0,
"Hole address not aligned!",
);
debug_assert_eq!(addr as usize % align_of::<Hole>(), 0, "Hole address not aligned!",);
hole_addr.write(Hole { size, next: None });
NonNull::new_unchecked(hole_addr)
}
@ -473,21 +452,13 @@ fn try_insert_back(self, node: NonNull<Hole>, bottom: *mut u8) -> Result<Self, S
);
debug_assert_eq!(self.previous().size, 0);
let Cursor {
mut prev,
hole,
top,
} = self;
let Cursor { mut prev, hole, top } = self;
unsafe {
let mut node = check_merge_bottom(node, bottom);
prev.as_mut().next = Some(node);
node.as_mut().next = Some(hole);
}
Ok(Cursor {
prev,
hole: node,
top,
})
Ok(Cursor { prev, hole: node, top })
} else {
Err(self)
}
@ -539,12 +510,7 @@ fn try_insert_after(&mut self, mut node: NonNull<Hole>) -> Result<(), ()> {
// Merge the current node with up to n following nodes
fn try_merge_next_n(self, max: usize) {
let Cursor {
prev: _,
mut hole,
top,
..
} = self;
let Cursor { prev: _, mut hole, top, .. } = self;
for _ in 0..max {
// Is there a next node?
@ -631,9 +597,8 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
Err(mut cursor) => {
// Nope. It lives somewhere else. Advance the list until we find its home
while let Err(()) = cursor.try_insert_after(hole) {
cursor = cursor
.next()
.expect("Reached end of holes without finding deallocation hole!");
cursor =
cursor.next().expect("Reached end of holes without finding deallocation hole!");
}
// Great! We found a home for it, our cursor is now JUST BEFORE the new
// node we inserted, so we need to try to merge up to twice: One to combine

View File

@ -75,11 +75,8 @@ unsafe fn grow(
assert!(new_layout.align() <= 4096);
if new_layout.size() <= self.0.lock().unwrap().unwrap().1 {
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(
ptr.as_ptr(),
new_layout.size(),
))
.unwrap())
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(ptr.as_ptr(), new_layout.size()))
.unwrap())
} else {
let (new_ptr, new_id, new_true_size) = allocate_buf_from_layout(new_layout)?;
@ -112,19 +109,13 @@ unsafe fn grow_zeroed(
assert!(new_layout.align() <= 4096);
if new_layout.size() <= self.0.lock().unwrap().unwrap().1 {
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(
ptr.as_ptr(),
new_layout.size(),
))
.unwrap())
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(ptr.as_ptr(), new_layout.size()))
.unwrap())
} else {
let (new_ptr, new_id, new_true_size) = allocate_buf_from_layout(new_layout)?;
// SAFETY: `allocate_buf_from_layout` returns a valid memory block
unsafe {
new_ptr
.as_non_null_ptr()
.as_ptr()
.write_bytes(0, new_ptr.len());
new_ptr.as_non_null_ptr().as_ptr().write_bytes(0, new_ptr.len());
}
// SAFETY: because `new_layout.size()` must be greater than or equal to
@ -155,11 +146,7 @@ unsafe fn shrink(
);
assert!(new_layout.align() <= 4096);
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(
ptr.as_ptr(),
new_layout.size(),
))
.unwrap())
Ok(NonNull::new(ptr::slice_from_raw_parts_mut(ptr.as_ptr(), new_layout.size())).unwrap())
}
}

View File

@ -1,12 +1,12 @@
use super::rmp_serde;
use crate::ffi::OsString;
use crate::fmt;
use crate::hash::{Hash, Hasher};
use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom};
use crate::os::mikros::{ipc::rpc, syscalls};
use crate::path::{Path, PathBuf};
use crate::sys::time::SystemTime;
use crate::sys::unsupported;
use crate::os::mikros::{syscalls, ipc::rpc};
use super::rmp_serde;
use serde::{Serialize, Serializer};
@ -90,9 +90,7 @@ pub fn created(&self) -> io::Result<SystemTime> {
impl Clone for FileAttr {
fn clone(&self) -> FileAttr {
Self {
size: self.size
}
Self { size: self.size }
}
}
@ -229,23 +227,18 @@ pub fn open(path: &Path, _opts: &OpenOptions) -> io::Result<File> {
&rpc::send_call(vfs_pid, 2, 2, &rmp_serde::to_vec(path).unwrap()).get_return(),
)
.unwrap();
let (fs_pid, fd) = open_res.map_err(|_| io::Error::new(io::ErrorKind::NotFound, "No such file"))?;
Ok(Self {
fs_pid,
fd,
pos: AtomicUsize::new(0),
})
let (fs_pid, fd) =
open_res.map_err(|_| io::Error::new(io::ErrorKind::NotFound, "No such file"))?;
Ok(Self { fs_pid, fd, pos: AtomicUsize::new(0) })
}
pub fn file_attr(&self) -> io::Result<FileAttr> {
let size_res: Option<u64> = rmp_serde::from_slice(
&rpc::send_call(self.fs_pid, 1, 3, &rmp_serde::to_vec(&self.fd).unwrap()).get_return()
&rpc::send_call(self.fs_pid, 1, 3, &rmp_serde::to_vec(&self.fd).unwrap()).get_return(),
)
.unwrap();
let size = size_res.unwrap_or(0);
Ok(FileAttr {
size
})
Ok(FileAttr { size })
}
pub fn fsync(&self) -> io::Result<()> {
@ -262,7 +255,18 @@ pub fn truncate(&self, _size: u64) -> io::Result<()> {
pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
let read_res: Result<Vec<u8>, ()> = rmp_serde::from_slice(
&rpc::send_call(self.fs_pid, 1, 0, &rmp_serde::to_vec(&(self.fd, self.pos.load(atomic::Ordering::Relaxed) as u64, buf.len())).unwrap()).get_return()
&rpc::send_call(
self.fs_pid,
1,
0,
&rmp_serde::to_vec(&(
self.fd,
self.pos.load(atomic::Ordering::Relaxed) as u64,
buf.len(),
))
.unwrap(),
)
.get_return(),
)
.unwrap();
let read_data = read_res.unwrap();
@ -282,7 +286,18 @@ pub fn is_read_vectored(&self) -> bool {
pub fn read_buf(&self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> {
let read_res: Result<Vec<u8>, ()> = rmp_serde::from_slice(
&rpc::send_call(self.fs_pid, 1, 0, &rmp_serde::to_vec(&(self.fd, self.pos.load(atomic::Ordering::Relaxed) as u64, cursor.capacity())).unwrap()).get_return()
&rpc::send_call(
self.fs_pid,
1,
0,
&rmp_serde::to_vec(&(
self.fd,
self.pos.load(atomic::Ordering::Relaxed) as u64,
cursor.capacity(),
))
.unwrap(),
)
.get_return(),
)
.unwrap();
let read_data = read_res.unwrap();
@ -294,7 +309,18 @@ pub fn read_buf(&self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> {
pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
let write_res: Result<(), ()> = rmp_serde::from_slice(
&rpc::send_call(self.fs_pid, 1, 1, &rmp_serde::to_vec(&(self.fd, self.pos.load(atomic::Ordering::Relaxed) as u64, buf)).unwrap()).get_return(),
&rpc::send_call(
self.fs_pid,
1,
1,
&rmp_serde::to_vec(&(
self.fd,
self.pos.load(atomic::Ordering::Relaxed) as u64,
buf,
))
.unwrap(),
)
.get_return(),
)
.unwrap();
write_res.unwrap();
@ -323,7 +349,10 @@ pub fn seek(&self, pos: SeekFrom) -> io::Result<u64> {
} else {
let offset = (-offset) as u64;
if offset > (self.pos.load(atomic::Ordering::Relaxed) as u64) {
return Err(io::Error::new(io::ErrorKind::InvalidInput, "tried to seek before byte 0"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"tried to seek before byte 0",
));
}
self.pos.fetch_sub(offset as usize, atomic::Ordering::Relaxed);
}

View File

@ -1,4 +1,3 @@
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(dead_code)]
#![allow(exported_private_dependencies)]
@ -9,8 +8,8 @@
#[unstable(feature = "rmp", issue = "none")]
mod rmp_serde;
pub mod syscalls;
pub mod buffers;
pub mod syscalls;
pub mod alloc;
pub mod args;

View File

@ -1,11 +1,11 @@
use super::unsupported;
use crate::arch::asm;
use crate::error::Error as StdError;
use crate::ffi::{OsStr, OsString};
use crate::fmt;
use crate::io;
use crate::marker::PhantomData;
use crate::path::{self, PathBuf};
use crate::arch::asm;
pub fn errno() -> i32 {
0

View File

@ -108,16 +108,7 @@ pub fn spawn(
let binary = crate::fs::read(path).unwrap();
let (space, entry) = crate::os::mikros::loader::Loader::load(&binary);
super::syscalls::new_process(entry as _, space).unwrap();
Ok((
Process {
dummy: ()
},
StdioPipes {
stdin: None,
stdout: None,
stderr: None,
}
))
Ok((Process { dummy: () }, StdioPipes { stdin: None, stdout: None, stderr: None }))
}
pub fn output(&mut self) -> io::Result<(ExitStatus, Vec<u8>, Vec<u8>)> {
@ -291,7 +282,7 @@ fn from(code: u8) -> Self {
}
pub struct Process {
dummy: ()
dummy: (),
}
impl Process {

View File

@ -1,7 +1,7 @@
//! Implementation of the [Bytes] type
use super::RmpRead;
use super::super::decode::RmpReadErr;
use super::RmpRead;
use core::fmt::{Display, Formatter};
/// Indicates that an error occurred reading from [Bytes]
@ -10,18 +10,17 @@
// NOTE: We can't use thiserror because of no_std :(
pub enum BytesReadError {
/// Indicates that there were not enough bytes.
InsufficientBytes {
expected: usize,
actual: usize,
position: u64,
},
InsufficientBytes { expected: usize, actual: usize, position: u64 },
}
impl Display for BytesReadError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match *self {
BytesReadError::InsufficientBytes { expected, actual, position } => {
write!(f, "Expected at least bytes {expected}, but only got {actual} (pos {position})")
write!(
f,
"Expected at least bytes {expected}, but only got {actual} (pos {position})"
)
}
}
}

View File

@ -1,5 +1,5 @@
use super::{read_marker, RmpRead, ValueReadError};
use super::super::Marker;
use super::{read_marker, RmpRead, ValueReadError};
/// Attempts to read exactly 5 bytes from the given reader and to decode them as `f32` value.
///

View File

@ -1,5 +1,5 @@
use super::{read_marker, RmpRead, ValueReadError};
use super::super::Marker;
use super::{read_marker, RmpRead, ValueReadError};
/// Attempts to read exactly 3 bytes from the given reader and interpret them as a fixext1 type
/// with data attached.
@ -121,7 +121,10 @@ pub fn read_fixext1<R: RmpRead>(rd: &mut R) -> Result<(i8, u8), ValueReadError<R
}
}
fn read_fixext_data<R: RmpRead>(rd: &mut R, buf: &mut [u8]) -> Result<i8, ValueReadError<R::Error>> {
fn read_fixext_data<R: RmpRead>(
rd: &mut R,
buf: &mut [u8],
) -> Result<i8, ValueReadError<R::Error>> {
let id = rd.read_data_i8()?;
match rd.read_exact_buf(buf) {
Ok(()) => Ok(id),

View File

@ -25,8 +25,8 @@
pub use self::str::{read_str, read_str_from_slice, read_str_len, read_str_ref, DecodeStringError};
pub use self::uint::{read_pfix, read_u16, read_u32, read_u64, read_u8};
use core::fmt::{self, Debug, Display, Formatter};
use crate::error;
use core::fmt::{self, Debug, Display, Formatter};
use num_traits::cast::FromPrimitive;
@ -190,8 +190,8 @@ impl error::Error for ValueReadError {
#[cold]
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
ValueReadError::InvalidMarkerRead(ref err) |
ValueReadError::InvalidDataRead(ref err) => Some(err),
ValueReadError::InvalidMarkerRead(ref err)
| ValueReadError::InvalidDataRead(ref err) => Some(err),
ValueReadError::TypeMismatch(..) => None,
}
}
@ -298,10 +298,9 @@ pub enum NumValueReadError<E: RmpReadErr = Error> {
impl error::Error for NumValueReadError {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
NumValueReadError::InvalidMarkerRead(ref err) |
NumValueReadError::InvalidDataRead(ref err) => Some(err),
NumValueReadError::TypeMismatch(..) |
NumValueReadError::OutOfRange => None,
NumValueReadError::InvalidMarkerRead(ref err)
| NumValueReadError::InvalidDataRead(ref err) => Some(err),
NumValueReadError::TypeMismatch(..) | NumValueReadError::OutOfRange => None,
}
}
}
@ -369,7 +368,9 @@ fn from(err: ValueReadError<E>) -> NumValueReadError<E> {
/// assert_eq!(300usize, rmp::decode::read_int(&mut &buf[..]).unwrap());
/// assert_eq!(300isize, rmp::decode::read_int(&mut &buf[..]).unwrap());
/// ```
pub fn read_int<T: FromPrimitive, R: RmpRead>(rd: &mut R) -> Result<T, NumValueReadError<R::Error>> {
pub fn read_int<T: FromPrimitive, R: RmpRead>(
rd: &mut R,
) -> Result<T, NumValueReadError<R::Error>> {
let val = match read_marker(rd)? {
Marker::FixPos(val) => T::from_u8(val),
Marker::FixNeg(val) => T::from_i8(val),
@ -427,7 +428,10 @@ pub fn read_map_len<R: RmpRead>(rd: &mut R) -> Result<u32, ValueReadError<R::Err
marker_to_len(rd, marker)
}
pub fn marker_to_len<R: RmpRead>(rd: &mut R, marker: Marker) -> Result<u32, ValueReadError<R::Error>> {
pub fn marker_to_len<R: RmpRead>(
rd: &mut R,
marker: Marker,
) -> Result<u32, ValueReadError<R::Error>> {
match marker {
Marker::FixMap(size) => Ok(u32::from(size)),
Marker::Map16 => Ok(u32::from(rd.read_data_u16()?)),

View File

@ -1,5 +1,5 @@
use super::{read_marker, RmpRead, ValueReadError};
use super::super::Marker;
use super::{read_marker, RmpRead, ValueReadError};
/// Attempts to read a single byte from the given reader and to decode it as a negative fixnum
/// value.

View File

@ -2,8 +2,8 @@
use core::fmt::{self, Display, Formatter};
use core::str::{from_utf8, Utf8Error};
use super::{read_marker, RmpRead, RmpReadErr, ValueReadError};
use super::super::Marker;
use super::{read_marker, RmpRead, RmpReadErr, ValueReadError};
#[derive(Debug)]
#[allow(deprecated)] // Only for compatibility
@ -20,10 +20,9 @@ impl<'a, E: RmpReadErr> error::Error for DecodeStringError<'a, E> {
#[cold]
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
DecodeStringError::InvalidMarkerRead(ref err) |
DecodeStringError::InvalidDataRead(ref err) => Some(err),
DecodeStringError::TypeMismatch(..) |
DecodeStringError::BufferSizeTooSmall(..) => None,
DecodeStringError::InvalidMarkerRead(ref err)
| DecodeStringError::InvalidDataRead(ref err) => Some(err),
DecodeStringError::TypeMismatch(..) | DecodeStringError::BufferSizeTooSmall(..) => None,
DecodeStringError::InvalidUtf8(_, ref err) => Some(err),
}
}
@ -66,7 +65,8 @@ pub fn read_str_len<R: RmpRead>(rd: &mut R) -> Result<u32, ValueReadError<R::Err
}
fn read_str_len_with_nread<R>(rd: &mut R) -> Result<(u32, usize), ValueReadError<R::Error>>
where R: RmpRead
where
R: RmpRead,
{
match read_marker(rd)? {
Marker::FixStr(size) => Ok((u32::from(size), 1)),
@ -107,7 +107,10 @@ fn read_str_len_with_nread<R>(rd: &mut R) -> Result<(u32, usize), ValueReadError
///
/// This function is **unstable**, because it needs review.
// TODO: Stabilize. Mark error values for each error case (in docs).
pub fn read_str<'r, R>(rd: &mut R, buf: &'r mut [u8]) -> Result<&'r str, DecodeStringError<'r, R::Error>>
pub fn read_str<'r, R>(
rd: &mut R,
buf: &'r mut [u8],
) -> Result<&'r str, DecodeStringError<'r, R::Error>>
where
R: RmpRead,
{
@ -121,11 +124,13 @@ pub fn read_str<'r, R>(rd: &mut R, buf: &'r mut [u8]) -> Result<&'r str, DecodeS
read_str_data(rd, len, &mut buf[0..ulen])
}
pub fn read_str_data<'r, R>(rd: &mut R,
len: u32,
buf: &'r mut [u8])
-> Result<&'r str, DecodeStringError<'r, R::Error>>
where R: RmpRead
pub fn read_str_data<'r, R>(
rd: &mut R,
len: u32,
buf: &'r mut [u8],
) -> Result<&'r str, DecodeStringError<'r, R::Error>>
where
R: RmpRead,
{
debug_assert_eq!(len as usize, buf.len());
@ -142,7 +147,9 @@ pub fn read_str_data<'r, R>(rd: &mut R,
/// Attempts to read and decode a string value from the reader, returning a borrowed slice from it.
///
// TODO: Also it's possible to implement all borrowing functions for all `BufRead` implementors.
pub fn read_str_ref(rd: &[u8]) -> Result<&[u8], DecodeStringError<'_, super::bytes::BytesReadError>> {
pub fn read_str_ref(
rd: &[u8],
) -> Result<&[u8], DecodeStringError<'_, super::bytes::BytesReadError>> {
let mut cur = super::Bytes::new(rd);
let len = read_str_len(&mut cur)?;
Ok(&cur.remaining_slice()[..len as usize])

View File

@ -1,5 +1,5 @@
use super::{read_marker, RmpRead, ValueReadError};
use super::super::Marker;
use super::{read_marker, RmpRead, ValueReadError};
/// Attempts to read a single byte from the given reader and to decode it as a positive fixnum
/// value.

View File

@ -1,6 +1,6 @@
use super::RmpWrite;
use super::super::encode::{write_marker, ValueWriteError};
use super::super::Marker;
use super::RmpWrite;
/// Encodes and attempts to write the most efficient binary array length implementation to the given
/// write, returning the marker used.
@ -12,7 +12,10 @@
///
/// This function will return `ValueWriteError` on any I/O error occurred while writing either the
/// marker or the data.
pub fn write_bin_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError<W::Error>> {
pub fn write_bin_len<W: RmpWrite>(
wr: &mut W,
len: u32,
) -> Result<Marker, ValueWriteError<W::Error>> {
let marker = if len < 256 {
Marker::Bin8
} else if len <= u16::MAX as u32 {
@ -40,6 +43,5 @@ pub fn write_bin_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueW
// TODO: Docs, range check, example, visibility.
pub fn write_bin<W: RmpWrite>(wr: &mut W, data: &[u8]) -> Result<(), ValueWriteError<W::Error>> {
write_bin_len(wr, data.len() as u32)?;
wr.write_bytes(data)
.map_err(ValueWriteError::InvalidDataWrite)
wr.write_bytes(data).map_err(ValueWriteError::InvalidDataWrite)
}

View File

@ -9,7 +9,6 @@
#[doc(hidden)]
pub type FixedBufCapacityOverflow = crate::io::Error;
/// A wrapper around `Vec<u8>` to serialize more efficiently.
///
/// This has a specialized implementation of `RmpWrite`
@ -39,9 +38,7 @@ pub fn new() -> Self {
#[must_use]
#[unstable(feature = "rmp", issue = "none")]
pub fn with_capacity(capacity: usize) -> Self {
ByteBuf {
bytes: Vec::with_capacity(capacity),
}
ByteBuf { bytes: Vec::with_capacity(capacity) }
}
/// Unwrap the underlying buffer of this vector
#[inline]
@ -128,4 +125,3 @@ fn write_bytes(&mut self, buf: &[u8]) -> Result<(), Self::Error> {
Ok(())
}
}

View File

@ -1,6 +1,6 @@
use super::{write_marker, RmpWrite};
use super::super::encode::ValueWriteError;
use super::super::Marker;
use super::{write_marker, RmpWrite};
/// Encodes and attempts to write an `f32` value as a 5-byte sequence into the given write.
///

View File

@ -13,10 +13,12 @@
pub use self::dec::{write_f32, write_f64};
pub use self::sint::{write_i16, write_i32, write_i64, write_i8, write_nfix, write_sint};
pub use self::str::{write_str, write_str_len};
pub use self::uint::{write_pfix, write_u16, write_u32, write_u64, write_u8, write_uint, write_uint8};
pub use self::uint::{
write_pfix, write_u16, write_u32, write_u64, write_u8, write_uint, write_uint8,
};
use core::fmt::{self, Debug, Display, Formatter};
use crate::error;
use core::fmt::{self, Debug, Display, Formatter};
use super::Marker;
@ -213,8 +215,9 @@ impl From<ValueWriteError<crate::io::Error>> for crate::io::Error {
#[cold]
fn from(err: ValueWriteError<crate::io::Error>) -> crate::io::Error {
match err {
ValueWriteError::InvalidMarkerWrite(err) |
ValueWriteError::InvalidDataWrite(err) => err,
ValueWriteError::InvalidMarkerWrite(err) | ValueWriteError::InvalidDataWrite(err) => {
err
}
}
}
}
@ -223,8 +226,8 @@ impl<E: RmpWriteErr> error::Error for ValueWriteError<E> {
#[cold]
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
ValueWriteError::InvalidMarkerWrite(ref err) |
ValueWriteError::InvalidDataWrite(ref err) => Some(err),
ValueWriteError::InvalidMarkerWrite(ref err)
| ValueWriteError::InvalidDataWrite(ref err) => Some(err),
}
}
}
@ -243,7 +246,10 @@ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
///
/// This function will return `ValueWriteError` on any I/O error occurred while writing either the
/// marker or the data.
pub fn write_array_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError<W::Error>> {
pub fn write_array_len<W: RmpWrite>(
wr: &mut W,
len: u32,
) -> Result<Marker, ValueWriteError<W::Error>> {
let marker = if len < 16 {
Marker::FixArray(len as u8)
} else if len <= u16::MAX as u32 {
@ -268,7 +274,10 @@ pub fn write_array_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, Valu
///
/// This function will return `ValueWriteError` on any I/O error occurred while writing either the
/// marker or the data.
pub fn write_map_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError<W::Error>> {
pub fn write_map_len<W: RmpWrite>(
wr: &mut W,
len: u32,
) -> Result<Marker, ValueWriteError<W::Error>> {
let marker = if len < 16 {
Marker::FixMap(len as u8)
} else if len <= u16::MAX as u32 {
@ -298,7 +307,11 @@ pub fn write_map_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueW
///
/// Panics if `ty` is negative, because it is reserved for future MessagePack extension including
/// 2-byte type information.
pub fn write_ext_meta<W: RmpWrite>(wr: &mut W, len: u32, ty: i8) -> Result<Marker, ValueWriteError<W::Error>> {
pub fn write_ext_meta<W: RmpWrite>(
wr: &mut W,
len: u32,
ty: i8,
) -> Result<Marker, ValueWriteError<W::Error>> {
let marker = match len {
1 => Marker::FixExt1,
2 => Marker::FixExt2,

View File

@ -1,6 +1,8 @@
use super::{write_marker, RmpWrite};
use super::super::encode::{write_pfix, write_u16, write_u32, write_u64, write_u8, ValueWriteError};
use super::super::encode::{
write_pfix, write_u16, write_u32, write_u64, write_u8, ValueWriteError,
};
use super::super::Marker;
use super::{write_marker, RmpWrite};
/// Encodes and attempts to write a negative small integer value as a negative fixnum into the
/// given write.
@ -146,20 +148,16 @@ pub fn write_i64<W: RmpWrite>(wr: &mut W, val: i64) -> Result<(), ValueWriteErro
/// marker or the data.
pub fn write_sint<W: RmpWrite>(wr: &mut W, val: i64) -> Result<Marker, ValueWriteError<W::Error>> {
match val {
val if -32 <= val && val < 0 => {
write_nfix(wr, val as i8)
.and(Ok(Marker::FixNeg(val as i8)))
.map_err(ValueWriteError::InvalidMarkerWrite)
}
val if -32 <= val && val < 0 => write_nfix(wr, val as i8)
.and(Ok(Marker::FixNeg(val as i8)))
.map_err(ValueWriteError::InvalidMarkerWrite),
val if -128 <= val && val < -32 => write_i8(wr, val as i8).and(Ok(Marker::I8)),
val if -32768 <= val && val < -128 => write_i16(wr, val as i16).and(Ok(Marker::I16)),
val if -2147483648 <= val && val < -32768 => write_i32(wr, val as i32).and(Ok(Marker::I32)),
val if val < -2147483648 => write_i64(wr, val).and(Ok(Marker::I64)),
val if 0 <= val && val < 128 => {
write_pfix(wr, val as u8)
.and(Ok(Marker::FixPos(val as u8)))
.map_err(ValueWriteError::InvalidMarkerWrite)
}
val if 0 <= val && val < 128 => write_pfix(wr, val as u8)
.and(Ok(Marker::FixPos(val as u8)))
.map_err(ValueWriteError::InvalidMarkerWrite),
val if val < 256 => write_u8(wr, val as u8).and(Ok(Marker::U8)),
val if val < 65536 => write_u16(wr, val as u16).and(Ok(Marker::U16)),
val if val < 4294967296 => write_u32(wr, val as u32).and(Ok(Marker::U32)),

View File

@ -1,6 +1,6 @@
use super::{write_marker, RmpWrite};
use super::super::encode::ValueWriteError;
use super::super::Marker;
use super::{write_marker, RmpWrite};
/// Encodes and attempts to write the most efficient string length implementation to the given
/// write, returning the marker used.
@ -9,7 +9,10 @@
///
/// This function will return `ValueWriteError` on any I/O error occurred while writing either the
/// marker or the data.
pub fn write_str_len<W: RmpWrite>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError<W::Error>> {
pub fn write_str_len<W: RmpWrite>(
wr: &mut W,
len: u32,
) -> Result<Marker, ValueWriteError<W::Error>> {
let marker = if len < 32 {
Marker::FixStr(len as u8)
} else if len < 256 {

View File

@ -1,6 +1,6 @@
use super::{write_marker, RmpWrite};
use super::super::encode::ValueWriteError;
use super::super::Marker;
use super::{write_marker, RmpWrite};
/// Encodes and attempts to write an unsigned small integer value as a positive fixint into the
/// given write.

View File

@ -19,4 +19,3 @@
#[doc(hidden)]
pub trait MaybeErrBound: crate::error::Error {}
impl<T: ?Sized + crate::error::Error> MaybeErrBound for T {}

View File

@ -1,6 +1,6 @@
const FIXSTR_SIZE : u8 = 0x1f;
const FIXARRAY_SIZE : u8 = 0x0f;
const FIXMAP_SIZE : u8 = 0x0f;
const FIXSTR_SIZE: u8 = 0x1f;
const FIXARRAY_SIZE: u8 = 0x0f;
const FIXMAP_SIZE: u8 = 0x0f;
/// Format markers.
#[derive(Clone, Copy, Debug, PartialEq)]
@ -54,11 +54,11 @@ impl Marker {
#[unstable(feature = "rmp", issue = "none")]
pub fn from_u8(n: u8) -> Marker {
match n {
0x00 ..= 0x7f => Marker::FixPos(n),
0xe0 ..= 0xff => Marker::FixNeg(n as i8),
0x80 ..= 0x8f => Marker::FixMap(n & FIXMAP_SIZE),
0x90 ..= 0x9f => Marker::FixArray(n & FIXARRAY_SIZE),
0xa0 ..= 0xbf => Marker::FixStr(n & FIXSTR_SIZE),
0x00..=0x7f => Marker::FixPos(n),
0xe0..=0xff => Marker::FixNeg(n as i8),
0x80..=0x8f => Marker::FixMap(n & FIXMAP_SIZE),
0x90..=0x9f => Marker::FixArray(n & FIXARRAY_SIZE),
0xa0..=0xbf => Marker::FixStr(n & FIXSTR_SIZE),
0xc0 => Marker::Null,
// Marked in MessagePack spec as never used.
0xc1 => Marker::Reserved,
@ -101,54 +101,54 @@ pub fn from_u8(n: u8) -> Marker {
#[unstable(feature = "rmp", issue = "none")]
pub fn to_u8(&self) -> u8 {
match *self {
Marker::FixPos(val) => val,
Marker::FixNeg(val) => val as u8,
Marker::FixPos(val) => val,
Marker::FixNeg(val) => val as u8,
Marker::Null => 0xc0,
Marker::Null => 0xc0,
Marker::True => 0xc3,
Marker::False => 0xc2,
Marker::True => 0xc3,
Marker::False => 0xc2,
Marker::U8 => 0xcc,
Marker::U16 => 0xcd,
Marker::U32 => 0xce,
Marker::U64 => 0xcf,
Marker::U8 => 0xcc,
Marker::U16 => 0xcd,
Marker::U32 => 0xce,
Marker::U64 => 0xcf,
Marker::I8 => 0xd0,
Marker::I16 => 0xd1,
Marker::I32 => 0xd2,
Marker::I64 => 0xd3,
Marker::I8 => 0xd0,
Marker::I16 => 0xd1,
Marker::I32 => 0xd2,
Marker::I64 => 0xd3,
Marker::F32 => 0xca,
Marker::F64 => 0xcb,
Marker::F32 => 0xca,
Marker::F64 => 0xcb,
Marker::FixStr(len) => 0xa0 | (len & FIXSTR_SIZE),
Marker::Str8 => 0xd9,
Marker::Str16 => 0xda,
Marker::Str32 => 0xdb,
Marker::FixStr(len) => 0xa0 | (len & FIXSTR_SIZE),
Marker::Str8 => 0xd9,
Marker::Str16 => 0xda,
Marker::Str32 => 0xdb,
Marker::Bin8 => 0xc4,
Marker::Bin16 => 0xc5,
Marker::Bin32 => 0xc6,
Marker::Bin8 => 0xc4,
Marker::Bin16 => 0xc5,
Marker::Bin32 => 0xc6,
Marker::FixArray(len) => 0x90 | (len & FIXARRAY_SIZE),
Marker::Array16 => 0xdc,
Marker::Array32 => 0xdd,
Marker::Array16 => 0xdc,
Marker::Array32 => 0xdd,
Marker::FixMap(len) => 0x80 | (len & FIXMAP_SIZE),
Marker::Map16 => 0xde,
Marker::Map32 => 0xdf,
Marker::FixMap(len) => 0x80 | (len & FIXMAP_SIZE),
Marker::Map16 => 0xde,
Marker::Map32 => 0xdf,
Marker::FixExt1 => 0xd4,
Marker::FixExt2 => 0xd5,
Marker::FixExt4 => 0xd6,
Marker::FixExt8 => 0xd7,
Marker::FixExt16 => 0xd8,
Marker::Ext8 => 0xc7,
Marker::Ext16 => 0xc8,
Marker::Ext32 => 0xc9,
Marker::FixExt1 => 0xd4,
Marker::FixExt2 => 0xd5,
Marker::FixExt4 => 0xd6,
Marker::FixExt8 => 0xd7,
Marker::FixExt16 => 0xd8,
Marker::Ext8 => 0xc7,
Marker::Ext16 => 0xc8,
Marker::Ext32 => 0xc9,
Marker::Reserved => 0xc1,
Marker::Reserved => 0xc1,
}
}
}

View File

@ -670,10 +670,7 @@ fn read_u64_into<T: ByteOrder>(&mut self, dst: &mut [u64]) -> Result<()> {
/// assert_eq!([517, 768], dst);
/// ```
#[inline]
fn read_u128_into<T: ByteOrder>(
&mut self,
dst: &mut [u128],
) -> Result<()> {
fn read_u128_into<T: ByteOrder>(&mut self, dst: &mut [u128]) -> Result<()> {
{
let buf = unsafe { slice_to_u8_mut(dst) };
self.read_exact(buf)?;
@ -855,10 +852,7 @@ fn read_i64_into<T: ByteOrder>(&mut self, dst: &mut [i64]) -> Result<()> {
/// assert_eq!([517, 768], dst);
/// ```
#[inline]
fn read_i128_into<T: ByteOrder>(
&mut self,
dst: &mut [i128],
) -> Result<()> {
fn read_i128_into<T: ByteOrder>(&mut self, dst: &mut [i128]) -> Result<()> {
{
let buf = unsafe { slice_to_u8_mut(dst) };
self.read_exact(buf)?;
@ -944,10 +938,7 @@ fn read_f32_into<T: ByteOrder>(&mut self, dst: &mut [f32]) -> Result<()> {
/// assert_eq!([f32::consts::PI, 1.0], dst);
/// ```
#[inline]
fn read_f32_into_unchecked<T: ByteOrder>(
&mut self,
dst: &mut [f32],
) -> Result<()> {
fn read_f32_into_unchecked<T: ByteOrder>(&mut self, dst: &mut [f32]) -> Result<()> {
self.read_f32_into::<T>(dst)
}
@ -1034,10 +1025,7 @@ fn read_f64_into<T: ByteOrder>(&mut self, dst: &mut [f64]) -> Result<()> {
/// assert_eq!([f64::consts::PI, 1.0], dst);
/// ```
#[inline]
fn read_f64_into_unchecked<T: ByteOrder>(
&mut self,
dst: &mut [f64],
) -> Result<()> {
fn read_f64_into_unchecked<T: ByteOrder>(&mut self, dst: &mut [f64]) -> Result<()> {
self.read_f64_into::<T>(dst)
}
}
@ -1437,11 +1425,7 @@ fn write_i128<T: ByteOrder>(&mut self, n: i128) -> Result<()> {
/// assert_eq!(wtr, b"\x48\xc5\x74\x62\xe9\x00\x00\x00\x00\x2b");
/// ```
#[inline]
fn write_uint<T: ByteOrder>(
&mut self,
n: u64,
nbytes: usize,
) -> Result<()> {
fn write_uint<T: ByteOrder>(&mut self, n: u64, nbytes: usize) -> Result<()> {
let mut buf = [0; 8];
T::write_uint(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1473,11 +1457,7 @@ fn write_uint<T: ByteOrder>(
/// assert_eq!(wtr, b"\xf3\x64\xf4\xd1\xfd\xb0\x81\x00\x00\x00\x00\x00\x00\x2b");
/// ```
#[inline]
fn write_int<T: ByteOrder>(
&mut self,
n: i64,
nbytes: usize,
) -> Result<()> {
fn write_int<T: ByteOrder>(&mut self, n: i64, nbytes: usize) -> Result<()> {
let mut buf = [0; 8];
T::write_int(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1488,11 +1468,7 @@ fn write_int<T: ByteOrder>(
/// If the given integer is not representable in the given number of bytes,
/// this method panics. If `nbytes > 16`, this method panics.
#[inline]
fn write_uint128<T: ByteOrder>(
&mut self,
n: u128,
nbytes: usize,
) -> Result<()> {
fn write_uint128<T: ByteOrder>(&mut self, n: u128, nbytes: usize) -> Result<()> {
let mut buf = [0; 16];
T::write_uint128(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1503,11 +1479,7 @@ fn write_uint128<T: ByteOrder>(
/// If the given integer is not representable in the given number of bytes,
/// this method panics. If `nbytes > 16`, this method panics.
#[inline]
fn write_int128<T: ByteOrder>(
&mut self,
n: i128,
nbytes: usize,
) -> Result<()> {
fn write_int128<T: ByteOrder>(&mut self, n: i128, nbytes: usize) -> Result<()> {
let mut buf = [0; 16];
T::write_int128(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])

View File

@ -1,5 +1,4 @@
/// Hacky serializer that only allows `u8`
use crate::fmt;
use serde::ser::Impossible;
use serde::Serialize;
@ -7,8 +6,7 @@
pub(super) struct OnlyBytes;
pub(super) struct Nope;
impl crate::error::Error for Nope {
}
impl crate::error::Error for Nope {}
impl crate::fmt::Display for Nope {
fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -105,7 +103,10 @@ fn serialize_none(self) -> Result<u8, Nope> {
Err(Nope)
}
fn serialize_some<T: ?Sized>(self, _: &T) -> Result<u8, Nope> where T: Serialize {
fn serialize_some<T: ?Sized>(self, _: &T) -> Result<u8, Nope>
where
T: Serialize,
{
Err(Nope)
}
@ -121,11 +122,23 @@ fn serialize_unit_variant(self, _: &'static str, _: u32, _: &'static str) -> Res
Err(Nope)
}
fn serialize_newtype_struct<T: ?Sized>(self, _: &'static str, _: &T) -> Result<u8, Nope> where T: Serialize {
fn serialize_newtype_struct<T: ?Sized>(self, _: &'static str, _: &T) -> Result<u8, Nope>
where
T: Serialize,
{
Err(Nope)
}
fn serialize_newtype_variant<T: ?Sized>(self, _: &'static str, _: u32, _: &'static str, _: &T) -> Result<u8, Nope> where T: Serialize {
fn serialize_newtype_variant<T: ?Sized>(
self,
_: &'static str,
_: u32,
_: &'static str,
_: &T,
) -> Result<u8, Nope>
where
T: Serialize,
{
Err(Nope)
}
@ -137,11 +150,21 @@ fn serialize_tuple(self, _: usize) -> Result<Self::SerializeTuple, Nope> {
Err(Nope)
}
fn serialize_tuple_struct(self, _: &'static str, _: usize) -> Result<Self::SerializeTupleStruct, Nope> {
fn serialize_tuple_struct(
self,
_: &'static str,
_: usize,
) -> Result<Self::SerializeTupleStruct, Nope> {
Err(Nope)
}
fn serialize_tuple_variant(self, _: &'static str, _: u32, _: &'static str, _: usize) -> Result<Self::SerializeTupleVariant, Nope> {
fn serialize_tuple_variant(
self,
_: &'static str,
_: u32,
_: &'static str,
_: usize,
) -> Result<Self::SerializeTupleVariant, Nope> {
Err(Nope)
}
@ -153,19 +176,37 @@ fn serialize_struct(self, _: &'static str, _: usize) -> Result<Self::SerializeSt
Err(Nope)
}
fn serialize_struct_variant(self, _: &'static str, _: u32, _: &'static str, _: usize) -> Result<Self::SerializeStructVariant, Nope> {
fn serialize_struct_variant(
self,
_: &'static str,
_: u32,
_: &'static str,
_: usize,
) -> Result<Self::SerializeStructVariant, Nope> {
Err(Nope)
}
fn collect_seq<I>(self, _: I) -> Result<u8, Nope> where I: IntoIterator, <I as IntoIterator>::Item: Serialize {
fn collect_seq<I>(self, _: I) -> Result<u8, Nope>
where
I: IntoIterator,
<I as IntoIterator>::Item: Serialize,
{
Err(Nope)
}
fn collect_map<K, V, I>(self, _: I) -> Result<u8, Nope> where K: Serialize, V: Serialize, I: IntoIterator<Item = (K, V)> {
fn collect_map<K, V, I>(self, _: I) -> Result<u8, Nope>
where
K: Serialize,
V: Serialize,
I: IntoIterator<Item = (K, V)>,
{
Err(Nope)
}
fn collect_str<T: ?Sized>(self, _: &T) -> Result<u8, Nope> where T: fmt::Display {
fn collect_str<T: ?Sized>(self, _: &T) -> Result<u8, Nope>
where
T: fmt::Display,
{
Err(Nope)
}
}

View File

@ -17,7 +17,9 @@
use serde::forward_to_deserialize_any;
use super::super::rmp;
use rmp::decode::{self, DecodeStringError, MarkerReadError, NumValueReadError, RmpRead, ValueReadError};
use rmp::decode::{
self, DecodeStringError, MarkerReadError, NumValueReadError, RmpRead, ValueReadError,
};
use rmp::Marker;
use super::config::{BinaryConfig, DefaultConfig, HumanReadableConfig, SerializerConfig};
@ -92,16 +94,17 @@ impl Display for Error {
#[cold]
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), fmt::Error> {
match *self {
Error::InvalidMarkerRead(ref err) => write!(fmt, "IO error while reading marker: {err}"),
Error::InvalidMarkerRead(ref err) => {
write!(fmt, "IO error while reading marker: {err}")
}
Error::InvalidDataRead(ref err) => write!(fmt, "IO error while reading data: {err}"),
Error::TypeMismatch(ref actual_marker) => {
write!(fmt, "wrong msgpack marker {actual_marker:?}")
}
Error::OutOfRange => fmt.write_str("numeric cast found out of range"),
Error::LengthMismatch(expected_length) => write!(
fmt,
"array had incorrect length, expected {expected_length}"
),
Error::LengthMismatch(expected_length) => {
write!(fmt, "array had incorrect length, expected {expected_length}")
}
Error::Uncategorized(ref msg) => write!(fmt, "uncategorized error: {msg}"),
Error::Syntax(ref msg) => fmt.write_str(msg),
Error::Utf8Error(ref err) => write!(fmt, "string found to be invalid utf8: {err}"),
@ -156,7 +159,9 @@ fn from(err: DecodeStringError<'_>) -> Error {
DecodeStringError::InvalidMarkerRead(err) => Error::InvalidMarkerRead(err),
DecodeStringError::InvalidDataRead(err) => Error::InvalidDataRead(err),
DecodeStringError::TypeMismatch(marker) => Error::TypeMismatch(marker),
DecodeStringError::BufferSizeTooSmall(..) => Error::Uncategorized("BufferSizeTooSmall".to_string()),
DecodeStringError::BufferSizeTooSmall(..) => {
Error::Uncategorized("BufferSizeTooSmall".to_string())
}
DecodeStringError::InvalidUtf8(..) => Error::Uncategorized("InvalidUtf8".to_string()),
}
}
@ -187,9 +192,7 @@ pub struct Deserializer<R, C = DefaultConfig> {
impl<R: Read, C> Deserializer<R, C> {
#[inline]
fn take_or_read_marker(&mut self) -> Result<Marker, MarkerReadError> {
self.marker
.take()
.map_or_else(|| rmp::decode::read_marker(&mut self.rd), Ok)
self.marker.take().map_or_else(|| rmp::decode::read_marker(&mut self.rd), Ok)
}
#[inline]
@ -247,13 +250,7 @@ impl<R: Read, C: SerializerConfig> Deserializer<R, C> {
#[inline]
pub fn with_human_readable(self) -> Deserializer<R, HumanReadableConfig<C>> {
let Deserializer { rd, _config: _, is_human_readable: _, marker, depth } = self;
Deserializer {
rd,
is_human_readable: true,
_config: PhantomData,
marker,
depth,
}
Deserializer { rd, is_human_readable: true, _config: PhantomData, marker, depth }
}
/// Consumes this deserializer and returns a new one, which will deserialize types with
@ -264,13 +261,7 @@ pub fn with_human_readable(self) -> Deserializer<R, HumanReadableConfig<C>> {
#[inline]
pub fn with_binary(self) -> Deserializer<R, BinaryConfig<C>> {
let Deserializer { rd, _config: _, is_human_readable: _, marker, depth } = self;
Deserializer {
rd,
is_human_readable: false,
_config: PhantomData,
marker,
depth,
}
Deserializer { rd, is_human_readable: false, _config: PhantomData, marker, depth }
}
}
@ -330,10 +321,8 @@ fn read_i128_marker<'de, R: ReadSlice<'de>>(marker: Marker, rd: &mut R) -> Resul
Marker::Bin8 => {
let len = read_u8(&mut *rd)?;
read_128_buf(rd, len)?
},
Marker::FixArray(len) => {
read_128_buf(rd, len)?
},
}
Marker::FixArray(len) => read_128_buf(rd, len)?,
marker => return Err(Error::TypeMismatch(marker)),
})
}
@ -350,7 +339,9 @@ fn read_128_buf<'de, R: ReadSlice<'de>>(rd: &mut R, len: u8) -> Result<i128, Err
}
fn read_str_data<'de, V, R>(rd: &mut R, len: u32, visitor: V) -> Result<V::Value, Error>
where V: Visitor<'de>, R: ReadSlice<'de>
where
V: Visitor<'de>,
R: ReadSlice<'de>,
{
match read_bin_data(rd, len)? {
Reference::Borrowed(buf) => {
@ -380,7 +371,10 @@ fn read_str_data<'de, V, R>(rd: &mut R, len: u32, visitor: V) -> Result<V::Value
}
}
fn read_bin_data<'a, 'de, R: ReadSlice<'de>>(rd: &'a mut R, len: u32) -> Result<Reference<'de, 'a, [u8]>, Error> {
fn read_bin_data<'a, 'de, R: ReadSlice<'de>>(
rd: &'a mut R,
len: u32,
) -> Result<Reference<'de, 'a, [u8]>, Error> {
rd.read_slice(len as usize).map_err(Error::InvalidDataRead)
}
@ -389,13 +383,11 @@ fn read_u8<R: Read>(rd: &mut R) -> Result<u8, Error> {
}
fn read_u16<R: Read>(rd: &mut R) -> Result<u16, Error> {
rd.read_u16::<byteorder::BigEndian>()
.map_err(Error::InvalidDataRead)
rd.read_u16::<byteorder::BigEndian>().map_err(Error::InvalidDataRead)
}
fn read_u32<R: Read>(rd: &mut R) -> Result<u32, Error> {
rd.read_u32::<byteorder::BigEndian>()
.map_err(Error::InvalidDataRead)
rd.read_u32::<byteorder::BigEndian>().map_err(Error::InvalidDataRead)
}
fn ext_len<R: Read>(rd: &mut R, marker: Marker) -> Result<u32, Error> {
@ -429,21 +421,19 @@ struct ExtDeserializer<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> ExtDeserializer<'a, R, C> {
fn new(d: &'a mut Deserializer<R, C>, len: u32) -> Self {
ExtDeserializer {
rd: &mut d.rd,
_config: d._config,
len,
state: ExtDeserializerState::New,
}
ExtDeserializer { rd: &mut d.rd, _config: d._config, len, state: ExtDeserializerState::New }
}
}
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::Deserializer<'de> for ExtDeserializer<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::Deserializer<'de>
for ExtDeserializer<'a, R, C>
{
type Error = Error;
#[inline(always)]
fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where V: Visitor<'de>
where
V: Visitor<'de>,
{
visitor.visit_seq(self)
}
@ -455,7 +445,9 @@ struct identifier tuple enum ignored_any tuple_struct
}
}
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::SeqAccess<'de> for ExtDeserializer<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::SeqAccess<'de>
for ExtDeserializer<'a, R, C>
{
type Error = Error;
#[inline]
@ -464,18 +456,23 @@ fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Error>
T: DeserializeSeed<'de>,
{
match self.state {
ExtDeserializerState::New | ExtDeserializerState::ReadTag => Ok(Some(seed.deserialize(self)?)),
ExtDeserializerState::New | ExtDeserializerState::ReadTag => {
Ok(Some(seed.deserialize(self)?))
}
ExtDeserializerState::ReadBinary => Ok(None),
}
}
}
/// Deserializer for Ext `SeqAccess`
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::Deserializer<'de> for &mut ExtDeserializer<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::Deserializer<'de>
for &mut ExtDeserializer<'a, R, C>
{
type Error = Error;
fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where V: Visitor<'de>
where
V: Visitor<'de>,
{
match self.state {
ExtDeserializerState::New => {
@ -494,7 +491,7 @@ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
ExtDeserializerState::ReadBinary => {
debug_assert!(false);
Err(Error::TypeMismatch(Marker::Reserved))
},
}
}
}
@ -506,11 +503,14 @@ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
}
#[inline(never)]
fn any_num<'de, R: ReadSlice<'de>, V: Visitor<'de>>(rd: &mut R, visitor: V, marker: Marker) -> Result<V::Value, Error> {
fn any_num<'de, R: ReadSlice<'de>, V: Visitor<'de>>(
rd: &mut R,
visitor: V,
marker: Marker,
) -> Result<V::Value, Error> {
match marker {
Marker::Null => visitor.visit_unit(),
Marker::True |
Marker::False => visitor.visit_bool(marker == Marker::True),
Marker::True | Marker::False => visitor.visit_bool(marker == Marker::True),
Marker::FixPos(val) => visitor.visit_u8(val),
Marker::FixNeg(val) => visitor.visit_i8(val),
Marker::U8 => visitor.visit_u8(rd.read_data_u8()?),
@ -528,24 +528,28 @@ fn any_num<'de, R: ReadSlice<'de>, V: Visitor<'de>>(rd: &mut R, visitor: V, mark
}
impl<'de, R: ReadSlice<'de>, C: SerializerConfig> Deserializer<R, C> {
fn any_inner<V: Visitor<'de>>(&mut self, visitor: V, allow_bytes: bool) -> Result<V::Value, Error> {
fn any_inner<V: Visitor<'de>>(
&mut self,
visitor: V,
allow_bytes: bool,
) -> Result<V::Value, Error> {
let marker = self.take_or_read_marker()?;
match marker {
Marker::Null |
Marker::True |
Marker::False |
Marker::FixPos(_) |
Marker::FixNeg(_) |
Marker::U8 |
Marker::U16 |
Marker::U32 |
Marker::U64 |
Marker::I8 |
Marker::I16 |
Marker::I32 |
Marker::I64 |
Marker::F32 |
Marker::F64 => any_num(&mut self.rd, visitor, marker),
Marker::Null
| Marker::True
| Marker::False
| Marker::FixPos(_)
| Marker::FixNeg(_)
| Marker::U8
| Marker::U16
| Marker::U32
| Marker::U64
| Marker::I8
| Marker::I16
| Marker::I32
| Marker::I64
| Marker::F32
| Marker::F64 => any_num(&mut self.rd, visitor, marker),
Marker::FixStr(_) | Marker::Str8 | Marker::Str16 | Marker::Str32 => {
let len = match marker {
Marker::FixStr(len) => Ok(len.into()),
@ -556,9 +560,7 @@ fn any_inner<V: Visitor<'de>>(&mut self, visitor: V, allow_bytes: bool) -> Resul
}?;
read_str_data(&mut self.rd, len, visitor)
}
Marker::FixArray(_) |
Marker::Array16 |
Marker::Array32 => {
Marker::FixArray(_) | Marker::Array16 | Marker::Array32 => {
let len = match marker {
Marker::FixArray(len) => len.into(),
Marker::Array16 => read_u16(&mut self.rd)?.into(),
@ -575,9 +577,7 @@ fn any_inner<V: Visitor<'de>>(&mut self, visitor: V, allow_bytes: bool) -> Resul
}
})
}
Marker::FixMap(_) |
Marker::Map16 |
Marker::Map32 => {
Marker::FixMap(_) | Marker::Map16 | Marker::Map32 => {
let len = match marker {
Marker::FixMap(len) => len.into(),
Marker::Map16 => read_u16(&mut self.rd)?.into(),
@ -606,26 +606,31 @@ fn any_inner<V: Visitor<'de>>(&mut self, visitor: V, allow_bytes: bool) -> Resul
Reference::Copied(buf) if allow_bytes => visitor.visit_bytes(buf),
Reference::Borrowed(buf) | Reference::Copied(buf) => {
visitor.visit_seq(SeqDeserializer::new(buf.iter().copied()))
},
}
}
}
Marker::FixExt1 |
Marker::FixExt2 |
Marker::FixExt4 |
Marker::FixExt8 |
Marker::FixExt16 |
Marker::Ext8 |
Marker::Ext16 |
Marker::Ext32 => {
Marker::FixExt1
| Marker::FixExt2
| Marker::FixExt4
| Marker::FixExt8
| Marker::FixExt16
| Marker::Ext8
| Marker::Ext16
| Marker::Ext32 => {
let len = ext_len(&mut self.rd, marker)?;
depth_count!(self.depth, visitor.visit_newtype_struct(ExtDeserializer::new(self, len)))
depth_count!(
self.depth,
visitor.visit_newtype_struct(ExtDeserializer::new(self, len))
)
}
Marker::Reserved => Err(Error::TypeMismatch(Marker::Reserved)),
}
}
}
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> serde::Deserializer<'de> for &'a mut Deserializer<R, C> {
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> serde::Deserializer<'de>
for &'a mut Deserializer<R, C>
{
type Error = Error;
#[inline(always)]
@ -635,13 +640,15 @@ fn is_human_readable(&self) -> bool {
#[inline(always)]
fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where V: Visitor<'de>
where
V: Visitor<'de>,
{
self.any_inner(visitor, true)
}
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where V: Visitor<'de>
where
V: Visitor<'de>,
{
// # Important
//
@ -667,8 +674,14 @@ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
}
}
fn deserialize_enum<V>(self, _name: &str, _variants: &[&str], visitor: V) -> Result<V::Value, Error>
where V: Visitor<'de>
fn deserialize_enum<V>(
self,
_name: &str,
_variants: &[&str],
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
let marker = self.peek_or_read_marker()?;
match rmp::decode::marker_to_len(&mut self.rd, marker) {
@ -687,8 +700,13 @@ fn deserialize_enum<V>(self, _name: &str, _variants: &[&str], visitor: V) -> Res
}
}
fn deserialize_newtype_struct<V>(self, name: &'static str, visitor: V) -> Result<V::Value, Error>
where V: Visitor<'de>
fn deserialize_newtype_struct<V>(
self,
name: &'static str,
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
if name == MSGPACK_EXT_STRUCT_NAME {
let marker = self.take_or_read_marker()?;
@ -701,8 +719,13 @@ fn deserialize_newtype_struct<V>(self, name: &'static str, visitor: V) -> Result
visitor.visit_newtype_struct(self)
}
fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value, Self::Error>
where V: Visitor<'de>
fn deserialize_unit_struct<V>(
self,
_name: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
// We need to special case this so that [] is treated as a unit struct when asked for,
// but as a sequence otherwise. This is because we serialize unit structs as [] rather
@ -733,22 +756,44 @@ fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value, Self::Error>
}
#[inline]
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.any_inner(visitor, false)
}
#[inline]
fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.any_inner(visitor, false)
}
#[inline]
fn deserialize_struct<V>(self, _: &'static str, _: &'static [&'static str], visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_struct<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.any_inner(visitor, false)
}
#[inline]
fn deserialize_tuple_struct<V>(self, _: &'static str, _: usize, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_tuple_struct<V>(
self,
_: &'static str,
_: usize,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.any_inner(visitor, false)
}
@ -758,57 +803,90 @@ fn deserialize_tuple_struct<V>(self, _: &'static str, _: usize, visitor: V) -> R
ignored_any
}
fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value, Self::Error> where V: Visitor<'de> {
fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let marker = self.take_or_read_marker()?;
any_num(&mut self.rd, visitor, marker)
}
@ -826,12 +904,15 @@ fn new(de: &'a mut Deserializer<R, C>, len: u32) -> Self {
}
}
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::SeqAccess<'de> for SeqAccess<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::SeqAccess<'de>
for SeqAccess<'a, R, C>
{
type Error = Error;
#[inline]
fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
where T: DeserializeSeed<'de>
where
T: DeserializeSeed<'de>,
{
if self.left > 0 {
self.left -= 1;
@ -859,12 +940,15 @@ fn new(de: &'a mut Deserializer<R, C>, len: u32) -> Self {
}
}
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::MapAccess<'de> for MapAccess<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de> + 'a, C: SerializerConfig> de::MapAccess<'de>
for MapAccess<'a, R, C>
{
type Error = Error;
#[inline]
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
where K: DeserializeSeed<'de>
where
K: DeserializeSeed<'de>,
{
if self.left > 0 {
self.left -= 1;
@ -876,7 +960,8 @@ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
#[inline]
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
where V: DeserializeSeed<'de>
where
V: DeserializeSeed<'de>,
{
seed.deserialize(&mut *self.de)
}
@ -926,20 +1011,14 @@ fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Error>
where
T: de::DeserializeSeed<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"newtype variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"newtype variant"))
}
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Error>
where
V: de::Visitor<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"tuple variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"tuple variant"))
}
fn struct_variant<V>(
@ -950,10 +1029,7 @@ fn struct_variant<V>(
where
V: de::Visitor<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"struct variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"struct variant"))
}
}
@ -967,19 +1043,24 @@ pub fn new(de: &'a mut Deserializer<R, C>) -> Self {
}
}
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> de::EnumAccess<'de> for VariantAccess<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> de::EnumAccess<'de>
for VariantAccess<'a, R, C>
{
type Error = Error;
type Variant = Self;
#[inline]
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self), Error>
where V: de::DeserializeSeed<'de>,
where
V: de::DeserializeSeed<'de>,
{
Ok((seed.deserialize(&mut *self.de)?, self))
}
}
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> de::VariantAccess<'de> for VariantAccess<'a, R, C> {
impl<'de, 'a, R: ReadSlice<'de>, C: SerializerConfig> de::VariantAccess<'de>
for VariantAccess<'a, R, C>
{
type Error = Error;
#[inline]
@ -990,21 +1071,28 @@ fn unit_variant(self) -> Result<(), Error> {
#[inline]
fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
where T: DeserializeSeed<'de>
where
T: DeserializeSeed<'de>,
{
seed.deserialize(self.de)
}
#[inline]
fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Error>
where V: Visitor<'de>
where
V: Visitor<'de>,
{
de::Deserializer::deserialize_tuple(self.de, len, visitor)
}
#[inline]
fn struct_variant<V>(self, fields: &'static [&'static str], visitor: V) -> Result<V::Value, Error>
where V: Visitor<'de>
fn struct_variant<V>(
self,
fields: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Error>
where
V: Visitor<'de>,
{
de::Deserializer::deserialize_tuple(self.de, fields.len(), visitor)
}
@ -1037,10 +1125,7 @@ pub struct ReadReader<R: Read> {
impl<R: Read> ReadReader<R> {
#[inline]
fn new(rd: R) -> Self {
ReadReader {
rd,
buf: Vec::with_capacity(128),
}
ReadReader { rd, buf: Vec::with_capacity(128) }
}
}
@ -1087,10 +1172,7 @@ pub fn remaining_slice(&self) -> &'a [u8] {
impl<'a, T: AsRef<[u8]> + ?Sized> ReadRefReader<'a, T> {
#[inline]
fn new(rd: &'a T) -> Self {
Self {
whole_slice: rd,
buf: rd.as_ref(),
}
Self { whole_slice: rd, buf: rd.as_ref() }
}
}
@ -1138,8 +1220,9 @@ fn test_as_ref_reader() {
/// decides that something is wrong with the data, for example required struct fields are missing.
#[inline]
pub fn from_read<R, T>(rd: R) -> Result<T, Error>
where R: Read,
T: DeserializeOwned
where
R: Read,
T: DeserializeOwned,
{
Deserialize::deserialize(&mut Deserializer::new(rd))
}

View File

@ -14,13 +14,13 @@
};
use serde::Serialize;
use super::super::rmp;
use rmp::encode::ValueWriteError;
use rmp::{encode, Marker};
use super::config::{
BinaryConfig, DefaultConfig, HumanReadableConfig, RuntimeConfig, SerializerConfig, StructMapConfig, StructTupleConfig
BinaryConfig, DefaultConfig, HumanReadableConfig, RuntimeConfig, SerializerConfig,
StructMapConfig, StructTupleConfig,
};
use super::MSGPACK_EXT_STRUCT_NAME;
@ -182,8 +182,13 @@ fn compound(&'a mut self) -> Result<Compound<'a, W, C>, Error> {
impl<'a, W: Write + 'a, C: SerializerConfig> Serializer<W, C> {
#[inline]
fn maybe_unknown_len_compound<F>(&'a mut self, len: Option<u32>, f: F) -> Result<MaybeUnknownLengthCompound<'a, W, C>, Error>
where F: Fn(&mut W, u32) -> Result<Marker, ValueWriteError>
fn maybe_unknown_len_compound<F>(
&'a mut self,
len: Option<u32>,
f: F,
) -> Result<MaybeUnknownLengthCompound<'a, W, C>, Error>
where
F: Fn(&mut W, u32) -> Result<Marker, ValueWriteError>,
{
Ok(MaybeUnknownLengthCompound {
compound: match len {
@ -341,8 +346,7 @@ fn end(self) -> Result<Self::Ok, Self::Error> {
} else {
encode::write_bin_len(&mut self.se.wr, self.len)?;
}
self.se.wr.write_all(&buf)
.map_err(ValueWriteError::InvalidDataWrite)?;
self.se.wr.write_all(&buf).map_err(ValueWriteError::InvalidDataWrite)?;
}
Ok(())
}
@ -420,9 +424,11 @@ impl<'a, W: Write + 'a, C: SerializerConfig> SerializeStruct for Compound<'a, W,
type Error = Error;
#[inline]
fn serialize_field<T: ?Sized + Serialize>(&mut self, key: &'static str, value: &T) ->
Result<(), Self::Error>
{
fn serialize_field<T: ?Sized + Serialize>(
&mut self,
key: &'static str,
value: &T,
) -> Result<(), Self::Error> {
if self.se.config.is_named {
encode::write_str(self.se.get_mut(), key)?;
}
@ -454,9 +460,11 @@ impl<'a, W: Write + 'a, C: SerializerConfig> SerializeStructVariant for Compound
type Ok = ();
type Error = Error;
fn serialize_field<T: ?Sized + Serialize>(&mut self, key: &'static str, value: &T) ->
Result<(), Self::Error>
{
fn serialize_field<T: ?Sized + Serialize>(
&mut self,
key: &'static str,
value: &T,
) -> Result<(), Self::Error> {
if self.se.config.is_named {
encode::write_str(self.se.get_mut(), key)?;
value.serialize(&mut *self.se)
@ -488,7 +496,7 @@ fn from(se: &Serializer<W, C>) -> Self {
depth: se.depth,
_back_compat_config: PhantomData,
},
elem_count: 0
elem_count: 0,
}
}
}
@ -533,7 +541,9 @@ fn serialize_element<T: ?Sized + Serialize>(&mut self, value: &T) -> Result<(),
fn end(self) -> Result<Self::Ok, Self::Error> {
if let Some(compound) = self.compound {
encode::write_array_len(&mut self.se.wr, compound.elem_count)?;
self.se.wr.write_all(&compound.se.into_inner())
self.se
.wr
.write_all(&compound.se.into_inner())
.map_err(ValueWriteError::InvalidDataWrite)?;
}
Ok(())
@ -555,7 +565,9 @@ fn serialize_value<T: ?Sized + Serialize>(&mut self, value: &T) -> Result<(), Se
fn end(self) -> Result<Self::Ok, Self::Error> {
if let Some(compound) = self.compound {
encode::write_map_len(&mut self.se.wr, compound.elem_count / 2)?;
self.se.wr.write_all(&compound.se.into_inner())
self.se
.wr
.write_all(&compound.se.into_inner())
.map_err(ValueWriteError::InvalidDataWrite)?;
}
Ok(())
@ -673,13 +685,20 @@ fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Er
Ok(())
}
fn serialize_unit_variant(self, _name: &str, _: u32, variant: &'static str) ->
Result<Self::Ok, Self::Error>
{
fn serialize_unit_variant(
self,
_name: &str,
_: u32,
variant: &'static str,
) -> Result<Self::Ok, Self::Error> {
self.serialize_str(variant)
}
fn serialize_newtype_struct<T: ?Sized + serde::Serialize>(self, name: &'static str, value: &T) -> Result<(), Self::Error> {
fn serialize_newtype_struct<T: ?Sized + serde::Serialize>(
self,
name: &'static str,
value: &T,
) -> Result<(), Self::Error> {
if name == MSGPACK_EXT_STRUCT_NAME {
let mut ext_se = ExtSerializer::new(self);
value.serialize(&mut ext_se)?;
@ -691,7 +710,13 @@ fn serialize_newtype_struct<T: ?Sized + serde::Serialize>(self, name: &'static s
value.serialize(self)
}
fn serialize_newtype_variant<T: ?Sized + serde::Serialize>(self, _name: &'static str, _: u32, variant: &'static str, value: &T) -> Result<Self::Ok, Self::Error> {
fn serialize_newtype_variant<T: ?Sized + serde::Serialize>(
self,
_name: &'static str,
_: u32,
variant: &'static str,
value: &T,
) -> Result<Self::Ok, Self::Error> {
// encode as a map from variant idx to its attributed data, like: {idx => value}
encode::write_map_len(&mut self.wr, 1)?;
self.serialize_str(variant)?;
@ -700,7 +725,9 @@ fn serialize_newtype_variant<T: ?Sized + serde::Serialize>(self, _name: &'static
#[inline]
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Error> {
self.maybe_unknown_len_compound(len.map(|len| len as u32), |wr, len| encode::write_array_len(wr, len))
self.maybe_unknown_len_compound(len.map(|len| len as u32), |wr, len| {
encode::write_array_len(wr, len)
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
@ -716,17 +743,23 @@ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error
})
}
fn serialize_tuple_struct(self, _name: &'static str, len: usize) ->
Result<Self::SerializeTupleStruct, Self::Error>
{
fn serialize_tuple_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct, Self::Error> {
encode::write_array_len(&mut self.wr, len as u32)?;
self.compound()
}
fn serialize_tuple_variant(self, _name: &'static str, _: u32, variant: &'static str, len: usize) ->
Result<Self::SerializeTupleVariant, Error>
{
fn serialize_tuple_variant(
self,
_name: &'static str,
_: u32,
variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant, Error> {
// encode as a map from variant idx to a sequence of its attributed data, like: {idx => [v1,...,vN]}
encode::write_map_len(&mut self.wr, 1)?;
self.serialize_str(variant)?;
@ -736,12 +769,16 @@ fn serialize_tuple_variant(self, _name: &'static str, _: u32, variant: &'static
#[inline]
fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Error> {
self.maybe_unknown_len_compound(len.map(|len| len as u32), |wr, len| encode::write_map_len(wr, len))
self.maybe_unknown_len_compound(len.map(|len| len as u32), |wr, len| {
encode::write_map_len(wr, len)
})
}
fn serialize_struct(self, _name: &'static str, len: usize) ->
Result<Self::SerializeStruct, Self::Error>
{
fn serialize_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeStruct, Self::Error> {
if self.config.is_named {
encode::write_map_len(self.get_mut(), len as u32)?;
} else {
@ -750,16 +787,24 @@ fn serialize_struct(self, _name: &'static str, len: usize) ->
self.compound()
}
fn serialize_struct_variant(self, name: &'static str, _: u32, variant: &'static str, len: usize) ->
Result<Self::SerializeStructVariant, Error>
{
fn serialize_struct_variant(
self,
name: &'static str,
_: u32,
variant: &'static str,
len: usize,
) -> Result<Self::SerializeStructVariant, Error> {
// encode as a map from variant idx to a sequence of its attributed data, like: {idx => [v1,...,vN]}
encode::write_map_len(&mut self.wr, 1)?;
self.serialize_str(variant)?;
self.serialize_struct(name, len)
}
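// Illustrative sketch (not part of this commit): the single-entry-map layout the
// variant serializers above produce, written out by hand with std only. The helper
// name `encode_variant_u8` is made up for the example; byte values follow the
// MessagePack spec (fixmap = 0x80 | len, fixstr = 0xa0 | len, u8 = 0xcc).
fn encode_variant_u8(variant: &str, value: u8) -> Vec<u8> {
    assert!(variant.len() < 32, "fixstr only covers names shorter than 32 bytes");
    let mut out = Vec::new();
    out.push(0x81); // map with exactly one entry: { variant => payload }
    out.push(0xa0 | variant.len() as u8); // key: the variant name as a fixstr
    out.extend_from_slice(variant.as_bytes());
    out.push(0xcc); // value: the variant's u8 payload
    out.push(value);
    out
}

fn main() {
    // A data-carrying variant `Answer(42)` becomes {"Answer": 42} on the wire.
    let bytes = encode_variant_u8("Answer", 42);
    assert_eq!(bytes, [0x81, 0xa6, b'A', b'n', b's', b'w', b'e', b'r', 0xcc, 42]);
}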
fn collect_seq<I>(self, iter: I) -> Result<Self::Ok, Self::Error> where I: IntoIterator, I::Item: Serialize {
fn collect_seq<I>(self, iter: I) -> Result<Self::Ok, Self::Error>
where
I: IntoIterator,
I::Item: Serialize,
{
let iter = iter.into_iter();
let len = match iter.size_hint() {
(lo, Some(hi)) if lo == hi && lo <= u32::MAX as usize => Some(lo as u32),
@ -792,13 +837,23 @@ fn collect_seq<I>(self, iter: I) -> Result<Self::Ok, Self::Error> where I: IntoI
}
impl<W: Write, C: SerializerConfig> Serializer<W, C> {
fn bytes_from_iter<I>(&mut self, mut iter: I, len: u32) -> Result<(), <&mut Self as serde::Serializer>::Error> where I: Iterator, I::Item: Serialize {
fn bytes_from_iter<I>(
&mut self,
mut iter: I,
len: u32,
) -> Result<(), <&mut Self as serde::Serializer>::Error>
where
I: Iterator,
I::Item: Serialize,
{
encode::write_bin_len(&mut self.wr, len)?;
iter.try_for_each(|item| {
self.wr.write(crate::slice::from_ref(&item.serialize(OnlyBytes)
.map_err(|_| Error::InvalidDataModel("BytesMode"))?))
self.wr
.write(crate::slice::from_ref(
&item.serialize(OnlyBytes).map_err(|_| Error::InvalidDataModel("BytesMode"))?,
))
.map_err(ValueWriteError::InvalidDataWrite)?;
Ok(())
Ok(())
})
}
}
@ -912,19 +967,36 @@ fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Er
}
#[inline]
fn serialize_unit_variant(self, _name: &'static str, _idx: u32, _variant: &'static str) -> Result<Self::Ok, Self::Error> {
fn serialize_unit_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
) -> Result<Self::Ok, Self::Error> {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
#[inline]
fn serialize_newtype_struct<T: ?Sized>(self, _name: &'static str, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
_value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
fn serialize_newtype_variant<T: ?Sized>(self, _name: &'static str, _idx: u32, _variant: &'static str, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
@ -936,7 +1008,8 @@ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
#[inline]
fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
@ -952,12 +1025,22 @@ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Error> {
}
#[inline]
fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct, Error> {
fn serialize_tuple_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleStruct, Error> {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
#[inline]
fn serialize_tuple_variant(self, _name: &'static str, _idx: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeTupleVariant, Error> {
fn serialize_tuple_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleVariant, Error> {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
@ -967,12 +1050,22 @@ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Error>
}
#[inline]
fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct, Error> {
fn serialize_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeStruct, Error> {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
#[inline]
fn serialize_struct_variant(self, _name: &'static str, _idx: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeStructVariant, Error> {
fn serialize_struct_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Error> {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
}
@ -1070,20 +1163,37 @@ fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Er
}
#[inline]
fn serialize_unit_variant(self, _name: &'static str, _idx: u32, _variant: &'static str) -> Result<Self::Ok, Self::Error> {
fn serialize_unit_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
) -> Result<Self::Ok, Self::Error> {
Err(Error::InvalidDataModel("expected tuple"))
}
#[inline]
fn serialize_newtype_struct<T: ?Sized>(self, _name: &'static str, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
_value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected tuple"))
}
#[inline]
fn serialize_newtype_variant<T: ?Sized>(self, _name: &'static str, _idx: u32, _variant: &'static str, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected tuple"))
}
@ -1095,7 +1205,8 @@ fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
#[inline]
fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error>
where T: Serialize
where
T: Serialize,
{
Err(Error::InvalidDataModel("expected tuple"))
}
@ -1113,12 +1224,22 @@ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Error> {
}
#[inline]
fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct, Error> {
fn serialize_tuple_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleStruct, Error> {
Err(Error::InvalidDataModel("expected tuple"))
}
#[inline]
fn serialize_tuple_variant(self, _name: &'static str, _idx: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeTupleVariant, Error> {
fn serialize_tuple_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleVariant, Error> {
Err(Error::InvalidDataModel("expected tuple"))
}
@ -1128,12 +1249,22 @@ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Error>
}
#[inline]
fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct, Error> {
fn serialize_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeStruct, Error> {
Err(Error::InvalidDataModel("expected tuple"))
}
#[inline]
fn serialize_struct_variant(self, _name: &'static str, _idx: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeStructVariant, Error> {
fn serialize_struct_variant(
self,
_name: &'static str,
_idx: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Error> {
Err(Error::InvalidDataModel("expected tuple"))
}
}
@ -1156,10 +1287,7 @@ fn end(self) -> Result<Self::Ok, Self::Error> {
impl<'a, W: Write + 'a> ExtSerializer<'a, W> {
#[inline]
fn new<C>(ser: &'a mut Serializer<W, C>) -> Self {
Self {
fields_se: ExtFieldSerializer::new(ser),
tuple_received: false,
}
Self { fields_se: ExtFieldSerializer::new(ser), tuple_received: false }
}
#[inline]
@ -1175,20 +1303,12 @@ fn end(self) -> Result<(), Error> {
impl<'a, W: Write + 'a> ExtFieldSerializer<'a, W> {
#[inline]
fn new<C>(ser: &'a mut Serializer<W, C>) -> Self {
Self {
wr: UnderlyingWrite::get_mut(ser),
tag: None,
finish: false,
}
Self { wr: UnderlyingWrite::get_mut(ser), tag: None, finish: false }
}
#[inline]
fn end(self) -> Result<(), Error> {
if self.finish {
Ok(())
} else {
Err(Error::InvalidDataModel("expected i8 and bytes"))
}
if self.finish { Ok(()) } else { Err(Error::InvalidDataModel("expected i8 and bytes")) }
}
}

View File

@ -9,14 +9,14 @@
#[allow(unused_imports)]
pub use decode::from_read_ref;
#[allow(unused_imports)]
pub use decode::from_slice;
#[allow(unused_imports)]
pub use decode::{from_read, Deserializer};
#[allow(unused_imports)]
pub use encode::{to_vec, to_vec_named, Serializer};
#[allow(unused_imports)]
pub use decode::from_slice;
mod bytes;
mod byteorder_io;
mod bytes;
pub mod config;
pub mod decode;
pub mod encode;
@ -67,9 +67,7 @@ pub fn from_utf8(v: Vec<u8>) -> Self {
Ok(v) => Raw::new(v),
Err(err) => {
let e = err.utf8_error();
Self {
s: Err((err.into_bytes(), e)),
}
Self { s: Err((err.into_bytes(), e)) }
}
}
}
@ -166,14 +164,16 @@ fn expecting(&self, fmt: &mut Formatter<'_>) -> Result<(), fmt::Error> {
#[inline]
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: de::Error
where
E: de::Error,
{
Ok(Raw { s: Ok(v.into()) })
}
#[inline]
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: de::Error
where
E: de::Error,
{
let s = match str::from_utf8(v) {
Ok(s) => Ok(s.into()),
@ -202,7 +202,8 @@ fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
impl<'de> Deserialize<'de> for Raw {
#[inline]
fn deserialize<D>(de: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>
where
D: de::Deserializer<'de>,
{
de.deserialize_any(RawVisitor)
}
@ -230,11 +231,7 @@ pub fn new(v: &'a str) -> Self {
pub fn from_utf8(v: &'a [u8]) -> Self {
match str::from_utf8(v) {
Ok(v) => RawRef::new(v),
Err(err) => {
Self {
s: Err((v, err))
}
}
Err(err) => Self { s: Err((v, err)) },
}
}
@ -308,14 +305,16 @@ fn expecting(&self, fmt: &mut Formatter<'_>) -> Result<(), fmt::Error> {
#[inline]
fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E>
where E: de::Error
where
E: de::Error,
{
Ok(RawRef { s: Ok(v) })
}
#[inline]
fn visit_borrowed_bytes<E>(self, v: &'de [u8]) -> Result<Self::Value, E>
where E: de::Error
where
E: de::Error,
{
let s = match str::from_utf8(v) {
Ok(s) => Ok(s),
@ -329,7 +328,8 @@ fn visit_borrowed_bytes<E>(self, v: &'de [u8]) -> Result<Self::Value, E>
impl<'de> Deserialize<'de> for RawRef<'de> {
#[inline]
fn deserialize<D>(de: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>
where
D: de::Deserializer<'de>,
{
de.deserialize_any(RawRefVisitor)
}

View File

@ -1,5 +1,5 @@
use crate::io;
use crate::arch::asm;
use crate::io;
pub struct Stdin;
pub struct Stdout;

View File

@ -1,12 +1,12 @@
#![allow(clippy::result_unit_err)]
mod raw;
use crate::ptr;
use alloc::vec::Vec;
use core::slice;
#[allow(clippy::wildcard_imports)]
use raw::*;
use x86_64::structures::paging::{Page, PageTableFlags};
use crate::ptr;
use super::buffers::{IntoId, KernelBuffer, KernelBufferAllocator};
use crate::os::mikros::address_space::AddressSpace;
@ -48,14 +48,7 @@ pub(crate) fn map_assert_unused(
num_pages: usize,
flags: PageTableFlags,
) -> Result<(), ()> {
if syscall4(
6,
space,
start.start_address().as_u64(),
num_pages as u64,
flags.bits(),
) == 1
{
if syscall4(6, space, start.start_address().as_u64(), num_pages as u64, flags.bits()) == 1 {
Err(())
} else {
Ok(())
@ -77,11 +70,7 @@ pub(crate) fn copy_to(
pub fn new_process(entry_point: u64, space: AddressSpace) -> Result<u64, ()> {
let (err, pid) = syscall2r2(8, entry_point, space.into_raw());
if err == 1 {
Err(())
} else {
Ok(pid)
}
if err == 1 { Err(()) } else { Ok(pid) }
}
pub fn register(typ: u64) {
@ -90,22 +79,12 @@ pub fn register(typ: u64) {
#[must_use]
pub fn try_get_registered(typ: u64) -> Option<u64> {
if let (0, channel) = syscall1r2(10, typ) {
Some(channel)
} else {
None
}
if let (0, channel) = syscall1r2(10, typ) { Some(channel) } else { None }
}
pub(crate) fn ipc_send(pid: u64, buffer: KernelBuffer) -> Result<(), ()> {
let len = buffer.len();
if syscall3(
11,
pid,
buffer.into_raw_parts_with_alloc().3.into_id().unwrap(),
len as u64,
) == 0
{
if syscall3(11, pid, buffer.into_raw_parts_with_alloc().3.into_id().unwrap(), len as u64) == 0 {
Ok(())
} else {
Err(())
@ -147,30 +126,15 @@ pub(crate) fn map_only_unused(
num_pages: usize,
flags: PageTableFlags,
) -> Result<(), ()> {
if syscall4(
14,
space,
start.start_address().as_u64(),
num_pages as u64,
flags.bits(),
) == 1
{
if syscall4(14, space, start.start_address().as_u64(), num_pages as u64, flags.bits()) == 1 {
Err(())
} else {
Ok(())
}
}
pub(crate) fn adsp_zero(
space: u64,
dst: *mut u8,
len: usize,
) -> Result<(), ()> {
if syscall3(17, space, dst as u64, len as u64) == 1 {
Err(())
} else {
Ok(())
}
pub(crate) fn adsp_zero(space: u64, dst: *mut u8, len: usize) -> Result<(), ()> {
if syscall3(17, space, dst as u64, len as u64) == 1 { Err(()) } else { Ok(()) }
}
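// Illustrative sketch (not from this commit): the status-code convention the syscall
// wrappers above follow, factored into a tiny helper. `fake_syscall` is a stand-in
// for the raw syscall; "1 means failure" is the convention assumed by the code above.
fn status_to_result(status: u64) -> Result<(), ()> {
    if status == 1 { Err(()) } else { Ok(()) }
}

fn fake_syscall(_num: u64) -> u64 {
    0 // pretend the call succeeded
}

fn main() {
    assert_eq!(status_to_result(fake_syscall(17)), Ok(()));
    assert_eq!(status_to_result(1), Err(()));
}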
pub fn wait_for_ipc_message() {

View File

@ -111,7 +111,7 @@ fn toggle(&mut self, idx: usize) {
fn set_bitfield<T: BitField>(v: &mut Vec<T>) {
for i in 0..v.len() * T::BIT_LENGTH {
v.as_mut_slice().set_bit(i, true);;
v.as_mut_slice().set_bit(i, true);
}
}

View File

@ -311,10 +311,7 @@ fn get_bits<U: RangeBounds<usize>>(&self, range: U) -> T {
self[slice_start].get_bits(bit_start..T::BIT_LENGTH)
} else {
let mut ret = self[slice_start].get_bits(bit_start..T::BIT_LENGTH);
ret.set_bits(
(T::BIT_LENGTH - bit_start)..len,
self[slice_end].get_bits(0..bit_end),
);
ret.set_bits((T::BIT_LENGTH - bit_start)..len, self[slice_end].get_bits(0..bit_end));
ret
}
}
@ -346,14 +343,10 @@ fn set_bits<U: RangeBounds<usize>>(&mut self, range: U, value: T) {
} else if bit_end == 0 {
self[slice_start].set_bits(bit_start..T::BIT_LENGTH, value);
} else {
self[slice_start].set_bits(
bit_start..T::BIT_LENGTH,
value.get_bits(0..T::BIT_LENGTH - bit_start),
);
self[slice_end].set_bits(
0..bit_end,
value.get_bits(T::BIT_LENGTH - bit_start..T::BIT_LENGTH),
);
self[slice_start]
.set_bits(bit_start..T::BIT_LENGTH, value.get_bits(0..T::BIT_LENGTH - bit_start));
self[slice_end]
.set_bits(0..bit_end, value.get_bits(T::BIT_LENGTH - bit_start..T::BIT_LENGTH));
}
}
}
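// Illustrative sketch (not part of this diff): the cross-element split handled above,
// shown on two u8 elements with plain shifts instead of the BitField trait. A 4-bit
// field starting at bit 6 takes 2 bits from element 0 and 2 bits from element 1.
fn get_bits_across(v: &[u8; 2], start: usize, len: usize) -> u8 {
    let from_first = 8 - start; // bits contributed by v[0]
    let low = v[0] >> start; // low part of the field
    let mask = (1u8 << (len - from_first)) - 1;
    let high = v[1] & mask; // high part of the field, taken from v[1]
    low | (high << from_first)
}

fn main() {
    // v[0] = 0b1100_0000, v[1] = 0b0000_0011 -> the field reads back as 0b1111.
    let v = [0b1100_0000u8, 0b0000_0011u8];
    assert_eq!(get_bits_across(&v, 6, 4), 0b1111);
}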

View File

@ -5,9 +5,7 @@
macro_rules! bench_num {
($name:ident, $read:ident, $bytes:expr, $data:expr) => {
mod $name {
use byteorder::{
BigEndian, ByteOrder, LittleEndian, NativeEndian,
};
use byteorder::{BigEndian, ByteOrder, LittleEndian, NativeEndian};
use test::black_box as bb;
use test::Bencher;
@ -47,9 +45,7 @@ fn read_native_endian(b: &mut Bencher) {
($ty:ident, $max:ident,
$read:ident, $write:ident, $size:expr, $data:expr) => {
mod $ty {
use byteorder::{
BigEndian, ByteOrder, LittleEndian, NativeEndian,
};
use byteorder::{BigEndian, ByteOrder, LittleEndian, NativeEndian};
use std::$ty;
use test::black_box as bb;
use test::Bencher;
@ -177,36 +173,11 @@ fn write_native_endian(b: &mut Bencher) {
bench_num!(uint128_9, read_uint128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
bench_num!(uint128_10, read_uint128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
bench_num!(uint128_11, read_uint128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]);
bench_num!(
uint128_12,
read_uint128,
12,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
);
bench_num!(
uint128_13,
read_uint128,
13,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
);
bench_num!(
uint128_14,
read_uint128,
14,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
);
bench_num!(
uint128_15,
read_uint128,
15,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
);
bench_num!(
uint128_16,
read_uint128,
16,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
);
bench_num!(uint128_12, read_uint128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
bench_num!(uint128_13, read_uint128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]);
bench_num!(uint128_14, read_uint128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]);
bench_num!(uint128_15, read_uint128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
bench_num!(uint128_16, read_uint128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
bench_num!(int128_1, read_int128, 1, [1]);
bench_num!(int128_2, read_int128, 2, [1, 2]);
@ -219,36 +190,11 @@ fn write_native_endian(b: &mut Bencher) {
bench_num!(int128_9, read_int128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
bench_num!(int128_10, read_int128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
bench_num!(int128_11, read_int128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]);
bench_num!(
int128_12,
read_int128,
12,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
);
bench_num!(
int128_13,
read_int128,
13,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
);
bench_num!(
int128_14,
read_int128,
14,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
);
bench_num!(
int128_15,
read_int128,
15,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
);
bench_num!(
int128_16,
read_int128,
16,
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
);
bench_num!(int128_12, read_int128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
bench_num!(int128_13, read_int128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]);
bench_num!(int128_14, read_int128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]);
bench_num!(int128_15, read_int128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
bench_num!(int128_16, read_int128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
macro_rules! bench_slice {
($name:ident, $numty:ty, $read:ident, $write:ident) => {
@ -262,10 +208,8 @@ mod $name {
#[bench]
fn read_big_endian(b: &mut Bencher) {
let mut numbers: Vec<$numty> = rand::thread_rng()
.sample_iter(&distributions::Standard)
.take(100000)
.collect();
let mut numbers: Vec<$numty> =
rand::thread_rng().sample_iter(&distributions::Standard).take(100000).collect();
let mut bytes = vec![0; numbers.len() * size_of::<$numty>()];
BigEndian::$write(&numbers, &mut bytes);
@ -277,10 +221,8 @@ fn read_big_endian(b: &mut Bencher) {
#[bench]
fn read_little_endian(b: &mut Bencher) {
let mut numbers: Vec<$numty> = rand::thread_rng()
.sample_iter(&distributions::Standard)
.take(100000)
.collect();
let mut numbers: Vec<$numty> =
rand::thread_rng().sample_iter(&distributions::Standard).take(100000).collect();
let mut bytes = vec![0; numbers.len() * size_of::<$numty>()];
LittleEndian::$write(&numbers, &mut bytes);
@ -292,10 +234,8 @@ fn read_little_endian(b: &mut Bencher) {
#[bench]
fn write_big_endian(b: &mut Bencher) {
let numbers: Vec<$numty> = rand::thread_rng()
.sample_iter(&distributions::Standard)
.take(100000)
.collect();
let numbers: Vec<$numty> =
rand::thread_rng().sample_iter(&distributions::Standard).take(100000).collect();
let mut bytes = vec![0; numbers.len() * size_of::<$numty>()];
b.bytes = bytes.len() as u64;
@ -306,10 +246,8 @@ fn write_big_endian(b: &mut Bencher) {
#[bench]
fn write_little_endian(b: &mut Bencher) {
let numbers: Vec<$numty> = rand::thread_rng()
.sample_iter(&distributions::Standard)
.take(100000)
.collect();
let numbers: Vec<$numty> =
rand::thread_rng().sample_iter(&distributions::Standard).take(100000).collect();
let mut bytes = vec![0; numbers.len() * size_of::<$numty>()];
b.bytes = bytes.len() as u64;

View File

@ -670,10 +670,7 @@ fn read_u64_into<T: ByteOrder>(&mut self, dst: &mut [u64]) -> Result<()> {
/// assert_eq!([517, 768], dst);
/// ```
#[inline]
fn read_u128_into<T: ByteOrder>(
&mut self,
dst: &mut [u128],
) -> Result<()> {
fn read_u128_into<T: ByteOrder>(&mut self, dst: &mut [u128]) -> Result<()> {
{
let buf = unsafe { slice_to_u8_mut(dst) };
self.read_exact(buf)?;
@ -855,10 +852,7 @@ fn read_i64_into<T: ByteOrder>(&mut self, dst: &mut [i64]) -> Result<()> {
/// assert_eq!([517, 768], dst);
/// ```
#[inline]
fn read_i128_into<T: ByteOrder>(
&mut self,
dst: &mut [i128],
) -> Result<()> {
fn read_i128_into<T: ByteOrder>(&mut self, dst: &mut [i128]) -> Result<()> {
{
let buf = unsafe { slice_to_u8_mut(dst) };
self.read_exact(buf)?;
@ -945,10 +939,7 @@ fn read_f32_into<T: ByteOrder>(&mut self, dst: &mut [f32]) -> Result<()> {
/// ```
#[inline]
#[deprecated(since = "1.2.0", note = "please use `read_f32_into` instead")]
fn read_f32_into_unchecked<T: ByteOrder>(
&mut self,
dst: &mut [f32],
) -> Result<()> {
fn read_f32_into_unchecked<T: ByteOrder>(&mut self, dst: &mut [f32]) -> Result<()> {
self.read_f32_into::<T>(dst)
}
@ -1036,10 +1027,7 @@ fn read_f64_into<T: ByteOrder>(&mut self, dst: &mut [f64]) -> Result<()> {
/// ```
#[inline]
#[deprecated(since = "1.2.0", note = "please use `read_f64_into` instead")]
fn read_f64_into_unchecked<T: ByteOrder>(
&mut self,
dst: &mut [f64],
) -> Result<()> {
fn read_f64_into_unchecked<T: ByteOrder>(&mut self, dst: &mut [f64]) -> Result<()> {
self.read_f64_into::<T>(dst)
}
}
@ -1439,11 +1427,7 @@ fn write_i128<T: ByteOrder>(&mut self, n: i128) -> Result<()> {
/// assert_eq!(wtr, b"\x48\xc5\x74\x62\xe9\x00\x00\x00\x00\x2b");
/// ```
#[inline]
fn write_uint<T: ByteOrder>(
&mut self,
n: u64,
nbytes: usize,
) -> Result<()> {
fn write_uint<T: ByteOrder>(&mut self, n: u64, nbytes: usize) -> Result<()> {
let mut buf = [0; 8];
T::write_uint(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1475,11 +1459,7 @@ fn write_uint<T: ByteOrder>(
/// assert_eq!(wtr, b"\xf3\x64\xf4\xd1\xfd\xb0\x81\x00\x00\x00\x00\x00\x00\x2b");
/// ```
#[inline]
fn write_int<T: ByteOrder>(
&mut self,
n: i64,
nbytes: usize,
) -> Result<()> {
fn write_int<T: ByteOrder>(&mut self, n: i64, nbytes: usize) -> Result<()> {
let mut buf = [0; 8];
T::write_int(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1490,11 +1470,7 @@ fn write_int<T: ByteOrder>(
/// If the given integer is not representable in the given number of bytes,
/// this method panics. If `nbytes > 16`, this method panics.
#[inline]
fn write_uint128<T: ByteOrder>(
&mut self,
n: u128,
nbytes: usize,
) -> Result<()> {
fn write_uint128<T: ByteOrder>(&mut self, n: u128, nbytes: usize) -> Result<()> {
let mut buf = [0; 16];
T::write_uint128(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])
@ -1505,11 +1481,7 @@ fn write_uint128<T: ByteOrder>(
/// If the given integer is not representable in the given number of bytes,
/// this method panics. If `nbytes > 16`, this method panics.
#[inline]
fn write_int128<T: ByteOrder>(
&mut self,
n: i128,
nbytes: usize,
) -> Result<()> {
fn write_int128<T: ByteOrder>(&mut self, n: i128, nbytes: usize) -> Result<()> {
let mut buf = [0; 16];
T::write_int128(&mut buf, n, nbytes);
self.write_all(&buf[0..nbytes])

File diff suppressed because it is too large

View File

@ -79,26 +79,12 @@ mod parse_tests {
#[test]
fn parse_dyn32_lsb() {
test_parse_for(
LittleEndian,
Class::ELF32,
Dyn {
d_tag: 0x03020100,
d_un: 0x07060504,
},
);
test_parse_for(LittleEndian, Class::ELF32, Dyn { d_tag: 0x03020100, d_un: 0x07060504 });
}
#[test]
fn parse_dyn32_msb() {
test_parse_for(
BigEndian,
Class::ELF32,
Dyn {
d_tag: 0x00010203,
d_un: 0x04050607,
},
);
test_parse_for(BigEndian, Class::ELF32, Dyn { d_tag: 0x00010203, d_un: 0x04050607 });
}
#[test]
@ -106,10 +92,7 @@ fn parse_dyn64_lsb() {
test_parse_for(
LittleEndian,
Class::ELF64,
Dyn {
d_tag: 0x0706050403020100,
d_un: 0x0F0E0D0C0B0A0908,
},
Dyn { d_tag: 0x0706050403020100, d_un: 0x0F0E0D0C0B0A0908 },
);
}
@ -118,10 +101,7 @@ fn parse_dyn64_msb() {
test_parse_for(
BigEndian,
Class::ELF64,
Dyn {
d_tag: 0x0001020304050607,
d_un: 0x08090A0B0C0D0E0F,
},
Dyn { d_tag: 0x0001020304050607, d_un: 0x08090A0B0C0D0E0F },
);
}

View File

@ -96,16 +96,10 @@ fn find_shdrs<'data, E: EndianParse>(
// Validate shentsize before trying to read the table so that we can error early for corrupted files
let entsize = SectionHeader::validate_entsize(ehdr.class, ehdr.e_shentsize as usize)?;
let size = entsize
.checked_mul(shnum)
.ok_or(ParseError::IntegerOverflow)?;
let size = entsize.checked_mul(shnum).ok_or(ParseError::IntegerOverflow)?;
let end = shoff.checked_add(size).ok_or(ParseError::IntegerOverflow)?;
let buf = data.get_bytes(shoff..end)?;
Ok(Some(SectionHeaderTable::new(
ehdr.endianness,
ehdr.class,
buf,
)))
Ok(Some(SectionHeaderTable::new(ehdr.endianness, ehdr.class, buf)))
}
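// Illustrative sketch (not from this commit): the checked-offset pattern used above,
// with a plain Option standing in for the crate's ParseError::IntegerOverflow.
fn table_range(offset: usize, entsize: usize, count: usize) -> Option<core::ops::Range<usize>> {
    let size = entsize.checked_mul(count)?; // reject instead of wrapping on overflow
    let end = offset.checked_add(size)?;
    Some(offset..end)
}

fn main() {
    assert_eq!(table_range(64, 56, 4), Some(64..288));
    // A corrupted header advertising a huge entry count is caught before any slicing.
    assert_eq!(table_range(64, 56, usize::MAX), None);
}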
/// Find the location (if any) of the program headers in the given data buffer and take a
@ -134,9 +128,7 @@ fn find_phdrs<'data, E: EndianParse>(
let entsize = ProgramHeader::validate_entsize(ehdr.class, ehdr.e_phentsize as usize)?;
let phoff: usize = ehdr.e_phoff.try_into()?;
let size = entsize
.checked_mul(phnum)
.ok_or(ParseError::IntegerOverflow)?;
let size = entsize.checked_mul(phnum).ok_or(ParseError::IntegerOverflow)?;
let end = phoff.checked_add(size).ok_or(ParseError::IntegerOverflow)?;
let buf = data.get_bytes(phoff..end)?;
Ok(Some(SegmentTable::new(ehdr.endianness, ehdr.class, buf)))
@ -187,12 +179,7 @@ pub fn minimal_parse(data: &'data [u8]) -> Result<Self, ParseError> {
let shdrs = find_shdrs(&ehdr, data)?;
let phdrs = find_phdrs(&ehdr, data)?;
Ok(ElfBytes {
ehdr,
data,
shdrs,
phdrs,
})
Ok(ElfBytes { ehdr, data, shdrs, phdrs })
}
/// Get this Elf object's zero-alloc lazy-parsing [SegmentTable] (if any).
@ -262,13 +249,8 @@ pub fn section_headers(&self) -> Option<SectionHeaderTable<'data, E>> {
/// ```
pub fn section_headers_with_strtab(
&self,
) -> Result<
(
Option<SectionHeaderTable<'data, E>>,
Option<StringTable<'data>>,
),
ParseError,
> {
) -> Result<(Option<SectionHeaderTable<'data, E>>, Option<StringTable<'data>>), ParseError>
{
// It's Ok to have no section headers
let shdrs = match self.section_headers() {
Some(shdrs) => shdrs,
@ -387,20 +369,14 @@ pub fn find_common_data(&self) -> Result<CommonElfData<'data, E>, ParseError> {
abi::SHT_HASH => {
let (start, end) = shdr.get_data_range()?;
let buf = self.data.get_bytes(start..end)?;
result.sysv_hash = Some(SysVHashTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
)?);
result.sysv_hash =
Some(SysVHashTable::new(self.ehdr.endianness, self.ehdr.class, buf)?);
}
abi::SHT_GNU_HASH => {
let (start, end) = shdr.get_data_range()?;
let buf = self.data.get_bytes(start..end)?;
result.gnu_hash = Some(GnuHashTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
)?);
result.gnu_hash =
Some(GnuHashTable::new(self.ehdr.endianness, self.ehdr.class, buf)?);
}
_ => {
continue;
@ -415,11 +391,8 @@ pub fn find_common_data(&self) -> Result<CommonElfData<'data, E>, ParseError> {
if let Some(dyn_phdr) = phdrs.iter().find(|phdr| phdr.p_type == abi::PT_DYNAMIC) {
let (start, end) = dyn_phdr.get_file_data_range()?;
let buf = self.data.get_bytes(start..end)?;
result.dynamic = Some(DynamicTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
));
result.dynamic =
Some(DynamicTable::new(self.ehdr.endianness, self.ehdr.class, buf));
}
}
}
@ -457,10 +430,9 @@ pub fn section_data(
&mut offset,
buf,
)?;
let compressed_buf = buf.get(offset..).ok_or(ParseError::SliceReadError((
offset,
shdr.sh_size.try_into()?,
)))?;
let compressed_buf = buf
.get(offset..)
.ok_or(ParseError::SliceReadError((offset, shdr.sh_size.try_into()?)))?;
Ok((compressed_buf, Some(chdr)))
}
}
@ -473,10 +445,7 @@ pub fn section_data_as_strtab(
shdr: &SectionHeader,
) -> Result<StringTable<'data>, ParseError> {
if shdr.sh_type != abi::SHT_STRTAB {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_STRTAB,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_STRTAB)));
}
let (buf, _) = self.section_data(shdr)?;
@ -492,10 +461,7 @@ pub fn section_data_as_rels(
shdr: &SectionHeader,
) -> Result<RelIterator<'data, E>, ParseError> {
if shdr.sh_type != abi::SHT_REL {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_REL,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_REL)));
}
let (buf, _) = self.section_data(shdr)?;
@ -511,18 +477,11 @@ pub fn section_data_as_relas(
shdr: &SectionHeader,
) -> Result<RelaIterator<'data, E>, ParseError> {
if shdr.sh_type != abi::SHT_RELA {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_RELA,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_RELA)));
}
let (buf, _) = self.section_data(shdr)?;
Ok(RelaIterator::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
))
Ok(RelaIterator::new(self.ehdr.endianness, self.ehdr.class, buf))
}
/// Get the section data for a given [SectionHeader], and interpret it as an
@ -534,10 +493,7 @@ pub fn section_data_as_notes(
shdr: &SectionHeader,
) -> Result<NoteIterator<'data, E>, ParseError> {
if shdr.sh_type != abi::SHT_NOTE {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_NOTE,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_NOTE)));
}
let (buf, _) = self.section_data(shdr)?;
@ -556,20 +512,13 @@ fn section_data_as_dynamic(
shdr: &SectionHeader,
) -> Result<DynamicTable<'data, E>, ParseError> {
if shdr.sh_type != abi::SHT_DYNAMIC {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_DYNAMIC,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_DYNAMIC)));
}
// Validate entsize before trying to read the table so that we can error early for corrupted files
Dyn::validate_entsize(self.ehdr.class, shdr.sh_entsize.try_into()?)?;
let (buf, _) = self.section_data(shdr)?;
Ok(DynamicTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
))
Ok(DynamicTable::new(self.ehdr.endianness, self.ehdr.class, buf))
}
/// Get the segment's file data for a given segment/[ProgramHeader].
@ -589,19 +538,11 @@ pub fn segment_data_as_notes(
phdr: &ProgramHeader,
) -> Result<NoteIterator<'data, E>, ParseError> {
if phdr.p_type != abi::PT_NOTE {
return Err(ParseError::UnexpectedSegmentType((
phdr.p_type,
abi::PT_NOTE,
)));
return Err(ParseError::UnexpectedSegmentType((phdr.p_type, abi::PT_NOTE)));
}
let buf = self.segment_data(phdr)?;
Ok(NoteIterator::new(
self.ehdr.endianness,
self.ehdr.class,
phdr.p_align as usize,
buf,
))
Ok(NoteIterator::new(self.ehdr.endianness, self.ehdr.class, phdr.p_align as usize, buf))
}
/// Get the .dynamic section or [abi::PT_DYNAMIC] segment contents.
@ -616,11 +557,7 @@ pub fn dynamic(&self) -> Result<Option<DynamicTable<'data, E>>, ParseError> {
if let Some(phdr) = phdrs.iter().find(|phdr| phdr.p_type == abi::PT_DYNAMIC) {
let (start, end) = phdr.get_file_data_range()?;
let buf = self.data.get_bytes(start..end)?;
return Ok(Some(DynamicTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
)));
return Ok(Some(DynamicTable::new(self.ehdr.endianness, self.ehdr.class, buf)));
}
}
@ -672,10 +609,7 @@ pub fn symbol_table(
};
let strtab_shdr = shdrs.get(symtab_shdr.sh_link as usize)?;
Ok(Some(self.section_data_as_symbol_table(
&symtab_shdr,
&strtab_shdr,
)?))
Ok(Some(self.section_data_as_symbol_table(&symtab_shdr, &strtab_shdr)?))
}
/// Get the ELF file's `.dynsym` and associated strtab (if any)
@ -698,10 +632,7 @@ pub fn dynamic_symbol_table(
};
let strtab_shdr = shdrs.get(symtab_shdr.sh_link as usize)?;
Ok(Some(self.section_data_as_symbol_table(
&symtab_shdr,
&strtab_shdr,
)?))
Ok(Some(self.section_data_as_symbol_table(&symtab_shdr, &strtab_shdr)?))
}
/// Locate the section data for the various GNU Symbol Versioning sections (if any)
@ -810,11 +741,7 @@ pub fn symbol_version_table(&self) -> Result<Option<SymbolVersionTable<'data, E>
};
// whew, we're done here!
Ok(Some(SymbolVersionTable::new(
version_ids,
verneeds,
verdefs,
)))
Ok(Some(SymbolVersionTable::new(version_ids, verneeds, verdefs)))
}
}
@ -850,11 +777,8 @@ fn simultaenous_segments_parsing() {
let iter = file.segments().expect("File should have a segment table");
// Concurrently get the segment table again as an iterator and collect the headers into a vec
let segments: Vec<ProgramHeader> = file
.segments()
.expect("File should have a segment table")
.iter()
.collect();
let segments: Vec<ProgramHeader> =
file.segments().expect("File should have a segment table").iter().collect();
let expected_phdr = ProgramHeader {
p_type: abi::PT_PHDR,
@ -871,10 +795,7 @@ fn simultaenous_segments_parsing() {
assert_eq!(segments[0], expected_phdr);
// Now use the original lazy-parsing table to parse out the first entry
assert_eq!(
iter.get(0).expect("should be able to parse phdr"),
expected_phdr
)
assert_eq!(iter.get(0).expect("should be able to parse phdr"), expected_phdr)
}
#[test]
@ -884,11 +805,8 @@ fn segments() {
let slice = file_data.as_slice();
let file = ElfBytes::<AnyEndian>::minimal_parse(slice).expect("Open test1");
let segments: Vec<ProgramHeader> = file
.segments()
.expect("File should have a segment table")
.iter()
.collect();
let segments: Vec<ProgramHeader> =
file.segments().expect("File should have a segment table").iter().collect();
assert_eq!(
segments[0],
ProgramHeader {
@ -911,11 +829,8 @@ fn segments_phnum_in_shdr0() {
let slice = file_data.as_slice();
let file = ElfBytes::<AnyEndian>::minimal_parse(slice).expect("Open test1");
let segments: Vec<ProgramHeader> = file
.segments()
.expect("File should have a segment table")
.iter()
.collect();
let segments: Vec<ProgramHeader> =
file.segments().expect("File should have a segment table").iter().collect();
assert_eq!(
segments[0],
ProgramHeader {
@ -938,9 +853,7 @@ fn section_headers() {
let slice = file_data.as_slice();
let file = ElfBytes::<AnyEndian>::minimal_parse(slice).expect("Open test1");
let shdrs = file
.section_headers()
.expect("File should have a section table");
let shdrs = file.section_headers().expect("File should have a section table");
let shdrs_vec: Vec<SectionHeader> = shdrs.iter().collect();
@ -954,20 +867,13 @@ fn section_headers_with_strtab() {
let slice = file_data.as_slice();
let file = ElfBytes::<AnyEndian>::minimal_parse(slice).expect("Open test1");
let (shdrs, strtab) = file
.section_headers_with_strtab()
.expect("shdrs should be parsable");
let (shdrs, strtab) = file.section_headers_with_strtab().expect("shdrs should be parsable");
let (shdrs, strtab) = (shdrs.unwrap(), strtab.unwrap());
let with_names: Vec<(&str, SectionHeader)> = shdrs
.iter()
.map(|shdr| {
(
strtab
.get(shdr.sh_name as usize)
.expect("Failed to get section name"),
shdr,
)
(strtab.get(shdr.sh_name as usize).expect("Failed to get section name"), shdr)
})
.collect();
@ -983,18 +889,14 @@ fn shnum_and_shstrndx_in_shdr0() {
let slice = file_data.as_slice();
let file = ElfBytes::<AnyEndian>::minimal_parse(slice).unwrap();
let (shdrs, strtab) = file
.section_headers_with_strtab()
.expect("shdrs should be parsable");
let (shdrs, strtab) = file.section_headers_with_strtab().expect("shdrs should be parsable");
let (shdrs, strtab) = (shdrs.unwrap(), strtab.unwrap());
let shdrs_len = shdrs.len();
assert_eq!(shdrs_len, 0xFF15);
let shdr = shdrs.get(shdrs_len - 1).unwrap();
let name = strtab
.get(shdr.sh_name as usize)
.expect("Failed to get section name");
let name = strtab.get(shdr.sh_name as usize).expect("Failed to get section name");
assert_eq!(name, ".shstrtab");
assert_eq!(shdr.sh_type, abi::SHT_STRTAB);
@ -1014,9 +916,8 @@ fn section_header_by_name() {
assert_eq!(shdr.sh_type, SHT_GNU_HASH);
let shdr = file
.section_header_by_name(".not.found")
.expect("section table should be parseable");
let shdr =
file.section_header_by_name(".not.found").expect("section table should be parseable");
assert_eq!(shdr, None);
}
@ -1055,9 +956,7 @@ fn section_data() {
assert_eq!(shdr.sh_type, SHT_NOBITS);
let (data, chdr) = file
.section_data(&shdr)
.expect("Failed to get section data");
let (data, chdr) = file.section_data(&shdr).expect("Failed to get section data");
assert_eq!(chdr, None);
assert_eq!(data, &[]);
@ -1078,36 +977,25 @@ fn section_data_as_wrong_type() {
.get(0)
.expect("shdr should be parsable");
let err = file
.section_data_as_strtab(&shdr)
.expect_err("shdr0 should be the wrong type");
let err = file.section_data_as_strtab(&shdr).expect_err("shdr0 should be the wrong type");
assert!(
matches!(
err,
ParseError::UnexpectedSectionType((SHT_NULL, SHT_STRTAB))
),
matches!(err, ParseError::UnexpectedSectionType((SHT_NULL, SHT_STRTAB))),
"Unexpected Error type found: {err}"
);
let err = file
.section_data_as_rels(&shdr)
.expect_err("shdr0 should be the wrong type");
let err = file.section_data_as_rels(&shdr).expect_err("shdr0 should be the wrong type");
assert!(
matches!(err, ParseError::UnexpectedSectionType((SHT_NULL, SHT_REL))),
"Unexpected Error type found: {err}"
);
let err = file
.section_data_as_relas(&shdr)
.expect_err("shdr0 should be the wrong type");
let err = file.section_data_as_relas(&shdr).expect_err("shdr0 should be the wrong type");
assert!(
matches!(err, ParseError::UnexpectedSectionType((SHT_NULL, SHT_RELA))),
"Unexpected Error type found: {err}"
);
let err = file
.section_data_as_notes(&shdr)
.expect_err("shdr0 should be the wrong type");
let err = file.section_data_as_notes(&shdr).expect_err("shdr0 should be the wrong type");
assert!(
matches!(err, ParseError::UnexpectedSectionType((SHT_NULL, SHT_NOTE))),
"Unexpected Error type found: {err}"
@ -1127,14 +1015,9 @@ fn section_data_as_strtab() {
.get(file.ehdr.e_shstrndx as usize)
.expect("shdr should be parsable");
let strtab = file
.section_data_as_strtab(&shdr)
.expect("Failed to read strtab");
let strtab = file.section_data_as_strtab(&shdr).expect("Failed to read strtab");
assert_eq!(
strtab.get(1).expect("Failed to get strtab entry"),
".symtab"
);
assert_eq!(strtab.get(1).expect("Failed to get strtab entry"), ".symtab");
}
#[test]
@ -1150,26 +1033,14 @@ fn section_data_as_relas() {
.get(10)
.expect("Failed to get rela shdr");
let mut relas = file
.section_data_as_relas(&shdr)
.expect("Failed to read relas section");
let mut relas = file.section_data_as_relas(&shdr).expect("Failed to read relas section");
assert_eq!(
relas.next().expect("Failed to get rela entry"),
Rela {
r_offset: 6293704,
r_sym: 1,
r_type: 7,
r_addend: 0,
}
Rela { r_offset: 6293704, r_sym: 1, r_type: 7, r_addend: 0 }
);
assert_eq!(
relas.next().expect("Failed to get rela entry"),
Rela {
r_offset: 6293712,
r_sym: 2,
r_type: 7,
r_addend: 0,
}
Rela { r_offset: 6293712, r_sym: 2, r_type: 7, r_addend: 0 }
);
assert!(relas.next().is_none());
}
@ -1187,17 +1058,10 @@ fn section_data_as_notes() {
.get(2)
.expect("Failed to get note shdr");
let mut notes = file
.section_data_as_notes(&shdr)
.expect("Failed to read note section");
let mut notes = file.section_data_as_notes(&shdr).expect("Failed to read note section");
assert_eq!(
notes.next().expect("Failed to get first note"),
Note::GnuAbiTag(NoteGnuAbiTag {
os: 0,
major: 2,
minor: 6,
subminor: 32
})
Note::GnuAbiTag(NoteGnuAbiTag { os: 0, major: 2, minor: 6, subminor: 32 })
);
assert!(notes.next().is_none());
}
@ -1215,17 +1079,10 @@ fn segment_data_as_notes() {
.get(5)
.expect("Failed to get notes phdr");
let mut notes = file
.segment_data_as_notes(&phdr)
.expect("Failed to read notes segment");
let mut notes = file.segment_data_as_notes(&phdr).expect("Failed to read notes segment");
assert_eq!(
notes.next().expect("Failed to get first note"),
Note::GnuAbiTag(NoteGnuAbiTag {
os: 0,
major: 2,
minor: 6,
subminor: 32
})
Note::GnuAbiTag(NoteGnuAbiTag { os: 0, major: 2, minor: 6, subminor: 32 })
);
assert_eq!(
notes.next().expect("Failed to get second note"),
@ -1251,17 +1108,11 @@ fn dynamic() {
.iter();
assert_eq!(
dynamic.next().expect("Failed to get dyn entry"),
Dyn {
d_tag: abi::DT_NEEDED,
d_un: 1
}
Dyn { d_tag: abi::DT_NEEDED, d_un: 1 }
);
assert_eq!(
dynamic.next().expect("Failed to get dyn entry"),
Dyn {
d_tag: abi::DT_INIT,
d_un: 4195216
}
Dyn { d_tag: abi::DT_INIT, d_un: 4195216 }
);
}
@ -1289,9 +1140,7 @@ fn symbol_table() {
}
);
assert_eq!(
strtab
.get(symbol.st_name as usize)
.expect("Failed to get name from strtab"),
strtab.get(symbol.st_name as usize).expect("Failed to get name from strtab"),
"__JCR_LIST__"
);
}
@ -1310,19 +1159,10 @@ fn dynamic_symbol_table() {
let symbol = symtab.get(1).expect("Failed to get symbol");
assert_eq!(
symbol,
Symbol {
st_name: 11,
st_value: 0,
st_size: 0,
st_shndx: 0,
st_info: 18,
st_other: 0,
}
Symbol { st_name: 11, st_value: 0, st_size: 0, st_shndx: 0, st_info: 18, st_other: 0 }
);
assert_eq!(
strtab
.get(symbol.st_name as usize)
.expect("Failed to get name from strtab"),
strtab.get(symbol.st_name as usize).expect("Failed to get name from strtab"),
"memset"
);
}
@ -1339,10 +1179,8 @@ fn symbol_version_table() {
.expect("Failed to parse GNU symbol versions")
.expect("Failed to find GNU symbol versions");
let req = vst
.get_requirement(2)
.expect("Failed to parse NEED")
.expect("Failed to find NEED");
let req =
vst.get_requirement(2).expect("Failed to parse NEED").expect("Failed to find NEED");
assert_eq!(req.file, "libc.so.6");
assert_eq!(req.name, "GLIBC_2.2.5");
assert_eq!(req.hash, 0x9691A75);
@ -1353,28 +1191,20 @@ fn symbol_version_table() {
let req = vst.get_requirement(4).expect("Failed to parse NEED");
assert!(req.is_none());
let req = vst
.get_requirement(5)
.expect("Failed to parse NEED")
.expect("Failed to find NEED");
let req =
vst.get_requirement(5).expect("Failed to parse NEED").expect("Failed to find NEED");
assert_eq!(req.file, "libc.so.6");
assert_eq!(req.name, "GLIBC_2.2.5");
assert_eq!(req.hash, 0x9691A75);
let def = vst
.get_definition(3)
.expect("Failed to parse DEF")
.expect("Failed to find DEF");
let def = vst.get_definition(3).expect("Failed to parse DEF").expect("Failed to find DEF");
assert_eq!(def.hash, 0xC33237F);
assert_eq!(def.flags, 1);
assert!(!def.hidden);
let def_names: Vec<&str> = def.names.map(|res| res.expect("should parse")).collect();
assert_eq!(def_names, &["hello.so"]);
let def = vst
.get_definition(7)
.expect("Failed to parse DEF")
.expect("Failed to find DEF");
let def = vst.get_definition(7).expect("Failed to parse DEF").expect("Failed to find DEF");
assert_eq!(def.hash, 0x1570B62);
assert_eq!(def.flags, 0);
assert!(def.hidden);
@ -1416,10 +1246,7 @@ fn sysv_hash_table() {
// Verify that we got the same symbol from the hash table we expected
assert_eq!(sym_idx, 2);
assert_eq!(strtab.get(sym.st_name as usize).unwrap(), "memset");
assert_eq!(
sym,
symtab.get(sym_idx).expect("Failed to get expected sym")
);
assert_eq!(sym, symtab.get(sym_idx).expect("Failed to get expected sym"));
}
#[test]
@ -1445,10 +1272,7 @@ fn gnu_hash_table() {
// Verify that we got the same symbol from the hash table we expected
assert_eq!(sym_idx, 9);
assert_eq!(strtab.get(sym.st_name as usize).unwrap(), "use_memset");
assert_eq!(
sym,
symtab.get(sym_idx).expect("Failed to get expected sym")
);
assert_eq!(sym, symtab.get(sym_idx).expect("Failed to get expected sym"));
}
}


@ -54,23 +54,17 @@ fn parse_section_headers<E: EndianParse, S: Read + Seek>(
let shoff: usize = ehdr.e_shoff.try_into()?;
let mut shnum = ehdr.e_shnum as usize;
if shnum == 0 {
let end = shoff
.checked_add(entsize)
.ok_or(ParseError::IntegerOverflow)?;
let end = shoff.checked_add(entsize).ok_or(ParseError::IntegerOverflow)?;
let mut offset = 0;
let data = reader.read_bytes(shoff, end)?;
let shdr0 = SectionHeader::parse_at(ehdr.endianness, ehdr.class, &mut offset, data)?;
shnum = shdr0.sh_size.try_into()?;
}
let size = entsize
.checked_mul(shnum)
.ok_or(ParseError::IntegerOverflow)?;
let size = entsize.checked_mul(shnum).ok_or(ParseError::IntegerOverflow)?;
let end = shoff.checked_add(size).ok_or(ParseError::IntegerOverflow)?;
let buf = reader.read_bytes(shoff, end)?;
let shdr_vec = SectionHeaderTable::new(ehdr.endianness, ehdr.class, buf)
.iter()
.collect();
let shdr_vec = SectionHeaderTable::new(ehdr.endianness, ehdr.class, buf).iter().collect();
Ok(shdr_vec)
}
@ -102,14 +96,10 @@ fn parse_program_headers<E: EndianParse, S: Read + Seek>(
let entsize = ProgramHeader::validate_entsize(ehdr.class, ehdr.e_phentsize as usize)?;
let phoff: usize = ehdr.e_phoff.try_into()?;
let size = entsize
.checked_mul(phnum)
.ok_or(ParseError::IntegerOverflow)?;
let size = entsize.checked_mul(phnum).ok_or(ParseError::IntegerOverflow)?;
let end = phoff.checked_add(size).ok_or(ParseError::IntegerOverflow)?;
let buf = reader.read_bytes(phoff, end)?;
let phdrs_vec = SegmentTable::new(ehdr.endianness, ehdr.class, buf)
.iter()
.collect();
let phdrs_vec = SegmentTable::new(ehdr.endianness, ehdr.class, buf).iter().collect();
Ok(phdrs_vec)
}
@ -139,12 +129,7 @@ pub fn open_stream(reader: S) -> Result<ElfStream<E, S>, ParseError> {
// around their backing data anymore.
cr.clear_cache();
Ok(ElfStream {
ehdr,
shdrs,
phdrs,
reader: cr,
})
Ok(ElfStream { ehdr, shdrs, phdrs, reader: cr })
}
/// Get the parsed section headers table
@ -194,10 +179,7 @@ pub fn section_headers_with_strtab(
}
// We have a strtab, so wrap it in a zero-copy StringTable
let strtab = self
.shdrs
.get(shstrndx)
.ok_or(ParseError::BadOffset(shstrndx as u64))?;
let strtab = self.shdrs.get(shstrndx).ok_or(ParseError::BadOffset(shstrndx as u64))?;
let (strtab_start, strtab_end) = strtab.get_data_range()?;
let strtab_buf = self.reader.read_bytes(strtab_start, strtab_end)?;
let strtab = StringTable::new(strtab_buf);
@ -297,10 +279,9 @@ pub fn section_data(
&mut offset,
buf,
)?;
let compressed_buf = buf.get(offset..).ok_or(ParseError::SliceReadError((
offset,
shdr.sh_size.try_into()?,
)))?;
let compressed_buf = buf
.get(offset..)
.ok_or(ParseError::SliceReadError((offset, shdr.sh_size.try_into()?)))?;
Ok((compressed_buf, Some(chdr)))
}
}
@ -317,10 +298,7 @@ pub fn section_data_as_strtab(
shdr: &SectionHeader,
) -> Result<StringTable<'_>, ParseError> {
if shdr.sh_type != abi::SHT_STRTAB {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_STRTAB,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_STRTAB)));
}
let (start, end) = shdr.get_data_range()?;
@ -389,33 +367,17 @@ pub fn dynamic_symbol_table(
pub fn dynamic(&mut self) -> Result<Option<DynamicTable<'_, E>>, ParseError> {
// If we have section headers, then look it up there
if !self.shdrs.is_empty() {
if let Some(shdr) = self
.shdrs
.iter()
.find(|shdr| shdr.sh_type == abi::SHT_DYNAMIC)
{
if let Some(shdr) = self.shdrs.iter().find(|shdr| shdr.sh_type == abi::SHT_DYNAMIC) {
let (start, end) = shdr.get_data_range()?;
let buf = self.reader.read_bytes(start, end)?;
return Ok(Some(DynamicTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
)));
return Ok(Some(DynamicTable::new(self.ehdr.endianness, self.ehdr.class, buf)));
}
// Otherwise, look up the PT_DYNAMIC segment (if any)
} else if !self.phdrs.is_empty() {
if let Some(phdr) = self
.phdrs
.iter()
.find(|phdr| phdr.p_type == abi::PT_DYNAMIC)
{
if let Some(phdr) = self.phdrs.iter().find(|phdr| phdr.p_type == abi::PT_DYNAMIC) {
let (start, end) = phdr.get_file_data_range()?;
let buf = self.reader.read_bytes(start, end)?;
return Ok(Some(DynamicTable::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
)));
return Ok(Some(DynamicTable::new(self.ehdr.endianness, self.ehdr.class, buf)));
}
}
Ok(None)
@ -561,11 +523,7 @@ pub fn symbol_version_table(
);
// whew, we're done here!
Ok(Some(SymbolVersionTable::new(
version_ids,
verneeds,
verdefs,
)))
Ok(Some(SymbolVersionTable::new(version_ids, verneeds, verdefs)))
}
/// Read the section data for the given
@ -580,10 +538,7 @@ pub fn section_data_as_rels(
shdr: &SectionHeader,
) -> Result<RelIterator<'_, E>, ParseError> {
if shdr.sh_type != abi::SHT_REL {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_REL,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_REL)));
}
let (start, end) = shdr.get_data_range()?;
@ -603,19 +558,12 @@ pub fn section_data_as_relas(
shdr: &SectionHeader,
) -> Result<RelaIterator<'_, E>, ParseError> {
if shdr.sh_type != abi::SHT_RELA {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_RELA,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_RELA)));
}
let (start, end) = shdr.get_data_range()?;
let buf = self.reader.read_bytes(start, end)?;
Ok(RelaIterator::new(
self.ehdr.endianness,
self.ehdr.class,
buf,
))
Ok(RelaIterator::new(self.ehdr.endianness, self.ehdr.class, buf))
}
/// Read the section data for the given
@ -630,10 +578,7 @@ pub fn section_data_as_notes(
shdr: &SectionHeader,
) -> Result<NoteIterator<'_, E>, ParseError> {
if shdr.sh_type != abi::SHT_NOTE {
return Err(ParseError::UnexpectedSectionType((
shdr.sh_type,
abi::SHT_NOTE,
)));
return Err(ParseError::UnexpectedSectionType((shdr.sh_type, abi::SHT_NOTE)));
}
let (start, end) = shdr.get_data_range()?;
@ -658,20 +603,12 @@ pub fn segment_data_as_notes(
phdr: &ProgramHeader,
) -> Result<NoteIterator<'_, E>, ParseError> {
if phdr.p_type != abi::PT_NOTE {
return Err(ParseError::UnexpectedSegmentType((
phdr.p_type,
abi::PT_NOTE,
)));
return Err(ParseError::UnexpectedSegmentType((phdr.p_type, abi::PT_NOTE)));
}
let (start, end) = phdr.get_file_data_range()?;
let buf = self.reader.read_bytes(start, end)?;
Ok(NoteIterator::new(
self.ehdr.endianness,
self.ehdr.class,
phdr.p_align as usize,
buf,
))
Ok(NoteIterator::new(self.ehdr.endianness, self.ehdr.class, phdr.p_align as usize, buf))
}
}
@ -754,15 +691,11 @@ fn section_headers_with_strtab() {
let io = std::fs::File::open(path).expect("Could not open file.");
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let (shdrs, strtab) = file
.section_headers_with_strtab()
.expect("Failed to get shdrs");
let (shdrs, strtab) = file.section_headers_with_strtab().expect("Failed to get shdrs");
let (shdrs, strtab) = (shdrs, strtab.unwrap());
let shdr_4 = &shdrs[4];
let name = strtab
.get(shdr_4.sh_name as usize)
.expect("Failed to get section name");
let name = strtab.get(shdr_4.sh_name as usize).expect("Failed to get section name");
assert_eq!(name, ".gnu.hash");
assert_eq!(shdr_4.sh_type, abi::SHT_GNU_HASH);
@ -774,18 +707,14 @@ fn shnum_and_shstrndx_in_shdr0() {
let io = std::fs::File::open(path).expect("Could not open file.");
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let (shdrs, strtab) = file
.section_headers_with_strtab()
.expect("shdrs should be parsable");
let (shdrs, strtab) = file.section_headers_with_strtab().expect("shdrs should be parsable");
let (shdrs, strtab) = (shdrs, strtab.unwrap());
let shdrs_len = shdrs.len();
assert_eq!(shdrs_len, 0xFF15);
let shdr = shdrs.get(shdrs_len - 1).unwrap();
let name = strtab
.get(shdr.sh_name as usize)
.expect("Failed to get section name");
let name = strtab.get(shdr.sh_name as usize).expect("Failed to get section name");
assert_eq!(name, ".shstrtab");
assert_eq!(shdr.sh_type, abi::SHT_STRTAB);
@ -804,9 +733,8 @@ fn section_header_by_name() {
assert_eq!(shdr.sh_type, abi::SHT_GNU_HASH);
let shdr = file
.section_header_by_name(".not.found")
.expect("section table should be parseable");
let shdr =
file.section_header_by_name(".not.found").expect("section table should be parseable");
assert_eq!(shdr, None);
}
@ -819,9 +747,7 @@ fn section_data_for_nobits() {
let shdr = file.section_headers()[26];
assert_eq!(shdr.sh_type, abi::SHT_NOBITS);
let (data, chdr) = file
.section_data(&shdr)
.expect("Failed to get section data");
let (data, chdr) = file.section_data(&shdr).expect("Failed to get section data");
assert_eq!(chdr, None);
assert_eq!(data, &[]);
}
@ -834,9 +760,7 @@ fn section_data() {
let shdr = file.section_headers()[7];
assert_eq!(shdr.sh_type, abi::SHT_GNU_VERSYM);
let (data, chdr) = file
.section_data(&shdr)
.expect("Failed to get section data");
let (data, chdr) = file.section_data(&shdr).expect("Failed to get section data");
assert_eq!(chdr, None);
assert_eq!(data, [0, 0, 2, 0, 2, 0, 0, 0]);
}
@ -848,13 +772,8 @@ fn section_data_as_strtab() {
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let shdr = file.section_headers()[file.ehdr.e_shstrndx as usize];
let strtab = file
.section_data_as_strtab(&shdr)
.expect("Failed to read strtab");
assert_eq!(
strtab.get(1).expect("Failed to get strtab entry"),
".symtab"
);
let strtab = file.section_data_as_strtab(&shdr).expect("Failed to read strtab");
assert_eq!(strtab.get(1).expect("Failed to get strtab entry"), ".symtab");
}
#[test]
@ -923,9 +842,7 @@ fn symbol_table() {
}
);
assert_eq!(
strtab
.get(symbol.st_name as usize)
.expect("Failed to get name from strtab"),
strtab.get(symbol.st_name as usize).expect("Failed to get name from strtab"),
"__JCR_LIST__"
);
}
@ -943,19 +860,10 @@ fn dynamic_symbol_table() {
let symbol = symtab.get(1).expect("Failed to get symbol");
assert_eq!(
symbol,
Symbol {
st_name: 11,
st_value: 0,
st_size: 0,
st_shndx: 0,
st_info: 18,
st_other: 0,
}
Symbol { st_name: 11, st_value: 0, st_size: 0, st_shndx: 0, st_info: 18, st_other: 0 }
);
assert_eq!(
strtab
.get(symbol.st_name as usize)
.expect("Failed to get name from strtab"),
strtab.get(symbol.st_name as usize).expect("Failed to get name from strtab"),
"memset"
);
}
@ -973,17 +881,11 @@ fn dynamic() {
.iter();
assert_eq!(
dynamic.next().expect("Failed to get dyn entry"),
Dyn {
d_tag: abi::DT_NEEDED,
d_un: 1
}
Dyn { d_tag: abi::DT_NEEDED, d_un: 1 }
);
assert_eq!(
dynamic.next().expect("Failed to get dyn entry"),
Dyn {
d_tag: abi::DT_INIT,
d_un: 4195216
}
Dyn { d_tag: abi::DT_INIT, d_un: 4195216 }
);
}
@ -994,8 +896,7 @@ fn section_data_as_rels() {
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let shdr = file.section_headers()[10];
file.section_data_as_rels(&shdr)
.expect_err("Expected error parsing non-REL scn as RELs");
file.section_data_as_rels(&shdr).expect_err("Expected error parsing non-REL scn as RELs");
}
#[test]
@ -1005,26 +906,14 @@ fn section_data_as_relas() {
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let shdr = file.section_headers()[10];
let mut relas = file
.section_data_as_relas(&shdr)
.expect("Failed to read relas section");
let mut relas = file.section_data_as_relas(&shdr).expect("Failed to read relas section");
assert_eq!(
relas.next().expect("Failed to get rela entry"),
Rela {
r_offset: 6293704,
r_sym: 1,
r_type: 7,
r_addend: 0,
}
Rela { r_offset: 6293704, r_sym: 1, r_type: 7, r_addend: 0 }
);
assert_eq!(
relas.next().expect("Failed to get rela entry"),
Rela {
r_offset: 6293712,
r_sym: 2,
r_type: 7,
r_addend: 0,
}
Rela { r_offset: 6293712, r_sym: 2, r_type: 7, r_addend: 0 }
);
assert!(relas.next().is_none());
}
@ -1036,17 +925,10 @@ fn section_data_as_notes() {
let mut file = ElfStream::<AnyEndian, _>::open_stream(io).expect("Open test1");
let shdr = file.section_headers()[2];
let mut notes = file
.section_data_as_notes(&shdr)
.expect("Failed to read relas section");
let mut notes = file.section_data_as_notes(&shdr).expect("Failed to read relas section");
assert_eq!(
notes.next().expect("Failed to get first note"),
Note::GnuAbiTag(NoteGnuAbiTag {
os: 0,
major: 2,
minor: 6,
subminor: 32
})
Note::GnuAbiTag(NoteGnuAbiTag { os: 0, major: 2, minor: 6, subminor: 32 })
);
assert!(notes.next().is_none());
}
@ -1059,17 +941,11 @@ fn segment_data_as_notes() {
let phdrs = file.segments();
let note_phdr = phdrs[5];
let mut notes = file
.segment_data_as_notes(&note_phdr)
.expect("Failed to read relas section");
let mut notes =
file.segment_data_as_notes(&note_phdr).expect("Failed to read relas section");
assert_eq!(
notes.next().expect("Failed to get first note"),
Note::GnuAbiTag(NoteGnuAbiTag {
os: 0,
major: 2,
minor: 6,
subminor: 32
})
Note::GnuAbiTag(NoteGnuAbiTag { os: 0, major: 2, minor: 6, subminor: 32 })
);
assert_eq!(
notes.next().expect("Failed to get second note"),
@ -1092,10 +968,8 @@ fn symbol_version_table() {
.expect("Failed to parse GNU symbol versions")
.expect("Failed to find GNU symbol versions");
let req = vst
.get_requirement(2)
.expect("Failed to parse NEED")
.expect("Failed to find NEED");
let req =
vst.get_requirement(2).expect("Failed to parse NEED").expect("Failed to find NEED");
assert_eq!(req.file, "libc.so.6");
assert_eq!(req.name, "GLIBC_2.2.5");
assert_eq!(req.hash, 0x9691A75);
@ -1106,28 +980,20 @@ fn symbol_version_table() {
let req = vst.get_requirement(4).expect("Failed to parse NEED");
assert!(req.is_none());
let req = vst
.get_requirement(5)
.expect("Failed to parse NEED")
.expect("Failed to find NEED");
let req =
vst.get_requirement(5).expect("Failed to parse NEED").expect("Failed to find NEED");
assert_eq!(req.file, "libc.so.6");
assert_eq!(req.name, "GLIBC_2.2.5");
assert_eq!(req.hash, 0x9691A75);
let def = vst
.get_definition(3)
.expect("Failed to parse DEF")
.expect("Failed to find DEF");
let def = vst.get_definition(3).expect("Failed to parse DEF").expect("Failed to find DEF");
assert_eq!(def.hash, 0xC33237F);
assert_eq!(def.flags, 1);
assert!(!def.hidden);
let def_names: Vec<&str> = def.names.map(|res| res.expect("should parse")).collect();
assert_eq!(def_names, &["hello.so"]);
let def = vst
.get_definition(7)
.expect("Failed to parse DEF")
.expect("Failed to find DEF");
let def = vst.get_definition(7).expect("Failed to parse DEF").expect("Failed to find DEF");
assert_eq!(def.hash, 0x1570B62);
assert_eq!(def.flags, 0);
assert!(def.hidden);
@ -1149,9 +1015,7 @@ fn sysv_hash_table() {
// We don't have a file interface for getting the SysV hash section yet, so clone the section bytes
// So we can use them to back a SysVHashTable
let (data, _) = file
.section_data(&hash_shdr)
.expect("Failed to get hash section data");
let (data, _) = file.section_data(&hash_shdr).expect("Failed to get hash section data");
let data_copy: Vec<u8> = data.into();
let hash_table =
SysVHashTable::new(file.ehdr.endianness, file.ehdr.class, data_copy.as_ref())
@ -1180,10 +1044,7 @@ fn sysv_hash_table() {
// Verify that we got the same symbol from the hash table we expected
assert_eq!(sym_idx, 2);
assert_eq!(strtab.get(sym.st_name as usize).unwrap(), "memset");
assert_eq!(
sym,
symtab.get(sym_idx).expect("Failed to get expected sym")
);
assert_eq!(sym, symtab.get(sym_idx).expect("Failed to get expected sym"));
}
}


@ -26,14 +26,10 @@ macro_rules! safe_from {
( $self:ident, $typ:ty, $off:ident, $data:ident) => {{
const SIZE: usize = core::mem::size_of::<$typ>();
let end = (*$off)
.checked_add(SIZE)
.ok_or(ParseError::IntegerOverflow)?;
let end = (*$off).checked_add(SIZE).ok_or(ParseError::IntegerOverflow)?;
let buf: [u8; SIZE] = $data
.get(*$off..end)
.ok_or(ParseError::SliceReadError((*$off, end)))?
.try_into()?;
let buf: [u8; SIZE] =
$data.get(*$off..end).ok_or(ParseError::SliceReadError((*$off, end)))?.try_into()?;
*$off = end;
@ -199,9 +195,7 @@ mod tests {
macro_rules! parse_test {
( $endian:expr, $res_typ:ty, $method:ident, $expect:expr) => {{
let bytes = [
0x01u8, 0x02u8, 0x03u8, 0x04u8, 0x05u8, 0x06u8, 0x07u8, 0x08u8,
];
let bytes = [0x01u8, 0x02u8, 0x03u8, 0x04u8, 0x05u8, 0x06u8, 0x07u8, 0x08u8];
let mut offset = 0;
let result = $endian.$method(&mut offset, &bytes).unwrap();
assert_eq!(result, $expect);
@ -211,16 +205,13 @@ macro_rules! parse_test {
macro_rules! fuzz_too_short_test {
( $endian:expr, $res_typ:ty, $method:ident) => {{
let bytes = [
0x01u8, 0x02u8, 0x03u8, 0x04u8, 0x05u8, 0x06u8, 0x07u8, 0x08u8,
];
let bytes = [0x01u8, 0x02u8, 0x03u8, 0x04u8, 0x05u8, 0x06u8, 0x07u8, 0x08u8];
let size = core::mem::size_of::<$res_typ>();
for n in 0..size {
let buf = bytes.split_at(n).0.as_ref();
let mut offset: usize = 0;
let error = $endian
.$method(&mut offset, buf)
.expect_err("Expected an error, but parsed: ");
let error =
$endian.$method(&mut offset, buf).expect_err("Expected an error, but parsed: ");
assert!(
matches!(error, ParseError::SliceReadError(_)),
"Unexpected Error type found: {error}"


@ -125,18 +125,13 @@ fn verify_ident(buf: &[u8]) -> Result<(), ParseError> {
// Verify the magic number
let magic = buf.split_at(abi::EI_CLASS).0;
if magic != abi::ELFMAGIC {
return Err(ParseError::BadMagic([
magic[0], magic[1], magic[2], magic[3],
]));
return Err(ParseError::BadMagic([magic[0], magic[1], magic[2], magic[3]]));
}
// Verify ELF Version
let version = buf[abi::EI_VERSION];
if version != abi::EV_CURRENT {
return Err(ParseError::UnsupportedVersion((
version as u64,
abi::EV_CURRENT as u64,
)));
return Err(ParseError::UnsupportedVersion((version as u64, abi::EV_CURRENT as u64)));
}
Ok(())
@ -157,12 +152,7 @@ pub fn parse_ident<E: EndianParse>(data: &[u8]) -> Result<(E, Class, u8, u8), Pa
// Verify endianness is something we know how to parse
let file_endian = E::from_ei_data(data[abi::EI_DATA])?;
Ok((
file_endian,
class,
data[abi::EI_OSABI],
data[abi::EI_ABIVERSION],
))
Ok((file_endian, class, data[abi::EI_OSABI], data[abi::EI_ABIVERSION]))
}
impl<E: EndianParse> FileHeader<E> {
@ -267,10 +257,7 @@ fn test_verify_ident_invalid_mag0() {
0,
];
let result = verify_ident(data.as_ref()).expect_err("Expected an error");
assert!(
matches!(result, ParseError::BadMagic(_)),
"Unexpected Error type found: {result}"
);
assert!(matches!(result, ParseError::BadMagic(_)), "Unexpected Error type found: {result}");
}
#[test]
@ -294,10 +281,7 @@ fn test_verify_ident_invalid_mag1() {
0,
];
let result = verify_ident(data.as_ref()).expect_err("Expected an error");
assert!(
matches!(result, ParseError::BadMagic(_)),
"Unexpected Error type found: {result}"
);
assert!(matches!(result, ParseError::BadMagic(_)), "Unexpected Error type found: {result}");
}
#[test]
@ -321,10 +305,7 @@ fn test_verify_ident_invalid_mag2() {
0,
];
let result = verify_ident(data.as_ref()).expect_err("Expected an error");
assert!(
matches!(result, ParseError::BadMagic(_)),
"Unexpected Error type found: {result}"
);
assert!(matches!(result, ParseError::BadMagic(_)), "Unexpected Error type found: {result}");
}
#[test]
@ -348,10 +329,7 @@ fn test_verify_ident_invalid_mag3() {
0,
];
let result = verify_ident(data.as_ref()).expect_err("Expected an error");
assert!(
matches!(result, ParseError::BadMagic(_)),
"Unexpected Error type found: {result}"
);
assert!(matches!(result, ParseError::BadMagic(_)), "Unexpected Error type found: {result}");
}
#[allow(deprecated)]


@ -59,11 +59,7 @@ pub fn new(
verneeds: Option<(VerNeedIterator<'data, E>, StringTable<'data>)>,
verdefs: Option<(VerDefIterator<'data, E>, StringTable<'data>)>,
) -> Self {
SymbolVersionTable {
version_ids,
verneeds,
verdefs,
}
SymbolVersionTable { version_ids, verneeds, verdefs }
}
pub fn get_requirement(
@ -130,10 +126,7 @@ pub fn get_definition(
return Ok(Some(SymbolDefinition {
hash,
flags,
names: SymbolNamesIterator {
vda_iter,
strtab: verdef_strs,
},
names: SymbolNamesIterator { vda_iter, strtab: verdef_strs },
hidden,
}));
}
@ -293,13 +286,7 @@ pub fn new(
starting_offset: usize,
data: &'data [u8],
) -> Self {
VerDefIterator {
endian,
class,
count,
data,
offset: starting_offset,
}
VerDefIterator { endian, class, count, data, offset: starting_offset }
}
}
@ -381,13 +368,7 @@ pub fn new(
starting_offset: usize,
data: &'data [u8],
) -> Self {
VerDefAuxIterator {
endian,
class,
count,
data,
offset: starting_offset,
}
VerDefAuxIterator { endian, class, count, data, offset: starting_offset }
}
}
@ -521,13 +502,7 @@ pub fn new(
starting_offset: usize,
data: &'data [u8],
) -> Self {
VerNeedIterator {
endian,
class,
count,
data,
offset: starting_offset,
}
VerNeedIterator { endian, class, count, data, offset: starting_offset }
}
}
@ -618,13 +593,7 @@ pub fn new(
starting_offset: usize,
data: &'data [u8],
) -> Self {
VerNeedAuxIterator {
endian,
class,
count,
data,
offset: starting_offset,
}
VerNeedAuxIterator { endian, class, count, data, offset: starting_offset }
}
}
@ -921,10 +890,7 @@ fn verdef_iter() {
vd_aux: 20,
vd_next: 28,
},
vec![VerDefAux {
vda_name: 0x1,
vda_next: 0
}]
vec![VerDefAux { vda_name: 0x1, vda_next: 0 }]
),
(
VerDef {
@ -935,10 +901,7 @@ fn verdef_iter() {
vd_aux: 20,
vd_next: 28,
},
vec![VerDefAux {
vda_name: 0xC,
vda_next: 0
}]
vec![VerDefAux { vda_name: 0xC, vda_next: 0 }]
),
(
VerDef {
@ -950,14 +913,8 @@ fn verdef_iter() {
vd_next: 36,
},
vec![
VerDefAux {
vda_name: 0x17,
vda_next: 8
},
VerDefAux {
vda_name: 0xC,
vda_next: 0
}
VerDefAux { vda_name: 0x17, vda_next: 8 },
VerDefAux { vda_name: 0xC, vda_next: 0 }
]
),
(
@ -970,14 +927,8 @@ fn verdef_iter() {
vd_next: 0,
},
vec![
VerDefAux {
vda_name: 0xC,
vda_next: 8
},
VerDefAux {
vda_name: 0x17,
vda_next: 0
}
VerDefAux { vda_name: 0xC, vda_next: 8 },
VerDefAux { vda_name: 0x17, vda_next: 0 }
]
),
]
@ -1000,13 +951,7 @@ fn verdefaux_iter_one_entry() {
let mut iter =
VerDefAuxIterator::new(LittleEndian, Class::ELF64, 1, 0x14, &GNU_VERDEF_DATA);
let aux1 = iter.next().expect("Failed to parse");
assert_eq!(
aux1,
VerDefAux {
vda_name: 0x01,
vda_next: 0
}
);
assert_eq!(aux1, VerDefAux { vda_name: 0x01, vda_next: 0 });
assert!(iter.next().is_none());
}
@ -1015,21 +960,9 @@ fn verdefaux_iter_multiple_entries() {
let mut iter =
VerDefAuxIterator::new(LittleEndian, Class::ELF64, 2, 0x4C, &GNU_VERDEF_DATA);
let aux1 = iter.next().expect("Failed to parse");
assert_eq!(
aux1,
VerDefAux {
vda_name: 0x17,
vda_next: 8
}
);
assert_eq!(aux1, VerDefAux { vda_name: 0x17, vda_next: 8 });
let aux1 = iter.next().expect("Failed to parse");
assert_eq!(
aux1,
VerDefAux {
vda_name: 0xC,
vda_next: 0
}
);
assert_eq!(aux1, VerDefAux { vda_name: 0xC, vda_next: 0 });
assert!(iter.next().is_none());
}
@ -1048,37 +981,13 @@ fn verdefaux_iter_two_lists_interspersed() {
let mut iter2 = VerDefAuxIterator::new(LittleEndian, Class::ELF64, 2, 8, &data);
let aux1_1 = iter1.next().expect("Failed to parse");
assert_eq!(
aux1_1,
VerDefAux {
vda_name: 0x0001,
vda_next: 0x10,
}
);
assert_eq!(aux1_1, VerDefAux { vda_name: 0x0001, vda_next: 0x10 });
let aux2_1 = iter2.next().expect("Failed to parse");
assert_eq!(
aux2_1,
VerDefAux {
vda_name: 0x00A1,
vda_next: 0x10,
}
);
assert_eq!(aux2_1, VerDefAux { vda_name: 0x00A1, vda_next: 0x10 });
let aux1_2 = iter1.next().expect("Failed to parse");
assert_eq!(
aux1_2,
VerDefAux {
vda_name: 0x0002,
vda_next: 0,
}
);
assert_eq!(aux1_2, VerDefAux { vda_name: 0x0002, vda_next: 0 });
let aux2_2 = iter2.next().expect("Failed to parse");
assert_eq!(
aux2_2,
VerDefAux {
vda_name: 0x00A2,
vda_next: 0,
}
);
assert_eq!(aux2_2, VerDefAux { vda_name: 0x00A2, vda_next: 0 });
assert!(iter1.next().is_none());
assert!(iter2.next().is_none());
}
@ -1114,10 +1023,7 @@ fn version_table() {
.expect("Failed to find def");
assert_eq!(def1.hash, 0x088f2f70);
assert_eq!(def1.flags, 0);
let def1_names: Vec<&str> = def1
.names
.map(|res| res.expect("Failed to parse"))
.collect();
let def1_names: Vec<&str> = def1.names.map(|res| res.expect("Failed to parse")).collect();
assert_eq!(def1_names, ["LIBCTF_1.1"]);
assert!(!def1.hidden);
@ -1127,10 +1033,7 @@ fn version_table() {
.expect("Failed to find def");
assert_eq!(def2.hash, 0x088f2f71);
assert_eq!(def2.flags, 0);
let def2_names: Vec<&str> = def2
.names
.map(|res| res.expect("Failed to parse"))
.collect();
let def2_names: Vec<&str> = def2.names.map(|res| res.expect("Failed to parse")).collect();
assert_eq!(def2_names, ["LIBCTF_1.2", "LIBCTF_1.1"]);
assert!(!def2.hidden);
@ -1329,10 +1232,7 @@ fn parse_verdefaux32_lsb() {
test_parse_for(
LittleEndian,
Class::ELF32,
VerDefAux {
vda_name: 0x03020100,
vda_next: 0x07060504,
},
VerDefAux { vda_name: 0x03020100, vda_next: 0x07060504 },
);
}
@ -1341,10 +1241,7 @@ fn parse_verdefaux32_msb() {
test_parse_for(
BigEndian,
Class::ELF32,
VerDefAux {
vda_name: 0x00010203,
vda_next: 0x04050607,
},
VerDefAux { vda_name: 0x00010203, vda_next: 0x04050607 },
);
}
@ -1353,10 +1250,7 @@ fn parse_verdefaux64_lsb() {
test_parse_for(
LittleEndian,
Class::ELF64,
VerDefAux {
vda_name: 0x03020100,
vda_next: 0x07060504,
},
VerDefAux { vda_name: 0x03020100, vda_next: 0x07060504 },
);
}
@ -1365,10 +1259,7 @@ fn parse_verdefaux64_msb() {
test_parse_for(
BigEndian,
Class::ELF64,
VerDefAux {
vda_name: 0x00010203,
vda_next: 0x04050607,
},
VerDefAux { vda_name: 0x00010203, vda_next: 0x04050607 },
);
}


@ -78,9 +78,7 @@ pub fn new(endian: E, class: Class, data: &'data [u8]) -> Result<Self, ParseErro
let buckets_size = size_of::<u32>()
.checked_mul(hdr.nbucket.try_into()?)
.ok_or(ParseError::IntegerOverflow)?;
let buckets_end = offset
.checked_add(buckets_size)
.ok_or(ParseError::IntegerOverflow)?;
let buckets_end = offset.checked_add(buckets_size).ok_or(ParseError::IntegerOverflow)?;
let buckets_buf = data.get_bytes(offset..buckets_end)?;
let buckets = U32Table::new(endian, class, buckets_buf);
offset = buckets_end;
@ -88,9 +86,7 @@ pub fn new(endian: E, class: Class, data: &'data [u8]) -> Result<Self, ParseErro
let chains_size = size_of::<u32>()
.checked_mul(hdr.nchain.try_into()?)
.ok_or(ParseError::IntegerOverflow)?;
let chains_end = offset
.checked_add(chains_size)
.ok_or(ParseError::IntegerOverflow)?;
let chains_end = offset.checked_add(chains_size).ok_or(ParseError::IntegerOverflow)?;
let chains_buf = data.get_bytes(offset..chains_end)?;
let chains = U32Table::new(endian, class, chains_buf);
@ -210,43 +206,31 @@ pub fn new(endian: E, class: Class, data: &'data [u8]) -> Result<Self, ParseErro
// length of the bloom filter in bytes. ELF32 is [u32; nbloom], ELF64 is [u64; nbloom].
let nbloom: usize = hdr.nbloom as usize;
let bloom_size = match class {
Class::ELF32 => nbloom
.checked_mul(size_of::<u32>())
.ok_or(ParseError::IntegerOverflow)?,
Class::ELF64 => nbloom
.checked_mul(size_of::<u64>())
.ok_or(ParseError::IntegerOverflow)?,
Class::ELF32 => {
nbloom.checked_mul(size_of::<u32>()).ok_or(ParseError::IntegerOverflow)?
}
Class::ELF64 => {
nbloom.checked_mul(size_of::<u64>()).ok_or(ParseError::IntegerOverflow)?
}
};
let bloom_end = offset
.checked_add(bloom_size)
.ok_or(ParseError::IntegerOverflow)?;
let bloom_end = offset.checked_add(bloom_size).ok_or(ParseError::IntegerOverflow)?;
let bloom_buf = data.get_bytes(offset..bloom_end)?;
offset = bloom_end;
let buckets_size = size_of::<u32>()
.checked_mul(hdr.nbucket.try_into()?)
.ok_or(ParseError::IntegerOverflow)?;
let buckets_end = offset
.checked_add(buckets_size)
.ok_or(ParseError::IntegerOverflow)?;
let buckets_end = offset.checked_add(buckets_size).ok_or(ParseError::IntegerOverflow)?;
let buckets_buf = data.get_bytes(offset..buckets_end)?;
let buckets = U32Table::new(endian, class, buckets_buf);
offset = buckets_end;
// the rest of the section is the chains
let chains_buf = data
.get(offset..)
.ok_or(ParseError::SliceReadError((offset, data.len())))?;
let chains_buf =
data.get(offset..).ok_or(ParseError::SliceReadError((offset, data.len())))?;
let chains = U32Table::new(endian, class, chains_buf);
Ok(GnuHashTable {
hdr,
endian,
class,
bloom: bloom_buf,
buckets,
chains,
})
Ok(GnuHashTable { hdr, endian, class, bloom: bloom_buf, buckets, chains })
}
/// Use the hash table to find the symbol table entry with the given name.
@ -284,9 +268,7 @@ pub fn find(
if filter & (1 << (hash % bloom_width)) == 0 {
return Ok(None);
}
let hash2 = hash
.checked_shr(self.hdr.nshift)
.ok_or(ParseError::IntegerOverflow)?;
let hash2 = hash.checked_shr(self.hdr.nshift).ok_or(ParseError::IntegerOverflow)?;
if filter & (1 << (hash2 % bloom_width)) == 0 {
return Ok(None);
}
@ -306,9 +288,8 @@ pub fn find(
if hash | 1 == chain_hash | 1 {
// we have a hash match!
// let's see if this symtab[sym_idx].name is what we're looking for
let sym_idx = chain_idx
.checked_add(table_start_idx)
.ok_or(ParseError::IntegerOverflow)?;
let sym_idx =
chain_idx.checked_add(table_start_idx).ok_or(ParseError::IntegerOverflow)?;
let symbol = symtab.get(sym_idx)?;
let r_sym_name = strtab.get_raw(symbol.st_name as usize)?;
@ -338,10 +319,7 @@ fn parse_sysvhdr32_lsb() {
test_parse_for(
LittleEndian,
Class::ELF32,
SysVHashHeader {
nbucket: 0x03020100,
nchain: 0x07060504,
},
SysVHashHeader { nbucket: 0x03020100, nchain: 0x07060504 },
);
}
@ -350,10 +328,7 @@ fn parse_sysvhdr32_msb() {
test_parse_for(
BigEndian,
Class::ELF32,
SysVHashHeader {
nbucket: 0x00010203,
nchain: 0x04050607,
},
SysVHashHeader { nbucket: 0x00010203, nchain: 0x04050607 },
);
}
@ -362,10 +337,7 @@ fn parse_sysvhdr64_lsb() {
test_parse_for(
LittleEndian,
Class::ELF64,
SysVHashHeader {
nbucket: 0x03020100,
nchain: 0x07060504,
},
SysVHashHeader { nbucket: 0x03020100, nchain: 0x07060504 },
);
}
@ -374,10 +346,7 @@ fn parse_sysvhdr64_msb() {
test_parse_for(
BigEndian,
Class::ELF64,
SysVHashHeader {
nbucket: 0x00010203,
nchain: 0x04050607,
},
SysVHashHeader { nbucket: 0x00010203, nchain: 0x04050607 },
);
}


@ -69,16 +69,14 @@ fn parse_at<E: EndianParse>(
let name_start = *offset;
let name_buf_size: usize = nhdr.n_namesz.saturating_sub(1).try_into()?;
let name_buf_end = name_start
.checked_add(name_buf_size)
.ok_or(ParseError::IntegerOverflow)?;
let name_buf_end =
name_start.checked_add(name_buf_size).ok_or(ParseError::IntegerOverflow)?;
let name_buf = data.get_bytes(name_start..name_buf_end)?;
let name = from_utf8(name_buf)?;
// move forward for entire namesz, including the NUL byte we left out of our str
*offset = (*offset)
.checked_add(nhdr.n_namesz.try_into()?)
.ok_or(ParseError::IntegerOverflow)?;
*offset =
(*offset).checked_add(nhdr.n_namesz.try_into()?).ok_or(ParseError::IntegerOverflow)?;
// skip over padding if needed to get back to 4-byte alignment
if *offset % align > 0 {
@ -89,9 +87,7 @@ fn parse_at<E: EndianParse>(
let desc_start = *offset;
let desc_size: usize = nhdr.n_descsz.try_into()?;
let desc_end = desc_start
.checked_add(desc_size)
.ok_or(ParseError::IntegerOverflow)?;
let desc_end = desc_start.checked_add(desc_size).ok_or(ParseError::IntegerOverflow)?;
let raw_desc = data.get_bytes(desc_start..desc_end)?;
*offset = desc_end;
@ -115,17 +111,9 @@ fn parse_at<E: EndianParse>(
)?))
}
abi::NT_GNU_BUILD_ID => Ok(Note::GnuBuildId(NoteGnuBuildId(raw_desc))),
_ => Ok(Note::Unknown(NoteAny {
n_type: nhdr.n_type,
name,
desc: raw_desc,
})),
_ => Ok(Note::Unknown(NoteAny { n_type: nhdr.n_type, name, desc: raw_desc })),
},
_ => Ok(Note::Unknown(NoteAny {
n_type: nhdr.n_type,
name,
desc: raw_desc,
})),
_ => Ok(Note::Unknown(NoteAny { n_type: nhdr.n_type, name, desc: raw_desc })),
}
}
}
@ -192,13 +180,7 @@ pub struct NoteIterator<'data, E: EndianParse> {
impl<'data, E: EndianParse> NoteIterator<'data, E> {
pub fn new(endian: E, class: Class, align: usize, data: &'data [u8]) -> Self {
NoteIterator {
endian,
class,
align,
data,
offset: 0,
}
NoteIterator { endian, class, align, data, offset: 0 }
}
}
@ -209,14 +191,7 @@ fn next(&mut self) -> Option<Self::Item> {
return None;
}
Note::parse_at(
self.endian,
self.class,
self.align,
&mut self.offset,
self.data,
)
.ok()
Note::parse_at(self.endian, self.class, self.align, &mut self.offset, self.data).ok()
}
}
@ -377,11 +352,7 @@ fn parse_note_with_8_byte_alignment_unaligned_namesz() {
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 0x42,
name: &"GNUU",
desc: &[0x01, 0x02],
})
Note::Unknown(NoteAny { n_type: 0x42, name: &"GNUU", desc: &[0x01, 0x02] })
);
assert_eq!(offset, 32);
}
@ -418,14 +389,7 @@ fn parse_note_32_lsb() {
let mut offset = 0;
let note = Note::parse_at(LittleEndian, Class::ELF32, 4, &mut offset, &data)
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 6,
name: "",
desc: &[0x20, 0x0],
})
);
assert_eq!(note, Note::Unknown(NoteAny { n_type: 6, name: "", desc: &[0x20, 0x0] }));
assert_eq!(offset, 16);
}
@ -444,11 +408,7 @@ fn parse_note_32_lsb_with_name_padding() {
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 1,
name: "GN",
desc: &[0x01, 0x02, 0x03, 0x04],
})
Note::Unknown(NoteAny { n_type: 1, name: "GN", desc: &[0x01, 0x02, 0x03, 0x04] })
);
assert_eq!(offset, 20);
}
@ -468,11 +428,7 @@ fn parse_note_32_lsb_with_desc_padding() {
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 0x42,
name: abi::ELF_NOTE_GNU,
desc: &[0x01, 0x02],
})
Note::Unknown(NoteAny { n_type: 0x42, name: abi::ELF_NOTE_GNU, desc: &[0x01, 0x02] })
);
assert_eq!(offset, 20);
}
@ -489,14 +445,7 @@ fn parse_note_32_lsb_with_no_name() {
let mut offset = 0;
let note = Note::parse_at(LittleEndian, Class::ELF32, 4, &mut offset, &data)
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 0x42,
name: "",
desc: &[0x20, 0x0],
})
);
assert_eq!(note, Note::Unknown(NoteAny { n_type: 0x42, name: "", desc: &[0x20, 0x0] }));
assert_eq!(offset, 16);
}
@ -514,11 +463,7 @@ fn parse_note_32_lsb_with_no_desc() {
.expect("Failed to parse");
assert_eq!(
note,
Note::Unknown(NoteAny {
n_type: 0x42,
name: abi::ELF_NOTE_GNU,
desc: &[],
})
Note::Unknown(NoteAny { n_type: 0x42, name: abi::ELF_NOTE_GNU, desc: &[] })
);
assert_eq!(offset, 16);
}
@ -530,11 +475,7 @@ fn parse_nhdr32_lsb() {
test_parse_for(
LittleEndian,
Class::ELF32,
NoteHeader {
n_namesz: 0x03020100,
n_descsz: 0x07060504,
n_type: 0x0B0A0908,
},
NoteHeader { n_namesz: 0x03020100, n_descsz: 0x07060504, n_type: 0x0B0A0908 },
);
}
@ -543,11 +484,7 @@ fn parse_nhdr32_msb() {
test_parse_for(
BigEndian,
Class::ELF32,
NoteHeader {
n_namesz: 0x00010203,
n_descsz: 0x04050607,
n_type: 0x08090A0B,
},
NoteHeader { n_namesz: 0x00010203, n_descsz: 0x04050607, n_type: 0x08090A0B },
);
}


@ -119,43 +119,25 @@ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "Unsupported ELF Endianness: {endianness}")
}
ParseError::UnsupportedVersion((found, expected)) => {
write!(
f,
"Unsupported ELF Version field found: {found} expected: {expected}"
)
write!(f, "Unsupported ELF Version field found: {found} expected: {expected}")
}
ParseError::BadOffset(offset) => {
write!(f, "Bad offset: {offset:#X}")
}
ParseError::StringTableMissingNul(offset) => {
write!(
f,
"Could not find terminating NUL byte starting at offset: {offset:#X}"
)
write!(f, "Could not find terminating NUL byte starting at offset: {offset:#X}")
}
ParseError::BadEntsize((found, expected)) => {
write!(
f,
"Invalid entsize. Expected: {expected:#X}, Found: {found:#X}"
)
write!(f, "Invalid entsize. Expected: {expected:#X}, Found: {found:#X}")
}
ParseError::UnexpectedSectionType((found, expected)) => {
write!(
f,
"Could not interpret section of type {found} as type {expected}"
)
write!(f, "Could not interpret section of type {found} as type {expected}")
}
ParseError::UnexpectedSegmentType((found, expected)) => {
write!(
f,
"Could not interpret section of type {found} as type {expected}"
)
write!(f, "Could not interpret section of type {found} as type {expected}")
}
ParseError::UnexpectedAlignment(align) => {
write!(
f,
"Could not interpret section with unexpected alignment of {align}"
)
write!(f, "Could not interpret section with unexpected alignment of {align}")
}
ParseError::SliceReadError((start, end)) => {
write!(f, "Could not read bytes in range [{start:#X}, {end:#X})")
@ -243,13 +225,7 @@ pub struct ParsingIterator<'data, E: EndianParse, P: ParseAt> {
impl<'data, E: EndianParse, P: ParseAt> ParsingIterator<'data, E, P> {
pub fn new(endian: E, class: Class, data: &'data [u8]) -> Self {
ParsingIterator {
endian,
class,
data,
offset: 0,
pd: PhantomData,
}
ParsingIterator { endian, class, data, offset: 0, pd: PhantomData }
}
}
@ -277,12 +253,7 @@ pub struct ParsingTable<'data, E: EndianParse, P: ParseAt> {
impl<'data, E: EndianParse, P: ParseAt> ParsingTable<'data, E, P> {
pub fn new(endian: E, class: Class, data: &'data [u8]) -> Self {
ParsingTable {
endian,
class,
data,
pd: PhantomData,
}
ParsingTable { endian, class, data, pd: PhantomData }
}
/// Get a lazy-parsing iterator for the table's bytes
@ -307,9 +278,7 @@ pub fn get(&self, index: usize) -> Result<P, ParseError> {
}
let entsize = P::size_for(self.class);
let mut start = index
.checked_mul(entsize)
.ok_or(ParseError::IntegerOverflow)?;
let mut start = index.checked_mul(entsize).ok_or(ParseError::IntegerOverflow)?;
if start > self.data.len() {
return Err(ParseError::BadOffset(index as u64));
}
@ -336,8 +305,7 @@ impl<'data> ReadBytesExt<'data> for &'data [u8] {
fn get_bytes(self, range: Range<usize>) -> Result<&'data [u8], ParseError> {
let start = range.start;
let end = range.end;
self.get(range)
.ok_or(ParseError::SliceReadError((start, end)))
self.get(range).ok_or(ParseError::SliceReadError((start, end)))
}
}
@ -451,10 +419,7 @@ fn test_u32_table_is_empty() {
fn test_u32_table_get_parse_failure() {
let data = vec![0u8, 1];
let table = U32Table::new(LittleEndian, Class::ELF32, data.as_ref());
assert!(matches!(
table.get(0),
Err(ParseError::SliceReadError((0, 4)))
));
assert!(matches!(table.get(0), Err(ParseError::SliceReadError((0, 4)))));
}
#[test]


@ -44,11 +44,7 @@ fn parse_at<E: EndianParse>(
Class::ELF32 => {
let r_offset = endian.parse_u32_at(offset, data)? as u64;
let r_info = endian.parse_u32_at(offset, data)?;
Ok(Rel {
r_offset,
r_sym: r_info >> 8,
r_type: r_info & 0xFF,
})
Ok(Rel { r_offset, r_sym: r_info >> 8, r_type: r_info & 0xFF })
}
Class::ELF64 => {
let r_offset = endian.parse_u64_at(offset, data)?;
@ -113,12 +109,7 @@ fn parse_at<E: EndianParse>(
let r_offset = endian.parse_u32_at(offset, data)? as u64;
let r_info = endian.parse_u32_at(offset, data)?;
let r_addend = endian.parse_i32_at(offset, data)? as i64;
Ok(Rela {
r_offset,
r_sym: r_info >> 8,
r_type: r_info & 0xFF,
r_addend,
})
Ok(Rela { r_offset, r_sym: r_info >> 8, r_type: r_info & 0xFF, r_addend })
}
Class::ELF64 => {
let r_offset = endian.parse_u64_at(offset, data)?;
@ -154,11 +145,7 @@ fn parse_rel32_lsb() {
test_parse_for(
LittleEndian,
Class::ELF32,
Rel {
r_offset: 0x03020100,
r_sym: 0x00070605,
r_type: 0x00000004,
},
Rel { r_offset: 0x03020100, r_sym: 0x00070605, r_type: 0x00000004 },
);
}
@ -167,11 +154,7 @@ fn parse_rel32_msb() {
test_parse_for(
BigEndian,
Class::ELF32,
Rel {
r_offset: 0x00010203,
r_sym: 0x00040506,
r_type: 0x00000007,
},
Rel { r_offset: 0x00010203, r_sym: 0x00040506, r_type: 0x00000007 },
);
}
@ -180,11 +163,7 @@ fn parse_rel64_lsb() {
test_parse_for(
LittleEndian,
Class::ELF64,
Rel {
r_offset: 0x0706050403020100,
r_sym: 0x0F0E0D0C,
r_type: 0x0B0A0908,
},
Rel { r_offset: 0x0706050403020100, r_sym: 0x0F0E0D0C, r_type: 0x0B0A0908 },
);
}
@ -193,11 +172,7 @@ fn parse_rel64_msb() {
test_parse_for(
BigEndian,
Class::ELF64,
Rel {
r_offset: 0x0001020304050607,
r_sym: 0x08090A0B,
r_type: 0x0C0D0E0F,
},
Rel { r_offset: 0x0001020304050607, r_sym: 0x08090A0B, r_type: 0x0C0D0E0F },
);
}


@ -17,10 +17,7 @@ pub fn get_raw(&self, offset: usize) -> Result<&'data [u8], ParseError> {
return Err(ParseError::BadOffset(offset as u64));
};
let start = self
.data
.get(offset..)
.ok_or(ParseError::BadOffset(offset as u64))?;
let start = self.data.get(offset..).ok_or(ParseError::BadOffset(offset as u64))?;
let end = start
.iter()
.position(|&b| b == 0u8)


@ -132,14 +132,7 @@ fn parse_at<E: EndianParse>(
st_size = endian.parse_u64_at(offset, data)?;
}
Ok(Symbol {
st_name,
st_value,
st_size,
st_shndx,
st_info,
st_other,
})
Ok(Symbol { st_name, st_value, st_size, st_shndx, st_info, st_other })
}
#[inline]
@ -157,24 +150,12 @@ mod symbol_tests {
#[test]
fn symbol_undefined() {
let undef_sym = Symbol {
st_name: 0,
st_value: 0,
st_size: 0,
st_shndx: 0,
st_info: 0,
st_other: 0,
};
let undef_sym =
Symbol { st_name: 0, st_value: 0, st_size: 0, st_shndx: 0, st_info: 0, st_other: 0 };
assert!(undef_sym.is_undefined());
let def_sym = Symbol {
st_name: 0,
st_value: 0,
st_size: 0,
st_shndx: 42,
st_info: 0,
st_other: 0,
};
let def_sym =
Symbol { st_name: 0, st_value: 0, st_size: 0, st_shndx: 42, st_info: 0, st_other: 0 };
assert!(!def_sym.is_undefined());
}
}


@ -389,11 +389,7 @@ fn round(self) -> Self {
if f.is_nan() || f.is_zero() {
self
} else if self > Self::zero() {
if f < h {
self - f
} else {
self - f + one
}
if f < h { self - f } else { self - f + one }
} else if -f < h {
self - f
} else {
@ -426,11 +422,7 @@ fn round(self) -> Self {
#[inline]
fn trunc(self) -> Self {
let f = self.fract();
if f.is_nan() {
self
} else {
self - f
}
if f.is_nan() { self } else { self - f }
}
/// Returns the fractional part of a number.
@ -457,11 +449,7 @@ fn trunc(self) -> Self {
/// ```
#[inline]
fn fract(self) -> Self {
if self.is_zero() {
Self::zero()
} else {
self % Self::one()
}
if self.is_zero() { Self::zero() } else { self % Self::one() }
}
/// Computes the absolute value of `self`. Returns `FloatCore::nan()` if the
@ -611,11 +599,7 @@ fn min(self, other: Self) -> Self {
if other.is_nan() {
return self;
}
if self < other {
self
} else {
other
}
if self < other { self } else { other }
}
/// Returns the maximum of the two numbers.
@ -645,11 +629,7 @@ fn max(self, other: Self) -> Self {
if other.is_nan() {
return self;
}
if self > other {
self
} else {
other
}
if self > other { self } else { other }
}
/// A value bounded by a minimum and a maximum
@ -1903,11 +1883,7 @@ fn clamp(self, min: Self, max: Self) -> Self {
/// assert!(f32::nan().copysign(1.0).is_nan());
/// ```
fn copysign(self, sign: Self) -> Self {
if self.is_sign_negative() == sign.is_sign_negative() {
self
} else {
self.neg()
}
if self.is_sign_negative() == sign.is_sign_negative() { self } else { self.neg() }
}
}
@ -2050,11 +2026,8 @@ fn integer_decode_f32(f: f32) -> (u64, i16, i8) {
let bits: u32 = f.to_bits();
let sign: i8 = if bits >> 31 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 23) & 0xff) as i16;
let mantissa = if exponent == 0 {
(bits & 0x7fffff) << 1
} else {
(bits & 0x7fffff) | 0x800000
};
let mantissa =
if exponent == 0 { (bits & 0x7fffff) << 1 } else { (bits & 0x7fffff) | 0x800000 };
// Exponent bias + mantissa shift
exponent -= 127 + 23;
(mantissa as u64, exponent, sign)
@ -2372,10 +2345,7 @@ fn convert_deg_rad_std() {
fn to_degrees_rounding() {
use crate::float::FloatCore;
assert_eq!(
FloatCore::to_degrees(1_f32),
57.2957795130823208767981548141051703
);
assert_eq!(FloatCore::to_degrees(1_f32), 57.2957795130823208767981548141051703);
}
#[test]


@ -514,10 +514,7 @@ mod tests {
pub fn reverse_bits() {
use core::{i16, i32, i64, i8};
assert_eq!(
PrimInt::reverse_bits(0x0123_4567_89ab_cdefu64),
0xf7b3_d591_e6a2_c480
);
assert_eq!(PrimInt::reverse_bits(0x0123_4567_89ab_cdefu64), 0xf7b3_d591_e6a2_c480);
assert_eq!(PrimInt::reverse_bits(0i8), 0);
assert_eq!(PrimInt::reverse_bits(-1i8), -1);


@ -421,11 +421,7 @@ pub fn clamp<T: PartialOrd>(input: T, min: T, max: T) -> T {
#[allow(clippy::eq_op)]
pub fn clamp_min<T: PartialOrd>(input: T, min: T) -> T {
debug_assert!(min == min, "min must not be NAN");
if input < min {
min
} else {
input
}
if input < min { min } else { input }
}
/// A value bounded by a maximum value
@ -439,11 +435,7 @@ pub fn clamp_min<T: PartialOrd>(input: T, min: T) -> T {
#[allow(clippy::eq_op)]
pub fn clamp_max<T: PartialOrd>(input: T, max: T) -> T {
debug_assert!(max == max, "max must not be NAN");
if input > max {
max
} else {
input
}
if input > max { max } else { input }
}
#[test]
@ -527,22 +519,10 @@ fn from_str_radix_multi_byte_fail() {
#[test]
fn from_str_radix_ignore_case() {
assert_eq!(
f32::from_str_radix("InF", 16).unwrap(),
::core::f32::INFINITY
);
assert_eq!(
f32::from_str_radix("InfinitY", 16).unwrap(),
::core::f32::INFINITY
);
assert_eq!(
f32::from_str_radix("-InF", 8).unwrap(),
::core::f32::NEG_INFINITY
);
assert_eq!(
f32::from_str_radix("-InfinitY", 8).unwrap(),
::core::f32::NEG_INFINITY
);
assert_eq!(f32::from_str_radix("InF", 16).unwrap(), ::core::f32::INFINITY);
assert_eq!(f32::from_str_radix("InfinitY", 16).unwrap(), ::core::f32::INFINITY);
assert_eq!(f32::from_str_radix("-InF", 8).unwrap(), ::core::f32::NEG_INFINITY);
assert_eq!(f32::from_str_radix("-InfinitY", 8).unwrap(), ::core::f32::NEG_INFINITY);
assert!(f32::from_str_radix("nAn", 4).unwrap().is_nan());
assert!(f32::from_str_radix("-nAn", 4).unwrap().is_nan());
}


@ -105,11 +105,7 @@ fn div_euclid(&self, v: &f32) -> f32 {
#[inline]
fn rem_euclid(&self, v: &f32) -> f32 {
let r = self % v;
if r < 0.0 {
r + <f32 as crate::float::FloatCore>::abs(*v)
} else {
r
}
if r < 0.0 { r + <f32 as crate::float::FloatCore>::abs(*v) } else { r }
}
}
@ -127,11 +123,7 @@ fn div_euclid(&self, v: &f64) -> f64 {
#[inline]
fn rem_euclid(&self, v: &f64) -> f64 {
let r = self % v;
if r < 0.0 {
r + <f64 as crate::float::FloatCore>::abs(*v)
} else {
r
}
if r < 0.0 { r + <f64 as crate::float::FloatCore>::abs(*v) } else { r }
}
}


@ -118,11 +118,7 @@ fn abs(&self) -> $t {
/// and `other` is returned.
#[inline]
fn abs_sub(&self, other: &$t) -> $t {
if *self <= *other {
0.
} else {
*self - *other
}
if *self <= *other { 0. } else { *self - *other }
}
/// # Returns


@ -481,10 +481,7 @@ fn $visit<E>(self, v: $ty) -> Result<Self::Value, E>
{
Ok(v as Self::Value)
} else {
Err(Error::invalid_value(
Unexpected::Other(stringify!($ty)),
&self,
))
Err(Error::invalid_value(Unexpected::Other(stringify!($ty)), &self))
}
}
};
@ -503,10 +500,7 @@ fn $visit<E>(self, v: $ty) -> Result<Self::Value, E>
Err(Error::invalid_value(Unexpected::Unsigned(0), &self))
}
} else {
Err(Error::invalid_value(
Unexpected::Other(stringify!($ty)),
&self,
))
Err(Error::invalid_value(Unexpected::Other(stringify!($ty)), &self))
}
}
};
@ -630,10 +624,7 @@ fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
{
match String::from_utf8(v) {
Ok(s) => Ok(s),
Err(e) => Err(Error::invalid_value(
Unexpected::Bytes(&e.into_bytes()),
&self,
)),
Err(e) => Err(Error::invalid_value(Unexpected::Bytes(&e.into_bytes()), &self)),
}
}
}
@ -686,10 +677,7 @@ fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
*self.0 = s;
Ok(())
}
Err(e) => Err(Error::invalid_value(
Unexpected::Bytes(&e.into_bytes()),
&self,
)),
Err(e) => Err(Error::invalid_value(Unexpected::Bytes(&e.into_bytes()), &self)),
}
}
}
@ -932,9 +920,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_option(OptionVisitor {
marker: PhantomData,
})
deserializer.deserialize_option(OptionVisitor { marker: PhantomData })
}
// The Some variant's repr is opaque, so we can't play cute tricks with its
@ -977,9 +963,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let visitor = PhantomDataVisitor {
marker: PhantomData,
};
let visitor = PhantomDataVisitor { marker: PhantomData };
deserializer.deserialize_unit_struct("PhantomData", visitor)
}
}
@ -1180,9 +1164,7 @@ fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
}
}
let visitor = VecVisitor {
marker: PhantomData,
};
let visitor = VecVisitor { marker: PhantomData };
deserializer.deserialize_seq(visitor)
}
@ -1243,9 +1225,7 @@ struct ArrayVisitor<A> {
impl<A> ArrayVisitor<A> {
fn new() -> Self {
ArrayVisitor {
marker: PhantomData,
}
ArrayVisitor { marker: PhantomData }
}
}
@ -1478,10 +1458,7 @@ fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
}
#[cfg_attr(docsrs, doc(fake_variadic))]
#[cfg_attr(
docsrs,
doc = "This trait is implemented for tuples up to 16 items long."
)]
#[cfg_attr(docsrs, doc = "This trait is implemented for tuples up to 16 items long.")]
impl<'de, T> Deserialize<'de> for (T,)
where
T: Deserialize<'de>,
@ -1939,9 +1916,9 @@ fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error>
match tri!(data.variant()) {
(OsStringKind::Unix, v) => v.newtype_variant().map(OsString::from_vec),
(OsStringKind::Windows, _) => Err(Error::custom(
"cannot deserialize Windows OS string on Unix",
)),
(OsStringKind::Windows, _) => {
Err(Error::custom("cannot deserialize Windows OS string on Unix"))
}
}
}
@ -1953,12 +1930,12 @@ fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error>
use std::os::windows::ffi::OsStringExt;
match tri!(data.variant()) {
(OsStringKind::Windows, v) => v
.newtype_variant::<Vec<u16>>()
.map(|vec| OsString::from_wide(&vec)),
(OsStringKind::Unix, _) => Err(Error::custom(
"cannot deserialize Unix OS string on Windows",
)),
(OsStringKind::Windows, v) => {
v.newtype_variant::<Vec<u16>>().map(|vec| OsString::from_wide(&vec))
}
(OsStringKind::Unix, _) => {
Err(Error::custom("cannot deserialize Unix OS string on Windows"))
}
}
}
}
@ -2023,10 +2000,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
///
/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))]
#[cfg_attr(
docsrs,
doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc"))))
)]
#[cfg_attr(docsrs, doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))))]
impl<'de, T> Deserialize<'de> for RcWeak<T>
where
T: Deserialize<'de>,
@ -2045,10 +2019,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
///
/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))]
#[cfg_attr(
docsrs,
doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc"))))
)]
#[cfg_attr(docsrs, doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))))]
impl<'de, T> Deserialize<'de> for ArcWeak<T>
where
T: Deserialize<'de>,
@ -2460,10 +2431,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
let (start, end) = tri!(deserializer.deserialize_struct(
"Range",
range::FIELDS,
range::RangeVisitor {
expecting: "struct Range",
phantom: PhantomData,
},
range::RangeVisitor { expecting: "struct Range", phantom: PhantomData },
));
Ok(start..end)
}
@ -2480,10 +2448,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
let (start, end) = tri!(deserializer.deserialize_struct(
"RangeInclusive",
range::FIELDS,
range::RangeVisitor {
expecting: "struct RangeInclusive",
phantom: PhantomData,
},
range::RangeVisitor { expecting: "struct RangeInclusive", phantom: PhantomData },
));
Ok(RangeInclusive::new(start, end))
}
@ -2638,10 +2603,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
let start = tri!(deserializer.deserialize_struct(
"RangeFrom",
range_from::FIELDS,
range_from::RangeFromVisitor {
expecting: "struct RangeFrom",
phantom: PhantomData,
},
range_from::RangeFromVisitor { expecting: "struct RangeFrom", phantom: PhantomData },
));
Ok(start..)
}
@ -2776,10 +2738,7 @@ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
let end = tri!(deserializer.deserialize_struct(
"RangeTo",
range_to::FIELDS,
range_to::RangeToVisitor {
expecting: "struct RangeTo",
phantom: PhantomData,
},
range_to::RangeToVisitor { expecting: "struct RangeTo", phantom: PhantomData },
));
Ok(..end)
}
@ -3169,10 +3128,7 @@ struct FromStrVisitor<T> {
#[cfg(feature = "std")]
impl<T> FromStrVisitor<T> {
fn new(expecting: &'static str) -> Self {
FromStrVisitor {
expecting,
ty: PhantomData,
}
FromStrVisitor { expecting, ty: PhantomData }
}
}


@ -1369,10 +1369,7 @@ fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E>
let mut buf = [0u8; 58];
let mut writer = format::Buf::new(&mut buf);
fmt::Write::write_fmt(&mut writer, format_args!("integer `{}` as i128", v)).unwrap();
Err(Error::invalid_type(
Unexpected::Other(writer.as_str()),
&self,
))
Err(Error::invalid_type(Unexpected::Other(writer.as_str()), &self))
}
/// The input contains a `u8`.
@ -1431,10 +1428,7 @@ fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E>
let mut buf = [0u8; 57];
let mut writer = format::Buf::new(&mut buf);
fmt::Write::write_fmt(&mut writer, format_args!("integer `{}` as u128", v)).unwrap();
Err(Error::invalid_type(
Unexpected::Other(writer.as_str()),
&self,
))
Err(Error::invalid_type(Unexpected::Other(writer.as_str()), &self))
}
/// The input contains an `f32`.
@ -2313,10 +2307,7 @@ fn write_char(&mut self, ch: char) -> fmt::Result {
}
if self.0.is_finite() {
let mut writer = LookForDecimalPoint {
formatter,
has_decimal_point: false,
};
let mut writer = LookForDecimalPoint { formatter, has_decimal_point: false };
tri!(write!(writer, "{}", self.0));
if !writer.has_decimal_point {
tri!(formatter.write_str(".0"));


@ -14,10 +14,7 @@ pub fn cautious<Element>(hint: Option<usize>) -> usize {
if mem::size_of::<Element>() == 0 {
0
} else {
cmp::min(
hint.unwrap_or(0),
MAX_PREALLOC_BYTES / mem::size_of::<Element>(),
)
cmp::min(hint.unwrap_or(0), MAX_PREALLOC_BYTES / mem::size_of::<Element>())
}
}


@ -64,9 +64,7 @@ fn custom<T>(msg: T) -> Self
where
T: Display,
{
Error {
err: msg.to_string().into_boxed_str(),
}
Error { err: msg.to_string().into_boxed_str() }
}
#[cfg(not(any(feature = "std", feature = "alloc")))]
@ -142,9 +140,7 @@ pub struct UnitDeserializer<E> {
impl<E> UnitDeserializer<E> {
#[allow(missing_docs)]
pub fn new() -> Self {
UnitDeserializer {
marker: PhantomData,
}
UnitDeserializer { marker: PhantomData }
}
}
@ -328,10 +324,7 @@ fn into_deserializer(self) -> U32Deserializer<E> {
impl<E> U32Deserializer<E> {
#[allow(missing_docs)]
pub fn new(value: u32) -> Self {
U32Deserializer {
value,
marker: PhantomData,
}
U32Deserializer { value, marker: PhantomData }
}
}
@ -386,10 +379,7 @@ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Err
impl<E> Debug for U32Deserializer<E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("U32Deserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("U32Deserializer").field("value", &self.value).finish()
}
}
@ -417,10 +407,7 @@ fn into_deserializer(self) -> StrDeserializer<'a, E> {
impl<'a, E> StrDeserializer<'a, E> {
#[allow(missing_docs)]
pub fn new(value: &'a str) -> Self {
StrDeserializer {
value,
marker: PhantomData,
}
StrDeserializer { value, marker: PhantomData }
}
}
@ -475,10 +462,7 @@ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Err
impl<'a, E> Debug for StrDeserializer<'a, E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("StrDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("StrDeserializer").field("value", &self.value).finish()
}
}
@ -496,10 +480,7 @@ pub struct BorrowedStrDeserializer<'de, E> {
impl<'de, E> BorrowedStrDeserializer<'de, E> {
/// Create a new borrowed deserializer from the given string.
pub fn new(value: &'de str) -> BorrowedStrDeserializer<'de, E> {
BorrowedStrDeserializer {
value,
marker: PhantomData,
}
BorrowedStrDeserializer { value, marker: PhantomData }
}
}
@ -554,10 +535,7 @@ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Err
impl<'de, E> Debug for BorrowedStrDeserializer<'de, E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("BorrowedStrDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("BorrowedStrDeserializer").field("value", &self.value).finish()
}
}
@ -574,10 +552,7 @@ pub struct StringDeserializer<E> {
#[cfg(any(feature = "std", feature = "alloc"))]
impl<E> Clone for StringDeserializer<E> {
fn clone(&self) -> Self {
StringDeserializer {
value: self.value.clone(),
marker: PhantomData,
}
StringDeserializer { value: self.value.clone(), marker: PhantomData }
}
}
@ -598,10 +573,7 @@ fn into_deserializer(self) -> StringDeserializer<E> {
impl<E> StringDeserializer<E> {
#[allow(missing_docs)]
pub fn new(value: String) -> Self {
StringDeserializer {
value,
marker: PhantomData,
}
StringDeserializer { value, marker: PhantomData }
}
}
@ -659,10 +631,7 @@ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Err
#[cfg(any(feature = "std", feature = "alloc"))]
impl<E> Debug for StringDeserializer<E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("StringDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("StringDeserializer").field("value", &self.value).finish()
}
}
@ -679,10 +648,7 @@ pub struct CowStrDeserializer<'a, E> {
#[cfg(any(feature = "std", feature = "alloc"))]
impl<'a, E> Clone for CowStrDeserializer<'a, E> {
fn clone(&self) -> Self {
CowStrDeserializer {
value: self.value.clone(),
marker: PhantomData,
}
CowStrDeserializer { value: self.value.clone(), marker: PhantomData }
}
}
@ -703,10 +669,7 @@ fn into_deserializer(self) -> CowStrDeserializer<'a, E> {
impl<'a, E> CowStrDeserializer<'a, E> {
#[allow(missing_docs)]
pub fn new(value: Cow<'a, str>) -> Self {
CowStrDeserializer {
value,
marker: PhantomData,
}
CowStrDeserializer { value, marker: PhantomData }
}
}
@ -767,10 +730,7 @@ fn variant_seed<T>(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Err
#[cfg(any(feature = "std", feature = "alloc"))]
impl<'a, E> Debug for CowStrDeserializer<'a, E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("CowStrDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("CowStrDeserializer").field("value", &self.value).finish()
}
}
@ -785,10 +745,7 @@ pub struct BytesDeserializer<'a, E> {
impl<'a, E> BytesDeserializer<'a, E> {
/// Create a new deserializer from the given bytes.
pub fn new(value: &'a [u8]) -> Self {
BytesDeserializer {
value,
marker: PhantomData,
}
BytesDeserializer { value, marker: PhantomData }
}
}
@ -827,10 +784,7 @@ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
impl<'a, E> Debug for BytesDeserializer<'a, E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("BytesDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("BytesDeserializer").field("value", &self.value).finish()
}
}
@ -844,10 +798,7 @@ pub struct BorrowedBytesDeserializer<'de, E> {
impl<'de, E> BorrowedBytesDeserializer<'de, E> {
/// Create a new borrowed deserializer from the given borrowed bytes.
pub fn new(value: &'de [u8]) -> Self {
BorrowedBytesDeserializer {
value,
marker: PhantomData,
}
BorrowedBytesDeserializer { value, marker: PhantomData }
}
}
@ -875,10 +826,7 @@ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
impl<'de, E> Debug for BorrowedBytesDeserializer<'de, E> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("BorrowedBytesDeserializer")
.field("value", &self.value)
.finish()
formatter.debug_struct("BorrowedBytesDeserializer").field("value", &self.value).finish()
}
}
@ -898,11 +846,7 @@ impl<I, E> SeqDeserializer<I, E>
{
/// Construct a new `SeqDeserializer<I, E>`.
pub fn new(iter: I) -> Self {
SeqDeserializer {
iter: iter.fuse(),
count: 0,
marker: PhantomData,
}
SeqDeserializer { iter: iter.fuse(), count: 0, marker: PhantomData }
}
}
@ -920,10 +864,7 @@ pub fn end(self) -> Result<(), E> {
} else {
// First argument is the number of elements in the data, second
// argument is the number of elements expected by the Deserialize.
Err(de::Error::invalid_length(
self.count + remaining,
&ExpectedInSeq(self.count),
))
Err(de::Error::invalid_length(self.count + remaining, &ExpectedInSeq(self.count)))
}
}
}
@ -1130,10 +1071,7 @@ pub fn end(self) -> Result<(), E> {
} else {
// First argument is the number of elements in the data, second
// argument is the number of elements expected by the Deserialize.
Err(de::Error::invalid_length(
self.count + remaining,
&ExpectedInMap(self.count),
))
Err(de::Error::invalid_length(self.count + remaining, &ExpectedInMap(self.count)))
}
}
}
@ -1565,12 +1503,7 @@ pub struct UnitOnly<E> {
}
pub fn unit_only<T, E>(t: T) -> (T, UnitOnly<E>) {
(
t,
UnitOnly {
marker: PhantomData,
},
)
(t, UnitOnly { marker: PhantomData })
}
impl<'de, E> de::VariantAccess<'de> for UnitOnly<E>
@ -1587,20 +1520,14 @@ fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
where
T: de::DeserializeSeed<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"newtype variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"newtype variant"))
}
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
where
V: de::Visitor<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"tuple variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"tuple variant"))
}
fn struct_variant<V>(
@ -1611,10 +1538,7 @@ fn struct_variant<V>(
where
V: de::Visitor<'de>,
{
Err(de::Error::invalid_type(
Unexpected::UnitVariant,
&"struct variant",
))
Err(de::Error::invalid_type(Unexpected::UnitVariant, &"struct variant"))
}
}


@ -155,7 +155,6 @@
#![deny(clippy::question_mark_used)]
// Rustc lints.
#![deny(missing_docs, unused_imports)]
#![allow(elided_lifetimes_in_paths)]
#![allow(explicit_outlives_requirements)]


@ -120,10 +120,7 @@ fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
{
match String::from_utf8(v) {
Ok(s) => Ok(Cow::Owned(s)),
Err(e) => Err(Error::invalid_value(
Unexpected::Bytes(&e.into_bytes()),
&self,
)),
Err(e) => Err(Error::invalid_value(Unexpected::Bytes(&e.into_bytes()), &self)),
}
}
}
@ -189,9 +186,7 @@ fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
}
}
deserializer
.deserialize_bytes(CowBytesVisitor)
.map(From::from)
deserializer.deserialize_bytes(CowBytesVisitor).map(From::from)
}
#[cfg(any(feature = "std", feature = "alloc"))]
@ -516,9 +511,7 @@ fn visit_enum<V>(self, _visitor: V) -> Result<Self::Value, V::Error>
where
V: EnumAccess<'de>,
{
Err(de::Error::custom(
"untagged and internally tagged enums do not support enum input",
))
Err(de::Error::custom("untagged and internally tagged enums do not support enum input"))
}
}
@ -539,10 +532,7 @@ struct TagOrContentVisitor<'de> {
impl<'de> TagOrContentVisitor<'de> {
fn new(name: &'static str) -> Self {
TagOrContentVisitor {
name,
value: PhantomData,
}
TagOrContentVisitor { name, value: PhantomData }
}
}
@ -570,108 +560,84 @@ fn visit_bool<F>(self, value: bool) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_bool(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_bool(value).map(TagOrContent::Content)
}
fn visit_i8<F>(self, value: i8) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_i8(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_i8(value).map(TagOrContent::Content)
}
fn visit_i16<F>(self, value: i16) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_i16(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_i16(value).map(TagOrContent::Content)
}
fn visit_i32<F>(self, value: i32) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_i32(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_i32(value).map(TagOrContent::Content)
}
fn visit_i64<F>(self, value: i64) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_i64(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_i64(value).map(TagOrContent::Content)
}
fn visit_u8<F>(self, value: u8) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_u8(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_u8(value).map(TagOrContent::Content)
}
fn visit_u16<F>(self, value: u16) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_u16(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_u16(value).map(TagOrContent::Content)
}
fn visit_u32<F>(self, value: u32) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_u32(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_u32(value).map(TagOrContent::Content)
}
fn visit_u64<F>(self, value: u64) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_u64(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_u64(value).map(TagOrContent::Content)
}
fn visit_f32<F>(self, value: f32) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_f32(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_f32(value).map(TagOrContent::Content)
}
fn visit_f64<F>(self, value: f64) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_f64(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_f64(value).map(TagOrContent::Content)
}
fn visit_char<F>(self, value: char) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_char(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_char(value).map(TagOrContent::Content)
}
fn visit_str<F>(self, value: &str) -> Result<Self::Value, F>
@ -681,9 +647,7 @@ fn visit_str<F>(self, value: &str) -> Result<Self::Value, F>
if value == self.name {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_str(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_str(value).map(TagOrContent::Content)
}
}
@ -694,9 +658,7 @@ fn visit_borrowed_str<F>(self, value: &'de str) -> Result<Self::Value, F>
if value == self.name {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_borrowed_str(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_borrowed_str(value).map(TagOrContent::Content)
}
}
@ -707,9 +669,7 @@ fn visit_string<F>(self, value: String) -> Result<Self::Value, F>
if value == self.name {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_string(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_string(value).map(TagOrContent::Content)
}
}
@ -720,9 +680,7 @@ fn visit_bytes<F>(self, value: &[u8]) -> Result<Self::Value, F>
if value == self.name.as_bytes() {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_bytes(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_bytes(value).map(TagOrContent::Content)
}
}
@ -733,9 +691,7 @@ fn visit_borrowed_bytes<F>(self, value: &'de [u8]) -> Result<Self::Value, F>
if value == self.name.as_bytes() {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_borrowed_bytes(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_borrowed_bytes(value).map(TagOrContent::Content)
}
}
@ -746,9 +702,7 @@ fn visit_byte_buf<F>(self, value: Vec<u8>) -> Result<Self::Value, F>
if value == self.name.as_bytes() {
Ok(TagOrContent::Tag)
} else {
ContentVisitor::new()
.visit_byte_buf(value)
.map(TagOrContent::Content)
ContentVisitor::new().visit_byte_buf(value).map(TagOrContent::Content)
}
}
@ -756,63 +710,49 @@ fn visit_unit<F>(self) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_unit()
.map(TagOrContent::Content)
ContentVisitor::new().visit_unit().map(TagOrContent::Content)
}
fn visit_none<F>(self) -> Result<Self::Value, F>
where
F: de::Error,
{
ContentVisitor::new()
.visit_none()
.map(TagOrContent::Content)
ContentVisitor::new().visit_none().map(TagOrContent::Content)
}
fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
ContentVisitor::new()
.visit_some(deserializer)
.map(TagOrContent::Content)
ContentVisitor::new().visit_some(deserializer).map(TagOrContent::Content)
}
fn visit_newtype_struct<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
ContentVisitor::new()
.visit_newtype_struct(deserializer)
.map(TagOrContent::Content)
ContentVisitor::new().visit_newtype_struct(deserializer).map(TagOrContent::Content)
}
fn visit_seq<V>(self, visitor: V) -> Result<Self::Value, V::Error>
where
V: SeqAccess<'de>,
{
ContentVisitor::new()
.visit_seq(visitor)
.map(TagOrContent::Content)
ContentVisitor::new().visit_seq(visitor).map(TagOrContent::Content)
}
fn visit_map<V>(self, visitor: V) -> Result<Self::Value, V::Error>
where
V: MapAccess<'de>,
{
ContentVisitor::new()
.visit_map(visitor)
.map(TagOrContent::Content)
ContentVisitor::new().visit_map(visitor).map(TagOrContent::Content)
}
fn visit_enum<V>(self, visitor: V) -> Result<Self::Value, V::Error>
where
V: EnumAccess<'de>,
{
ContentVisitor::new()
.visit_enum(visitor)
.map(TagOrContent::Content)
ContentVisitor::new().visit_enum(visitor).map(TagOrContent::Content)
}
}
@ -832,11 +772,7 @@ impl<T> TaggedContentVisitor<T> {
/// Visitor for the content of an internally tagged enum with the given
/// tag name.
pub fn new(name: &'static str, expecting: &'static str) -> Self {
TaggedContentVisitor {
tag_name: name,
expecting,
value: PhantomData,
}
TaggedContentVisitor { tag_name: name, expecting, value: PhantomData }
}
}
@ -933,10 +869,7 @@ fn visit_u64<E>(self, field_index: u64) -> Result<Self::Value, E>
match field_index {
0 => Ok(TagOrContentField::Tag),
1 => Ok(TagOrContentField::Content),
_ => Err(de::Error::invalid_value(
Unexpected::Unsigned(field_index),
&self,
)),
_ => Err(de::Error::invalid_value(Unexpected::Unsigned(field_index), &self)),
}
}
@ -998,11 +931,7 @@ impl<'de> Visitor<'de> for TagContentOtherFieldVisitor {
type Value = TagContentOtherField;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(
formatter,
"{:?}, {:?}, or other ignored fields",
self.tag, self.content
)
write!(formatter, "{:?}, {:?}, or other ignored fields", self.tag, self.content)
}
fn visit_u64<E>(self, field_index: u64) -> Result<Self::Value, E>
@ -1452,10 +1381,7 @@ fn deserialize_enum<V>(
}
s @ Content::String(_) | s @ Content::Str(_) => (s, None),
other => {
return Err(de::Error::invalid_type(
other.unexpected(),
&"string or map",
));
return Err(de::Error::invalid_type(other.unexpected(), &"string or map"));
}
};
@ -1501,10 +1427,7 @@ fn __deserialize_content<V>(
impl<'de, E> ContentDeserializer<'de, E> {
/// private API, don't use
pub fn new(content: Content<'de>) -> Self {
ContentDeserializer {
content,
err: PhantomData,
}
ContentDeserializer { content, err: PhantomData }
}
}
@ -1522,11 +1445,7 @@ impl<'de, E> EnumDeserializer<'de, E>
E: de::Error,
{
pub fn new(variant: Content<'de>, value: Option<Content<'de>>) -> EnumDeserializer<'de, E> {
EnumDeserializer {
variant,
value,
err: PhantomData,
}
EnumDeserializer { variant, value, err: PhantomData }
}
}
@ -1541,12 +1460,8 @@ fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), E>
where
V: de::DeserializeSeed<'de>,
{
let visitor = VariantDeserializer {
value: self.value,
err: PhantomData,
};
seed.deserialize(ContentDeserializer::new(self.variant))
.map(|v| (v, visitor))
let visitor = VariantDeserializer { value: self.value, err: PhantomData };
seed.deserialize(ContentDeserializer::new(self.variant)).map(|v| (v, visitor))
}
}
@ -1577,10 +1492,9 @@ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, E>
{
match self.value {
Some(value) => seed.deserialize(ContentDeserializer::new(value)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"newtype variant",
)),
None => {
Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"newtype variant"))
}
}
}
@ -1592,14 +1506,8 @@ fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Err
Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqDeserializer::new(v.into_iter()), visitor)
}
Some(other) => Err(de::Error::invalid_type(
other.unexpected(),
&"tuple variant",
)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"tuple variant",
)),
Some(other) => Err(de::Error::invalid_type(other.unexpected(), &"tuple variant")),
None => Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"tuple variant")),
}
}
@ -1618,14 +1526,10 @@ fn struct_variant<V>(
Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqDeserializer::new(v.into_iter()), visitor)
}
Some(other) => Err(de::Error::invalid_type(
other.unexpected(),
&"struct variant",
)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"struct variant",
)),
Some(other) => Err(de::Error::invalid_type(other.unexpected(), &"struct variant")),
None => {
Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"struct variant"))
}
}
}
}
@ -1705,12 +1609,9 @@ fn visit_content_map_ref<'a, 'de, V, E>(
V: Visitor<'de>,
E: de::Error,
{
let map = content.iter().map(|(k, v)| {
(
ContentRefDeserializer::new(k),
ContentRefDeserializer::new(v),
)
});
let map = content
.iter()
.map(|(k, v)| (ContentRefDeserializer::new(k), ContentRefDeserializer::new(v)));
let mut map_visitor = MapDeserializer::new(map);
let value = tri!(visitor.visit_map(&mut map_visitor));
tri!(map_visitor.end());
@ -2022,18 +1923,11 @@ fn deserialize_enum<V>(
}
ref s @ Content::String(_) | ref s @ Content::Str(_) => (s, None),
ref other => {
return Err(de::Error::invalid_type(
other.unexpected(),
&"string or map",
));
return Err(de::Error::invalid_type(other.unexpected(), &"string or map"));
}
};
visitor.visit_enum(EnumRefDeserializer {
variant,
value,
err: PhantomData,
})
visitor.visit_enum(EnumRefDeserializer { variant, value, err: PhantomData })
}
fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>
@ -2074,10 +1968,7 @@ fn __deserialize_content<V>(
impl<'a, 'de, E> ContentRefDeserializer<'a, 'de, E> {
/// private API, don't use
pub fn new(content: &'a Content<'de>) -> Self {
ContentRefDeserializer {
content,
err: PhantomData,
}
ContentRefDeserializer { content, err: PhantomData }
}
}
@ -2109,12 +2000,8 @@ fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Err
where
V: de::DeserializeSeed<'de>,
{
let visitor = VariantRefDeserializer {
value: self.value,
err: PhantomData,
};
seed.deserialize(ContentRefDeserializer::new(self.variant))
.map(|v| (v, visitor))
let visitor = VariantRefDeserializer { value: self.value, err: PhantomData };
seed.deserialize(ContentRefDeserializer::new(self.variant)).map(|v| (v, visitor))
}
}
@ -2145,10 +2032,9 @@ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, E>
{
match self.value {
Some(value) => seed.deserialize(ContentRefDeserializer::new(value)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"newtype variant",
)),
None => {
Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"newtype variant"))
}
}
}
@ -2160,14 +2046,8 @@ fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Err
Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor)
}
Some(other) => Err(de::Error::invalid_type(
other.unexpected(),
&"tuple variant",
)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"tuple variant",
)),
Some(other) => Err(de::Error::invalid_type(other.unexpected(), &"tuple variant")),
None => Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"tuple variant")),
}
}
@ -2186,14 +2066,10 @@ fn struct_variant<V>(
Some(Content::Seq(v)) => {
de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor)
}
Some(other) => Err(de::Error::invalid_type(
other.unexpected(),
&"struct variant",
)),
None => Err(de::Error::invalid_type(
de::Unexpected::UnitVariant,
&"struct variant",
)),
Some(other) => Err(de::Error::invalid_type(other.unexpected(), &"struct variant")),
None => {
Err(de::Error::invalid_type(de::Unexpected::UnitVariant, &"struct variant"))
}
}
}
}
@ -2211,10 +2087,7 @@ impl<'a, 'de, E> SeqRefDeserializer<'a, 'de, E>
E: de::Error,
{
fn new(slice: &'a [Content<'de>]) -> Self {
SeqRefDeserializer {
iter: slice.iter(),
err: PhantomData,
}
SeqRefDeserializer { iter: slice.iter(), err: PhantomData }
}
}
@ -2261,9 +2134,7 @@ fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Er
T: de::DeserializeSeed<'de>,
{
match self.iter.next() {
Some(value) => seed
.deserialize(ContentRefDeserializer::new(value))
.map(Some),
Some(value) => seed.deserialize(ContentRefDeserializer::new(value)).map(Some),
None => Ok(None),
}
}
@ -2287,11 +2158,7 @@ impl<'a, 'de, E> MapRefDeserializer<'a, 'de, E>
E: de::Error,
{
fn new(map: &'a [(Content<'de>, Content<'de>)]) -> Self {
MapRefDeserializer {
iter: map.iter(),
value: None,
err: PhantomData,
}
MapRefDeserializer { iter: map.iter(), value: None, err: PhantomData }
}
}
@ -2383,10 +2250,7 @@ pub struct InternallyTaggedUnitVisitor<'a> {
impl<'a> InternallyTaggedUnitVisitor<'a> {
/// Not public API.
pub fn new(type_name: &'a str, variant_name: &'a str) -> Self {
InternallyTaggedUnitVisitor {
type_name,
variant_name,
}
InternallyTaggedUnitVisitor { type_name, variant_name }
}
}
@ -2394,11 +2258,7 @@ impl<'de, 'a> Visitor<'de> for InternallyTaggedUnitVisitor<'a> {
type Value = ();
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(
formatter,
"unit variant {}::{}",
self.type_name, self.variant_name
)
write!(formatter, "unit variant {}::{}", self.type_name, self.variant_name)
}
fn visit_seq<S>(self, _: S) -> Result<(), S::Error>
@ -2428,10 +2288,7 @@ pub struct UntaggedUnitVisitor<'a> {
impl<'a> UntaggedUnitVisitor<'a> {
/// Not public API.
pub fn new(type_name: &'a str, variant_name: &'a str) -> Self {
UntaggedUnitVisitor {
type_name,
variant_name,
}
UntaggedUnitVisitor { type_name, variant_name }
}
}
@ -2439,11 +2296,7 @@ impl<'de, 'a> Visitor<'de> for UntaggedUnitVisitor<'a> {
type Value = ();
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(
formatter,
"unit variant {}::{}",
self.type_name, self.variant_name
)
write!(formatter, "unit variant {}::{}", self.type_name, self.variant_name)
}
fn visit_unit<E>(self) -> Result<(), E>
@ -2550,10 +2403,7 @@ impl<'a, E> IdentifierDeserializer<'a, E> for &'a str
type Deserializer = StrDeserializer<'a, E>;
fn from(self) -> Self::Deserializer {
StrDeserializer {
value: self,
marker: PhantomData,
}
StrDeserializer { value: self, marker: PhantomData }
}
}
@ -2564,10 +2414,7 @@ impl<'de, E> IdentifierDeserializer<'de, E> for Borrowed<'de, str>
type Deserializer = BorrowedStrDeserializer<'de, E>;
fn from(self) -> Self::Deserializer {
BorrowedStrDeserializer {
value: self.0,
marker: PhantomData,
}
BorrowedStrDeserializer { value: self.0, marker: PhantomData }
}
}
@ -2652,10 +2499,7 @@ fn deserialize_enum<V>(
}
}
Err(Error::custom(format_args!(
"no variant of enum {} found in flattened data",
name
)))
Err(Error::custom(format_args!("no variant of enum {} found in flattened data", name)))
}
fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
@ -2839,11 +2683,7 @@ fn flat_map_take_entry<'de>(
Some((k, _v)) => k.as_str().map_or(false, |name| recognized.contains(&name)),
};
if is_recognized {
entry.take()
} else {
None
}
if is_recognized { entry.take() } else { None }
}
pub struct AdjacentlyTaggedEnumVariantSeed<F> {


@ -272,11 +272,7 @@ fn serialize_tuple_variant(
let mut map = tri!(self.delegate.serialize_map(Some(2)));
tri!(map.serialize_entry(self.tag, self.variant_name));
tri!(map.serialize_key(inner_variant));
Ok(SerializeTupleVariantAsMapValue::new(
map,
inner_variant,
len,
))
Ok(SerializeTupleVariantAsMapValue::new(map, inner_variant, len))
}
fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
@ -319,11 +315,7 @@ fn serialize_struct_variant(
let mut map = tri!(self.delegate.serialize_map(Some(2)));
tri!(map.serialize_entry(self.tag, self.variant_name));
tri!(map.serialize_key(inner_variant));
Ok(SerializeStructVariantAsMapValue::new(
map,
inner_variant,
len,
))
Ok(SerializeStructVariantAsMapValue::new(map, inner_variant, len))
}
#[cfg(not(any(feature = "std", feature = "alloc")))]
@ -349,11 +341,7 @@ pub struct SerializeTupleVariantAsMapValue<M> {
impl<M> SerializeTupleVariantAsMapValue<M> {
pub fn new(map: M, name: &'static str, len: usize) -> Self {
SerializeTupleVariantAsMapValue {
map,
name,
fields: Vec::with_capacity(len),
}
SerializeTupleVariantAsMapValue { map, name, fields: Vec::with_capacity(len) }
}
}
@ -374,9 +362,7 @@ fn serialize_field<T>(&mut self, value: &T) -> Result<(), M::Error>
}
fn end(mut self) -> Result<M::Ok, M::Error> {
tri!(self
.map
.serialize_value(&Content::TupleStruct(self.name, self.fields)));
tri!(self.map.serialize_value(&Content::TupleStruct(self.name, self.fields)));
self.map.end()
}
}
@ -389,11 +375,7 @@ pub struct SerializeStructVariantAsMapValue<M> {
impl<M> SerializeStructVariantAsMapValue<M> {
pub fn new(map: M, name: &'static str, len: usize) -> Self {
SerializeStructVariantAsMapValue {
map,
name,
fields: Vec::with_capacity(len),
}
SerializeStructVariantAsMapValue { map, name, fields: Vec::with_capacity(len) }
}
}
@ -414,9 +396,7 @@ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), M::
}
fn end(mut self) -> Result<M::Ok, M::Error> {
tri!(self
.map
.serialize_value(&Content::Struct(self.name, self.fields)));
tri!(self.map.serialize_value(&Content::Struct(self.name, self.fields)));
self.map.end()
}
}
@ -456,12 +436,7 @@ pub enum Content {
TupleVariant(&'static str, u32, &'static str, Vec<Content>),
Map(Vec<(Content, Content)>),
Struct(&'static str, Vec<(&'static str, Content)>),
StructVariant(
&'static str,
u32,
&'static str,
Vec<(&'static str, Content)>,
),
StructVariant(&'static str, u32, &'static str, Vec<(&'static str, Content)>),
}
impl Serialize for Content {
@ -659,10 +634,7 @@ fn serialize_newtype_struct<T>(self, name: &'static str, value: &T) -> Result<Co
where
T: ?Sized + Serialize,
{
Ok(Content::NewtypeStruct(
name,
Box::new(tri!(value.serialize(self))),
))
Ok(Content::NewtypeStruct(name, Box::new(tri!(value.serialize(self)))))
}
fn serialize_newtype_variant<T>(
@ -684,17 +656,11 @@ fn serialize_newtype_variant<T>(
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, E> {
Ok(SerializeSeq {
elements: Vec::with_capacity(len.unwrap_or(0)),
error: PhantomData,
})
Ok(SerializeSeq { elements: Vec::with_capacity(len.unwrap_or(0)), error: PhantomData })
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, E> {
Ok(SerializeTuple {
elements: Vec::with_capacity(len),
error: PhantomData,
})
Ok(SerializeTuple { elements: Vec::with_capacity(len), error: PhantomData })
}
fn serialize_tuple_struct(
@ -702,11 +668,7 @@ fn serialize_tuple_struct(
name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct, E> {
Ok(SerializeTupleStruct {
name,
fields: Vec::with_capacity(len),
error: PhantomData,
})
Ok(SerializeTupleStruct { name, fields: Vec::with_capacity(len), error: PhantomData })
}
fn serialize_tuple_variant(
@ -738,11 +700,7 @@ fn serialize_struct(
name: &'static str,
len: usize,
) -> Result<Self::SerializeStruct, E> {
Ok(SerializeStruct {
name,
fields: Vec::with_capacity(len),
error: PhantomData,
})
Ok(SerializeStruct { name, fields: Vec::with_capacity(len), error: PhantomData })
}
fn serialize_struct_variant(
@ -866,12 +824,7 @@ fn serialize_field<T>(&mut self, value: &T) -> Result<(), E>
}
fn end(self) -> Result<Content, E> {
Ok(Content::TupleVariant(
self.name,
self.variant_index,
self.variant,
self.fields,
))
Ok(Content::TupleVariant(self.name, self.variant_index, self.variant, self.fields))
}
}
@ -901,10 +854,7 @@ fn serialize_value<T>(&mut self, value: &T) -> Result<(), E>
where
T: ?Sized + Serialize,
{
let key = self
.key
.take()
.expect("serialize_value called before serialize_key");
let key = self.key.take().expect("serialize_value called before serialize_key");
let value = tri!(value.serialize(ContentSerializer::<E>::new()));
self.entries.push((key, value));
Ok(())
@ -978,12 +928,7 @@ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), E>
}
fn end(self) -> Result<Content, E> {
Ok(Content::StructVariant(
self.name,
self.variant_index,
self.variant,
self.fields,
))
Ok(Content::StructVariant(self.name, self.variant_index, self.variant, self.fields))
}
}
}
@ -997,10 +942,7 @@ impl<'a, M> FlatMapSerializer<'a, M>
M: SerializeMap + 'a,
{
fn bad_type(what: Unsupported) -> M::Error {
ser::Error::custom(format_args!(
"can only flatten structs and maps (got {})",
what
))
ser::Error::custom(format_args!("can only flatten structs and maps (got {})", what))
}
}
@ -1176,10 +1118,7 @@ fn serialize_struct_variant(
_: usize,
) -> Result<Self::SerializeStructVariant, Self::Error> {
tri!(self.0.serialize_key(inner_variant));
Ok(FlatMapSerializeStructVariantAsMapValue::new(
self.0,
inner_variant,
))
Ok(FlatMapSerializeStructVariantAsMapValue::new(self.0, inner_variant))
}
}
@ -1258,10 +1197,7 @@ impl<'a, M> FlatMapSerializeTupleVariantAsMapValue<'a, M>
M: SerializeMap + 'a,
{
fn new(map: &'a mut M) -> Self {
FlatMapSerializeTupleVariantAsMapValue {
map,
fields: Vec::new(),
}
FlatMapSerializeTupleVariantAsMapValue { map, fields: Vec::new() }
}
}
@ -1303,11 +1239,7 @@ impl<'a, M> FlatMapSerializeStructVariantAsMapValue<'a, M>
M: SerializeMap + 'a,
{
fn new(map: &'a mut M, name: &'static str) -> FlatMapSerializeStructVariantAsMapValue<'a, M> {
FlatMapSerializeStructVariantAsMapValue {
map,
name,
fields: Vec::new(),
}
FlatMapSerializeStructVariantAsMapValue { map, name, fields: Vec::new() }
}
}
@ -1329,9 +1261,7 @@ fn serialize_field<T>(&mut self, key: &'static str, value: &T) -> Result<(), Sel
}
fn end(self) -> Result<(), Self::Error> {
tri!(self
.map
.serialize_value(&Content::Struct(self.name, self.fields)));
tri!(self.map.serialize_value(&Content::Struct(self.name, self.fields)));
Ok(())
}
}


@ -414,10 +414,7 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
}
#[cfg_attr(docsrs, doc(fake_variadic))]
#[cfg_attr(
docsrs,
doc = "This trait is implemented for tuples up to 16 items long."
)]
#[cfg_attr(docsrs, doc = "This trait is implemented for tuples up to 16 items long.")]
impl<T> Serialize for (T,)
where
T: Serialize,
@ -578,10 +575,7 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
///
/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))]
#[cfg_attr(
docsrs,
doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc"))))
)]
#[cfg_attr(docsrs, doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))))]
impl<T> Serialize for RcWeak<T>
where
T: ?Sized + Serialize,
@ -598,10 +592,7 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
///
/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))]
#[cfg_attr(
docsrs,
doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc"))))
)]
#[cfg_attr(docsrs, doc(cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))))]
impl<T> Serialize for ArcWeak<T>
where
T: ?Sized + Serialize,


@ -31,10 +31,7 @@ pub fn with_where_predicates(
predicates: &[syn::WherePredicate],
) -> syn::Generics {
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.extend(predicates.iter().cloned());
generics.make_where_clause().predicates.extend(predicates.iter().cloned());
generics
}
@ -241,10 +238,7 @@ fn visit_type_param_bound(&mut self, bound: &'ast syn::TypeParamBound) {
fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
}
let all_type_params = generics
.type_params()
.map(|param| param.ident.clone())
.collect();
let all_type_params = generics.type_params().map(|param| param.ident.clone()).collect();
let mut visitor = FindTyParams {
all_type_params,
@ -276,10 +270,7 @@ fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
.type_params()
.map(|param| param.ident.clone())
.filter(|id| relevant_type_params.contains(id))
.map(|id| syn::TypePath {
qself: None,
path: id.into(),
})
.map(|id| syn::TypePath { qself: None, path: id.into() })
.chain(associated_type_usage.into_iter().cloned())
.map(|bounded_ty| {
syn::WherePredicate::Type(syn::PredicateType {
@ -300,10 +291,7 @@ fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
});
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.extend(new_predicates);
generics.make_where_clause().predicates.extend(new_predicates);
generics
}
@ -313,24 +301,21 @@ pub fn with_self_bound(
bound: &syn::Path,
) -> syn::Generics {
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.push(syn::WherePredicate::Type(syn::PredicateType {
generics.make_where_clause().predicates.push(syn::WherePredicate::Type(syn::PredicateType {
lifetimes: None,
// the type that is being bounded e.g. MyStruct<'a, T>
bounded_ty: type_of_item(cont),
colon_token: <Token![:]>::default(),
// the bound e.g. Default
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
// the type that is being bounded e.g. MyStruct<'a, T>
bounded_ty: type_of_item(cont),
colon_token: <Token![:]>::default(),
// the bound e.g. Default
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
})]
.into_iter()
.collect(),
}));
path: bound.clone(),
})]
.into_iter()
.collect(),
}));
generics
}
@ -351,9 +336,7 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen
param.bounds.push(bound.clone());
}
syn::GenericParam::Type(param) => {
param
.bounds
.push(syn::TypeParamBound::Lifetime(bound.clone()));
param.bounds.push(syn::TypeParamBound::Lifetime(bound.clone()));
}
syn::GenericParam::Const(_) => {}
}
@ -361,10 +344,7 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen
}))
.collect();
syn::Generics {
params,
..generics.clone()
}
syn::Generics { params, ..generics.clone() }
}
fn type_of_item(cont: &Container) -> syn::Type {


@ -60,10 +60,7 @@ fn deserialize<__D>(__deserializer: __D) -> #serde::__private::Result<Self, __D:
}
};
Ok(dummy::wrap_in_const(
cont.attrs.custom_serde_path(),
impl_block,
))
Ok(dummy::wrap_in_const(cont.attrs.custom_serde_path(), impl_block))
}
fn precondition(cx: &Ctxt, cont: &Container) {
@ -75,10 +72,7 @@ fn precondition_sized(cx: &Ctxt, cont: &Container) {
if let Data::Struct(_, fields) = &cont.data {
if let Some(last) = fields.last() {
if let syn::Type::Slice(_) = ungroup(last.ty) {
cx.error_spanned_by(
cont.original,
"cannot deserialize a dynamically sized struct",
);
cx.error_spanned_by(cont.original, "cannot deserialize a dynamically sized struct");
}
}
}
@ -136,15 +130,7 @@ fn new(cont: &Container) -> Self {
let has_getter = cont.data.has_getter();
let is_packed = cont.attrs.is_packed();
Parameters {
local,
this_type,
this_value,
generics,
borrowed,
has_getter,
is_packed,
}
Parameters { local, this_type, this_value, generics, borrowed, has_getter, is_packed }
}
/// Type name to use in error messages and `&'static str` arguments to
@ -215,11 +201,7 @@ fn needs_deserialize_bound(field: &attr::Field, variant: Option<&attr::Variant>)
// Fields with a `default` attribute (not `default=...`), and fields with a
// `skip_deserializing` attribute that do not also have `default=...`.
fn requires_default(field: &attr::Field, _variant: Option<&attr::Variant>) -> bool {
if let attr::Default::Default = *field.default() {
true
} else {
false
}
if let attr::Default::Default = *field.default() { true } else { false }
}
enum BorrowedLifetimes {
@ -307,10 +289,7 @@ fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option<St
|| cont.attrs.type_from().is_some()
|| cont.attrs.type_try_from().is_some()
|| cont.attrs.identifier().is_some()
|| cont
.data
.all_fields()
.all(|f| f.attrs.deserialize_with().is_some())
|| cont.data.all_fields().all(|f| f.attrs.deserialize_with().is_some())
{
return None;
}
@ -463,10 +442,7 @@ fn deserialize_tuple(
) -> Fragment {
assert!(!cattrs.has_flatten());
let field_count = fields
.iter()
.filter(|field| !field.attrs.skip_deserializing())
.count();
let field_count = fields.iter().filter(|field| !field.attrs.skip_deserializing()).count();
let this_type = &params.this_type;
let this_value = &params.this_value;
@ -507,9 +483,7 @@ fn deserialize_tuple(
_ => None,
};
let visit_seq = Stmts(deserialize_seq(
&type_path, params, fields, false, cattrs, expecting,
));
let visit_seq = Stmts(deserialize_seq(&type_path, params, fields, false, cattrs, expecting));
let visitor_expr = quote! {
__Visitor {
@ -538,11 +512,7 @@ fn deserialize_tuple(
},
};
let visitor_var = if field_count == 0 {
quote!(_)
} else {
quote!(mut __seq)
};
let visitor_var = if field_count == 0 { quote!(_) } else { quote!(mut __seq) };
quote_block! {
#[doc(hidden)]
@ -581,10 +551,7 @@ fn deserialize_tuple_in_place(
) -> Fragment {
assert!(!cattrs.has_flatten());
let field_count = fields
.iter()
.filter(|field| !field.attrs.skip_deserializing())
.count();
let field_count = fields.iter().filter(|field| !field.attrs.skip_deserializing()).count();
let this_type = &params.this_type;
let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
@ -630,11 +597,7 @@ fn visit_newtype_struct<__E>(self, __e: __E) -> _serde::__private::Result<Self::
quote!(_serde::Deserializer::deserialize_tuple_struct(__deserializer, #type_name, #field_count, #visitor_expr))
};
let visitor_var = if field_count == 0 {
quote!(_)
} else {
quote!(mut __seq)
};
let visitor_var = if field_count == 0 { quote!(_) } else { quote!(mut __seq) };
let in_place_impl_generics = de_impl_generics.in_place();
let in_place_ty_generics = de_ty_generics.in_place();
@ -679,10 +642,8 @@ fn deserialize_seq(
) -> Fragment {
let vars = (0..fields.len()).map(field_i as fn(_) -> _);
let deserialized_count = fields
.iter()
.filter(|field| !field.attrs.skip_deserializing())
.count();
let deserialized_count =
fields.iter().filter(|field| !field.attrs.skip_deserializing()).count();
let expecting = if deserialized_count == 1 {
format!("{} with 1 element", expecting)
} else {
@ -775,10 +736,8 @@ fn deserialize_seq_in_place(
cattrs: &attr::Container,
expecting: &str,
) -> Fragment {
let deserialized_count = fields
.iter()
.filter(|field| !field.attrs.skip_deserializing())
.count();
let deserialized_count =
fields.iter().filter(|field| !field.attrs.skip_deserializing()).count();
let expecting = if deserialized_count == 1 {
format!("{} with 1 element", expecting)
} else {
@ -951,11 +910,7 @@ fn deserialize_struct(
// so they don't appear in the storage in their literal form
.filter(|&(_, field)| !field.attrs.skip_deserializing() && !field.attrs.flatten())
.map(|(i, field)| {
(
field.attrs.name().deserialize_name(),
field_i(i),
field.attrs.aliases(),
)
(field.attrs.name().deserialize_name(), field_i(i), field.attrs.aliases())
})
.collect();
let field_visitor = deserialize_field_identifier(&field_names_idents, cattrs);
@ -966,15 +921,10 @@ fn deserialize_struct(
StructForm::Untagged(..) => None,
_ if cattrs.has_flatten() => None,
_ => {
let mut_seq = if field_names_idents.is_empty() {
quote!(_)
} else {
quote!(mut __seq)
};
let mut_seq = if field_names_idents.is_empty() { quote!(_) } else { quote!(mut __seq) };
let visit_seq = Stmts(deserialize_seq(
&type_path, params, fields, true, cattrs, expecting,
));
let visit_seq =
Stmts(deserialize_seq(&type_path, params, fields, true, cattrs, expecting));
Some(quote! {
#[inline]
@ -1008,9 +958,7 @@ fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result<Self
let fields_stmt = if cattrs.has_flatten() {
None
} else {
let field_names = field_names_idents
.iter()
.flat_map(|&(_, _, aliases)| aliases);
let field_names = field_names_idents.iter().flat_map(|&(_, _, aliases)| aliases);
Some(quote! {
#[doc(hidden)]
@ -1108,26 +1056,16 @@ fn deserialize_struct_in_place(
.enumerate()
.filter(|&(_, field)| !field.attrs.skip_deserializing())
.map(|(i, field)| {
(
field.attrs.name().deserialize_name(),
field_i(i),
field.attrs.aliases(),
)
(field.attrs.name().deserialize_name(), field_i(i), field.attrs.aliases())
})
.collect();
let field_visitor = deserialize_field_identifier(&field_names_idents, cattrs);
let mut_seq = if field_names_idents.is_empty() {
quote!(_)
} else {
quote!(mut __seq)
};
let mut_seq = if field_names_idents.is_empty() { quote!(_) } else { quote!(mut __seq) };
let visit_seq = Stmts(deserialize_seq_in_place(params, fields, cattrs, expecting));
let visit_map = Stmts(deserialize_map_in_place(params, fields, cattrs));
let field_names = field_names_idents
.iter()
.flat_map(|&(_, _, aliases)| aliases);
let field_names = field_names_idents.iter().flat_map(|&(_, _, aliases)| aliases);
let type_name = cattrs.name().deserialize_name();
let in_place_impl_generics = de_impl_generics.in_place();
@ -1214,25 +1152,18 @@ fn prepare_enum_variant_enum(
variants: &[Variant],
cattrs: &attr::Container,
) -> (TokenStream, Stmts) {
let mut deserialized_variants = variants
.iter()
.enumerate()
.filter(|&(_, variant)| !variant.attrs.skip_deserializing());
let mut deserialized_variants =
variants.iter().enumerate().filter(|&(_, variant)| !variant.attrs.skip_deserializing());
let variant_names_idents: Vec<_> = deserialized_variants
.clone()
.map(|(i, variant)| {
(
variant.attrs.name().deserialize_name(),
field_i(i),
variant.attrs.aliases(),
)
(variant.attrs.name().deserialize_name(), field_i(i), variant.attrs.aliases())
})
.collect();
let fallthrough = deserialized_variants
.position(|(_, variant)| variant.attrs.other())
.map(|other_idx| {
let fallthrough =
deserialized_variants.position(|(_, variant)| variant.attrs.other()).map(|other_idx| {
let ignore_variant = variant_names_idents[other_idx].1.clone();
quote!(_serde::__private::Ok(__Field::#ignore_variant))
});
@ -1280,18 +1211,14 @@ fn deserialize_externally_tagged_enum(
.map(|(i, variant)| {
let variant_name = field_i(i);
let block = Match(deserialize_externally_tagged_variant(
params, variant, cattrs,
));
let block = Match(deserialize_externally_tagged_variant(params, variant, cattrs));
quote! {
(__Field::#variant_name, __variant) => #block
}
});
let all_skipped = variants
.iter()
.all(|variant| variant.attrs.skip_deserializing());
let all_skipped = variants.iter().all(|variant| variant.attrs.skip_deserializing());
let match_variant = if all_skipped {
// This is an empty enum like `enum Impossible {}` or an enum in which
// all variants have `#[serde(skip_deserializing)]`.
@ -1726,16 +1653,9 @@ fn deserialize_untagged_enum_after(
cattrs: &attr::Container,
first_attempt: Option<Expr>,
) -> Fragment {
let attempts = variants
.iter()
.filter(|variant| !variant.attrs.skip_deserializing())
.map(|variant| {
Expr(deserialize_untagged_variant(
params,
variant,
cattrs,
quote!(__deserializer),
))
let attempts =
variants.iter().filter(|variant| !variant.attrs.skip_deserializing()).map(|variant| {
Expr(deserialize_untagged_variant(params, variant, cattrs, quote!(__deserializer)))
});
// TODO this message could be better by saving the errors from the failed
// attempts. The heuristic used by TOML was to count the number of fields
@ -1743,10 +1663,8 @@ fn deserialize_untagged_enum_after(
// largest number of fields. I'm not sure I like that. Maybe it would be
// better to save all the errors and combine them into one message that
// explains why none of the variants matched.
let fallthrough_msg = format!(
"data did not match any variant of untagged enum {}",
params.type_name()
);
let fallthrough_msg =
format!("data did not match any variant of untagged enum {}", params.type_name());
let fallthrough_msg = cattrs.expecting().unwrap_or(&fallthrough_msg);
// Ignore any error associated with non-untagged deserialization so that we
@ -2003,11 +1921,7 @@ fn deserialize_generated_identifier(
None,
));
let lifetime = if !is_variant && cattrs.has_flatten() {
Some(quote!(<'de>))
} else {
None
};
let lifetime = if !is_variant && cattrs.has_flatten() { Some(quote!(<'de>)) } else { None };
quote_block! {
#[allow(non_camel_case_types)]
@ -2056,13 +1970,7 @@ fn deserialize_field_identifier(
(Some(ignore_variant), Some(fallthrough))
};
Stmts(deserialize_generated_identifier(
fields,
cattrs,
false,
ignore_variant,
fallthrough,
))
Stmts(deserialize_generated_identifier(fields, cattrs, false, ignore_variant, fallthrough))
}
// Generates `Deserialize::deserialize` body for an enum with
@ -2104,9 +2012,7 @@ fn deserialize_custom_identifier(
(
ordinary,
Some(fallthrough(quote!(__value))),
Some(fallthrough(quote!(_serde::__private::de::Borrowed(
__value
)))),
Some(fallthrough(quote!(_serde::__private::de::Borrowed(__value)))),
)
} else {
(variants, None, None)
@ -2195,17 +2101,12 @@ fn deserialize_identifier(
});
let bytes_mapping = fields.iter().map(|(_, ident, aliases)| {
// `aliases` also contains a main name
let aliases = aliases
.iter()
.map(|alias| Literal::byte_string(alias.as_bytes()));
let aliases = aliases.iter().map(|alias| Literal::byte_string(alias.as_bytes()));
quote!(#(#aliases)|* => _serde::__private::Ok(#this_value::#ident))
});
let expecting = expecting.unwrap_or(if is_variant {
"variant identifier"
} else {
"field identifier"
});
let expecting =
expecting.unwrap_or(if is_variant { "variant identifier" } else { "field identifier" });
let bytes_to_str = if fallthrough.is_some() || collect_other_fields {
None
@ -2462,11 +2363,8 @@ fn deserialize_map(
cattrs: &attr::Container,
) -> Fragment {
// Create the field names for the fields.
let fields_names: Vec<_> = fields
.iter()
.enumerate()
.map(|(i, field)| (field, field_i(i)))
.collect();
let fields_names: Vec<_> =
fields.iter().enumerate().map(|(i, field)| (field, field_i(i))).collect();
// Declare each field that will be deserialized.
let let_values = fields_names
@ -2681,11 +2579,8 @@ fn deserialize_map_in_place(
assert!(!cattrs.has_flatten());
// Create the field names for the fields.
let fields_names: Vec<_> = fields
.iter()
.enumerate()
.map(|(i, field)| (field, field_i(i)))
.collect();
let fields_names: Vec<_> =
fields.iter().enumerate().map(|(i, field)| (field, field_i(i))).collect();
// For deserialize_in_place, declare booleans for each field that will be
// deserialized.
@ -2905,12 +2800,8 @@ fn unwrap_to_variant_closure(
(quote! { __wrap: (#(#field_tys),*) }, quote! { __wrap })
};
let field_access = (0..variant.fields.len()).map(|n| {
Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
})
});
let field_access = (0..variant.fields.len())
.map(|n| Member::Unnamed(Index { index: n as u32, span: Span::call_site() }));
match variant.style {
Style::Struct if variant.fields.len() == 1 => {
@ -3042,9 +2933,9 @@ fn to_tokens(&self, tokens: &mut TokenStream) {
param.bounds.push(place_lifetime.lifetime.clone());
}
syn::GenericParam::Type(param) => {
param.bounds.push(syn::TypeParamBound::Lifetime(
place_lifetime.lifetime.clone(),
));
param
.bounds
.push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime.clone()));
}
syn::GenericParam::Const(_) => {}
}
@ -3088,10 +2979,8 @@ fn de_type_generics_to_tokens(
bounds: Punctuated::new(),
};
// Prepend 'de lifetime to list of generics
generics.params = Some(syn::GenericParam::Lifetime(def))
.into_iter()
.chain(generics.params)
.collect();
generics.params =
Some(syn::GenericParam::Lifetime(def)).into_iter().chain(generics.params).collect();
}
let (_, ty_generics, _) = generics.split_for_impl();
ty_generics.to_tokens(tokens);
@ -3135,12 +3024,7 @@ fn place_lifetime() -> syn::LifetimeParam {
fn split_with_de_lifetime(
params: &Parameters,
) -> (
DeImplGenerics,
DeTypeGenerics,
syn::TypeGenerics,
Option<&syn::WhereClause>,
) {
) -> (DeImplGenerics, DeTypeGenerics, syn::TypeGenerics, Option<&syn::WhereClause>) {
let de_impl_generics = DeImplGenerics(params);
let de_ty_generics = DeTypeGenerics(params);
let (_, ty_generics, where_clause) = params.generics.split_for_impl();


@ -87,10 +87,7 @@ pub fn from_ast(
has_flatten = true;
}
field.attrs.rename_by_rules(
variant
.attrs
.rename_all_rules()
.or(attrs.rename_all_fields_rules()),
variant.attrs.rename_all_rules().or(attrs.rename_all_fields_rules()),
);
}
}
@ -147,19 +144,12 @@ fn enum_from_ast<'a>(
let attrs = attr::Variant::from_ast(cx, variant);
let (style, fields) =
struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
Variant {
ident: variant.ident.clone(),
attrs,
style,
fields,
original: variant,
}
Variant { ident: variant.ident.clone(), attrs, style, fields, original: variant }
})
.collect();
let index_of_last_tagged_variant = variants
.iter()
.rposition(|variant| !variant.attrs.untagged());
let index_of_last_tagged_variant =
variants.iter().rposition(|variant| !variant.attrs.untagged());
if let Some(index_of_last_tagged_variant) = index_of_last_tagged_variant {
for variant in &variants[..index_of_last_tagged_variant] {
if variant.attrs.untagged() {
@ -178,18 +168,15 @@ fn struct_from_ast<'a>(
container_default: &attr::Default,
) -> (Style, Vec<Field<'a>>) {
match fields {
syn::Fields::Named(fields) => (
Style::Struct,
fields_from_ast(cx, &fields.named, attrs, container_default),
),
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => (
Style::Newtype,
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
),
syn::Fields::Unnamed(fields) => (
Style::Tuple,
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
),
syn::Fields::Named(fields) => {
(Style::Struct, fields_from_ast(cx, &fields.named, attrs, container_default))
}
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
(Style::Newtype, fields_from_ast(cx, &fields.unnamed, attrs, container_default))
}
syn::Fields::Unnamed(fields) => {
(Style::Tuple, fields_from_ast(cx, &fields.unnamed, attrs, container_default))
}
syn::Fields::Unit => (Style::Unit, Vec::new()),
}
}


@ -29,12 +29,7 @@ struct Attr<'c, T> {
impl<'c, T> Attr<'c, T> {
fn none(cx: &'c Ctxt, name: Symbol) -> Self {
Attr {
cx,
name,
tokens: TokenStream::new(),
value: None,
}
Attr { cx, name, tokens: TokenStream::new(), value: None }
}
fn set<A: ToTokens>(&mut self, obj: A, value: T) {
@ -98,12 +93,7 @@ struct VecAttr<'c, T> {
impl<'c, T> VecAttr<'c, T> {
fn none(cx: &'c Ctxt, name: Symbol) -> Self {
VecAttr {
cx,
name,
first_dup_tokens: TokenStream::new(),
values: Vec::new(),
}
VecAttr { cx, name, first_dup_tokens: TokenStream::new(), values: Vec::new() }
}
fn insert<A: ToTokens>(&mut self, obj: A, value: T) {
@ -668,8 +658,7 @@ pub fn custom_serde_path(&self) -> Option<&syn::Path> {
}
pub fn serde_path(&self) -> Cow<syn::Path> {
self.custom_serde_path()
.map_or_else(|| Cow::Owned(parse_quote!(_serde)), Cow::Borrowed)
self.custom_serde_path().map_or_else(|| Cow::Owned(parse_quote!(_serde)), Cow::Borrowed)
}
/// Error message generated when type can't be deserialized.
@ -690,11 +679,8 @@ fn decide_tag(
internal_tag: Attr<String>,
content: Attr<String>,
) -> TagType {
match (
untagged.0.get_with_tokens(),
internal_tag.get_with_tokens(),
content.get_with_tokens(),
) {
match (untagged.0.get_with_tokens(), internal_tag.get_with_tokens(), content.get_with_tokens())
{
(None, None, None) => TagType::External,
(Some(_), None, None) => TagType::None,
(None, Some((_, tag)), None) => {
@ -750,11 +736,8 @@ fn decide_identifier(
field_identifier: BoolAttr,
variant_identifier: BoolAttr,
) -> Identifier {
match (
&item.data,
field_identifier.0.get_with_tokens(),
variant_identifier.0.get_with_tokens(),
) {
match (&item.data, field_identifier.0.get_with_tokens(), variant_identifier.0.get_with_tokens())
{
(_, None, None) => Identifier::No,
(_, Some((field_identifier_tokens, ())), Some((variant_identifier_tokens, ()))) => {
let msg =
@ -921,16 +904,10 @@ pub fn from_ast(cx: &Ctxt, variant: &syn::Variant) -> Self {
let borrow_attribute = if meta.input.peek(Token![=]) {
// #[serde(borrow = "'a + 'b")]
let lifetimes = parse_lit_into_lifetimes(cx, &meta)?;
BorrowAttribute {
path: meta.path.clone(),
lifetimes: Some(lifetimes),
}
BorrowAttribute { path: meta.path.clone(), lifetimes: Some(lifetimes) }
} else {
// #[serde(borrow)]
BorrowAttribute {
path: meta.path.clone(),
lifetimes: None,
}
BorrowAttribute { path: meta.path.clone(), lifetimes: None }
};
match &variant.fields {
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
@ -988,9 +965,7 @@ pub fn rename_by_rules(&mut self, rules: RenameAllRules) {
if !self.name.deserialize_renamed {
self.name.deserialize = rules.deserialize.apply_to_variant(&self.name.deserialize);
}
self.name
.deserialize_aliases
.insert(self.name.deserialize.clone());
self.name.deserialize_aliases.insert(self.name.deserialize.clone());
}
pub fn rename_all_rules(&self) -> RenameAllRules {
@ -1258,38 +1233,22 @@ pub fn from_ast(
// impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, str>
// impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, [u8]>
if is_cow(&field.ty, is_str) {
let mut path = syn::Path {
leading_colon: None,
segments: Punctuated::new(),
};
let mut path = syn::Path { leading_colon: None, segments: Punctuated::new() };
let span = Span::call_site();
path.segments.push(Ident::new("_serde", span).into());
path.segments.push(Ident::new("__private", span).into());
path.segments.push(Ident::new("de", span).into());
path.segments
.push(Ident::new("borrow_cow_str", span).into());
let expr = syn::ExprPath {
attrs: Vec::new(),
qself: None,
path,
};
path.segments.push(Ident::new("borrow_cow_str", span).into());
let expr = syn::ExprPath { attrs: Vec::new(), qself: None, path };
deserialize_with.set_if_none(expr);
} else if is_cow(&field.ty, is_slice_u8) {
let mut path = syn::Path {
leading_colon: None,
segments: Punctuated::new(),
};
let mut path = syn::Path { leading_colon: None, segments: Punctuated::new() };
let span = Span::call_site();
path.segments.push(Ident::new("_serde", span).into());
path.segments.push(Ident::new("__private", span).into());
path.segments.push(Ident::new("de", span).into());
path.segments
.push(Ident::new("borrow_cow_bytes", span).into());
let expr = syn::ExprPath {
attrs: Vec::new(),
qself: None,
path,
};
path.segments.push(Ident::new("borrow_cow_bytes", span).into());
let expr = syn::ExprPath { attrs: Vec::new(), qself: None, path };
deserialize_with.set_if_none(expr);
}
} else if is_implicitly_borrowed(&field.ty) {
@ -1330,9 +1289,7 @@ pub fn rename_by_rules(&mut self, rules: RenameAllRules) {
if !self.name.deserialize_renamed {
self.name.deserialize = rules.deserialize.apply_to_field(&self.name.deserialize);
}
self.name
.deserialize_aliases
.insert(self.name.deserialize.clone());
self.name.deserialize_aliases.insert(self.name.deserialize.clone());
}
pub fn skip_serializing(&self) -> bool {
@ -1479,17 +1436,10 @@ fn get_lit_str2(
while let syn::Expr::Group(e) = value {
value = &e.expr;
}
if let syn::Expr::Lit(syn::ExprLit {
lit: syn::Lit::Str(lit),
..
}) = value
{
if let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(lit), .. }) = value {
let suffix = lit.suffix();
if !suffix.is_empty() {
cx.error_spanned_by(
lit,
format!("unexpected suffix `{}` on string literal", suffix),
);
cx.error_spanned_by(lit, format!("unexpected suffix `{}` on string literal", suffix));
}
Ok(Some(lit.clone()))
} else {
@ -1517,10 +1467,7 @@ fn parse_lit_into_path(
Ok(match string.parse() {
Ok(path) => Some(path),
Err(_) => {
cx.error_spanned_by(
&string,
format!("failed to parse path: {:?}", string.value()),
);
cx.error_spanned_by(&string, format!("failed to parse path: {:?}", string.value()));
None
}
})
@ -1539,10 +1486,7 @@ fn parse_lit_into_expr_path(
Ok(match string.parse() {
Ok(expr) => Some(expr),
Err(_) => {
cx.error_spanned_by(
&string,
format!("failed to parse path: {:?}", string.value()),
);
cx.error_spanned_by(&string, format!("failed to parse path: {:?}", string.value()));
None
}
})
@ -1559,15 +1503,13 @@ fn parse_lit_into_where(
None => return Ok(Vec::new()),
};
Ok(
match string.parse_with(Punctuated::<syn::WherePredicate, Token![,]>::parse_terminated) {
Ok(predicates) => Vec::from_iter(predicates),
Err(err) => {
cx.error_spanned_by(string, err);
Vec::new()
}
},
)
Ok(match string.parse_with(Punctuated::<syn::WherePredicate, Token![,]>::parse_terminated) {
Ok(predicates) => Vec::from_iter(predicates),
Err(err) => {
cx.error_spanned_by(string, err);
Vec::new()
}
})
}
fn parse_lit_into_ty(
@ -1608,10 +1550,7 @@ fn parse_lit_into_lifetimes(
while !input.is_empty() {
let lifetime: Lifetime = input.parse()?;
if !set.insert(lifetime.clone()) {
cx.error_spanned_by(
&string,
format!("duplicate borrowed lifetime `{}`", lifetime),
);
cx.error_spanned_by(&string, format!("duplicate borrowed lifetime `{}`", lifetime));
}
if input.is_empty() {
break;
@ -1865,10 +1804,7 @@ fn collect_lifetimes_from_tokens(tokens: TokenStream, out: &mut BTreeSet<syn::Li
match &tt {
TokenTree::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
if let Some(TokenTree::Ident(ident)) = iter.next() {
out.insert(syn::Lifetime {
apostrophe: op.span(),
ident,
});
out.insert(syn::Lifetime { apostrophe: op.span(), ident });
}
}
TokenTree::Group(group) => {

View File

@ -48,9 +48,7 @@ pub fn from_str(rename_all_str: &str) -> Result<Self, ParseError> {
return Ok(*rule);
}
}
Err(ParseError {
unknown: rename_all_str,
})
Err(ParseError { unknown: rename_all_str })
}
/// Apply a renaming rule to an enum variant, returning the version expected in the source.
@ -72,9 +70,7 @@ pub fn apply_to_variant(self, variant: &str) -> String {
}
ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(),
KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"),
ScreamingKebabCase => ScreamingSnakeCase
.apply_to_variant(variant)
.replace('_', "-"),
ScreamingKebabCase => ScreamingSnakeCase.apply_to_variant(variant).replace('_', "-"),
}
}
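To make these rules concrete, a short worked sketch in the spirit of the module's own tests below (the rule variant names are assumed to be in scope inside this module, as the match arms above show):

    // Sketch only: mirrors the conversions exercised by the rename_variants test below.
    assert_eq!(SnakeCase.apply_to_variant("VeryTasty"), "very_tasty");
    assert_eq!(KebabCase.apply_to_variant("VeryTasty"), "very-tasty");
    assert_eq!(ScreamingKebabCase.apply_to_variant("VeryTasty"), "VERY-TASTY");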
@ -139,9 +135,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
#[test]
fn rename_variants() {
for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
(
"Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
("Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME"),
(
"VeryTasty",
"verytasty",
@ -163,19 +157,14 @@ fn rename_variants() {
assert_eq!(SnakeCase.apply_to_variant(original), snake);
assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming);
assert_eq!(KebabCase.apply_to_variant(original), kebab);
assert_eq!(
ScreamingKebabCase.apply_to_variant(original),
screaming_kebab
);
assert_eq!(ScreamingKebabCase.apply_to_variant(original), screaming_kebab);
}
}
#[test]
fn rename_fields() {
for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
(
"outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
("outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME"),
(
"very_tasty",
"VERY_TASTY",

View File

@ -148,12 +148,7 @@ fn check_identifier(cx: &Ctxt, cont: &Container) {
};
for (i, variant) in variants.iter().enumerate() {
match (
variant.style,
cont.attrs.identifier(),
variant.attrs.other(),
cont.attrs.tag(),
) {
match (variant.style, cont.attrs.identifier(), variant.attrs.other(), cont.attrs.tag()) {
// The `other` attribute may not be used in a variant_identifier.
(_, Identifier::Variant, true, _) => {
cx.error_spanned_by(
@ -182,10 +177,7 @@ fn check_identifier(cx: &Ctxt, cont: &Container) {
// Variant with `other` attribute must be a unit variant.
(_, Identifier::Field, true, _) | (_, Identifier::No, true, _) => {
cx.error_spanned_by(
variant.original,
"#[serde(other)] must be on a unit variant",
);
cx.error_spanned_by(variant.original, "#[serde(other)] must be on a unit variant");
}
// Any sort of variant is allowed if this is not an identifier.
@ -358,10 +350,7 @@ fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
if type_tag == content_tag {
cx.error_spanned_by(
cont.original,
format!(
"enum tags `{}` for type and content conflict with each other",
type_tag
),
format!("enum tags `{}` for type and content conflict with each other", type_tag),
);
}
}
@ -395,10 +384,7 @@ fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
let fields = match &mut cont.data {
Data::Enum(_) => {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed on an enum",
);
cx.error_spanned_by(cont.original, "#[serde(transparent)] is not allowed on an enum");
return;
}
Data::Struct(Style::Unit, _) => {

View File

@ -20,9 +20,7 @@ impl Ctxt {
///
/// This object contains no errors, but will still trigger a panic if it is not `check`ed.
pub fn new() -> Self {
Ctxt {
errors: RefCell::new(Some(Vec::new())),
}
Ctxt { errors: RefCell::new(Some(Vec::new())) }
}
    /// Add an error to the context object with a tokenizable object.

View File

@ -1,10 +1,7 @@
use proc_macro2::{Group, Span, TokenStream, TokenTree};
pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream {
stream
.into_iter()
.map(|token| respan_token(token, span))
.collect()
stream.into_iter().map(|token| respan_token(token, span)).collect()
}
fn respan_token(mut token: TokenTree, span: Span) -> TokenTree {

View File

@ -61,7 +61,6 @@
clippy::wildcard_imports
)]
#![cfg_attr(all(test, exhaustive), feature(non_exhaustive_omitted_patterns_lint))]
#![allow(elided_lifetimes_in_paths)]
extern crate proc_macro2;
@ -90,15 +89,11 @@
#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
ser::expand_derive_serialize(&mut input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
ser::expand_derive_serialize(&mut input).unwrap_or_else(syn::Error::into_compile_error).into()
}
#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
de::expand_derive_deserialize(&mut input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
de::expand_derive_deserialize(&mut input).unwrap_or_else(syn::Error::into_compile_error).into()
}
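For orientation, a minimal downstream sketch that exercises both derive entry points end to end (the struct and the choice of serde_json as the format crate are illustrative, not taken from this repository):

// Illustrative downstream usage; not part of the diff above.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ExamplePoint {
    x: i32,
    y: i32,
}

fn main() {
    let p = ExamplePoint { x: 1, y: 2 };
    let json = serde_json::to_string(&p).unwrap();
    assert_eq!(json, r#"{"x":1,"y":2}"#);
    assert_eq!(serde_json::from_str::<ExamplePoint>(&json).unwrap(), p);
}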

View File

@ -158,9 +158,8 @@ fn pretend_variants_used(cont: &Container) -> TokenStream {
let cases = variants.iter().map(|variant| {
let variant_ident = &variant.ident;
let placeholders = &(0..variant.fields.len())
.map(|i| format_ident!("__v{}", i))
.collect::<Vec<_>>();
let placeholders =
&(0..variant.fields.len()).map(|i| format_ident!("__v{}", i)).collect::<Vec<_>>();
let pat = match variant.style {
Style::Struct => {

View File

@ -52,10 +52,7 @@ fn serialize<__S>(&self, __serializer: __S) -> #serde::__private::Result<__S::Ok
}
};
Ok(dummy::wrap_in_const(
cont.attrs.custom_serde_path(),
impl_block,
))
Ok(dummy::wrap_in_const(cont.attrs.custom_serde_path(), impl_block))
}
fn precondition(cx: &Ctxt, cont: &Container) {
@ -108,14 +105,7 @@ fn new(cont: &Container) -> Self {
let is_packed = cont.attrs.is_packed();
let generics = build_generics(cont);
Parameters {
self_var,
this_type,
this_value,
generics,
is_remote,
is_packed,
}
Parameters { self_var, this_type, this_value, generics, is_remote, is_packed }
}
/// Type name to use in error messages and `&'static str` arguments to
@ -230,14 +220,8 @@ fn serialize_newtype_struct(
) -> Fragment {
let type_name = cattrs.name().serialize_name();
let mut field_expr = get_member(
params,
field,
&Member::Unnamed(Index {
index: 0,
span: Span::call_site(),
}),
);
let mut field_expr =
get_member(params, field, &Member::Unnamed(Index { index: 0, span: Span::call_site() }));
if let Some(path) = field.attrs.serialize_with() {
field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr);
}
@ -259,11 +243,8 @@ fn serialize_tuple_struct(
let type_name = cattrs.name().serialize_name();
let mut serialized_fields = fields
.iter()
.enumerate()
.filter(|(_, field)| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().enumerate().filter(|(_, field)| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some());
@ -271,10 +252,7 @@ fn serialize_tuple_struct(
.map(|(i, field)| match field.attrs.skip_serializing_if() {
None => quote!(1),
Some(path) => {
let index = syn::Index {
index: i as u32,
span: Span::call_site(),
};
let index = syn::Index { index: i as u32, span: Span::call_site() };
let field_expr = get_member(params, field, &Member::Unnamed(index));
quote!(if #path(#field_expr) { 0 } else { 1 })
}
@ -324,10 +302,8 @@ fn serialize_struct_as_struct(
let tag_field = serialize_struct_tag_field(cattrs, &StructTrait::SerializeStruct);
let tag_field_exists = !tag_field.is_empty();
let mut serialized_fields = fields
.iter()
.filter(|&field| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().filter(|&field| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some() || tag_field_exists);
@ -339,10 +315,7 @@ fn serialize_struct_as_struct(
quote!(if #path(#field_expr) { 0 } else { 1 })
}
})
.fold(
quote!(#tag_field_exists as usize),
|sum, expr| quote!(#sum + #expr),
);
.fold(quote!(#tag_field_exists as usize), |sum, expr| quote!(#sum + #expr));
quote_block! {
let #let_mut __serde_state = _serde::Serializer::serialize_struct(__serializer, #type_name, #len)?;
@ -363,10 +336,8 @@ fn serialize_struct_as_map(
let tag_field = serialize_struct_tag_field(cattrs, &StructTrait::SerializeMap);
let tag_field_exists = !tag_field.is_empty();
let mut serialized_fields = fields
.iter()
.filter(|&field| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().filter(|&field| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some() || tag_field_exists);
@ -381,10 +352,7 @@ fn serialize_struct_as_map(
quote!(if #path(#field_expr) { 0 } else { 1 })
}
})
.fold(
quote!(#tag_field_exists as usize),
|sum, expr| quote!(#sum + #expr),
);
.fold(quote!(#tag_field_exists as usize), |sum, expr| quote!(#sum + #expr));
quote!(_serde::__private::Some(#len))
};
@ -557,19 +525,12 @@ fn serialize_externally_tagged_variant(
}
}
Style::Tuple => serialize_tuple_variant(
TupleVariant::ExternallyTagged {
type_name,
variant_index,
variant_name,
},
TupleVariant::ExternallyTagged { type_name, variant_index, variant_name },
params,
&variant.fields,
),
Style::Struct => serialize_struct_variant(
StructVariant::ExternallyTagged {
variant_index,
variant_name,
},
StructVariant::ExternallyTagged { variant_index, variant_name },
params,
&variant.fields,
type_name,
@ -806,11 +767,7 @@ fn serialize_untagged_variant(
}
enum TupleVariant<'a> {
ExternallyTagged {
type_name: &'a str,
variant_index: u32,
variant_name: &'a str,
},
ExternallyTagged { type_name: &'a str, variant_index: u32, variant_name: &'a str },
Untagged,
}
@ -826,11 +783,8 @@ fn serialize_tuple_variant(
let serialize_stmts = serialize_tuple_struct_visitor(fields, params, true, &tuple_trait);
let mut serialized_fields = fields
.iter()
.enumerate()
.filter(|(_, field)| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().enumerate().filter(|(_, field)| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some());
@ -845,11 +799,7 @@ fn serialize_tuple_variant(
.fold(quote!(0), |sum, expr| quote!(#sum + #expr));
match context {
TupleVariant::ExternallyTagged {
type_name,
variant_index,
variant_name,
} => {
TupleVariant::ExternallyTagged { type_name, variant_index, variant_name } => {
quote_block! {
let #let_mut __serde_state = _serde::Serializer::serialize_tuple_variant(
__serializer,
@ -874,14 +824,8 @@ fn serialize_tuple_variant(
}
enum StructVariant<'a> {
ExternallyTagged {
variant_index: u32,
variant_name: &'a str,
},
InternallyTagged {
tag: &'a str,
variant_name: &'a str,
},
ExternallyTagged { variant_index: u32, variant_name: &'a str },
InternallyTagged { tag: &'a str, variant_name: &'a str },
Untagged,
}
@ -904,10 +848,8 @@ fn serialize_struct_variant(
let serialize_fields = serialize_struct_visitor(fields, params, true, &struct_trait);
let mut serialized_fields = fields
.iter()
.filter(|&field| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().filter(|&field| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some());
@ -923,10 +865,7 @@ fn serialize_struct_variant(
.fold(quote!(0), |sum, expr| quote!(#sum + #expr));
match context {
StructVariant::ExternallyTagged {
variant_index,
variant_name,
} => {
StructVariant::ExternallyTagged { variant_index, variant_name } => {
quote_block! {
let #let_mut __serde_state = _serde::Serializer::serialize_struct_variant(
__serializer,
@ -978,18 +917,13 @@ fn serialize_struct_variant_with_flatten(
let struct_trait = StructTrait::SerializeMap;
let serialize_fields = serialize_struct_visitor(fields, params, true, &struct_trait);
let mut serialized_fields = fields
.iter()
.filter(|&field| !field.attrs.skip_serializing())
.peekable();
let mut serialized_fields =
fields.iter().filter(|&field| !field.attrs.skip_serializing()).peekable();
let let_mut = mut_if(serialized_fields.peek().is_some());
match context {
StructVariant::ExternallyTagged {
variant_index,
variant_name,
} => {
StructVariant::ExternallyTagged { variant_index, variant_name } => {
let this_type = &params.this_type;
let fields_ty = fields.iter().map(|f| &f.ty);
let members = &fields.iter().map(|f| &f.member).collect::<Vec<_>>();
@ -1074,17 +1008,11 @@ fn serialize_tuple_struct_visitor(
get_member(
params,
field,
&Member::Unnamed(Index {
index: i as u32,
span: Span::call_site(),
}),
&Member::Unnamed(Index { index: i as u32, span: Span::call_site() }),
)
};
let skip = field
.attrs
.skip_serializing_if()
.map(|path| quote!(#path(#field_expr)));
let skip = field.attrs.skip_serializing_if().map(|path| quote!(#path(#field_expr)));
if let Some(path) = field.attrs.serialize_with() {
field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr);
@ -1198,12 +1126,7 @@ fn wrap_serialize_variant_with(
quote!(#id)
})
.collect();
wrap_serialize_with(
params,
serialize_with,
field_tys.as_slice(),
field_exprs.as_slice(),
)
wrap_serialize_with(params, serialize_with, field_tys.as_slice(), field_exprs.as_slice())
}
fn wrap_serialize_with(
@ -1222,12 +1145,8 @@ fn wrap_serialize_with(
};
let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl();
let field_access = (0..field_exprs.len()).map(|n| {
Member::Unnamed(Index {
index: n as u32,
span: Span::call_site(),
})
});
let field_access = (0..field_exprs.len())
.map(|n| Member::Unnamed(Index { index: n as u32, span: Span::call_site() }));
quote!({
#[doc(hidden)]
@ -1259,11 +1178,7 @@ fn serialize<__S>(&self, __s: __S) -> _serde::__private::Result<__S::Ok, __S::Er
//
// where we want to omit the `mut` to avoid a warning.
fn mut_if(is_mut: bool) -> Option<TokenStream> {
if is_mut {
Some(quote!(mut))
} else {
None
}
if is_mut { Some(quote!(mut)) } else { None }
}
fn get_member(params: &Parameters, field: &Field, member: &Member) -> TokenStream {

View File

@ -77,10 +77,7 @@ impl<R> Volatile<R> {
/// assert_eq!(volatile.read(), 1);
/// ```
pub const fn new(reference: R) -> Volatile<R> {
Volatile {
reference,
access: PhantomData,
}
Volatile { reference, access: PhantomData }
}
/// Constructs a new read-only volatile instance wrapping the given reference.
@ -115,10 +112,7 @@ pub const fn new(reference: R) -> Volatile<R> {
/// // for `volatile::access::ReadOnly`
/// ```
pub const fn new_read_only(reference: R) -> Volatile<R, ReadOnly> {
Volatile {
reference,
access: PhantomData,
}
Volatile { reference, access: PhantomData }
}
/// Constructs a new write-only volatile instance wrapping the given reference.
@ -153,10 +147,7 @@ pub const fn new_read_only(reference: R) -> Volatile<R, ReadOnly> {
/// // for `volatile::access::WriteOnly`
/// ```
pub const fn new_write_only(reference: R) -> Volatile<R, WriteOnly> {
Volatile {
reference,
access: PhantomData,
}
Volatile { reference, access: PhantomData }
}
}
@ -329,10 +320,7 @@ pub fn map<'a, F, U>(&'a self, f: F) -> Volatile<&'a U, A>
U: ?Sized,
T: 'a,
{
Volatile {
reference: f(self.reference.deref()),
access: self.access,
}
Volatile { reference: f(self.reference.deref()), access: self.access }
}
/// Constructs a new mutable `Volatile` reference by mapping the wrapped value.
@ -381,10 +369,7 @@ pub fn map_mut<'a, F, U>(&'a mut self, f: F) -> Volatile<&'a mut U, A>
U: ?Sized,
T: 'a,
{
Volatile {
reference: f(&mut self.reference),
access: self.access,
}
Volatile { reference: f(&mut self.reference), access: self.access }
}
}
@ -514,11 +499,7 @@ pub fn copy_into_slice(&self, dst: &mut [T])
T: Copy,
{
let src = self.reference.deref();
assert_eq!(
src.len(),
dst.len(),
"destination and source slices have different lengths"
);
assert_eq!(src.len(), dst.len(), "destination and source slices have different lengths");
unsafe {
intrinsics::volatile_copy_nonoverlapping_memory(
dst.as_mut_ptr(),
@ -570,11 +551,7 @@ pub fn copy_from_slice(&mut self, src: &[T])
R: DerefMut,
{
let dest = self.reference.deref_mut();
assert_eq!(
dest.len(),
src.len(),
"destination and source slices have different lengths"
);
assert_eq!(dest.len(), src.len(), "destination and source slices have different lengths");
unsafe {
intrinsics::volatile_copy_nonoverlapping_memory(
dest.as_mut_ptr(),
@ -624,10 +601,7 @@ pub fn copy_within(&mut self, src: impl RangeBounds<usize>, dest: usize)
{
let slice = self.reference.deref_mut();
// implementation taken from https://github.com/rust-lang/rust/blob/683d1bcd405727fcc9209f64845bd3b9104878b8/library/core/src/slice/mod.rs#L2726-L2738
let Range {
start: src_start,
end: src_end,
} = range(src, ..slice.len());
let Range { start: src_start, end: src_end } = range(src, ..slice.len());
let count = src_end - src_start;
assert!(dest <= slice.len() - count, "dest is out of bounds");
// SAFETY: the conditions for `volatile_copy_memory` have all been checked above,
@ -755,10 +729,7 @@ impl<R> Volatile<R> {
/// // read_only.write(10); // compile-time error
/// ```
pub fn read_only(self) -> Volatile<R, ReadOnly> {
Volatile {
reference: self.reference,
access: PhantomData,
}
Volatile { reference: self.reference, access: PhantomData }
}
/// Restricts access permissions to write-only.
@ -780,10 +751,7 @@ pub fn read_only(self) -> Volatile<R, ReadOnly> {
/// // field_2.read(); // compile-time error
/// ```
pub fn write_only(self) -> Volatile<R, WriteOnly> {
Volatile {
reference: self.reference,
access: PhantomData,
}
Volatile { reference: self.reference, access: PhantomData }
}
}
@ -848,10 +816,7 @@ struct S {
field_2: bool,
}
let mut val = S {
field_1: 60,
field_2: true,
};
let mut val = S { field_1: 60, field_2: true };
let mut volatile = Volatile::new(&mut val);
volatile.map_mut(|s| &mut s.field_1).update(|v| *v += 1);
let mut field_2 = volatile.map_mut(|s| &mut s.field_2);

View File

@ -51,9 +51,7 @@
impl core::fmt::Debug for VirtAddrNotValid {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VirtAddrNotValid")
.field(&format_args!("{:#x}", self.0))
.finish()
f.debug_tuple("VirtAddrNotValid").field(&format_args!("{:#x}", self.0)).finish()
}
}
@ -85,11 +83,7 @@ pub const fn new(addr: u64) -> VirtAddr {
#[inline]
pub const fn try_new(addr: u64) -> Result<VirtAddr, VirtAddrNotValid> {
let v = Self::new_truncate(addr);
if v.0 == addr {
Ok(v)
} else {
Err(VirtAddrNotValid(addr))
}
if v.0 == addr { Ok(v) } else { Err(VirtAddrNotValid(addr)) }
}
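As a quick illustration of the canonicality rule `try_new` enforces (bits 48..64 must sign-extend bit 47; the constants here are arbitrary boundary values, not from this repository):

// Sketch: canonical addresses are accepted, a non-canonical one is rejected.
use x86_64::VirtAddr;

fn main() {
    assert!(VirtAddr::try_new(0x0000_7fff_ffff_ffff).is_ok()); // bit 47 clear, upper bits clear
    assert!(VirtAddr::try_new(0xffff_8000_0000_0000).is_ok()); // bit 47 set, upper bits set
    assert!(VirtAddr::try_new(0x0000_8000_0000_0000).is_err()); // bit 47 set, upper bits clear
}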
/// Creates a new canonical virtual address, throwing out bits 48..64.
@ -277,9 +271,7 @@ pub(crate) fn forward_checked_impl(start: Self, count: usize) -> Option<Self> {
impl fmt::Debug for VirtAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("VirtAddr")
.field(&format_args!("{:#x}", self.0))
.finish()
f.debug_tuple("VirtAddr").field(&format_args!("{:#x}", self.0)).finish()
}
}
@ -402,9 +394,7 @@ fn backward_checked(start: Self, count: usize) -> Option<Self> {
impl core::fmt::Debug for PhysAddrNotValid {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("PhysAddrNotValid")
.field(&format_args!("{:#x}", self.0))
.finish()
f.debug_tuple("PhysAddrNotValid").field(&format_args!("{:#x}", self.0)).finish()
}
}
@ -445,11 +435,7 @@ pub const fn new_truncate(addr: u64) -> PhysAddr {
#[inline]
pub const fn try_new(addr: u64) -> Result<Self, PhysAddrNotValid> {
let p = Self::new_truncate(addr);
if p.0 == addr {
Ok(p)
} else {
Err(PhysAddrNotValid(addr))
}
if p.0 == addr { Ok(p) } else { Err(PhysAddrNotValid(addr)) }
}
/// Creates a physical address that points to `0`.
@ -509,9 +495,7 @@ pub fn is_aligned<U>(self, align: U) -> bool
impl fmt::Debug for PhysAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("PhysAddr")
.field(&format_args!("{:#x}", self.0))
.finish()
f.debug_tuple("PhysAddr").field(&format_args!("{:#x}", self.0)).finish()
}
}
@ -637,18 +621,12 @@ pub fn virtaddr_new_truncate() {
fn virtaddr_step_forward() {
assert_eq!(Step::forward(VirtAddr(0), 0), VirtAddr(0));
assert_eq!(Step::forward(VirtAddr(0), 1), VirtAddr(1));
assert_eq!(
Step::forward(VirtAddr(0x7fff_ffff_ffff), 1),
VirtAddr(0xffff_8000_0000_0000)
);
assert_eq!(Step::forward(VirtAddr(0x7fff_ffff_ffff), 1), VirtAddr(0xffff_8000_0000_0000));
assert_eq!(
Step::forward(VirtAddr(0xffff_8000_0000_0000), 1),
VirtAddr(0xffff_8000_0000_0001)
);
assert_eq!(
Step::forward_checked(VirtAddr(0xffff_ffff_ffff_ffff), 1),
None
);
assert_eq!(Step::forward_checked(VirtAddr(0xffff_ffff_ffff_ffff), 1), None);
assert_eq!(
Step::forward(VirtAddr(0x7fff_ffff_ffff), 0x1234_5678_9abd),
VirtAddr(0xffff_9234_5678_9abc)
@ -661,14 +639,8 @@ fn virtaddr_step_forward() {
Step::forward(VirtAddr(0x7fff_ffff_ff00), 0x8000_0000_00ff),
VirtAddr(0xffff_ffff_ffff_ffff)
);
assert_eq!(
Step::forward_checked(VirtAddr(0x7fff_ffff_ff00), 0x8000_0000_0100),
None
);
assert_eq!(
Step::forward_checked(VirtAddr(0x7fff_ffff_ffff), 0x8000_0000_0001),
None
);
assert_eq!(Step::forward_checked(VirtAddr(0x7fff_ffff_ff00), 0x8000_0000_0100), None);
assert_eq!(Step::forward_checked(VirtAddr(0x7fff_ffff_ffff), 0x8000_0000_0001), None);
}
#[test]
@ -677,10 +649,7 @@ fn virtaddr_step_backward() {
assert_eq!(Step::backward(VirtAddr(0), 0), VirtAddr(0));
assert_eq!(Step::backward_checked(VirtAddr(0), 1), None);
assert_eq!(Step::backward(VirtAddr(1), 1), VirtAddr(0));
assert_eq!(
Step::backward(VirtAddr(0xffff_8000_0000_0000), 1),
VirtAddr(0x7fff_ffff_ffff)
);
assert_eq!(Step::backward(VirtAddr(0xffff_8000_0000_0000), 1), VirtAddr(0x7fff_ffff_ffff));
assert_eq!(
Step::backward(VirtAddr(0xffff_8000_0000_0001), 1),
VirtAddr(0xffff_8000_0000_0000)
@ -689,18 +658,12 @@ fn virtaddr_step_backward() {
Step::backward(VirtAddr(0xffff_9234_5678_9abc), 0x1234_5678_9abd),
VirtAddr(0x7fff_ffff_ffff)
);
assert_eq!(
Step::backward(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0000),
VirtAddr(0)
);
assert_eq!(Step::backward(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0000), VirtAddr(0));
assert_eq!(
Step::backward(VirtAddr(0xffff_8000_0000_0000), 0x7fff_ffff_ff01),
VirtAddr(0xff)
);
assert_eq!(
Step::backward_checked(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0001),
None
);
assert_eq!(Step::backward_checked(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0001), None);
}
#[test]
@ -710,38 +673,23 @@ fn virtaddr_steps_between() {
assert_eq!(Step::steps_between(&VirtAddr(0), &VirtAddr(1)), Some(1));
assert_eq!(Step::steps_between(&VirtAddr(1), &VirtAddr(0)), None);
assert_eq!(
Step::steps_between(
&VirtAddr(0x7fff_ffff_ffff),
&VirtAddr(0xffff_8000_0000_0000)
),
Step::steps_between(&VirtAddr(0x7fff_ffff_ffff), &VirtAddr(0xffff_8000_0000_0000)),
Some(1)
);
assert_eq!(
Step::steps_between(
&VirtAddr(0xffff_8000_0000_0000),
&VirtAddr(0x7fff_ffff_ffff)
),
Step::steps_between(&VirtAddr(0xffff_8000_0000_0000), &VirtAddr(0x7fff_ffff_ffff)),
None
);
assert_eq!(
Step::steps_between(
&VirtAddr(0xffff_8000_0000_0000),
&VirtAddr(0xffff_8000_0000_0000)
),
Step::steps_between(&VirtAddr(0xffff_8000_0000_0000), &VirtAddr(0xffff_8000_0000_0000)),
Some(0)
);
assert_eq!(
Step::steps_between(
&VirtAddr(0xffff_8000_0000_0000),
&VirtAddr(0xffff_8000_0000_0001)
),
Step::steps_between(&VirtAddr(0xffff_8000_0000_0000), &VirtAddr(0xffff_8000_0000_0001)),
Some(1)
);
assert_eq!(
Step::steps_between(
&VirtAddr(0xffff_8000_0000_0001),
&VirtAddr(0xffff_8000_0000_0000)
),
Step::steps_between(&VirtAddr(0xffff_8000_0000_0001), &VirtAddr(0xffff_8000_0000_0000)),
None
);
}
@ -775,10 +723,7 @@ fn test_virt_addr_align_up() {
#[test]
fn test_virt_addr_align_down() {
// Make sure the 47th bit is extended.
assert_eq!(
VirtAddr::new(0xffff_8000_0000_0000).align_down(1u64 << 48),
VirtAddr::new(0)
);
assert_eq!(VirtAddr::new(0xffff_8000_0000_0000).align_down(1u64 << 48), VirtAddr::new(0));
}
#[test]

View File

@ -146,10 +146,7 @@ pub fn int3() {
/// It can also cause memory/register corruption depending on the interrupt
/// implementation (if it expects values/pointers to be passed in registers).
#[cfg(feature = "asm_const")]
#[cfg_attr(
feature = "doc_cfg",
doc(cfg(any(feature = "nightly", feature = "asm_const")))
)]
#[cfg_attr(feature = "doc_cfg", doc(cfg(any(feature = "nightly", feature = "asm_const"))))]
pub unsafe fn software_interrupt<const NUM: u8>() {
unsafe {
asm!("int {num}", num = const NUM, options(nomem, nostack));

View File

@ -136,10 +136,7 @@ impl<T, A> PortGeneric<T, A> {
/// Creates an I/O port with the given port number.
#[inline]
pub const fn new(port: u16) -> PortGeneric<T, A> {
PortGeneric {
port,
phantom: PhantomData,
}
PortGeneric { port, phantom: PhantomData }
}
}
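A small usage sketch for this constructor (the port number and function name are purely illustrative):

// Sketch only: reads one byte from a legacy I/O port.
use x86_64::instructions::port::Port;

fn read_status_byte() -> u8 {
    let mut port = Port::<u8>::new(0x64); // 0x64: PS/2 controller status port, chosen for illustration
    // Port I/O has side effects, hence the unsafe block.
    unsafe { port.read() }
}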
@ -181,10 +178,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl<T, A> Clone for PortGeneric<T, A> {
fn clone(&self) -> Self {
Self {
port: self.port,
phantom: PhantomData,
}
Self { port: self.port, phantom: PhantomData }
}
}

View File

@ -11,11 +11,7 @@ pub fn new() -> Option<Self> {
// RDRAND support indicated by CPUID page 01h, ecx bit 30
// https://en.wikipedia.org/wiki/RdRand#Overview
let cpuid = unsafe { core::arch::x86_64::__cpuid(0x1) };
if cpuid.ecx & (1 << 30) != 0 {
Some(RdRand(()))
} else {
None
}
if cpuid.ecx & (1 << 30) != 0 { Some(RdRand(())) } else { None }
}
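A minimal usage sketch for this wrapper, assuming the crate's `get_u64` accessor (which yields `None` when the hardware reports a failure):

// Sketch only: obtain one hardware random value if RDRAND is available.
use x86_64::instructions::random::RdRand;

fn hardware_seed() -> Option<u64> {
    let rng = RdRand::new()?; // None on CPUs without RDRAND (CPUID.01h:ECX bit 30 clear)
    rng.get_u64()
}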
/// Uniformly sampled u64.

View File

@ -45,10 +45,8 @@ pub unsafe fn lidt(idt: &DescriptorTablePointer) {
/// Get the address of the current GDT.
#[inline]
pub fn sgdt() -> DescriptorTablePointer {
let mut gdt: DescriptorTablePointer = DescriptorTablePointer {
limit: 0,
base: VirtAddr::new(0),
};
let mut gdt: DescriptorTablePointer =
DescriptorTablePointer { limit: 0, base: VirtAddr::new(0) };
unsafe {
asm!("sgdt [{}]", in(reg) &mut gdt, options(nostack, preserves_flags));
}
@ -58,10 +56,8 @@ pub fn sgdt() -> DescriptorTablePointer {
/// Get the address of the current IDT.
#[inline]
pub fn sidt() -> DescriptorTablePointer {
let mut idt: DescriptorTablePointer = DescriptorTablePointer {
limit: 0,
base: VirtAddr::new(0),
};
let mut idt: DescriptorTablePointer =
DescriptorTablePointer { limit: 0, base: VirtAddr::new(0) };
unsafe {
asm!("sidt [{}]", in(reg) &mut idt, options(nostack, preserves_flags));
}

View File

@ -62,11 +62,7 @@ impl Pcid {
/// Create a new PCID. Will result in a failure if the value of
/// PCID is out of expected bounds.
pub const fn new(pcid: u16) -> Result<Pcid, PcidTooBig> {
if pcid >= 4096 {
Err(PcidTooBig(pcid))
} else {
Ok(Pcid(pcid))
}
if pcid >= 4096 { Err(PcidTooBig(pcid)) } else { Ok(Pcid(pcid)) }
}
/// Get the value of the current PCID.
@ -94,10 +90,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// This function is unsafe as it requires CPUID.(EAX=07H, ECX=0H):EBX.INVPCID to be 1.
#[inline]
pub unsafe fn flush_pcid(command: InvPicdCommand) {
let mut desc = InvpcidDescriptor {
address: 0,
pcid: 0,
};
let mut desc = InvpcidDescriptor { address: 0, pcid: 0 };
let kind: u64;
match command {
@ -167,11 +160,7 @@ pub fn new() -> Option<Self> {
let cpuid = unsafe { core::arch::x86_64::__cpuid(0x8000_000a) };
let nasid = cpuid.ebx;
Some(Self {
tlb_flush_nested,
invlpgb_count_max,
nasid,
})
Some(Self { tlb_flush_nested, invlpgb_count_max, nasid })
}
/// Returns the maximum count of pages to be flushed supported by the processor.
@ -272,10 +261,7 @@ pub unsafe fn pcid(&mut self, pcid: Pcid) -> &mut Self {
// FIXME: Make ASID a type and remove error type.
pub unsafe fn asid(&mut self, asid: u16) -> Result<&mut Self, AsidOutOfRangeError> {
if u32::from(asid) >= self.invlpgb.nasid {
return Err(AsidOutOfRangeError {
asid,
nasid: self.invlpgb.nasid,
});
return Err(AsidOutOfRangeError { asid, nasid: self.invlpgb.nasid });
}
self.asid = Some(asid);
@ -296,10 +282,7 @@ pub fn final_translation_only(&mut self) -> &mut Self {
    /// Also flush nested translations that could be used for guest translation.
pub fn include_nested_translations(mut self) -> Self {
assert!(
self.invlpgb.tlb_flush_nested,
"flushing all nested translations is not supported"
);
assert!(self.invlpgb.tlb_flush_nested, "flushing all nested translations is not supported");
self.include_nested_translations = true;
self
@ -372,11 +355,7 @@ pub struct AsidOutOfRangeError {
impl fmt::Display for AsidOutOfRangeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{} is out of the range of available ASIDS ({})",
self.asid, self.nasid
)
write!(f, "{} is out of the range of available ASIDS ({})", self.asid, self.nasid)
}
}

View File

@ -341,19 +341,13 @@ const fn valid_bits() -> u64 {
/// Convert from underlying bit representation, unless that representation contains bits that do not correspond to a field.
#[inline]
pub const fn from_bits(bits: u64) -> Option<Self> {
if (bits & !Self::valid_bits()) == 0 {
Some(Self { bits })
} else {
None
}
if (bits & !Self::valid_bits()) == 0 { Some(Self { bits }) } else { None }
}
/// Convert from underlying bit representation, dropping any bits that do not correspond to fields.
#[inline]
pub const fn from_bits_truncate(bits: u64) -> Self {
Self {
bits: bits & Self::valid_bits(),
}
Self { bits: bits & Self::valid_bits() }
}
/// Convert from underlying bit representation, preserving all bits (even those not corresponding to a defined field).
@ -414,8 +408,7 @@ pub fn condition(&self, n: DebugAddressRegisterNumber) -> BreakpointCondition {
/// Sets the condition field of a debug address register.
pub fn set_condition(&mut self, n: DebugAddressRegisterNumber, condition: BreakpointCondition) {
self.bits
.set_bits(BreakpointCondition::bit_range(n), condition as u64);
self.bits.set_bits(BreakpointCondition::bit_range(n), condition as u64);
}
/// Returns the size field of a debug address register.
@ -426,8 +419,7 @@ pub fn size(&self, n: DebugAddressRegisterNumber) -> BreakpointSize {
/// Sets the size field of a debug address register.
pub fn set_size(&mut self, n: DebugAddressRegisterNumber, size: BreakpointSize) {
self.bits
.set_bits(BreakpointSize::bit_range(n), size as u64);
self.bits.set_bits(BreakpointSize::bit_range(n), size as u64);
}
}

View File

@ -36,10 +36,7 @@ pub const fn new(reg: u32) -> Msr {
/// KernelGsBase Model Specific Register.
///
#[cfg_attr(
feature = "instructions",
doc = "[`GS::swap`] swaps this register with [`GsBase`]."
)]
#[cfg_attr(feature = "instructions", doc = "[`GS::swap`] swaps this register with [`GsBase`].")]
#[derive(Debug)]
pub struct KernelGsBase;
@ -373,12 +370,7 @@ pub fn read_raw() -> (u16, u16) {
/// - CS Selector SYSCALL
/// - SS Selector SYSCALL
#[inline]
pub fn read() -> (
SegmentSelector,
SegmentSelector,
SegmentSelector,
SegmentSelector,
) {
pub fn read() -> (SegmentSelector, SegmentSelector, SegmentSelector, SegmentSelector) {
let raw = Self::read_raw();
(
SegmentSelector(raw.0 + 16),

View File

@ -128,10 +128,7 @@ pub const fn empty() -> Self {
// TODO: Replace with inline_const when it's stable.
#[allow(clippy::declare_interior_mutable_const)]
const NULL: Entry = Entry::new(0);
Self {
table: [NULL; MAX],
len: 1,
}
Self { table: [NULL; MAX], len: 1 }
}
/// Forms a GDT from a slice of `u64`.
@ -155,10 +152,7 @@ pub const fn from_raw_entries(slice: &[u64]) -> Self {
assert!(len > 0, "cannot initialize GDT with empty slice");
assert!(slice[0] == 0, "first GDT entry must be zero");
assert!(
len <= MAX,
"cannot initialize GDT with slice exceeding the maximum length"
);
assert!(len <= MAX, "cannot initialize GDT with slice exceeding the maximum length");
while idx < len {
table[idx] = Entry::new(slice[idx]);
@ -522,14 +516,8 @@ pub fn from_entries() {
#[test]
pub fn descriptor_dpl() {
assert_eq!(
Descriptor::kernel_code_segment().dpl(),
PrivilegeLevel::Ring0
);
assert_eq!(
Descriptor::kernel_data_segment().dpl(),
PrivilegeLevel::Ring0
);
assert_eq!(Descriptor::kernel_code_segment().dpl(), PrivilegeLevel::Ring0);
assert_eq!(Descriptor::kernel_data_segment().dpl(), PrivilegeLevel::Ring0);
assert_eq!(Descriptor::user_code_segment().dpl(), PrivilegeLevel::Ring3);
    assert_eq!(Descriptor::user_data_segment().dpl(), PrivilegeLevel::Ring3);
}

View File

@ -1170,18 +1170,12 @@ pub struct SelectorErrorCode {
impl SelectorErrorCode {
    /// Create a SelectorErrorCode. Returns None if any of the reserved bits (16-64) are set.
pub const fn new(value: u64) -> Option<Self> {
if value > u16::MAX as u64 {
None
} else {
Some(Self { flags: value })
}
if value > u16::MAX as u64 { None } else { Some(Self { flags: value }) }
}
/// Create a new SelectorErrorCode dropping any reserved bits (16-64).
pub const fn new_truncate(value: u64) -> Self {
Self {
flags: (value as u16) as u64,
}
Self { flags: (value as u16) as u64 }
}
/// If true, indicates that the exception occurred during delivery of an event
@ -1637,9 +1631,7 @@ fn isr_frame_manipulation() {
});
unsafe {
frame
.as_mut()
.update(|f| f.instruction_pointer = f.instruction_pointer + 2u64);
frame.as_mut().update(|f| f.instruction_pointer = f.instruction_pointer + 2u64);
}
}
}

View File

@ -31,10 +31,7 @@ pub fn check_descriptor_pointer_size() {
// Per the SDM, a descriptor pointer has to be 2+8=10 bytes
assert_eq!(size_of::<DescriptorTablePointer>(), 10);
// Make sure that we can reference a pointer's limit
let p = DescriptorTablePointer {
limit: 5,
base: VirtAddr::zero(),
};
let p = DescriptorTablePointer { limit: 5, base: VirtAddr::zero() };
let _: &u16 = &p.limit;
}
}

View File

@ -37,19 +37,13 @@ pub fn from_start_address(address: PhysAddr) -> Result<Self, AddressNotAligned>
/// The address must be correctly aligned.
#[inline]
pub const unsafe fn from_start_address_unchecked(start_address: PhysAddr) -> Self {
PhysFrame {
start_address,
size: PhantomData,
}
PhysFrame { start_address, size: PhantomData }
}
/// Returns the frame that contains the given physical address.
#[inline]
pub fn containing_address(address: PhysAddr) -> Self {
PhysFrame {
start_address: address.align_down(S::SIZE),
size: PhantomData,
}
PhysFrame { start_address: address.align_down(S::SIZE), size: PhantomData }
}
/// Returns the start address of the frame.
@ -72,7 +66,10 @@ pub const fn range(start: PhysFrame<S>, end: PhysFrame<S>) -> PhysFrameRange<S>
/// Returns a range of frames, inclusive `end`.
#[inline]
pub const fn range_inclusive(start: PhysFrame<S>, end: PhysFrame<S>) -> PhysFrameRangeInclusive<S> {
pub const fn range_inclusive(
start: PhysFrame<S>,
end: PhysFrame<S>,
) -> PhysFrameRangeInclusive<S> {
PhysFrameRangeInclusive { start, end }
}
}
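To illustrate `containing_address` above, a tiny sketch (the address is an arbitrary example value):

// Sketch: an arbitrary physical address lands in the 4 KiB frame starting at 0x1000.
use x86_64::structures::paging::{PhysFrame, Size4KiB};
use x86_64::PhysAddr;

fn main() {
    let frame: PhysFrame<Size4KiB> = PhysFrame::containing_address(PhysAddr::new(0x1234));
    assert_eq!(frame.start_address(), PhysAddr::new(0x1000));
}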

View File

@ -173,9 +173,7 @@ fn unmap(
page: Page<Size1GiB>,
) -> Result<(PhysFrame<Size1GiB>, MapperFlush<Size1GiB>), UnmapError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p3_entry = &mut p3[page.p3_index()];
let flags = p3_entry.flags();
@ -200,9 +198,7 @@ unsafe fn update_flags(
flags: PageTableFlags,
) -> Result<MapperFlush<Size1GiB>, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
if p3[page.p3_index()].is_unused() {
return Err(FlagUpdateError::PageNotMapped);
@ -281,12 +277,8 @@ fn unmap(
page: Page<Size2MiB>,
) -> Result<(PhysFrame<Size2MiB>, MapperFlush<Size2MiB>), UnmapError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self
.page_table_walker
.next_table_mut(&mut p3[page.p3_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self.page_table_walker.next_table_mut(&mut p3[page.p3_index()])?;
let p2_entry = &mut p2[page.p2_index()];
let flags = p2_entry.flags();
@ -311,12 +303,8 @@ unsafe fn update_flags(
flags: PageTableFlags,
) -> Result<MapperFlush<Size2MiB>, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self
.page_table_walker
.next_table_mut(&mut p3[page.p3_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self.page_table_walker.next_table_mut(&mut p3[page.p3_index()])?;
if p2[page.p2_index()].is_unused() {
return Err(FlagUpdateError::PageNotMapped);
@ -350,9 +338,7 @@ unsafe fn set_flags_p3_entry(
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p3_entry = &mut p3[page.p3_index()];
if p3_entry.is_unused() {
@ -409,15 +395,9 @@ fn unmap(
page: Page<Size4KiB>,
) -> Result<(PhysFrame<Size4KiB>, MapperFlush<Size4KiB>), UnmapError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self
.page_table_walker
.next_table_mut(&mut p3[page.p3_index()])?;
let p1 = self
.page_table_walker
.next_table_mut(&mut p2[page.p2_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self.page_table_walker.next_table_mut(&mut p3[page.p3_index()])?;
let p1 = self.page_table_walker.next_table_mut(&mut p2[page.p2_index()])?;
let p1_entry = &mut p1[page.p1_index()];
@ -436,15 +416,9 @@ unsafe fn update_flags(
flags: PageTableFlags,
) -> Result<MapperFlush<Size4KiB>, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self
.page_table_walker
.next_table_mut(&mut p3[page.p3_index()])?;
let p1 = self
.page_table_walker
.next_table_mut(&mut p2[page.p2_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self.page_table_walker.next_table_mut(&mut p3[page.p3_index()])?;
let p1 = self.page_table_walker.next_table_mut(&mut p2[page.p2_index()])?;
if p1[page.p1_index()].is_unused() {
return Err(FlagUpdateError::PageNotMapped);
@ -478,9 +452,7 @@ unsafe fn set_flags_p3_entry(
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p3_entry = &mut p3[page.p3_index()];
if p3_entry.is_unused() {
@ -498,12 +470,8 @@ unsafe fn set_flags_p2_entry(
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
let p4 = &mut self.level_4_table;
let p3 = self
.page_table_walker
.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self
.page_table_walker
.next_table_mut(&mut p3[page.p3_index()])?;
let p3 = self.page_table_walker.next_table_mut(&mut p4[page.p4_index()])?;
let p2 = self.page_table_walker.next_table_mut(&mut p3[page.p3_index()])?;
let p2_entry = &mut p2[page.p2_index()];
if p2_entry.is_unused() {
@ -586,11 +554,7 @@ fn translate(&self, addr: VirtAddr) -> TranslateResult {
};
let offset = u64::from(addr.page_offset());
let flags = p1_entry.flags();
TranslateResult::Mapped {
frame: MappedFrame::Size4KiB(frame),
offset,
flags,
}
TranslateResult::Mapped { frame: MappedFrame::Size4KiB(frame), offset, flags }
}
}
@ -629,10 +593,8 @@ unsafe fn clean_up<P: PageTableFrameMapping>(
return false;
}
let table_addr = range
.start
.start_address()
.align_down(level.table_address_space_alignment());
let table_addr =
range.start.start_address().align_down(level.table_address_space_alignment());
let start = range.start.page_table_index(level);
let end = range.end.page_table_index(level);
@ -696,9 +658,7 @@ struct PageTableWalker<P: PageTableFrameMapping> {
impl<P: PageTableFrameMapping> PageTableWalker<P> {
#[inline]
pub unsafe fn new(page_table_frame_mapping: P) -> Self {
Self {
page_table_frame_mapping,
}
Self { page_table_frame_mapping }
}
/// Internal helper function to get a reference to the page table of the next level.
@ -711,9 +671,7 @@ fn next_table<'b>(
&self,
entry: &'b PageTableEntry,
) -> Result<&'b PageTable, PageTableWalkError> {
let page_table_ptr = self
.page_table_frame_mapping
.frame_to_pointer(entry.frame()?);
let page_table_ptr = self.page_table_frame_mapping.frame_to_pointer(entry.frame()?);
let page_table: &PageTable = unsafe { &*page_table_ptr };
Ok(page_table)
@ -729,9 +687,7 @@ fn next_table_mut<'b>(
&self,
entry: &'b mut PageTableEntry,
) -> Result<&'b mut PageTable, PageTableWalkError> {
let page_table_ptr = self
.page_table_frame_mapping
.frame_to_pointer(entry.frame()?);
let page_table_ptr = self.page_table_frame_mapping.frame_to_pointer(entry.frame()?);
let page_table: &mut PageTable = unsafe { &mut *page_table_ptr };
Ok(page_table)

View File

@ -30,12 +30,8 @@ impl<'a> OffsetPageTable<'a> {
/// by writing to an illegal memory location.
#[inline]
pub unsafe fn new(level_4_table: &'a mut PageTable, phys_offset: VirtAddr) -> Self {
let phys_offset = PhysOffset {
offset: phys_offset,
};
Self {
inner: unsafe { MappedPageTable::new(level_4_table, phys_offset) },
}
let phys_offset = PhysOffset { offset: phys_offset };
Self { inner: unsafe { MappedPageTable::new(level_4_table, phys_offset) } }
}
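For context, a hedged sketch of how a kernel would typically construct this type, assuming complete physical memory is already mapped at `phys_offset` (the wrapper function name is illustrative):

// Sketch only: wraps the active level 4 table in an OffsetPageTable.
use x86_64::structures::paging::{OffsetPageTable, PageTable};
use x86_64::VirtAddr;

/// Safety: the whole of physical memory must be mapped at `phys_offset`,
/// and `l4_table` must reference the active level 4 table through that mapping.
unsafe fn make_mapper(l4_table: &'static mut PageTable, phys_offset: VirtAddr) -> OffsetPageTable<'static> {
    unsafe { OffsetPageTable::new(l4_table, phys_offset) }
}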
/// Returns an immutable reference to the wrapped level 4 `PageTable` instance.
@ -82,8 +78,7 @@ unsafe fn map_to_with_table_flags<A>(
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
self.inner.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}
@ -151,8 +146,7 @@ unsafe fn map_to_with_table_flags<A>(
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
self.inner.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}
@ -220,8 +214,7 @@ unsafe fn map_to_with_table_flags<A>(
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
self.inner.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}

View File

@ -70,10 +70,7 @@ pub fn new(table: &'a mut PageTable) -> Result<Self, InvalidPageTable> {
return Err(InvalidPageTable::NotActive);
}
Ok(RecursivePageTable {
p4: table,
recursive_index,
})
Ok(RecursivePageTable { p4: table, recursive_index })
}
/// Creates a new RecursivePageTable without performing any checks.
@ -85,10 +82,7 @@ pub fn new(table: &'a mut PageTable) -> Result<Self, InvalidPageTable> {
/// must be the index of the recursively mapped entry of that page table.
#[inline]
pub unsafe fn new_unchecked(table: &'a mut PageTable, recursive_index: PageTableIndex) -> Self {
RecursivePageTable {
p4: table,
recursive_index,
}
RecursivePageTable { p4: table, recursive_index }
}
/// Returns an immutable reference to the wrapped level 4 `PageTable` instance.
@ -789,11 +783,7 @@ fn translate(&self, addr: VirtAddr) -> TranslateResult {
let frame = PhysFrame::containing_address(entry.addr());
let offset = addr.as_u64() & 0o_777_777_7777;
let flags = entry.flags();
return TranslateResult::Mapped {
frame: MappedFrame::Size1GiB(frame),
offset,
flags,
};
return TranslateResult::Mapped { frame: MappedFrame::Size1GiB(frame), offset, flags };
}
let p2 = unsafe { &*(p2_ptr(page, self.recursive_index)) };
@ -806,11 +796,7 @@ fn translate(&self, addr: VirtAddr) -> TranslateResult {
let frame = PhysFrame::containing_address(entry.addr());
let offset = addr.as_u64() & 0o_777_7777;
let flags = entry.flags();
return TranslateResult::Mapped {
frame: MappedFrame::Size2MiB(frame),
offset,
flags,
};
return TranslateResult::Mapped { frame: MappedFrame::Size2MiB(frame), offset, flags };
}
let p1 = unsafe { &*(p1_ptr(page, self.recursive_index)) };
@ -828,11 +814,7 @@ fn translate(&self, addr: VirtAddr) -> TranslateResult {
};
let offset = u64::from(addr.page_offset());
let flags = p1_entry.flags();
TranslateResult::Mapped {
frame: MappedFrame::Size4KiB(frame),
offset,
flags,
}
TranslateResult::Mapped { frame: MappedFrame::Size4KiB(frame), offset, flags }
}
}
@ -871,10 +853,8 @@ fn clean_up(
return false;
}
let table_addr = range
.start
.start_address()
.align_down(level.table_address_space_alignment());
let table_addr =
range.start.start_address().align_down(level.table_address_space_alignment());
let start = range.start.page_table_index(level);
let end = range.end.page_table_index(level);

View File

@ -91,19 +91,13 @@ pub const fn from_start_address(address: VirtAddr) -> Result<Self, AddressNotAli
/// The address must be correctly aligned.
#[inline]
pub const unsafe fn from_start_address_unchecked(start_address: VirtAddr) -> Self {
Page {
start_address,
size: PhantomData,
}
Page { start_address, size: PhantomData }
}
/// Returns the page that contains the given virtual address.
#[inline]
pub const fn containing_address(address: VirtAddr) -> Self {
Page {
start_address: address.align_down_u64(S::SIZE),
size: PhantomData,
}
Page { start_address: address.align_down_u64(S::SIZE), size: PhantomData }
}
/// Returns the start address of the page.
@ -158,10 +152,7 @@ pub(crate) fn steps_between_impl(start: &Self, end: &Self) -> Option<usize> {
pub(crate) fn forward_checked_impl(start: Self, count: usize) -> Option<Self> {
let count = count.checked_mul(S::SIZE as usize)?;
let start_address = VirtAddr::forward_checked_impl(start.start_address, count)?;
Some(Self {
start_address,
size: PhantomData,
})
Some(Self { start_address, size: PhantomData })
}
}
@ -229,11 +220,7 @@ pub const fn p1_index(self) -> PageTableIndex {
impl<S: PageSize> fmt::Debug for Page<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_fmt(format_args!(
"Page[{}]({:#x})",
S::DEBUG_STR,
self.start_address().as_u64()
))
f.write_fmt(format_args!("Page[{}]({:#x})", S::DEBUG_STR, self.start_address().as_u64()))
}
}
@ -288,10 +275,7 @@ fn forward_checked(start: Self, count: usize) -> Option<Self> {
fn backward_checked(start: Self, count: usize) -> Option<Self> {
let count = count.checked_mul(S::SIZE as usize)?;
let start_address = Step::backward_checked(start.start_address, count)?;
Some(Self {
start_address,
size: PhantomData,
})
Some(Self { start_address, size: PhantomData })
}
}
@ -341,10 +325,7 @@ pub fn as_4kib_page_range(self) -> PageRange<Size4KiB> {
impl<S: PageSize> fmt::Debug for PageRange<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("PageRange")
.field("start", &self.start)
.field("end", &self.end)
.finish()
f.debug_struct("PageRange").field("start", &self.start).field("end", &self.end).finish()
}
}
@ -424,10 +405,7 @@ pub fn test_page_ranges() {
let mut range = Page::range(start, end);
for i in 0..number {
assert_eq!(
range.next(),
Some(Page::containing_address(start_addr + page_size * i))
);
assert_eq!(range.next(), Some(Page::containing_address(start_addr + page_size * i)));
}
assert_eq!(range.next(), None);

View File

@ -195,9 +195,7 @@ impl PageTable {
#[inline]
pub const fn new() -> Self {
const EMPTY: PageTableEntry = PageTableEntry::new();
PageTable {
entries: [EMPTY; ENTRY_COUNT],
}
PageTable { entries: [EMPTY; ENTRY_COUNT] }
}
/// Clears all entries.