Auto merge of #16098 - lnicola:sync-from-rust, r=lnicola

minor: sync from downstream
This commit is contained in:
bors 2023-12-12 10:30:06 +00:00
commit e004a5d6d5
4 changed files with 22 additions and 17 deletions

View File

@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
) )
}; };
let err = parse_err.or(err); let err = parse_err.or(err);
if cfg!(debug) { if cfg!(debug_assertions) {
arg_map.finish(); arg_map.finish();
} }

View File

@ -85,8 +85,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
} }
/// Check the version of rustc that was used to compile a proc macro crate's /// Check the version of rustc that was used to compile a proc macro crate's
///
/// binary file. /// binary file.
///
/// A proc macro crate binary's ".rustc" section has following byte layout: /// A proc macro crate binary's ".rustc" section has following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
/// * ff060000 734e6150 is followed, it's the snappy format magic bytes, /// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
@ -96,8 +96,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// The bytes you get after decompressing the snappy format portion has /// The bytes you get after decompressing the snappy format portion has
/// following layout: /// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again) /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
/// * [crate root bytes] next 4 bytes is to store crate root position, /// * [crate root bytes] next 8 bytes (4 in old versions) is to store
/// according to rustc's source code comment /// crate root position, according to rustc's source code comment
/// * [length byte] next 1 byte tells us how many bytes we should read next /// * [length byte] next 1 byte tells us how many bytes we should read next
/// for the version string's utf8 bytes /// for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI /// * [version string bytes encoded in utf8] <- GET THIS BOI
@ -119,13 +119,18 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
} }
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]); let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
// Last supported version is: // Last supported version is:
// https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632 // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
let snappy_portion = match version { let (snappy_portion, bytes_before_version) = match version {
5 | 6 => &dot_rustc[8..], 5 | 6 => (&dot_rustc[8..], 13),
7 | 8 => { 7 | 8 => {
let len_bytes = &dot_rustc[8..12]; let len_bytes = &dot_rustc[8..12];
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize; let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
&dot_rustc[12..data_len + 12] (&dot_rustc[12..data_len + 12], 13)
}
9 => {
let len_bytes = &dot_rustc[8..16];
let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
(&dot_rustc[16..data_len + 12], 17)
} }
_ => { _ => {
return Err(io::Error::new( return Err(io::Error::new(
@ -142,15 +147,15 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
Box::new(SnapDecoder::new(snappy_portion)) Box::new(SnapDecoder::new(snappy_portion))
}; };
// the bytes before version string bytes, so this basically is: // We're going to skip over the bytes before the version string, so basically:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5] // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
// 4 bytes for [crate root bytes] // 4 or 8 bytes for [crate root bytes]
// 1 byte for length of version string // 1 byte for length of version string
// so 13 bytes in total, and we should check the 13th byte // so 13 or 17 bytes in total, and we should check the last of those bytes
// to know the length // to know the length
let mut bytes_before_version = [0u8; 13]; let mut bytes = [0u8; 17];
uncompressed.read_exact(&mut bytes_before_version)?; uncompressed.read_exact(&mut bytes[..bytes_before_version])?;
let length = bytes_before_version[12]; let length = bytes[bytes_before_version - 1];
let mut version_string_utf8 = vec![0u8; length as usize]; let mut version_string_utf8 = vec![0u8; length as usize];
uncompressed.read_exact(&mut version_string_utf8)?; uncompressed.read_exact(&mut version_string_utf8)?;

View File

@ -13,7 +13,7 @@
#![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(unreachable_pub)] #![allow(unreachable_pub, internal_features)]
extern crate proc_macro; extern crate proc_macro;

View File

@ -1,5 +1,6 @@
//! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer. //! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer.
//! Copyright © 2014–2022 Chris Morgan. COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING" //! Copyright © 2014–2022 Chris Morgan.
//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING
//! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0 //! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
//! //!
//! This implementation provides a safe and convenient store for one value of each type. //! This implementation provides a safe and convenient store for one value of each type.
@ -17,7 +18,6 @@
#![warn(missing_docs, unused_results)] #![warn(missing_docs, unused_results)]
use core::convert::TryInto;
use core::hash::Hasher; use core::hash::Hasher;
/// A hasher designed to eke a little more speed out, given `TypeId`s known characteristics. /// A hasher designed to eke a little more speed out, given `TypeId`s known characteristics.