Shrink parser positions from usize to u32
The number of source code bytes can't exceed a `u32`'s range, so a token position also can't. This reduces the size of `Parser` and `LazyAttrTokenStreamImpl` by eight bytes each.
parent f5b28968db
commit 3d750e2702
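Why the saving is eight bytes per struct rather than four: on a 64-bit target a `usize` field occupies a full 8-byte slot, while a `u32` occupies 4 bytes and can share a slot with a neighbouring small field, so once padding is accounted for the narrower position field can free a whole word. A minimal sketch of that effect (stand-in structs with made-up fields, not the real `Parser` or `LazyAttrTokenStreamImpl` layouts):

// Stand-in types only: fields chosen to mimic the padding situation,
// not the actual rustc structs.
struct PosUsize {
    position: usize, // 8 bytes, needs an 8-byte-aligned slot of its own
    flag: bool,
    other: u64,
}

struct PosU32 {
    position: u32, // 4 bytes, can share an 8-byte slot with `flag`
    flag: bool,
    other: u64,
}

fn main() {
    // On a typical 64-bit target this prints 24 then 16: one whole word saved.
    println!("{}", std::mem::size_of::<PosUsize>());
    println!("{}", std::mem::size_of::<PosU32>());
}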
@@ -120,21 +120,21 @@ struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
 
 struct BestFailure {
     token: Token,
-    position_in_tokenstream: usize,
+    position_in_tokenstream: u32,
     msg: &'static str,
     remaining_matcher: MatcherLoc,
 }
 
 impl BestFailure {
-    fn is_better_position(&self, position: usize) -> bool {
+    fn is_better_position(&self, position: u32) -> bool {
         position > self.position_in_tokenstream
     }
 }
 
 impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);
 
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
 
@@ -211,9 +211,9 @@ pub fn new() -> Self {
 }
 
 impl<'matcher> Tracker<'matcher> for FailureForwarder<'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);
 
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
 
@@ -452,7 +452,7 @@ fn parse_tt_inner<'matcher, T: Tracker<'matcher>>(
         &mut self,
         matcher: &'matcher [MatcherLoc],
         token: &Token,
-        approx_position: usize,
+        approx_position: u32,
         track: &mut T,
     ) -> Option<NamedParseResult<T::Failure>> {
         // Matcher positions that would be valid if the macro invocation was over now. Only
@@ -153,7 +153,7 @@ pub(super) trait Tracker<'matcher> {
     /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
     /// The usize is the approximate position of the token in the input token stream.
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure;
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;
 
     /// This is called before trying to match next MatcherLoc on the current token.
     fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
@@ -182,7 +182,7 @@ fn get_expected_token(&self) -> Option<&'matcher Token> {
 impl<'matcher> Tracker<'matcher> for NoopTracker {
     type Failure = ();
 
-    fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}
+    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}
 
     fn description() -> &'static str {
         "none"
@@ -29,15 +29,15 @@ pub struct AttrWrapper {
     // The start of the outer attributes in the token cursor.
     // This allows us to create a `ReplaceRange` for the entire attribute
    // target, including outer attributes.
-    start_pos: usize,
+    start_pos: u32,
 }
 
 impl AttrWrapper {
-    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
     }
     pub fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
     }
 
     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -91,7 +91,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
-    num_calls: usize,
+    num_calls: u32,
     break_last_token: bool,
     replace_ranges: Box<[ReplaceRange]>,
 }
@@ -110,7 +110,7 @@ fn to_attr_token_stream(&self) -> AttrTokenStream {
                 let token = cursor_snapshot.next();
                 (FlatToken::Token(token.0), token.1)
             }))
-            .take(self.num_calls);
+            .take(self.num_calls as usize);
 
         if self.replace_ranges.is_empty() {
             make_token_stream(tokens, self.break_last_token)
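One consequence of narrowing `num_calls` to `u32` is visible in the hunk above: `Iterator::take` expects a `usize`, so the counter is widened back with `as usize` at that call site, a lossless conversion. A tiny illustration with a hypothetical counter:

fn main() {
    // Hypothetical counter mirroring `num_calls`: stored as u32, widened only where a
    // usize-taking API such as Iterator::take requires it.
    let num_calls: u32 = 3;
    let taken: Vec<u32> = (0u32..).take(num_calls as usize).collect();
    assert_eq!(taken, vec![0, 1, 2]);
}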
@@ -296,12 +296,12 @@ pub fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
         );
 
         let end_pos = self.num_bump_calls
-            + captured_trailing as usize
+            + captured_trailing as u32
             // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
             // extend the range of captured tokens to include it, since the parser was not actually
             // bumped past it. When the `LazyAttrTokenStream` gets converted into an
             // `AttrTokenStream`, we will create the proper token.
-            + self.break_last_token as usize;
+            + self.break_last_token as u32;
 
         let num_calls = end_pos - start_pos;
 
@@ -313,14 +313,11 @@ pub fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
                 // Grab any replace ranges that occur *inside* the current AST node.
                 // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
                 // to an `AttrTokenStream`.
-                let start_calls: u32 = start_pos.try_into().unwrap();
                 self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                     .iter()
                     .cloned()
                     .chain(inner_attr_replace_ranges.iter().cloned())
-                    .map(|(range, tokens)| {
-                        ((range.start - start_calls)..(range.end - start_calls), tokens)
-                    })
+                    .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
                     .collect()
             };
 
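Because `start_pos` is now a `u32` itself, the hunk above can drop the `start_pos.try_into().unwrap()` step and rebase the captured ranges directly. A rough standalone sketch of that rebasing, using a hypothetical `Replacement` alias rather than rustc's `ReplaceRange`:

use std::ops::Range;

// Hypothetical stand-in for a replace range: a token range plus its replacement payload.
type Replacement<T> = (Range<u32>, T);

// Shift ranges recorded in whole-token-stream coordinates so they become relative to the
// start of the current AST node, in plain u32 arithmetic with no conversion step.
fn rebase<T>(ranges: Vec<Replacement<T>>, start_pos: u32) -> Vec<Replacement<T>> {
    ranges
        .into_iter()
        .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
        .collect()
}

fn main() {
    let rebased = rebase(vec![(10u32..14, "attr tokens")], 8);
    assert_eq!(rebased, vec![(2u32..6, "attr tokens")]);
}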
@@ -459,6 +456,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }
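The `static_assert_size!` lines above are why this commit has to update expected sizes at all: they turn a size regression into a compile error. A simplified re-implementation of that kind of compile-time size check (a sketch in the same spirit, not the exact rustc_data_structures macro):

// A const array type only unifies when the expected length and size_of agree,
// so a size mismatch becomes a type error at compile time.
macro_rules! assert_size {
    ($ty:ty, $size:expr) => {
        const _: [(); $size] = [(); std::mem::size_of::<$ty>()];
    };
}

struct Demo {
    num_calls: u32,
    flag: bool,
}

// Compiles only while `Demo` stays 8 bytes on this target; widening `num_calls` back to
// usize would force the asserted size up, mirroring the 104 -> 96 change above.
assert_size!(Demo, 8);

fn main() {}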
@@ -153,7 +153,7 @@ pub struct Parser<'a> {
     expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     // The number of calls to `bump`, i.e. the position in the token stream.
-    num_bump_calls: usize,
+    num_bump_calls: u32,
     // During parsing we may sometimes need to 'unglue' a glued token into two
     // component tokens (e.g. '>>' into '>' and '>), so the parser can consume
     // them one at a time. This process bypasses the normal capturing mechanism
@@ -192,7 +192,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
-rustc_data_structures::static_assert_size!(Parser<'_>, 264);
+rustc_data_structures::static_assert_size!(Parser<'_>, 256);
 
 /// Stores span information about a closure.
 #[derive(Clone, Debug)]
@@ -1572,7 +1572,7 @@ pub fn clear_expected_tokens(&mut self) {
         self.expected_tokens.clear();
     }
 
-    pub fn approx_token_stream_pos(&self) -> usize {
+    pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
 }