7347: Reduce TokenMap size r=edwin0cheng a=edwin0cheng

Reduces HygieneFrameQuery's memory usage by another 10 MB.

cc #7331

bors r+

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
bors[bot] 2021-01-19 11:28:58 +00:00 committed by GitHub
commit 02edb4b31b

@@ -16,16 +16,18 @@
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum TokenTextRange {
     Token(TextRange),
-    Delimiter(TextRange, TextRange),
+    Delimiter(TextRange),
 }
 
 impl TokenTextRange {
     pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
         match self {
             TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(open, close) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(open),
-                T!['}'] | T![')'] | T![']'] => Some(close),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
                 _ => None,
             },
         }
@@ -114,8 +116,10 @@ impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
             TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(open, close) => {
-                *open == relative_range || *close == relative_range
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
             }
         })?;
         Some(token_id)
@@ -137,15 +141,17 @@ fn insert_delim(
         close_relative_range: TextRange,
     ) -> usize {
         let res = self.entries.len();
-        self.entries
-            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
         res
     }
 
     fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
         let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
-            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
         }
     }
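Storing only the covering range loses no information, because a delimiter token is always exactly one character at each end of the subtree. A minimal round-trip sketch, assuming the `text-size` crate that provides the `TextRange`/`TextSize` API used in the diff (the 10..17 offsets are made up for illustration):

```rust
use text_size::{TextRange, TextSize};

fn main() {
    // Suppose a subtree `{ foo }` spans offsets 10..17 in the macro output:
    // the open brace sits at 10..11 and the close brace at 16..17.
    let open_relative_range = TextRange::new(10.into(), 11.into());
    let close_relative_range = TextRange::new(16.into(), 17.into());

    // insert_delim now stores only the covering range ...
    let cover = open_relative_range.cover(close_relative_range);
    assert_eq!(cover, TextRange::new(10.into(), 17.into()));

    // ... and by_kind / token_by_range recompute the one-character
    // delimiter ranges from its endpoints on demand.
    let open = TextRange::at(cover.start(), 1.into());
    let close = TextRange::at(cover.end() - TextSize::of('}'), 1.into());
    assert_eq!(open, open_relative_range);
    assert_eq!(close, close_relative_range);
}
```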