shrink_to_fit TokenMap's backing storage

This commit is contained in:
Jonas Schievink 2021-01-18 18:40:06 +01:00
parent 9daba961f2
commit 2d799cf5e0
2 changed files with 5 additions and 1 deletion

View File

@@ -76,6 +76,8 @@ pub struct HygieneFrame {
impl HygieneFrames {
fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}

View File

@@ -51,6 +51,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset);
let subtree = c.go()?;
c.id_alloc.map.entries.shrink_to_fit();
Some((subtree, c.id_alloc.map))
}
@@ -593,7 +594,8 @@ fn new(cursor: Cursor<'a>) -> Self {
}
}
fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
self.token_map.entries.shrink_to_fit();
(self.inner.finish(), self.token_map)
}
}