Make NodeId a newtype_index to enable niche optimizations

Oliver Scherer 2018-11-07 10:08:41 +01:00
parent d1d79ae3ad
commit 39a0969e64
7 changed files with 27 additions and 47 deletions
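Note on the "niche optimizations" in the title: `newtype_index!` restricts which bit patterns a `NodeId` may hold, so the compiler can reuse a forbidden pattern as the `None` discriminant and `Option<NodeId>` stays the same size as a bare `NodeId`. The top of the `u32` range is reserved for that niche rather than being a valid id, which is why the diff below swaps `NodeId::from_u32(u32::MAX)` and `NodeId(!0)` for `NodeId::MAX`. A minimal sketch of the same layout effect, using `std::num::NonZeroU32` as a stand-in (the `DemoId` type and its `+1` offset are purely illustrative, not the macro's real output):

use std::mem::size_of;
use std::num::NonZeroU32;

// Stand-in for a `newtype_index!`-style id: the wrapped representation
// excludes one bit pattern, which gives the compiler a spare "niche".
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct DemoId(NonZeroU32);

impl DemoId {
    // Store `x + 1` so index 0 stays representable; the raw maximum is
    // then unrepresentable and serves as the niche (illustrative only).
    fn from_u32(x: u32) -> DemoId {
        DemoId(NonZeroU32::new(x.checked_add(1).expect("index overflow")).unwrap())
    }

    fn as_u32(self) -> u32 {
        self.0.get() - 1
    }
}

fn main() {
    // `Option<DemoId>` reuses the forbidden bit pattern for `None`,
    // so wrapping the id in `Option` costs no extra space.
    assert_eq!(size_of::<DemoId>(), 4);
    assert_eq!(size_of::<Option<DemoId>>(), 4);
    assert_eq!(DemoId::from_u32(0).as_u32(), 0);
}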

View File

@@ -49,7 +49,6 @@ use hir::map::{self, Map};
 use super::itemlikevisit::DeepVisitor;
 use std::cmp;
-use std::u32;
 #[derive(Copy, Clone)]
 pub enum FnKind<'a> {
@@ -1152,8 +1151,8 @@ pub struct IdRange {
 impl IdRange {
     pub fn max() -> IdRange {
         IdRange {
-            min: NodeId::from_u32(u32::MAX),
-            max: NodeId::from_u32(u32::MIN),
+            min: NodeId::MAX,
+            max: NodeId::from_u32(0),
         }
     }

View File

@@ -124,7 +124,7 @@ impl<'a, 'hir: 'a> HirIdValidator<'a, 'hir> {
                 .enumerate()
                 .find(|&(_, &entry)| hir_id == entry)
                 .expect("no node_to_hir_id entry");
-            let node_id = NodeId::new(node_id);
+            let node_id = NodeId::from_usize(node_id);
             missing_items.push(format!("[local_id: {}, node:{}]",
                 local_id,
                 self.hir_map.node_to_string(node_id)));

View File

@@ -393,7 +393,7 @@ impl Session {
         match id.as_usize().checked_add(count) {
             Some(next) => {
-                self.next_node_id.set(ast::NodeId::new(next));
+                self.next_node_id.set(ast::NodeId::from_usize(next));
             }
             None => bug!("Input too large, ran out of node ids!"),
         }
@@ -1160,7 +1160,7 @@ pub fn build_session_(
         recursion_limit: Once::new(),
         type_length_limit: Once::new(),
         const_eval_stack_frame_limit: 100,
-        next_node_id: OneThread::new(Cell::new(NodeId::new(1))),
+        next_node_id: OneThread::new(Cell::new(NodeId::from_u32(1))),
         allocator_kind: Once::new(),
         injected_panic_runtime: Once::new(),
         imported_macro_spans: OneThread::new(RefCell::new(FxHashMap::default())),

View File

@@ -566,7 +566,7 @@ impl FromStr for UserIdentifiedItem {
     type Err = ();
     fn from_str(s: &str) -> Result<UserIdentifiedItem, ()> {
         Ok(s.parse()
-            .map(ast::NodeId::new)
+            .map(ast::NodeId::from_u32)
             .map(ItemViaNode)
             .unwrap_or_else(|_| ItemViaPath(s.split("::").map(|s| s.to_string()).collect())))
     }

View File

@@ -663,7 +663,7 @@ impl<'a, 'b:'a, 'c: 'b> ImportResolver<'a, 'b, 'c> {
         let mut errors = false;
         let mut seen_spans = FxHashSet::default();
         let mut error_vec = Vec::new();
-        let mut prev_root_id: NodeId = NodeId::new(0);
+        let mut prev_root_id: NodeId = NodeId::from_u32(0);
         for i in 0 .. self.determined_imports.len() {
             let import = self.determined_imports[i];
             let error = self.finalize_import(import);

View File

@@ -18,7 +18,6 @@ pub use util::parser::ExprPrecedence;
 use ext::hygiene::{Mark, SyntaxContext};
 use print::pprust;
 use ptr::P;
-use rustc_data_structures::indexed_vec;
 use rustc_data_structures::indexed_vec::Idx;
 use rustc_target::spec::abi::Abi;
 use source_map::{dummy_spanned, respan, Spanned};
@@ -31,7 +30,6 @@ use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
 use serialize::{self, Decoder, Encoder};
 use std::fmt;
-use std::u32;
 pub use rustc_target::abi::FloatTy;
@@ -213,71 +211,53 @@ pub struct ParenthesisedArgs {
     pub output: Option<P<Ty>>,
 }
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-pub struct NodeId(u32);
+// hack to ensure that we don't try to access the private parts of `NodeId` in this module
+mod node_id_inner {
+    use rustc_data_structures::indexed_vec::Idx;
+    newtype_index! {
+        pub struct NodeId {
+            ENCODABLE = custom
+        }
+    }
+}
+pub use self::node_id_inner::NodeId;
 impl NodeId {
-    pub fn new(x: usize) -> NodeId {
-        assert!(x < (u32::MAX as usize));
-        NodeId(x as u32)
-    }
-    pub fn from_u32(x: u32) -> NodeId {
-        NodeId(x)
-    }
-    pub fn as_usize(&self) -> usize {
-        self.0 as usize
-    }
-    pub fn as_u32(&self) -> u32 {
-        self.0
-    }
     pub fn placeholder_from_mark(mark: Mark) -> Self {
-        NodeId(mark.as_u32())
+        NodeId::from_u32(mark.as_u32())
     }
     pub fn placeholder_to_mark(self) -> Mark {
-        Mark::from_u32(self.0)
+        Mark::from_u32(self.as_u32())
     }
 }
 impl fmt::Display for NodeId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.0, f)
+        fmt::Display::fmt(&self.as_u32(), f)
     }
 }
 impl serialize::UseSpecializedEncodable for NodeId {
     fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_u32(self.0)
+        s.emit_u32(self.as_u32())
     }
 }
 impl serialize::UseSpecializedDecodable for NodeId {
     fn default_decode<D: Decoder>(d: &mut D) -> Result<NodeId, D::Error> {
-        d.read_u32().map(NodeId)
-    }
-}
-impl indexed_vec::Idx for NodeId {
-    fn new(idx: usize) -> Self {
-        NodeId::new(idx)
-    }
-    fn index(self) -> usize {
-        self.as_usize()
+        d.read_u32().map(NodeId::from_u32)
     }
 }
 /// Node id used to represent the root of the crate.
-pub const CRATE_NODE_ID: NodeId = NodeId(0);
+pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0);
 /// When parsing and doing expansions, we initially give all AST nodes this AST
 /// node value. Then later, in the renumber pass, we renumber them to have
 /// small, positive ids.
-pub const DUMMY_NODE_ID: NodeId = NodeId(!0);
+pub const DUMMY_NODE_ID: NodeId = NodeId::MAX;
 /// A modifier on a bound, currently this is only used for `?Sized`, where the
 /// modifier is `Maybe`. Negative bounds should also be handled here.
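The `node_id_inner` wrapper added above exists because field privacy in Rust is per module: declaring the index type in a child module and re-exporting it keeps the rest of ast.rs from touching the raw integer directly. A small sketch of the same trick with hypothetical names (`inner`, `RawId`), not the actual macro expansion:

mod inner {
    // Hypothetical stand-in for the macro-generated index type.
    pub struct RawId(u32); // the u32 field is private outside `inner`

    impl RawId {
        pub fn from_u32(x: u32) -> RawId { RawId(x) }
        pub fn as_u32(&self) -> u32 { self.0 }
    }
}
pub use self::inner::RawId;

pub fn demo() -> u32 {
    let id = RawId::from_u32(7);  // ok: goes through the public constructor
    // let raw = id.0;            // error: field `0` is private
    // let id2 = inner::RawId(7); // error: cannot construct with a private field
    id.as_u32()
}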

View File

@@ -26,6 +26,7 @@
 #![feature(rustc_diagnostic_macros)]
 #![feature(slice_sort_by_cached_key)]
 #![feature(str_escape)]
+#![feature(step_trait)]
 #![feature(try_trait)]
 #![feature(unicode_internals)]
@@ -37,7 +38,7 @@ extern crate serialize;
 #[macro_use] extern crate log;
 pub extern crate rustc_errors as errors;
 extern crate syntax_pos;
-extern crate rustc_data_structures;
+#[macro_use] extern crate rustc_data_structures;
 extern crate rustc_target;
 #[macro_use] extern crate scoped_tls;
 #[macro_use]