rename AllocationMap → RangeObjectMap

parent d98bd98385 · commit dac95a3ad8
@@ -1,3 +1,3 @@
-mod allocation_map;
 pub mod data_race;
+mod range_object_map;
 pub mod weak_memory;
@@ -1,7 +1,6 @@
-//! Implements a map from allocation ranges to data.
-//! This is somewhat similar to RangeMap, but the ranges
-//! and data are discrete and non-splittable. An allocation in the
-//! map will always have the same range until explicitly removed
+//! Implements a map from allocation ranges to data. This is somewhat similar to RangeMap, but the
+//! ranges and data are discrete and non-splittable -- they represent distinct "objects". An
+//! allocation in the map will always have the same range until explicitly removed
 
 use rustc_target::abi::Size;
 use std::ops::{Index, IndexMut, Range};
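The rewritten doc comment above is the one substantive text change: unlike RangeMap, entries here are discrete, non-splittable "objects" that keep their exact range until removed. A minimal standalone sketch of that idea, where every name and the Vec-plus-binary-search layout are illustrative assumptions rather than the crate's actual definitions:

use std::ops::Range;

// Illustrative stand-in for the structure described above: entries stay
// sorted, are never split, and keep their exact range until removed.
struct ToyRangeObjectMap<T> {
    v: Vec<(Range<u64>, T)>, // sorted by start offset, pairwise disjoint
}

impl<T> ToyRangeObjectMap<T> {
    fn new() -> Self {
        Self { v: Vec::new() }
    }

    // Ok(pos) if some entry's range contains `offset`; otherwise
    // Err(pos) with the position where such an entry would be inserted.
    fn find_offset(&self, offset: u64) -> Result<usize, usize> {
        self.v.binary_search_by(|(r, _)| {
            if r.end <= offset {
                std::cmp::Ordering::Less
            } else if r.start > offset {
                std::cmp::Ordering::Greater
            } else {
                std::cmp::Ordering::Equal
            }
        })
    }

    // Insert a new object; the whole range must be free.
    fn insert(&mut self, range: Range<u64>, data: T) {
        let pos = self.find_offset(range.start).expect_err("start overlaps an object");
        // the next entry (if any) must begin at or after our end
        assert!(self.v.get(pos).map_or(true, |(r, _)| range.end <= r.start));
        self.v.insert(pos, (range, data));
    }
}

fn main() {
    let mut map = ToyRangeObjectMap::new();
    map.insert(4..8, "alloc");
    assert_eq!(map.find_offset(5), Ok(0)); // inside 4..8
    assert_eq!(map.find_offset(2), Err(0)); // would sit before it
    assert_eq!(map.find_offset(9), Err(1)); // would sit after it
}

The Ok/Err shape matches what the empty_map test further down asserts.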
@@ -20,7 +19,7 @@ struct Elem<T> {
 type Position = usize;
 
 #[derive(Clone, Debug)]
-pub struct AllocationMap<T> {
+pub struct RangeObjectMap<T> {
     v: Vec<Elem<T>>,
 }
 
@@ -34,7 +33,7 @@ pub enum AccessType {
     ImperfectlyOverlapping(Range<Position>),
 }
 
-impl<T> AllocationMap<T> {
+impl<T> RangeObjectMap<T> {
     pub fn new() -> Self {
        Self { v: Vec::new() }
    }
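Only the ImperfectlyOverlapping variant of AccessType is visible in this hunk; the sketch below guesses at the full shape from the test names further down. The Empty and PerfectlyOverlapping variants are assumptions, not confirmed by the diff:

use std::ops::Range;

type Position = usize;

// Conjectured full enum; only ImperfectlyOverlapping appears in the hunk.
// Empty and PerfectlyOverlapping are inferred from the test names below.
enum AccessType {
    Empty(Position),                         // no overlap; insert at this position
    PerfectlyOverlapping(Position),          // exactly one object with the same range
    ImperfectlyOverlapping(Range<Position>), // partial overlap with these positions
}

fn main() {
    // How a caller might branch on the classification:
    let access = AccessType::ImperfectlyOverlapping(0..2);
    match access {
        AccessType::Empty(pos) => println!("create a new object at {pos}"),
        AccessType::PerfectlyOverlapping(pos) => println!("reuse object {pos}"),
        AccessType::ImperfectlyOverlapping(range) => {
            println!("invalidate objects {}..{} first", range.start, range.end)
        }
    }
}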
@@ -135,7 +134,7 @@ pub fn remove_from_pos(&mut self, pos: Position) {
     }
 }
 
-impl<T> Index<Position> for AllocationMap<T> {
+impl<T> Index<Position> for RangeObjectMap<T> {
     type Output = T;
 
     fn index(&self, pos: Position) -> &Self::Output {
@@ -143,7 +142,7 @@ fn index(&self, pos: Position) -> &Self::Output {
     }
 }
 
-impl<T> IndexMut<Position> for AllocationMap<T> {
+impl<T> IndexMut<Position> for RangeObjectMap<T> {
     fn index_mut(&mut self, pos: Position) -> &mut Self::Output {
         &mut self.v[pos].data
     }
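The two impls being renamed here forward indexing to the payload, so map[pos] yields the stored T rather than the internal Elem wrapper. A self-contained analogue:

use std::ops::{Index, IndexMut};

type Position = usize;

// Minimal analogue of the impls above: indexing by Position yields the
// stored payload, not the internal element wrapper.
struct Elem<T> {
    data: T,
}

struct Map<T> {
    v: Vec<Elem<T>>,
}

impl<T> Index<Position> for Map<T> {
    type Output = T;
    fn index(&self, pos: Position) -> &T {
        &self.v[pos].data
    }
}

impl<T> IndexMut<Position> for Map<T> {
    fn index_mut(&mut self, pos: Position) -> &mut T {
        &mut self.v[pos].data
    }
}

fn main() {
    let mut map = Map { v: vec![Elem { data: String::from("buffer") }] };
    map[0].push_str(" contents"); // IndexMut reaches the payload directly
    assert_eq!(map[0], "buffer contents"); // Index too
}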
@@ -159,7 +158,7 @@ mod tests {
     fn empty_map() {
         // FIXME: make Size::from_bytes const
         let four = Size::from_bytes(4);
-        let map = AllocationMap::<()>::new();
+        let map = RangeObjectMap::<()>::new();
 
         // Correctly tells where we should insert the first element (at position 0)
         assert_eq!(map.find_offset(Size::from_bytes(3)), Err(0));
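The Err(0) assertion reads naturally if find_offset follows the Ok(found)/Err(insertion_point) convention of the standard library's binary search, which the renamed map appears to mirror:

fn main() {
    // slice::binary_search has the same Ok/Err shape the test exercises:
    // Ok(index) when the key is present, Err(insertion_point) when absent.
    let starts = [4u64, 12, 20];
    assert_eq!(starts.binary_search(&12), Ok(1));
    assert_eq!(starts.binary_search(&3), Err(0)); // like find_offset on an empty map
}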
@@ -173,7 +172,7 @@ fn empty_map() {
     fn no_overlapping_inserts() {
         let four = Size::from_bytes(4);
 
-        let mut map = AllocationMap::<&str>::new();
+        let mut map = RangeObjectMap::<&str>::new();
 
         // |_|_|_|_|#|#|#|#|_|_|_|_|...
         // 0 1 2 3 4 5 6 7 8 9 a b c d
@@ -187,7 +186,7 @@ fn no_overlapping_inserts() {
     fn boundaries() {
         let four = Size::from_bytes(4);
 
-        let mut map = AllocationMap::<&str>::new();
+        let mut map = RangeObjectMap::<&str>::new();
 
         // |#|#|#|#|_|_|...
         // 0 1 2 3 4 5
@@ -215,7 +214,7 @@ fn boundaries() {
     fn perfectly_overlapping() {
         let four = Size::from_bytes(4);
 
-        let mut map = AllocationMap::<&str>::new();
+        let mut map = RangeObjectMap::<&str>::new();
 
         // |#|#|#|#|_|_|...
         // 0 1 2 3 4 5
@@ -241,7 +240,7 @@ fn perfectly_overlapping() {
     fn straddling() {
         let four = Size::from_bytes(4);
 
-        let mut map = AllocationMap::<&str>::new();
+        let mut map = RangeObjectMap::<&str>::new();
 
         // |_|_|_|_|#|#|#|#|_|_|_|_|...
         // 0 1 2 3 4 5 6 7 8 9 a b c d
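The box diagrams in these tests sketch a single object occupying bytes 4..8 (or 0..4) of an allocation. A runnable classification of the straddling and boundary cases, using plain u64 offsets and logic inferred from the test names rather than copied from the source:

use std::ops::Range;

// Self-contained illustration of the scenarios drawn in the tests above.
#[derive(Debug, PartialEq)]
enum Access {
    Empty,
    Perfect,
    Imperfect,
}

fn classify(objects: &[Range<u64>], access: Range<u64>) -> Access {
    let overlapping: Vec<_> =
        objects.iter().filter(|r| r.start < access.end && access.start < r.end).collect();
    match overlapping.as_slice() {
        [] => Access::Empty,
        [r] if **r == access => Access::Perfect,
        _ => Access::Imperfect,
    }
}

fn main() {
    // |_|_|_|_|#|#|#|#|_|_|_|_|...  one object occupying bytes 4..8
    let objects = [4u64..8];
    assert_eq!(classify(&objects, 4..8), Access::Perfect);   // exact match
    assert_eq!(classify(&objects, 1..7), Access::Imperfect); // straddles the start
    assert_eq!(classify(&objects, 8..12), Access::Empty);    // touches only the boundary
}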
@@ -85,8 +85,8 @@
 use crate::{AtomicReadOp, AtomicRwOp, AtomicWriteOp, Tag, VClock, VTimestamp, VectorIdx};
 
 use super::{
-    allocation_map::{AccessType, AllocationMap},
     data_race::{GlobalState, ThreadClockSet},
+    range_object_map::{AccessType, RangeObjectMap},
 };
 
 pub type AllocExtra = StoreBufferAlloc;
@@ -101,7 +101,7 @@
 pub struct StoreBufferAlloc {
     /// Store buffer of each atomic object in this allocation
     // Behind a RefCell because we need to allocate/remove on read access
-    store_buffers: RefCell<AllocationMap<StoreBuffer>>,
+    store_buffers: RefCell<RangeObjectMap<StoreBuffer>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
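The RefCell comment is the design note worth pausing on: store buffers may need to be created or removed during a read access, which only holds a shared reference, hence interior mutability. A stripped-down sketch of that pattern, with a HashMap standing in for the map and every name beyond store_buffers assumed:

use std::cell::{RefCell, RefMut};
use std::collections::HashMap;

#[derive(Default)]
struct StoreBuffer; // placeholder for the real per-object store buffer

// Interior mutability is the point: a read access only has &self, yet may
// need to create a buffer, which is what the RefCell comment describes.
struct StoreBufferAllocSketch {
    store_buffers: RefCell<HashMap<u64, StoreBuffer>>,
}

impl StoreBufferAllocSketch {
    // &self, not &mut self: called on reads, but can still allocate.
    fn buffer_for(&self, offset: u64) -> RefMut<'_, StoreBuffer> {
        RefMut::map(self.store_buffers.borrow_mut(), |m| m.entry(offset).or_default())
    }
}

fn main() {
    let alloc = StoreBufferAllocSketch { store_buffers: RefCell::new(HashMap::new()) };
    {
        let _buf = alloc.buffer_for(4); // buffer created on first access
    } // RefMut released before we borrow again
    assert_eq!(alloc.store_buffers.borrow().len(), 1);
}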
@@ -134,7 +134,7 @@ struct StoreElement {
 
 impl StoreBufferAlloc {
     pub fn new_allocation() -> Self {
-        Self { store_buffers: RefCell::new(AllocationMap::new()) }
+        Self { store_buffers: RefCell::new(RangeObjectMap::new()) }
     }
 
     /// Checks if the range imperfectly overlaps with existing buffers
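Tying the pieces together: a fresh allocation starts with an empty map, and the doc comment that closes this hunk points at an imperfect-overlap check before buffers are reused. A hedged end-to-end sketch, in which only the new_allocation name comes from the diff:

use std::cell::RefCell;
use std::ops::Range;

// Minimal stand-ins so the flow around new_allocation can be shown end to
// end; everything except the new_allocation name is illustrative.
#[derive(Default)]
struct RangeObjectMapSketch {
    v: Vec<Range<u64>>, // ranges of existing store buffers
}

impl RangeObjectMapSketch {
    fn is_imperfectly_overlapping(&self, access: &Range<u64>) -> bool {
        // overlaps some buffer without matching any buffer exactly
        self.v.iter().any(|r| r.start < access.end && access.start < r.end)
            && !self.v.iter().any(|r| r == access)
    }
}

struct StoreBufferAllocSketch {
    store_buffers: RefCell<RangeObjectMapSketch>,
}

impl StoreBufferAllocSketch {
    fn new_allocation() -> Self {
        // mirrors the constructor in the hunk: start with an empty map
        Self { store_buffers: RefCell::new(RangeObjectMapSketch::default()) }
    }
}

fn main() {
    let alloc = StoreBufferAllocSketch::new_allocation();
    alloc.store_buffers.borrow_mut().v.push(4..8);
    let buffers = alloc.store_buffers.borrow();
    assert!(!buffers.is_imperfectly_overlapping(&(4..8))); // perfect match
    assert!(buffers.is_imperfectly_overlapping(&(6..10))); // straddles a buffer
}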