From f90c3864b66ba28c5cda46a32a564d77b0c0f848 Mon Sep 17 00:00:00 2001
From: "Felix S. Klock II"
Date: Wed, 21 Jan 2015 20:02:52 +0100
Subject: [PATCH] Add core::marker::PhantomData.

Port `core::ptr::Unique` to have `PhantomData`. Add `PhantomData` to
`TypedArena` and `Vec` as well.

As a drive-by, switch `ptr::Unique` from a tuple-struct to a struct
with fields.
---
 src/libarena/lib.rs               |  6 ++++
 src/libcollections/btree/node.rs  | 16 +++++-----
 src/libcollections/vec.rs         | 18 ++++++++----
 src/libcore/marker.rs             | 18 ++++++++++++
 src/libcore/ptr.rs                | 17 +++++++++--
 src/libcoretest/ptr.rs            |  2 +-
 src/libflate/lib.rs               |  4 +--
 src/librustc/middle/lang_items.rs |  2 ++
 src/librustc/middle/ty.rs         | 49 +++++++++++++++++++++++++++----
 9 files changed, 106 insertions(+), 26 deletions(-)

diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index 223c5111f8f..9e379e4d475 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::intrinsics::{TyDesc, get_tydesc};
 use std::intrinsics;
+use std::marker;
 use std::mem;
 use std::num::{Int, UnsignedInt};
 use std::ptr;
@@ -365,6 +366,10 @@ pub struct TypedArena<T> {
 
     /// A pointer to the first arena segment.
     first: RefCell<*mut TypedArenaChunk<T>>,
+
+    /// Marker indicating that dropping the arena causes its owned
+    /// instances of `T` to be dropped.
+    _own: marker::PhantomData<T>,
 }
 
 struct TypedArenaChunk<T> {
@@ -460,6 +465,7 @@ impl<T> TypedArena<T> {
             ptr: Cell::new((*chunk).start() as *const T),
             end: Cell::new((*chunk).end() as *const T),
             first: RefCell::new(chunk),
+            _own: marker::PhantomData,
         }
     }
 }
diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs
index bfe74cc6fb4..24523d4dcc9 100644
--- a/src/libcollections/btree/node.rs
+++ b/src/libcollections/btree/node.rs
@@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
 #[unsafe_destructor]
 impl<K, V> Drop for Node<K, V> {
     fn drop(&mut self) {
-        if self.keys.0.is_null() {
+        if self.keys.ptr.is_null() {
             // We have already cleaned up this node.
             return;
         }
@@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
             self.destroy();
         }
 
-        self.keys.0 = ptr::null_mut();
+        self.keys.ptr = ptr::null_mut();
     }
 }
 
@@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
     unsafe fn destroy(&mut self) {
         let (alignment, size) =
                 calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
-        heap::deallocate(self.keys.0 as *mut u8, size, alignment);
+        heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
     }
 
     #[inline]
     pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
         unsafe {(
             mem::transmute(raw::Slice {
-                data: self.keys.0,
+                data: self.keys.ptr,
                 len: self.len()
             }),
             mem::transmute(raw::Slice {
-                data: self.vals.0,
+                data: self.vals.ptr,
                 len: self.len()
             })
         )}
@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
         } else {
             unsafe {
                 mem::transmute(raw::Slice {
-                    data: self.edges.0,
+                    data: self.edges.ptr,
                     len: self.len() + 1
                 })
             }
@@ -586,7 +586,7 @@ impl<K, V> Node<K, V> {
 
     /// If the node has any children
     pub fn is_leaf(&self) -> bool {
-        self.edges.0.is_null()
+        self.edges.ptr.is_null()
     }
 
     /// if the node has too few elements
@@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
                 vals: RawItems::from_slice(self.vals()),
                 edges: RawItems::from_slice(self.edges()),
 
-                ptr: self.keys.0 as *mut u8,
+                ptr: self.keys.ptr as *mut u8,
                 capacity: self.capacity(),
                 is_leaf: self.is_leaf()
             },
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 1cd2a89ad60..341d91538ad 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -57,7 +57,7 @@ use core::default::Default;
 use core::fmt;
 use core::hash::{self, Hash};
 use core::iter::{repeat, FromIterator, IntoIterator};
-use core::marker::{ContravariantLifetime, InvariantType};
+use core::marker::{self, ContravariantLifetime, InvariantType};
 use core::mem;
 use core::nonzero::NonZero;
 use core::num::{Int, UnsignedInt};
@@ -140,6 +140,7 @@ pub struct Vec<T> {
     ptr: NonZero<*mut T>,
     len: usize,
     cap: usize,
+    _own: marker::PhantomData<T>,
 }
 
 unsafe impl<T: Send> Send for Vec<T> { }
@@ -166,7 +167,7 @@ impl<T> Vec<T> {
         // non-null value which is fine since we never call deallocate on the ptr
         // if cap is 0. The reason for this is because the pointer of a slice
         // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
+        unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
     }
 
     /// Constructs a new, empty `Vec` with the specified capacity.
@@ -198,7 +199,7 @@ impl<T> Vec<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn with_capacity(capacity: usize) -> Vec<T> {
         if mem::size_of::<T>() == 0 {
-            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
+            unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
         } else if capacity == 0 {
             Vec::new()
         } else {
@@ -206,7 +207,7 @@ impl<T> Vec<T> {
                                .expect("capacity overflow");
             let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
+            unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
         }
     }
 
@@ -247,7 +248,12 @@ impl<T> Vec<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
                                  capacity: usize) -> Vec<T> {
-        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
+        Vec {
+            ptr: NonZero::new(ptr),
+            len: length,
+            cap: capacity,
+            _own: marker::PhantomData,
+        }
     }
 
     /// Creates a vector by copying the elements from a raw pointer.
@@ -1626,7 +1632,7 @@ impl<T> IntoIter<T> {
             for _x in self.by_ref() { }
             let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
             mem::forget(self);
-            Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
+            Vec::from_raw_parts(allocation, 0, cap)
         }
     }
 }
diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs
index bf9e6bccc71..da93d4f6ca4 100644
--- a/src/libcore/marker.rs
+++ b/src/libcore/marker.rs
@@ -202,6 +202,24 @@ pub unsafe trait Sync {
     // Empty
 }
 
+/// A marker type that indicates to the compiler that instances of
+/// the type itself own instances of the type parameter `T`.
+///
+/// This is used to indicate that one or more instances of the type
+/// `T` could be dropped when an instance of the type itself is
+/// dropped, though that may not be apparent from the other structure
+/// of the type itself. For example, the type may hold a `*mut T`,
+/// which the compiler does not automatically treat as owned.
+#[unstable(feature = "core",
+           reason = "Newly added to deal with scoping and destructor changes")]
+#[lang="phantom_data"]
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+pub struct PhantomData<T: ?Sized>;
+
+impl<T: ?Sized> Copy for PhantomData<T> {}
+impl<T: ?Sized> Clone for PhantomData<T> {
+    fn clone(&self) -> PhantomData<T> { *self }
+}
 
 /// A marker type whose type parameter `T` is considered to be
 /// covariant with respect to the type itself. This is (typically)
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index bf801a88ca5..1b8ec048f8d 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -92,7 +92,7 @@ use mem;
 use clone::Clone;
 use intrinsics;
 use option::Option::{self, Some, None};
-use marker::{Send, Sized, Sync};
+use marker::{self, Send, Sized, Sync};
 use cmp::{PartialEq, Eq, Ord, PartialOrd};
 use cmp::Ordering::{self, Less, Equal, Greater};
 
@@ -522,7 +522,11 @@ impl<T> PartialOrd for *mut T {
 /// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
 /// internally use raw pointers to manage the memory that they own.
 #[unstable(feature = "core", reason = "recently added to this module")]
-pub struct Unique<T: ?Sized>(pub *mut T);
+pub struct Unique<T: ?Sized> {
+    /// The wrapped `*mut T`.
+    pub ptr: *mut T,
+    _own: marker::PhantomData<T>,
+}
 
 /// `Unique` pointers are `Send` if `T` is `Send` because the data they
 /// reference is unaliased. Note that this aliasing invariant is
@@ -550,6 +554,13 @@ impl<T> Unique<T> {
     #[unstable(feature = "core",
                reason = "recently added to this module")]
     pub unsafe fn offset(self, offset: int) -> *mut T {
-        self.0.offset(offset)
+        self.ptr.offset(offset)
     }
 }
+
+/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
+/// data referenced by `ptr`.
+#[allow(non_snake_case)]
+pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
+    Unique { ptr: ptr, _own: marker::PhantomData }
+}
diff --git a/src/libcoretest/ptr.rs b/src/libcoretest/ptr.rs
index 2365b907b3f..797c150e859 100644
--- a/src/libcoretest/ptr.rs
+++ b/src/libcoretest/ptr.rs
@@ -172,7 +172,7 @@ fn test_set_memory() {
 fn test_unsized_unique() {
     let xs: &mut [_] = &mut [1, 2, 3];
     let ptr = Unique(xs as *mut [_]);
-    let ys = unsafe { &mut *ptr.0 };
+    let ys = unsafe { &mut *ptr.ptr };
     let zs: &mut [_] = &mut [1, 2, 3];
     assert!(ys == zs);
 }
diff --git a/src/libflate/lib.rs b/src/libflate/lib.rs
index a81b8777af4..ff6400a11df 100644
--- a/src/libflate/lib.rs
+++ b/src/libflate/lib.rs
@@ -45,13 +45,13 @@ pub struct Bytes {
 impl Deref for Bytes {
     type Target = [u8];
     fn deref(&self) -> &[u8] {
-        unsafe { slice::from_raw_parts_mut(self.ptr.0, self.len) }
+        unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
     }
 }
 
 impl Drop for Bytes {
     fn drop(&mut self) {
-        unsafe { libc::free(self.ptr.0 as *mut _); }
+        unsafe { libc::free(self.ptr.ptr as *mut _); }
     }
 }
 
diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs
index 05969d4ea43..ef72c2242c1 100644
--- a/src/librustc/middle/lang_items.rs
+++ b/src/librustc/middle/lang_items.rs
@@ -312,6 +312,8 @@ lets_do_this! {
     ExchangeHeapLangItem,            "exchange_heap",           exchange_heap;
     OwnedBoxLangItem,                "owned_box",               owned_box;
 
+    PhantomDataItem,                 "phantom_data",            phantom_data;
+
     CovariantTypeItem,               "covariant_type",          covariant_type;
     ContravariantTypeItem,           "contravariant_type",      contravariant_type;
     InvariantTypeItem,               "invariant_type",          invariant_type;
diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs
index bedcd74cfd7..8e94991f656 100644
--- a/src/librustc/middle/ty.rs
+++ b/src/librustc/middle/ty.rs
@@ -72,6 +72,8 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::fmt;
 use std::hash::{Hash, Writer, SipHasher, Hasher};
+#[cfg(stage0)]
+use std::marker;
 use std::mem;
 use std::ops;
 use std::rc::Rc;
@@ -931,6 +933,26 @@ pub struct TyS<'tcx> {
 
     // the maximal depth of any bound regions appearing in this type.
     region_depth: u32,
+
+    // force the lifetime to be invariant to work around
+    // region-inference issues with a covariant lifetime.
+    #[cfg(stage0)]
+    marker: ShowInvariantLifetime<'tcx>,
+}
+
+#[cfg(stage0)]
+struct ShowInvariantLifetime<'a>(marker::InvariantLifetime<'a>);
+#[cfg(stage0)]
+impl<'a> ShowInvariantLifetime<'a> {
+    fn new() -> ShowInvariantLifetime<'a> {
+        ShowInvariantLifetime(marker::InvariantLifetime)
+    }
+}
+#[cfg(stage0)]
+impl<'a> fmt::Debug for ShowInvariantLifetime<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "InvariantLifetime")
+    }
 }
 
 impl fmt::Debug for TypeFlags {
@@ -939,9 +961,18 @@
     }
 }
 
+#[cfg(stage0)]
+impl<'tcx> PartialEq for TyS<'tcx> {
+    fn eq<'a,'b>(&'a self, other: &'b TyS<'tcx>) -> bool {
+        let other: &'a TyS<'tcx> = unsafe { mem::transmute(other) };
+        (self as *const _) == (other as *const _)
+    }
+}
+#[cfg(not(stage0))]
 impl<'tcx> PartialEq for TyS<'tcx> {
     fn eq(&self, other: &TyS<'tcx>) -> bool {
-        (self as *const _) == (other as *const _)
+        // (self as *const _) == (other as *const _)
+        (self as *const TyS<'tcx>) == (other as *const TyS<'tcx>)
     }
 }
 impl<'tcx> Eq for TyS<'tcx> {}
@@ -2475,11 +2506,17 @@ fn intern_ty<'tcx>(type_arena: &'tcx TypedArena<TyS<'tcx>>,
 
     let flags = FlagComputation::for_sty(&st);
 
-    let ty = type_arena.alloc(TyS {
-        sty: st,
-        flags: flags.flags,
-        region_depth: flags.depth,
-    });
+    let ty = match () {
+        #[cfg(stage0)]
+        () => type_arena.alloc(TyS { sty: st,
+                                     flags: flags.flags,
+                                     region_depth: flags.depth,
+                                     marker: ShowInvariantLifetime::new(), }),
+        #[cfg(not(stage0))]
+        () => type_arena.alloc(TyS { sty: st,
+                                     flags: flags.flags,
+                                     region_depth: flags.depth, }),
+    };
 
     debug!("Interned type: {:?} Pointer: {:?}",
            ty, ty as *const _);
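
The sketch below is illustrative and not part of the patch: it shows the pattern this change enables, namely a struct that owns data only through a raw pointer carrying a `PhantomData<T>` field so the compiler knows that dropping the wrapper also drops a `T`. The `OwnedPtr` name is hypothetical, and the sketch is written against today's `std::marker::PhantomData` and `std::ptr::NonNull` rather than the 2015-era `NonZero`/`Unique` touched above.

use std::marker::PhantomData;
use std::ptr::NonNull;

/// Owns a heap-allocated `T` through a raw pointer, in the same spirit as
/// the `Unique`/`Vec`/`TypedArena` fields above: the pointer alone does not
/// tell the compiler that a `T` is owned; the `PhantomData<T>` field does.
struct OwnedPtr<T> {
    ptr: NonNull<T>,
    _own: PhantomData<T>, // "dropping OwnedPtr<T> may drop a T"
}

impl<T> OwnedPtr<T> {
    fn new(value: T) -> OwnedPtr<T> {
        // Box the value and keep only the raw pointer to it.
        let raw = Box::into_raw(Box::new(value));
        OwnedPtr {
            ptr: unsafe { NonNull::new_unchecked(raw) },
            _own: PhantomData,
        }
    }

    fn get(&self) -> &T {
        // Safe because `ptr` always points at the live boxed value.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> Drop for OwnedPtr<T> {
    fn drop(&mut self) {
        // Rebuild the Box so the owned `T` is dropped and its memory freed.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())); }
    }
}

fn main() {
    let p = OwnedPtr::new(String::from("phantom"));
    println!("{}", p.get());
    // `p` goes out of scope here; the Drop impl frees the String, and the
    // PhantomData<T> field lets the drop checker account for that ownership.
}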