Auto merge of #56737 - nnethercote:TokenStream-improvements, r=petrochenkov

`TokenStream` improvements

Some `TokenStream` improvements: shrinking `TokenStream` and some other types, and some other code clean-ups.
This commit is contained in:
bors 2018-12-17 01:48:23 +00:00
commit c6fb01d629
12 changed files with 107 additions and 285 deletions

View File

@ -483,7 +483,7 @@ impl MetaItem {
last_pos = segment.ident.span.hi();
}
idents.push(self.node.tokens(self.span));
TokenStream::concat(idents)
TokenStream::new(idents)
}
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
@ -539,7 +539,7 @@ impl MetaItemKind {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::NameValue(ref lit) => {
TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
}
MetaItemKind::List(ref list) => {
let mut tokens = Vec::new();
@ -552,7 +552,7 @@ impl MetaItemKind {
TokenTree::Delimited(
DelimSpan::from_single(span),
token::Paren,
TokenStream::concat(tokens).into(),
TokenStream::new(tokens).into(),
).into()
}
}

View File

@ -247,7 +247,7 @@ pub mod rt {
let delim_span = DelimSpan::from_single(self.span);
r.push(TokenTree::Delimited(
delim_span, token::Bracket, TokenStream::concat(inner).into()
delim_span, token::Bracket, TokenStream::new(inner).into()
));
r
}

View File

@ -103,12 +103,12 @@ pub fn transcribe(cx: &ExtCtxt,
}
Frame::Delimited { forest, span, .. } => {
if result_stack.is_empty() {
return TokenStream::concat(result);
return TokenStream::new(result);
}
let tree = TokenTree::Delimited(
span,
forest.delim,
TokenStream::concat(result).into(),
TokenStream::new(result).into(),
);
result = result_stack.pop().unwrap();
result.push(tree.into());

View File

@ -145,12 +145,6 @@ pub mod util {
#[cfg(test)]
pub mod parser_testing;
pub mod move_map;
mod rc_slice;
pub use self::rc_slice::RcSlice;
mod rc_vec;
pub use self::rc_vec::RcVec;
}
pub mod json;

View File

@ -170,7 +170,7 @@ impl<'a> Parser<'a> {
token::CloseDelim(_) | token::Eof => self.unexpected()?,
_ => self.parse_token_tree(),
};
TokenStream::concat(vec![eq.into(), tree.into()])
TokenStream::new(vec![eq.into(), tree.into()])
} else {
TokenStream::empty()
};

View File

@ -22,7 +22,7 @@ impl<'a> StringReader<'a> {
tts.push(self.parse_token_tree()?);
}
Ok(TokenStream::concat(tts))
Ok(TokenStream::new(tts))
}
// Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
@ -30,14 +30,14 @@ impl<'a> StringReader<'a> {
let mut tts = vec![];
loop {
if let token::CloseDelim(..) = self.token {
return TokenStream::concat(tts);
return TokenStream::new(tts);
}
match self.parse_token_tree() {
Ok(tree) => tts.push(tree),
Err(mut e) => {
e.emit();
return TokenStream::concat(tts);
return TokenStream::new(tts);
}
}
}

View File

@ -863,13 +863,13 @@ mod tests {
with_globals(|| {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::concat(vec![
let expected = TokenStream::new(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
token::DelimToken::Paren,
TokenStream::concat(vec![
TokenStream::new(vec![
TokenTree::Token(sp(6, 7),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(),
@ -880,7 +880,7 @@ mod tests {
TokenTree::Delimited(
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
token::DelimToken::Brace,
TokenStream::concat(vec![
TokenStream::new(vec![
TokenTree::Token(sp(17, 18),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(18, 19), token::Semi).into(),

View File

@ -2939,7 +2939,7 @@ impl<'a> Parser<'a> {
_ => result.push(self.parse_token_tree().into()),
}
}
TokenStream::concat(result)
TokenStream::new(result)
}
/// Parse a prefix-unary-operator expr
@ -4635,7 +4635,7 @@ impl<'a> Parser<'a> {
self.unexpected()?;
unreachable!()
};
TokenStream::concat(vec![
TokenStream::new(vec![
args.into(),
TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
body.into(),

View File

@ -28,8 +28,8 @@ use ext::tt::{macro_parser, quoted};
use parse::Directory;
use parse::token::{self, DelimToken, Token};
use print::pprust;
use rustc_data_structures::sync::Lrc;
use serialize::{Decoder, Decodable, Encoder, Encodable};
use util::RcVec;
use std::borrow::Cow;
use std::{fmt, iter, mem};
@ -123,7 +123,7 @@ impl TokenTree {
}
pub fn joint(self) -> TokenStream {
TokenStream { kind: TokenStreamKind::JointTree(self) }
TokenStream::JointTree(self)
}
/// Returns the opening delimiter as a token tree.
@ -154,65 +154,57 @@ impl TokenTree {
/// instead of a representation of the abstract syntax tree.
/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
#[derive(Clone, Debug)]
pub struct TokenStream {
kind: TokenStreamKind,
pub enum TokenStream {
Empty,
Tree(TokenTree),
JointTree(TokenTree),
Stream(Lrc<Vec<TokenStream>>),
}
// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 40);
static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
impl TokenStream {
/// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
/// separating the two arguments with a comma for diagnostic suggestions.
pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
// Used to suggest if a user writes `foo!(a b);`
if let TokenStreamKind::Stream(ref slice) = self.kind {
if let TokenStream::Stream(ref stream) = self {
let mut suggestion = None;
let mut iter = slice.iter().enumerate().peekable();
let mut iter = stream.iter().enumerate().peekable();
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts.kind, &next.kind) {
(TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)), _) |
(_, TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))) => {
let sp = match (&ts, &next) {
(TokenStream::Tree(TokenTree::Token(_, token::Token::Comma)), _) |
(_, TokenStream::Tree(TokenTree::Token(_, token::Token::Comma))) => {
continue;
}
(TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
(TokenStreamKind::Tree(TokenTree::Delimited(sp, ..)), _) => sp.entire(),
(TokenStream::Tree(TokenTree::Token(sp, _)), _) => *sp,
(TokenStream::Tree(TokenTree::Delimited(sp, ..)), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = TokenStream {
kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
};
let comma = TokenStream::Tree(TokenTree::Token(sp, token::Comma));
suggestion = Some((pos, comma, sp));
}
}
if let Some((pos, comma, sp)) = suggestion {
let mut new_slice = vec![];
let parts = slice.split_at(pos + 1);
new_slice.extend_from_slice(parts.0);
new_slice.push(comma);
new_slice.extend_from_slice(parts.1);
let slice = RcVec::new(new_slice);
return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
let mut new_stream = vec![];
let parts = stream.split_at(pos + 1);
new_stream.extend_from_slice(parts.0);
new_stream.push(comma);
new_stream.extend_from_slice(parts.1);
return Some((TokenStream::new(new_stream), sp));
}
}
None
}
}
#[derive(Clone, Debug)]
enum TokenStreamKind {
Empty,
Tree(TokenTree),
JointTree(TokenTree),
Stream(RcVec<TokenStream>),
}
impl From<TokenTree> for TokenStream {
fn from(tt: TokenTree) -> TokenStream {
TokenStream { kind: TokenStreamKind::Tree(tt) }
TokenStream::Tree(tt)
}
}
@ -224,29 +216,29 @@ impl From<Token> for TokenStream {
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>())
TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<_>>())
}
}
impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
let iter = iter.into_iter();
let kind = mem::replace(&mut self.kind, TokenStreamKind::Empty);
let this = mem::replace(self, TokenStream::Empty);
// Vector of token streams originally in self.
let tts: Vec<TokenStream> = match kind {
TokenStreamKind::Empty => {
let tts: Vec<TokenStream> = match this {
TokenStream::Empty => {
let mut vec = Vec::new();
vec.reserve(iter.size_hint().0);
vec
}
TokenStreamKind::Tree(_) | TokenStreamKind::JointTree(_) => {
TokenStream::Tree(_) | TokenStream::JointTree(_) => {
let mut vec = Vec::new();
vec.reserve(1 + iter.size_hint().0);
vec.push(TokenStream { kind });
vec.push(this);
vec
}
TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) {
TokenStream::Stream(rc_vec) => match Lrc::try_unwrap(rc_vec) {
Ok(mut vec) => {
// Extend in place using the existing capacity if possible.
// This is the fast path for libraries like `quote` that
@ -273,12 +265,7 @@ impl Extend<TokenStream> for TokenStream {
// Build the resulting token stream. If it contains more than one token,
// preserve capacity in the vector in anticipation of the caller
// performing additional calls to extend.
let mut tts = builder.0;
*self = match tts.len() {
0 => TokenStream::empty(),
1 => tts.pop().unwrap(),
_ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)),
};
*self = TokenStream::new(builder.0);
}
}
@ -292,7 +279,7 @@ impl PartialEq<TokenStream> for TokenStream {
impl TokenStream {
pub fn len(&self) -> usize {
if let TokenStreamKind::Stream(ref slice) = self.kind {
if let TokenStream::Stream(ref slice) = self {
slice.len()
} else {
0
@ -300,28 +287,24 @@ impl TokenStream {
}
pub fn empty() -> TokenStream {
TokenStream { kind: TokenStreamKind::Empty }
TokenStream::Empty
}
pub fn is_empty(&self) -> bool {
match self.kind {
TokenStreamKind::Empty => true,
match self {
TokenStream::Empty => true,
_ => false,
}
}
pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
match streams.len() {
0 => TokenStream::empty(),
1 => streams.pop().unwrap(),
_ => TokenStream::concat_rc_vec(RcVec::new(streams)),
_ => TokenStream::Stream(Lrc::new(streams)),
}
}
fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream {
TokenStream { kind: TokenStreamKind::Stream(streams) }
}
pub fn trees(&self) -> Cursor {
self.clone().into_trees()
}
@ -383,9 +366,9 @@ impl TokenStream {
/// Precondition: `self` consists of a single token tree.
/// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`.
pub fn as_tree(self) -> (TokenTree, bool /* joint? */) {
match self.kind {
TokenStreamKind::Tree(tree) => (tree, false),
TokenStreamKind::JointTree(tree) => (tree, true),
match self {
TokenStream::Tree(tree) => (tree, false),
TokenStream::JointTree(tree) => (tree, true),
_ => unreachable!(),
}
}
@ -395,43 +378,43 @@ impl TokenStream {
let mut result = Vec::new();
let mut i = 0;
while let Some(stream) = trees.next_as_stream() {
result.push(match stream.kind {
TokenStreamKind::Tree(tree) => f(i, tree).into(),
TokenStreamKind::JointTree(tree) => f(i, tree).joint(),
result.push(match stream {
TokenStream::Tree(tree) => f(i, tree).into(),
TokenStream::JointTree(tree) => f(i, tree).joint(),
_ => unreachable!()
});
i += 1;
}
TokenStream::concat(result)
TokenStream::new(result)
}
pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
let mut trees = self.into_trees();
let mut result = Vec::new();
while let Some(stream) = trees.next_as_stream() {
result.push(match stream.kind {
TokenStreamKind::Tree(tree) => f(tree).into(),
TokenStreamKind::JointTree(tree) => f(tree).joint(),
result.push(match stream {
TokenStream::Tree(tree) => f(tree).into(),
TokenStream::JointTree(tree) => f(tree).joint(),
_ => unreachable!()
});
}
TokenStream::concat(result)
TokenStream::new(result)
}
fn first_tree_and_joint(&self) -> Option<(TokenTree, bool)> {
match self.kind {
TokenStreamKind::Empty => None,
TokenStreamKind::Tree(ref tree) => Some((tree.clone(), false)),
TokenStreamKind::JointTree(ref tree) => Some((tree.clone(), true)),
TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree_and_joint(),
match self {
TokenStream::Empty => None,
TokenStream::Tree(ref tree) => Some((tree.clone(), false)),
TokenStream::JointTree(ref tree) => Some((tree.clone(), true)),
TokenStream::Stream(ref stream) => stream.first().unwrap().first_tree_and_joint(),
}
}
fn last_tree_if_joint(&self) -> Option<TokenTree> {
match self.kind {
TokenStreamKind::Empty | TokenStreamKind::Tree(..) => None,
TokenStreamKind::JointTree(ref tree) => Some(tree.clone()),
TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
match self {
TokenStream::Empty | TokenStream::Tree(..) => None,
TokenStream::JointTree(ref tree) => Some(tree.clone()),
TokenStream::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
}
}
}
@ -474,28 +457,28 @@ impl TokenStreamBuilder {
}
pub fn build(self) -> TokenStream {
TokenStream::concat(self.0)
TokenStream::new(self.0)
}
fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
if let TokenStreamKind::Stream(ref streams) = stream.kind {
if let TokenStream::Stream(ref streams) = stream {
let len = streams.len();
match len {
1 => {}
2 => self.0.push(streams[0].clone().into()),
_ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))),
_ => self.0.push(TokenStream::new(streams[0 .. len - 1].to_vec())),
}
self.push_all_but_last_tree(&streams[len - 1])
}
}
fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
if let TokenStreamKind::Stream(ref streams) = stream.kind {
if let TokenStream::Stream(ref streams) = stream {
let len = streams.len();
match len {
1 => {}
2 => self.0.push(streams[1].clone().into()),
_ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. len))),
_ => self.0.push(TokenStream::new(streams[1 .. len].to_vec())),
}
self.push_all_but_first_tree(&streams[0])
}
@ -515,13 +498,13 @@ enum CursorKind {
#[derive(Clone)]
struct StreamCursor {
stream: RcVec<TokenStream>,
stream: Lrc<Vec<TokenStream>>,
index: usize,
stack: Vec<(RcVec<TokenStream>, usize)>,
stack: Vec<(Lrc<Vec<TokenStream>>, usize)>,
}
impl StreamCursor {
fn new(stream: RcVec<TokenStream>) -> Self {
fn new(stream: Lrc<Vec<TokenStream>>) -> Self {
StreamCursor { stream: stream, index: 0, stack: Vec::new() }
}
@ -530,10 +513,10 @@ impl StreamCursor {
if self.index < self.stream.len() {
self.index += 1;
let next = self.stream[self.index - 1].clone();
match next.kind {
TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next),
TokenStreamKind::Stream(stream) => self.insert(stream),
TokenStreamKind::Empty => {}
match next {
TokenStream::Tree(..) | TokenStream::JointTree(..) => return Some(next),
TokenStream::Stream(stream) => self.insert(stream),
TokenStream::Empty => {}
}
} else if let Some((stream, index)) = self.stack.pop() {
self.stream = stream;
@ -544,7 +527,7 @@ impl StreamCursor {
}
}
fn insert(&mut self, stream: RcVec<TokenStream>) {
fn insert(&mut self, stream: Lrc<Vec<TokenStream>>) {
self.stack.push((mem::replace(&mut self.stream, stream),
mem::replace(&mut self.index, 0)));
}
@ -554,8 +537,8 @@ impl Iterator for Cursor {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
self.next_as_stream().map(|stream| match stream.kind {
TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree,
self.next_as_stream().map(|stream| match stream {
TokenStream::Tree(tree) | TokenStream::JointTree(tree) => tree,
_ => unreachable!()
})
}
@ -563,11 +546,11 @@ impl Iterator for Cursor {
impl Cursor {
fn new(stream: TokenStream) -> Self {
Cursor(match stream.kind {
TokenStreamKind::Empty => CursorKind::Empty,
TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false),
TokenStreamKind::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)),
Cursor(match stream {
TokenStream::Empty => CursorKind::Empty,
TokenStream::Tree(tree) => CursorKind::Tree(tree, false),
TokenStream::JointTree(tree) => CursorKind::JointTree(tree, false),
TokenStream::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)),
})
}
@ -590,7 +573,7 @@ impl Cursor {
_ if stream.is_empty() => return,
CursorKind::Empty => *self = stream.trees(),
CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => {
*self = TokenStream::concat(vec![self.original_stream(), stream]).trees();
*self = TokenStream::new(vec![self.original_stream(), stream]).trees();
if consumed {
self.next();
}
@ -606,21 +589,21 @@ impl Cursor {
CursorKind::Empty => TokenStream::empty(),
CursorKind::Tree(ref tree, _) => tree.clone().into(),
CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
CursorKind::Stream(ref cursor) => TokenStream::Stream(
cursor.stack.get(0).cloned().map(|(stream, _)| stream)
.unwrap_or_else(|| cursor.stream.clone())
}),
),
}
}
pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
for stream in streams {
n = match stream.kind {
TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree)
n = match stream {
TokenStream::Tree(ref tree) | TokenStream::JointTree(ref tree)
if n == 0 => return Ok(tree.clone()),
TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => n - 1,
TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
TokenStream::Tree(..) | TokenStream::JointTree(..) => n - 1,
TokenStream::Stream(ref stream) => match look_ahead(stream, n) {
Ok(tree) => return Ok(tree),
Err(n) => n,
},
@ -656,7 +639,7 @@ impl Cursor {
/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
#[derive(Debug, Clone)]
pub struct ThinTokenStream(Option<RcVec<TokenStream>>);
pub struct ThinTokenStream(Option<Lrc<Vec<TokenStream>>>);
impl ThinTokenStream {
pub fn stream(&self) -> TokenStream {
@ -666,18 +649,18 @@ impl ThinTokenStream {
impl From<TokenStream> for ThinTokenStream {
fn from(stream: TokenStream) -> ThinTokenStream {
ThinTokenStream(match stream.kind {
TokenStreamKind::Empty => None,
TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])),
TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])),
TokenStreamKind::Stream(stream) => Some(stream),
ThinTokenStream(match stream {
TokenStream::Empty => None,
TokenStream::Tree(tree) => Some(Lrc::new(vec![tree.into()])),
TokenStream::JointTree(tree) => Some(Lrc::new(vec![tree.joint()])),
TokenStream::Stream(stream) => Some(stream),
})
}
}
impl From<ThinTokenStream> for TokenStream {
fn from(stream: ThinTokenStream) -> TokenStream {
stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty)
stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
}
}
@ -776,7 +759,7 @@ mod tests {
let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz");
let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
let eq_res = TokenStream::new(vec![test_fst, test_snd]);
assert_eq!(test_res.trees().count(), 5);
assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true);

View File

@ -1,64 +0,0 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use std::ops::{Deref, Range};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};
/// A cheaply cloneable, sliceable view into a shared, boxed slice.
///
/// Clones share the single `Lrc<Box<[T]>>` allocation; `offset`/`len` select
/// the window of it that this value exposes. The `u32` fields mean a view
/// cannot address more than `u32::MAX` elements.
#[derive(Clone)]
pub struct RcSlice<T> {
    // Shared ownership of the full backing slice.
    data: Lrc<Box<[T]>>,
    // Start of this view within `data`, in elements.
    offset: u32,
    // Number of elements visible through this view.
    len: u32,
}

impl<T> RcSlice<T> {
    /// Takes ownership of `vec` and wraps it as a view covering all of its
    /// elements (the vector is converted into a boxed slice).
    pub fn new(vec: Vec<T>) -> Self {
        RcSlice {
            offset: 0,
            len: vec.len() as u32,
            data: Lrc::new(vec.into_boxed_slice()),
        }
    }
    /// Returns a sub-view of this view without copying any data; only the
    /// reference count on the shared allocation is bumped.
    ///
    /// `range` is relative to `self`, not to the backing allocation.
    /// NOTE(review): bounds are not validated here — an out-of-range `range`
    /// only surfaces later, when `deref` indexes the backing slice.
    pub fn sub_slice(&self, range: Range<usize>) -> Self {
        RcSlice {
            data: self.data.clone(),
            offset: self.offset + range.start as u32,
            len: (range.end - range.start) as u32,
        }
    }
}
// Dereferences to exactly the windowed portion of the shared slice.
impl<T> Deref for RcSlice<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        &self.data[self.offset as usize .. (self.offset + self.len) as usize]
    }
}

// Debug-formats as the visible slice contents, not the whole backing store.
impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self.deref(), f)
    }
}

// Stable hashing delegates to the dereferenced element slice, so two views
// with equal visible contents hash identically regardless of their offsets
// into the backing allocation.
impl<CTX, T> HashStable<CTX> for RcSlice<T>
    where T: HashStable<CTX>
{
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        (**self).hash_stable(hcx, hasher);
    }
}

View File

@ -1,90 +0,0 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use std::ops::{Deref, Range};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
use rustc_data_structures::sync::Lrc;
/// A cheaply cloneable, sliceable view into a shared `Vec`.
///
/// Clones share the single `Lrc<Vec<T>>` allocation; `offset`/`len` select
/// the window of it that this value exposes. Unlike a boxed slice, keeping a
/// `Vec` as the backing store lets a uniquely-owned instance recover the
/// vector (including spare capacity) via `try_unwrap`. The `u32` fields mean
/// a view cannot address more than `u32::MAX` elements.
#[derive(Clone)]
pub struct RcVec<T> {
    // Shared ownership of the full backing vector.
    data: Lrc<Vec<T>>,
    // Start of this view within `data`, in elements.
    offset: u32,
    // Number of elements visible through this view.
    len: u32,
}

impl<T> RcVec<T> {
    /// Takes ownership of `vec` and wraps it as a view covering all of its
    /// elements, first shrinking it so the shared allocation carries no
    /// spare capacity.
    pub fn new(mut vec: Vec<T>) -> Self {
        // By default, constructing RcVec from Vec gives it just enough capacity
        // to hold the initial elements. Callers that anticipate needing to
        // extend the vector may prefer RcVec::new_preserving_capacity.
        vec.shrink_to_fit();
        Self::new_preserving_capacity(vec)
    }
    /// Like `new`, but keeps whatever excess capacity `vec` already has
    /// (useful when the caller expects to `try_unwrap` and extend later).
    pub fn new_preserving_capacity(vec: Vec<T>) -> Self {
        RcVec {
            offset: 0,
            len: vec.len() as u32,
            data: Lrc::new(vec),
        }
    }
    /// Returns a sub-view of this view without copying any data; only the
    /// reference count on the shared allocation is bumped.
    ///
    /// `range` is relative to `self`, not to the backing vector.
    /// NOTE(review): bounds are not validated here — an out-of-range `range`
    /// only surfaces later, when `deref` indexes the backing vector.
    pub fn sub_slice(&self, range: Range<usize>) -> Self {
        RcVec {
            data: self.data.clone(),
            offset: self.offset + range.start as u32,
            len: (range.end - range.start) as u32,
        }
    }
    /// If this RcVec has exactly one strong reference, returns ownership of the
    /// underlying vector. Otherwise returns self unmodified.
    ///
    /// On success the returned vector is trimmed to this view's window:
    /// elements outside `[offset, offset + len)` are dropped.
    pub fn try_unwrap(self) -> Result<Vec<T>, Self> {
        match Lrc::try_unwrap(self.data) {
            // If no other RcVec shares ownership of this data.
            Ok(mut vec) => {
                // Drop any elements after our view of the data.
                vec.truncate(self.offset as usize + self.len as usize);
                // Drop any elements before our view of the data. Do this after
                // the `truncate` so that elements past the end of our view do
                // not need to be copied around.
                vec.drain(..self.offset as usize);
                Ok(vec)
            }
            // If the data is shared.
            Err(data) => Err(RcVec { data, ..self }),
        }
    }
}
// Dereferences to exactly the windowed portion of the shared vector.
impl<T> Deref for RcVec<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        &self.data[self.offset as usize..(self.offset + self.len) as usize]
    }
}

// Debug-formats as the visible slice contents, not the whole backing store.
impl<T: fmt::Debug> fmt::Debug for RcVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self.deref(), f)
    }
}

// Stable hashing delegates to the dereferenced element slice, so two views
// with equal visible contents hash identically regardless of their offsets
// into the backing allocation.
impl<CTX, T> HashStable<CTX> for RcVec<T>
where
    T: HashStable<CTX>,
{
    fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) {
        (**self).hash_stable(hcx, hasher);
    }
}

View File

@ -137,7 +137,6 @@ fn check(cache: &mut Cache,
file.ends_with("symbol/struct.InternedString.html") ||
file.ends_with("ast/struct.ThinVec.html") ||
file.ends_with("util/struct.ThinVec.html") ||
file.ends_with("util/struct.RcSlice.html") ||
file.ends_with("layout/struct.TyLayout.html") ||
file.ends_with("humantime/struct.Timestamp.html") ||
file.ends_with("log/index.html") ||