avoid exposing that binary heap's IntoIter is backed by vec::IntoIter, use a private trait instead

The8472 2019-12-20 20:28:10 +01:00
parent c731648e77
commit 2a51e579f5
3 changed files with 26 additions and 9 deletions
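For context, the shape of the change as a minimal standalone sketch, not the actual liballoc code: the heap's iterator keeps wrapping the vec iterator internally, but the in-place-collect specialization now reaches it through a crate-private trait instead of a SourceIter<Source = vec::IntoIter<T>> bound, so the backing type no longer appears in the trait signature. All names below (VecIntoIter, HeapIntoIter, remaining_len, plus the simplified SourceIter) are invented stand-ins for the real items.

// Everything in this sketch is simplified stand-in code, not the real
// liballoc items: VecIntoIter mimics vec::IntoIter, HeapIntoIter mimics
// binary_heap::IntoIter, SourceIter mimics the unstable core::iter::SourceIter
// trait, and remaining_len stands in for the in-place specialization in vec.rs.

// Stand-in for vec::IntoIter<T>: the concrete type that should not leak into
// other modules' trait signatures.
struct VecIntoIter<T> {
    items: Vec<T>,
}

// The crate-private helper trait: "hand out the underlying vec iterator"
// without naming that type in any public-facing bound.
trait AsIntoIter<T> {
    fn as_into_iter(&mut self) -> &mut VecIntoIter<T>;
}

// Simplified stand-in for the unstable SourceIter trait.
trait SourceIter {
    type Source;
    fn as_inner(&mut self) -> &mut Self::Source;
}

// Stand-in for binary_heap::IntoIter<T>: it wraps a vec iterator, but its
// Source is now itself; the wrapped iterator is only reachable through the
// private trait.
struct HeapIntoIter<T> {
    iter: VecIntoIter<T>,
}

impl<T> SourceIter for HeapIntoIter<T> {
    type Source = HeapIntoIter<T>;

    fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

impl<T> AsIntoIter<T> for HeapIntoIter<T> {
    fn as_into_iter(&mut self) -> &mut VecIntoIter<T> {
        &mut self.iter
    }
}

// Stand-in for the consumer in vec.rs. The old bound named the concrete type
// (SourceIter<Source = VecIntoIter<T>>); the new one only asks that the source
// can be viewed as a vec iterator. The where clause below is the desugared
// form of the SourceIter<Source: AsIntoIter<T>> bound used in the diff.
fn remaining_len<T, I>(iterator: &mut I) -> usize
where
    I: SourceIter,
    I::Source: AsIntoIter<T>,
{
    iterator.as_inner().as_into_iter().items.len()
}

fn main() {
    let mut heap_iter = HeapIntoIter { iter: VecIntoIter { items: vec![1, 2, 3] } };
    assert_eq!(remaining_len::<i32, _>(&mut heap_iter), 3);
}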

binary_heap.rs

@@ -151,7 +151,7 @@
 use core::ptr;
 use crate::slice;
-use crate::vec::{self, Vec};
+use crate::vec::{self, Vec, AsIntoIter};
 use super::SpecExtend;
@@ -1175,17 +1175,23 @@ impl<T> FusedIterator for IntoIter<T> {}
 #[unstable(issue = "0", feature = "inplace_iteration")]
 unsafe impl<T> SourceIter for IntoIter<T> {
-    type Source = impl Iterator<Item = T>;
+    type Source = IntoIter<T>;
     #[inline]
     fn as_inner(&mut self) -> &mut Self::Source {
-        &mut self.iter
+        self
     }
 }
 #[unstable(issue = "0", feature = "inplace_iteration")]
 unsafe impl<I> InPlaceIterable for IntoIter<I> {}
+impl<I> AsIntoIter<I> for IntoIter<I> {
+    fn as_into_iter(&mut self) -> &mut vec::IntoIter<I> {
+        &mut self.iter
+    }
+}
 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
 #[derive(Clone, Debug)]
 pub struct IntoIterSorted<T> {

lib.rs

@@ -107,7 +107,7 @@
 #![feature(map_first_last)]
 #![feature(map_into_keys_values)]
 #![feature(negative_impls)]
-#![cfg_attr(bootstrap, feature(never_type))]
+#![feature(never_type)]
 #![feature(new_uninit)]
 #![feature(nll)]
 #![feature(nonnull_slice_from_raw_parts)]

vec.rs

@@ -2221,7 +2221,7 @@ fn from_iter(iterator: IntoIter<T>) -> Self {
 // T can be split into IN and OUT which only need to have the same size and alignment
 impl<T, I> SpecFrom<T, I> for Vec<T>
 where
-    I: Iterator<Item = T> + InPlaceIterable + SourceIter<Source = IntoIter<T>>,
+    I: Iterator<Item = T> + InPlaceIterable + SourceIter<Source: AsIntoIter<T>>,
 {
     default fn from_iter(mut iterator: I) -> Self {
         // This specialization only makes sense if we're juggling real allocations.
@@ -2230,8 +2230,8 @@ impl<T, I> SpecFrom<T, I> for Vec<T>
             return SpecFromNested::from_iter(iterator);
         }
-        let src_buf = iterator.as_inner().buf.as_ptr();
-        let src_end = iterator.as_inner().end;
+        let src_buf = iterator.as_inner().as_into_iter().buf.as_ptr();
+        let src_end = iterator.as_inner().as_into_iter().end;
         let dst = src_buf;
         let dst = if mem::needs_drop::<T>() {
@@ -2273,14 +2273,14 @@ impl<T, I> SpecFrom<T, I> for Vec<T>
                 .unwrap()
         };
-        let src = iterator.as_inner();
+        let src = iterator.as_inner().as_into_iter();
         // check if SourceIter and InPlaceIterable contracts were upheld.
         // caveat: if they weren't we may not even make it to this point
        debug_assert_eq!(src_buf, src.buf.as_ptr());
        debug_assert!(dst as *const _ <= src.ptr, "InPlaceIterable contract violation");
        if mem::needs_drop::<T>() {
-            // drop tail if iterator was only partially exhaused
+            // drop tail if iterator was only partially exhausted
            unsafe {
                ptr::drop_in_place(src.as_mut_slice());
            }
@@ -2998,6 +2998,17 @@ fn as_inner(&mut self) -> &mut Self::Source {
     }
 }
+// internal helper trait for in-place iteration specialization.
+pub(crate) trait AsIntoIter<T> {
+    fn as_into_iter(&mut self) -> &mut IntoIter<T>;
+}
+impl<T> AsIntoIter<T> for IntoIter<T> {
+    fn as_into_iter(&mut self) -> &mut IntoIter<T> {
+        self
+    }
+}
 /// A draining iterator for `Vec<T>`.
 ///
 /// This `struct` is created by [`Vec::drain`].