Auto merge of #26914 - alexcrichton:deprecate-easy, r=aturon
Many of these have long since become obsolete, so this commit starts the removal process for all of them. The unstable features that were deprecated are:

* box_heap
* cmp_partial
* fs_time
* hash_default
* int_slice
* iter_min_max
* iter_reset_fuse
* iter_to_vec
* map_in_place
* move_from
* owned_ascii_ext
* page_size
* read_and_zero
* scan_state
* slice_chars
* slice_position_elem
* subslice_offset
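For a nightly crate that still opts into one of these features, the practical effect of this commit is a new deprecation warning on every use. A minimal sketch of the two options a caller has, using one of the items above as the example (the function names `old_way`/`new_way` are just illustrative):

```rust
#![feature(slice_position_elem)]

// Option 1: keep the deprecated call and silence the new warning.
#[allow(deprecated)]
fn old_way(xs: &[i32]) -> Option<usize> {
    xs.position_elem(&3)
}

// Option 2: migrate to the idiomatic form named in the deprecation reason.
fn new_way(xs: &[i32]) -> Option<usize> {
    xs.iter().position(|x| *x == 3)
}
```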
This commit is contained in: commit 9ca511cf63

@@ -86,10 +86,13 @@
#[lang = "exchange_heap"]
#[unstable(feature = "box_heap",
           reason = "may be renamed; uncertain about custom allocator design")]
#[allow(deprecated)]
pub const HEAP: ExchangeHeapSingleton =
    ExchangeHeapSingleton { _force_singleton: () };

/// This is the singleton type used solely for `boxed::HEAP`.
#[unstable(feature = "box_heap",
           reason = "may be renamed; uncertain about custom allocator design")]
#[derive(Copy, Clone)]
pub struct ExchangeHeapSingleton { _force_singleton: () }

@@ -762,12 +762,16 @@ pub fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {

/// Find the first index containing a matching value.
#[unstable(feature = "slice_position_elem")]
#[deprecated(since = "1.3.0",
             reason = "less idiomatic than .iter().position()")]
pub fn position_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
    core_slice::SliceExt::position_elem(self, t)
}

/// Find the last index containing a matching value.
#[unstable(feature = "slice_position_elem")]
#[deprecated(since = "1.3.0",
             reason = "less idiomatic than .iter().rev().position()")]
pub fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
    core_slice::SliceExt::rposition_elem(self, t)
}
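At call sites the replacements named in these deprecation reasons are one-liners; the in-tree migrations later in this commit use `iter().position()` and `iter().rposition()`. A small sketch:

```rust
fn main() {
    let xs = [1, 2, 3, 2];
    // was: xs.position_elem(&2)
    assert_eq!(xs.iter().position(|x| *x == 2), Some(1));
    // was: xs.rposition_elem(&2)
    assert_eq!(xs.iter().rposition(|x| *x == 2), Some(3));
}
```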

@@ -1009,6 +1013,8 @@ pub fn clone_from_slice(&mut self, src: &[T]) -> usize where T: Clone {
/// ```
#[unstable(feature = "move_from",
           reason = "uncertain about this API approach")]
#[deprecated(since = "1.3.0",
             reason = "unclear that it must belong in the standard library")]
#[inline]
pub fn move_from(&mut self, mut src: Vec<T>, start: usize, end: usize) -> usize {
    for (a, b) in self.iter_mut().zip(&mut src[start .. end]) {

@@ -553,6 +553,9 @@ pub unsafe fn slice_mut_unchecked(&mut self, begin: usize, end: usize) -> &mut s
/// ```
#[unstable(feature = "slice_chars",
           reason = "may have yet to prove its worth")]
#[deprecated(since = "1.3.0",
             reason = "can be implemented with char_indices and \
                       hasn't seen enough use to justify inclusion")]
pub fn slice_chars(&self, begin: usize, end: usize) -> &str {
    core_str::StrExt::slice_chars(self, begin, end)
}
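The deprecation reason points at `char_indices`; a free-function sketch of the same behaviour, where `begin` and `end` are char offsets and the function name is just illustrative:

```rust
fn slice_chars(s: &str, begin: usize, end: usize) -> &str {
    assert!(begin <= end);
    // Byte offset of the n-th char, or the end of the string if past it.
    let byte_at = |nchars: usize| {
        s.char_indices().map(|(b, _)| b).nth(nchars).unwrap_or(s.len())
    };
    &s[byte_at(begin)..byte_at(end)]
}
```

So `slice_chars("héllo", 1, 3)` yields `"él"` even though the corresponding byte range is `1..4`.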

@@ -1666,6 +1669,8 @@ pub fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a,
/// ```
#[unstable(feature = "subslice_offset",
           reason = "awaiting convention about comparability of arbitrary slices")]
#[deprecated(since = "1.3.0",
             reason = "replaced with other pattern-related methods")]
pub fn subslice_offset(&self, inner: &str) -> usize {
    core_str::StrExt::subslice_offset(self, inner)
}
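The rustdoc migration further down swaps `subslice_offset` for `find`, which is only equivalent when the substring occurs once. Where the caller genuinely holds a subslice of the original string, the offset can also be recovered from the pointers; a sketch, not the library's replacement:

```rust
/// Byte offset of `inner` within `outer`. `inner` must be a subslice of
/// `outer` (for example one produced by `split` or `lines`), otherwise the
/// assertion fails.
fn subslice_offset(outer: &str, inner: &str) -> usize {
    let outer_start = outer.as_ptr() as usize;
    let inner_start = inner.as_ptr() as usize;
    assert!(inner_start >= outer_start &&
            inner_start + inner.len() <= outer_start + outer.len());
    inner_start - outer_start
}
```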

@@ -772,6 +772,9 @@ pub fn is_empty(&self) -> bool { self.len() == 0 }
/// ```
#[unstable(feature = "map_in_place",
           reason = "API may change to provide stronger guarantees")]
#[deprecated(since = "1.3.0",
             reason = "unclear that the API is strong enough and has not \
                       proven itself")]
pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
    // FIXME: Assert statically that the types `T` and `U` have the same
    // size.
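`map_in_place` reused the allocation when `T` and `U` had the same size; the callers updated later in this commit fall back to either an in-place loop (when the element type does not change) or a plain iterator rebuild. A sketch of the latter:

```rust
fn main() {
    let positions: Vec<u32> = vec![10, 20, 30];
    let start_pos = 5;
    // was: positions.map_in_place(|pos| pos - start_pos)
    let shifted: Vec<u32> = positions.into_iter().map(|pos| pos - start_pos).collect();
    assert_eq!(shifted, [5, 15, 25]);
}
```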

@@ -1627,6 +1630,7 @@ impl<T> IntoIter<T> {
#[inline]
/// Drops all items that have not yet been moved and returns the empty vector.
#[unstable(feature = "iter_to_vec")]
#[deprecated(since = "1.3.0", reason = "replaced by drain()")]
pub fn into_inner(mut self) -> Vec<T> {
    unsafe {
        for _x in self.by_ref() { }

@@ -401,6 +401,7 @@ pub fn max<T: Ord>(v1: T, v2: T) -> T {
/// ```
#[inline]
#[unstable(feature = "cmp_partial")]
#[deprecated(since = "1.3.0", reason = "has not proven itself worthwhile")]
pub fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
    match v1.partial_cmp(&v2) {
        Some(Less) | Some(Equal) => Some(v1),

@@ -434,6 +435,7 @@ pub fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
/// ```
#[inline]
#[unstable(feature = "cmp_partial")]
#[deprecated(since = "1.3.0", reason = "has not proven itself worthwhile")]
pub fn partial_max<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
    match v1.partial_cmp(&v2) {
        Some(Equal) | Some(Less) => Some(v2),
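For callers of the deprecated `cmp::partial_min`/`partial_max` there is no single drop-in replacement; what fits depends on how `NaN` (or any other incomparable value) should behave. A sketch of two options:

```rust
use std::cmp::Ordering;

fn main() {
    let (a, b) = (1.5_f64, 2.5_f64);

    // For floats, the inherent methods give a total answer and prefer the
    // non-NaN operand when one argument is NaN.
    assert_eq!(a.min(b), 1.5);
    assert_eq!(a.max(b), 2.5);

    // To keep the Option-returning behaviour, spell out partial_cmp.
    let pmin = match a.partial_cmp(&b) {
        Some(Ordering::Greater) => Some(b),
        Some(_) => Some(a),
        None => None,
    };
    assert_eq!(pmin, Some(1.5));
}
```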

@@ -171,6 +171,8 @@ fn write_isize(&mut self, i: isize) { self.write_usize(i as usize) }
#[unstable(feature = "hash_default",
           reason = "not the most ergonomic interface unless `H` is defaulted \
                     to SipHasher, but perhaps not ready to commit to that")]
#[deprecated(since = "1.3.0",
             reason = "has yet to prove itself useful")]
pub fn hash<T: Hash, H: Hasher + Default>(value: &T) -> u64 {
    let mut h: H = Default::default();
    value.hash(&mut h);
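The in-tree users (see the `maybe_find_item` hunk below) replace the free `hash::hash::<T, H>` function with an explicitly constructed hasher; the same pattern works for any caller. A sketch, with `hash_of` as an illustrative name:

```rust
use std::hash::{Hash, Hasher, SipHasher};

// was: hash::hash::<T, SipHasher>(value)
fn hash_of<T: Hash>(value: &T) -> u64 {
    let mut s = SipHasher::new_with_keys(0, 0);
    value.hash(&mut s);
    s.finish()
}

fn main() {
    // Equal values hash equally; that is all the old helper guaranteed.
    assert_eq!(hash_of(&42i64), hash_of(&42i64));
}
```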

@@ -56,6 +56,7 @@

#![stable(feature = "rust1", since = "1.0.0")]

#[allow(deprecated)]
use self::MinMaxResult::*;

use clone::Clone;

@@ -445,6 +446,7 @@ fn take(self, n: usize) -> Take<Self> where Self: Sized, {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
    where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
{

@@ -840,6 +842,8 @@ fn min(self) -> Option<Self::Item> where Self: Sized, Self::Item: Ord
#[unstable(feature = "iter_min_max",
           reason = "return type may change or may wish to have a closure \
                     based version as well")]
#[deprecated(since = "1.3.0", reason = "has not proven itself")]
#[allow(deprecated)]
fn min_max(mut self) -> MinMaxResult<Self::Item> where Self: Sized, Self::Item: Ord
{
    let (mut min, mut max) = match self.next() {
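`min_max` and `MinMaxResult` are deprecated without a named replacement; callers can either call `min()` and `max()` separately (two passes) or keep the single pass with a fold. A sketch of the fold, returning a plain `Option<(min, max)>` instead of `MinMaxResult` (the free function name is illustrative):

```rust
fn min_max<I>(iter: I) -> Option<(I::Item, I::Item)>
    where I: Iterator, I::Item: Ord + Clone
{
    iter.fold(None, |acc, x| match acc {
        None => Some((x.clone(), x)),
        Some((lo, hi)) => Some((
            if x < lo { x.clone() } else { lo },
            if x > hi { x } else { hi },
        )),
    })
}

fn main() {
    assert_eq!(min_max(vec![3, 1, 4, 1, 5].into_iter()), Some((1, 5)));
    assert_eq!(min_max(Vec::<i32>::new().into_iter()), None);
}
```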

@@ -1336,6 +1340,8 @@ fn idx(&mut self, index: usize) -> Option<<I as Iterator>::Item> {
#[derive(Clone, PartialEq, Debug)]
#[unstable(feature = "iter_min_max",
           reason = "unclear whether such a fine-grained result is widely useful")]
#[deprecated(since = "1.3.0", reason = "has not proven itself")]
#[allow(deprecated)]
pub enum MinMaxResult<T> {
    /// Empty iterator
    NoElements,

@@ -1349,6 +1355,8 @@ pub enum MinMaxResult<T> {
}

#[unstable(feature = "iter_min_max", reason = "type is unstable")]
#[deprecated(since = "1.3.0", reason = "has not proven itself")]
#[allow(deprecated)]
impl<T: Clone> MinMaxResult<T> {
    /// `into_option` creates an `Option` of type `(T,T)`. The returned `Option`
    /// has variant `None` if and only if the `MinMaxResult` has variant

@@ -2249,6 +2257,7 @@ impl<I> ExactSizeIterator for Take<I> where I: ExactSizeIterator {}
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
#[allow(deprecated)]
pub struct Scan<I, St, F> {
    iter: I,
    f: F,

@@ -2256,6 +2265,7 @@ pub struct Scan<I, St, F> {
    /// The current internal state to be passed to the closure next.
    #[unstable(feature = "scan_state",
               reason = "public fields are otherwise rare in the stdlib")]
    #[deprecated(since = "1.3.0", reason = "unclear whether this is necessary")]
    pub state: St,
}

@@ -2267,6 +2277,7 @@ impl<B, I, St, F> Iterator for Scan<I, St, F> where
    type Item = B;

    #[inline]
    #[allow(deprecated)]
    fn next(&mut self) -> Option<B> {
        self.iter.next().and_then(|a| (self.f)(&mut self.state, a))
    }

@@ -2448,6 +2459,8 @@ impl<I> Fuse<I> {
    /// previously returned `None`.
    #[inline]
    #[unstable(feature = "iter_reset_fuse", reason = "seems marginal")]
    #[deprecated(since = "1.3.0",
                 reason = "unusual for adaptors to have one-off methods")]
    pub fn reset_fuse(&mut self) {
        self.done = false
    }

@@ -131,6 +131,9 @@ pub unsafe fn read<T>(src: *const T) -> T {
#[inline(always)]
#[unstable(feature = "read_and_zero",
           reason = "may play a larger role in std::ptr future extensions")]
#[deprecated(since = "1.3.0",
             reason = "a \"zero value\" will soon not actually exist for all \
                       types once dynamic drop has been implemented")]
pub unsafe fn read_and_zero<T>(dest: *mut T) -> T {
    // Copy the data out from `dest`:
    let tmp = read(&*dest);

@@ -37,13 +37,11 @@
#![feature(dynamic_lib)]
#![feature(enumset)]
#![feature(fs_canonicalize)]
#![feature(hash_default)]
#![feature(hashmap_hasher)]
#![feature(into_cow)]
#![feature(iter_cmp)]
#![feature(iter_arith)]
#![feature(libc)]
#![feature(map_in_place)]
#![feature(num_bits_bytes)]
#![feature(path_ext)]
#![feature(quote)]

@@ -55,8 +53,6 @@
#![feature(slice_bytes)]
#![feature(slice_splits)]
#![feature(slice_patterns)]
#![feature(slice_position_elem)]
#![feature(slice_concat_ext)]
#![feature(staged_api)]
#![feature(str_char)]
#![feature(str_match_indices)]

@@ -660,14 +660,14 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap,
// `CodeMap::new_imported_filemap()` will then translate those
// coordinates to their new global frame of reference when the
// offset of the FileMap is known.
let lines = lines.into_inner().map_in_place(|pos| pos - start_pos);
let multibyte_chars = multibyte_chars
    .into_inner()
    .map_in_place(|mbc|
        codemap::MultiByteChar {
            pos: mbc.pos - start_pos,
            bytes: mbc.bytes
        });
let mut lines = lines.into_inner();
for pos in &mut lines {
    *pos = *pos - start_pos;
}
let mut multibyte_chars = multibyte_chars.into_inner();
for mbc in &mut multibyte_chars {
    mbc.pos = mbc.pos - start_pos;
}

let local_version = local_codemap.new_imported_filemap(name,
                                                       source_length,

@@ -197,7 +197,9 @@ fn visit(cstore: &CStore, cnum: ast::CrateNum,
        }))
        .collect::<Vec<_>>();
    libs.sort_by(|&(a, _), &(b, _)| {
        ordering.position_elem(&a).cmp(&ordering.position_elem(&b))
        let a = ordering.iter().position(|x| *x == a);
        let b = ordering.iter().position(|x| *x == b);
        a.cmp(&b)
    });
    libs
}

@@ -35,7 +35,7 @@

use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::hash::{self, Hash, SipHasher};
use std::hash::{Hash, SipHasher, Hasher};
use std::io::prelude::*;
use std::io;
use std::rc::Rc;

@@ -89,9 +89,9 @@ pub fn maybe_find_item<'a>(item_id: ast::NodeId,
    fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
        u32_from_be_bytes(bytes) == item_id
    }
    lookup_hash(items,
                |a| eq_item(a, item_id),
                hash::hash::<i64, SipHasher>(&(item_id as i64)))
    let mut s = SipHasher::new_with_keys(0, 0);
    (item_id as i64).hash(&mut s);
    lookup_hash(items, |a| eq_item(a, item_id), s.finish())
}

fn find_item<'a>(item_id: ast::NodeId, items: rbml::Doc<'a>) -> rbml::Doc<'a> {

@@ -79,7 +79,6 @@
use std::rc::Rc;
use std::vec::IntoIter;
use collections::enum_set::{self, EnumSet, CLike};
use collections::slice::SliceConcatExt;
use std::collections::{HashMap, HashSet};
use syntax::abi;
use syntax::ast::{CrateNum, DefId, ItemImpl, ItemTrait, LOCAL_CRATE};

@@ -124,6 +124,7 @@ fn GetProcessMemoryInfo(Process: HANDLE,
}

#[cfg_attr(windows, allow(dead_code))]
#[allow(deprecated)]
fn get_proc_self_statm_field(field: usize) -> Option<usize> {
    use std::fs::File;
    use std::io::Read;

@@ -279,7 +279,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                   .expect("could not lift TraitRef for printing");
let projections = tcx.lift(&bounds.projection_bounds[..])
                     .expect("could not lift projections for printing");
let projections = projections.map_in_place(|p| p.0);
let projections = projections.into_iter().map(|p| p.0).collect();

let tap = ty::Binder(TraitAndProjections(principal, projections));
in_binder(f, tcx, &ty::Binder(""), Some(tap))

@@ -34,7 +34,7 @@
//! both occur before the crate is rendered.
pub use self::ExternalLocation::*;

use std::ascii::OwnedAsciiExt;
use std::ascii::AsciiExt;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::{BTreeMap, HashMap, HashSet};

@@ -2547,7 +2547,7 @@ fn get_index_search_type(item: &clean::Item,

    // Consider `self` an argument as well.
    if let Some(name) = parent {
        inputs.push(Type { name: Some(name.into_ascii_lowercase()) });
        inputs.push(Type { name: Some(name.to_ascii_lowercase()) });
    }

    inputs.extend(&mut decl.inputs.values.iter().map(|arg| {

@@ -2563,7 +2563,7 @@ fn get_index_search_type(item: &clean::Item,
}

fn get_index_type(clean_type: &clean::Type) -> Type {
    Type { name: get_index_type_name(clean_type).map(|s| s.into_ascii_lowercase()) }
    Type { name: get_index_type_name(clean_type).map(|s| s.to_ascii_lowercase()) }
}

fn get_index_type_name(clean_type: &clean::Type) -> Option<String> {
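These call sites trade the owning `into_ascii_lowercase(self)` from the deprecated `OwnedAsciiExt` for the borrowing `to_ascii_lowercase(&self)` from `AsciiExt`, which allocates a fresh value instead of reusing the old buffer. A minimal sketch of the new form (the import is needed in this era of Rust because the method lives on the `AsciiExt` trait):

```rust
use std::ascii::AsciiExt;

fn main() {
    let name = String::from("ExternalLocation");
    // was: name.into_ascii_lowercase(), which consumed `name`
    let lower = name.to_ascii_lowercase();
    assert_eq!(lower, "externallocation");
    assert_eq!(name, "ExternalLocation"); // still usable afterwards
}
```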

@@ -24,14 +24,12 @@
#![feature(box_syntax)]
#![feature(dynamic_lib)]
#![feature(libc)]
#![feature(owned_ascii_ext)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(slice_patterns)]
#![feature(staged_api)]
#![feature(subslice_offset)]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_push_all)]

@@ -34,7 +34,7 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
        // remove %<whitespace>
        metadata.push(line[1..].trim_left())
    } else {
        let line_start_byte = s.subslice_offset(line);
        let line_start_byte = s.find(line).unwrap();
        return (metadata, &s[line_start_byte..]);
    }
}

@@ -20,6 +20,9 @@
/// Extension methods for ASCII-subset only operations on owned strings
#[unstable(feature = "owned_ascii_ext",
           reason = "would prefer to do this in a more general way")]
#[deprecated(since = "1.3.0",
             reason = "hasn't yet proved essential to be in the standard library")]
#[allow(deprecated)]
pub trait OwnedAsciiExt {
    /// Converts the string to ASCII upper case:
    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',

@@ -164,11 +167,13 @@ fn is_ascii(&self) -> bool {
    }

    #[inline]
    #[allow(deprecated)]
    fn to_ascii_uppercase(&self) -> String {
        self.to_string().into_ascii_uppercase()
    }

    #[inline]
    #[allow(deprecated)]
    fn to_ascii_lowercase(&self) -> String {
        self.to_string().into_ascii_lowercase()
    }

@@ -189,6 +194,7 @@ fn make_ascii_lowercase(&mut self) {
    }
}

#[allow(deprecated)]
impl OwnedAsciiExt for String {
    #[inline]
    fn into_ascii_uppercase(self) -> String {

@@ -212,11 +218,13 @@ fn is_ascii(&self) -> bool {
    }

    #[inline]
    #[allow(deprecated)]
    fn to_ascii_uppercase(&self) -> Vec<u8> {
        self.to_vec().into_ascii_uppercase()
    }

    #[inline]
    #[allow(deprecated)]
    fn to_ascii_lowercase(&self) -> Vec<u8> {
        self.to_vec().into_ascii_lowercase()
    }

@@ -242,6 +250,7 @@ fn make_ascii_lowercase(&mut self) {
    }
}

#[allow(deprecated)]
impl OwnedAsciiExt for Vec<u8> {
    #[inline]
    fn into_ascii_uppercase(mut self) -> Vec<u8> {

@@ -590,6 +590,8 @@ fn len(&self) -> usize { self.inner.len() }

/// Returns the page size of the current architecture in bytes.
#[unstable(feature = "page_size", reason = "naming and/or location may change")]
#[deprecated(since = "1.3.0",
             reason = "hasn't seen enough usage to justify inclusion")]
pub fn page_size() -> usize {
    os_imp::page_size()
}

@@ -1225,6 +1225,9 @@ fn is_dir(&self) -> bool {
           reason = "the argument type of u64 is not quite appropriate for \
                     this function and may change if the standard library \
                     gains a type to represent a moment in time")]
#[deprecated(since = "1.3.0",
             reason = "will never be stabilized as-is and its replacement will \
                       likely have a totally new API")]
pub fn set_file_times<P: AsRef<Path>>(path: P, accessed: u64,
                                      modified: u64) -> io::Result<()> {
    fs_imp::utimes(path.as_ref(), accessed, modified)

@@ -747,7 +747,7 @@ pub fn into_inner(self) -> Result<W, IntoInnerError<LineWriter<W>>> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write> Write for LineWriter<W> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match buf.rposition_elem(&b'\n') {
        match buf.iter().rposition(|b| *b == b'\n') {
            Some(i) => {
                let n = try!(self.inner.write(&buf[..i + 1]));
                if n != i + 1 { return Ok(n) }

@@ -1105,7 +1105,7 @@ fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>)
            Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
            Err(e) => return Err(e)
        };
        match available.position_elem(&delim) {
        match available.iter().position(|x| *x == delim) {
            Some(i) => {
                buf.push_all(&available[..i + 1]);
                (true, i + 1)

@@ -232,7 +232,6 @@
#![feature(linkage, thread_local, asm)]
#![feature(macro_reexport)]
#![feature(slice_concat_ext)]
#![feature(slice_position_elem)]
#![feature(no_std)]
#![feature(oom)]
#![feature(optin_builtin_traits)]

@@ -200,7 +200,7 @@ pub fn parse_prefix<'a>(path: &'a OsStr) -> Option<Prefix> {
            return Some(VerbatimUNC(server, share));
        } else {
            // \\?\path
            let idx = path.position_elem(&b'\\');
            let idx = path.iter().position(|&b| b == b'\\');
            if idx == Some(2) && path[1] == b':' {
                let c = path[0];
                if c.is_ascii() && (c as char).is_alphabetic() {

@@ -214,7 +214,8 @@ pub fn parse_prefix<'a>(path: &'a OsStr) -> Option<Prefix> {
    } else if path.starts_with(b".\\") {
        // \\.\path
        path = &path[2..];
        let slice = &path[.. path.position_elem(&b'\\').unwrap_or(path.len())];
        let pos = path.iter().position(|&b| b == b'\\');
        let slice = &path[..pos.unwrap_or(path.len())];
        return Some(DeviceNS(u8_slice_as_os_str(slice)));
    }
    match parse_two_comps(path, is_sep_byte) {

@@ -56,7 +56,6 @@
#![deny(missing_docs)]

#![feature(box_syntax)]
#![feature(owned_ascii_ext)]
#![feature(path_ext)]
#![feature(rustc_private)]
#![feature(staged_api)]

@@ -14,7 +14,7 @@
use self::States::*;
use self::FormatState::*;
use self::FormatOp::*;
use std::ascii::OwnedAsciiExt;
use std::ascii::AsciiExt;
use std::mem::replace;
use std::iter::repeat;

@@ -532,7 +532,7 @@ fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8> ,String> {
        }
    }
    FormatHEX => {
        s = s.into_ascii_uppercase();
        s = s.to_ascii_uppercase();
        if flags.alternate {
            let s_ = replace(&mut s, vec!(b'0', b'X'));
            s.extend(s_);