auto merge of #19242 : jakub-/rust/roll-up, r=jakub-

bors 2014-11-23 20:26:58 +00:00
commit 4e5259503c
56 changed files with 578 additions and 550 deletions

configure

@ -546,22 +546,26 @@ CFG_TARGET=$(to_llvm_triple $CFG_TARGET)
# there's no rpath. This is where the build system itself puts libraries;
# --libdir is used to configure the installation directory.
# FIXME: This needs to be parameterized over target triples. Do it in platform.mk
CFG_LIBDIR_RELATIVE=lib
if [ "$CFG_OSTYPE" = "pc-windows-gnu" ]
then
CFG_LIBDIR_RELATIVE=bin
CFG_LIBDIR="${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}"
else
valopt libdir "${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}" "install libraries (ignored on windows platform)"
CFG_LIBDIR_RELATIVE=lib
fi
case "$CFG_LIBDIR" in
"$CFG_PREFIX"/*) CAT_INC=2;;
"$CFG_PREFIX"*) CAT_INC=1;;
*)
err "libdir must begin with the prefix. Use --prefix to set it accordingly.";;
esac
valopt libdir "${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}" "install libraries (do not set it on windows platform)"
CFG_LIBDIR_RELATIVE=`echo ${CFG_LIBDIR} | cut -c$((${#CFG_PREFIX}+${CAT_INC}))-`
case "$CFG_LIBDIR" in
"$CFG_PREFIX"/*) CAT_INC=2;;
"$CFG_PREFIX"*) CAT_INC=1;;
*)
err "libdir must begin with the prefix. Use --prefix to set it accordingly.";;
esac
CFG_LIBDIR_RELATIVE=`echo ${CFG_LIBDIR} | cut -c$((${#CFG_PREFIX}+${CAT_INC}))-`
if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] && [ "$CFG_LIBDIR_RELATIVE" != "bin" ]; then
err "libdir on windows should be set to 'bin'"
fi
if [ $HELP -eq 1 ]
@ -711,11 +715,6 @@ then
fi
step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV"
putvar CFG_LOCAL_RUST_ROOT
else
if [ ! -z "$CFG_LOCAL_RUST_ROOT" ]
then
warn "Use of --local-rust-root without --enable-local-rust"
fi
fi
# Force freebsd to build with clang; gcc doesn't like us there


@ -190,11 +190,14 @@ endif
# Target-and-rule "utility variables"
######################################################################
define DEF_X
define DEF_FOR_TARGET
X_$(1) := $(CFG_EXE_SUFFIX_$(1))
ifndef CFG_LLVM_TARGET_$(1)
CFG_LLVM_TARGET_$(1) := $(1)
endif
endef
$(foreach target,$(CFG_TARGET), \
$(eval $(call DEF_X,$(target))))
$(eval $(call DEF_FOR_TARGET,$(target))))
# "Source" files we generate in builddir along the way.
GENERATED :=


@ -75,7 +75,7 @@ $$(RT_OUTPUT_DIR_$(1))/%.o: $(S)src/rt/%.ll $$(MKFILE_DEPS) \
@mkdir -p $$(@D)
@$$(call E, compile: $$@)
$$(Q)$$(LLC_$$(CFG_BUILD)) $$(CFG_LLC_FLAGS_$(1)) \
-filetype=obj -mtriple=$(1) -relocation-model=pic -o $$@ $$<
-filetype=obj -mtriple=$$(CFG_LLVM_TARGET_$(1)) -relocation-model=pic -o $$@ $$<
$$(RT_OUTPUT_DIR_$(1))/%.o: $(S)src/rt/%.c $$(MKFILE_DEPS)
@mkdir -p $$(@D)


@ -22,7 +22,7 @@ ifdef CFG_ENABLE_LOCAL_RUST
else
$(Q)$(CFG_PYTHON) $(S)src/etc/get-snapshot.py $(CFG_BUILD) $(SNAPSHOT_FILE)
endif
$(Q)touch $@
$(Q)if [ -e "$@" ]; then touch "$@"; else echo "ERROR: snapshot $@ not found"; exit 1; fi
# For other targets, let the host build the target:


@ -116,7 +116,7 @@ $$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \
$$(foreach dep,$$(TOOL_DEPS_$(4)), \
$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep)) \
$$(TSREQ$(1)_T_$(2)_H_$(3)) \
| $$(TBIN$(1)_T_$(4)_H_$(3))/
| $$(TBIN$(1)_T_$(2)_H_$(3))/
@$$(call E, rustc: $$@)
$$(STAGE$(1)_T_$(2)_H_$(3)) -o $$@ $$< --cfg $(4)


@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ascii::AsciiExt;
use std::io::{BufferedReader, File};
use regex::Regex;
@ -31,7 +32,7 @@ pub fn load_errors(re: &Regex, testfile: &Path) -> Vec<ExpectedError> {
fn parse_expected(line_num: uint, line: &str, re: &Regex) -> Option<ExpectedError> {
re.captures(line).and_then(|caps| {
let adjusts = caps.name("adjusts").len();
let kind = caps.name("kind").to_ascii().to_lowercase().into_string();
let kind = caps.name("kind").to_ascii_lower();
let msg = caps.name("msg").trim().to_string();
debug!("line={} kind={} msg={}", line_num, kind, msg);


@ -7,7 +7,7 @@
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(not(stage0))]
use self::TargetLocation::*;
use common::Config;
@ -990,7 +990,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
let i = s.chars();
let c : Vec<char> = i.map( |c| {
if c.is_ascii() {
c.to_ascii().to_lowercase().to_char()
c.to_ascii().to_lowercase().as_char()
} else {
c
}
@ -1161,7 +1161,7 @@ fn compile_test_(config: &Config, props: &TestProps,
let args = make_compile_args(config,
props,
link_args,
|a, b| ThisFile(make_exe_name(a, b)), testfile);
|a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testfile);
compose_and_run_compiler(config, props, testfile, args, None)
}
@ -1219,7 +1219,7 @@ fn compose_and_run_compiler(
crate_type,
|a,b| {
let f = make_lib_name(a, b, testfile);
ThisDirectory(f.dir_path())
TargetLocation::ThisDirectory(f.dir_path())
},
&abs_ab);
let auxres = compose_and_run(config,
@ -1296,11 +1296,11 @@ fn make_compile_args(config: &Config,
args.push("prefer-dynamic".to_string());
}
let path = match xform_file {
ThisFile(path) => {
TargetLocation::ThisFile(path) => {
args.push("-o".to_string());
path
}
ThisDirectory(path) => {
TargetLocation::ThisDirectory(path) => {
args.push("--out-dir".to_string());
path
}
@ -1672,7 +1672,8 @@ fn compile_test_and_save_bitcode(config: &Config, props: &TestProps,
let args = make_compile_args(config,
props,
link_args,
|a, b| ThisDirectory(output_base_name(a, b).dir_path()),
|a, b| TargetLocation::ThisDirectory(
output_base_name(a, b).dir_path()),
testfile);
compose_and_run_compiler(config, props, testfile, args, None)
}


@ -8,8 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![no_start]
#[cfg(rustdoc)]
extern crate "rustdoc" as this;


@ -43,8 +43,6 @@ def print_struct_val(val, internal_dict):
return print_struct_val_starting_from(0, val, internal_dict)
def print_vec_slice_val(val, internal_dict):
output = "&["
length = val.GetChildAtIndex(1).GetValueAsUnsigned()
data_ptr_val = val.GetChildAtIndex(0)
@ -56,16 +54,12 @@ def print_vec_slice_val(val, internal_dict):
start_address = data_ptr_val.GetValueAsUnsigned()
for i in range(length):
def render_element(i):
address = start_address + i * element_type_size
element_val = val.CreateValueFromAddress( val.GetName() + ("[%s]" % i), address, element_type )
output += print_val(element_val, internal_dict)
element_val = val.CreateValueFromAddress( val.GetName() + ("[%s]" % i), address, element_type)
return print_val(element_val, internal_dict)
if i != length - 1:
output += ", "
output += "]"
return output
return "&[%s]" % (', '.join([render_element(i) for i in range(length)]))
def print_struct_val_starting_from(field_start_index, val, internal_dict):
'''
@ -77,39 +71,33 @@ def print_struct_val_starting_from(field_start_index, val, internal_dict):
t = val.GetType()
has_field_names = type_has_field_names(t)
type_name = extract_type_name(t.GetName())
output = ""
if not type_name.startswith("("):
# this is a tuple, so don't print the type name
output += type_name
if has_field_names:
output += " { \n"
template = "%(type_name)s {\n%(body)s\n}"
separator = ", \n"
else:
output += "("
template = "%(type_name)s(%(body)s)"
separator = ", "
if type_name.startswith("("):
# this is a tuple, so don't print the type name
type_name = ""
num_children = val.num_children
for child_index in range(field_start_index, num_children):
def render_child(child_index):
this = ""
if has_field_names:
field_name = t.GetFieldAtIndex(child_index).GetName()
output += field_name + ": "
this += field_name + ": "
field_val = val.GetChildAtIndex(child_index)
output += print_val(field_val, internal_dict)
return this + print_val(field_val, internal_dict)
if child_index != num_children - 1:
output += ", "
body = separator.join([render_child(idx) for idx in range(field_start_index, num_children)])
if has_field_names:
output += "\n"
if has_field_names:
output += "}"
else:
output += ")"
return output
return template % {"type_name": type_name,
"body": body}
def print_enum_val(val, internal_dict):
@ -243,3 +231,5 @@ def is_vec_slice(val):
type_name = extract_type_name(ty.GetName()).replace("&'static", "&").replace(" ", "")
return type_name.startswith("&[") and type_name.endswith("]")
# vi: sw=2:ts=2


@ -119,6 +119,16 @@ impl<T> Arc<T> {
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
#[unstable = "waiting on stability of Clone"]
impl<T> Clone for Arc<T> {
/// Duplicate an atomically reference counted wrapper.
@ -321,7 +331,7 @@ mod tests {
use std::sync::atomic;
use std::task;
use std::vec::Vec;
use super::{Arc, Weak};
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUint);
@ -465,6 +475,49 @@ mod tests {
drop(arc_weak);
}
#[test]
fn test_strong_count() {
let a = Arc::new(0u32);
assert!(strong_count(&a) == 1);
let w = a.downgrade();
assert!(strong_count(&a) == 1);
let b = w.upgrade().expect("");
assert!(strong_count(&b) == 2);
assert!(strong_count(&a) == 2);
drop(w);
drop(a);
assert!(strong_count(&b) == 1);
let c = b.clone();
assert!(strong_count(&b) == 2);
assert!(strong_count(&c) == 2);
}
#[test]
fn test_weak_count() {
let a = Arc::new(0u32);
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 0);
let w = a.downgrade();
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 1);
let x = w.clone();
assert!(weak_count(&a) == 2);
drop(w);
drop(x);
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 0);
let c = a.clone();
assert!(strong_count(&a) == 2);
assert!(weak_count(&a) == 0);
let d = c.downgrade();
assert!(weak_count(&c) == 1);
assert!(strong_count(&c) == 2);
drop(a);
drop(c);
drop(d);
}
#[test]
fn show_arc() {
let a = Arc::new(5u32);
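For readers skimming the hunk above: `strong_count` counts the `Arc` handles themselves, while `weak_count` subtracts one because all strong handles together hold a single implicit weak reference internally (hence the `- 1` in its body). A minimal sketch of the intended usage, written as if it sat beside the tests above so that `weak_count` and `strong_count` are in scope through `use super::...`; this is an illustration, not part of the patch:

    #[test]
    fn counts_sketch() {
        let a = Arc::new(0u32);
        assert!(strong_count(&a) == 1);   // one Arc handle
        assert!(weak_count(&a) == 0);     // the implicit weak ref is not counted

        let w = a.downgrade();            // an explicit Weak handle
        let b = a.clone();                // a second Arc handle
        assert!(weak_count(&a) == 1);
        assert!(strong_count(&a) == 2);

        drop(w);
        drop(b);
        assert!(weak_count(&a) == 0);
        assert!(strong_count(&a) == 1);
    }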


@ -15,7 +15,6 @@ use core::clone::Clone;
use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
use core::default::Default;
use core::fmt;
use core::intrinsics;
use core::kinds::Sized;
use core::mem;
use core::option::Option;
@ -104,17 +103,14 @@ pub trait BoxAny {
}
#[stable]
impl BoxAny for Box<Any+'static> {
impl BoxAny for Box<Any> {
#[inline]
fn downcast<T: 'static>(self) -> Result<Box<T>, Box<Any+'static>> {
fn downcast<T: 'static>(self) -> Result<Box<T>, Box<Any>> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject =
*mem::transmute::<&Box<Any>, &TraitObject>(&self);
// Prevent destructor on self being run
intrinsics::forget(self);
mem::transmute::<Box<Any>, TraitObject>(self);
// Extract the data pointer
Ok(mem::transmute(to.data))
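The rewrite above drops the `transmute_copy` plus `forget` pair: transmuting the `Box<Any>` by value consumes it, so there is no second owner whose destructor would have to be suppressed. The public behaviour of `downcast` is unchanged; a small usage sketch in 2014-era syntax, assuming `Any` and the `BoxAny` trait from this module are in scope (illustration only):

    let b = box 4u as Box<Any>;
    match b.downcast::<uint>() {
        Ok(n) => assert!(*n == 4u),        // success yields Box<uint>
        Err(b) => { let _: Box<Any> = b; } // failure hands the box back intact
    }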


@ -213,6 +213,16 @@ impl<T> Rc<T> {
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Rc<T>) -> uint { this.weak() - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Rc<T>) -> uint { this.strong() }
/// Returns true if the `Rc` currently has unique ownership.
///
/// Unique ownership means that there are no other `Rc` or `Weak` values
@ -220,8 +230,7 @@ impl<T> Rc<T> {
#[inline]
#[experimental]
pub fn is_unique<T>(rc: &Rc<T>) -> bool {
// note that we hold both a strong and a weak reference
rc.strong() == 1 && rc.weak() == 1
weak_count(rc) == 0 && strong_count(rc) == 1
}
/// Unwraps the contained value if the `Rc` has unique ownership.
@ -489,7 +498,7 @@ impl<T> RcBoxPtr<T> for Weak<T> {
#[cfg(test)]
#[allow(experimental)]
mod tests {
use super::{Rc, Weak};
use super::{Rc, Weak, weak_count, strong_count};
use std::cell::RefCell;
use std::option::{Option, Some, None};
use std::result::{Err, Ok};
@ -566,6 +575,40 @@ mod tests {
assert!(super::is_unique(&x));
}
#[test]
fn test_strong_count() {
let a = Rc::new(0u32);
assert!(strong_count(&a) == 1);
let w = a.downgrade();
assert!(strong_count(&a) == 1);
let b = w.upgrade().expect("upgrade of live rc failed");
assert!(strong_count(&b) == 2);
assert!(strong_count(&a) == 2);
drop(w);
drop(a);
assert!(strong_count(&b) == 1);
let c = b.clone();
assert!(strong_count(&b) == 2);
assert!(strong_count(&c) == 2);
}
#[test]
fn test_weak_count() {
let a = Rc::new(0u32);
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 0);
let w = a.downgrade();
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 1);
drop(w);
assert!(strong_count(&a) == 1);
assert!(weak_count(&a) == 0);
let c = a.clone();
assert!(strong_count(&a) == 2);
assert!(weak_count(&a) == 0);
drop(c);
}
#[test]
fn try_unwrap() {
let x = Rc::new(3u);
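After the rewrite above, `is_unique` reads directly as 'one strong handle and no explicit weak handles'. A small sketch in the style of the surrounding tests, calling it as `super::is_unique` exactly as the existing test does (illustration only, not part of the patch):

    #[test]
    fn is_unique_sketch() {
        let x = Rc::new(3u);
        assert!(super::is_unique(&x));     // 1 strong, 0 weak

        let w = x.downgrade();
        assert!(!super::is_unique(&x));    // a Weak handle breaks uniqueness
        drop(w);

        let y = x.clone();
        assert!(!super::is_unique(&x));    // so does a second Rc handle
        drop(y);
        assert!(super::is_unique(&x));
    }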


@ -68,15 +68,15 @@
//! // dist[node] = current shortest distance from `start` to `node`
//! let mut dist = Vec::from_elem(adj_list.len(), uint::MAX);
//!
//! let mut pq = BinaryHeap::new();
//! let mut heap = BinaryHeap::new();
//!
//! // We're at `start`, with a zero cost
//! dist[start] = 0u;
//! pq.push(State { cost: 0u, position: start });
//! heap.push(State { cost: 0u, position: start });
//!
//! // Examine the frontier with lower cost nodes first (min-heap)
//! loop {
//! let State { cost, position } = match pq.pop() {
//! let State { cost, position } = match heap.pop() {
//! None => break, // empty
//! Some(s) => s
//! };
@ -94,7 +94,7 @@
//!
//! // If so, add it to the frontier and continue
//! if next.cost < dist[next.position] {
//! pq.push(next);
//! heap.push(next);
//! // Relaxation, we have now found a better way
//! dist[next.position] = next.cost;
//! }
@ -184,7 +184,7 @@ impl<T: Ord> BinaryHeap<T> {
///
/// ```
/// use std::collections::BinaryHeap;
/// let pq: BinaryHeap<uint> = BinaryHeap::new();
/// let heap: BinaryHeap<uint> = BinaryHeap::new();
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn new() -> BinaryHeap<T> { BinaryHeap{data: vec!(),} }
@ -198,7 +198,7 @@ impl<T: Ord> BinaryHeap<T> {
///
/// ```
/// use std::collections::BinaryHeap;
/// let pq: BinaryHeap<uint> = BinaryHeap::with_capacity(10u);
/// let heap: BinaryHeap<uint> = BinaryHeap::with_capacity(10u);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn with_capacity(capacity: uint) -> BinaryHeap<T> {
@ -212,7 +212,7 @@ impl<T: Ord> BinaryHeap<T> {
///
/// ```
/// use std::collections::BinaryHeap;
/// let pq = BinaryHeap::from_vec(vec![9i, 1, 2, 7, 3, 2]);
/// let heap = BinaryHeap::from_vec(vec![9i, 1, 2, 7, 3, 2]);
/// ```
pub fn from_vec(xs: Vec<T>) -> BinaryHeap<T> {
let mut q = BinaryHeap{data: xs,};
@ -231,10 +231,10 @@ impl<T: Ord> BinaryHeap<T> {
///
/// ```
/// use std::collections::BinaryHeap;
/// let pq = BinaryHeap::from_vec(vec![1i, 2, 3, 4]);
/// let heap = BinaryHeap::from_vec(vec![1i, 2, 3, 4]);
///
/// // Print 1, 2, 3, 4 in arbitrary order
/// for x in pq.iter() {
/// for x in heap.iter() {
/// println!("{}", x);
/// }
/// ```
@ -250,13 +250,13 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::new();
/// assert_eq!(pq.top(), None);
/// let mut heap = BinaryHeap::new();
/// assert_eq!(heap.top(), None);
///
/// pq.push(1i);
/// pq.push(5i);
/// pq.push(2i);
/// assert_eq!(pq.top(), Some(&5i));
/// heap.push(1i);
/// heap.push(5i);
/// heap.push(2i);
/// assert_eq!(heap.top(), Some(&5i));
///
/// ```
pub fn top<'a>(&'a self) -> Option<&'a T> {
@ -270,8 +270,8 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let pq: BinaryHeap<uint> = BinaryHeap::with_capacity(100u);
/// assert!(pq.capacity() >= 100u);
/// let heap: BinaryHeap<uint> = BinaryHeap::with_capacity(100u);
/// assert!(heap.capacity() >= 100u);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn capacity(&self) -> uint { self.data.capacity() }
@ -292,9 +292,9 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq: BinaryHeap<uint> = BinaryHeap::new();
/// pq.reserve_exact(100u);
/// assert!(pq.capacity() >= 100u);
/// let mut heap: BinaryHeap<uint> = BinaryHeap::new();
/// heap.reserve_exact(100u);
/// assert!(heap.capacity() >= 100u);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn reserve_exact(&mut self, additional: uint) { self.data.reserve_exact(additional) }
@ -311,9 +311,9 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq: BinaryHeap<uint> = BinaryHeap::new();
/// pq.reserve(100u);
/// assert!(pq.capacity() >= 100u);
/// let mut heap: BinaryHeap<uint> = BinaryHeap::new();
/// heap.reserve(100u);
/// assert!(heap.capacity() >= 100u);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn reserve(&mut self, additional: uint) {
@ -334,11 +334,11 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::from_vec(vec![1i, 3]);
/// let mut heap = BinaryHeap::from_vec(vec![1i, 3]);
///
/// assert_eq!(pq.pop(), Some(3i));
/// assert_eq!(pq.pop(), Some(1i));
/// assert_eq!(pq.pop(), None);
/// assert_eq!(heap.pop(), Some(3i));
/// assert_eq!(heap.pop(), Some(1i));
/// assert_eq!(heap.pop(), None);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn pop(&mut self) -> Option<T> {
@ -361,13 +361,13 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::new();
/// pq.push(3i);
/// pq.push(5i);
/// pq.push(1i);
/// let mut heap = BinaryHeap::new();
/// heap.push(3i);
/// heap.push(5i);
/// heap.push(1i);
///
/// assert_eq!(pq.len(), 3);
/// assert_eq!(pq.top(), Some(&5i));
/// assert_eq!(heap.len(), 3);
/// assert_eq!(heap.top(), Some(&5i));
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn push(&mut self, item: T) {
@ -384,14 +384,14 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::new();
/// pq.push(1i);
/// pq.push(5i);
/// let mut heap = BinaryHeap::new();
/// heap.push(1i);
/// heap.push(5i);
///
/// assert_eq!(pq.push_pop(3i), 5);
/// assert_eq!(pq.push_pop(9i), 9);
/// assert_eq!(pq.len(), 2);
/// assert_eq!(pq.top(), Some(&3i));
/// assert_eq!(heap.push_pop(3i), 5);
/// assert_eq!(heap.push_pop(9i), 9);
/// assert_eq!(heap.len(), 2);
/// assert_eq!(heap.top(), Some(&3i));
/// ```
pub fn push_pop(&mut self, mut item: T) -> T {
if !self.is_empty() && *self.top().unwrap() > item {
@ -410,12 +410,12 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::new();
/// let mut heap = BinaryHeap::new();
///
/// assert_eq!(pq.replace(1i), None);
/// assert_eq!(pq.replace(3i), Some(1i));
/// assert_eq!(pq.len(), 1);
/// assert_eq!(pq.top(), Some(&3i));
/// assert_eq!(heap.replace(1i), None);
/// assert_eq!(heap.replace(3i), Some(1i));
/// assert_eq!(heap.len(), 1);
/// assert_eq!(heap.top(), Some(&3i));
/// ```
pub fn replace(&mut self, mut item: T) -> Option<T> {
if !self.is_empty() {
@ -436,8 +436,8 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let pq = BinaryHeap::from_vec(vec![1i, 2, 3, 4, 5, 6, 7]);
/// let vec = pq.into_vec();
/// let heap = BinaryHeap::from_vec(vec![1i, 2, 3, 4, 5, 6, 7]);
/// let vec = heap.into_vec();
///
/// // Will print in some order
/// for x in vec.iter() {
@ -454,11 +454,11 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
///
/// let mut pq = BinaryHeap::from_vec(vec![1i, 2, 4, 5, 7]);
/// pq.push(6);
/// pq.push(3);
/// let mut heap = BinaryHeap::from_vec(vec![1i, 2, 4, 5, 7]);
/// heap.push(6);
/// heap.push(3);
///
/// let vec = pq.into_sorted_vec();
/// let vec = heap.into_sorted_vec();
/// assert_eq!(vec, vec![1i, 2, 3, 4, 5, 6, 7]);
/// ```
pub fn into_sorted_vec(self) -> Vec<T> {
@ -578,9 +578,9 @@ mod tests {
fn test_iterator() {
let data = vec!(5i, 9, 3);
let iterout = [9i, 5, 3];
let pq = BinaryHeap::from_vec(data);
let heap = BinaryHeap::from_vec(data);
let mut i = 0;
for el in pq.iter() {
for el in heap.iter() {
assert_eq!(*el, iterout[i]);
i += 1;
}


@ -251,7 +251,7 @@ impl Default for SipHasher {
/// Hashes a value using the SipHash algorithm.
#[inline]
pub fn hash<T: Hash<SipState>>(value: &T) -> u64 {
pub fn hash<Sized? T: Hash<SipState>>(value: &T) -> u64 {
let mut state = SipState::new();
value.hash(&mut state);
state.result()
@ -259,7 +259,7 @@ pub fn hash<T: Hash<SipState>>(value: &T) -> u64 {
/// Hashes a value with the SipHash algorithm with the provided keys.
#[inline]
pub fn hash_with_keys<T: Hash<SipState>>(k0: u64, k1: u64, value: &T) -> u64 {
pub fn hash_with_keys<Sized? T: Hash<SipState>>(k0: u64, k1: u64, value: &T) -> u64 {
let mut state = SipState::new_with_keys(k0, k1);
value.hash(&mut state);
state.result()


@ -71,7 +71,7 @@
#![stable]
use mem::{transmute, transmute_copy};
use mem::{transmute};
use option::{Option, Some, None};
use raw::TraitObject;
use intrinsics::TypeId;
@ -134,7 +134,7 @@ impl<'a> AnyRefExt<'a> for &'a Any {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
let to: TraitObject = transmute(self);
// Extract the data pointer
Some(transmute(to.data))
@ -162,7 +162,7 @@ impl<'a> AnyMutRefExt<'a> for &'a mut Any {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
let to: TraitObject = transmute(self);
// Extract the data pointer
Some(transmute(to.data))


@ -179,7 +179,7 @@ pub trait Octal for Sized? {
fn fmt(&self, &mut Formatter) -> Result;
}
/// Format trait for the `t` character
/// Format trait for the `b` character
#[unstable = "I/O and core have yet to be reconciled"]
pub trait Binary for Sized? {
/// Formats the value using the given formatter.


@ -57,7 +57,7 @@
//!
//! Pattern matching on `Result`s is clear and straightforward for
//! simple cases, but `Result` comes with some convenience methods
//! that make working it more succinct.
//! that make working with it more succinct.
//!
//! ```
//! let good_result: Result<int, int> = Ok(10);


@ -37,22 +37,18 @@ use util::ppaux::{ty_to_string};
use util::nodemap::{FnvHashMap, NodeSet};
use lint::{Context, LintPass, LintArray};
use std::cmp;
use std::{cmp, slice};
use std::collections::hash_map::{Occupied, Vacant};
use std::num::SignedInt;
use std::slice;
use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64};
use syntax::abi;
use syntax::ast_map;
use syntax::ast_util::is_shift_binop;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::{abi, ast, ast_map};
use syntax::ast_util::{mod, is_shift_binop};
use syntax::attr::{mod, AttrMetaMethods};
use syntax::codemap::{Span, DUMMY_SP};
use syntax::parse::token;
use syntax::{ast, ast_util, visit};
use syntax::ast::{TyI, TyU, TyI8, TyU8, TyI16, TyU16, TyI32, TyU32, TyI64, TyU64};
use syntax::ptr::P;
use syntax::visit::Visitor;
use syntax::visit::{mod, Visitor};
declare_lint!(WHILE_TRUE, Warn,
"suggest using `loop { }` instead of `while true { }`")
@ -1112,8 +1108,8 @@ impl UnusedParens {
}
ast::ExprUnary(_, ref x) |
ast::ExprCast(ref x, _) |
ast::ExprField(ref x, _, _) |
ast::ExprTupField(ref x, _, _) |
ast::ExprField(ref x, _) |
ast::ExprTupField(ref x, _) |
ast::ExprIndex(ref x, _) => {
// &X { y: 1 }, X { y: 1 }.y
contains_exterior_struct_lit(&**x)


@ -475,8 +475,8 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
ast::ExprCast(ref e, _) |
ast::ExprUnary(_, ref e) |
ast::ExprParen(ref e) |
ast::ExprField(ref e, _, _) |
ast::ExprTupField(ref e, _, _) => {
ast::ExprField(ref e, _) |
ast::ExprTupField(ref e, _) => {
self.straightline(expr, pred, Some(&**e).into_iter())
}


@ -15,19 +15,16 @@ pub use self::const_val::*;
pub use self::constness::*;
use metadata::csearch;
use middle::astencode;
use middle::def;
use middle::{astencode, def};
use middle::pat_util::def_to_path;
use middle::ty::{mod, Ty};
use middle::typeck::astconv;
use middle::typeck::check;
use util::nodemap::{DefIdMap};
use middle::typeck::{astconv, check};
use util::nodemap::DefIdMap;
use syntax::ast::{mod, Expr};
use syntax::parse::token::InternedString;
use syntax::ptr::P;
use syntax::visit::Visitor;
use syntax::visit;
use syntax::visit::{mod, Visitor};
use syntax::{ast_map, ast_util, codemap};
use std::rc::Rc;
@ -234,9 +231,9 @@ impl<'a, 'tcx> ConstEvalVisitor<'a, 'tcx> {
}
}
ast::ExprField(ref base, _, _) => self.classify(&**base),
ast::ExprField(ref base, _) => self.classify(&**base),
ast::ExprTupField(ref base, _, _) => self.classify(&**base),
ast::ExprTupField(ref base, _) => self.classify(&**base),
ast::ExprIndex(ref base, ref idx) =>
join(self.classify(&**base), self.classify(&**idx)),


@ -12,20 +12,14 @@
// closely. The idea is that all reachable symbols are live, codes called
// from live codes are live, and everything else is dead.
use middle::def;
use middle::pat_util;
use middle::privacy;
use middle::ty;
use middle::typeck;
use middle::{def, pat_util, privacy, ty, typeck};
use lint;
use util::nodemap::NodeSet;
use std::collections::HashSet;
use syntax::ast;
use syntax::ast_map;
use syntax::{ast, ast_map, codemap};
use syntax::ast_util::{local_def, is_local, PostExpansionMethod};
use syntax::attr::{mod, AttrMetaMethods};
use syntax::codemap;
use syntax::visit::{mod, Visitor};
// Any local node that may call something in its body block should be
@ -277,10 +271,10 @@ impl<'a, 'tcx, 'v> Visitor<'v> for MarkSymbolVisitor<'a, 'tcx> {
ast::ExprMethodCall(..) => {
self.lookup_and_handle_method(expr.id, expr.span);
}
ast::ExprField(ref lhs, ref ident, _) => {
ast::ExprField(ref lhs, ref ident) => {
self.handle_field_access(&**lhs, &ident.node);
}
ast::ExprTupField(ref lhs, idx, _) => {
ast::ExprTupField(ref lhs, idx) => {
self.handle_tup_field_access(&**lhs, idx.node);
}
_ => ()


@ -20,11 +20,9 @@ pub use self::ConsumeMode::*;
pub use self::MoveReason::*;
use self::OverloadedCallType::*;
use middle::{def, region, pat_util};
use middle::mem_categorization as mc;
use middle::def;
use middle::mem_categorization::Typer;
use middle::region;
use middle::pat_util;
use middle::ty::{mod, Ty};
use middle::typeck::{MethodCall, MethodObject, MethodTraitObject};
use middle::typeck::{MethodOrigin, MethodParam, MethodTypeParam};
@ -331,11 +329,11 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
}
}
ast::ExprField(ref base, _, _) => { // base.f
ast::ExprField(ref base, _) => { // base.f
self.select_from_expr(&**base);
}
ast::ExprTupField(ref base, _, _) => { // base.<n>
ast::ExprTupField(ref base, _) => { // base.<n>
self.select_from_expr(&**base);
}


@ -113,24 +113,19 @@ use self::VarKind::*;
use middle::def::*;
use middle::mem_categorization::Typer;
use middle::pat_util;
use middle::typeck;
use middle::ty;
use middle::{pat_util, typeck, ty};
use lint;
use util::nodemap::NodeMap;
use std::fmt;
use std::io;
use std::{fmt, io, uint};
use std::rc::Rc;
use std::uint;
use syntax::ast::{mod, NodeId, Expr};
use syntax::codemap::{BytePos, original_sp, Span};
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::parse::token::{mod, special_idents};
use syntax::print::pprust::{expr_to_string, block_to_string};
use syntax::ptr::P;
use syntax::{visit, ast_util};
use syntax::visit::{Visitor, FnKind};
use syntax::ast_util;
use syntax::visit::{mod, Visitor, FnKind};
/// For use with `propagate_through_loop`.
enum LoopKind<'a> {
@ -967,11 +962,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
self.access_path(expr, succ, ACC_READ | ACC_USE)
}
ast::ExprField(ref e, _, _) => {
ast::ExprField(ref e, _) => {
self.propagate_through_expr(&**e, succ)
}
ast::ExprTupField(ref e, _, _) => {
ast::ExprTupField(ref e, _) => {
self.propagate_through_expr(&**e, succ)
}
@ -1295,8 +1290,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
match expr.node {
ast::ExprPath(_) => succ,
ast::ExprField(ref e, _, _) => self.propagate_through_expr(&**e, succ),
ast::ExprTupField(ref e, _, _) => self.propagate_through_expr(&**e, succ),
ast::ExprField(ref e, _) => self.propagate_through_expr(&**e, succ),
ast::ExprTupField(ref e, _) => self.propagate_through_expr(&**e, succ),
_ => self.propagate_through_expr(expr, succ)
}
}


@ -477,7 +477,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
Ok(self.cat_deref(expr, base_cmt, 0, false))
}
ast::ExprField(ref base, f_name, _) => {
ast::ExprField(ref base, f_name) => {
let base_cmt = if_ok!(self.cat_expr(&**base));
debug!("cat_expr(cat_field): id={} expr={} base={}",
expr.id,
@ -486,7 +486,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
Ok(self.cat_field(expr, base_cmt, f_name.node.name, expr_ty))
}
ast::ExprTupField(ref base, idx, _) => {
ast::ExprTupField(ref base, idx) => {
let base_cmt = if_ok!(self.cat_expr(&**base));
Ok(self.cat_tup_field(expr, base_cmt, idx.node, expr_ty))
}


@ -17,20 +17,17 @@ use self::FieldName::*;
use std::mem::replace;
use metadata::csearch;
use middle::def;
use middle::resolve;
use middle::{def, resolve};
use middle::ty::{mod, Ty};
use middle::typeck::{MethodCall, MethodMap, MethodOrigin, MethodParam, MethodTypeParam};
use middle::typeck::{MethodStatic, MethodStaticUnboxedClosure, MethodObject, MethodTraitObject};
use util::nodemap::{NodeMap, NodeSet};
use syntax::ast;
use syntax::ast_map;
use syntax::{ast, ast_map};
use syntax::ast_util::{is_local, local_def, PostExpansionMethod};
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::visit::{mod, Visitor};
type Context<'a, 'tcx> = (&'a MethodMap<'tcx>, &'a resolve::ExportMap2);
@ -836,20 +833,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
fn visit_expr(&mut self, expr: &ast::Expr) {
match expr.node {
ast::ExprField(ref base, ident, _) => {
match ty::expr_ty_adjusted(self.tcx, &**base).sty {
ty::ty_struct(id, _) => {
self.check_field(expr.span, id, NamedField(ident.node));
}
_ => {}
ast::ExprField(ref base, ident) => {
if let ty::ty_struct(id, _) = ty::expr_ty_adjusted(self.tcx, &**base).sty {
self.check_field(expr.span, id, NamedField(ident.node));
}
}
ast::ExprTupField(ref base, idx, _) => {
match ty::expr_ty_adjusted(self.tcx, &**base).sty {
ty::ty_struct(id, _) => {
self.check_field(expr.span, id, UnnamedField(idx.node));
}
_ => {}
ast::ExprTupField(ref base, idx) => {
if let ty::ty_struct(id, _) = ty::expr_ty_adjusted(self.tcx, &**base).sty {
self.check_field(expr.span, id, UnnamedField(idx.node));
}
}
ast::ExprMethodCall(ident, _, _) => {


@ -22,8 +22,7 @@ Most of the documentation on regions can be found in
use session::Session;
use middle::ty::{FreeRegion};
use middle::ty::{mod, Ty};
use middle::ty::{mod, Ty, FreeRegion};
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
use util::common::can_reach;
@ -33,7 +32,6 @@ use syntax::codemap::Span;
use syntax::{ast, visit};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
use syntax::ast_util::{stmt_id};
use syntax::ptr::P;
use syntax::visit::{Visitor, FnKind};
/// CodeExtent represents a statically-describable extent that can be
@ -824,11 +822,10 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
match expr.node {
ast::ExprAddrOf(_, ref subexpr) |
ast::ExprUnary(ast::UnDeref, ref subexpr) |
ast::ExprField(ref subexpr, _, _) |
ast::ExprTupField(ref subexpr, _, _) |
ast::ExprField(ref subexpr, _) |
ast::ExprTupField(ref subexpr, _) |
ast::ExprIndex(ref subexpr, _) |
ast::ExprParen(ref subexpr) => {
let subexpr: &'a P<Expr> = subexpr; // FIXME(#11586)
expr = &**subexpr;
}
_ => {


@ -71,17 +71,13 @@ use syntax::ast::{Variant, ViewItem, ViewItemExternCrate};
use syntax::ast::{ViewItemUse, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::ast::{Visibility};
use syntax::ast;
use syntax::ast_util::{PostExpansionMethod, local_def, walk_pat};
use syntax::ast_util;
use syntax::ast_util::{mod, PostExpansionMethod, local_def, walk_pat};
use syntax::attr::AttrMetaMethods;
use syntax::ext::mtwt;
use syntax::parse::token::special_names;
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::parse::token::{mod, special_names, special_idents};
use syntax::codemap::{Span, DUMMY_SP, Pos};
use syntax::owned_slice::OwnedSlice;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::visit::{mod, Visitor};
use std::collections::{HashMap, HashSet};
use std::collections::hash_map::{Occupied, Vacant};
@ -5959,7 +5955,7 @@ impl<'a> Resolver<'a> {
fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) {
match expr.node {
ExprField(_, ident, _) => {
ExprField(_, ident) => {
// FIXME(#6890): Even though you can't treat a method like a
// field, we need to add any trait methods we find that match
// the field name so that we can do some nice error reporting


@ -2638,11 +2638,6 @@ impl ops::Sub<TypeContents,TypeContents> for TypeContents {
}
impl fmt::Show for TypeContents {
#[cfg(stage0)]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TypeContents({:t})", self.bits)
}
#[cfg(not(stage0))]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TypeContents({:b})", self.bits)
}


@ -10,16 +10,13 @@
use super::probe;
use middle::subst;
use middle::subst::Subst;
use middle::subst::{mod, Subst};
use middle::traits;
use middle::ty::{mod, Ty};
use middle::typeck::check;
use middle::typeck::check::{FnCtxt, NoPreference, PreferMutLvalue};
use middle::typeck::check::{mod, FnCtxt, NoPreference, PreferMutLvalue};
use middle::typeck::{MethodCall, MethodCallee, MethodObject, MethodOrigin,
MethodParam, MethodStatic, MethodTraitObject, MethodTypeParam};
use middle::typeck::infer;
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::{mod, InferCtxt};
use middle::ty_fold::HigherRankedFoldable;
use syntax::ast;
use syntax::codemap::Span;
@ -510,8 +507,8 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> {
let last = exprs[exprs.len() - 1];
match last.node {
ast::ExprParen(ref expr) |
ast::ExprField(ref expr, _, _) |
ast::ExprTupField(ref expr, _, _) |
ast::ExprField(ref expr, _) |
ast::ExprTupField(ref expr, _) |
ast::ExprSlice(ref expr, _, _, _) |
ast::ExprIndex(ref expr, _) |
ast::ExprUnary(ast::UnDeref, ref expr) => exprs.push(&**expr),


@ -83,62 +83,41 @@ use self::IsBinopAssignment::*;
use self::TupleArgumentsFlag::*;
use session::Session;
use middle::const_eval;
use middle::def;
use middle::{const_eval, def, traits};
use middle::lang_items::IteratorItem;
use middle::mem_categorization::McResult;
use middle::mem_categorization;
use middle::pat_util::pat_id_map;
use middle::pat_util;
use middle::mem_categorization::{mod, McResult};
use middle::pat_util::{mod, pat_id_map};
use middle::region::CodeExtent;
use middle::subst;
use middle::subst::{Subst, Substs, VecPerParamSpace, ParamSpace};
use middle::traits;
use middle::ty::{FnSig, VariantInfo};
use middle::ty::{Polytype};
use middle::subst::{mod, Subst, Substs, VecPerParamSpace, ParamSpace};
use middle::ty::{FnSig, VariantInfo, Polytype};
use middle::ty::{Disr, ParamTy, ParameterEnvironment};
use middle::ty::{mod, Ty};
use middle::ty::liberate_late_bound_regions;
use middle::ty_fold::TypeFolder;
use middle::typeck::astconv::AstConv;
use middle::typeck::astconv::{ast_region_to_region, ast_ty_to_ty};
use middle::typeck::astconv;
use middle::typeck::astconv::{mod, ast_region_to_region, ast_ty_to_ty, AstConv};
use middle::typeck::check::_match::pat_ctxt;
use middle::typeck::CrateCtxt;
use middle::typeck::infer;
use middle::typeck::rscope::RegionScope;
use middle::typeck::{lookup_def_ccx};
use middle::typeck::no_params;
use middle::typeck::{require_same_types};
use middle::typeck::{MethodCall, MethodCallee, MethodMap, ObjectCastMap};
use middle::typeck::{TypeAndSubsts};
use middle::typeck;
use middle::typeck::{mod, CrateCtxt, infer, lookup_def_ccx, no_params, require_same_types};
use middle::typeck::{MethodCall, MethodCallee, MethodMap, ObjectCastMap, TypeAndSubsts};
use middle::lang_items::TypeIdLangItem;
use lint;
use util::common::{block_query, indenter, loop_query};
use util::ppaux;
use util::ppaux::{UserString, Repr};
use util::ppaux::{mod, UserString, Repr};
use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
use std::cell::{Cell, Ref, RefCell};
use std::collections::hash_map::{Occupied, Vacant};
use std::mem::replace;
use std::rc::Rc;
use syntax::abi;
use syntax::ast::{ProvidedMethod, RequiredMethod, TypeTraitItem};
use syntax::ast;
use syntax::ast_util::{local_def, PostExpansionMethod};
use syntax::ast_util;
use syntax::attr;
use syntax::codemap::Span;
use syntax::codemap;
use syntax::{mod, abi, attr};
use syntax::ast::{mod, ProvidedMethod, RequiredMethod, TypeTraitItem};
use syntax::ast_util::{mod, local_def, PostExpansionMethod};
use syntax::codemap::{mod, Span};
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::visit;
use syntax::visit::Visitor;
use syntax;
use syntax::visit::{mod, Visitor};
pub mod _match;
pub mod vtable;
@ -4405,10 +4384,10 @@ fn check_expr_with_unifier<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
}
ast::ExprField(ref base, ref field, _) => {
ast::ExprField(ref base, ref field) => {
check_field(fcx, expr, lvalue_pref, &**base, field);
}
ast::ExprTupField(ref base, idx, _) => {
ast::ExprTupField(ref base, idx) => {
check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
}
ast::ExprIndex(ref base, ref idx) => {


@ -278,8 +278,8 @@ mod svh_visitor {
ExprBlock(..) => SawExprBlock,
ExprAssign(..) => SawExprAssign,
ExprAssignOp(op, _, _) => SawExprAssignOp(op),
ExprField(_, id, _) => SawExprField(content(id.node)),
ExprTupField(_, id, _) => SawExprTupField(id.node),
ExprField(_, id) => SawExprField(content(id.node)),
ExprTupField(_, id) => SawExprTupField(id.node),
ExprIndex(..) => SawExprIndex,
ExprSlice(..) => SawExprSlice,
ExprPath(..) => SawExprPath,


@ -30,34 +30,26 @@
use driver::driver::CrateAnalysis;
use session::Session;
use middle::def;
use middle::{def, typeck};
use middle::ty::{mod, Ty};
use middle::typeck;
use std::cell::Cell;
use std::io;
use std::io::File;
use std::io::fs;
use std::io::{mod, File, fs};
use std::os;
use syntax::ast;
use syntax::ast_util;
use syntax::ast_util::PostExpansionMethod;
use syntax::ast::{NodeId,DefId};
use syntax::ast_util::{mod, PostExpansionMethod};
use syntax::ast::{mod, NodeId, DefId};
use syntax::ast_map::NodeItem;
use syntax::attr;
use syntax::codemap::*;
use syntax::parse::token;
use syntax::parse::token::{get_ident,keywords};
use syntax::parse::token::{mod, get_ident, keywords};
use syntax::owned_slice::OwnedSlice;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::visit::{mod, Visitor};
use syntax::print::pprust::{path_to_string,ty_to_string};
use syntax::ptr::P;
use self::span_utils::SpanUtils;
use self::recorder::Recorder;
use self::recorder::FmtStrs;
use self::recorder::{Recorder, FmtStrs};
use util::ppaux;
@ -568,13 +560,15 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
Some(node_id) => node_id,
None => -1,
};
let val = self.span.snippet(item.span);
let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Struct);
self.fmt.struct_str(item.span,
sub_span,
item.id,
ctor_id,
qualname.as_slice(),
self.cur_scope);
self.cur_scope,
val.as_slice());
// fields
for field in def.fields.iter() {
@ -589,21 +583,23 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
item: &ast::Item,
enum_definition: &ast::EnumDef,
ty_params: &ast::Generics) {
let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let enum_name = self.analysis.ty_cx.map.path_to_string(item.id);
let val = self.span.snippet(item.span);
match self.span.sub_span_after_keyword(item.span, keywords::Enum) {
Some(sub_span) => self.fmt.enum_str(item.span,
Some(sub_span),
item.id,
qualname.as_slice(),
self.cur_scope),
enum_name.as_slice(),
self.cur_scope,
val.as_slice()),
None => self.sess.span_bug(item.span,
format!("Could not find subspan for enum {}",
qualname).as_slice()),
enum_name).as_slice()),
}
for variant in enum_definition.variants.iter() {
let name = get_ident(variant.node.name);
let name = name.get();
let mut qualname = qualname.clone();
let mut qualname = enum_name.clone();
qualname.push_str("::");
qualname.push_str(name);
let val = self.span.snippet(variant.span);
@ -615,6 +611,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
variant.node.id,
name,
qualname.as_slice(),
enum_name.as_slice(),
val.as_slice(),
item.id);
for arg in args.iter() {
@ -632,18 +629,19 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
variant.node.id,
ctor_id,
qualname.as_slice(),
enum_name.as_slice(),
val.as_slice(),
item.id);
for field in struct_def.fields.iter() {
self.process_struct_field_def(field, qualname.as_slice(), variant.node.id);
self.process_struct_field_def(field, enum_name.as_slice(), variant.node.id);
self.visit_ty(&*field.node.ty);
}
}
}
}
self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
self.process_generic_params(ty_params, item.span, enum_name.as_slice(), item.id);
}
fn process_impl(&mut self,
@ -698,13 +696,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
trait_refs: &OwnedSlice<ast::TyParamBound>,
methods: &Vec<ast::TraitItem>) {
let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let val = self.span.snippet(item.span);
let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait);
self.fmt.trait_str(item.span,
sub_span,
item.id,
qualname.as_slice(),
self.cur_scope);
self.cur_scope,
val.as_slice());
// super-traits
for super_bound in trait_refs.iter() {
@ -1293,7 +1292,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
ast::ExprStruct(ref path, ref fields, ref base) =>
self.process_struct_lit(ex, path, fields, base),
ast::ExprMethodCall(_, _, ref args) => self.process_method_call(ex, args),
ast::ExprField(ref sub_ex, ident, _) => {
ast::ExprField(ref sub_ex, ident) => {
if generated_code(sub_ex.span) {
return
}
@ -1319,7 +1318,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
"Expected struct type, but not ty_struct"),
}
},
ast::ExprTupField(ref sub_ex, idx, _) => {
ast::ExprTupField(ref sub_ex, idx) => {
if generated_code(sub_ex.span) {
return
}


@ -106,15 +106,19 @@ impl<'a> FmtStrs<'a> {
Variable => ("variable",
vec!("id","name","qualname","value","type","scopeid"),
true, true),
Enum => ("enum", vec!("id","qualname","scopeid"), true, true),
Variant => ("variant", vec!("id","name","qualname","value","scopeid"), true, true),
Enum => ("enum", vec!("id","qualname","scopeid","value"), true, true),
Variant => ("variant",
vec!("id","name","qualname","type","value","scopeid"),
true, true),
VariantStruct => ("variant_struct",
vec!("id","ctor_id","qualname","value","scopeid"), true, true),
Function => ("function", vec!("id","qualname","declid","declidcrate","scopeid"),
vec!("id","ctor_id","qualname","type","value","scopeid"),
true, true),
Function => ("function",
vec!("id","qualname","declid","declidcrate","scopeid"),
true, true),
MethodDecl => ("method_decl", vec!("id","qualname","scopeid"), true, true),
Struct => ("struct", vec!("id","ctor_id","qualname","scopeid"), true, true),
Trait => ("trait", vec!("id","qualname","scopeid"), true, true),
Struct => ("struct", vec!("id","ctor_id","qualname","scopeid","value"), true, true),
Trait => ("trait", vec!("id","qualname","scopeid","value"), true, true),
Impl => ("impl", vec!("id","refid","refidcrate","scopeid"), true, true),
Module => ("module", vec!("id","qualname","scopeid","def_file"), true, false),
UseAlias => ("use_alias",
@ -128,7 +132,7 @@ impl<'a> FmtStrs<'a> {
true, false),
MethodCall => ("method_call",
vec!("refid","refidcrate","declid","declidcrate","scopeid"),
true, true),
true, true),
Typedef => ("typedef", vec!("id","qualname","value"), true, true),
ExternalCrate => ("external_crate", vec!("name","crate","file_name"), false, false),
Crate => ("crate", vec!("name"), true, false),
@ -140,7 +144,7 @@ impl<'a> FmtStrs<'a> {
true, true),
StructRef => ("struct_ref",
vec!("refid","refidcrate","qualname","scopeid"),
true, true),
true, true),
FnRef => ("fn_ref", vec!("refid","refidcrate","qualname","scopeid"), true, true)
}
}
@ -157,6 +161,7 @@ impl<'a> FmtStrs<'a> {
}
let values = values.iter().map(|s| {
// Never take more than 1020 chars
if s.len() > 1020 {
s.as_slice().slice_to(1020)
} else {
@ -323,11 +328,12 @@ impl<'a> FmtStrs<'a> {
sub_span: Option<Span>,
id: NodeId,
name: &str,
scope_id: NodeId) {
scope_id: NodeId,
value: &str) {
self.check_and_record(Enum,
span,
sub_span,
svec!(id, name, scope_id));
svec!(id, name, scope_id, value));
}
pub fn tuple_variant_str(&mut self,
@ -336,12 +342,13 @@ impl<'a> FmtStrs<'a> {
id: NodeId,
name: &str,
qualname: &str,
typ: &str,
val: &str,
scope_id: NodeId) {
self.check_and_record(Variant,
span,
sub_span,
svec!(id, name, qualname, val, scope_id));
svec!(id, name, qualname, typ, val, scope_id));
}
pub fn struct_variant_str(&mut self,
@ -350,12 +357,13 @@ impl<'a> FmtStrs<'a> {
id: NodeId,
ctor_id: NodeId,
name: &str,
typ: &str,
val: &str,
scope_id: NodeId) {
self.check_and_record(VariantStruct,
span,
sub_span,
svec!(id, ctor_id, name, val, scope_id));
svec!(id, ctor_id, name, typ, val, scope_id));
}
pub fn fn_str(&mut self,
@ -405,11 +413,12 @@ impl<'a> FmtStrs<'a> {
id: NodeId,
ctor_id: NodeId,
name: &str,
scope_id: NodeId) {
scope_id: NodeId,
value: &str) {
self.check_and_record(Struct,
span,
sub_span,
svec!(id, ctor_id, name, scope_id));
svec!(id, ctor_id, name, scope_id, value));
}
pub fn trait_str(&mut self,
@ -417,11 +426,12 @@ impl<'a> FmtStrs<'a> {
sub_span: Option<Span>,
id: NodeId,
name: &str,
scope_id: NodeId) {
scope_id: NodeId,
value: &str) {
self.check_and_record(Trait,
span,
sub_span,
svec!(id, name, scope_id));
svec!(id, name, scope_id, value));
}
pub fn impl_str(&mut self,


@ -13,22 +13,14 @@ use back::abi;
use llvm;
use llvm::{ConstFCmp, ConstICmp, SetLinkage, PrivateLinkage, ValueRef, Bool, True, False};
use llvm::{IntEQ, IntNE, IntUGT, IntUGE, IntULT, IntULE, IntSGT, IntSGE, IntSLT, IntSLE,
RealOEQ, RealOGT, RealOGE, RealOLT, RealOLE, RealONE};
RealOEQ, RealOGT, RealOGE, RealOLT, RealOLE, RealONE};
use metadata::csearch;
use middle::const_eval;
use middle::def;
use trans::adt;
use trans::base;
use trans::base::push_ctxt;
use trans::closure;
use middle::{const_eval, def};
use trans::{adt, closure, consts, debuginfo, expr, inline, machine};
use trans::base::{mod, push_ctxt};
use trans::common::*;
use trans::consts;
use trans::expr;
use trans::inline;
use trans::machine;
use trans::type_::Type;
use trans::type_of;
use trans::debuginfo;
use middle::ty::{mod, Ty};
use util::ppaux::{Repr, ty_to_string};
@ -418,7 +410,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
}
}
ast::ExprField(ref base, field, _) => {
ast::ExprField(ref base, field) => {
let (bv, bt) = const_expr(cx, &**base);
let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, field_tys| {
@ -426,7 +418,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
adt::const_get_field(cx, &*brepr, bv, discr, ix)
})
}
ast::ExprTupField(ref base, idx, _) => {
ast::ExprTupField(ref base, idx) => {
let (bv, bt) = const_expr(cx, &**base);
let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, _| {


@ -197,13 +197,10 @@ use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::*;
use metadata::csearch;
use middle::subst::{mod, Subst, Substs};
use trans::adt;
use trans::{mod, adt, machine, type_of};
use trans::common::*;
use trans::machine;
use trans::_match::{BindingInfo, TrByCopy, TrByMove, TrByRef};
use trans::type_of;
use trans::type_::Type;
use trans;
use middle::ty::{mod, Ty};
use middle::pat_util;
use session::config::{mod, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
@ -219,8 +216,7 @@ use syntax::util::interner::Interner;
use syntax::codemap::{Span, Pos};
use syntax::{ast, codemap, ast_util, ast_map};
use syntax::ast_util::PostExpansionMethod;
use syntax::parse::token;
use syntax::parse::token::special_idents;
use syntax::parse::token::{mod, special_idents};
static DW_LANG_RUST: c_uint = 0x9000;
@ -3456,8 +3452,8 @@ fn populate_scope_map(cx: &CrateContext,
ast::ExprCast(ref sub_exp, _) |
ast::ExprAddrOf(_, ref sub_exp) |
ast::ExprField(ref sub_exp, _, _) |
ast::ExprTupField(ref sub_exp, _, _) |
ast::ExprField(ref sub_exp, _) |
ast::ExprTupField(ref sub_exp, _) |
ast::ExprParen(ref sub_exp) =>
walk_expr(cx, &**sub_exp, scope_stack, scope_map),


@ -38,47 +38,26 @@ pub use self::Dest::*;
use self::lazy_binop_ty::*;
use back::abi;
use llvm;
use llvm::{ValueRef};
use llvm::{mod, ValueRef};
use middle::def;
use middle::mem_categorization::Typer;
use middle::subst;
use middle::subst::Subst;
use trans::_match;
use trans::adt;
use trans::asm;
use middle::subst::{mod, Subst};
use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
use trans::{debuginfo, glue, machine, meth, inline, tvec, type_of};
use trans::base::*;
use trans::base;
use trans::build::*;
use trans::callee;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::closure;
use trans::cleanup::{mod, CleanupMethods};
use trans::common::*;
use trans::consts;
use trans::controlflow;
use trans::datum::*;
use trans::debuginfo;
use trans::glue;
use trans::machine;
use trans::meth;
use trans::inline;
use trans::tvec;
use trans::type_of;
use middle::ty::{struct_fields, tup_fields};
use middle::ty::{AdjustDerefRef, AdjustAddEnv, AutoUnsafe};
use middle::ty::{AutoPtr};
use middle::ty::{mod, Ty};
use middle::typeck;
use middle::typeck::MethodCall;
use middle::ty::{mod, struct_fields, tup_fields};
use middle::ty::{AdjustDerefRef, AdjustAddEnv, AutoUnsafe, AutoPtr, Ty};
use middle::typeck::{mod, MethodCall};
use util::common::indenter;
use util::ppaux::Repr;
use trans::machine::{llsize_of, llsize_of_alloc};
use trans::type_::Type;
use syntax::ast;
use syntax::ast_util;
use syntax::codemap;
use syntax::{ast, ast_util, codemap};
use syntax::print::pprust::{expr_to_string};
use syntax::ptr::P;
use std::rc::Rc;
@ -599,10 +578,10 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
ast::ExprPath(_) => {
trans_def(bcx, expr, bcx.def(expr.id))
}
ast::ExprField(ref base, ident, _) => {
ast::ExprField(ref base, ident) => {
trans_rec_field(bcx, &**base, ident.node)
}
ast::ExprTupField(ref base, idx, _) => {
ast::ExprTupField(ref base, idx) => {
trans_rec_tup_field(bcx, &**base, idx.node)
}
ast::ExprIndex(ref base, ref idx) => {


@ -1923,7 +1923,7 @@ impl Clean<ViewItemInner> for ast::ViewItem_ {
#[deriving(Clone, Encodable, Decodable)]
pub enum ViewPath {
// use str = source;
// use source as str;
SimpleImport(String, ImportSource),
// use source::*;
GlobImport(ImportSource),


@ -18,7 +18,6 @@ use rustc_trans::back::link;
use syntax::{ast, ast_map, codemap, diagnostic};
use std::cell::RefCell;
use std::os;
use std::collections::{HashMap, HashSet};
use arena::TypedArena;
@ -89,7 +88,7 @@ pub fn run_core(libs: Vec<Path>, cfgs: Vec<String>, externs: Externs,
let warning_lint = lint::builtin::WARNINGS.name_lower();
let sessopts = config::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),
maybe_sysroot: None,
addl_lib_search_paths: RefCell::new(libs),
crate_types: vec!(config::CrateTypeRlib),
lint_opts: vec!((warning_lint, lint::Allow)),


@ -28,6 +28,7 @@
#![allow(non_camel_case_types)]
use libc;
use std::ascii::AsciiExt;
use std::cell::{RefCell, Cell};
use std::fmt;
use std::slice;
@ -223,12 +224,8 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
};
// Transform the contents of the header into a hyphenated string
let id = s.as_slice().words().map(|s| {
match s.to_ascii_opt() {
Some(s) => s.to_lowercase().into_string(),
None => s.to_string()
}
}).collect::<Vec<String>>().connect("-");
let id = s.as_slice().words().map(|s| s.to_ascii_lower())
.collect::<Vec<String>>().connect("-");
// This is a terrible hack working around how hoedown gives us rendered
// html for text rather than the raw text.


@ -1428,6 +1428,8 @@ impl<'a> fmt::Show for Item<'a> {
clean::TypedefItem(ref t) => item_typedef(fmt, self.item, t),
clean::MacroItem(ref m) => item_macro(fmt, self.item, m),
clean::PrimitiveItem(ref p) => item_primitive(fmt, self.item, p),
clean::StaticItem(ref i) => item_static(fmt, self.item, i),
clean::ConstantItem(ref c) => item_constant(fmt, self.item, c),
_ => Ok(())
}
}
@ -1453,13 +1455,6 @@ fn full_path(cx: &Context, item: &clean::Item) -> String {
return s
}
fn blank<'a>(s: Option<&'a str>) -> &'a str {
match s {
Some(s) => s,
None => ""
}
}
fn shorter<'a>(s: Option<&'a str>) -> &'a str {
match s {
Some(s) => match s.find_str("\n\n") {
@ -1570,66 +1565,18 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
id = short, name = name));
}
struct Initializer<'a>(&'a str, Item<'a>);
impl<'a> fmt::Show for Initializer<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Initializer(s, item) = *self;
if s.len() == 0 { return Ok(()); }
try!(write!(f, "<code> = </code>"));
if s.contains("\n") {
match item.href() {
Some(url) => {
write!(f, "<a href='{}'>[definition]</a>",
url)
}
None => Ok(()),
}
} else {
write!(f, "<code>{}</code>", s.as_slice())
}
}
}
match myitem.inner {
clean::StaticItem(ref s) | clean::ForeignStaticItem(ref s) => {
try!(write!(w, "
<tr>
<td>{}<code>{}static {}{}: {}</code>{}</td>
<td class='docblock'>{}&nbsp;</td>
</tr>
",
ConciseStability(&myitem.stability),
VisSpace(myitem.visibility),
MutableSpace(s.mutability),
*myitem.name.as_ref().unwrap(),
s.type_,
Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
Markdown(blank(myitem.doc_value()))));
}
clean::ConstantItem(ref s) => {
try!(write!(w, "
<tr>
<td>{}<code>{}const {}: {}</code>{}</td>
<td class='docblock'>{}&nbsp;</td>
</tr>
",
ConciseStability(&myitem.stability),
VisSpace(myitem.visibility),
*myitem.name.as_ref().unwrap(),
s.type_,
Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
Markdown(blank(myitem.doc_value()))));
}
clean::ViewItemItem(ref item) => {
match item.inner {
clean::ExternCrate(ref name, ref src, _) => {
try!(write!(w, "<tr><td><code>extern crate {}",
name.as_slice()));
match *src {
Some(ref src) => try!(write!(w, " = \"{}\"",
src.as_slice())),
None => {}
Some(ref src) =>
try!(write!(w, "<tr><td><code>extern crate \"{}\" as {}",
src.as_slice(),
name.as_slice())),
None =>
try!(write!(w, "<tr><td><code>extern crate {}",
name.as_slice())),
}
try!(write!(w, ";</code></td></tr>"));
}
@ -1665,6 +1612,39 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
write!(w, "</table>")
}
struct Initializer<'a>(&'a str);
impl<'a> fmt::Show for Initializer<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Initializer(s) = *self;
if s.len() == 0 { return Ok(()); }
try!(write!(f, "<code> = </code>"));
write!(f, "<code>{}</code>", s.as_slice())
}
}
fn item_constant(w: &mut fmt::Formatter, it: &clean::Item,
c: &clean::Constant) -> fmt::Result {
try!(write!(w, "<pre class='rust const'>{vis}const \
{name}: {typ}{init}</pre>",
vis = VisSpace(it.visibility),
name = it.name.as_ref().unwrap().as_slice(),
typ = c.type_,
init = Initializer(c.expr.as_slice())));
document(w, it)
}
fn item_static(w: &mut fmt::Formatter, it: &clean::Item,
s: &clean::Static) -> fmt::Result {
try!(write!(w, "<pre class='rust static'>{vis}static {mutability}\
{name}: {typ}{init}</pre>",
vis = VisSpace(it.visibility),
mutability = MutableSpace(s.mutability),
name = it.name.as_ref().unwrap().as_slice(),
typ = s.type_,
init = Initializer(s.expr.as_slice())));
document(w, it)
}
fn item_function(w: &mut fmt::Formatter, it: &clean::Item,
f: &clean::Function) -> fmt::Result {
try!(write!(w, "<pre class='rust fn'>{vis}{fn_style}fn \


@ -234,6 +234,7 @@ nav.sub {
.content .highlighted.struct { background-color: #e7b1a0; }
.content .highlighted.fn { background-color: #c6afb3; }
.content .highlighted.method { background-color: #c6afb3; }
.content .highlighted.tymethod { background-color: #c6afb3; }
.content .highlighted.ffi { background-color: #c6afb3; }
.docblock.short.nowrap {
@ -348,6 +349,7 @@ p a:hover { text-decoration: underline; }
.content span.struct, .content a.struct, .block a.current.struct { color: #e53700; }
.content span.fn, .content a.fn, .block a.current.fn { color: #8c6067; }
.content span.method, .content a.method, .block a.current.method { color: #8c6067; }
.content span.tymethod, .content a.tymethod, .block a.current.tymethod { color: #8c6067; }
.content span.ffi, .content a.ffi, .block a.current.ffi { color: #8c6067; }
.content .fnname { color: #8c6067; }
@ -12,7 +12,8 @@
//! Operations on ASCII strings and characters
#![experimental]
#![unstable = "unsure about placement and naming"]
#![allow(deprecated)]
use core::kinds::Sized;
use fmt;
@ -31,30 +32,47 @@ pub struct Ascii { chr: u8 }
impl Ascii {
/// Converts an ascii character into a `u8`.
#[inline]
pub fn to_byte(self) -> u8 {
#[unstable = "recently renamed"]
pub fn as_byte(&self) -> u8 {
self.chr
}
/// Deprecated: use `as_byte` instead.
#[deprecated = "use as_byte"]
pub fn to_byte(self) -> u8 {
self.as_byte()
}
/// Converts an ascii character into a `char`.
#[inline]
pub fn to_char(self) -> char {
#[unstable = "recently renamed"]
pub fn as_char(&self) -> char {
self.chr as char
}
/// Deprecated: use `as_char` instead.
#[deprecated = "use as_char"]
pub fn to_char(self) -> char {
self.as_char()
}
/// Convert to lowercase.
#[inline]
pub fn to_lowercase(self) -> Ascii {
#[stable]
pub fn to_lowercase(&self) -> Ascii {
Ascii{chr: ASCII_LOWER_MAP[self.chr as uint]}
}
/// Convert to uppercase.
#[inline]
pub fn to_uppercase(self) -> Ascii {
#[stable]
pub fn to_uppercase(&self) -> Ascii {
Ascii{chr: ASCII_UPPER_MAP[self.chr as uint]}
}
/// Compares two ascii characters for equality, ignoring case.
#[inline]
#[deprecated = "normalize with to_lowercase"]
pub fn eq_ignore_case(self, other: Ascii) -> bool {
ASCII_LOWER_MAP[self.chr as uint] == ASCII_LOWER_MAP[other.chr as uint]
}
@ -63,66 +81,77 @@ impl Ascii {
/// Check if the character is a letter (a-z, A-Z)
#[inline]
#[stable]
pub fn is_alphabetic(&self) -> bool {
(self.chr >= 0x41 && self.chr <= 0x5A) || (self.chr >= 0x61 && self.chr <= 0x7A)
}
/// Check if the character is a number (0-9)
#[inline]
#[unstable = "may be renamed"]
pub fn is_digit(&self) -> bool {
self.chr >= 0x30 && self.chr <= 0x39
}
/// Check if the character is a letter or number
#[inline]
#[stable]
pub fn is_alphanumeric(&self) -> bool {
self.is_alphabetic() || self.is_digit()
}
/// Check if the character is a space or horizontal tab
#[inline]
#[experimental = "likely to be removed"]
pub fn is_blank(&self) -> bool {
self.chr == b' ' || self.chr == b'\t'
}
/// Check if the character is a control character
#[inline]
#[stable]
pub fn is_control(&self) -> bool {
self.chr < 0x20 || self.chr == 0x7F
}
/// Checks if the character is printable (except space)
#[inline]
#[experimental = "unsure about naming, or whether this is needed"]
pub fn is_graph(&self) -> bool {
(self.chr - 0x21) < 0x5E
}
/// Checks if the character is printable (including space)
#[inline]
#[unstable = "unsure about naming"]
pub fn is_print(&self) -> bool {
(self.chr - 0x20) < 0x5F
}
/// Checks if the character is lowercase
/// Checks if the character is alphabetic and lowercase
#[inline]
#[stable]
pub fn is_lowercase(&self) -> bool {
(self.chr - b'a') < 26
}
/// Checks if the character is uppercase
/// Checks if the character is alphabetic and uppercase
#[inline]
#[stable]
pub fn is_uppercase(&self) -> bool {
(self.chr - b'A') < 26
}
/// Checks if the character is punctuation
#[inline]
#[stable]
pub fn is_punctuation(&self) -> bool {
self.is_graph() && !self.is_alphanumeric()
}
/// Checks if the character is a valid hex digit
#[inline]
#[stable]
pub fn is_hex(&self) -> bool {
self.is_digit() || ((self.chr | 32u8) - b'a') < 6
}
@ -135,6 +164,7 @@ impl<'a> fmt::Show for Ascii {
}
/// Trait for converting into an ascii type.
#[experimental = "may be replaced by generic conversion traits"]
pub trait AsciiCast<T> {
/// Convert to an ascii type, panic on non-ASCII input.
#[inline]
@ -160,6 +190,7 @@ pub trait AsciiCast<T> {
fn is_ascii(&self) -> bool;
}
#[experimental = "may be replaced by generic conversion traits"]
impl<'a> AsciiCast<&'a[Ascii]> for &'a [u8] {
#[inline]
unsafe fn to_ascii_nocheck(&self) -> &'a[Ascii] {
@ -175,6 +206,7 @@ impl<'a> AsciiCast<&'a[Ascii]> for &'a [u8] {
}
}
#[experimental = "may be replaced by generic conversion traits"]
impl<'a> AsciiCast<&'a [Ascii]> for &'a str {
#[inline]
unsafe fn to_ascii_nocheck(&self) -> &'a [Ascii] {
@ -187,6 +219,7 @@ impl<'a> AsciiCast<&'a [Ascii]> for &'a str {
}
}
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiCast<Ascii> for u8 {
#[inline]
unsafe fn to_ascii_nocheck(&self) -> Ascii {
@ -199,6 +232,7 @@ impl AsciiCast<Ascii> for u8 {
}
}
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiCast<Ascii> for char {
#[inline]
unsafe fn to_ascii_nocheck(&self) -> Ascii {
@ -212,6 +246,7 @@ impl AsciiCast<Ascii> for char {
}
/// Trait for copyless casting to an ascii vector.
#[experimental = "may be replaced by generic conversion traits"]
pub trait OwnedAsciiCast {
/// Check if convertible to ascii
fn is_ascii(&self) -> bool;
@ -241,6 +276,7 @@ pub trait OwnedAsciiCast {
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii>;
}
#[experimental = "may be replaced by generic conversion traits"]
impl OwnedAsciiCast for String {
#[inline]
fn is_ascii(&self) -> bool {
@ -253,6 +289,7 @@ impl OwnedAsciiCast for String {
}
}
#[experimental = "may be replaced by generic conversion traits"]
impl OwnedAsciiCast for Vec<u8> {
#[inline]
fn is_ascii(&self) -> bool {
@ -274,6 +311,7 @@ impl OwnedAsciiCast for Vec<u8> {
/// Trait for converting an ascii type to a string. Needed to convert
/// `&[Ascii]` to `&str`.
#[experimental = "may be replaced by generic conversion traits"]
pub trait AsciiStr for Sized? {
/// Convert to a string.
fn as_str_ascii<'a>(&'a self) -> &'a str;
@ -283,6 +321,7 @@ pub trait AsciiStr for Sized? {
fn to_lower(&self) -> Vec<Ascii>;
/// Convert to vector representing a lower cased ascii string.
#[deprecated = "use iterators instead"]
fn to_lowercase(&self) -> Vec<Ascii>;
/// Deprecated: use `to_uppercase`
@ -290,12 +329,15 @@ pub trait AsciiStr for Sized? {
fn to_upper(&self) -> Vec<Ascii>;
/// Convert to vector representing an upper cased ascii string.
#[deprecated = "use iterators instead"]
fn to_uppercase(&self) -> Vec<Ascii>;
/// Compares two Ascii strings ignoring case.
#[deprecated = "use iterators instead"]
fn eq_ignore_case(&self, other: &[Ascii]) -> bool;
}
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiStr for [Ascii] {
#[inline]
fn as_str_ascii<'a>(&'a self) -> &'a str {
@ -336,11 +378,13 @@ impl IntoString for Vec<Ascii> {
}
/// Trait to convert to an owned byte vector by consuming self
#[experimental = "may be replaced by generic conversion traits"]
pub trait IntoBytes {
/// Converts to an owned byte vector by consuming self
fn into_bytes(self) -> Vec<u8>;
}
#[experimental = "may be replaced by generic conversion traits"]
impl IntoBytes for Vec<Ascii> {
fn into_bytes(self) -> Vec<u8> {
unsafe {
@ -358,6 +402,7 @@ impl IntoBytes for Vec<Ascii> {
/// Extension methods for ASCII-subset only operations on owned strings
#[experimental = "would prefer to do this in a more general way"]
pub trait OwnedAsciiExt {
/// Convert the string to ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
@ -371,6 +416,7 @@ pub trait OwnedAsciiExt {
}
/// Extension methods for ASCII-subset only operations on string slices
#[experimental = "would prefer to do this in a more general way"]
pub trait AsciiExt<T> for Sized? {
/// Makes a copy of the string in ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
@ -388,6 +434,7 @@ pub trait AsciiExt<T> for Sized? {
fn eq_ignore_ascii_case(&self, other: &Self) -> bool;
}
#[experimental = "would prefer to do this in a more general way"]
impl AsciiExt<String> for str {
#[inline]
fn to_ascii_upper(&self) -> String {
@ -407,6 +454,7 @@ impl AsciiExt<String> for str {
}
}
#[experimental = "would prefer to do this in a more general way"]
impl OwnedAsciiExt for String {
#[inline]
fn into_ascii_upper(self) -> String {
@ -421,6 +469,7 @@ impl OwnedAsciiExt for String {
}
}
#[experimental = "would prefer to do this in a more general way"]
impl AsciiExt<Vec<u8>> for [u8] {
#[inline]
fn to_ascii_upper(&self) -> Vec<u8> {
@ -443,6 +492,7 @@ impl AsciiExt<Vec<u8>> for [u8] {
}
}
#[experimental = "would prefer to do this in a more general way"]
impl OwnedAsciiExt for Vec<u8> {
#[inline]
fn into_ascii_upper(mut self) -> Vec<u8> {
@ -472,6 +522,7 @@ impl OwnedAsciiExt for Vec<u8> {
/// - Any other chars in the range [0x20,0x7e] are not escaped.
/// - Any other chars are given hex escapes.
/// - Unicode escapes are never generated by this function.
#[unstable = "needs to be updated to use an iterator"]
pub fn escape_default(c: u8, f: |u8|) {
match c {
b'\t' => { f(b'\\'); f(b't'); }
@ -494,7 +545,7 @@ pub fn escape_default(c: u8, f: |u8|) {
}
}
pub static ASCII_LOWER_MAP: [u8, ..256] = [
static ASCII_LOWER_MAP: [u8, ..256] = [
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
@ -533,7 +584,7 @@ pub static ASCII_LOWER_MAP: [u8, ..256] = [
0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
];
pub static ASCII_UPPER_MAP: [u8, ..256] = [
static ASCII_UPPER_MAP: [u8, ..256] = [
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
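The hunks above rename the `Ascii` accessors (`to_byte`/`to_char` become `as_byte`/`as_char`, with the old names deprecated) and keep `to_lowercase`/`to_uppercase` taking `&self`. A minimal sketch of the renamed API, assuming the 2014-era `std::ascii` module exactly as shown in this patch (this snippet is illustrative only and not part of the commit):

use std::ascii::AsciiCast;

fn main() {
    // `to_ascii` panics on non-ASCII input, per the `AsciiCast` trait above.
    let c = b'q'.to_ascii();
    // `as_byte`/`as_char` replace the now-deprecated `to_byte`/`to_char`.
    assert_eq!(c.to_uppercase().as_byte(), b'Q');
    assert_eq!(c.to_uppercase().as_char(), 'Q');
}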
@ -1110,7 +1110,7 @@ extern "system" {
///
/// The first element is traditionally the path to the executable, but it can be
/// set to arbitrary text, and it may not even exist, so this property should not
// be relied upon for security purposes.
/// be relied upon for security purposes.
///
/// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD.
/// See `String::from_utf8_lossy` for details.
@ -235,10 +235,10 @@ impl GenericPathUnsafe for Path {
let repr = me.repr.as_slice();
match me.prefix {
Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().to_byte()
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte()
}
Some(VerbatimDiskPrefix) => {
repr.as_bytes()[4] == path.as_bytes()[0].to_ascii().to_uppercase().to_byte()
repr.as_bytes()[4] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte()
}
_ => false
}
@ -673,14 +673,17 @@ impl Path {
match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() &&
s_repr.as_bytes()[0].to_ascii().eq_ignore_case(o_repr.as_bytes()[4].to_ascii())
s_repr.as_bytes()[0].to_ascii().to_lowercase() ==
o_repr.as_bytes()[4].to_ascii().to_lowercase()
}
(Some(VerbatimDiskPrefix), Some(DiskPrefix)) => {
other.is_absolute() &&
s_repr.as_bytes()[4].to_ascii().eq_ignore_case(o_repr.as_bytes()[0].to_ascii())
s_repr.as_bytes()[4].to_ascii().to_lowercase() ==
o_repr.as_bytes()[0].to_ascii().to_lowercase()
}
(Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => {
s_repr.as_bytes()[4].to_ascii().eq_ignore_case(o_repr.as_bytes()[4].to_ascii())
s_repr.as_bytes()[4].to_ascii().to_lowercase() ==
o_repr.as_bytes()[4].to_ascii().to_lowercase()
}
(Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
s_repr.slice(2, self.prefix_len()) == o_repr.slice(8, other.prefix_len())
@ -747,10 +750,7 @@ impl Path {
let mut s = String::from_str(s.slice_to(len));
unsafe {
let v = s.as_mut_vec();
v[0] = (*v)[0]
.to_ascii()
.to_uppercase()
.to_byte();
v[0] = (*v)[0].to_ascii().to_uppercase().as_byte();
}
if is_abs {
// normalize C:/ to C:\
@ -765,7 +765,7 @@ impl Path {
let mut s = String::from_str(s.slice_to(len));
unsafe {
let v = s.as_mut_vec();
v[4] = (*v)[4].to_ascii().to_uppercase().to_byte();
v[4] = (*v)[4].to_ascii().to_uppercase().as_byte();
}
Some(s)
}
@ -787,13 +787,13 @@ impl Path {
match prefix {
Some(DiskPrefix) => {
s.push(prefix_.as_bytes()[0].to_ascii()
.to_uppercase().to_char());
.to_uppercase().as_char());
s.push(':');
}
Some(VerbatimDiskPrefix) => {
s.push_str(prefix_.slice_to(4));
s.push(prefix_.as_bytes()[4].to_ascii()
.to_uppercase().to_char());
.to_uppercase().as_char());
s.push_str(prefix_.slice_from(5));
}
Some(UNCPrefix(a,b)) => {
@ -673,8 +673,8 @@ pub enum Expr_ {
ExprAssign(P<Expr>, P<Expr>),
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
ExprField(P<Expr>, SpannedIdent, Vec<P<Ty>>),
ExprTupField(P<Expr>, Spanned<uint>, Vec<P<Ty>>),
ExprField(P<Expr>, SpannedIdent),
ExprTupField(P<Expr>, Spanned<uint>),
ExprIndex(P<Expr>, P<Expr>),
ExprSlice(P<Expr>, Option<P<Expr>>, Option<P<Expr>>, Mutability),
@ -577,7 +577,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
};
let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id, Vec::new()))
self.expr(sp, ast::ExprField(expr, id))
}
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
let field_span = Span {
@ -587,7 +587,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
};
let id = Spanned { node: idx, span: field_span };
self.expr(sp, ast::ExprTupField(expr, id, Vec::new()))
self.expr(sp, ast::ExprTupField(expr, id))
}
fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {

@ -1345,15 +1345,13 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span}: Expr, folder: &mut T) ->
folder.fold_expr(el),
folder.fold_expr(er))
}
ExprField(el, ident, tys) => {
ExprField(el, ident) => {
ExprField(folder.fold_expr(el),
respan(ident.span, folder.fold_ident(ident.node)),
tys.move_map(|x| folder.fold_ty(x)))
respan(ident.span, folder.fold_ident(ident.node)))
}
ExprTupField(el, ident, tys) => {
ExprTupField(el, ident) => {
ExprTupField(folder.fold_expr(el),
respan(ident.span, folder.fold_uint(ident.node)),
tys.move_map(|x| folder.fold_ty(x)))
respan(ident.span, folder.fold_uint(ident.node)))
}
ExprIndex(el, er) => {
ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
@ -49,8 +49,7 @@ use ast::{PolyTraitRef};
use ast::{QPath, RequiredMethod};
use ast::{Return, BiShl, BiShr, Stmt, StmtDecl};
use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use ast::{StructVariantKind, BiSub};
use ast::StrStyle;
use ast::{StructVariantKind, BiSub, StrStyle};
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
use ast::{TtDelimited, TtSequence, TtToken};
@ -65,23 +64,18 @@ use ast::{UnsafeFn, ViewItem, ViewItem_, ViewItemExternCrate, ViewItemUse};
use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple};
use ast::{Visibility, WhereClause, WherePredicate};
use ast;
use ast_util::{as_prec, ident_to_path, operator_prec};
use ast_util;
use codemap::{Span, BytePos, Spanned, spanned, mk_sp};
use codemap;
use ast_util::{mod, as_prec, ident_to_path, operator_prec};
use codemap::{mod, Span, BytePos, Spanned, spanned, mk_sp};
use diagnostic;
use ext::tt::macro_parser;
use parse;
use parse::attr::ParserAttr;
use parse::classify;
use parse::common::{SeqSep, seq_sep_none};
use parse::common::{seq_sep_trailing_allowed};
use parse::lexer::Reader;
use parse::lexer::TokenAndSpan;
use parse::common::{SeqSep, seq_sep_none, seq_sep_trailing_allowed};
use parse::lexer::{Reader, TokenAndSpan};
use parse::obsolete::*;
use parse::token::{MatchNt, SubstNt, InternedString};
use parse::token::{mod, MatchNt, SubstNt, InternedString};
use parse::token::{keywords, special_idents};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess};
use print::pprust;
use ptr::P;
@ -89,7 +83,6 @@ use owned_slice::OwnedSlice;
use std::collections::HashSet;
use std::io::fs::PathExtensions;
use std::mem::replace;
use std::mem;
use std::num::Float;
use std::rc::Rc;
@ -915,7 +908,7 @@ impl<'a> Parser<'a> {
tok: token::Underscore,
sp: self.span,
};
replace(&mut self.buffer[buffer_start], placeholder)
mem::replace(&mut self.buffer[buffer_start], placeholder)
};
self.span = next.sp;
self.token = next.tok;
@ -924,7 +917,7 @@ impl<'a> Parser<'a> {
/// Advance the parser by one token and return the bumped token.
pub fn bump_and_get(&mut self) -> token::Token {
let old_token = replace(&mut self.token, token::Underscore);
let old_token = mem::replace(&mut self.token, token::Underscore);
self.bump();
old_token
}
@ -2103,14 +2096,12 @@ impl<'a> Parser<'a> {
ExprSlice(expr, start, end, mutbl)
}
pub fn mk_field(&mut self, expr: P<Expr>, ident: ast::SpannedIdent,
tys: Vec<P<Ty>>) -> ast::Expr_ {
ExprField(expr, ident, tys)
pub fn mk_field(&mut self, expr: P<Expr>, ident: ast::SpannedIdent) -> ast::Expr_ {
ExprField(expr, ident)
}
pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>,
tys: Vec<P<Ty>>) -> ast::Expr_ {
ExprTupField(expr, idx, tys)
pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
ExprTupField(expr, idx)
}
pub fn mk_assign_op(&mut self, binop: ast::BinOp,
@ -2465,31 +2456,26 @@ impl<'a> Parser<'a> {
}
let id = spanned(dot, hi, i);
let field = self.mk_field(e, id, tys);
let field = self.mk_field(e, id);
e = self.mk_expr(lo, hi, field);
}
}
}
token::Literal(token::Integer(n), suf) => {
let sp = self.span;
// A tuple index may not have a suffix
self.expect_no_suffix(sp, "tuple index", suf);
let index = n.as_str();
let dot = self.last_span.hi;
hi = self.span.hi;
self.bump();
let (_, tys) = if self.eat(&token::ModSep) {
self.expect_lt();
self.parse_generic_values_after_lt()
} else {
(Vec::new(), Vec::new())
};
let num = from_str::<uint>(index);
match num {
let index = from_str::<uint>(n.as_str());
match index {
Some(n) => {
let id = spanned(dot, hi, n);
let field = self.mk_tup_field(e, id, tys);
let field = self.mk_tup_field(e, id);
e = self.mk_expr(lo, hi, field);
}
None => {
@ -11,31 +11,25 @@
pub use self::AnnNode::*;
use abi;
use ast::{FnUnboxedClosureKind, FnMutUnboxedClosureKind};
use ast::{mod, FnUnboxedClosureKind, FnMutUnboxedClosureKind};
use ast::{FnOnceUnboxedClosureKind};
use ast::{MethodImplItem, RegionTyParamBound, TraitTyParamBound};
use ast::{RequiredMethod, ProvidedMethod, TypeImplItem, TypeTraitItem};
use ast::{UnboxedClosureKind};
use ast;
use ast_util;
use owned_slice::OwnedSlice;
use attr::{AttrMetaMethods, AttributeMethods};
use codemap::{CodeMap, BytePos};
use codemap;
use codemap::{mod, CodeMap, BytePos};
use diagnostic;
use parse::token::{BinOpToken, Token};
use parse::token;
use parse::token::{mod, BinOpToken, Token};
use parse::lexer::comments;
use parse;
use print::pp::{break_offset, word, space, zerobreak, hardbreak};
use print::pp::{mod, break_offset, word, space, zerobreak, hardbreak};
use print::pp::{Breaks, Consistent, Inconsistent, eof};
use print::pp;
use ptr::P;
use std::ascii;
use std::io::IoResult;
use std::io;
use std::mem;
use std::{ascii, mem};
use std::io::{mod, IoResult};
pub enum AnnNode<'a> {
NodeIdent(&'a ast::Ident),
@ -1734,29 +1728,15 @@ impl<'a> State<'a> {
try!(self.word_space("="));
try!(self.print_expr(&**rhs));
}
ast::ExprField(ref expr, id, ref tys) => {
ast::ExprField(ref expr, id) => {
try!(self.print_expr(&**expr));
try!(word(&mut self.s, "."));
try!(self.print_ident(id.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ">"));
}
}
ast::ExprTupField(ref expr, id, ref tys) => {
ast::ExprTupField(ref expr, id) => {
try!(self.print_expr(&**expr));
try!(word(&mut self.s, "."));
try!(self.print_uint(id.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ">"));
}
}
ast::ExprIndex(ref expr, ref index) => {
try!(self.print_expr(&**expr));
@ -2164,21 +2144,22 @@ impl<'a> State<'a> {
try!(self.print_pat(&**p));
}
try!(space(&mut self.s));
match arm.guard {
Some(ref e) => {
try!(self.word_space("if"));
try!(self.print_expr(&**e));
try!(space(&mut self.s));
}
None => ()
if let Some(ref e) = arm.guard {
try!(self.word_space("if"));
try!(self.print_expr(&**e));
try!(space(&mut self.s));
}
try!(self.word_space("=>"));
match arm.body.node {
ast::ExprBlock(ref blk) => {
// the block will close the pattern's ibox
try!(self.print_block_unclosed_indent(&**blk,
indent_unit));
try!(self.print_block_unclosed_indent(&**blk, indent_unit));
// If it is a user-provided unsafe block, print a comma after it
if let ast::UnsafeBlock(ast::UserProvided) = blk.rules {
try!(word(&mut self.s, ","));
}
}
_ => {
try!(self.end()); // close the ibox for the pattern
@ -838,17 +838,11 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_expr(&**right_expression);
visitor.visit_expr(&**left_expression)
}
ExprField(ref subexpression, _, ref types) => {
ExprField(ref subexpression, _) => {
visitor.visit_expr(&**subexpression);
for typ in types.iter() {
visitor.visit_ty(&**typ)
}
}
ExprTupField(ref subexpression, _, ref types) => {
ExprTupField(ref subexpression, _) => {
visitor.visit_expr(&**subexpression);
for typ in types.iter() {
visitor.visit_ty(&**typ)
}
}
ExprIndex(ref main_expression, ref index_expression) => {
visitor.visit_expr(&**main_expression);
@ -533,9 +533,8 @@ fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8> ,String> {
FormatHEX => {
s = s.as_slice()
.to_ascii()
.to_uppercase()
.into_bytes()
.into_iter()
.iter()
.map(|b| b.to_uppercase().as_byte())
.collect();
if flags.alternate {
let s_ = replace(&mut s, vec!(b'0', b'X'));
@ -1,3 +1,12 @@
S 2014-11-21 c9f6d69
freebsd-x86_64 0ef316e7c369177de043e69e964418bd637cbfc0
linux-i386 c8342e762a1720be939ed7c6a39bdaa27892f66f
linux-x86_64 7a7fe6f5ed47b9cc66261f880e166c7c8738b73e
macos-i386 63e8644512bd5665c14389a83d5af564c7c0b103
macos-x86_64 7933ae0e974d1b897806138b7052cb2b4514585f
winnt-i386 94f5e2974e6120945c909753010d73b53cd6ff90
winnt-x86_64 905ffbdd94580854b01dc4e27fdad7e7c8ae18fe
S 2014-11-18 9c96a79
freebsd-x86_64 22c93a289bdbc886af882b5bb76bfa673d46aa4f
linux-i386 999ba4a0dfb70adca628138a7d5f491023621140
@ -0,0 +1,16 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(tuple_indexing)]
fn main() {
let t = (42i, 42i);
t.0::<int>; //~ ERROR expected one of `;`, `}`, found `::`
}
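For contrast, a hedged sketch (not part of the test suite) of tuple-field access that still parses after this change; it assumes the same 2014 `tuple_indexing` feature gate used by the test above, since only the `::<T>` type-parameter form on field access was removed:

#![feature(tuple_indexing)]

fn main() {
    let t = (42i, 7i);
    // Plain positional access is unaffected; only forms like `t.0::<int>` now error.
    assert_eq!(t.0 + t.1, 49i);
}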
@ -30,7 +30,6 @@ pub trait PartialEq for Sized? {
fn eq(&self, other: &Self) -> bool;
}
#[cfg(not(stage0))] // NOTE(stage0): remove cfg after a snapshot
#[unstable = "Trait is unstable."]
impl<'a, Sized? T: PartialEq> PartialEq for &'a T {
#[inline]
@ -0,0 +1,20 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// Testing that unsafe blocks in match arms are followed by a comma
// pp-exact
fn main() {
match true {
true if true => (),
false if false => unsafe { },
true => { }
false => (),
}
}
@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty
//
// exec-env:RUST_MIN_STACK=16000000
//
// Big stack is needed for pretty printing, a little sad...