auto merge of #13877 : thestinger/rust/de-tilde-str-vec, r=alexcrichton
commit 9f836d5a53
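The hunks below all apply the same mechanical rewrite: owned-string literals written with the old `~"..."` sigil become `"...".to_owned()`, and double-referenced literals such as `&"..."` become plain string slices. A minimal sketch of the pattern (hypothetical code, not taken from the diff; the "before" forms use pre-1.0 syntax and are shown only as comments):

// Sketch of the rewrite this commit applies throughout the tree.
fn main() {
    // before: let owned = ~"hello";          (owned string literal)
    let owned = "hello".to_owned();
    // before: let slice: &str = &"world";    (double-referenced literal)
    let slice: &str = "world";
    println!("{} {}", owned, slice);
}
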
@@ -170,7 +170,7 @@ fn parse_compile_flags(line: &str) -> Option<~str> {
}

fn parse_run_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"run-flags")
parse_name_value_directive(line, "run-flags".to_owned())
}

fn parse_debugger_cmd(line: &str) -> Option<~str> {

@@ -698,13 +698,13 @@ static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
/// book_reviews.insert("The Adventures of Sherlock Holmes", "Eye lyked it alot.");
///
/// // check for a specific one.
/// if !book_reviews.contains_key(& &"Les Misérables") {
/// if !book_reviews.contains_key(&("Les Misérables")) {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove(& &"The Adventures of Sherlock Holmes");
/// book_reviews.remove(&("The Adventures of Sherlock Holmes"));
///
/// // look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];

@@ -1651,10 +1651,10 @@ mod test_set {

// FIXME: #5801: this needs a type hint to compile...
let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&5u, & &"bar"));
assert_eq!(result.unwrap(), (&5u, &("bar")));

let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&11u, & &"foo"));
assert_eq!(result.unwrap(), (&11u, &("foo")));

let result: Option<(&uint, & &'static str)> = z.next();
assert!(result.is_none());

@@ -1441,7 +1441,7 @@ mod tests {
optmulti("l", "", "Desc", "VAL"));

let expected =
~"Usage: fruits
"Usage: fruits

Options:
-b --banana VAL Desc

@@ -1450,7 +1450,7 @@ Options:
-k --kiwi Desc
-p [VAL] Desc
-l VAL Desc
";
".to_owned();

let generated_usage = usage("Usage: fruits", optgroups.as_slice());

@@ -1471,13 +1471,13 @@ Options:
"This is a long description which _will_ be wrapped..+.."));

let expected =
~"Usage: fruits
"Usage: fruits

Options:
-k --kiwi This is a long description which won't be wrapped..+..
-a --apple This is a long description which _will_ be
wrapped..+..
";
".to_owned();

let usage = usage("Usage: fruits", optgroups.as_slice());

@@ -1496,14 +1496,14 @@ Options:
confuse the line wrapping; an apple costs 0.51€ in some parts of Europe."));

let expected =
~"Usage: fruits
"Usage: fruits

Options:
-k --k–w– The word kiwi is normally spelled with two i's
-a --apple This “description” has some characters that could
confuse the line wrapping; an apple costs 0.51€ in
some parts of Europe.
";
".to_owned();

let usage = usage("Usage: fruits", optgroups.as_slice());

@@ -220,7 +220,9 @@ impl<'a> Parser<'a> {
try!(self.parse_group_opts())
} else {
self.caps += 1;
self.stack.push(Paren(self.flags, self.caps, ~""))
self.stack.push(Paren(self.flags,
self.caps,
"".to_owned()))
}
}
')' => {

@@ -769,7 +771,7 @@ impl<'a> Parser<'a> {
}
if self.cur() == ':' {
// Save the old flags with the opening paren.
self.stack.push(Paren(self.flags, 0, ~""));
self.stack.push(Paren(self.flags, 0, "".to_owned()));
}
self.flags = flags;
return Ok(())

@@ -116,7 +116,7 @@ impl<'a> NfaGen<'a> {
|cx, name| match name {
&Some(ref name) => {
let name = name.as_slice();
quote_expr!(cx, Some(~$name))
quote_expr!(cx, Some($name.to_owned()))
}
&None => quote_expr!(cx, None),
}

@@ -306,7 +306,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str,
}

::regex::Regex {
original: ~$regex,
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}

@@ -1059,16 +1059,16 @@ pub fn build_session_(sopts: session::Options,

pub fn parse_pretty(sess: &Session, name: &str) -> PpMode {
match name {
&"normal" => PpmNormal,
&"expanded" => PpmExpanded,
&"typed" => PpmTyped,
&"expanded,identified" => PpmExpandedIdentified,
&"identified" => PpmIdentified,
_ => {
sess.fatal("argument to `pretty` must be one of `normal`, \
`expanded`, `typed`, `identified`, \
or `expanded,identified`");
}
"normal" => PpmNormal,
"expanded" => PpmExpanded,
"typed" => PpmTyped,
"expanded,identified" => PpmExpandedIdentified,
"identified" => PpmIdentified,
_ => {
sess.fatal("argument to `pretty` must be one of `normal`, \
`expanded`, `typed`, `identified`, \
or `expanded,identified`");
}
}
}

@@ -300,7 +300,7 @@ pub fn run_compiler(args: &[~str]) {
None::<d::PpMode> => {/* continue */ }
}

if r.contains(&~"ls") {
if r.contains(&("ls".to_owned())) {
match input {
d::FileInput(ref ifile) => {
let mut stdout = io::stdout();

@@ -1036,7 +1036,7 @@ fn check_crate_attrs_usage(cx: &Context, attrs: &[ast::Attribute]) {
if !iter.any(|other_attr| { name.equiv(other_attr) }) {
cx.span_lint(AttributeUsage, attr.span, "unknown crate attribute");
}
if name.equiv(& &"link") {
if name.equiv(&("link")) {
cx.tcx.sess.span_err(attr.span,
"obsolete crate `link` attribute");
cx.tcx.sess.note("the link attribute has been superceded by the crate_id \

@@ -189,9 +189,9 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_rptr(_, ref mt) => {
match ty::get(mt.ty).sty {
ty::ty_vec(ref mt, None) => {
let (name, extra) = (~"slice", Vec::new());
let (name, extra) = ("slice".to_owned(), Vec::new());
let extra = extra.append(self.c_mt(mt).as_slice());
self.visit(~"evec_" + name, extra.as_slice())
self.visit("evec_".to_owned() + name, extra.as_slice())
}
ty::ty_str => self.visit("estr_slice".to_owned(), &[]),
_ => {

@@ -647,8 +647,8 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
ty::ty_vec(mt, None) => {
fcx.type_error_message(pat.span,
|_| {
~"unique vector patterns are no \
longer supported"
"unique vector patterns are no \
longer supported".to_owned()
},
expected,
None);

@@ -2564,70 +2564,74 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
let tcx = fcx.ccx.tcx;
let id = expr.id;
match expr.node {
ast::ExprVstore(ev, vst) => {
let typ = match ev.node {
ast::ExprLit(lit) if ast_util::lit_is_str(lit) => {
ast_expr_vstore_to_ty(fcx, ev, vst, || ty::mt{ ty: ty::mk_str(tcx),
mutbl: ast::MutImmutable })
}
ast::ExprVec(ref args) => {
let mutability = match vst {
ast::ExprVstoreMutSlice => ast::MutMutable,
_ => ast::MutImmutable,
};
let mut any_error = false;
let mut any_bot = false;
let t: ty::t = fcx.infcx().next_ty_var();
for e in args.iter() {
check_expr_has_type(fcx, *e, t);
let arg_t = fcx.expr_ty(*e);
if ty::type_is_error(arg_t) {
any_error = true;
ast::ExprVstore(ev, vst) => {
let typ = match ev.node {
ast::ExprVec(ref args) => {
let mutability = match vst {
ast::ExprVstoreMutSlice => ast::MutMutable,
_ => ast::MutImmutable,
};
let mut any_error = false;
let mut any_bot = false;
let t: ty::t = fcx.infcx().next_ty_var();
for e in args.iter() {
check_expr_has_type(fcx, *e, t);
let arg_t = fcx.expr_ty(*e);
if ty::type_is_error(arg_t) {
any_error = true;
}
else if ty::type_is_bot(arg_t) {
any_bot = true;
}
}
if any_error {
ty::mk_err()
} else if any_bot {
ty::mk_bot()
} else {
ast_expr_vstore_to_ty(fcx, ev, vst, ||
ty::mt{ ty: ty::mk_vec(tcx,
ty::mt {ty: t, mutbl: mutability},
None),
mutbl: mutability })
}
}
else if ty::type_is_bot(arg_t) {
any_bot = true;
ast::ExprRepeat(element, count_expr) => {
check_expr_with_hint(fcx, count_expr, ty::mk_uint());
let _ = ty::eval_repeat_count(fcx, count_expr);
let mutability = match vst {
ast::ExprVstoreMutSlice => ast::MutMutable,
_ => ast::MutImmutable,
};
let t = fcx.infcx().next_ty_var();
check_expr_has_type(fcx, element, t);
let arg_t = fcx.expr_ty(element);
if ty::type_is_error(arg_t) {
ty::mk_err()
} else if ty::type_is_bot(arg_t) {
ty::mk_bot()
} else {
ast_expr_vstore_to_ty(fcx, ev, vst, ||
ty::mt{ ty: ty::mk_vec(tcx,
ty::mt {ty: t, mutbl: mutability},
None),
mutbl: mutability})
}
}
}
if any_error {
ty::mk_err()
} else if any_bot {
ty::mk_bot()
} else {
ast_expr_vstore_to_ty(fcx, ev, vst, ||
ty::mt{ ty: ty::mk_vec(tcx,
ty::mt {ty: t, mutbl: mutability},
None),
mutbl: mutability })
}
}
ast::ExprRepeat(element, count_expr) => {
check_expr_with_hint(fcx, count_expr, ty::mk_uint());
let _ = ty::eval_repeat_count(fcx, count_expr);
let mutability = match vst {
ast::ExprVstoreMutSlice => ast::MutMutable,
_ => ast::MutImmutable,
ast::ExprLit(_) => {
let error = if vst == ast::ExprVstoreSlice {
"`&\"string\"` has been removed; use `\"string\"` instead"
} else {
"`~\"string\"` has been removed; use `\"string\".to_owned()` instead"
};
tcx.sess.span_err(expr.span, error);
ty::mk_err()
}
_ => tcx.sess.span_bug(expr.span, "vstore modifier on non-sequence"),
};
let t = fcx.infcx().next_ty_var();
check_expr_has_type(fcx, element, t);
let arg_t = fcx.expr_ty(element);
if ty::type_is_error(arg_t) {
ty::mk_err()
} else if ty::type_is_bot(arg_t) {
ty::mk_bot()
} else {
ast_expr_vstore_to_ty(fcx, ev, vst, ||
ty::mt{ ty: ty::mk_vec(tcx,
ty::mt {ty: t, mutbl: mutability},
None),
mutbl: mutability})
}
}
_ =>
tcx.sess.span_bug(expr.span, "vstore modifier on non-sequence")
};
fcx.write_ty(ev.id, typ);
fcx.write_ty(id, typ);
}
fcx.write_ty(ev.id, typ);
fcx.write_ty(id, typ);
}

ast::ExprBox(place, subexpr) => {
check_expr(fcx, place);

@@ -1740,7 +1740,7 @@ impl<T: Iterator<char>> Builder<T> {
Some(NumberValue(n)) => { Ok(Number(n)) }
Some(BooleanValue(b)) => { Ok(Boolean(b)) }
Some(StringValue(ref mut s)) => {
let mut temp = ~"";
let mut temp = "".to_owned();
swap(s, &mut temp);
Ok(String(temp))
}

@@ -2633,16 +2633,16 @@ mod tests {
assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));

assert_eq!(from_str("\"\""), Ok(String(~"")));
assert_eq!(from_str("\"foo\""), Ok(String(~"foo")));
assert_eq!(from_str("\"\\\"\""), Ok(String(~"\"")));
assert_eq!(from_str("\"\\b\""), Ok(String(~"\x08")));
assert_eq!(from_str("\"\\n\""), Ok(String(~"\n")));
assert_eq!(from_str("\"\\r\""), Ok(String(~"\r")));
assert_eq!(from_str("\"\\t\""), Ok(String(~"\t")));
assert_eq!(from_str(" \"foo\" "), Ok(String(~"foo")));
assert_eq!(from_str("\"\\u12ab\""), Ok(String(~"\u12ab")));
assert_eq!(from_str("\"\\uAB12\""), Ok(String(~"\uAB12")));
assert_eq!(from_str("\"\""), Ok(String("".to_owned())));
assert_eq!(from_str("\"foo\""), Ok(String("foo".to_owned())));
assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_owned())));
assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_owned())));
assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_owned())));
assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_owned())));
assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_owned())));
assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_owned())));
assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u12ab".to_owned())));
assert_eq!(from_str("\"\\uAB12\""), Ok(String("\uAB12".to_owned())));
}

#[test]

@@ -2890,7 +2890,7 @@ mod tests {
fn test_find(){
let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
let found_str = json_value.find(&"dog".to_owned());
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == &"cat");
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cat");
}

#[test]

@@ -2898,7 +2898,7 @@ mod tests {
let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
let found_str = json_value.find_path(&[&"dog".to_owned(),
&"cat".to_owned(), &"mouse".to_owned()]);
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == &"cheese");
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cheese");
}

#[test]

@@ -2906,7 +2906,7 @@ mod tests {
let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
let found_str = json_value.search(&"mouse".to_owned()).and_then(|j| j.as_string());
assert!(found_str.is_some());
assert!(found_str.unwrap() == &"cheese");
assert!(found_str.unwrap() == "cheese");
}

#[test]

@@ -2946,7 +2946,7 @@ mod tests {
fn test_as_string(){
let json_value = from_str("\"dog\"").unwrap();
let json_str = json_value.as_string();
let expected_str = &"dog";
let expected_str = "dog";
assert_eq!(json_str, Some(expected_str));
}

@@ -3067,7 +3067,7 @@ mod tests {
r#"{ "foo":"bar", "array" : [0, 1, 2,3 ,4,5], "idents":[null,true,false]}"#,
~[
(ObjectStart, ~[]),
(StringValue(~"bar"), ~[Key("foo")]),
(StringValue("bar".to_owned()), ~[Key("foo")]),
(ListStart, ~[Key("array")]),
(NumberValue(0.0), ~[Key("array"), Index(0)]),
(NumberValue(1.0), ~[Key("array"), Index(1)]),

@@ -3155,7 +3155,7 @@ mod tests {
(NumberValue(1.0), ~[Key("a")]),
(ListStart, ~[Key("b")]),
(BooleanValue(true), ~[Key("b"), Index(0)]),
(StringValue(~"foo\nbar"), ~[Key("b"), Index(1)]),
(StringValue("foo\nbar".to_owned()), ~[Key("b"), Index(1)]),
(ObjectStart, ~[Key("b"), Index(2)]),
(ObjectStart, ~[Key("b"), Index(2), Key("c")]),
(NullValue, ~[Key("b"), Index(2), Key("c"), Key("d")]),

@@ -3287,7 +3287,7 @@ mod tests {
assert!(stack.last_is_index());
assert!(stack.get(0) == Index(1));

stack.push_key(~"foo");
stack.push_key("foo".to_owned());

assert!(stack.len() == 2);
assert!(stack.is_equal_to([Index(1), Key("foo")]));

@@ -3299,7 +3299,7 @@ mod tests {
assert!(stack.get(0) == Index(1));
assert!(stack.get(1) == Key("foo"));

stack.push_key(~"bar");
stack.push_key("bar".to_owned());

assert!(stack.len() == 3);
assert!(stack.is_equal_to([Index(1), Key("foo"), Key("bar")]));

@@ -3363,7 +3363,7 @@ mod tests {
}

fn big_json() -> ~str {
let mut src = ~"[\n";
let mut src = "[\n".to_owned();
for _ in range(0, 500) {
src = src + r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": [1,2,3]},"#;
}

@@ -62,7 +62,7 @@
//! let mut flags = FlagA | FlagB;
//! flags.clear();
//! assert!(flags.is_empty());
//! assert_eq!(format!("{}", flags), ~"hi!");
//! assert_eq!(format!("{}", flags).as_slice(), "hi!");
//! }
//! ~~~
//!

@@ -343,7 +343,7 @@ mod tests {

assert_eq!(hasher.hash(&'a'), 97);

assert_eq!(hasher.hash(& &"a"), 97 + 0xFF);
assert_eq!(hasher.hash(&("a")), 97 + 0xFF);
assert_eq!(hasher.hash(& &[1u8, 2u8, 3u8]), 9);

unsafe {

@@ -555,7 +555,7 @@ mod tests {
($path:expr, $disp:ident, $exp:expr) => (
{
let path = Path::new($path);
assert!(path.$disp().to_str() == ~$exp);
assert!(path.$disp().to_str().as_slice() == $exp);
}
)
)

@@ -637,7 +637,7 @@ fn test_repr() {
exact_test(&true, "true");
exact_test(&false, "false");
exact_test(&1.234, "1.234f64");
exact_test(&(&"hello"), "\"hello\"");
exact_test(&("hello"), "\"hello\"");
// FIXME What do I do about this one?
exact_test(&("he\u10f3llo".to_owned()), "~\"he\\u10f3llo\"");

@@ -2025,12 +2025,12 @@ pub trait StrSlice<'a> {
/// # Example
///
/// ```rust
/// let s = ~"Do you know the muffin man,
/// The muffin man, the muffin man, ...";
/// let s = "Do you know the muffin man,
/// The muffin man, the muffin man, ...".to_owned();
///
/// assert_eq!(s.replace("muffin man", "little lamb"),
/// ~"Do you know the little lamb,
/// The little lamb, the little lamb, ...");
/// "Do you know the little lamb,
/// The little lamb, the little lamb, ...".to_owned());
///
/// // not found, so no change.
/// assert_eq!(s.replace("cookie monster", "little lamb"), s);

@@ -3604,11 +3604,11 @@ mod tests {

#[test]
fn test_total_ord() {
"1234".cmp(& &"123") == Greater;
"123".cmp(& &"1234") == Less;
"1234".cmp(& &"1234") == Equal;
"12345555".cmp(& &"123456") == Less;
"22".cmp(& &"1234") == Greater;
"1234".cmp(&("123")) == Greater;
"123".cmp(&("1234")) == Less;
"1234".cmp(&("1234")) == Equal;
"12345555".cmp(&("123456")) == Less;
"22".cmp(&("1234")) == Greater;
}

#[test]

@@ -4005,7 +4005,7 @@ mod tests {

#[test]
fn test_from_str() {
let owned: Option<~str> = from_str(&"string");
let owned: Option<~str> = from_str("string");
assert_eq!(owned, Some("string".to_owned()));
}

@@ -242,7 +242,7 @@ impl<T: Clone> Vec<T> {
///
/// ```rust
/// let mut vec = vec!("hello");
/// vec.grow(2, & &"world");
/// vec.grow(2, &("world"));
/// assert_eq!(vec, vec!("hello", "world", "world"));
/// ```
pub fn grow(&mut self, n: uint, value: &T) {

@@ -267,8 +267,8 @@ impl<T: Clone> Vec<T> {
///
/// ```rust
/// let mut vec = vec!("a", "b", "c");
/// vec.grow_set(1, & &"fill", "d");
/// vec.grow_set(4, & &"fill", "e");
/// vec.grow_set(1, &("fill"), "d");
/// vec.grow_set(4, &("fill"), "e");
/// assert_eq!(vec, vec!("a", "d", "c", "fill", "e"));
/// ```
pub fn grow_set(&mut self, index: uint, initval: &T, value: T) {

@@ -270,22 +270,22 @@ pub fn syntax_expander_table() -> SyntaxEnv {
}

let mut syntax_expanders = SyntaxEnv::new();
syntax_expanders.insert(intern(&"macro_rules"),
syntax_expanders.insert(intern("macro_rules"),
IdentTT(~BasicIdentMacroExpander {
expander: ext::tt::macro_rules::add_new_extension,
span: None,
},
None));
syntax_expanders.insert(intern(&"fmt"),
syntax_expanders.insert(intern("fmt"),
builtin_normal_expander(
ext::fmt::expand_syntax_ext));
syntax_expanders.insert(intern(&"format_args"),
syntax_expanders.insert(intern("format_args"),
builtin_normal_expander(
ext::format::expand_args));
syntax_expanders.insert(intern(&"env"),
syntax_expanders.insert(intern("env"),
builtin_normal_expander(
ext::env::expand_env));
syntax_expanders.insert(intern(&"option_env"),
syntax_expanders.insert(intern("option_env"),
builtin_normal_expander(
ext::env::expand_option_env));
syntax_expanders.insert(intern("bytes"),

@@ -297,63 +297,63 @@ pub fn syntax_expander_table() -> SyntaxEnv {
syntax_expanders.insert(intern("concat"),
builtin_normal_expander(
ext::concat::expand_syntax_ext));
syntax_expanders.insert(intern(&"log_syntax"),
syntax_expanders.insert(intern("log_syntax"),
builtin_normal_expander(
ext::log_syntax::expand_syntax_ext));
syntax_expanders.insert(intern(&"deriving"),
syntax_expanders.insert(intern("deriving"),
ItemDecorator(ext::deriving::expand_meta_deriving));

// Quasi-quoting expanders
syntax_expanders.insert(intern(&"quote_tokens"),
syntax_expanders.insert(intern("quote_tokens"),
builtin_normal_expander(
ext::quote::expand_quote_tokens));
syntax_expanders.insert(intern(&"quote_expr"),
syntax_expanders.insert(intern("quote_expr"),
builtin_normal_expander(
ext::quote::expand_quote_expr));
syntax_expanders.insert(intern(&"quote_ty"),
syntax_expanders.insert(intern("quote_ty"),
builtin_normal_expander(
ext::quote::expand_quote_ty));
syntax_expanders.insert(intern(&"quote_item"),
syntax_expanders.insert(intern("quote_item"),
builtin_normal_expander(
ext::quote::expand_quote_item));
syntax_expanders.insert(intern(&"quote_pat"),
syntax_expanders.insert(intern("quote_pat"),
builtin_normal_expander(
ext::quote::expand_quote_pat));
syntax_expanders.insert(intern(&"quote_stmt"),
syntax_expanders.insert(intern("quote_stmt"),
builtin_normal_expander(
ext::quote::expand_quote_stmt));

syntax_expanders.insert(intern(&"line"),
syntax_expanders.insert(intern("line"),
builtin_normal_expander(
ext::source_util::expand_line));
syntax_expanders.insert(intern(&"col"),
syntax_expanders.insert(intern("col"),
builtin_normal_expander(
ext::source_util::expand_col));
syntax_expanders.insert(intern(&"file"),
syntax_expanders.insert(intern("file"),
builtin_normal_expander(
ext::source_util::expand_file));
syntax_expanders.insert(intern(&"stringify"),
syntax_expanders.insert(intern("stringify"),
builtin_normal_expander(
ext::source_util::expand_stringify));
syntax_expanders.insert(intern(&"include"),
syntax_expanders.insert(intern("include"),
builtin_normal_expander(
ext::source_util::expand_include));
syntax_expanders.insert(intern(&"include_str"),
syntax_expanders.insert(intern("include_str"),
builtin_normal_expander(
ext::source_util::expand_include_str));
syntax_expanders.insert(intern(&"include_bin"),
syntax_expanders.insert(intern("include_bin"),
builtin_normal_expander(
ext::source_util::expand_include_bin));
syntax_expanders.insert(intern(&"module_path"),
syntax_expanders.insert(intern("module_path"),
builtin_normal_expander(
ext::source_util::expand_mod));
syntax_expanders.insert(intern(&"asm"),
syntax_expanders.insert(intern("asm"),
builtin_normal_expander(
ext::asm::expand_asm));
syntax_expanders.insert(intern(&"cfg"),
syntax_expanders.insert(intern("cfg"),
builtin_normal_expander(
ext::cfg::expand_cfg));
syntax_expanders.insert(intern(&"trace_macros"),
syntax_expanders.insert(intern("trace_macros"),
builtin_normal_expander(
ext::trace_macros::expand_trace_macros));
syntax_expanders

@@ -2779,28 +2779,11 @@ impl<'a> Parser<'a> {
}
}
token::BINOP(token::AND) | token::ANDAND => {
// parse &pat
let lo = self.span.lo;
self.expect_and();
let sub = self.parse_pat();
hi = sub.span.hi;
// HACK: parse &"..." as a literal of a borrowed str
pat = match sub.node {
PatLit(e) => {
match e.node {
ExprLit(lit) if lit_is_str(lit) => {
let vst = @Expr {
id: ast::DUMMY_NODE_ID,
node: ExprVstore(e, ExprVstoreSlice),
span: mk_sp(lo, hi)
};
PatLit(vst)
}
_ => PatRegion(sub),
}
}
_ => PatRegion(sub),
};
// parse &pat
let lo = self.span.lo;
self.expect_and();
let sub = self.parse_pat();
pat = PatRegion(sub);
hi = self.last_span.hi;
return @ast::Pat {
id: ast::DUMMY_NODE_ID,

@@ -534,7 +534,7 @@ impl<T: Writer> ConsoleTestState<T> {

pub fn write_run_start(&mut self, len: uint) -> io::IoResult<()> {
self.total = len;
let noun = if len != 1 { &"tests" } else { &"test" };
let noun = if len != 1 { "tests" } else { "test" };
self.write_plain(format!("\nrunning {} {}\n", len, noun))
}

@@ -185,7 +185,7 @@ fn encode_inner(s: &str, full_url: bool) -> ~str {
 * ```rust
 * use url::encode;
 *
 * let url = encode(&"https://example.com/Rust (programming language)");
 * let url = encode("https://example.com/Rust (programming language)");
 * println!("{}", url); // https://example.com/Rust%20(programming%20language)
 * ```
 */

@@ -260,7 +260,7 @@ fn decode_inner(s: &str, full_url: bool) -> ~str {
 * ```rust
 * use url::decode;
 *
 * let url = decode(&"https://example.com/Rust%20(programming%20language)");
 * let url = decode("https://example.com/Rust%20(programming%20language)");
 * println!("{}", url); // https://example.com/Rust (programming language)
 * ```
 */

@@ -30,7 +30,7 @@ impl Drop for S {
}

fn move_in_match() {
match S {f:~"foo", g:~"bar"} {
match S {f: "foo".to_owned(), g: "bar".to_owned()} {
S { //~ ERROR cannot move out of type `S`, which defines the `Drop` trait
f: _s, //~ NOTE attempting to move value to here
g: _t //~ NOTE and here

@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

static a: &'static str = &"foo";
static a: &'static str = "foo";
static b: *u8 = a as *u8; //~ ERROR non-scalar cast
static c: *u8 = &a as *u8; //~ ERROR mismatched types

@@ -33,7 +33,7 @@ pub fn main() {
(&[1]).test_imm();
("test").test_imm();
("test".to_owned()).test_imm();
(&"test").test_imm();
("test").test_imm();

// FIXME: Other types of mutable vecs don't currently exist

@@ -10,9 +10,9 @@

pub fn main() {
let x = &"hello";
let v = &"hello";
let y : &str = &"there";
let x = "hello";
let v = "hello";
let y : &str = "there";

println!("{}", x);
println!("{}", y);

@@ -20,15 +20,15 @@ pub fn main() {
assert_eq!(x[0], 'h' as u8);
assert_eq!(x[4], 'o' as u8);

let z : &str = &"thing";
let z : &str = "thing";
assert_eq!(v, x);
assert!(x != z);

let a = &"aaaa";
let b = &"bbbb";
let a = "aaaa";
let b = "bbbb";

let c = &"cccc";
let cc = &"ccccc";
let c = "cccc";
let cc = "ccccc";

println!("{}", a);

@@ -16,7 +16,7 @@ fn perform_hax<T: 'static>(x: ~T) -> ~hax: {
}

fn deadcode() {
perform_hax(~~"deadcode");
perform_hax(~"deadcode".to_owned());
}

pub fn main() {

@@ -16,7 +16,7 @@ fn perform_hax<T: 'static>(x: ~T) -> ~hax: {
}

fn deadcode() {
perform_hax(~~"deadcode");
perform_hax(~"deadcode".to_owned());
}

pub fn main() {

@@ -44,15 +44,15 @@ fn g2(ref_1: &str, ref_2: &str) -> ~str {

pub fn main() {
assert_eq!(f1("b".to_owned()), "found b".to_owned());
assert_eq!(f1(&"c"), "not found".to_owned());
assert_eq!(f1("c"), "not found".to_owned());
assert_eq!(f1("d"), "not found".to_owned());
assert_eq!(f2("b".to_owned()), "found b".to_owned());
assert_eq!(f2(&"c"), "not found (c)".to_owned());
assert_eq!(f2("c"), "not found (c)".to_owned());
assert_eq!(f2("d"), "not found (d)".to_owned());
assert_eq!(g1("b".to_owned(), "c".to_owned()), "found b,c".to_owned());
assert_eq!(g1(&"c", &"d"), "not found".to_owned());
assert_eq!(g1("c", "d"), "not found".to_owned());
assert_eq!(g1("d", "e"), "not found".to_owned());
assert_eq!(g2("b".to_owned(), "c".to_owned()), "found b,c".to_owned());
assert_eq!(g2(&"c", &"d"), "not found (c, d)".to_owned());
assert_eq!(g2("c", "d"), "not found (c, d)".to_owned());
assert_eq!(g2("d", "e"), "not found (d, e)".to_owned());
}

@@ -22,8 +22,8 @@ macro_rules! check {
static S: $t = $e;
let v: $t = $e;
assert_eq!(S, v);
assert_eq!(format!("{:?}", v), ~$s);
assert_eq!(format!("{:?}", S), ~$s);
assert_eq!(format!("{:?}", v).as_slice(), $s);
assert_eq!(format!("{:?}", S).as_slice(), $s);
});*
}}
}