Remove unnecessary allocations flagged by lint
parent 363e672736
commit 8f80323f09
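Every hunk below applies the same change: string and vector literals that were being boxed into owned values (~"...", ~[...]) purely so they could be concatenated or appended are turned into borrowed slices, which + and += accept directly, so the temporary allocation the lint flags disappears. A minimal before/after sketch in the pre-1.0 Rust dialect of this tree (the wrapper function names are illustrative, not from the patch; the string expression is lifted from the getopts hunk):

    // Before: the right-hand literal is boxed into an owned ~str temporary,
    // so the `+` allocates a buffer only to copy it and free it again.
    fn fail_msg_before(opt: &str) -> ~str {
        ~"Argument to option '" + opt + ~"' missing."
    }

    // After: only the leftmost operand needs to own the result buffer; the
    // borrowed literal on the right is appended in place. The same applies to
    // `+=` on strings (`rs += "1";`) and to vector appends (`attrs += [attr];`).
    fn fail_msg_after(opt: &str) -> ~str {
        ~"Argument to option '" + opt + "' missing."
    }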
@@ -462,7 +462,7 @@ pub impl Bitv {
 */
 fn to_str(&self) -> ~str {
 let mut rs = ~"";
-for self.each() |i| { if i { rs += ~"1"; } else { rs += ~"0"; } };
+for self.each() |i| { if i { rs += "1"; } else { rs += "0"; } };
 rs
 }
 
@@ -193,19 +193,19 @@ pub enum Fail_ {
 pub fn fail_str(f: Fail_) -> ~str {
 return match f {
 ArgumentMissing(ref nm) => {
-~"Argument to option '" + *nm + ~"' missing."
+~"Argument to option '" + *nm + "' missing."
 }
 UnrecognizedOption(ref nm) => {
-~"Unrecognized option: '" + *nm + ~"'."
+~"Unrecognized option: '" + *nm + "'."
 }
 OptionMissing(ref nm) => {
-~"Required option '" + *nm + ~"' missing."
+~"Required option '" + *nm + "' missing."
 }
 OptionDuplicated(ref nm) => {
-~"Option '" + *nm + ~"' given more than once."
+~"Option '" + *nm + "' given more than once."
 }
 UnexpectedArgument(ref nm) => {
-~"Option " + *nm + ~" does not take an argument."
+~"Option " + *nm + " does not take an argument."
 }
 };
 }
@@ -618,7 +618,7 @@ pub mod groups {
 row += match hasarg {
 No => ~"",
 Yes => hint,
-Maybe => ~"[" + hint + ~"]",
+Maybe => ~"[" + hint + "]",
 };
 
 // FIXME: #5516
@@ -650,10 +650,10 @@ pub mod groups {
 row
 });
 
 return str::to_owned(brief) +
-~"\n\nOptions:\n" +
+"\n\nOptions:\n" +
 str::connect(rows, "\n") +
-~"\n\n";
+"\n\n";
 }
 } // end groups module
 
@@ -47,18 +47,18 @@ fn escape_str(s: &str) -> ~str {
 let mut escaped = ~"\"";
 for str::each_char(s) |c| {
 match c {
-'"' => escaped += ~"\\\"",
-'\\' => escaped += ~"\\\\",
-'\x08' => escaped += ~"\\b",
-'\x0c' => escaped += ~"\\f",
-'\n' => escaped += ~"\\n",
-'\r' => escaped += ~"\\r",
-'\t' => escaped += ~"\\t",
+'"' => escaped += "\\\"",
+'\\' => escaped += "\\\\",
+'\x08' => escaped += "\\b",
+'\x0c' => escaped += "\\f",
+'\n' => escaped += "\\n",
+'\r' => escaped += "\\r",
+'\t' => escaped += "\\t",
 _ => escaped += str::from_char(c)
 }
 };
 
-escaped += ~"\"";
+escaped += "\"";
 
 escaped
 }
@@ -115,7 +115,7 @@ pub fn md4_str(msg: &[u8]) -> ~str {
 let mut i = 0u32;
 while i < 4u32 {
 let byte = (u >> (i * 8u32)) as u8;
-if byte <= 16u8 { result += ~"0"; }
+if byte <= 16u8 { result += "0"; }
 result += uint::to_str_radix(byte as uint, 16u);
 i += 1u32;
 }
@@ -22,42 +22,42 @@ pub fn get_target_strs(target_os: session::os) -> target_strs::t {
 data_layout: match target_os {
 session::os_macos => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_win32 => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_linux => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_android => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_freebsd => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 },
 
@@ -54,7 +54,7 @@ pub fn llvm_err(sess: Session, msg: ~str) -> ! {
 if cstr == ptr::null() {
 sess.fatal(msg);
 } else {
-sess.fatal(msg + ~": " + str::raw::from_c_str(cstr));
+sess.fatal(msg + ": " + str::raw::from_c_str(cstr));
 }
 }
 }
@@ -653,13 +653,13 @@ pub fn sanitize(s: &str) -> ~str {
 let mut result = ~"";
 for str::each_char(s) |c| {
 match c {
-'@' => result += ~"_sbox_",
-'~' => result += ~"_ubox_",
-'*' => result += ~"_ptr_",
-'&' => result += ~"_ref_",
-',' => result += ~"_",
+'@' => result += "_sbox_",
+'~' => result += "_ubox_",
+'*' => result += "_ptr_",
+'&' => result += "_ref_",
+',' => result += "_",
 
-'{' | '(' => result += ~"_of_",
+'{' | '(' => result += "_of_",
 'a' .. 'z'
 | 'A' .. 'Z'
 | '0' .. '9'
@@ -693,7 +693,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
 n += fmt!("%u%s", str::len(sani), sani);
 } }
 }
-n += ~"E"; // End name-sequence.
+n += "E"; // End name-sequence.
 n
 }
 
@@ -22,42 +22,42 @@ pub fn get_target_strs(target_os: session::os) -> target_strs::t {
 data_layout: match target_os {
 session::os_macos => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_win32 => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_linux => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_android => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 
 session::os_freebsd => {
 ~"e-p:32:32:32" +
-~"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
-~"-f32:32:32-f64:64:64" +
-~"-v64:64:64-v128:64:128" +
-~"-a0:0:64-n32"
+"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
+"-f32:32:32-f64:64:64" +
+"-v64:64:64-v128:64:128" +
+"-a0:0:64-n32"
 }
 },
 
@@ -23,9 +23,9 @@ pub fn get_target_strs(target_os: session::os) -> target_strs::t {
 data_layout: match target_os {
 session::os_macos => {
 ~"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16" +
-~"-i32:32:32-i64:32:64" +
-~"-f32:32:32-f64:32:64-v64:64:64" +
-~"-v128:128:128-a0:0:64-f80:128:128" + ~"-n8:16:32"
+"-i32:32:32-i64:32:64" +
+"-f32:32:32-f64:32:64-v64:64:64" +
+"-v128:128:128-a0:0:64-f80:128:128" + "-n8:16:32"
 }
 
 session::os_win32 => {
@@ -23,32 +23,32 @@ pub fn get_target_strs(target_os: session::os) -> target_strs::t {
 data_layout: match target_os {
 session::os_macos => {
 ~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
-~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
-~"s0:64:64-f80:128:128-n8:16:32:64"
+"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
+"s0:64:64-f80:128:128-n8:16:32:64"
 }
 
 session::os_win32 => {
 // FIXME: Test this. Copied from linux (#2398)
 ~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
-~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
-~"s0:64:64-f80:128:128-n8:16:32:64-S128"
+"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
+"s0:64:64-f80:128:128-n8:16:32:64-S128"
 }
 
 session::os_linux => {
 ~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
-~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
-~"s0:64:64-f80:128:128-n8:16:32:64-S128"
+"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
+"s0:64:64-f80:128:128-n8:16:32:64-S128"
 }
 session::os_android => {
 ~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
-~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
-~"s0:64:64-f80:128:128-n8:16:32:64-S128"
+"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
+"s0:64:64-f80:128:128-n8:16:32:64-S128"
 }
 
 session::os_freebsd => {
 ~"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-"+
-~"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
-~"s0:64:64-f80:128:128-n8:16:32:64-S128"
+"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
+"s0:64:64-f80:128:128-n8:16:32:64-S128"
 }
 },
 
@@ -170,8 +170,8 @@ fn visit_item(e: @mut Env, i: @ast::item) {
 if *nn == ~"" {
 e.diag.span_fatal(
 i.span,
-~"empty #[link_name] not allowed; use " +
-~"#[nolink].");
+"empty #[link_name] not allowed; use \
+#[nolink].");
 }
 nn
 }
@@ -183,7 +183,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
 }
 if !link_args.is_empty() && already_added {
 e.diag.span_fatal(i.span, ~"library '" + *foreign_name +
-~"' already added: can't specify link_args.");
+"' already added: can't specify link_args.");
 }
 }
 ast::anonymous => { /* do nothing */ }
@@ -528,7 +528,7 @@ pub fn _each_path(intr: @ident_interner,
 if path_is_empty {
 reexport_path = reexport_name;
 } else {
-reexport_path = path + ~"::" + reexport_name;
+reexport_path = path + "::" + reexport_name;
 }
 
 // This reexport may be in yet another crate
@@ -922,7 +922,7 @@ fn encode_info_for_item(ecx: @EncodeContext,
 
 // >:-<
 let mut impl_path = vec::append(~[], path);
-impl_path += ~[ast_map::path_name(item.ident)];
+impl_path += [ast_map::path_name(item.ident)];
 
 for methods.each |m| {
 index.push(entry {val: m.id, pos: ebml_w.writer.tell()});
@@ -81,7 +81,7 @@ fn find_library_crate_aux(
 filesearch: @filesearch::FileSearch
 ) -> Option<(~str, @~[u8])> {
 let crate_name = crate_name_from_metas(cx.metas);
-let prefix: ~str = prefix + *crate_name + ~"-";
+let prefix: ~str = prefix + *crate_name + "-";
 let suffix: ~str = /*bad*/copy suffix;
 
 let mut matches = ~[];
@@ -262,7 +262,7 @@ pub fn list_file_metadata(intr: @ident_interner,
 option::Some(bytes) => decoder::list_crate_metadata(intr, bytes, out),
 option::None => {
 out.write_str(~"could not find metadata in "
-+ path.to_str() + ~".\n");
++ path.to_str() + ".\n");
 }
 }
 }
@@ -89,8 +89,8 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
 let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
 if abbrev_len < len {
 // I.e. it's actually an abbreviation.
-let s = ~"#" + uint::to_str_radix(pos, 16u) + ~":" +
-uint::to_str_radix(len, 16u) + ~"#";
+let s = ~"#" + uint::to_str_radix(pos, 16u) + ":" +
+uint::to_str_radix(len, 16u) + "#";
 let a = ty_abbrev { pos: pos, len: len, s: @s };
 abbrevs.insert(t, a);
 }
@@ -110,7 +110,7 @@ pub fn check_expr(sess: Session,
 if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) {
 sess.span_err(e.span, ~"can not cast to `" +
 ppaux::ty_to_str(tcx, ety) +
-~"` in a constant expression");
+"` in a constant expression");
 }
 }
 expr_path(pth) => {
@@ -171,7 +171,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
 }
 };
 let msg = ~"non-exhaustive patterns" + match ext {
-Some(ref s) => ~": " + **s + ~" not covered",
+Some(ref s) => ~": " + **s + " not covered",
 None => ~""
 };
 cx.tcx.sess.span_err(sp, msg);
@@ -90,7 +90,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
 
 let mut clobbers = getClobbers();
 if *ia.clobbers != ~"" && clobbers != ~"" {
-clobbers = *ia.clobbers + ~"," + clobbers;
+clobbers = *ia.clobbers + "," + clobbers;
 } else {
 clobbers += *ia.clobbers;
 };
@@ -1984,7 +1984,7 @@ pub fn trans_enum_variant(ccx: @CrateContext,
 
 debug!("trans_enum_variant: name=%s tps=%s repr=%? enum_ty=%s",
 unsafe { str::raw::from_c_str(llvm::LLVMGetValueName(llfndecl)) },
-~"[" + str::connect(ty_param_substs.map(|&t| ty_to_str(ccx.tcx, t)), ", ") + ~"]",
+~"[" + str::connect(ty_param_substs.map(|&t| ty_to_str(ccx.tcx, t)), ", ") + "]",
 repr, ty_to_str(ccx.tcx, enum_ty));
 
 adt::trans_start_init(bcx, repr, fcx.llretptr.get(), disr);
@@ -2901,7 +2901,7 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
 let cstore = sess.cstore;
 while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; }
 let mapname = if *sess.building_library {
-mapmeta.name.to_owned() + ~"_" + mapmeta.vers.to_owned() + ~"_"
+mapmeta.name.to_owned() + "_" + mapmeta.vers.to_owned() + "_"
 + mapmeta.extras_hash.to_owned()
 } else {
 ~"toplevel"
@@ -2925,8 +2925,8 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
 while cstore::have_crate_data(cstore, i) {
 let cdata = cstore::get_crate_data(cstore, i);
 let nm = ~"_rust_crate_map_" + *cdata.name +
-~"_" + *cstore::get_crate_vers(cstore, i) +
-~"_" + *cstore::get_crate_hash(cstore, i);
+"_" + *cstore::get_crate_vers(cstore, i) +
+"_" + *cstore::get_crate_hash(cstore, i);
 let cr = str::as_c_str(nm, |buf| {
 unsafe {
 llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
@@ -3035,7 +3035,7 @@ pub fn trans_crate(sess: session::Session,
 // crashes if the module identifer is same as other symbols
 // such as a function name in the module.
 // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
-let llmod_id = link_meta.name.to_owned() + ~".rc";
+let llmod_id = link_meta.name.to_owned() + ".rc";
 
 unsafe {
 let llmod = str::as_c_str(llmod_id, |buf| {
@@ -65,12 +65,12 @@ pub fn count_insn(cx: block, category: &str) {
 i = 0u;
 while i < len {
 i = *mm.get(&v[i]);
-s += ~"/";
+s += "/";
 s += v[i];
 i += 1u;
 }
 
-s += ~"/";
+s += "/";
 s += category;
 
 let n = match h.find(&s) {
@@ -356,7 +356,7 @@ pub type fn_ctxt = @mut fn_ctxt_;
 pub fn warn_not_to_commit(ccx: @CrateContext, msg: &str) {
 if !*ccx.do_not_commit_warning_issued {
 *ccx.do_not_commit_warning_issued = true;
-ccx.sess.warn(msg.to_str() + ~" -- do not commit like this!");
+ccx.sess.warn(msg.to_str() + " -- do not commit like this!");
 }
 }
 
@@ -1420,7 +1420,7 @@ pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
 match *e {
 ast_map::path_name(s) | ast_map::path_mod(s) => {
 if first { first = false; }
-else { r += ~"::"; }
+else { r += "::"; }
 r += *sess.str_of(s);
 }
 }
@@ -402,7 +402,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
 let lname = link_name(ccx, foreign_item);
 let llbasefn = base_fn(ccx, *lname, tys, cc);
 // Name the shim function
-let shim_name = *lname + ~"__c_stack_shim";
+let shim_name = *lname + "__c_stack_shim";
 build_shim_fn_(ccx,
 shim_name,
 llbasefn,
@@ -279,7 +279,7 @@ pub impl Reflector {
 let opaqueptrty = ty::mk_ptr(ccx.tcx, ty::mt { ty: opaquety, mutbl: ast::m_imm });
 
 let make_get_disr = || {
-let sub_path = bcx.fcx.path + ~[path_name(special_idents::anon)];
+let sub_path = bcx.fcx.path + [path_name(special_idents::anon)];
 let sym = mangle_internal_name_by_path_and_seq(ccx,
 sub_path,
 "get_disr");
@@ -3354,8 +3354,8 @@ pub fn occurs_check(tcx: ctxt, sp: span, vid: TyVid, rt: t) {
 (sp, ~"type inference failed because I \
 could not find a type\n that's both of the form "
 + ::util::ppaux::ty_to_str(tcx, mk_var(tcx, vid)) +
-~" and of the form " + ::util::ppaux::ty_to_str(tcx, rt) +
-~" - such a type would have to be infinitely large.");
+" and of the form " + ::util::ppaux::ty_to_str(tcx, rt) +
+" - such a type would have to be infinitely large.");
 }
 }
 
@@ -1870,7 +1870,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
 let (_, seen) = *class_field_map.get(&name);
 if !seen {
 missing_fields.push(
-~"`" + *tcx.sess.str_of(name) + ~"`");
+~"`" + *tcx.sess.str_of(name) + "`");
 }
 }
 
@@ -3669,7 +3669,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
 ref other => {
 tcx.sess.span_err(it.span,
 ~"unrecognized intrinsic function: `" +
-(*other) + ~"`");
+(*other) + "`");
 return;
 }
 };
@@ -256,7 +256,7 @@ pub fn require_same_types(
 match infer::mk_eqty(l_infcx, t1_is_expected, span, t1, t2) {
 result::Ok(()) => true,
 result::Err(ref terr) => {
-l_tcx.sess.span_err(span, msg() + ~": " +
+l_tcx.sess.span_err(span, msg() + ": " +
 ty::type_err_to_str(l_tcx, terr));
 ty::note_and_explain_type_err(l_tcx, terr);
 false
@@ -323,7 +323,7 @@ fn check_main_fn_ty(ccx: @mut CrateCtxt,
 _ => {
 tcx.sess.span_bug(main_span,
 ~"main has a non-function type: found `" +
-ppaux::ty_to_str(tcx, main_t) + ~"`");
+ppaux::ty_to_str(tcx, main_t) + "`");
 }
 }
 }
@@ -372,7 +372,7 @@ fn check_start_fn_ty(ccx: @mut CrateCtxt,
 _ => {
 tcx.sess.span_bug(start_span,
 ~"start has a non-function type: found `" +
-ppaux::ty_to_str(tcx, start_t) + ~"`");
+ppaux::ty_to_str(tcx, start_t) + "`");
 }
 }
 }
@@ -382,10 +382,10 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
 m.fty.purity,
 m.fty.abis,
 Some(m.ident),
-&m.fty.sig) + ~";"
+&m.fty.sig) + ";"
 }
 fn field_to_str(cx: ctxt, f: field) -> ~str {
-return *cx.sess.str_of(f.ident) + ~": " + mt_to_str(cx, &f.mt);
+return *cx.sess.str_of(f.ident) + ": " + mt_to_str(cx, &f.mt);
 }
 
 // if there is an id, print that instead of the structural type:
@@ -413,11 +413,11 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
 ty_rptr(r, ref tm) => {
 region_to_str_space(cx, "&", r) + mt_to_str(cx, tm)
 }
-ty_unboxed_vec(ref tm) => { ~"unboxed_vec<" + mt_to_str(cx, tm) + ~">" }
+ty_unboxed_vec(ref tm) => { ~"unboxed_vec<" + mt_to_str(cx, tm) + ">" }
 ty_type => ~"type",
 ty_tup(ref elems) => {
 let strs = elems.map(|elem| ty_to_str(cx, *elem));
-~"(" + str::connect(strs, ",") + ~")"
+~"(" + str::connect(strs, ",") + ")"
 }
 ty_closure(ref f) => {
 closure_to_str(cx, f)
@@ -1711,9 +1711,9 @@ pub fn read_whole_file_str(file: &Path) -> Result<~str, ~str> {
 result::chain(read_whole_file(file), |bytes| {
 if str::is_utf8(bytes) {
 result::Ok(str::from_bytes(bytes))
 } else {
-result::Err(file.to_str() + ~" is not UTF-8")
+result::Err(file.to_str() + " is not UTF-8")
 }
 })
 }
 
@@ -46,7 +46,7 @@ impl<A:ToStr> ToStr for (A,) {
 fn to_str(&self) -> ~str {
 match *self {
 (ref a,) => {
-~"(" + a.to_str() + ~", " + ~")"
+~"(" + a.to_str() + ",)"
 }
 }
 }
@@ -97,7 +97,7 @@ impl<A:ToStr,B:ToStr> ToStr for (A, B) {
 //let &(ref a, ref b) = self;
 match *self {
 (ref a, ref b) => {
-~"(" + a.to_str() + ~", " + b.to_str() + ~")"
+~"(" + a.to_str() + ", " + b.to_str() + ")"
 }
 }
 }
@@ -241,7 +241,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
 // Print the offending lines
 for display_lines.each |line| {
 io::stderr().write_str(fmt!("%s:%u ", fm.name, *line + 1u));
-let s = fm.get_line(*line as int) + ~"\n";
+let s = fm.get_line(*line as int) + "\n";
 io::stderr().write_str(s);
 }
 if elided {
@@ -249,8 +249,8 @@ fn highlight_lines(cm: @codemap::CodeMap,
 let s = fmt!("%s:%u ", fm.name, last_line + 1u);
 let mut indent = str::len(s);
 let mut out = ~"";
-while indent > 0u { out += ~" "; indent -= 1u; }
-out += ~"...\n";
+while indent > 0u { out += " "; indent -= 1u; }
+out += "...\n";
 io::stderr().write_str(out);
 }
 
@@ -271,7 +271,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
 // part of the 'filename:line ' part of the previous line.
 let skip = str::len(fm.name) + digits + 3u;
 for skip.times() {
-s += ~" ";
+s += " ";
 }
 let orig = fm.get_line(lines.lines[0] as int);
 for uint::range(0u,left-skip) |pos| {
@@ -281,14 +281,14 @@ fn highlight_lines(cm: @codemap::CodeMap,
 _ => " " // -squigly-line as well (instead of a
 }; // space). This way the squigly-line will
 } // usually appear in the correct position.
-s += ~"^";
+s += "^";
 let hi = cm.lookup_char_pos(sp.hi);
 if hi.col != lo.col {
 // the ^ already takes up one space
 let num_squiglies = hi.col.to_uint()-lo.col.to_uint()-1u;
-for num_squiglies.times() { s += ~"~"; }
+for num_squiglies.times() { s += "~"; }
 }
-io::stderr().write_str(s + ~"\n");
+io::stderr().write_str(s + "\n");
 }
 }
 
@@ -114,7 +114,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
 p.eat(&token::COMMA);
 }
 
-let clob = ~"~{" + *p.parse_str() + ~"}";
+let clob = ~"~{" + *p.parse_str() + "}";
 clobs.push(clob);
 }
 
@@ -158,15 +158,15 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
 FlagSignAlways => {
 if !is_signed_type(cnv) {
 cx.span_fatal(sp,
-~"+ flag only valid in " +
-~"signed fmt! conversion");
+"+ flag only valid in \
+signed fmt! conversion");
 }
 }
 FlagSpaceForSign => {
 if !is_signed_type(cnv) {
 cx.span_fatal(sp,
-~"space flag only valid in " +
-~"signed fmt! conversions");
+"space flag only valid in \
+signed fmt! conversions");
 }
 }
 FlagLeftZeroPad => (),
@@ -284,8 +284,8 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
 n += 1u;
 if n >= nargs {
 cx.span_fatal(sp,
-~"not enough arguments to fmt! " +
-~"for the given format string");
+"not enough arguments to fmt! \
+for the given format string");
 }
 
 log_conv(conv);
@@ -66,7 +66,7 @@ impl gen_send for message {
 
 let mut body = ~"{\n";
 body += fmt!("use super::%s;\n", name);
-body += ~"let mut pipe = pipe;\n";
+body += "let mut pipe = pipe;\n";
 
 if this.proto.is_bounded() {
 let (sp, rp) = match (this.dir, next.dir) {
@@ -76,7 +76,7 @@ impl gen_send for message {
 (recv, recv) => (~"c", ~"s")
 };
 
-body += ~"let mut b = pipe.reuse_buffer();\n";
+body += "let mut b = pipe.reuse_buffer();\n";
 body += fmt!("let %s = ::std::pipes::SendPacketBuffered(\
 &mut (b.buffer.data.%s));\n",
 sp, next.name);
@@ -103,7 +103,7 @@ impl gen_send for message {
 if !try {
 body += fmt!("::std::pipes::send(pipe, message);\n");
 // return the new channel
-body += ~"c }";
+body += "c }";
 }
 else {
 body += fmt!("if ::std::pipes::send(pipe, message) {\n \
@@ -152,7 +152,7 @@ impl gen_send for message {
 }
 else {
 ~"(" + str::connect(arg_names.map(|x| copy *x),
-", ") + ~")"
+", ") + ")"
 };
 
 let mut body = ~"{ ";
@@ -161,7 +161,7 @@ impl gen_send for message {
 
 if !try {
 body += fmt!("::std::pipes::send(pipe, message);\n");
-body += ~" }";
+body += " }";
 } else {
 body += fmt!("if ::std::pipes::send(pipe, message) \
 { \
@@ -42,7 +42,7 @@ impl parser_attr for Parser {
 if self.look_ahead(1u) != token::LBRACKET {
 break;
 }
-attrs += ~[self.parse_attribute(ast::attr_outer)];
+attrs += [self.parse_attribute(ast::attr_outer)];
 }
 token::DOC_COMMENT(s) => {
 let attr = ::attr::mk_sugared_doc_attr(
@@ -53,7 +53,7 @@ impl parser_attr for Parser {
 if attr.node.style != ast::attr_outer {
 self.fatal("expected outer comment");
 }
-attrs += ~[attr];
+attrs += [attr];
 self.bump();
 }
 _ => break
@@ -105,7 +105,7 @@ impl parser_attr for Parser {
 let attr = self.parse_attribute(ast::attr_inner);
 if *self.token == token::SEMI {
 self.bump();
-inner_attrs += ~[attr];
+inner_attrs += [attr];
 } else {
 // It's not really an inner attribute
 let outer_attr =
@@ -113,7 +113,7 @@ impl parser_attr for Parser {
 ast::attribute_ { style: ast::attr_outer,
 value: attr.node.value,
 is_sugared_doc: false });
-next_outer_attrs += ~[outer_attr];
+next_outer_attrs += [outer_attr];
 break;
 }
 }
@@ -125,9 +125,9 @@ impl parser_attr for Parser {
 );
 self.bump();
 if attr.node.style == ast::attr_inner {
-inner_attrs += ~[attr];
+inner_attrs += [attr];
 } else {
-next_outer_attrs += ~[attr];
+next_outer_attrs += [attr];
 break;
 }
 }
@@ -235,7 +235,7 @@ fn read_block_comment(rdr: @mut StringReader,
 bump(rdr);
 }
 if !is_eof(rdr) {
-curr_line += ~"*/";
+curr_line += "*/";
 bump(rdr);
 bump(rdr);
 }
@@ -259,13 +259,13 @@ fn read_block_comment(rdr: @mut StringReader,
 if rdr.curr == '/' && nextch(rdr) == '*' {
 bump(rdr);
 bump(rdr);
-curr_line += ~"*";
+curr_line += "*";
 level += 1;
 } else {
 if rdr.curr == '*' && nextch(rdr) == '/' {
 bump(rdr);
 bump(rdr);
-curr_line += ~"/";
+curr_line += "/";
 level -= 1;
 } else { bump(rdr); }
 }
|
@ -194,9 +194,9 @@ pub impl Parser {
|
|||||||
} else {
|
} else {
|
||||||
let mut s: ~str = ~"expected `";
|
let mut s: ~str = ~"expected `";
|
||||||
s += self.token_to_str(&token::GT);
|
s += self.token_to_str(&token::GT);
|
||||||
s += ~"`, found `";
|
s += "`, found `";
|
||||||
s += self.this_token_to_str();
|
s += self.this_token_to_str();
|
||||||
s += ~"`";
|
s += "`";
|
||||||
self.fatal(s);
|
self.fatal(s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -320,7 +320,7 @@ fn consume_block_comment(rdr: @mut StringReader)
|
|||||||
if is_eof(rdr) {
|
if is_eof(rdr) {
|
||||||
rdr.fatal(~"unterminated block doc-comment");
|
rdr.fatal(~"unterminated block doc-comment");
|
||||||
} else {
|
} else {
|
||||||
acc += ~"*/";
|
acc += "*/";
|
||||||
bump(rdr);
|
bump(rdr);
|
||||||
bump(rdr);
|
bump(rdr);
|
||||||
// but comments with only "*"s between two "/"s are not
|
// but comments with only "*"s between two "/"s are not
|
||||||
|
@@ -3584,7 +3584,7 @@ pub impl Parser {
 let prefix = prefix.dir_path();
 let mod_path_stack = &*self.mod_path_stack;
 let mod_path = Path(".").push_many(*mod_path_stack);
-let default_path = *self.sess.interner.get(id) + ~".rs";
+let default_path = *self.sess.interner.get(id) + ".rs";
 let file_path = match ::attr::first_attr_value_str_by_name(
 outer_attrs, "path") {
 Some(d) => {
@@ -4213,8 +4213,8 @@ pub impl Parser {
 // FAILURE TO PARSE ITEM
 if visibility != inherited {
 let mut s = ~"unmatched visibility `";
-s += if visibility == public { ~"pub" } else { ~"priv" };
-s += ~"`";
+s += if visibility == public { "pub" } else { "priv" };
+s += "`";
 self.span_fatal(*self.last_span, s);
 }
 return iovi_none;
@@ -138,7 +138,7 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
 OROR => ~"||",
 ANDAND => ~"&&",
 BINOP(op) => binop_to_str(op),
-BINOPEQ(op) => binop_to_str(op) + ~"=",
+BINOPEQ(op) => binop_to_str(op) + "=",
 
 /* Structural symbols */
 AT => ~"@",
@@ -163,7 +163,7 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
 
 /* Literals */
 LIT_INT(c, ast::ty_char) => {
-~"'" + char::escape_default(c as char) + ~"'"
+~"'" + char::escape_default(c as char) + "'"
 }
 LIT_INT(i, t) => {
 i.to_str() + ast_util::int_ty_to_str(t)
@@ -175,18 +175,18 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
 LIT_FLOAT(s, t) => {
 let mut body = copy *in.get(s);
 if body.ends_with(".") {
-body = body + ~"0"; // `10.f` is not a float literal
+body += "0"; // `10.f` is not a float literal
 }
 body + ast_util::float_ty_to_str(t)
 }
 LIT_FLOAT_UNSUFFIXED(s) => {
 let mut body = copy *in.get(s);
 if body.ends_with(".") {
-body = body + ~"0"; // `10.f` is not a float literal
+body += "0"; // `10.f` is not a float literal
 }
 body
 }
-LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + ~"\"" }
+LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + "\"" }
 
 /* Name components */
 IDENT(s, _) => copy *in.get(s),
@@ -120,12 +120,12 @@ pub fn buf_str(toks: ~[token], szs: ~[int], left: uint, right: uint,
 let mut s = ~"[";
 while i != right && L != 0u {
 L -= 1u;
-if i != left { s += ~", "; }
+if i != left { s += ", "; }
 s += fmt!("%d=%s", szs[i], tok_str(toks[i]));
 i += 1u;
 i %= n;
 }
-s += ~"]";
+s += "]";
 return s;
 }
 
@@ -1996,7 +1996,7 @@ pub fn print_literal(s: @ps, lit: @ast::lit) {
 match lit.node {
 ast::lit_str(st) => print_string(s, *st),
 ast::lit_int(ch, ast::ty_char) => {
-word(s.s, ~"'" + char::escape_default(ch as char) + ~"'");
+word(s.s, ~"'" + char::escape_default(ch as char) + "'");
 }
 ast::lit_int(i, t) => {
 if i < 0_i64 {