Fix various useless derefs and slicings
This commit is contained in:
parent 79feb9476d
commit eb447f4ef4
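The pattern removed throughout this commit: writing `&x[..]` (or calling `.deref()` / `&*x`) where a plain `&x` already coerces to the expected `&str` or `&[T]`. A minimal standalone sketch of the equivalence; the two helper functions are illustrative, not taken from the patched crates:

```rust
fn takes_str(_: &str) {}
fn takes_slice(_: &[u32]) {}

fn main() {
    let s = String::from("hello");
    let v = vec![1u32, 2, 3];

    // Explicit re-slicing, the form this commit removes:
    takes_str(&s[..]);
    takes_slice(&v[..]);

    // Deref coercion makes the plain borrow equivalent:
    takes_str(&s);
    takes_slice(&v);
}
```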
@@ -586,7 +586,7 @@ fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
 .arg(ADB_TEST_DIR));

 let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
-build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));

 for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
 let f = t!(f);
@@ -196,7 +196,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
 let toknum = &s[content_end + 3 .. toknum_end];

 let not_found = format!("didn't find token {:?} in the map", toknum);
-let proto_tok = tokens.get(toknum).expect(&not_found[..]);
+let proto_tok = tokens.get(toknum).expect(&not_found);

 let nm = Symbol::intern(content);

@@ -304,14 +304,14 @@ fn main() {
 let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap();
 let mut token_list = String::new();
 token_file.read_to_string(&mut token_list).unwrap();
-let token_map = parse_token_list(&token_list[..]);
+let token_map = parse_token_list(&token_list);

 let stdin = std::io::stdin();
 let lock = stdin.lock();
 let lines = lock.lines();
 let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(),
 &token_map,
-&surrogate_pairs_pos[..],
+&surrogate_pairs_pos,
 has_bom));

 for antlr_tok in antlr_tokens {
@@ -1376,7 +1376,7 @@ mod tests {
 thread::spawn(move || {
 check_links(&n);
 let a: &[_] = &[&1, &2, &3];
-assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
+assert_eq!(a, &*n.iter().collect::<Vec<_>>());
 })
 .join()
 .ok()
@@ -554,7 +554,7 @@ impl<'a> LabelText<'a> {
 pub fn to_dot_string(&self) -> String {
 match self {
 &LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
-&EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])),
+&EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
 &HtmlStr(ref s) => format!("<{}>", s),
 }
 }
@@ -587,7 +587,7 @@ impl<'a> LabelText<'a> {
 let mut prefix = self.pre_escaped_content().into_owned();
 let suffix = suffix.pre_escaped_content();
 prefix.push_str(r"\n\n");
-prefix.push_str(&suffix[..]);
+prefix.push_str(&suffix);
 EscStr(prefix.into_cow())
 }
 }
@@ -878,7 +878,7 @@ mod tests {
 type Node = Node;
 type Edge = &'a Edge;
 fn graph_id(&'a self) -> Id<'a> {
-Id::new(&self.name[..]).unwrap()
+Id::new(self.name).unwrap()
 }
 fn node_id(&'a self, n: &Node) -> Id<'a> {
 id_name(n)
@@ -55,7 +55,7 @@ impl Fingerprint {
 impl Encodable for Fingerprint {
 #[inline]
 fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-for &byte in &self.0[..] {
+for &byte in &self.0 {
 s.emit_u8(byte)?;
 }
 Ok(())
@@ -66,7 +66,7 @@ impl Decodable for Fingerprint {
 #[inline]
 fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
 let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]);
-for byte in &mut result.0[..] {
+for byte in &mut result.0 {
 *byte = d.read_u8()?;
 }
 Ok(result)
@@ -40,7 +40,6 @@ use std::cmp;
 use std::default::Default as StdDefault;
 use std::mem;
 use std::fmt;
-use std::ops::Deref;
 use syntax::attr;
 use syntax::ast;
 use syntax::symbol::Symbol;
@@ -485,7 +484,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
 Allow => bug!("earlier conditional return should handle Allow case")
 };
 let hyphen_case_lint_name = name.replace("_", "-");
-if lint_flag_val.as_str().deref() == name {
+if lint_flag_val.as_str() == name {
 err.note(&format!("requested on the command line with `{} {}`",
 flag, hyphen_case_lint_name));
 } else {
@@ -496,7 +495,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
 },
 Node(lint_attr_name, src) => {
 def = Some(src);
-if lint_attr_name.as_str().deref() != name {
+if lint_attr_name.as_str() != name {
 let level_str = level.as_str();
 err.note(&format!("#[{}({})] implied by #[{}({})]",
 level_str, name, level_str, lint_attr_name));
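The two hunks above drop an explicit `.deref()` on the value returned by `as_str()`: a comparison against a `&str` compiles without it. A hedged sketch of the general mechanism, using a made-up `Interned` wrapper rather than rustc's actual interned-string type:

```rust
use std::ops::Deref;

// Hypothetical stand-in for an interned-string handle returned by `as_str()`.
struct Interned(String);

impl Deref for Interned {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

// With a PartialEq impl against string slices, callers can compare directly
// instead of spelling out `value.deref() == name`.
impl<'a> PartialEq<&'a str> for Interned {
    fn eq(&self, other: &&'a str) -> bool {
        &self.0 == *other
    }
}

fn main() {
    let lint_name = Interned("unused_variables".to_string());
    let requested = "unused_variables";

    assert!(lint_name.deref() == requested); // the old, explicit form
    assert!(lint_name == requested);         // the form used after this change
}
```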
@@ -536,7 +536,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 if !self.stability.borrow().active_features.contains(feature) {
 let msg = match *reason {
 Some(ref r) => format!("use of unstable library feature '{}': {}",
-&feature.as_str(), &r),
+feature.as_str(), &r),
 None => format!("use of unstable library feature '{}'", &feature)
 };
 emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,
@@ -267,11 +267,11 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
 // First, filter out duplicates
 moved.sort();
 moved.dedup();
-debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
+debug!("fragments 1 moved: {:?}", path_lps(&moved));

 assigned.sort();
 assigned.dedup();
-debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
+debug!("fragments 1 assigned: {:?}", path_lps(&assigned));

 // Second, build parents from the moved and assigned.
 for m in &moved {
@@ -291,14 +291,14 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx

 parents.sort();
 parents.dedup();
-debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
+debug!("fragments 2 parents: {:?}", path_lps(&parents));

 // Third, filter the moved and assigned fragments down to just the non-parents
-moved.retain(|f| non_member(*f, &parents[..]));
-debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
+moved.retain(|f| non_member(*f, &parents));
+debug!("fragments 3 moved: {:?}", path_lps(&moved));

-assigned.retain(|f| non_member(*f, &parents[..]));
-debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
+assigned.retain(|f| non_member(*f, &parents));
+debug!("fragments 3 assigned: {:?}", path_lps(&assigned));

 // Fourth, build the leftover from the moved, assigned, and parents.
 for m in &moved {
@@ -316,16 +316,16 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx

 unmoved.sort();
 unmoved.dedup();
-debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
+debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));

 // Fifth, filter the leftover fragments down to its core.
 unmoved.retain(|f| match *f {
 AllButOneFrom(_) => true,
-Just(mpi) => non_member(mpi, &parents[..]) &&
-non_member(mpi, &moved[..]) &&
-non_member(mpi, &assigned[..])
+Just(mpi) => non_member(mpi, &parents) &&
+non_member(mpi, &moved) &&
+non_member(mpi, &assigned)
 });
-debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
+debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));

 // Swap contents back in.
 fragments.unmoved_fragments = unmoved;
@@ -112,7 +112,7 @@ fn borrowck_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_id: hir::BodyId) {
 &flowed_moves.move_data,
 owner_id);

-check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body);
+check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
 }

 fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,
@@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
 set.push_str(", ");
 }
 let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp);
-set.push_str(&loan_str[..]);
+set.push_str(&loan_str);
 saw_some = true;
 true
 });
@@ -680,10 +680,10 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>(
 }).collect();
 let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect();
 let matrix = Matrix(m.iter().flat_map(|r| {
-specialize(cx, &r[..], &ctor, &wild_patterns)
+specialize(cx, &r, &ctor, &wild_patterns)
 }).collect());
 match specialize(cx, v, &ctor, &wild_patterns) {
-Some(v) => match is_useful(cx, &matrix, &v[..], witness) {
+Some(v) => match is_useful(cx, &matrix, &v, witness) {
 UsefulWithWitness(witnesses) => UsefulWithWitness(
 witnesses.into_iter()
 .map(|witness| witness.apply_constructor(cx, &ctor, lty))
@@ -311,7 +311,7 @@ fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
 for &(pat, hir_pat) in pats {
 let v = vec![pat];

-match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
+match is_useful(cx, &seen, &v, LeaveOutWitness) {
 NotUseful => {
 match source {
 hir::MatchSource::IfLetDesugar { .. } => {
@@ -91,8 +91,8 @@ impl<A: Array> Deref for AccumulateVec<A> {
 type Target = [A::Element];
 fn deref(&self) -> &Self::Target {
 match *self {
-AccumulateVec::Array(ref v) => &v[..],
-AccumulateVec::Heap(ref v) => &v[..],
+AccumulateVec::Array(ref v) => v,
+AccumulateVec::Heap(ref v) => v,
 }
 }
 }
@@ -100,8 +100,8 @@ impl<A: Array> Deref for AccumulateVec<A> {
 impl<A: Array> DerefMut for AccumulateVec<A> {
 fn deref_mut(&mut self) -> &mut [A::Element] {
 match *self {
-AccumulateVec::Array(ref mut v) => &mut v[..],
-AccumulateVec::Heap(ref mut v) => &mut v[..],
+AccumulateVec::Array(ref mut v) => v,
+AccumulateVec::Heap(ref mut v) => v,
 }
 }
 }
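In the two `AccumulateVec` hunks above, each match arm can return the captured reference directly: the `deref`/`deref_mut` signature fixes the expected slice type, so the re-slice with `[..]` is redundant. A simplified standalone sketch of the same shape; `SmallOrHeap` is made up for illustration:

```rust
use std::ops::Deref;

// Simplified stand-in: either a fixed-size inline buffer or a heap-allocated Vec.
enum SmallOrHeap {
    Small([u8; 4]),
    Heap(Vec<u8>),
}

impl Deref for SmallOrHeap {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        // Each arm coerces to `&[u8]` on its own: `&[u8; 4]` by unsizing,
        // `&Vec<u8>` by deref coercion, so writing `&v[..]` adds nothing.
        match *self {
            SmallOrHeap::Small(ref v) => v,
            SmallOrHeap::Heap(ref v) => v,
        }
    }
}

fn main() {
    let small = SmallOrHeap::Small([7; 4]);
    let heap = SmallOrHeap::Heap(vec![1, 2, 3]);
    assert_eq!(small.len(), 4); // slice methods reachable through Deref
    assert_eq!(heap.len(), 3);
}
```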
@@ -48,7 +48,7 @@ pub fn encode(n: u64, base: u64) -> String {
 #[test]
 fn test_encode() {
 fn test(n: u64, base: u64) {
-assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32));
+assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32));
 }

 for base in 2..37 {
@@ -35,7 +35,7 @@ pub struct Blake2bCtx {
 impl ::std::fmt::Debug for Blake2bCtx {
 fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
 try!(write!(fmt, "hash: "));
-for v in &self.h[..] {
+for v in &self.h {
 try!(write!(fmt, "{:x}", v));
 }
 Ok(())
@@ -91,13 +91,13 @@ impl<T: Idx> IdxSet<T> {
 impl<T: Idx> Deref for IdxSetBuf<T> {
 type Target = IdxSet<T>;
 fn deref(&self) -> &IdxSet<T> {
-unsafe { IdxSet::from_slice(&self.bits[..]) }
+unsafe { IdxSet::from_slice(&self.bits) }
 }
 }

 impl<T: Idx> DerefMut for IdxSetBuf<T> {
 fn deref_mut(&mut self) -> &mut IdxSet<T> {
-unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) }
+unsafe { IdxSet::from_slice_mut(&mut self.bits) }
 }
 }

@@ -135,11 +135,11 @@ impl<T: Idx> IdxSet<T> {
 }

 pub fn words(&self) -> &[Word] {
-&self.bits[..]
+&self.bits
 }

 pub fn words_mut(&mut self) -> &mut [Word] {
-&mut self.bits[..]
+&mut self.bits
 }

 pub fn clone_from(&mut self, other: &IdxSet<T>) {
@@ -233,7 +233,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>)
 // Extract input (string or file and optional path) from matches.
 fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
 if free_matches.len() == 1 {
-let ifile = &free_matches[0][..];
+let ifile = &free_matches[0];
 if ifile == "-" {
 let mut src = String::new();
 io::stdin().read_to_string(&mut src).unwrap();
@@ -800,7 +800,7 @@ Available lint options:
 for lint in lints {
 let name = lint.name_lower().replace("_", "-");
 println!(" {} {:7.7} {}",
-padded(&name[..]),
+padded(&name),
 lint.default_level.as_str(),
 lint.desc);
 }
@@ -838,7 +838,7 @@ Available lint options:
 .map(|x| x.to_string().replace("_", "-"))
 .collect::<Vec<String>>()
 .join(", ");
-println!(" {} {}", padded(&name[..]), desc);
+println!(" {} {}", padded(&name), desc);
 }
 println!("\n");
 };
@@ -945,7 +945,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
 .into_iter()
 .map(|x| x.opt_group)
 .collect();
-let matches = match getopts::getopts(&args[..], &all_groups) {
+let matches = match getopts::getopts(&args, &all_groups) {
 Ok(m) => m,
 Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
 };
@@ -1084,7 +1084,7 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
 format!("we would appreciate a bug report: {}", BUG_REPORT_URL)];
 for note in &xs {
 handler.emit(&MultiSpan::new(),
-&note[..],
+&note,
 errors::Level::Note);
 }
 if match env::var_os("RUST_BACKTRACE") {
@@ -589,7 +589,7 @@ impl UserIdentifiedItem {
 -> NodesMatchingUII<'a, 'hir> {
 match *self {
 ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()),
-ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
+ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)),
 }
 }

@@ -600,7 +600,7 @@ impl UserIdentifiedItem {
 user_option,
 self.reconstructed_input(),
 is_wrong_because);
-sess.fatal(&message[..])
+sess.fatal(&message)
 };

 let mut saw_node = ast::DUMMY_NODE_ID;
@@ -771,7 +771,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
 fn expand_err_details(r: io::Result<()>) -> io::Result<()> {
 r.map_err(|ioerr| {
 io::Error::new(io::ErrorKind::Other,
-&format!("graphviz::render failed: {}", ioerr)[..])
+format!("graphviz::render failed: {}", ioerr))
 })
 }
 }
@@ -289,7 +289,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {

 pub fn t_param(&self, index: u32) -> Ty<'tcx> {
 let name = format!("T{}", index);
-self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
+self.infcx.tcx.mk_param(index, Symbol::intern(&name))
 }

 pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {
@@ -99,9 +99,9 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
 let rustc_version_str_len = rustc_version_str_len[0] as usize;
 let mut buffer = Vec::with_capacity(rustc_version_str_len);
 buffer.resize(rustc_version_str_len, 0);
-file.read_exact(&mut buffer[..])?;
+file.read_exact(&mut buffer)?;

-if &buffer[..] != rustc_version().as_bytes() {
+if buffer != rustc_version().as_bytes() {
 report_format_mismatch(sess, path, "Different compiler version");
 return Ok(None);
 }
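The hunk above leans on two standard-library conveniences: `&mut Vec<u8>` coerces to the `&mut [u8]` that `read_exact` takes, and `Vec<u8>` implements `PartialEq` against byte slices, so the buffer compares against `as_bytes()` output without re-slicing. A small standalone check of the comparison; the byte strings are placeholders:

```rust
fn main() {
    let buffer: Vec<u8> = b"rustc 1.17.0".to_vec();
    let expected: &[u8] = b"rustc 1.17.0";

    // Vec<u8> implements PartialEq<&[u8]>, so the `[..]` re-slice is unnecessary.
    assert!(&buffer[..] == expected); // the old, explicit form
    assert!(buffer == expected);      // the form used after this change
}
```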
@@ -88,7 +88,7 @@ impl NonCamelCaseTypes {
 } else {
 format!("{} `{}` should have a camel case name such as `{}`", sort, name, c)
 };
-cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
+cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m);
 }
 }
 }
@@ -334,7 +334,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc {
 attr.check_name("doc") &&
 match attr.meta_item_list() {
 None => false,
-Some(l) => attr::list_contains_name(&l[..], "hidden"),
+Some(l) => attr::list_contains_name(&l, "hidden"),
 }
 });
 self.doc_hidden_stack.push(doc_hidden);
@@ -146,7 +146,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
 ty::TyBool => return,
 ty::TyAdt(def, _) => {
 let attrs = cx.tcx.get_attrs(def.did);
-check_must_use(cx, &attrs[..], s.span)
+check_must_use(cx, &attrs, s.span)
 }
 _ => false,
 };
@@ -140,7 +140,7 @@ fn main() {
 cfg.flag(flag);
 }

-for component in &components[..] {
+for component in &components {
 let mut flag = String::from("-DLLVM_COMPONENT_");
 flag.push_str(&component.to_uppercase());
 cfg.flag(&flag);
@@ -173,7 +173,7 @@ fn main() {
 if !is_crossed {
 cmd.arg("--system-libs");
 }
-cmd.args(&components[..]);
+cmd.args(&components);

 for lib in output(&mut cmd).split_whitespace() {
 let name = if lib.starts_with("-l") {
@@ -669,7 +669,7 @@ impl<'a> CrateLoader<'a> {
 name,
 config::host_triple(),
 self.sess.opts.target_triple);
-span_fatal!(self.sess, span, E0456, "{}", &message[..]);
+span_fatal!(self.sess, span, E0456, "{}", &message);
 }

 let root = ekrate.metadata.get_root();
@@ -918,14 +918,14 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
 self.encode_fields(def_id);
 }
 hir::ItemImpl(..) => {
-for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
 self.record(trait_item_def_id,
 EncodeContext::encode_info_for_impl_item,
 trait_item_def_id);
 }
 }
 hir::ItemTrait(..) => {
-for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
 self.record(item_def_id,
 EncodeContext::encode_info_for_trait_item,
 item_def_id);
@@ -477,15 +477,15 @@ impl<'a> Context<'a> {
 Some(file) => file,
 };
 let (hash, found_kind) =
-if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") {
+if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") {
 (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
-} else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") {
+} else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
 (&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
 } else if file.starts_with(&dylib_prefix) &&
 file.ends_with(&dypair.1) {
 (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
 } else {
-if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) {
+if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
 staticlibs.push(CrateMismatch {
 path: path.to_path_buf(),
 got: "static".to_string(),
@@ -126,19 +126,19 @@ impl<'a> PluginLoader<'a> {
 // inside this crate, so continue would spew "macro undefined"
 // errors
 Err(err) => {
-self.sess.span_fatal(span, &err[..])
+self.sess.span_fatal(span, &err)
 }
 };

 unsafe {
 let registrar =
-match lib.symbol(&symbol[..]) {
+match lib.symbol(&symbol) {
 Ok(registrar) => {
 mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
 }
 // again fatal if we can't register macros
 Err(err) => {
-self.sess.span_fatal(span, &err[..])
+self.sess.span_fatal(span, &err)
 }
 };

@@ -423,7 +423,7 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String {

 let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v))));
 strs.fold(String::new(), |mut s, ss| {
-s.push_str(&ss[..]);
+s.push_str(&ss);
 s
 })
 }
@@ -369,7 +369,7 @@ impl FnType {
 match sig.inputs().last().unwrap().sty {
 ty::TyTuple(ref tupled_arguments, _) => {
 inputs = &sig.inputs()[0..sig.inputs().len() - 1];
-&tupled_arguments[..]
+&tupled_arguments
 }
 _ => {
 bug!("argument to function with \"rust-call\" ABI \
@@ -229,11 +229,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 variant_fill].iter().cloned().collect();
 match name {
 None => {
-Type::struct_(cx, &fields[..], false)
+Type::struct_(cx, &fields, false)
 }
 Some(name) => {
 let mut llty = Type::named_struct(cx, name);
-llty.set_struct_body(&fields[..], false);
+llty.set_struct_body(&fields, false);
 llty
 }
 }
@@ -330,7 +330,7 @@ fn struct_wrapped_nullable_bitdiscr(
 alignment: Alignment,
 ) -> ValueRef {
 let llptrptr = bcx.gepi(scrutinee,
-&discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>()[..]);
+&discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>());
 let llptr = bcx.load(llptrptr, alignment.to_align());
 let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
 bcx.icmp(cmp, llptr, C_null(val_ty(llptr)))
@@ -402,7 +402,7 @@ pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: Valu
 base::call_memset(bcx, llptr, fill_byte, size, align, false);
 } else {
 let path = discrfield.iter().map(|&i| i as usize).collect::<Vec<_>>();
-let llptrptr = bcx.gepi(val, &path[..]);
+let llptrptr = bcx.gepi(val, &path);
 let llptrty = val_ty(llptrptr).element_type();
 bcx.store(C_null(llptrty), llptrptr, None);
 }
@@ -77,14 +77,14 @@ pub fn trans_inline_asm<'a, 'tcx>(
 .chain(arch_clobbers.iter().map(|s| s.to_string()))
 .collect::<Vec<String>>().join(",");

-debug!("Asm Constraints: {}", &all_constraints[..]);
+debug!("Asm Constraints: {}", &all_constraints);

 // Depending on how many outputs we have, the return type is different
 let num_outputs = output_types.len();
 let output_type = match num_outputs {
 0 => Type::void(bcx.ccx),
 1 => output_types[0],
-_ => Type::struct_(bcx.ccx, &output_types[..], false)
+_ => Type::struct_(bcx.ccx, &output_types, false)
 };

 let dialect = match ia.dialect {
@@ -65,10 +65,10 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session)

 for path in search_paths {
 debug!("looking for {} inside {:?}", name, path);
-let test = path.join(&oslibname[..]);
+let test = path.join(&oslibname);
 if test.exists() { return test }
 if oslibname != unixlibname {
-let test = path.join(&unixlibname[..]);
+let test = path.join(&unixlibname);
 if test.exists() { return test }
 }
 }
@@ -91,7 +91,7 @@ pub fn find_crate_name(sess: Option<&Session>,
 attrs: &[ast::Attribute],
 input: &Input) -> String {
 let validate = |s: String, span: Option<Span>| {
-cstore::validate_crate_name(sess, &s[..], span);
+cstore::validate_crate_name(sess, &s, span);
 s
 };

@@ -109,7 +109,7 @@ pub fn find_crate_name(sess: Option<&Session>,
 let msg = format!("--crate-name and #[crate_name] are \
 required to match, but `{}` != `{}`",
 s, name);
-sess.span_err(attr.span, &msg[..]);
+sess.span_err(attr.span, &msg);
 }
 }
 return validate(s.clone(), None);
@@ -417,7 +417,7 @@ fn object_filenames(trans: &CrateTranslation,
 outputs: &OutputFilenames)
 -> Vec<PathBuf> {
 trans.modules.iter().map(|module| {
-outputs.temp_path(OutputType::Object, Some(&module.name[..]))
+outputs.temp_path(OutputType::Object, Some(&module.name))
 }).collect()
 }

@@ -551,7 +551,7 @@ fn link_rlib<'a>(sess: &'a Session,
 e))
 }

-let bc_data_deflated = flate::deflate_bytes(&bc_data[..]);
+let bc_data_deflated = flate::deflate_bytes(&bc_data);

 let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
 Ok(file) => file,
@@ -819,12 +819,12 @@ fn link_natively(sess: &Session,
 pname,
 prog.status))
 .note(&format!("{:?}", &cmd))
-.note(&escape_string(&output[..]))
+.note(&escape_string(&output))
 .emit();
 sess.abort_if_errors();
 }
-info!("linker stderr:\n{}", escape_string(&prog.stderr[..]));
-info!("linker stdout:\n{}", escape_string(&prog.stdout[..]));
+info!("linker stderr:\n{}", escape_string(&prog.stderr));
+info!("linker stdout:\n{}", escape_string(&prog.stdout));
 },
 Err(e) => {
 sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e))
@@ -61,7 +61,7 @@ pub fn run(sess: &session::Session,
 }

 let export_threshold =
-symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+symbol_export::crates_export_threshold(&sess.crate_types.borrow());

 let symbol_filter = &|&(ref name, level): &(String, _)| {
 if symbol_export::is_below_threshold(level, export_threshold) {
@@ -147,7 +147,7 @@ pub fn run(sess: &session::Session,
 bc_decoded.len() as libc::size_t) {
 write::llvm_err(sess.diagnostic(),
 format!("failed to load bc of `{}`",
-&name[..]));
+name));
 }
 });
 }
@@ -37,8 +37,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {

 let libs = config.used_crates.clone();
 let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::<Vec<_>>();
-let rpaths = get_rpaths(config, &libs[..]);
-flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
+let rpaths = get_rpaths(config, &libs);
+flags.extend_from_slice(&rpaths_to_flags(&rpaths));

 // Use DT_RUNPATH instead of DT_RPATH if available
 if config.linker_is_gnu {
@@ -84,14 +84,14 @@ fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
 }
 }

-log_rpaths("relative", &rel_rpaths[..]);
-log_rpaths("fallback", &fallback_rpaths[..]);
+log_rpaths("relative", &rel_rpaths);
+log_rpaths("fallback", &fallback_rpaths);

 let mut rpaths = rel_rpaths;
-rpaths.extend_from_slice(&fallback_rpaths[..]);
+rpaths.extend_from_slice(&fallback_rpaths);

 // Remove duplicates
-let rpaths = minimize_rpaths(&rpaths[..]);
+let rpaths = minimize_rpaths(&rpaths);
 return rpaths;
 }

@@ -177,7 +177,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
 let mut set = HashSet::new();
 let mut minimized = Vec::new();
 for rpath in rpaths {
-if set.insert(&rpath[..]) {
+if set.insert(rpath) {
 minimized.push(rpath.clone());
 }
 }
@@ -154,7 +154,7 @@ impl ExportedSymbols {
 cnum: CrateNum)
 -> &[(String, SymbolExportLevel)] {
 match self.exports.get(&cnum) {
-Some(exports) => &exports[..],
+Some(exports) => exports,
 None => &[]
 }
 }
@@ -167,7 +167,7 @@ impl ExportedSymbols {
 {
 for &(ref name, export_level) in self.exported_symbols(cnum) {
 if is_below_threshold(export_level, export_threshold) {
-f(&name[..], export_level)
+f(&name, export_level)
 }
 }
 }
@@ -341,7 +341,7 @@ pub fn sanitize(s: &str) -> String {
 if !result.is_empty() &&
 result.as_bytes()[0] != '_' as u8 &&
 ! (result.as_bytes()[0] as char).is_xid_start() {
-return format!("_{}", &result[..]);
+return format!("_{}", result);
 }

 return result;
@@ -105,7 +105,7 @@ impl SharedEmitter {
 Some(ref code) => {
 handler.emit_with_code(&MultiSpan::new(),
 &diag.msg,
-&code[..],
+&code,
 diag.lvl);
 },
 None => {
@@ -189,8 +189,8 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
 let fdata_sections = ffunction_sections;

 let code_model_arg = match sess.opts.cg.code_model {
-Some(ref s) => &s[..],
-None => &sess.target.target.options.code_model[..],
+Some(ref s) => &s,
+None => &sess.target.target.options.code_model,
 };

 let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
@@ -397,7 +397,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
 let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
 .expect("non-UTF8 SMDiagnostic");

-report_inline_asm(cgcx, &msg[..], cookie);
+report_inline_asm(cgcx, &msg, cookie);
 }

 unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
@@ -823,7 +823,7 @@ pub fn run_passes(sess: &Session,
 if trans.modules.len() == 1 {
 // 1) Only one codegen unit. In this case it's no difficulty
 // to copy `foo.0.x` to `foo.x`.
-let module_name = Some(&(trans.modules[0].name)[..]);
+let module_name = Some(&trans.modules[0].name[..]);
 let path = crate_output.temp_path(output_type, module_name);
 copy_gracefully(&path,
 &crate_output.path(output_type));
@@ -939,7 +939,7 @@ pub fn run_passes(sess: &Session,

 if metadata_config.emit_bc && !user_wants_bitcode {
 let path = crate_output.temp_path(OutputType::Bitcode,
-Some(&trans.metadata_module.name[..]));
+Some(&trans.metadata_module.name));
 remove(sess, &path);
 }
 }
@@ -514,7 +514,7 @@ pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>,
 n_bytes: ValueRef,
 align: u32) {
 let ccx = b.ccx;
-let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
+let ptr_width = &ccx.sess().target.target.target_pointer_width;
 let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
 let memcpy = ccx.get_intrinsic(&key);
 let src_ptr = b.pointercast(src, Type::i8p(ccx));
@@ -550,7 +550,7 @@ pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>,
 size: ValueRef,
 align: ValueRef,
 volatile: bool) -> ValueRef {
-let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..];
+let ptr_width = &b.ccx.sess().target.target.target_pointer_width;
 let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
 let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
 let volatile = C_bool(b.ccx, volatile);
@@ -765,7 +765,7 @@ fn write_metadata(cx: &SharedCrateContext,
 let mut compressed = cstore.metadata_encoding_version().to_vec();
 compressed.extend_from_slice(&flate::deflate_bytes(&metadata));

-let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
+let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed);
 let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
 let name = cx.metadata_symbol_name();
 let buf = CString::new(name).unwrap();
@@ -796,7 +796,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
 symbol_map: &SymbolMap<'tcx>,
 exported_symbols: &ExportedSymbols) {
 let export_threshold =
-symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+symbol_export::crates_export_threshold(&sess.crate_types.borrow());

 let exported_symbols = exported_symbols
 .exported_symbols(LOCAL_CRATE)
@@ -1035,7 +1035,7 @@ pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
 (generics.parent_types == 0 && generics.types.is_empty()) &&
 // Functions marked with #[inline] are only ever translated
 // with "internal" linkage and are never exported.
-!attr::requests_inline(&attributes[..])
+!attr::requests_inline(&attributes)
 }

 _ => false
@@ -1574,7 +1574,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
 cgus.dedup();
 for &(ref cgu_name, linkage) in cgus.iter() {
 output.push_str(" ");
-output.push_str(&cgu_name[..]);
+output.push_str(&cgu_name);

 let linkage_abbrev = match linkage {
 llvm::Linkage::ExternalLinkage => "External",
@@ -627,7 +627,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 } else {
 let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
 self.count_insn("gepi");
-self.inbounds_gep(base, &v[..])
+self.inbounds_gep(base, &v)
 }
 }

@@ -835,8 +835,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 let s = format!("{} ({})",
 text,
 self.ccx.sess().codemap().span_to_string(sp));
-debug!("{}", &s[..]);
-self.add_comment(&s[..]);
+debug!("{}", s);
+self.add_comment(&s);
 }
 }

@@ -2611,7 +2611,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
 if attr.is_word() {
 Some(format!("{}", name))
 } else if let Some(v) = attr.value_str() {
-Some(format!("{} = {:?}", name, &v.as_str()[..]))
+Some(format!("{} = {:?}", name, v.as_str()))
 } else if let Some(values) = attr.meta_item_list() {
 let display: Vec<_> = values.iter().filter_map(|attr| {
 attr.meta_item().and_then(|mi| render_attribute(mi))
@@ -2642,7 +2642,7 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {

 for attr in &it.attrs.other_attrs {
 let name = attr.name().unwrap();
-if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
+if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) {
 continue;
 }
 if let Some(s) = render_attribute(&attr.meta().unwrap()) {
@@ -119,7 +119,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
 };
 let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
 p.root_module_name = cx.current_expansion.module.mod_path.last()
-.map(|id| (*id.name.as_str()).to_owned());
+.map(|id| id.name.as_str().to_string());

 p.check_unknown_macro_variable();
 // Let the context choose how to interpret the result.
@@ -818,7 +818,7 @@ pub struct GatedCfg {

 impl GatedCfg {
 pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
-let name = &*cfg.name().as_str();
+let name = cfg.name().as_str();
 GATED_CFGS.iter()
 .position(|info| info.0 == name)
 .map(|idx| {
@@ -865,8 +865,7 @@ macro_rules! gate_feature {
 impl<'a> Context<'a> {
 fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
 debug!("check_attribute(attr = {:?})", attr);
-let name = unwrap_or!(attr.name(), return);
-
+let name = unwrap_or!(attr.name(), return).as_str();
 for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
 if name == n {
 if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
@@ -885,12 +884,12 @@ impl<'a> Context<'a> {
 return;
 }
 }
-if name.as_str().starts_with("rustc_") {
+if name.starts_with("rustc_") {
 gate_feature!(self, rustc_attrs, attr.span,
 "unless otherwise specified, attributes \
 with the prefix `rustc_` \
 are reserved for internal compiler diagnostics");
-} else if name.as_str().starts_with("derive_") {
+} else if name.starts_with("derive_") {
 gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
 } else if !attr::is_known(attr) {
 // Only run the custom attribute lint during regular
@@ -5151,15 +5151,15 @@ impl<'a> Parser<'a> {

 fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
 if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
-self.directory.path.push(&*path.as_str());
+self.directory.path.push(&path.as_str());
 self.directory.ownership = DirectoryOwnership::Owned;
 } else {
-self.directory.path.push(&*id.name.as_str());
+self.directory.path.push(&id.name.as_str());
 }
 }

 pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
-attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
+attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str()))
 }

 /// Returns either a path to a module, or .
@@ -616,7 +616,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {

 fn is_test_crate(krate: &ast::Crate) -> bool {
 match attr::find_crate_name(&krate.attrs) {
-Some(s) if "test" == &*s.as_str() => true,
+Some(s) if "test" == s.as_str() => true,
 _ => false
 }
 }