codegen_llvm_back: improve allocations

ljedrz 2018-10-06 11:45:11 +02:00
parent a563ceb3b9
commit 2043d30c2e
4 changed files with 23 additions and 21 deletions
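
The changes below all follow the same allocation pattern: vectors that used to grow push-by-push are either pre-sized with `Vec::with_capacity` or filled in a single `extend` call. A minimal standalone sketch of that pattern (names and counts here are illustrative, not taken from rustc):

// Illustrative only: the general pattern applied throughout this commit.

fn grow_by_push(n: usize) -> Vec<u32> {
    // Starts empty and may reallocate (and copy) several times as it grows.
    let mut out = Vec::new();
    for i in 0..n {
        out.push(i as u32);
    }
    out
}

fn preallocate_and_extend(n: usize) -> Vec<u32> {
    // The final length is known up front: allocate once, then fill via an
    // iterator; `extend` can also use the iterator's size hint to reserve.
    let mut out = Vec::with_capacity(n);
    out.extend((0..n).map(|i| i as u32));
    out
}

fn main() {
    assert_eq!(grow_by_push(4), preallocate_and_extend(4));
}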


@@ -72,12 +72,12 @@ pub(crate) fn link_binary(sess: &Session,
             bug!("invalid output type `{:?}` for target os `{}`",
                  crate_type, sess.opts.target_triple);
         }
-        let mut out_files = link_binary_output(sess,
-                                               codegen_results,
-                                               crate_type,
-                                               outputs,
-                                               crate_name);
-        out_filenames.append(&mut out_files);
+        let out_files = link_binary_output(sess,
+                                           codegen_results,
+                                           crate_type,
+                                           outputs,
+                                           crate_name);
+        out_filenames.extend(out_files);
     }
     // Remove the temporary object file and metadata if we aren't saving temps
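
This hunk swaps `Vec::append` for `Vec::extend` on the per-crate-type output files. A small sketch of the difference, with placeholder data standing in for the result of `link_binary_output`: `append` requires a separate mutable binding and drains it in place, while `extend` simply consumes the vector by value.

fn main() {
    let mut all: Vec<i32> = vec![1, 2];

    // Old shape: `append` needs a separate mutable binding and drains it in place.
    let mut part = vec![3, 4];
    all.append(&mut part);
    assert!(part.is_empty());

    // New shape: `extend` consumes the vector by value, no `mut` binding needed.
    let part = vec![5, 6];
    all.extend(part);
    assert_eq!(all, [1, 2, 3, 4, 5, 6]);
}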


@@ -225,11 +225,12 @@ fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
     // and we want to move everything to the same LLVM context. Currently the
     // way we know of to do that is to serialize them to a string and them parse
     // them later. Not great but hey, that's why it's "fat" LTO, right?
-    for module in modules {
+    serialized_modules.extend(modules.into_iter().map(|module| {
         let buffer = ModuleBuffer::new(module.module_llvm.llmod());
         let llmod_id = CString::new(&module.name[..]).unwrap();
-        serialized_modules.push((SerializedModule::Local(buffer), llmod_id));
-    }
+        (SerializedModule::Local(buffer), llmod_id)
+    }));
     // For all serialized bitcode files we parse them and link them in as we did
     // above, this is all mostly handled in C++. Like above, though, we don't
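
The push-per-module loop becomes a single `extend` over a mapped iterator. A self-contained sketch of the same shape, with plain strings standing in for `ModuleBuffer` and `SerializedModule` (assumed stand-ins, not the rustc types):

struct Module { name: String }

fn serialize(module: &Module) -> String {
    // Placeholder for ModuleBuffer::new(...): pretend the "bitcode" is a string.
    format!("bitcode for {}", module.name)
}

fn main() {
    let modules = vec![
        Module { name: "a".into() },
        Module { name: "b".into() },
    ];

    let mut serialized_modules: Vec<(String, String)> = Vec::new();

    // One `extend` over a mapped iterator instead of a push-per-item loop:
    // the vector iterator's exact size hint lets the target reserve space
    // once, and `into_iter()` hands each module over by value.
    serialized_modules.extend(modules.into_iter().map(|module| {
        let buffer = serialize(&module);
        (buffer, module.name)
    }));

    assert_eq!(serialized_modules.len(), 2);
    assert_eq!(serialized_modules[0].1, "a");
}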
@@ -349,9 +350,10 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         .map(|&(_, ref wp)| (wp.cgu_name.clone(), wp.clone()))
         .collect();
-    let mut thin_buffers = Vec::new();
-    let mut module_names = Vec::new();
-    let mut thin_modules = Vec::new();
+    let full_scope_len = modules.len() + serialized_modules.len() + cached_modules.len();
+    let mut thin_buffers = Vec::with_capacity(modules.len());
+    let mut module_names = Vec::with_capacity(full_scope_len);
+    let mut thin_modules = Vec::with_capacity(full_scope_len);
     // FIXME: right now, like with fat LTO, we serialize all in-memory
     // modules before working with them and ThinLTO. We really
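
Here the three vectors are pre-sized from lengths that are already known before the loops run. A tiny sketch of why that helps, with made-up counts rather than real module lists:

fn main() {
    let local = 3;
    let serialized = 2;
    let cached = 1;
    let full_scope_len = local + serialized + cached;

    // Reserve the whole allocation up front instead of letting the Vec
    // grow (and copy its contents) several times while being filled.
    let mut names: Vec<String> = Vec::with_capacity(full_scope_len);
    let reserved = names.capacity();
    for i in 0..full_scope_len {
        names.push(format!("module-{}", i));
    }

    // Pushing exactly the reserved number of items never triggers a
    // reallocation, so the capacity is unchanged.
    assert_eq!(names.capacity(), reserved);
    assert_eq!(names.len(), full_scope_len);
}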
@@ -360,7 +362,7 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
     // into the global index. It turns out that this loop is by far
     // the most expensive portion of this small bit of global
     // analysis!
-    for (i, module) in modules.iter().enumerate() {
+    for (i, module) in modules.into_iter().enumerate() {
         info!("local module: {} - {}", i, module.name);
         let name = CString::new(module.name.clone()).unwrap();
         let buffer = ThinBuffer::new(module.module_llvm.llmod());
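
The loop now consumes `modules` by value (`into_iter`) instead of borrowing them (`iter`). A minimal illustration of the difference, with a placeholder `Module` type (not the rustc one):

struct Module { name: String }

fn main() {
    let modules = vec![Module { name: "a".into() }, Module { name: "b".into() }];

    // By value: each `module` is owned, so fields can be moved out without
    // cloning, and `modules` is consumed by the loop.
    let mut names = Vec::with_capacity(modules.len());
    for (i, module) in modules.into_iter().enumerate() {
        println!("local module: {} - {}", i, module.name);
        names.push(module.name); // move, no clone needed
    }
    assert_eq!(names, ["a", "b"]);

    // With `iter()` the loop would only see `&Module`, and taking
    // `module.name` out would require a clone instead.
}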
@@ -406,7 +408,7 @@ fn thin_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
     // incremental ThinLTO first where we could actually avoid
     // looking at upstream modules entirely sometimes (the contents,
     // we must always unconditionally look at the index).
-    let mut serialized = Vec::new();
+    let mut serialized = Vec::with_capacity(serialized_modules.len() + cached_modules.len());
     let cached_modules = cached_modules.into_iter().map(|(sm, wp)| {
         (sm, CString::new(wp.cgu_name).unwrap())


@@ -31,14 +31,12 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
         return Vec::new();
     }
-    let mut flags = Vec::new();
     debug!("preparing the RPATH!");
     let libs = config.used_crates.clone();
     let libs = libs.iter().filter_map(|&(_, ref l)| l.option()).collect::<Vec<_>>();
     let rpaths = get_rpaths(config, &libs);
-    flags.extend_from_slice(&rpaths_to_flags(&rpaths));
+    let mut flags = rpaths_to_flags(&rpaths);
     // Use DT_RUNPATH instead of DT_RPATH if available
     if config.linker_is_gnu {
@@ -49,7 +47,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
 }
 fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
-    let mut ret = Vec::new();
+    let mut ret = Vec::with_capacity(rpaths.len()); // the minimum needed capacity
     for rpath in rpaths {
         if rpath.contains(',') {
             ret.push("-Wl,-rpath".into());
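
The `// the minimum needed capacity` comment is the interesting detail: each rpath produces at least one flag, but a comma-containing rpath produces several, so `rpaths.len()` is a lower bound rather than the exact final length. A self-contained sketch in the same spirit (the branch bodies are illustrative, not copied verbatim from the compiler):

fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
    // At least one flag per rpath, possibly more: a lower-bound reservation.
    let mut ret = Vec::with_capacity(rpaths.len());
    for rpath in rpaths {
        if rpath.contains(',') {
            // A comma would split the argument inside `-Wl,...`, so pass the
            // path as separate linker arguments instead (three pushes here).
            ret.push("-Wl,-rpath".into());
            ret.push("-Xlinker".into());
            ret.push(rpath.clone());
        } else {
            ret.push(format!("-Wl,-rpath,{}", rpath));
        }
    }
    ret
}

fn main() {
    let flags = rpaths_to_flags(&["$ORIGIN/../lib".to_string()]);
    assert_eq!(flags, ["-Wl,-rpath,$ORIGIN/../lib"]);
}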


@@ -229,10 +229,11 @@ fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             "__llvm_profile_raw_version",
             "__llvm_profile_filename",
         ];
-        for sym in &PROFILER_WEAK_SYMBOLS {
+        symbols.extend(PROFILER_WEAK_SYMBOLS.iter().map(|sym| {
             let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(sym));
-            symbols.push((exported_symbol, SymbolExportLevel::C));
-        }
+            (exported_symbol, SymbolExportLevel::C)
+        }));
     }
     if tcx.sess.crate_types.borrow().contains(&config::CrateType::Dylib) {
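
Same push-loop-to-`extend` rewrite as in the LTO hunk, this time over a fixed-size array of weak profiler symbol names. A small sketch with plain strings standing in for `ExportedSymbol` and `SymbolExportLevel` (assumed stand-ins):

const PROFILER_WEAK_SYMBOLS: [&str; 2] = [
    "__llvm_profile_raw_version",
    "__llvm_profile_filename",
];

fn main() {
    let mut symbols: Vec<(String, &'static str)> = Vec::new();

    // One `extend` call maps each weak symbol name to its export entry; the
    // array iterator's exact length lets the vector reserve space up front.
    symbols.extend(PROFILER_WEAK_SYMBOLS.iter().map(|sym| {
        (format!("exported:{}", sym), "C")
    }));

    assert_eq!(symbols.len(), PROFILER_WEAK_SYMBOLS.len());
}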