Auto merge of #123838 - matthiaskrgr:rollup-zkgwyye, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - #123599 (remove some things that do not need to be)
 - #123763 (Set the host library path in run-make v2)
 - #123775 (Make `PlaceRef` and `OperandValue::Ref` share a common `PlaceValue` type) (see the sketch below)
 - #123789 (move QueryKeyStringCache from rustc_middle to rustc_query_impl, where it is actually used)
 - #123826 (Move rare overflow error to a cold function)
 - #123827 (linker: Avoid some allocations in search directory iteration)
 - #123829 (Fix revisions syntax in cfg(ub_checks) test)

r? `@ghost`
`@rustbot` modify labels: rollup
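
Most of the diff below comes from #123775. As orientation, here is a condensed, self-contained Rust sketch of the new shape; the type and field names are taken from the hunks below, but the layout type and the backend value type are simplified stand-ins, not the real rustc definitions:

#![allow(dead_code)]

#[derive(Copy, Clone, Debug, PartialEq)]
struct Align(u64); // stand-in for rustc_target::abi::Align

#[derive(Copy, Clone, Debug)]
struct PlaceValue<V> {
    llval: V,           // pointer to the contents of the place
    llextra: Option<V>, // extra data (vtable or length) if the pointee is unsized
    align: Align,       // the alignment we know for this place
}

impl<V> PlaceValue<V> {
    // Ordinary constructor for `Sized` types: no extra data.
    fn new_sized(llval: V, align: Align) -> Self {
        PlaceValue { llval, llextra: None, align }
    }
}

// A `PlaceRef` is now "a `PlaceValue` plus its layout"...
struct PlaceRef<L, V> {
    val: PlaceValue<V>,
    layout: L,
}

// ...and the by-ref operand variant carries a whole `PlaceValue`
// instead of three loose fields (was `Ref(V, Option<V>, Align)`).
enum OperandValue<V> {
    Ref(PlaceValue<V>),
    Immediate(V),
    Pair(V, V),
    ZeroSized,
}

fn main() {
    let place = PlaceValue::new_sized(0xdead_beef_u64, Align(8));
    assert_eq!(place.llextra, None);
    let op = OperandValue::Ref(place);
    if let OperandValue::Ref(pv) = op {
        println!("ptr = {:#x}, align = {:?}", pv.llval, pv.align);
    }
}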
bors 2024-04-12 04:16:12 +00:00
commit 6475796a81
37 changed files with 335 additions and 250 deletions


@@ -4263,7 +4263,6 @@ dependencies = [
  "either",
  "field-offset",
  "gsgdt",
- "measureme",
  "polonius-engine",
  "rustc-rayon",
  "rustc-rayon-core",


@@ -960,7 +960,7 @@ fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) {
                 sm.span_to_diagnostic_string(span)
             }
         };
-        let mut spans: MultiSpan = spans.clone().into();
+        let mut spans: MultiSpan = spans.into();
         // Point at all the `continue`s and explicit `break`s in the relevant loops.
         for (desc, elements) in [
             ("`break` exits", &finder.found_breaks),


@@ -333,10 +333,8 @@ fn find_path_suggestion(
         .flatten()
         .take(4);
-    for new_path in root_absolute.chain(add).chain(remove) {
-        if source_map.file_exists(&base_dir.join(&new_path)) {
-            return Some(new_path);
-        }
-    }
-    None
+    root_absolute
+        .chain(add)
+        .chain(remove)
+        .find(|new_path| source_map.file_exists(&base_dir.join(&new_path)))
 }
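
The hunk above is a pure refactor from a manual search loop to `Iterator::find`. A standalone illustration of the equivalence (toy functions, not from the PR):

// Both functions return the first even element, if any.
fn first_even_loop(xs: &[i32]) -> Option<i32> {
    for &x in xs {
        if x % 2 == 0 {
            return Some(x);
        }
    }
    None
}

fn first_even_find(xs: &[i32]) -> Option<i32> {
    // `find` stops at the first element matching the predicate,
    // exactly like the early `return` in the loop above.
    xs.iter().copied().find(|&x| x % 2 == 0)
}

fn main() {
    let xs = [1, 3, 4, 5];
    assert_eq!(first_even_loop(&xs), first_even_find(&xs));
}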


@@ -974,7 +974,7 @@ fn load_operand(
         &mut self,
         place: PlaceRef<'tcx, RValue<'gcc>>,
     ) -> OperandRef<'tcx, RValue<'gcc>> {
-        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
+        assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

         if place.layout.is_zst() {
             return OperandRef::zero_sized(place.layout);
@@ -999,10 +999,11 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
             }
         }

-        let val = if let Some(llextra) = place.llextra {
-            OperandValue::Ref(place.llval, Some(llextra), place.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_gcc_immediate() {
-            let load = self.load(place.layout.gcc_type(self), place.llval, place.align);
+            let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
             if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
                 scalar_load_metadata(self, load, scalar);
             }
@@ -1012,9 +1013,9 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
             let mut load = |i, scalar: &abi::Scalar, align| {
                 let llptr = if i == 0 {
-                    place.llval
+                    place.val.llval
                 } else {
-                    self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
+                    self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
                 };
                 let llty = place.layout.scalar_pair_element_gcc_type(self, i);
                 let load = self.load(llty, llptr, align);
@@ -1027,11 +1028,11 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
             };

             OperandValue::Pair(
-                load(0, a, place.align),
-                load(1, b, place.align.restrict_for_offset(b_offset)),
+                load(0, a, place.val.align),
+                load(1, b, place.val.align.restrict_for_offset(b_offset)),
             )
         } else {
-            OperandValue::Ref(place.llval, None, place.align)
+            OperandValue::Ref(place.val)
         };

         OperandRef { val, layout: place.layout }
@@ -1045,8 +1046,8 @@ fn write_operand_repeatedly(
     ) {
         let zero = self.const_usize(0);
         let count = self.const_usize(count);
-        let start = dest.project_index(self, zero).llval;
-        let end = dest.project_index(self, count).llval;
+        let start = dest.project_index(self, zero).val.llval;
+        let end = dest.project_index(self, count).val.llval;

         let header_bb = self.append_sibling_block("repeat_loop_header");
         let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -1064,7 +1065,7 @@ fn write_operand_repeatedly(
         self.cond_br(keep_going, body_bb, next_bb);

         self.switch_to_block(body_bb);
-        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
+        let align = dest.val.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
         cg_elem.val.store(self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));

         let next = self.inbounds_gep(


@@ -11,7 +11,7 @@
 use rustc_codegen_ssa::common::IntPredicate;
 use rustc_codegen_ssa::errors::InvalidMonomorphization;
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::{
     ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
 };
@@ -354,7 +354,7 @@ fn codegen_intrinsic_call(
                     let block = self.llbb();
                     let extended_asm = block.add_extended_asm(None, "");
-                    extended_asm.add_input_operand(None, "r", result.llval);
+                    extended_asm.add_input_operand(None, "r", result.val.llval);
                     extended_asm.add_clobber("memory");
                     extended_asm.set_volatile_flag(true);
@@ -388,8 +388,8 @@ fn codegen_intrinsic_call(
         if !fn_abi.ret.is_ignore() {
             if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
                 let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
-                let ptr = self.pointercast(result.llval, ptr_llty);
-                self.store(llval, ptr, result.align);
+                let ptr = self.pointercast(result.val.llval, ptr_llty);
+                self.store(llval, ptr, result.val.align);
             } else {
                 OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
                     .val
@@ -502,7 +502,7 @@ fn store(
             return;
         }
         if self.is_sized_indirect() {
-            OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
+            OperandValue::Ref(PlaceValue::new_sized(val, self.layout.align.abi)).store(bx, dst)
         } else if self.is_unsized_indirect() {
             bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
         } else if let PassMode::Cast { ref cast, .. } = self.mode {
@@ -511,7 +511,7 @@ fn store(
             let can_store_through_cast_ptr = false;
             if can_store_through_cast_ptr {
                 let cast_ptr_llty = bx.type_ptr_to(cast.gcc_type(bx));
-                let cast_dst = bx.pointercast(dst.llval, cast_ptr_llty);
+                let cast_dst = bx.pointercast(dst.val.llval, cast_ptr_llty);
                 bx.store(val, cast_dst, self.layout.align.abi);
             } else {
                 // The actual return type is a struct, but the ABI
@@ -539,7 +539,7 @@ fn store(
                 // ... and then memcpy it to the intended destination.
                 bx.memcpy(
-                    dst.llval,
+                    dst.val.llval,
                     self.layout.align.abi,
                     llscratch,
                     scratch_align,
@@ -571,7 +571,12 @@ fn store_fn_arg<'a>(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { meta_attrs: Some(_), .. } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { meta_attrs: None, .. }


@@ -82,7 +82,7 @@ macro_rules! require_simd {
             let place = PlaceRef::alloca(bx, args[0].layout);
             args[0].val.store(bx, place);
             let int_ty = bx.type_ix(expected_bytes * 8);
-            let ptr = bx.pointercast(place.llval, bx.cx.type_ptr_to(int_ty));
+            let ptr = bx.pointercast(place.val.llval, bx.cx.type_ptr_to(int_ty));
             bx.load(int_ty, ptr, Align::ONE)
         }
         _ => return_error!(InvalidMonomorphization::InvalidBitmask {


@@ -7,7 +7,7 @@
 use crate::value::Value;

 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::MemFlags;
 use rustc_middle::bug;
@@ -207,7 +207,7 @@ fn store(
             // Sized indirect arguments
             PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                 let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
-                OperandValue::Ref(val, None, align).store(bx, dst);
+                OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
             }
             // Unsized indirect qrguments
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
@@ -233,7 +233,7 @@ fn store(
                 bx.store(val, llscratch, scratch_align);
                 // ... and then memcpy it to the intended destination.
                 bx.memcpy(
-                    dst.llval,
+                    dst.val.llval,
                     self.layout.align.abi,
                     llscratch,
                     scratch_align,
@@ -265,7 +265,12 @@ fn store_fn_arg(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }


@@ -535,7 +535,7 @@ fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx
                 panic!("unsized locals must not be `extern` types");
             }
         }
-        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
+        assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

         if place.layout.is_zst() {
             return OperandRef::zero_sized(place.layout);
@@ -579,13 +579,14 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             }
         }

-        let val = if let Some(llextra) = place.llextra {
-            OperandValue::Ref(place.llval, Some(llextra), place.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_llvm_immediate() {
             let mut const_llval = None;
             let llty = place.layout.llvm_type(self);
             unsafe {
-                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
+                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
                     if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                         if let Some(init) = llvm::LLVMGetInitializer(global) {
                             if self.val_ty(init) == llty {
@@ -596,7 +597,7 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
                 }
             }
             let llval = const_llval.unwrap_or_else(|| {
-                let load = self.load(llty, place.llval, place.align);
+                let load = self.load(llty, place.val.llval, place.val.align);
                 if let abi::Abi::Scalar(scalar) = place.layout.abi {
                     scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
                 }
@@ -608,9 +609,9 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
                 let llptr = if i == 0 {
-                    place.llval
+                    place.val.llval
                 } else {
-                    self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
+                    self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
                 };
                 let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                 let load = self.load(llty, llptr, align);
@@ -619,11 +620,11 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             };

             OperandValue::Pair(
-                load(0, a, place.layout, place.align, Size::ZERO),
-                load(1, b, place.layout, place.align.restrict_for_offset(b_offset), b_offset),
+                load(0, a, place.layout, place.val.align, Size::ZERO),
+                load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
             )
         } else {
-            OperandValue::Ref(place.llval, None, place.align)
+            OperandValue::Ref(place.val)
         };

         OperandRef { val, layout: place.layout }


@@ -264,7 +264,7 @@ fn codegen_intrinsic_call(
                     llvm::LLVMSetAlignment(load, align);
                 }
                 if !result.layout.is_zst() {
-                    self.store(load, result.llval, result.align);
+                    self.store_to_place(load, result.val);
                 }
                 return Ok(());
             }
@@ -428,7 +428,7 @@ fn codegen_intrinsic_call(
             sym::black_box => {
                 args[0].val.store(self, result);
-                let result_val_span = [result.llval];
+                let result_val_span = [result.val.llval];
                 // We need to "use" the argument in some way LLVM can't introspect, and on
                 // targets that support it we can typically leverage inline assembly to do
                 // this. LLVM's interpretation of inline assembly is that it's, well, a black
@@ -482,7 +482,7 @@ fn codegen_intrinsic_call(
         if !fn_abi.ret.is_ignore() {
             if let PassMode::Cast { .. } = &fn_abi.ret.mode {
-                self.store(llval, result.llval, result.align);
+                self.store(llval, result.val.llval, result.val.align);
             } else {
                 OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
                     .val
@@ -1065,7 +1065,7 @@ macro_rules! require_simd {
             let place = PlaceRef::alloca(bx, args[0].layout);
             args[0].val.store(bx, place);
             let int_ty = bx.type_ix(expected_bytes * 8);
-            bx.load(int_ty, place.llval, Align::ONE)
+            bx.load(int_ty, place.val.llval, Align::ONE)
         }
         _ => return_error!(InvalidMonomorphization::InvalidBitmask {
             span,


@@ -56,8 +56,13 @@
 pub struct SearchPaths(OnceCell<Vec<PathBuf>>);

 impl SearchPaths {
-    pub(super) fn get(&self, sess: &Session) -> &[PathBuf] {
-        self.0.get_or_init(|| archive_search_paths(sess))
+    pub(super) fn get(&self, sess: &Session) -> impl Iterator<Item = &Path> {
+        let native_search_paths = || {
+            Vec::from_iter(
+                sess.target_filesearch(PathKind::Native).search_path_dirs().map(|p| p.to_owned()),
+            )
+        };
+        self.0.get_or_init(native_search_paths).iter().map(|p| &**p)
     }
 }
@@ -310,8 +315,6 @@ fn link_rlib<'a>(
     flavor: RlibFlavor,
     tmpdir: &MaybeTempDir,
 ) -> Result<Box<dyn ArchiveBuilder + 'a>, ErrorGuaranteed> {
-    let lib_search_paths = archive_search_paths(sess);
-
     let mut ab = archive_builder_builder.new_archive_builder(sess);

     let trailing_metadata = match flavor {
@@ -378,26 +381,24 @@ fn link_rlib<'a>(
     // feature then we'll need to figure out how to record what objects were
     // loaded from the libraries found here and then encode that into the
     // metadata of the rlib we're generating somehow.
+    let search_paths = SearchPaths::default();
     for lib in codegen_results.crate_info.used_libraries.iter() {
         let NativeLibKind::Static { bundle: None | Some(true), .. } = lib.kind else {
             continue;
         };
+        let search_paths = search_paths.get(sess);
         if flavor == RlibFlavor::Normal
             && let Some(filename) = lib.filename
         {
-            let path = find_native_static_library(filename.as_str(), true, &lib_search_paths, sess);
+            let path = find_native_static_library(filename.as_str(), true, search_paths, sess);
             let src = read(path)
                 .map_err(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }))?;
             let (data, _) = create_wrapper_file(sess, ".bundled_lib".to_string(), &src);
             let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str());
             packed_bundled_libs.push(wrapper_file);
         } else {
-            let path = find_native_static_library(
-                lib.name.as_str(),
-                lib.verbatim,
-                &lib_search_paths,
-                sess,
-            );
+            let path =
+                find_native_static_library(lib.name.as_str(), lib.verbatim, search_paths, sess);
             ab.add_archive(&path, Box::new(|_| false)).unwrap_or_else(|error| {
                 sess.dcx().emit_fatal(errors::AddNativeLibrary { library_path: path, error })
             });
@@ -1445,10 +1446,6 @@ fn preserve_objects_for_their_debuginfo(sess: &Session) -> (bool, bool) {
     }
 }

-fn archive_search_paths(sess: &Session) -> Vec<PathBuf> {
-    sess.target_filesearch(PathKind::Native).search_path_dirs()
-}
-
 #[derive(PartialEq)]
 enum RlibFlavor {
     Normal,
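
The `SearchPaths` change above (from #123827) swaps an eagerly built `Vec<PathBuf>` per caller for a lazily initialized list that hands out borrowing iterators. A minimal sketch of that pattern, with a hard-coded path list standing in for the real `sess.target_filesearch(PathKind::Native).search_path_dirs()` lookup:

use std::cell::OnceCell;
use std::path::{Path, PathBuf};

// Compute the search-path list at most once, then hand out
// borrowing iterators instead of cloning or reallocating.
struct SearchPaths(OnceCell<Vec<PathBuf>>);

impl SearchPaths {
    fn get(&self) -> impl Iterator<Item = &Path> {
        self.0
            .get_or_init(|| vec![PathBuf::from("/usr/lib"), PathBuf::from("/usr/local/lib")])
            .iter()
            .map(|p| p.as_path())
    }
}

fn main() {
    let paths = SearchPaths(OnceCell::new());
    // Each call re-borrows the cached Vec; nothing is recomputed.
    for p in paths.get() {
        println!("{}", p.display());
    }
}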


@@ -1,6 +1,6 @@
 use super::operand::OperandRef;
 use super::operand::OperandValue::{Immediate, Pair, Ref, ZeroSized};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{CachedLlbb, FunctionCx, LocalRef};

 use crate::base;
@@ -242,7 +242,7 @@ fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
                 bx.switch_to_block(fx.llbb(target));
                 fx.set_debug_loc(bx, self.terminator.source_info);
                 for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.llval, tmp.layout.size);
+                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
                 }
                 fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
             }
@@ -256,7 +256,7 @@ fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
             if let Some((ret_dest, target)) = destination {
                 for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.llval, tmp.layout.size);
+                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
                 }
                 fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                 self.funclet_br(fx, bx, target, mergeable_succ)
@@ -431,7 +431,7 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
             let va_list_arg_idx = self.fn_abi.args.len();
             match self.locals[mir::Local::from_usize(1 + va_list_arg_idx)] {
                 LocalRef::Place(va_list) => {
-                    bx.va_end(va_list.llval);
+                    bx.va_end(va_list.val.llval);
                 }
                 _ => bug!("C-variadic function must have a `VaList` place"),
             }
@@ -455,8 +455,8 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
             PassMode::Direct(_) | PassMode::Pair(..) => {
                 let op = self.codegen_consume(bx, mir::Place::return_place().as_ref());
-                if let Ref(llval, _, align) = op.val {
-                    bx.load(bx.backend_type(op.layout), llval, align)
+                if let Ref(place_val) = op.val {
+                    bx.load_from_place(bx.backend_type(op.layout), place_val)
                 } else {
                     op.immediate_or_packed_pair(bx)
                 }
@@ -466,21 +466,23 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
                 let op = match self.locals[mir::RETURN_PLACE] {
                     LocalRef::Operand(op) => op,
                     LocalRef::PendingOperand => bug!("use of return before def"),
-                    LocalRef::Place(cg_place) => OperandRef {
-                        val: Ref(cg_place.llval, None, cg_place.align),
-                        layout: cg_place.layout,
-                    },
+                    LocalRef::Place(cg_place) => {
+                        OperandRef { val: Ref(cg_place.val), layout: cg_place.layout }
+                    }
                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                 };
                 let llslot = match op.val {
                     Immediate(_) | Pair(..) => {
                         let scratch = PlaceRef::alloca(bx, self.fn_abi.ret.layout);
                         op.val.store(bx, scratch);
-                        scratch.llval
+                        scratch.val.llval
                     }
-                    Ref(llval, _, align) => {
-                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
-                        llval
+                    Ref(place_val) => {
+                        assert_eq!(
+                            place_val.align, op.layout.align.abi,
+                            "return place is unaligned!"
+                        );
+                        place_val.llval
                     }
                     ZeroSized => bug!("ZST return value shouldn't be in PassMode::Cast"),
                 };
@@ -512,11 +514,11 @@ fn codegen_drop_terminator(
         let place = self.codegen_place(bx, location.as_ref());
         let (args1, args2);
-        let mut args = if let Some(llextra) = place.llextra {
-            args2 = [place.llval, llextra];
+        let mut args = if let Some(llextra) = place.val.llextra {
+            args2 = [place.val.llval, llextra];
             &args2[..]
         } else {
-            args1 = [place.llval];
+            args1 = [place.val.llval];
             &args1[..]
         };
         let (drop_fn, fn_abi, drop_instance) =
@@ -918,7 +920,7 @@ fn codegen_call_terminator(
             let dest = match ret_dest {
                 _ if fn_abi.ret.is_indirect() => llargs[0],
                 ReturnDest::Nothing => bx.const_undef(bx.type_ptr()),
-                ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
+                ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.val.llval,
                 ReturnDest::DirectOperand(_) => {
                     bug!("Cannot use direct operand with an intrinsic call")
                 }
@@ -951,7 +953,7 @@ fn codegen_call_terminator(
             match Self::codegen_intrinsic_call(bx, instance, fn_abi, &args, dest, span) {
                 Ok(()) => {
                     if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
-                        self.store_return(bx, ret_dest, &fn_abi.ret, dst.llval);
+                        self.store_return(bx, ret_dest, &fn_abi.ret, dst.val.llval);
                     }

                     return if let Some(target) = target {
@@ -1032,7 +1034,7 @@ fn codegen_call_terminator(
                         llargs.push(data_ptr);
                         continue 'make_args;
                     }
-                    Ref(data_ptr, Some(meta), _) => {
+                    Ref(PlaceValue { llval: data_ptr, llextra: Some(meta), .. }) => {
                         // by-value dynamic dispatch
                         llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                             bx,
@@ -1058,16 +1060,16 @@ fn codegen_call_terminator(
                         span_bug!(span, "can't codegen a virtual call on {:#?}", op);
                     }
                     let place = op.deref(bx.cx());
-                    let data_ptr = place.project_field(bx, 0);
-                    let meta_ptr = place.project_field(bx, 1);
-                    let meta = bx.load_operand(meta_ptr);
+                    let data_place = place.project_field(bx, 0);
+                    let meta_place = place.project_field(bx, 1);
+                    let meta = bx.load_operand(meta_place);
                     llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                         bx,
                         meta.immediate(),
                         op.layout.ty,
                         fn_abi,
                     ));
-                    llargs.push(data_ptr.llval);
+                    llargs.push(data_place.val.llval);
                     continue;
                 }
                 _ => {
@@ -1079,12 +1081,12 @@ fn codegen_call_terminator(
             // The callee needs to own the argument memory if we pass it
             // by-ref, so make a local copy of non-immediate constants.
             match (&arg.node, op.val) {
-                (&mir::Operand::Copy(_), Ref(_, None, _))
-                | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
+                (&mir::Operand::Copy(_), Ref(PlaceValue { llextra: None, .. }))
+                | (&mir::Operand::Constant(_), Ref(PlaceValue { llextra: None, .. })) => {
                     let tmp = PlaceRef::alloca(bx, op.layout);
-                    bx.lifetime_start(tmp.llval, tmp.layout.size);
+                    bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                     op.val.store(bx, tmp);
-                    op.val = Ref(tmp.llval, None, tmp.align);
+                    op.val = Ref(tmp.val);
                     copied_constant_arguments.push(tmp);
                 }
                 _ => {}
@@ -1428,7 +1430,7 @@ fn codegen_argument(
                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
             },
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => match op.val {
-                Ref(a, Some(b), _) => {
+                Ref(PlaceValue { llval: a, llextra: Some(b), .. }) => {
                     llargs.push(a);
                     llargs.push(b);
                     return;
@@ -1450,34 +1452,34 @@ fn codegen_argument(
                     };
                     let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
                     op.val.store(bx, scratch);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 PassMode::Cast { .. } => {
                     let scratch = PlaceRef::alloca(bx, arg.layout);
                     op.val.store(bx, scratch);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
             },
-            Ref(llval, llextra, align) => match arg.mode {
+            Ref(op_place_val) => match arg.mode {
                 PassMode::Indirect { attrs, .. } => {
                     let required_align = match attrs.pointee_align {
                         Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                         None => arg.layout.align.abi,
                     };
-                    if align < required_align {
+                    if op_place_val.align < required_align {
                         // For `foo(packed.large_field)`, and types with <4 byte alignment on x86,
                         // alignment requirements may be higher than the type's alignment, so copy
                         // to a higher-aligned alloca.
                         let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                        let op_place = PlaceRef { llval, llextra, layout: op.layout, align };
+                        let op_place = PlaceRef { val: op_place_val, layout: op.layout };
                         bx.typed_place_copy(scratch, op_place);
-                        (scratch.llval, scratch.align, true)
+                        (scratch.val.llval, scratch.val.align, true)
                     } else {
-                        (llval, align, true)
+                        (op_place_val.llval, op_place_val.align, true)
                     }
                 }
-                _ => (llval, align, true),
+                _ => (op_place_val.llval, op_place_val.align, true),
             },
             ZeroSized => match arg.mode {
                 PassMode::Indirect { on_stack, .. } => {
@@ -1490,7 +1492,7 @@ fn codegen_argument(
                     // a pointer for `repr(C)` structs even when empty, so get
                     // one from an `alloca` (which can be left uninitialized).
                     let scratch = PlaceRef::alloca(bx, arg.layout);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 _ => bug!("ZST {op:?} wasn't ignored, but was passed with abi {arg:?}"),
             },
@@ -1557,15 +1559,16 @@ fn codegen_arguments_untupled(
         let tuple = self.codegen_operand(bx, operand);

         // Handle both by-ref and immediate tuples.
-        if let Ref(llval, None, align) = tuple.val {
-            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
+        if let Ref(place_val) = tuple.val {
+            if place_val.llextra.is_some() {
+                bug!("closure arguments must be sized");
+            }
+            let tuple_ptr = PlaceRef { val: place_val, layout: tuple.layout };
             for i in 0..tuple.layout.fields.count() {
                 let field_ptr = tuple_ptr.project_field(bx, i);
                 let field = bx.load_operand(field_ptr);
                 self.codegen_argument(bx, field, llargs, &args[i]);
             }
-        } else if let Ref(_, Some(_), _) = tuple.val {
-            bug!("closure arguments must be sized")
         } else {
             // If the tuple is immediate, the elements are as well.
             for i in 0..tuple.layout.fields.count() {
@@ -1782,7 +1785,7 @@ fn make_return_dest(
             // but the calling convention has an indirect return.
             let tmp = PlaceRef::alloca(bx, fn_ret.layout);
             tmp.storage_live(bx);
-            llargs.push(tmp.llval);
+            llargs.push(tmp.val.llval);
             ReturnDest::IndirectOperand(tmp, index)
         } else if intrinsic.is_some() {
             // Currently, intrinsics always need a location to store
@@ -1803,7 +1806,7 @@ fn make_return_dest(
             self.codegen_place(bx, mir::PlaceRef { local: dest.local, projection: dest.projection })
         };
         if fn_ret.is_indirect() {
-            if dest.align < dest.layout.align.abi {
+            if dest.val.align < dest.layout.align.abi {
                 // Currently, MIR code generation does not create calls
                 // that store directly to fields of packed structs (in
                 // fact, the calls it creates write only to temps).
@@ -1812,7 +1815,7 @@ fn make_return_dest(
                 // to create a temporary.
                 span_bug!(self.mir.span, "can't directly store to unaligned value");
             }
-            llargs.push(dest.llval);
+            llargs.push(dest.val.llval);
             ReturnDest::Nothing
         } else {
             ReturnDest::Store(dest)


@@ -14,7 +14,7 @@
 use rustc_target::abi::{Abi, FieldIdx, FieldsShape, Size, VariantIdx};

 use super::operand::{OperandRef, OperandValue};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{FunctionCx, LocalRef};

 use std::ops::Range;
@@ -252,7 +252,7 @@ fn spill_operand_to_stack(
         // at least for the cases which LLVM handles correctly.
         let spill_slot = PlaceRef::alloca(bx, operand.layout);
         if let Some(name) = name {
-            bx.set_var_name(spill_slot.llval, &(name + ".dbg.spill"));
+            bx.set_var_name(spill_slot.val.llval, &(name + ".dbg.spill"));
         }
         operand.val.store(bx, spill_slot);
         spill_slot
@@ -331,10 +331,10 @@ pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) {
         if let Some(name) = &name {
             match local_ref {
                 LocalRef::Place(place) | LocalRef::UnsizedPlace(place) => {
-                    bx.set_var_name(place.llval, name);
+                    bx.set_var_name(place.val.llval, name);
                 }
                 LocalRef::Operand(operand) => match operand.val {
-                    OperandValue::Ref(x, ..) | OperandValue::Immediate(x) => {
+                    OperandValue::Ref(PlaceValue { llval: x, .. }) | OperandValue::Immediate(x) => {
                         bx.set_var_name(x, name);
                     }
                     OperandValue::Pair(a, b) => {
@@ -417,16 +417,16 @@ fn debug_introduce_local_as_var(
                 let ptr_ty = Ty::new_mut_ptr(bx.tcx(), place.layout.ty);
                 let ptr_layout = bx.layout_of(ptr_ty);
                 let alloca = PlaceRef::alloca(bx, ptr_layout);
-                bx.set_var_name(alloca.llval, &(var.name.to_string() + ".dbg.spill"));
+                bx.set_var_name(alloca.val.llval, &(var.name.to_string() + ".dbg.spill"));

                 // Write the pointer to the variable
-                bx.store(place.llval, alloca.llval, alloca.align);
+                bx.store_to_place(place.val.llval, alloca.val);

                 // Point the debug info to `*alloca` for the current variable
                 bx.dbg_var_addr(
                     dbg_var,
                     dbg_loc,
-                    alloca.llval,
+                    alloca.val.llval,
                     Size::ZERO,
                     &[Size::ZERO],
                     var.fragment,
@@ -435,7 +435,7 @@ fn debug_introduce_local_as_var(
                 bx.dbg_var_addr(
                     dbg_var,
                     dbg_loc,
-                    base.llval,
+                    base.val.llval,
                     direct_offset,
                     &indirect_offsets,
                     var.fragment,
@@ -553,7 +553,14 @@ pub fn compute_per_local_var_debug_info(
                         let base =
                             Self::spill_operand_to_stack(operand, Some(var.name.to_string()), bx);

-                        bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, Size::ZERO, &[], fragment);
+                        bx.dbg_var_addr(
+                            dbg_var,
+                            dbg_loc,
+                            base.val.llval,
+                            Size::ZERO,
+                            &[],
+                            fragment,
+                        );
                     }
                 }
             }


@@ -387,9 +387,9 @@ pub fn codegen_intrinsic_call(
                     let success = bx.from_immediate(success);

                     let dest = result.project_field(bx, 0);
-                    bx.store(val, dest.llval, dest.align);
+                    bx.store_to_place(val, dest.val);
                     let dest = result.project_field(bx, 1);
-                    bx.store(success, dest.llval, dest.align);
+                    bx.store_to_place(success, dest.val);
                 } else {
                     invalid_monomorphization(ty);
                 }
@@ -511,7 +511,7 @@ pub fn codegen_intrinsic_call(
         if !fn_abi.ret.is_ignore() {
             if let PassMode::Cast { .. } = &fn_abi.ret.mode {
-                bx.store(llval, result.llval, result.align);
+                bx.store_to_place(llval, result.val);
             } else {
                 OperandRef::from_immediate_or_packed_pair(bx, llval, result.layout)
                     .val


@@ -336,7 +336,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
             let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
-            bx.va_start(va_list.llval);
+            bx.va_start(va_list.val.llval);
             return LocalRef::Place(va_list);
         }


@@ -1,4 +1,4 @@
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{FunctionCx, LocalRef};

 use crate::size_of_val;
@@ -23,11 +23,14 @@ pub enum OperandValue<V> {
     /// The second value, if any, is the extra data (vtable or length)
     /// which indicates that it refers to an unsized rvalue.
     ///
-    /// An `OperandValue` has this variant for types which are neither
-    /// `Immediate` nor `Pair`s. The backend value in this variant must be a
-    /// pointer to the *non*-immediate backend type. That pointee type is the
+    /// An `OperandValue` *must* be this variant for any type for which
+    /// [`LayoutTypeMethods::is_backend_ref`] returns `true`.
+    /// (That basically amounts to "isn't one of the other variants".)
+    ///
+    /// This holds a [`PlaceValue`] (like a [`PlaceRef`] does) with a pointer
+    /// to the location holding the value. The type behind that pointer is the
     /// one returned by [`LayoutTypeMethods::backend_type`].
-    Ref(V, Option<V>, Align),
+    Ref(PlaceValue<V>),
     /// A single LLVM immediate value.
     ///
     /// An `OperandValue` *must* be this variant for any type for which
@@ -221,7 +224,8 @@ pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
             OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
         };
         let layout = cx.layout_of(projected_ty);
-        PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
+        let val = PlaceValue { llval: llptr, llextra, align: layout.align.abi };
+        PlaceRef { val, layout }
     }

     /// If this operand is a `Pair`, we return an aggregate with the two values.
@@ -361,7 +365,7 @@ pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
         } else {
             let ptr = bx.cx().type_ptr();
-            OperandValue::Ref(bx.const_poison(ptr), None, layout.align.abi)
+            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
         }
     }
@@ -409,18 +413,17 @@ pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                 // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
                 // value is through `undef`/`poison`, and the store itself is useless.
             }
-            OperandValue::Ref(llval, llextra @ None, source_align) => {
+            OperandValue::Ref(val) => {
                 assert!(dest.layout.is_sized(), "cannot directly store unsized values");
-                let source_place =
-                    PlaceRef { llval, llextra, align: source_align, layout: dest.layout };
+                if val.llextra.is_some() {
+                    bug!("cannot directly store unsized values");
+                }
+                let source_place = PlaceRef { val, layout: dest.layout };
                 bx.typed_place_copy_with_flags(dest, source_place, flags);
             }
-            OperandValue::Ref(_, Some(_), _) => {
-                bug!("cannot directly store unsized values");
-            }
             OperandValue::Immediate(s) => {
                 let val = bx.from_immediate(s);
-                bx.store_with_flags(val, dest.llval, dest.align, flags);
+                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
             }
             OperandValue::Pair(a, b) => {
                 let Abi::ScalarPair(a_scalar, b_scalar) = dest.layout.abi else {
@@ -429,12 +432,12 @@ pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                 let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);

                 let val = bx.from_immediate(a);
-                let align = dest.align;
-                bx.store_with_flags(val, dest.llval, align, flags);
+                let align = dest.val.align;
+                bx.store_with_flags(val, dest.val.llval, align, flags);

-                let llptr = bx.inbounds_ptradd(dest.llval, bx.const_usize(b_offset.bytes()));
+                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                 let val = bx.from_immediate(b);
-                let align = dest.align.restrict_for_offset(b_offset);
+                let align = dest.val.align.restrict_for_offset(b_offset);
                 bx.store_with_flags(val, llptr, align, flags);
             }
         }
@@ -454,7 +457,8 @@ pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest))
             .ty;

-        let OperandValue::Ref(llptr, Some(llextra), _) = self else {
+        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
+        else {
             bug!("store_unsized called with a sized value (or with an extern type)")
         };


@@ -12,25 +12,48 @@
 use rustc_target::abi::{Align, FieldsShape, Int, Pointer, TagEncoding};
 use rustc_target::abi::{VariantIdx, Variants};

+/// The location and extra runtime properties of the place.
+///
+/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
 #[derive(Copy, Clone, Debug)]
-pub struct PlaceRef<'tcx, V> {
+pub struct PlaceValue<V> {
     /// A pointer to the contents of the place.
     pub llval: V,

     /// This place's extra data if it is unsized, or `None` if null.
     pub llextra: Option<V>,

-    /// The monomorphized type of this place, including variant information.
-    pub layout: TyAndLayout<'tcx>,
-
     /// The alignment we know for this place.
     pub align: Align,
 }

+impl<V: CodegenObject> PlaceValue<V> {
+    /// Constructor for the ordinary case of `Sized` types.
+    ///
+    /// Sets `llextra` to `None`.
+    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
+        PlaceValue { llval, llextra: None, align }
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct PlaceRef<'tcx, V> {
+    /// The location and extra runtime properties of the place.
+    pub val: PlaceValue<V>,
+
+    /// The monomorphized type of this place, including variant information.
+    ///
+    /// You probably shouldn't use the alignment from this layout;
+    /// rather you should use the `.val.align` of the actual place,
+    /// which might be different from the type's normal alignment.
+    pub layout: TyAndLayout<'tcx>,
+}
+
 impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
         assert!(layout.is_sized());
-        PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
+        let val = PlaceValue::new_sized(llval, layout.align.abi);
+        PlaceRef { val, layout }
     }

     pub fn new_sized_aligned(
@@ -39,7 +62,8 @@ pub fn new_sized_aligned(
         align: Align,
     ) -> PlaceRef<'tcx, V> {
         assert!(layout.is_sized());
-        PlaceRef { llval, llextra: None, layout, align }
+        let val = PlaceValue::new_sized(llval, align);
+        PlaceRef { val, layout }
     }

     // FIXME(eddyb) pass something else for the name so no work is done
@@ -78,7 +102,7 @@ pub fn len<Cx: ConstMethods<'tcx, Value = V>>(&self, cx: &Cx) -> V {
         if let FieldsShape::Array { count, .. } = self.layout.fields {
             if self.layout.is_unsized() {
                 assert_eq!(count, 0);
-                self.llextra.unwrap()
+                self.val.llextra.unwrap()
             } else {
                 cx.const_usize(count)
             }
@@ -97,21 +121,27 @@ pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
     ) -> Self {
         let field = self.layout.field(bx.cx(), ix);
         let offset = self.layout.fields.offset(ix);
-        let effective_field_align = self.align.restrict_for_offset(offset);
+        let effective_field_align = self.val.align.restrict_for_offset(offset);

         // `simple` is called when we don't need to adjust the offset to
         // the dynamic alignment of the field.
         let mut simple = || {
             let llval = if offset.bytes() == 0 {
-                self.llval
+                self.val.llval
             } else {
-                bx.inbounds_ptradd(self.llval, bx.const_usize(offset.bytes()))
+                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
             };
             PlaceRef {
-                llval,
-                llextra: if bx.cx().type_has_metadata(field.ty) { self.llextra } else { None },
+                val: PlaceValue {
+                    llval,
+                    llextra: if bx.cx().type_has_metadata(field.ty) {
+                        self.val.llextra
+                    } else {
+                        None
+                    },
+                    align: effective_field_align,
+                },
                 layout: field,
-                align: effective_field_align,
             }
         };
@@ -142,7 +172,7 @@ pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         // The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`, meaning that
         // the `y` field has 16-bit alignment.
-        let meta = self.llextra;
+        let meta = self.val.llextra;

         let unaligned_offset = bx.cx().const_usize(offset.bytes());
@@ -164,9 +194,10 @@ pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         debug!("struct_field_ptr: DST field offset: {:?}", offset);

         // Adjust pointer.
-        let ptr = bx.inbounds_ptradd(self.llval, offset);
-
-        PlaceRef { llval: ptr, llextra: self.llextra, layout: field, align: effective_field_align }
+        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
+        let val =
+            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
+        PlaceRef { val, layout: field }
     }

     /// Obtain the actual discriminant of a value.
@@ -312,10 +343,9 @@ pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                 let ptr = self.project_field(bx, tag_field);
                 let to =
                     self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
-                bx.store(
+                bx.store_to_place(
                     bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
-                    ptr.llval,
-                    ptr.align,
+                    ptr.val,
                 );
             }
             Variants::Multiple {
@@ -357,14 +387,16 @@ pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         };

         PlaceRef {
-            llval: bx.inbounds_gep(
-                bx.cx().backend_type(self.layout),
-                self.llval,
-                &[bx.cx().const_usize(0), llindex],
-            ),
-            llextra: None,
+            val: PlaceValue {
+                llval: bx.inbounds_gep(
+                    bx.cx().backend_type(self.layout),
+                    self.val.llval,
+                    &[bx.cx().const_usize(0), llindex],
+                ),
+                llextra: None,
+                align: self.val.align.restrict_for_offset(offset),
+            },
             layout,
-            align: self.align.restrict_for_offset(offset),
         }
     }
@@ -389,11 +421,11 @@ pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
     }

     pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
-        bx.lifetime_start(self.llval, self.layout.size);
+        bx.lifetime_start(self.val.llval, self.layout.size);
     }

     pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
-        bx.lifetime_end(self.llval, self.layout.size);
+        bx.lifetime_end(self.val.llval, self.layout.size);
     }
 }
@@ -461,8 +493,9 @@ pub fn codegen_place(
                         if subslice.layout.is_unsized() {
                             assert!(from_end, "slice subslices should be `from_end`");
-                            subslice.llextra =
-                                Some(bx.sub(cg_base.llextra.unwrap(), bx.cx().const_usize(from + to)));
+                            subslice.val.llextra = Some(
+                                bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
+                            );
                         }

                         subslice


@ -68,13 +68,13 @@ pub fn codegen_rvalue(
base::coerce_unsized_into(bx, scratch, dest); base::coerce_unsized_into(bx, scratch, dest);
scratch.storage_dead(bx); scratch.storage_dead(bx);
} }
OperandValue::Ref(llref, None, align) => { OperandValue::Ref(val) => {
let source = PlaceRef::new_sized_aligned(llref, operand.layout, align); if val.llextra.is_some() {
bug!("unsized coercion on an unsized rvalue");
}
let source = PlaceRef { val, layout: operand.layout };
base::coerce_unsized_into(bx, source, dest); base::coerce_unsized_into(bx, source, dest);
} }
OperandValue::Ref(_, Some(_), _) => {
bug!("unsized coercion on an unsized rvalue");
}
OperandValue::ZeroSized => { OperandValue::ZeroSized => {
bug!("unsized coercion on a ZST rvalue"); bug!("unsized coercion on a ZST rvalue");
} }
@ -95,20 +95,20 @@ pub fn codegen_rvalue(
} }
if let OperandValue::Immediate(v) = cg_elem.val { if let OperandValue::Immediate(v) = cg_elem.val {
let start = dest.llval; let start = dest.val.llval;
let size = bx.const_usize(dest.layout.size.bytes()); let size = bx.const_usize(dest.layout.size.bytes());
// Use llvm.memset.p0i8.* to initialize all zero arrays // Use llvm.memset.p0i8.* to initialize all zero arrays
if bx.cx().const_to_opt_u128(v, false) == Some(0) { if bx.cx().const_to_opt_u128(v, false) == Some(0) {
let fill = bx.cx().const_u8(0); let fill = bx.cx().const_u8(0);
bx.memset(start, fill, size, dest.align, MemFlags::empty()); bx.memset(start, fill, size, dest.val.align, MemFlags::empty());
return; return;
} }
// Use llvm.memset.p0i8.* to initialize byte arrays // Use llvm.memset.p0i8.* to initialize byte arrays
let v = bx.from_immediate(v); let v = bx.from_immediate(v);
if bx.cx().val_ty(v) == bx.cx().type_i8() { if bx.cx().val_ty(v) == bx.cx().type_i8() {
bx.memset(start, v, size, dest.align, MemFlags::empty()); bx.memset(start, v, size, dest.val.align, MemFlags::empty());
return; return;
} }
} }
@ -182,7 +182,10 @@ fn codegen_transmute(
OperandValue::Immediate(..) | OperandValue::Pair(..) => { OperandValue::Immediate(..) | OperandValue::Pair(..) => {
// When we have immediate(s), the alignment of the source is irrelevant, // When we have immediate(s), the alignment of the source is irrelevant,
// so we can store them using the destination's alignment. // so we can store them using the destination's alignment.
src.val.store(bx, PlaceRef::new_sized_aligned(dst.llval, src.layout, dst.align)); src.val.store(
bx,
PlaceRef::new_sized_aligned(dst.val.llval, src.layout, dst.val.align),
);
} }
} }
} }
@ -217,10 +220,10 @@ fn codegen_transmute_operand(
let cast_kind = self.value_kind(cast); let cast_kind = self.value_kind(cast);
match operand.val { match operand.val {
OperandValue::Ref(ptr, meta, align) => { OperandValue::Ref(source_place_val) => {
debug_assert_eq!(meta, None); debug_assert_eq!(source_place_val.llextra, None);
debug_assert!(matches!(operand_kind, OperandValueKind::Ref)); debug_assert!(matches!(operand_kind, OperandValueKind::Ref));
let fake_place = PlaceRef::new_sized_aligned(ptr, cast, align); let fake_place = PlaceRef { val: source_place_val, layout: cast };
Some(bx.load_operand(fake_place).val) Some(bx.load_operand(fake_place).val)
} }
OperandValue::ZeroSized => { OperandValue::ZeroSized => {
@ -375,7 +378,7 @@ pub fn codegen_rvalue_unsized(
) { ) {
debug!( debug!(
"codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})", "codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})",
indirect_dest.llval, rvalue indirect_dest.val.llval, rvalue
); );
match *rvalue { match *rvalue {
@ -487,7 +490,7 @@ pub fn codegen_rvalue_operand(
} }
mir::CastKind::DynStar => { mir::CastKind::DynStar => {
let (lldata, llextra) = match operand.val { let (lldata, llextra) = match operand.val {
OperandValue::Ref(_, _, _) => todo!(), OperandValue::Ref(..) => todo!(),
OperandValue::Immediate(v) => (v, None), OperandValue::Immediate(v) => (v, None),
OperandValue::Pair(v, l) => (v, Some(l)), OperandValue::Pair(v, l) => (v, Some(l)),
OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"), OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"),
@ -765,9 +768,9 @@ fn codegen_place_to_pointer(
// Note: places are indirect, so storing the `llval` into the // Note: places are indirect, so storing the `llval` into the
// destination effectively creates a reference. // destination effectively creates a reference.
let val = if !bx.cx().type_has_metadata(ty) { let val = if !bx.cx().type_has_metadata(ty) {
OperandValue::Immediate(cg_place.llval) OperandValue::Immediate(cg_place.val.llval)
} else { } else {
OperandValue::Pair(cg_place.llval, cg_place.llextra.unwrap()) OperandValue::Pair(cg_place.val.llval, cg_place.val.llextra.unwrap())
}; };
OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) } OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) }
} }
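
Whether address-of yields an `Immediate` or a `Pair` mirrors Rust's own thin/fat pointer split: no metadata means a lone pointer, metadata means a (data, extra) pair. A safe-Rust illustration of the two representations:

fn main() {
    // Thin pointer: no metadata, a single immediate value.
    let x = 5u32;
    let thin: *const u32 = &x;

    // Fat pointer: a (data, metadata) pair -- here the metadata is a length.
    let xs = [1u8, 2, 3];
    let fat: &[u8] = &xs;
    let (data, len) = (fat.as_ptr(), fat.len());

    println!("thin={thin:?} data={data:?} len={len}");
}
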

View File

@ -12,7 +12,7 @@
AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind, AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind,
}; };
use crate::mir::operand::{OperandRef, OperandValue}; use crate::mir::operand::{OperandRef, OperandValue};
use crate::mir::place::PlaceRef; use crate::mir::place::{PlaceRef, PlaceValue};
use crate::MemFlags; use crate::MemFlags;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
@ -156,6 +156,10 @@ fn atomic_load(
order: AtomicOrdering, order: AtomicOrdering,
size: Size, size: Size,
) -> Self::Value; ) -> Self::Value;
fn load_from_place(&mut self, ty: Self::Type, place: PlaceValue<Self::Value>) -> Self::Value {
debug_assert_eq!(place.llextra, None);
self.load(ty, place.llval, place.align)
}
fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>) fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
-> OperandRef<'tcx, Self::Value>; -> OperandRef<'tcx, Self::Value>;
@ -171,6 +175,10 @@ fn write_operand_repeatedly(
fn nonnull_metadata(&mut self, load: Self::Value); fn nonnull_metadata(&mut self, load: Self::Value);
fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value; fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
fn store_to_place(&mut self, val: Self::Value, place: PlaceValue<Self::Value>) -> Self::Value {
debug_assert_eq!(place.llextra, None);
self.store(val, place.llval, place.align)
}
fn store_with_flags( fn store_with_flags(
&mut self, &mut self,
val: Self::Value, val: Self::Value,
@ -290,14 +298,14 @@ fn typed_place_copy_with_flags(
src: PlaceRef<'tcx, Self::Value>, src: PlaceRef<'tcx, Self::Value>,
flags: MemFlags, flags: MemFlags,
) { ) {
debug_assert!(src.llextra.is_none(), "cannot directly copy from unsized values"); debug_assert!(src.val.llextra.is_none(), "cannot directly copy from unsized values");
debug_assert!(dst.llextra.is_none(), "cannot directly copy into unsized values"); debug_assert!(dst.val.llextra.is_none(), "cannot directly copy into unsized values");
debug_assert_eq!(dst.layout.size, src.layout.size); debug_assert_eq!(dst.layout.size, src.layout.size);
if flags.contains(MemFlags::NONTEMPORAL) { if flags.contains(MemFlags::NONTEMPORAL) {
// HACK(nox): This is inefficient but there is no nontemporal memcpy. // HACK(nox): This is inefficient but there is no nontemporal memcpy.
let ty = self.backend_type(dst.layout); let ty = self.backend_type(dst.layout);
let val = self.load(ty, src.llval, src.align); let val = self.load_from_place(ty, src.val);
self.store_with_flags(val, dst.llval, dst.align, flags); self.store_with_flags(val, dst.val.llval, dst.val.align, flags);
} else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(dst.layout) } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(dst.layout)
{ {
// If we're not optimizing, the aliasing information from `memcpy` // If we're not optimizing, the aliasing information from `memcpy`
@ -306,7 +314,7 @@ fn typed_place_copy_with_flags(
temp.val.store_with_flags(self, dst, flags); temp.val.store_with_flags(self, dst, flags);
} else if !dst.layout.is_zst() { } else if !dst.layout.is_zst() {
let bytes = self.const_usize(dst.layout.size.bytes()); let bytes = self.const_usize(dst.layout.size.bytes());
self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags); self.memcpy(dst.val.llval, dst.val.align, src.val.llval, src.val.align, bytes, flags);
} }
} }
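
`load_from_place` and `store_to_place` are provided trait methods: they assert the place carries no unsized metadata, then delegate to the required `load`/`store` primitives, so every backend inherits them and call sites like `typed_place_copy_with_flags` shrink. A sketch of that trait shape, using hypothetical stand-ins (`Value`, `Type`, `Align`, `PlaceValue`, and `LogBuilder` are placeholders, not the real codegen types):

type Value = u64;        // stand-in handles
type Type = &'static str;
type Align = u64;

#[derive(Copy, Clone)]
struct PlaceValue {
    llval: Value,
    llextra: Option<Value>,
    align: Align,
}

trait BuilderMethods {
    // Required primitives every backend implements.
    fn load(&mut self, ty: Type, ptr: Value, align: Align) -> Value;
    fn store(&mut self, val: Value, ptr: Value, align: Align) -> Value;

    // Provided helpers: sized places only, then delegate.
    fn load_from_place(&mut self, ty: Type, place: PlaceValue) -> Value {
        debug_assert_eq!(place.llextra, None);
        self.load(ty, place.llval, place.align)
    }
    fn store_to_place(&mut self, val: Value, place: PlaceValue) -> Value {
        debug_assert_eq!(place.llextra, None);
        self.store(val, place.llval, place.align)
    }
}

struct LogBuilder;
impl BuilderMethods for LogBuilder {
    fn load(&mut self, ty: Type, ptr: Value, _align: Align) -> Value {
        println!("load {ty} from {ptr:#x}");
        ptr
    }
    fn store(&mut self, val: Value, ptr: Value, _align: Align) -> Value {
        println!("store {val} to {ptr:#x}");
        val
    }
}

fn main() {
    let place = PlaceValue { llval: 0x1000, llextra: None, align: 8 };
    let mut b = LogBuilder;
    let v = b.load_from_place("i32", place);
    b.store_to_place(v, place);
}
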

View File

@ -1082,7 +1082,7 @@ pub fn is_builtin_attr_name(name: Symbol) -> bool {
/// This means it can be used cross crate. /// This means it can be used cross crate.
pub fn encode_cross_crate(name: Symbol) -> bool { pub fn encode_cross_crate(name: Symbol) -> bool {
if let Some(attr) = BUILTIN_ATTRIBUTE_MAP.get(&name) { if let Some(attr) = BUILTIN_ATTRIBUTE_MAP.get(&name) {
if attr.encode_cross_crate == EncodeCrossCrate::Yes { true } else { false } attr.encode_cross_crate == EncodeCrossCrate::Yes
} else { } else {
true true
} }

View File

@ -1134,7 +1134,7 @@ enum ProhibitGenericsArg {
for (what, span) in types_and_spans { for (what, span) in types_and_spans {
err.span_label(span, format!("not allowed on {what}")); err.span_label(span, format!("not allowed on {what}"));
} }
generics_args_err_extend(self.tcx(), segments.clone(), &mut err, err_extend); generics_args_err_extend(self.tcx(), segments, &mut err, err_extend);
let reported = err.emit(); let reported = err.emit();
self.set_tainted_by_errors(reported); self.set_tainted_by_errors(reported);
reported reported

View File

@ -170,7 +170,9 @@ fn configure_and_expand(
let mut old_path = OsString::new(); let mut old_path = OsString::new();
if cfg!(windows) { if cfg!(windows) {
old_path = env::var_os("PATH").unwrap_or(old_path); old_path = env::var_os("PATH").unwrap_or(old_path);
let mut new_path = sess.host_filesearch(PathKind::All).search_path_dirs(); let mut new_path = Vec::from_iter(
sess.host_filesearch(PathKind::All).search_path_dirs().map(|p| p.to_owned()),
);
for path in env::split_paths(&old_path) { for path in env::split_paths(&old_path) {
if !new_path.contains(&path) { if !new_path.contains(&path) {
new_path.push(path); new_path.push(path);
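
The Windows branch now collects the host search directories into owned `PathBuf`s up front, then appends the pre-existing `PATH` entries while skipping duplicates. The prepend-without-duplicates step in isolation (the two host directories below are invented for illustration):

use std::env;
use std::path::PathBuf;

fn main() {
    // Hypothetical host search directories; rustc takes these from the session.
    let mut new_path = vec![PathBuf::from("/opt/host/lib"), PathBuf::from("/opt/host/bin")];

    // Keep every existing PATH entry, but never duplicate one already added.
    let old_path = env::var_os("PATH").unwrap_or_default();
    for path in env::split_paths(&old_path) {
        if !new_path.contains(&path) {
            new_path.push(path);
        }
    }

    // join_paths uses the platform separator and rejects entries containing it.
    let joined = env::join_paths(new_path).expect("PATH entry contained a separator");
    println!("{}", joined.to_string_lossy());
}
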

View File

@ -17,12 +17,12 @@
use crate::errors; use crate::errors;
use std::path::PathBuf; use std::path::{Path, PathBuf};
pub fn find_native_static_library( pub fn find_native_static_library<'a>(
name: &str, name: &str,
verbatim: bool, verbatim: bool,
search_paths: &[PathBuf], search_paths: impl Iterator<Item = &'a Path>,
sess: &Session, sess: &Session,
) -> PathBuf { ) -> PathBuf {
let formats = if verbatim { let formats = if verbatim {
@ -60,7 +60,7 @@ fn find_bundled_library(
&& (sess.opts.unstable_opts.packed_bundled_libs || has_cfg || whole_archive == Some(true)) && (sess.opts.unstable_opts.packed_bundled_libs || has_cfg || whole_archive == Some(true))
{ {
let verbatim = verbatim.unwrap_or(false); let verbatim = verbatim.unwrap_or(false);
let search_paths = &sess.target_filesearch(PathKind::Native).search_path_dirs(); let search_paths = sess.target_filesearch(PathKind::Native).search_path_dirs();
return find_native_static_library(name.as_str(), verbatim, search_paths, sess) return find_native_static_library(name.as_str(), verbatim, search_paths, sess)
.file_name() .file_name()
.and_then(|s| s.to_str()) .and_then(|s| s.to_str())
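
The allocation saving comes from the parameter change: `&[PathBuf]` forced every caller to collect owned paths, while `impl Iterator<Item = &'a Path>` borrows whatever the caller already holds. A minimal before/after sketch (the lookup is reduced to "first candidate" here; a real version would test each candidate on disk):

use std::path::{Path, PathBuf};

// Before: the caller must materialize an owned Vec<PathBuf>.
fn first_candidate_owned(name: &str, search_paths: &[PathBuf]) -> Option<PathBuf> {
    search_paths.first().map(|p| p.join(name))
}

// After: any iterator of borrowed paths works; nothing is collected or cloned.
fn first_candidate<'a>(
    name: &str,
    mut search_paths: impl Iterator<Item = &'a Path>,
) -> Option<PathBuf> {
    search_paths.next().map(|p| p.join(name))
}

fn main() {
    let dirs = [PathBuf::from("/usr/lib"), PathBuf::from("/usr/local/lib")];
    let a = first_candidate_owned("libfoo.a", &dirs);
    let b = first_candidate("libfoo.a", dirs.iter().map(|p| p.as_path()));
    assert_eq!(a, b);
}
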

View File

@ -2009,7 +2009,7 @@ fn encode_impls(&mut self) -> LazyArray<TraitImpls> {
.push((id.owner_id.def_id.local_def_index, simplified_self_ty)); .push((id.owner_id.def_id.local_def_index, simplified_self_ty));
let trait_def = tcx.trait_def(trait_ref.def_id); let trait_def = tcx.trait_def(trait_ref.def_id);
if let Some(mut an) = trait_def.ancestors(tcx, def_id).ok() { if let Ok(mut an) = trait_def.ancestors(tcx, def_id) {
if let Some(specialization_graph::Node::Impl(parent)) = an.nth(1) { if let Some(specialization_graph::Node::Impl(parent)) = an.nth(1) {
self.tables.impl_parent.set_some(def_id.index, parent.into()); self.tables.impl_parent.set_some(def_id.index, parent.into());
} }
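
This is the same cleanup the parser hunk later in this rollup gets: instead of converting a `Result` to an `Option` with `.ok()` just to pattern-match success, match `Ok` directly. The idiom in isolation:

fn parse(s: &str) -> Result<i32, std::num::ParseIntError> {
    s.parse()
}

fn main() {
    // Before: detour through Option just to pattern-match.
    if let Some(n) = parse("42").ok() {
        println!("got {n}");
    }
    // After: match the Result directly.
    if let Ok(n) = parse("42") {
        println!("got {n}");
    }
}
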

View File

@ -10,7 +10,6 @@ derivative = "2.2.0"
either = "1.5.0" either = "1.5.0"
field-offset = "0.3.5" field-offset = "0.3.5"
gsgdt = "0.1.2" gsgdt = "0.1.2"
measureme = "11"
polonius-engine = "0.13.0" polonius-engine = "0.13.0"
rustc-rayon = { version = "0.5.0", optional = true } rustc-rayon = { version = "0.5.0", optional = true }
rustc-rayon-core = { version = "0.5.0", optional = true } rustc-rayon-core = { version = "0.5.0", optional = true }

View File

@ -8,8 +8,6 @@
}; };
use crate::ty::TyCtxt; use crate::ty::TyCtxt;
use field_offset::FieldOffset; use field_offset::FieldOffset;
use measureme::StringId;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::AtomicU64; use rustc_data_structures::sync::AtomicU64;
use rustc_data_structures::sync::WorkerLocal; use rustc_data_structures::sync::WorkerLocal;
use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::def_id::{DefId, LocalDefId};
@ -22,16 +20,6 @@
use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP}; use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
use std::ops::Deref; use std::ops::Deref;
pub struct QueryKeyStringCache {
pub def_id_cache: FxHashMap<DefId, StringId>,
}
impl QueryKeyStringCache {
pub fn new() -> QueryKeyStringCache {
QueryKeyStringCache { def_id_cache: Default::default() }
}
}
pub struct DynamicQuery<'tcx, C: QueryCache> { pub struct DynamicQuery<'tcx, C: QueryCache> {
pub name: &'static str, pub name: &'static str,
pub eval_always: bool, pub eval_always: bool,

View File

@ -3585,8 +3585,7 @@ pub(super) fn parse_struct_fields(
match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) { match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
Ok(_) => { Ok(_) => {
if let Some(f) = if let Ok(f) = parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar))
parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar)).ok()
{ {
// Only include the field if there's no parse error for the field name. // Only include the field if there's no parse error for the field name.
fields.push(f); fields.push(f);

View File

@ -13,6 +13,7 @@
extern crate rustc_middle; extern crate rustc_middle;
use crate::plumbing::{__rust_begin_short_backtrace, encode_all_query_results, try_mark_green}; use crate::plumbing::{__rust_begin_short_backtrace, encode_all_query_results, try_mark_green};
use crate::profiling_support::QueryKeyStringCache;
use field_offset::offset_of; use field_offset::offset_of;
use rustc_data_structures::stable_hasher::HashStable; use rustc_data_structures::stable_hasher::HashStable;
use rustc_data_structures::sync::AtomicU64; use rustc_data_structures::sync::AtomicU64;
@ -21,9 +22,7 @@
use rustc_middle::dep_graph::{self, DepKind, DepKindStruct}; use rustc_middle::dep_graph::{self, DepKind, DepKindStruct};
use rustc_middle::query::erase::{erase, restore, Erase}; use rustc_middle::query::erase::{erase, restore, Erase};
use rustc_middle::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache}; use rustc_middle::query::on_disk_cache::{CacheEncoder, EncodedDepNodeIndex, OnDiskCache};
use rustc_middle::query::plumbing::{ use rustc_middle::query::plumbing::{DynamicQuery, QuerySystem, QuerySystemFns};
DynamicQuery, QueryKeyStringCache, QuerySystem, QuerySystemFns,
};
use rustc_middle::query::AsLocalKey; use rustc_middle::query::AsLocalKey;
use rustc_middle::query::{ use rustc_middle::query::{
queries, DynamicQueries, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates, queries, DynamicQueries, ExternProviders, Providers, QueryCaches, QueryEngine, QueryStates,

View File

@ -1,13 +1,23 @@
use measureme::{StringComponent, StringId}; use measureme::{StringComponent, StringId};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::profiling::SelfProfiler; use rustc_data_structures::profiling::SelfProfiler;
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE}; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE};
use rustc_hir::definitions::DefPathData; use rustc_hir::definitions::DefPathData;
use rustc_middle::query::plumbing::QueryKeyStringCache;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use rustc_query_system::query::QueryCache; use rustc_query_system::query::QueryCache;
use std::fmt::Debug; use std::fmt::Debug;
use std::io::Write; use std::io::Write;
pub(crate) struct QueryKeyStringCache {
def_id_cache: FxHashMap<DefId, StringId>,
}
impl QueryKeyStringCache {
fn new() -> QueryKeyStringCache {
QueryKeyStringCache { def_id_cache: Default::default() }
}
}
struct QueryKeyStringBuilder<'p, 'tcx> { struct QueryKeyStringBuilder<'p, 'tcx> {
profiler: &'p SelfProfiler, profiler: &'p SelfProfiler,
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,

View File

@ -541,12 +541,7 @@ fn encode_node(
record_graph: &Option<Lock<DepGraphQuery>>, record_graph: &Option<Lock<DepGraphQuery>>,
) -> DepNodeIndex { ) -> DepNodeIndex {
node.encode::<D>(&mut self.encoder); node.encode::<D>(&mut self.encoder);
self.record( self.record(node.node, node.edges.len(), |_| node.edges[..].to_vec(), record_graph)
node.node,
node.edges.len(),
|_| node.edges[..].iter().copied().collect(),
record_graph,
)
} }
/// Encodes a node that was promoted from the previous graph. It reads the information directly from /// Encodes a node that was promoted from the previous graph. It reads the information directly from
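
For a slice of `Copy` elements, `.to_vec()` is the one-call spelling of `.iter().copied().collect::<Vec<_>>()`:

fn main() {
    let edges = [1u32, 2, 3];
    let a: Vec<u32> = edges[..].iter().copied().collect(); // the long way
    let b = edges[..].to_vec();                            // same result
    assert_eq!(a, b);
}
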

View File

@ -47,8 +47,8 @@ pub fn new(
} }
/// Returns just the directories within the search paths. /// Returns just the directories within the search paths.
pub fn search_path_dirs(&self) -> Vec<PathBuf> { pub fn search_path_dirs(&self) -> impl Iterator<Item = &'a Path> {
self.search_paths().map(|sp| sp.dir.to_path_buf()).collect() self.search_paths().map(|sp| &*sp.dir)
} }
} }
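
This is the producer side of the search-path change: rather than collecting into a fresh `Vec<PathBuf>`, the method now yields `&Path` items borrowed from the stored search paths. A sketch of returning a borrowing iterator (this `FileSearch` is a simplified stand-in for the real type):

use std::path::{Path, PathBuf};

struct SearchPath {
    dir: PathBuf,
}

struct FileSearch {
    paths: Vec<SearchPath>,
}

impl FileSearch {
    // Before: -> Vec<PathBuf>, cloning every directory into a new allocation.
    // After: borrow each directory; the caller iterates without allocating.
    fn search_path_dirs(&self) -> impl Iterator<Item = &Path> {
        self.paths.iter().map(|sp| &*sp.dir)
    }
}

fn main() {
    let fs = FileSearch { paths: vec![SearchPath { dir: PathBuf::from("/usr/lib") }] };
    for dir in fs.search_path_dirs() {
        println!("{}", dir.display());
    }
}
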

View File

@ -47,10 +47,16 @@ pub(super) fn increment_num_running_threads(&self) {
// chance it overflows to 0, which would result in unsoundness. // chance it overflows to 0, which would result in unsoundness.
if self.num_running_threads.fetch_add(1, Ordering::Relaxed) > usize::MAX / 2 { if self.num_running_threads.fetch_add(1, Ordering::Relaxed) > usize::MAX / 2 {
// This can only reasonably happen by mem::forget()'ing a lot of ScopedJoinHandles. // This can only reasonably happen by mem::forget()'ing a lot of ScopedJoinHandles.
self.decrement_num_running_threads(false); self.overflow();
panic!("too many running threads in thread scope");
} }
} }
#[cold]
fn overflow(&self) {
self.decrement_num_running_threads(false);
panic!("too many running threads in thread scope");
}
pub(super) fn decrement_num_running_threads(&self, panic: bool) { pub(super) fn decrement_num_running_threads(&self, panic: bool) {
if panic { if panic {
self.a_thread_panicked.store(true, Ordering::Relaxed); self.a_thread_panicked.store(true, Ordering::Relaxed);
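
Outlining the overflow branch behind `#[cold]` keeps the hot path to one `fetch_add` plus a cheap comparison, and hints to the optimizer that the panicking branch is unlikely. The same shape on a simplified counter (this `ScopeData` is a stand-in; the real method delegates to `decrement_num_running_threads(false)` rather than a raw `fetch_sub`):

use std::sync::atomic::{AtomicUsize, Ordering};

struct ScopeData {
    num_running_threads: AtomicUsize,
}

impl ScopeData {
    fn increment_num_running_threads(&self) {
        // Hot path: one atomic add and a comparison.
        if self.num_running_threads.fetch_add(1, Ordering::Relaxed) > usize::MAX / 2 {
            self.overflow();
        }
    }

    // Cold path: rarely taken, so keep it out of line.
    #[cold]
    fn overflow(&self) {
        self.num_running_threads.fetch_sub(1, Ordering::Relaxed);
        panic!("too many running threads in thread scope");
    }
}

fn main() {
    let scope = ScopeData { num_running_threads: AtomicUsize::new(0) };
    scope.increment_num_running_threads();
}
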

View File

@ -3785,6 +3785,14 @@ fn run_rmake_v2_test(&self) {
debug!(?support_lib_deps); debug!(?support_lib_deps);
debug!(?support_lib_deps_deps); debug!(?support_lib_deps_deps);
let orig_dylib_env_paths =
Vec::from_iter(env::split_paths(&env::var(dylib_env_var()).unwrap()));
let mut host_dylib_env_paths = Vec::new();
host_dylib_env_paths.push(cwd.join(&self.config.compile_lib_path));
host_dylib_env_paths.extend(orig_dylib_env_paths.iter().cloned());
let host_dylib_env_paths = env::join_paths(host_dylib_env_paths).unwrap();
let mut cmd = Command::new(&self.config.rustc_path); let mut cmd = Command::new(&self.config.rustc_path);
cmd.arg("-o") cmd.arg("-o")
.arg(&recipe_bin) .arg(&recipe_bin)
@ -3801,6 +3809,7 @@ fn run_rmake_v2_test(&self) {
.env("RUSTC", cwd.join(&self.config.rustc_path)) .env("RUSTC", cwd.join(&self.config.rustc_path))
.env("TMPDIR", &tmpdir) .env("TMPDIR", &tmpdir)
.env("LD_LIB_PATH_ENVVAR", dylib_env_var()) .env("LD_LIB_PATH_ENVVAR", dylib_env_var())
.env(dylib_env_var(), &host_dylib_env_paths)
.env("HOST_RPATH_DIR", cwd.join(&self.config.compile_lib_path)) .env("HOST_RPATH_DIR", cwd.join(&self.config.compile_lib_path))
.env("TARGET_RPATH_DIR", cwd.join(&self.config.run_lib_path)) .env("TARGET_RPATH_DIR", cwd.join(&self.config.run_lib_path))
.env("LLVM_COMPONENTS", &self.config.llvm_components) .env("LLVM_COMPONENTS", &self.config.llvm_components)
@ -3828,19 +3837,15 @@ fn run_rmake_v2_test(&self) {
// Finally, we need to run the recipe binary to build and run the actual tests. // Finally, we need to run the recipe binary to build and run the actual tests.
debug!(?recipe_bin); debug!(?recipe_bin);
let mut dylib_env_paths = String::new(); let mut dylib_env_paths = orig_dylib_env_paths.clone();
dylib_env_paths.push_str(&env::var(dylib_env_var()).unwrap()); dylib_env_paths.push(support_lib_path.parent().unwrap().to_path_buf());
dylib_env_paths.push(':'); dylib_env_paths.push(stage_std_path.join("rustlib").join(&self.config.host).join("lib"));
dylib_env_paths.push_str(&support_lib_path.parent().unwrap().to_string_lossy()); let dylib_env_paths = env::join_paths(dylib_env_paths).unwrap();
dylib_env_paths.push(':');
dylib_env_paths.push_str(
&stage_std_path.join("rustlib").join(&self.config.host).join("lib").to_string_lossy(),
);
let mut target_rpath_env_path = String::new(); let mut target_rpath_env_path = Vec::new();
target_rpath_env_path.push_str(&tmpdir.to_string_lossy()); target_rpath_env_path.push(&tmpdir);
target_rpath_env_path.push(':'); target_rpath_env_path.extend(&orig_dylib_env_paths);
target_rpath_env_path.push_str(&dylib_env_paths); let target_rpath_env_path = env::join_paths(target_rpath_env_path).unwrap();
let mut cmd = Command::new(&recipe_bin); let mut cmd = Command::new(&recipe_bin);
cmd.current_dir(&self.testpaths.file) cmd.current_dir(&self.testpaths.file)
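
The hand-rolled strings with hard-coded ':' separators are replaced by `env::split_paths`/`env::join_paths`, which use the platform's separator (':' on Unix, ';' on Windows) and error out if an entry embeds one. The round trip in isolation (the literal `LD_LIBRARY_PATH` and the two extra directories are illustrative; compiletest resolves the variable name via `dylib_env_var()`):

use std::env;
use std::path::PathBuf;

fn main() {
    // Start from the current value of the loader path, if any.
    let mut paths: Vec<PathBuf> =
        env::split_paths(&env::var_os("LD_LIBRARY_PATH").unwrap_or_default()).collect();

    // Append the extra directories the tests need (hypothetical here).
    paths.push(PathBuf::from("/work/support-lib"));
    paths.push(PathBuf::from("/work/stage/rustlib/host/lib"));

    // join_paths picks the right separator and fails on embedded separators.
    let joined = env::join_paths(paths).expect("path entry contained a separator");
    println!("{}", joined.to_string_lossy());
}
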

View File

@ -115,3 +115,17 @@ fn handle_failed_output(cmd: &str, output: Output, caller_line_number: u32) -> !
eprintln!("=== STDERR ===\n{}\n\n", String::from_utf8(output.stderr).unwrap()); eprintln!("=== STDERR ===\n{}\n\n", String::from_utf8(output.stderr).unwrap());
std::process::exit(1) std::process::exit(1)
} }
/// Set the runtime library path as needed for running the host rustc/rustdoc/etc.
pub fn set_host_rpath(cmd: &mut Command) {
let ld_lib_path_envvar = env::var("LD_LIB_PATH_ENVVAR").unwrap();
cmd.env(&ld_lib_path_envvar, {
let mut paths = vec![];
paths.push(PathBuf::from(env::var("TMPDIR").unwrap()));
paths.push(PathBuf::from(env::var("HOST_RPATH_DIR").unwrap()));
for p in env::split_paths(&env::var(&ld_lib_path_envvar).unwrap()) {
paths.push(p.to_path_buf());
}
env::join_paths(paths.iter()).unwrap()
});
}

View File

@ -3,7 +3,7 @@
use std::path::Path; use std::path::Path;
use std::process::{Command, Output}; use std::process::{Command, Output};
use crate::{handle_failed_output, tmp_dir}; use crate::{handle_failed_output, set_host_rpath, tmp_dir};
/// Construct a new `rustc` invocation. /// Construct a new `rustc` invocation.
pub fn rustc() -> Rustc { pub fn rustc() -> Rustc {
@ -24,6 +24,7 @@ pub struct Rustc {
fn setup_common() -> Command { fn setup_common() -> Command {
let rustc = env::var("RUSTC").unwrap(); let rustc = env::var("RUSTC").unwrap();
let mut cmd = Command::new(rustc); let mut cmd = Command::new(rustc);
set_host_rpath(&mut cmd);
cmd.arg("--out-dir").arg(tmp_dir()).arg("-L").arg(tmp_dir()); cmd.arg("--out-dir").arg(tmp_dir()).arg("-L").arg(tmp_dir());
cmd cmd
} }

View File

@ -3,7 +3,7 @@
use std::path::Path; use std::path::Path;
use std::process::{Command, Output}; use std::process::{Command, Output};
use crate::handle_failed_output; use crate::{handle_failed_output, set_host_rpath};
/// Construct a plain `rustdoc` invocation with no flags set. /// Construct a plain `rustdoc` invocation with no flags set.
pub fn bare_rustdoc() -> Rustdoc { pub fn bare_rustdoc() -> Rustdoc {
@ -22,7 +22,9 @@ pub struct Rustdoc {
fn setup_common() -> Command { fn setup_common() -> Command {
let rustdoc = env::var("RUSTDOC").unwrap(); let rustdoc = env::var("RUSTDOC").unwrap();
Command::new(rustdoc) let mut cmd = Command::new(rustdoc);
set_host_rpath(&mut cmd);
cmd
} }
impl Rustdoc { impl Rustdoc {

View File

@ -22,6 +22,7 @@
use run_make_support::object::ObjectSection; use run_make_support::object::ObjectSection;
use run_make_support::object::ObjectSymbol; use run_make_support::object::ObjectSymbol;
use run_make_support::object::RelocationTarget; use run_make_support::object::RelocationTarget;
use run_make_support::set_host_rpath;
use run_make_support::tmp_dir; use run_make_support::tmp_dir;
use std::collections::HashSet; use std::collections::HashSet;
@ -48,8 +49,8 @@ fn main() {
let path = std::env::var("PATH").unwrap(); let path = std::env::var("PATH").unwrap();
let rustc = std::env::var("RUSTC").unwrap(); let rustc = std::env::var("RUSTC").unwrap();
let bootstrap_cargo = std::env::var("BOOTSTRAP_CARGO").unwrap(); let bootstrap_cargo = std::env::var("BOOTSTRAP_CARGO").unwrap();
let status = std::process::Command::new(bootstrap_cargo) let mut cmd = std::process::Command::new(bootstrap_cargo);
.args([ cmd.args([
"build", "build",
"--manifest-path", "--manifest-path",
manifest_path.to_str().unwrap(), manifest_path.to_str().unwrap(),
@ -62,10 +63,10 @@ fn main() {
.env("RUSTC", rustc) .env("RUSTC", rustc)
.env("RUSTFLAGS", "-Copt-level=0 -Cdebug-assertions=yes") .env("RUSTFLAGS", "-Copt-level=0 -Cdebug-assertions=yes")
.env("CARGO_TARGET_DIR", &target_dir) .env("CARGO_TARGET_DIR", &target_dir)
.env("RUSTC_BOOTSTRAP", "1") .env("RUSTC_BOOTSTRAP", "1");
.status() set_host_rpath(&mut cmd);
.unwrap();
let status = cmd.status().unwrap();
assert!(status.success()); assert!(status.success());
let rlibs_path = target_dir.join(target).join("debug").join("deps"); let rlibs_path = target_dir.join(target).join("debug").join("deps");
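
Splitting the long builder chain into a bound `Command` is what lets `set_host_rpath(&mut cmd)` participate: helpers need `&mut Command`, which a one-expression chain ending in `.status()` never exposes. The shape in general (the helper body and the Unix `true` binary here are illustrative only):

use std::process::Command;

// Hypothetical helper that needs mutable access to the command.
fn set_host_rpath(cmd: &mut Command) {
    cmd.env("LD_LIBRARY_PATH", "/work/host/lib"); // illustrative only
}

fn main() {
    // Bind the command, let helpers configure it, then run it.
    let mut cmd = Command::new("true");
    cmd.arg("--flag").env("RUSTC_BOOTSTRAP", "1");
    set_host_rpath(&mut cmd);
    let status = cmd.status().unwrap();
    assert!(status.success());
}
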

View File

@ -1,5 +1,5 @@
//@ run-pass //@ run-pass
//@ revisions YES NO //@ revisions: YES NO
//@ [YES] compile-flags: -Cdebug-assertions=yes //@ [YES] compile-flags: -Cdebug-assertions=yes
//@ [NO] compile-flags: -Cdebug-assertions=no //@ [NO] compile-flags: -Cdebug-assertions=no