Auto merge of #66259 - JohnTitor:rollup-x9nk1e2, r=JohnTitor
Rollup of 7 pull requests

Successful merges:

- #65719 (Refactor sync::Once)
- #65831 (Don't cast directly from &[T; N] to *const T)
- #66048 (Correct error in documentation for Ipv4Addr method)
- #66058 (Correct deprecated `is_global` IPv6 documentation)
- #66216 ([mir-opt] Handle return place in ConstProp and improve SimplifyLocals pass)
- #66217 (invalid_value lint: use diagnostic items)
- #66235 (rustc_metadata: don't let LLVM confuse rmeta blobs for COFF object files.)

Failed merges:

r? @ghost
commit 57a5f92bef
@@ -256,6 +256,7 @@ impl<T> MaybeUninit<T> {
     /// [type]: union.MaybeUninit.html
     #[stable(feature = "maybe_uninit", since = "1.36.0")]
     #[inline(always)]
+    #[cfg_attr(all(not(bootstrap)), rustc_diagnostic_item = "maybe_uninit_uninit")]
     pub const fn uninit() -> MaybeUninit<T> {
         MaybeUninit { uninit: () }
     }
@@ -339,6 +340,7 @@ impl<T> MaybeUninit<T> {
     /// ```
     #[stable(feature = "maybe_uninit", since = "1.36.0")]
     #[inline]
+    #[cfg_attr(all(not(bootstrap)), rustc_diagnostic_item = "maybe_uninit_zeroed")]
     pub fn zeroed() -> MaybeUninit<T> {
         let mut u = MaybeUninit::<T>::uninit();
         unsafe {
@@ -468,6 +468,7 @@ pub const fn needs_drop<T>() -> bool {
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow(deprecated_in_future)]
 #[allow(deprecated)]
+#[cfg_attr(all(not(bootstrap)), rustc_diagnostic_item = "mem_zeroed")]
 pub unsafe fn zeroed<T>() -> T {
     intrinsics::panic_if_uninhabited::<T>();
     intrinsics::init()
@@ -496,6 +497,7 @@ pub unsafe fn zeroed<T>() -> T {
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow(deprecated_in_future)]
 #[allow(deprecated)]
+#[cfg_attr(all(not(bootstrap)), rustc_diagnostic_item = "mem_uninitialized")]
 pub unsafe fn uninitialized<T>() -> T {
     intrinsics::panic_if_uninhabited::<T>();
     intrinsics::uninit()
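Aside: the four hunks above only tag `MaybeUninit::{uninit,zeroed}` and `mem::{zeroed,uninitialized}` with diagnostic items so the lint below can find them. As a reminder of why the old functions are deprecated, a minimal sketch (not part of this diff) contrasting them with `MaybeUninit`:

    use std::mem::MaybeUninit;

    fn main() {
        // With the deprecated API, `let r: &u32 = unsafe { std::mem::zeroed() };`
        // would fabricate a null reference, which is undefined behavior.
        // `MaybeUninit` makes the dangerous step explicit: storage may stay
        // uninitialized until we actually write to it.
        let mut slot = MaybeUninit::<u32>::uninit();
        let value = unsafe {
            slot.as_mut_ptr().write(42); // initialize first...
            slot.assume_init()           // ...only then assume it is valid
        };
        assert_eq!(value, 42);
    }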
@@ -20,6 +20,9 @@ pub enum PointerCast {
     /// Go from a mut raw pointer to a const raw pointer.
     MutToConstPointer,

+    /// Go from `*const [T; N]` to `*const T`
+    ArrayToPointer,
+
     /// Unsize a pointer/reference value, e.g., `&[T; n]` to
     /// `&[T]`. Note that the source could be a thin or fat pointer.
     /// This will do things like convert thin pointers to fat
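Aside: a small sketch (not part of this diff) of the source-level cast the new `ArrayToPointer` variant models, going from `*const [T; N]` to `*const T`:

    fn main() {
        let array: [u8; 4] = [1, 2, 3, 4];
        let array_ptr: *const [u8; 4] = &array; // coercion from `&[u8; 4]`
        let elem_ptr = array_ptr as *const u8;  // the ArrayToPointer cast
        unsafe { assert_eq!(*elem_ptr, 1) };
    }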
@@ -59,7 +59,8 @@ pub struct ModuleCodegen<M> {
     pub kind: ModuleKind,
 }

-pub const METADATA_FILENAME: &str = "rust.metadata.bin";
+// FIXME(eddyb) maybe include the crate name in this?
+pub const METADATA_FILENAME: &str = "lib.rmeta";
 pub const RLIB_BYTECODE_EXTENSION: &str = "bc.z";
@@ -269,6 +269,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 }
             }
             mir::CastKind::Pointer(PointerCast::MutToConstPointer)
+            | mir::CastKind::Pointer(PointerCast::ArrayToPointer)
             | mir::CastKind::Misc => {
                 assert!(bx.cx().is_backend_immediate(cast));
                 let ll_t_out = bx.cx().immediate_backend_type(cast);
@@ -1903,29 +1903,23 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidValue {

     /// Determine if this expression is a "dangerous initialization".
     fn is_dangerous_init(cx: &LateContext<'_, '_>, expr: &hir::Expr) -> Option<InitKind> {
-        const ZEROED_PATH: &[Symbol] = &[sym::core, sym::mem, sym::zeroed];
-        const UININIT_PATH: &[Symbol] = &[sym::core, sym::mem, sym::uninitialized];
+        // `transmute` is inside an anonymous module (the `extern` block?);
+        // `Invalid` represents the empty string and matches that.
+        // FIXME(#66075): use diagnostic items. Somehow, that does not seem to work
+        // on intrinsics right now.
         const TRANSMUTE_PATH: &[Symbol] =
             &[sym::core, sym::intrinsics, kw::Invalid, sym::transmute];
-        const MU_ZEROED_PATH: &[Symbol] =
-            &[sym::core, sym::mem, sym::maybe_uninit, sym::MaybeUninit, sym::zeroed];
-        const MU_UNINIT_PATH: &[Symbol] =
-            &[sym::core, sym::mem, sym::maybe_uninit, sym::MaybeUninit, sym::uninit];

         if let hir::ExprKind::Call(ref path_expr, ref args) = expr.kind {
             // Find calls to `mem::{uninitialized,zeroed}` methods.
             if let hir::ExprKind::Path(ref qpath) = path_expr.kind {
                 let def_id = cx.tables.qpath_res(qpath, path_expr.hir_id).opt_def_id()?;

-                if cx.match_def_path(def_id, ZEROED_PATH) {
+                if cx.tcx.is_diagnostic_item(sym::mem_zeroed, def_id) {
                     return Some(InitKind::Zeroed);
-                }
-                if cx.match_def_path(def_id, UININIT_PATH) {
+                } else if cx.tcx.is_diagnostic_item(sym::mem_uninitialized, def_id) {
                     return Some(InitKind::Uninit);
-                }
-                if cx.match_def_path(def_id, TRANSMUTE_PATH) {
+                } else if cx.match_def_path(def_id, TRANSMUTE_PATH) {
                     if is_zero(&args[0]) {
                         return Some(InitKind::Zeroed);
                     }
@@ -1940,9 +1934,10 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidValue {
             if let hir::ExprKind::Call(ref path_expr, _) = args[0].kind {
                 if let hir::ExprKind::Path(ref qpath) = path_expr.kind {
                     let def_id = cx.tables.qpath_res(qpath, path_expr.hir_id).opt_def_id()?;
-                    if cx.match_def_path(def_id, MU_ZEROED_PATH) {
+
+                    if cx.tcx.is_diagnostic_item(sym::maybe_uninit_zeroed, def_id) {
                         return Some(InitKind::Zeroed);
-                    } else if cx.match_def_path(def_id, MU_UNINIT_PATH) {
+                    } else if cx.tcx.is_diagnostic_item(sym::maybe_uninit_uninit, def_id) {
                         return Some(InitKind::Uninit);
                     }
                 }
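Aside: the two hunks above swap hard-coded def-paths for diagnostic-item lookups; the lint's behavior is unchanged. A sketch (not part of this diff) of code it fires on:

    #![allow(deprecated)]
    use std::mem::{self, MaybeUninit};

    fn main() {
        // Both lines are deliberately invalid and trigger `invalid_value`,
        // now matched via the `mem_zeroed`/`maybe_uninit_uninit` diagnostic items:
        let _r: &'static u32 = unsafe { mem::zeroed() };               // null reference
        let _b: bool = unsafe { MaybeUninit::uninit().assume_init() }; // uninit bool
    }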
@@ -37,18 +37,15 @@ crate fn rustc_version() -> String {
 /// Metadata encoding version.
 /// N.B., increment this if you change the format of metadata such that
 /// the rustc version can't be found to compare with `rustc_version()`.
-const METADATA_VERSION: u8 = 4;
+const METADATA_VERSION: u8 = 5;

 /// Metadata header which includes `METADATA_VERSION`.
-/// To get older versions of rustc to ignore this metadata,
-/// there are 4 zero bytes at the start, which are treated
-/// as a length of 0 by old compilers.
 ///
 /// This header is followed by the position of the `CrateRoot`,
 /// which is encoded as a 32-bit big-endian unsigned integer,
 /// and further followed by the rustc version string.
-crate const METADATA_HEADER: &[u8; 12] =
-    &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
+crate const METADATA_HEADER: &[u8; 8] =
+    &[b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];

 /// Additional metadata for a `Lazy<T>` where `T` may not be `Sized`,
 /// e.g. for `Lazy<[T]>`, this is the length (count of `T` values).
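Aside: per the PR title, the point of dropping the four leading zero bytes is that LLVM could mistake a blob starting with zeros for a COFF object file; starting directly with b"rust" avoids that. A sketch (not part of this diff; `looks_like_rmeta` is a hypothetical helper) of recognizing the new 8-byte header:

    const METADATA_VERSION: u8 = 5;

    fn looks_like_rmeta(blob: &[u8]) -> bool {
        blob.len() >= 8
            && &blob[..4] == b"rust"
            && blob[4..7] == [0, 0, 0]
            && blob[7] == METADATA_VERSION
    }

    fn main() {
        let header = [b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
        assert!(looks_like_rmeta(&header));
    }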
@@ -36,6 +36,7 @@ use rustc::traits::query::type_op::custom::CustomTypeOp;
 use rustc::traits::query::{Fallible, NoSolution};
 use rustc::traits::{self, ObligationCause, PredicateObligations};
 use rustc::ty::adjustment::{PointerCast};
+use rustc::ty::cast::CastTy;
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::subst::{Subst, SubstsRef, GenericArgKind, UserSubsts};
 use rustc::ty::{
@@ -2177,72 +2178,125 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                         ty_from,
                         ty_to,
                         terr
                     );
                 }
             }

+            CastKind::Pointer(PointerCast::ArrayToPointer) => {
+                let ty_from = op.ty(body, tcx);
+
+                let opt_ty_elem = match ty_from.kind {
+                    ty::RawPtr(
+                        ty::TypeAndMut { mutbl: hir::MutImmutable, ty: array_ty }
+                    ) => {
+                        match array_ty.kind {
+                            ty::Array(ty_elem, _) => Some(ty_elem),
+                            _ => None,
+                        }
+                    }
+                    _ => None,
+                };
+
+                let ty_elem = match opt_ty_elem {
+                    Some(ty_elem) => ty_elem,
+                    None => {
+                        span_mirbug!(
+                            self,
+                            rvalue,
+                            "ArrayToPointer cast from unexpected type {:?}",
+                            ty_from,
+                        );
+                        return;
+                    }
+                };
+
+                let ty_to = match ty.kind {
+                    ty::RawPtr(
+                        ty::TypeAndMut { mutbl: hir::MutImmutable, ty: ty_to }
+                    ) => {
+                        ty_to
+                    }
+                    _ => {
+                        span_mirbug!(
+                            self,
+                            rvalue,
+                            "ArrayToPointer cast to unexpected type {:?}",
+                            ty,
+                        );
+                        return;
+                    }
+                };
+
+                if let Err(terr) = self.sub_types(
+                    ty_elem,
+                    ty_to,
+                    location.to_locations(),
+                    ConstraintCategory::Cast,
+                ) {
+                    span_mirbug!(
+                        self,
+                        rvalue,
+                        "relating {:?} with {:?} yields {:?}",
+                        ty_elem,
+                        ty_to,
+                        terr
+                    )
+                }
+            }

             CastKind::Misc => {
-                if let ty::Ref(_, mut ty_from, _) = op.ty(body, tcx).kind {
-                    let (mut ty_to, mutability) = if let ty::RawPtr(ty::TypeAndMut {
-                        ty: ty_to,
-                        mutbl,
-                    }) = ty.kind {
-                        (ty_to, mutbl)
-                    } else {
-                        span_mirbug!(
-                            self,
-                            rvalue,
-                            "invalid cast types {:?} -> {:?}",
-                            op.ty(body, tcx),
-                            ty,
-                        );
-                        return;
-                    };
-
-                    // Handle the direct cast from `&[T; N]` to `*const T` by unwrapping
-                    // any array we find.
-                    while let ty::Array(ty_elem_from, _) = ty_from.kind {
-                        ty_from = ty_elem_from;
-                        if let ty::Array(ty_elem_to, _) = ty_to.kind {
-                            ty_to = ty_elem_to;
-                        } else {
-                            break;
-                        }
-                    }
-
-                    if let hir::MutMutable = mutability {
-                        if let Err(terr) = self.eq_types(
-                            ty_from,
-                            ty_to,
-                            location.to_locations(),
-                            ConstraintCategory::Cast,
-                        ) {
-                            span_mirbug!(
-                                self,
-                                rvalue,
-                                "equating {:?} with {:?} yields {:?}",
-                                ty_from,
-                                ty_to,
-                                terr
-                            )
-                        }
-                    } else {
-                        if let Err(terr) = self.sub_types(
-                            ty_from,
-                            ty_to,
-                            location.to_locations(),
-                            ConstraintCategory::Cast,
-                        ) {
-                            span_mirbug!(
-                                self,
-                                rvalue,
-                                "relating {:?} with {:?} yields {:?}",
-                                ty_from,
-                                ty_to,
-                                terr
-                            )
-                        }
-                    }
-                }
+                let ty_from = op.ty(body, tcx);
+                let cast_ty_from = CastTy::from_ty(ty_from);
+                let cast_ty_to = CastTy::from_ty(ty);
+                match (cast_ty_from, cast_ty_to) {
+                    (Some(CastTy::RPtr(ref_tm)), Some(CastTy::Ptr(ptr_tm))) => {
+                        if let hir::MutMutable = ptr_tm.mutbl {
+                            if let Err(terr) = self.eq_types(
+                                ref_tm.ty,
+                                ptr_tm.ty,
+                                location.to_locations(),
+                                ConstraintCategory::Cast,
+                            ) {
+                                span_mirbug!(
+                                    self,
+                                    rvalue,
+                                    "equating {:?} with {:?} yields {:?}",
+                                    ref_tm.ty,
+                                    ptr_tm.ty,
+                                    terr
+                                )
+                            }
+                        } else {
+                            if let Err(terr) = self.sub_types(
+                                ref_tm.ty,
+                                ptr_tm.ty,
+                                location.to_locations(),
+                                ConstraintCategory::Cast,
+                            ) {
+                                span_mirbug!(
+                                    self,
+                                    rvalue,
+                                    "relating {:?} with {:?} yields {:?}",
+                                    ref_tm.ty,
+                                    ptr_tm.ty,
+                                    terr
+                                )
+                            }
+                        }
+                    },
+                    (None, _)
+                    | (_, None)
+                    | (_, Some(CastTy::FnPtr))
+                    | (Some(CastTy::Float), Some(CastTy::Ptr(_)))
+                    | (Some(CastTy::Ptr(_)), Some(CastTy::Float))
+                    | (Some(CastTy::FnPtr), Some(CastTy::Float)) => span_mirbug!(
+                        self,
+                        rvalue,
+                        "Invalid cast {:?} -> {:?}",
+                        ty_from,
+                        ty,
+                    ),
+                    _ => (),
+                }
             }
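Aside: a sketch (not part of this diff) of the surface rule the new `CastTy`-based check encodes for reference-to-pointer casts: `&T` may be cast to `*const T` (the subtyping direction), while casting away immutability is rejected:

    fn main() {
        let x = 5i32;
        let p = &x as *const i32; // ok: immutable reference to const pointer
        // let q = &x as *mut i32; // rejected: would cast away immutability
        unsafe { assert_eq!(*p, 5) };
    }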
@@ -628,6 +628,11 @@ fn make_mirror_unadjusted<'a, 'tcx>(
     let cast = if cx.tables().is_coercion_cast(source.hir_id) {
         // Convert the lexpr to a vexpr.
         ExprKind::Use { source: source.to_ref() }
+    } else if cx.tables().expr_ty(source).is_region_ptr() {
+        // Special cased so that we can type check that the element
+        // type of the source matches the pointed to type of the
+        // destination.
+        ExprKind::Pointer { source: source.to_ref(), cast: PointerCast::ArrayToPointer }
     } else {
         // check whether this is casting an enum variant discriminant
         // to prevent cycles, we refer to the discriminant initializer
@@ -26,7 +26,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 self.unsize_into(src, dest)?;
             }

-            Misc | Pointer(PointerCast::MutToConstPointer) => {
+            Misc
+            | Pointer(PointerCast::MutToConstPointer)
+            | Pointer(PointerCast::ArrayToPointer) => {
                 let src = self.read_immediate(src)?;
                 let res = self.cast_immediate(src, dest.layout)?;
                 self.write_immediate(res, dest)?;
@@ -9,7 +9,7 @@ use rustc::hir::def_id::DefId;
 use rustc::mir::{
     AggregateKind, Constant, Location, Place, PlaceBase, Body, Operand, Rvalue, Local, UnOp,
     StatementKind, Statement, LocalKind, TerminatorKind, Terminator, ClearCrossCrate, SourceInfo,
-    BinOp, SourceScope, SourceScopeLocalData, LocalDecl, BasicBlock,
+    BinOp, SourceScope, SourceScopeLocalData, LocalDecl, BasicBlock, RETURN_PLACE,
 };
 use rustc::mir::visit::{
     Visitor, PlaceContext, MutatingUseContext, MutVisitor, NonMutatingUseContext,
@@ -25,6 +25,7 @@ use rustc::ty::layout::{
     LayoutOf, TyLayout, LayoutError, HasTyCtxt, TargetDataLayout, HasDataLayout,
 };

+use crate::rustc::ty::subst::Subst;
 use crate::interpret::{
     self, InterpCx, ScalarMaybeUndef, Immediate, OpTy,
     StackPopCleanup, LocalValue, LocalState, AllocId, Frame,
@@ -269,6 +270,7 @@ struct ConstPropagator<'mir, 'tcx> {
     param_env: ParamEnv<'tcx>,
     source_scope_local_data: ClearCrossCrate<IndexVec<SourceScope, SourceScopeLocalData>>,
     local_decls: IndexVec<Local, LocalDecl<'tcx>>,
+    ret: Option<OpTy<'tcx, ()>>,
 }

 impl<'mir, 'tcx> LayoutOf for ConstPropagator<'mir, 'tcx> {
@@ -308,11 +310,21 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         let mut ecx = InterpCx::new(tcx.at(span), param_env, ConstPropMachine, ());
         let can_const_prop = CanConstProp::check(body);

+        let substs = &InternalSubsts::identity_for_item(tcx, def_id);
+
+        let ret =
+            ecx
+                .layout_of(body.return_ty().subst(tcx, substs))
+                .ok()
+                // Don't bother allocating memory for ZST types which have no values.
+                .filter(|ret_layout| !ret_layout.is_zst())
+                .map(|ret_layout| ecx.allocate(ret_layout, MemoryKind::Stack));
+
         ecx.push_stack_frame(
-            Instance::new(def_id, &InternalSubsts::identity_for_item(tcx, def_id)),
+            Instance::new(def_id, substs),
             span,
             dummy_body,
-            None,
+            ret.map(Into::into),
             StackPopCleanup::None {
                 cleanup: false,
             },
@@ -327,6 +339,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
             source_scope_local_data,
             //FIXME(wesleywiser) we can't steal this because `Visitor::super_visit_body()` needs it
             local_decls: body.local_decls.clone(),
+            ret: ret.map(Into::into),
         }
     }
@@ -335,6 +348,15 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
     }

     fn get_const(&self, local: Local) -> Option<Const<'tcx>> {
+        if local == RETURN_PLACE {
+            // Try to read the return place as an immediate so that if it is representable as a
+            // scalar, we can handle it as such, but otherwise, just return the value as is.
+            return match self.ret.map(|ret| self.ecx.try_read_immediate(ret)) {
+                Some(Ok(Ok(imm))) => Some(imm.into()),
+                _ => self.ret,
+            };
+        }
+
         self.ecx.access_local(self.ecx.frame(), local, None).ok()
     }
@@ -643,7 +665,8 @@ impl CanConstProp {
                 // lint for x != y
                 // FIXME(oli-obk): lint variables until they are used in a condition
                 // FIXME(oli-obk): lint if return value is constant
-                *val = body.local_kind(local) == LocalKind::Temp;
+                let local_kind = body.local_kind(local);
+                *val = local_kind == LocalKind::Temp || local_kind == LocalKind::ReturnPointer;

                 if !*val {
                     trace!("local {:?} can't be propagated because it's not a temporary", local);
@@ -731,7 +754,9 @@ impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> {
                     }
                 } else {
                     trace!("can't propagate into {:?}", local);
-                    self.remove_const(local);
+                    if local != RETURN_PLACE {
+                        self.remove_const(local);
+                    }
                 }
             }
         }
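Aside: with `RETURN_PLACE` now propagatable, a function like the one below folds down to a single constant store into `_0`; the new mir-opt test `src/test/mir-opt/const_prop/return_place.rs` at the end of this diff checks exactly this (sketch, not part of the diff):

    fn add() -> u32 {
        2 + 2 // ConstProp rewrites the body to `_0 = const 4u32`
    }

    fn main() {
        assert_eq!(add(), 4);
    }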
@@ -150,7 +150,8 @@ fn check_rvalue(
             _ => check_operand(tcx, operand, span, def_id, body),
         }
     }
-    Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, _) => {
+    Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, _)
+    | Rvalue::Cast(CastKind::Pointer(PointerCast::ArrayToPointer), operand, _) => {
         check_operand(tcx, operand, span, def_id, body)
     }
     Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), _, _) |
@@ -359,13 +359,20 @@ impl<'a, 'tcx> Visitor<'tcx> for DeclMarker<'a, 'tcx> {
         // Ignore stores of constants because `ConstProp` and `CopyProp` can remove uses of many
         // of these locals. However, if the local is still needed, then it will be referenced in
         // another place and we'll mark it as being used there.
-        if ctx == PlaceContext::MutatingUse(MutatingUseContext::Store) {
-            let stmt =
-                &self.body.basic_blocks()[location.block].statements[location.statement_index];
-            if let StatementKind::Assign(box (p, Rvalue::Use(Operand::Constant(c)))) = &stmt.kind {
-                if p.as_local().is_some() {
-                    trace!("skipping store of const value {:?} to {:?}", c, local);
-                    return;
+        if ctx == PlaceContext::MutatingUse(MutatingUseContext::Store) ||
+           ctx == PlaceContext::MutatingUse(MutatingUseContext::Projection) {
+            let block = &self.body.basic_blocks()[location.block];
+            if location.statement_index != block.statements.len() {
+                let stmt =
+                    &block.statements[location.statement_index];
+
+                if let StatementKind::Assign(
+                    box (p, Rvalue::Use(Operand::Constant(c)))
+                ) = &stmt.kind {
+                    if !p.is_indirect() {
+                        trace!("skipping store of const value {:?} to {:?}", c, p);
+                        return;
+                    }
                 }
             }
         }
@@ -392,7 +399,7 @@ impl<'tcx> MutVisitor<'tcx> for LocalUpdater<'tcx> {
                 self.map[*l].is_some()
             }
             StatementKind::Assign(box (place, _)) => {
-                if let Some(local) = place.as_local() {
+                if let PlaceBase::Local(local) = place.base {
                     self.map[local].is_some()
                 } else {
                     true
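Aside: a sketch (not part of this diff) of why the marker now checks `!p.is_indirect()` rather than `p.as_local()`: a constant store through a projection or a deref still keeps the underlying local live, so only direct, non-indirect stores may be skipped:

    fn main() {
        let mut pair = (0u32, 0u32);
        pair.1 = 7;            // store through a field projection
        let p = &mut pair.0;
        *p = 3;                // indirect store: must count as a use
        assert_eq!(pair, (3, 7));
    }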
@@ -636,6 +636,15 @@ impl<'a, 'tcx> CastCheck<'tcx> {
         // need to special-case obtaining a raw pointer
         // from a region pointer to a vector.

+        // Coerce to a raw pointer so that we generate AddressOf in MIR.
+        let array_ptr_type = fcx.tcx.mk_ptr(m_expr);
+        fcx.try_coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No)
+            .unwrap_or_else(|_| bug!(
+                "could not cast from reference to array to pointer to array ({:?} to {:?})",
+                self.expr_ty,
+                array_ptr_type,
+            ));
+
         // this will report a type mismatch if needed
         fcx.demand_eqtype(self.span, ety, m_cast.ty);
         return Ok(CastKind::ArrayPtrCast);
@@ -536,7 +536,7 @@ impl Ipv4Addr {
     /// // the broadcast address is not global
     /// assert_eq!(Ipv4Addr::new(255, 255, 255, 255).is_global(), false);
     ///
-    /// // the broadcast address is not global
+    /// // the address space designated for documentation is not global
     /// assert_eq!(Ipv4Addr::new(192, 0, 2, 255).is_global(), false);
     /// assert_eq!(Ipv4Addr::new(198, 51, 100, 65).is_global(), false);
     /// assert_eq!(Ipv4Addr::new(203, 0, 113, 6).is_global(), false);
@@ -1130,7 +1130,7 @@ impl Ipv6Addr {
     /// The following return [`false`]:
     ///
     /// - the loopback address
-    /// - link-local, site-local, and unique local unicast addresses
+    /// - link-local and unique local unicast addresses
     /// - interface-, link-, realm-, admin- and site-local multicast addresses
     ///
     /// [`true`]: ../../std/primitive.bool.html
@@ -51,11 +51,43 @@
 //
 // You'll find a few more details in the implementation, but that's the gist of
 // it!
 //
+// Atomic orderings:
+// When running `Once` we deal with multiple atomics:
+// `Once.state_and_queue` and an unknown number of `Waiter.signaled`.
+// * `state_and_queue` is used (1) as a state flag, (2) for synchronizing the
+//   result of the `Once`, and (3) for synchronizing `Waiter` nodes.
+//     - At the end of the `call_inner` function we have to make sure the result
+//       of the `Once` is acquired. So every load which can be the only one to
+//       load COMPLETED must have at least Acquire ordering, which means all
+//       three of them.
+//     - `WaiterQueue::Drop` is the only place that may store COMPLETED, and
+//       must do so with Release ordering to make the result available.
+//     - `wait` inserts `Waiter` nodes as a pointer in `state_and_queue`, and
+//       needs to make the nodes available with Release ordering. The load in
+//       its `compare_and_swap` can be Relaxed because it only has to compare
+//       the atomic, not to read other data.
+//     - `WaiterQueue::Drop` must see the `Waiter` nodes, so it must load
+//       `state_and_queue` with Acquire ordering.
+//     - There is just one store where `state_and_queue` is used only as a
+//       state flag, without having to synchronize data: switching the state
+//       from INCOMPLETE to RUNNING in `call_inner`. This store can be Relaxed,
+//       but the read has to be Acquire because of the requirements mentioned
+//       above.
+// * `Waiter.signaled` is both used as a flag, and to protect a field with
+//   interior mutability in `Waiter`. `Waiter.thread` is changed in
+//   `WaiterQueue::Drop` which then sets `signaled` with Release ordering.
+//   After `wait` loads `signaled` with Acquire and sees it is true, it needs to
+//   see the changes to drop the `Waiter` struct correctly.
+// * There is one place where the two atomics `Once.state_and_queue` and
+//   `Waiter.signaled` come together, and might be reordered by the compiler or
+//   processor. Because both use Acquire ordering such a reordering is not
+//   allowed, so no need for SeqCst.

+use crate::cell::Cell;
 use crate::fmt;
 use crate::marker;
-use crate::ptr;
-use crate::sync::atomic::{AtomicUsize, AtomicBool, Ordering};
+use crate::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
 use crate::thread::{self, Thread};
||||
/// A synchronization primitive which can be used to run a one-time global
|
||||
@ -78,10 +110,10 @@ use crate::thread::{self, Thread};
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub struct Once {
|
||||
// This `state` word is actually an encoded version of just a pointer to a
|
||||
// `Waiter`, so we add the `PhantomData` appropriately.
|
||||
state: AtomicUsize,
|
||||
_marker: marker::PhantomData<*mut Waiter>,
|
||||
// `state_and_queue` is actually an a pointer to a `Waiter` with extra state
|
||||
// bits, so we add the `PhantomData` appropriately.
|
||||
state_and_queue: AtomicUsize,
|
||||
_marker: marker::PhantomData<*const Waiter>,
|
||||
}
|
||||
|
||||
// The `PhantomData` of a raw pointer removes these two auto traits, but we
|
||||
@@ -117,12 +149,12 @@ pub struct OnceState {
 #[rustc_deprecated(
     since = "1.38.0",
     reason = "the `new` function is now preferred",
-    suggestion = "Once::new()",
+    suggestion = "Once::new()"
 )]
 pub const ONCE_INIT: Once = Once::new();

-// Four states that a Once can be in, encoded into the lower bits of `state` in
-// the Once structure.
+// Four states that a Once can be in, encoded into the lower bits of
+// `state_and_queue` in the Once structure.
 const INCOMPLETE: usize = 0x0;
 const POISONED: usize = 0x1;
 const RUNNING: usize = 0x2;
@@ -132,28 +164,32 @@ const COMPLETE: usize = 0x3;
 // this is in the RUNNING state.
 const STATE_MASK: usize = 0x3;

-// Representation of a node in the linked list of waiters in the RUNNING state.
+// Representation of a node in the linked list of waiters, used while in the
+// RUNNING state.
+// Note: `Waiter` can't hold a mutable pointer to the next thread, because then
+// `wait` would both hand out a mutable reference to its `Waiter` node, and keep
+// a shared reference to check `signaled`. Instead we hold shared references and
+// use interior mutability.
+#[repr(align(4))] // Ensure the two lower bits are free to use as state bits.
 struct Waiter {
-    thread: Option<Thread>,
+    thread: Cell<Option<Thread>>,
     signaled: AtomicBool,
-    next: *mut Waiter,
+    next: *const Waiter,
 }

-// Helper struct used to clean up after a closure call with a `Drop`
-// implementation to also run on panic.
-struct Finish<'a> {
-    panicked: bool,
-    me: &'a Once,
+// Head of a linked list of waiters.
+// Every node is a struct on the stack of a waiting thread.
+// Will wake up the waiters when it gets dropped, i.e. also on panic.
+struct WaiterQueue<'a> {
+    state_and_queue: &'a AtomicUsize,
+    set_state_on_drop_to: usize,
 }

 impl Once {
     /// Creates a new `Once` value.
     #[stable(feature = "once_new", since = "1.2.0")]
     pub const fn new() -> Once {
-        Once {
-            state: AtomicUsize::new(INCOMPLETE),
-            _marker: marker::PhantomData,
-        }
+        Once { state_and_queue: AtomicUsize::new(INCOMPLETE), _marker: marker::PhantomData }
     }

     /// Performs an initialization routine once and only once. The given closure
@@ -214,7 +250,10 @@ impl Once {
     ///
     /// [poison]: struct.Mutex.html#poisoning
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn call_once<F>(&self, f: F) where F: FnOnce() {
+    pub fn call_once<F>(&self, f: F)
+    where
+        F: FnOnce(),
+    {
         // Fast path check
         if self.is_completed() {
             return;
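Aside: the refactor keeps the public `Once` API unchanged; typical usage (sketch, not part of this diff):

    use std::sync::Once;
    use std::thread;

    static INIT: Once = Once::new();

    fn main() {
        let handles: Vec<_> = (0..4)
            .map(|_| {
                thread::spawn(|| {
                    INIT.call_once(|| println!("runs exactly once"));
                })
            })
            .collect();
        for h in handles {
            h.join().unwrap();
        }
    }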
@@ -271,16 +310,17 @@ impl Once {
     /// INIT.call_once(|| {});
     /// ```
     #[unstable(feature = "once_poison", issue = "33577")]
-    pub fn call_once_force<F>(&self, f: F) where F: FnOnce(&OnceState) {
+    pub fn call_once_force<F>(&self, f: F)
+    where
+        F: FnOnce(&OnceState),
+    {
         // Fast path check
         if self.is_completed() {
             return;
         }

         let mut f = Some(f);
-        self.call_inner(true, &mut |p| {
-            f.take().unwrap()(&OnceState { poisoned: p })
-        });
+        self.call_inner(true, &mut |p| f.take().unwrap()(&OnceState { poisoned: p }));
     }

     /// Returns `true` if some `call_once` call has completed
@@ -329,8 +369,8 @@ impl Once {
         // An `Acquire` load is enough because that makes all the initialization
         // operations visible to us, and, this being a fast path, weaker
         // ordering helps with performance. This `Acquire` synchronizes with
-        // `SeqCst` operations on the slow path.
-        self.state.load(Ordering::Acquire) == COMPLETE
+        // `Release` operations on the slow path.
+        self.state_and_queue.load(Ordering::Acquire) == COMPLETE
     }

     // This is a non-generic function to reduce the monomorphization cost of
@@ -345,94 +385,92 @@ impl Once {
     // currently no way to take an `FnOnce` and call it via virtual dispatch
     // without some allocation overhead.
     #[cold]
-    fn call_inner(&self,
-                  ignore_poisoning: bool,
-                  init: &mut dyn FnMut(bool)) {
-
-        // This cold path uses SeqCst consistently because the
-        // performance difference really does not matter there, and
-        // SeqCst minimizes the chances of something going wrong.
-        let mut state = self.state.load(Ordering::SeqCst);
-
-        'outer: loop {
-            match state {
-                // If we're complete, then there's nothing to do, we just
-                // jettison out as we shouldn't run the closure.
-                COMPLETE => return,
-
-                // If we're poisoned and we're not in a mode to ignore
-                // poisoning, then we panic here to propagate the poison.
+    fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(bool)) {
+        let mut state_and_queue = self.state_and_queue.load(Ordering::Acquire);
+        loop {
+            match state_and_queue {
+                COMPLETE => break,
                 POISONED if !ignore_poisoning => {
+                    // Panic to propagate the poison.
                     panic!("Once instance has previously been poisoned");
                 }
-
-                // Otherwise if we see a poisoned or otherwise incomplete state
-                // we will attempt to move ourselves into the RUNNING state. If
-                // we succeed, then the queue of waiters starts at null (all 0
-                // bits).
-                POISONED |
-                INCOMPLETE => {
-                    let old = self.state.compare_and_swap(state, RUNNING,
-                                                          Ordering::SeqCst);
-                    if old != state {
-                        state = old;
-                        continue
+                POISONED | INCOMPLETE => {
+                    // Try to register this thread as the one RUNNING.
+                    let old = self.state_and_queue.compare_and_swap(
+                        state_and_queue,
+                        RUNNING,
+                        Ordering::Acquire,
+                    );
+                    if old != state_and_queue {
+                        state_and_queue = old;
+                        continue;
                     }

-                    // Run the initialization routine, letting it know if we're
-                    // poisoned or not. The `Finish` struct is then dropped, and
-                    // the `Drop` implementation here is responsible for waking
-                    // up other waiters both in the normal return and panicking
-                    // case.
-                    let mut complete = Finish {
-                        panicked: true,
-                        me: self,
+                    // `waiter_queue` will manage other waiting threads, and
+                    // wake them up on drop.
+                    let mut waiter_queue = WaiterQueue {
+                        state_and_queue: &self.state_and_queue,
+                        set_state_on_drop_to: POISONED,
                     };
-                    init(state == POISONED);
-                    complete.panicked = false;
-                    return
+                    // Run the initialization function, letting it know if we're
+                    // poisoned or not.
+                    init(state_and_queue == POISONED);
+                    waiter_queue.set_state_on_drop_to = COMPLETE;
+                    break;
                 }
-
-                // All other values we find should correspond to the RUNNING
-                // state with an encoded waiter list in the more significant
-                // bits. We attempt to enqueue ourselves by moving us to the
-                // head of the list and bail out if we ever see a state that's
-                // not RUNNING.
                 _ => {
-                    assert!(state & STATE_MASK == RUNNING);
-                    let mut node = Waiter {
-                        thread: Some(thread::current()),
-                        signaled: AtomicBool::new(false),
-                        next: ptr::null_mut(),
-                    };
-                    let me = &mut node as *mut Waiter as usize;
-                    assert!(me & STATE_MASK == 0);
-
-                    while state & STATE_MASK == RUNNING {
-                        node.next = (state & !STATE_MASK) as *mut Waiter;
-                        let old = self.state.compare_and_swap(state,
-                                                              me | RUNNING,
-                                                              Ordering::SeqCst);
-                        if old != state {
-                            state = old;
-                            continue
-                        }
-
-                        // Once we've enqueued ourselves, wait in a loop.
-                        // Afterwards reload the state and continue with what we
-                        // were doing from before.
-                        while !node.signaled.load(Ordering::SeqCst) {
-                            thread::park();
-                        }
-                        state = self.state.load(Ordering::SeqCst);
-                        continue 'outer
-                    }
+                    // All other values must be RUNNING with possibly a
+                    // pointer to the waiter queue in the more significant bits.
+                    assert!(state_and_queue & STATE_MASK == RUNNING);
+                    wait(&self.state_and_queue, state_and_queue);
+                    state_and_queue = self.state_and_queue.load(Ordering::Acquire);
                 }
             }
         }
     }

+fn wait(state_and_queue: &AtomicUsize, mut current_state: usize) {
+    // Note: the following code was carefully written to avoid creating a
+    // mutable reference to `node` that gets aliased.
+    loop {
+        // Don't queue this thread if the status is no longer running,
+        // otherwise we will not be woken up.
+        if current_state & STATE_MASK != RUNNING {
+            return;
+        }
+
+        // Create the node for our current thread.
+        let node = Waiter {
+            thread: Cell::new(Some(thread::current())),
+            signaled: AtomicBool::new(false),
+            next: (current_state & !STATE_MASK) as *const Waiter,
+        };
+        let me = &node as *const Waiter as usize;
+
+        // Try to slide in the node at the head of the linked list, making sure
+        // that another thread didn't just replace the head of the linked list.
+        let old = state_and_queue.compare_and_swap(current_state, me | RUNNING, Ordering::Release);
+        if old != current_state {
+            current_state = old;
+            continue;
+        }
+
+        // We have enqueued ourselves, now lets wait.
+        // It is important not to return before being signaled, otherwise we
+        // would drop our `Waiter` node and leave a hole in the linked list
+        // (and a dangling reference). Guard against spurious wakeups by
+        // reparking ourselves until we are signaled.
+        while !node.signaled.load(Ordering::Acquire) {
+            // If the managing thread happens to signal and unpark us before we
+            // can park ourselves, the result could be this thread never gets
+            // unparked. Luckily `park` comes with the guarantee that if it got
+            // an `unpark` just before on an unparked thread it does not park.
+            thread::park();
+        }
+        break;
+    }
+}

 #[stable(feature = "std_debug", since = "1.16.0")]
 impl fmt::Debug for Once {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -440,29 +478,31 @@ impl fmt::Debug for Once {
     }
 }

-impl Drop for Finish<'_> {
+impl Drop for WaiterQueue<'_> {
     fn drop(&mut self) {
-        // Swap out our state with however we finished. We should only ever see
-        // an old state which was RUNNING.
-        let queue = if self.panicked {
-            self.me.state.swap(POISONED, Ordering::SeqCst)
-        } else {
-            self.me.state.swap(COMPLETE, Ordering::SeqCst)
-        };
-        assert_eq!(queue & STATE_MASK, RUNNING);
+        // Swap out our state with however we finished.
+        let state_and_queue =
+            self.state_and_queue.swap(self.set_state_on_drop_to, Ordering::AcqRel);

-        // Decode the RUNNING to a list of waiters, then walk that entire list
-        // and wake them up. Note that it is crucial that after we store `true`
-        // in the node it can be free'd! As a result we load the `thread` to
-        // signal ahead of time and then unpark it after the store.
+        // We should only ever see an old state which was RUNNING.
+        assert_eq!(state_and_queue & STATE_MASK, RUNNING);
+
+        // Walk the entire linked list of waiters and wake them up (in lifo
+        // order, last to register is first to wake up).
         unsafe {
-            let mut queue = (queue & !STATE_MASK) as *mut Waiter;
+            // Right after setting `node.signaled = true` the other thread may
+            // free `node` if there happens to be a spurious wakeup.
+            // So we have to take out the `thread` field and copy the pointer to
+            // `next` first.
+            let mut queue = (state_and_queue & !STATE_MASK) as *const Waiter;
             while !queue.is_null() {
                 let next = (*queue).next;
-                let thread = (*queue).thread.take().unwrap();
-                (*queue).signaled.store(true, Ordering::SeqCst);
-                thread.unpark();
+                let thread = (*queue).thread.replace(None).unwrap();
+                (*queue).signaled.store(true, Ordering::Release);
+                // ^- FIXME (maybe): This is another case of issue #55005
+                //    `store()` has a potentially dangling ref to `signaled`.
                 queue = next;
+                thread.unpark();
             }
         }
     }
@@ -518,10 +558,10 @@ impl OnceState {

 #[cfg(all(test, not(target_os = "emscripten")))]
 mod tests {
-    use super::Once;
     use crate::panic;
     use crate::sync::mpsc::channel;
     use crate::thread;
+    use super::Once;

     #[test]
     fn smoke_once() {
@@ -541,8 +581,10 @@ mod tests {
         let (tx, rx) = channel();
         for _ in 0..10 {
             let tx = tx.clone();
-            thread::spawn(move|| {
-                for _ in 0..4 { thread::yield_now() }
+            thread::spawn(move || {
+                for _ in 0..4 {
+                    thread::yield_now()
+                }
                 unsafe {
                     O.call_once(|| {
                         assert!(!RUN);
@@ -631,6 +673,5 @@ mod tests {

         assert!(t1.join().is_ok());
         assert!(t2.join().is_ok());
-
     }
 }
@@ -418,9 +418,10 @@ symbols! {
         match_beginning_vert,
         match_default_bindings,
         may_dangle,
-        maybe_uninit,
-        MaybeUninit,
-        mem,
+        maybe_uninit_uninit,
+        maybe_uninit_zeroed,
+        mem_uninitialized,
+        mem_zeroed,
         member_constraints,
         message,
         meta,
@@ -713,8 +714,6 @@ symbols! {
         underscore_imports,
         underscore_lifetimes,
         uniform_paths,
-        uninit,
-        uninitialized,
         universal_impl_trait,
         unmarked_api,
         unreachable_code,
@@ -745,7 +744,6 @@ symbols! {
         windows,
         windows_subsystem,
         Yield,
-        zeroed,
     }
 }
@@ -152,7 +152,7 @@ pub fn change_constructor_path_regular_struct() {
 }

 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,optimized_mir,mir_built,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="HirBody,mir_built,typeck_tables_of")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_regular_struct() {
     let _ = RegularStruct2 {
@@ -213,7 +213,7 @@ pub fn change_constructor_path_tuple_struct() {
 }

 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,optimized_mir,mir_built,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="HirBody,mir_built,typeck_tables_of")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_tuple_struct() {
     let _ = TupleStruct2(0, 1, 2);
src/test/mir-opt/const_prop/return_place.rs (new file, 54 lines)
@@ -0,0 +1,54 @@
+// compile-flags: -C overflow-checks=on
+
+fn add() -> u32 {
+    2 + 2
+}
+
+fn main() {
+    add();
+}
+
+// END RUST SOURCE
+// START rustc.add.ConstProp.before.mir
+// fn add() -> u32 {
+//     let mut _0: u32;
+//     let mut _1: (u32, bool);
+//     bb0: {
+//         _1 = CheckedAdd(const 2u32, const 2u32);
+//         assert(!move (_1.1: bool), "attempt to add with overflow") -> bb1;
+//     }
+//     bb1: {
+//         _0 = move (_1.0: u32);
+//         return;
+//     }
+//     bb2 (cleanup): {
+//         resume;
+//     }
+// }
+// END rustc.add.ConstProp.before.mir
+// START rustc.add.ConstProp.after.mir
+// fn add() -> u32 {
+//     let mut _0: u32;
+//     let mut _1: (u32, bool);
+//     bb0: {
+//         _1 = (const 4u32, const false);
+//         assert(!const false, "attempt to add with overflow") -> bb1;
+//     }
+//     bb1: {
+//         _0 = const 4u32;
+//         return;
+//     }
+//     bb2 (cleanup): {
+//         resume;
+//     }
+// }
+// END rustc.add.ConstProp.after.mir
+// START rustc.add.PreCodegen.before.mir
+// fn add() -> u32 {
+//     let mut _0: u32;
+//     bb0: {
+//         _0 = const 4u32;
+//         return;
+//     }
+// }
+// END rustc.add.PreCodegen.before.mir
@@ -1,6 +1,6 @@
 -include ../tools.mk

 all:
-	touch $(TMPDIR)/rust.metadata.bin
-	$(AR) crus $(TMPDIR)/libfoo-ffffffff-1.0.rlib $(TMPDIR)/rust.metadata.bin
+	touch $(TMPDIR)/lib.rmeta
+	$(AR) crus $(TMPDIR)/libfoo-ffffffff-1.0.rlib $(TMPDIR)/lib.rmeta
 	$(RUSTC) foo.rs 2>&1 | $(CGREP) "can't find crate for"