Auto merge of #125824 - matthiaskrgr:rollup-eam7i0p, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - #125652 (Revert propagation of drop-live information from Polonius)
 - #125730 (Apply `x clippy --fix` and `x fmt` on Rustc)
 - #125756 (coverage: Optionally instrument the RHS of lazy logical operators)
 - #125776 (Stop using `translate_args` in the new solver)
 - #125796 (Also InstSimplify `&raw*`)
 - #125807 (Also resolve the type of constants, even if we already turned it into an error constant)
 - #125816 (Don't build the `rust-demangler` binary for coverage tests)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-05-31 16:12:57 +00:00
commit ada5e2c7b5
51 changed files with 875 additions and 161 deletions

View File

@ -14,7 +14,6 @@ use std::rc::Rc;
use crate::{
constraints::OutlivesConstraintSet,
facts::{AllFacts, AllFactsExt},
location::LocationTable,
region_infer::values::LivenessValues,
universal_regions::UniversalRegions,
};
@ -39,7 +38,6 @@ pub(super) fn generate<'mir, 'tcx>(
elements: &Rc<DenseLocationMap>,
flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
move_data: &MoveData<'tcx>,
location_table: &LocationTable,
use_polonius: bool,
) {
debug!("liveness::generate");
@ -53,11 +51,9 @@ pub(super) fn generate<'mir, 'tcx>(
compute_relevant_live_locals(typeck.tcx(), &free_regions, body);
let facts_enabled = use_polonius || AllFacts::enabled(typeck.tcx());
let polonius_drop_used = facts_enabled.then(|| {
let mut drop_used = Vec::new();
polonius::populate_access_facts(typeck, body, location_table, move_data, &mut drop_used);
drop_used
});
if facts_enabled {
polonius::populate_access_facts(typeck, body, move_data);
};
trace::trace(
typeck,
@ -67,7 +63,6 @@ pub(super) fn generate<'mir, 'tcx>(
move_data,
relevant_live_locals,
boring_locals,
polonius_drop_used,
);
// Mark regions that should be live where they appear within rvalues or within a call: like

View File

@ -85,13 +85,10 @@ impl<'a, 'tcx> Visitor<'tcx> for UseFactsExtractor<'a, 'tcx> {
pub(super) fn populate_access_facts<'a, 'tcx>(
typeck: &mut TypeChecker<'a, 'tcx>,
body: &Body<'tcx>,
location_table: &LocationTable,
move_data: &MoveData<'tcx>,
//FIXME: this is not mutated, but expected to be modified as
// out param, bug?
dropped_at: &mut Vec<(Local, Location)>,
) {
debug!("populate_access_facts()");
let location_table = typeck.borrowck_context.location_table;
if let Some(facts) = typeck.borrowck_context.all_facts.as_mut() {
let mut extractor = UseFactsExtractor {
@ -104,10 +101,6 @@ pub(super) fn populate_access_facts<'a, 'tcx>(
};
extractor.visit_body(body);
facts.var_dropped_at.extend(
dropped_at.iter().map(|&(local, location)| (local, location_table.mid_index(location))),
);
for (local, local_decl) in body.local_decls.iter_enumerated() {
debug!(
"add use_of_var_derefs_origin facts - local={:?}, type={:?}",

View File

@ -16,6 +16,7 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex};
use rustc_mir_dataflow::ResultsCursor;
use crate::location::RichLocation;
use crate::{
region_infer::values::{self, LiveLoans},
type_check::liveness::local_use_map::LocalUseMap,
@ -46,7 +47,6 @@ pub(super) fn trace<'mir, 'tcx>(
move_data: &MoveData<'tcx>,
relevant_live_locals: Vec<Local>,
boring_locals: Vec<Local>,
polonius_drop_used: Option<Vec<(Local, Location)>>,
) {
let local_use_map = &LocalUseMap::build(&relevant_live_locals, elements, body);
@ -93,9 +93,7 @@ pub(super) fn trace<'mir, 'tcx>(
let mut results = LivenessResults::new(cx);
if let Some(drop_used) = polonius_drop_used {
results.add_extra_drop_facts(drop_used, relevant_live_locals.iter().copied().collect())
}
results.add_extra_drop_facts(&relevant_live_locals);
results.compute_for_all_locals(relevant_live_locals);
@ -218,21 +216,38 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
///
/// Add facts for all locals with free regions, since regions may outlive
/// the function body only at certain nodes in the CFG.
fn add_extra_drop_facts(
&mut self,
drop_used: Vec<(Local, Location)>,
relevant_live_locals: FxIndexSet<Local>,
) {
fn add_extra_drop_facts(&mut self, relevant_live_locals: &[Local]) -> Option<()> {
let drop_used = self
.cx
.typeck
.borrowck_context
.all_facts
.as_ref()
.map(|facts| facts.var_dropped_at.clone())?;
let relevant_live_locals: FxIndexSet<_> = relevant_live_locals.iter().copied().collect();
let locations = IntervalSet::new(self.cx.elements.num_points());
for (local, location) in drop_used {
for (local, location_index) in drop_used {
if !relevant_live_locals.contains(&local) {
let local_ty = self.cx.body.local_decls[local].ty;
if local_ty.has_free_regions() {
let location = match self
.cx
.typeck
.borrowck_context
.location_table
.to_location(location_index)
{
RichLocation::Start(l) => l,
RichLocation::Mid(l) => l,
};
self.cx.add_drop_live_facts_for(local, local_ty, &[location], &locations);
}
}
}
Some(())
}
/// Clear the value of fields that are "per local variable".

View File

@ -188,15 +188,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
checker.equate_inputs_and_outputs(body, universal_regions, &normalized_inputs_and_output);
checker.check_signature_annotation(body);
liveness::generate(
&mut checker,
body,
elements,
flow_inits,
move_data,
location_table,
use_polonius,
);
liveness::generate(&mut checker, body, elements, flow_inits, move_data, use_polonius);
translate_outlives_facts(&mut checker);
let opaque_type_values = infcx.take_opaque_types();

View File

@ -93,7 +93,7 @@ fn dominators_impl<G: ControlFlowGraph>(graph: &G) -> Inner<G::Node> {
// These are all done here rather than through one of the 'standard'
// graph traversals to help make this fast.
'recurse: while let Some(frame) = stack.last_mut() {
while let Some(successor) = frame.iter.next() {
for successor in frame.iter.by_ref() {
if real_to_pre_order[successor].is_none() {
let pre_order_idx = pre_order_to_real.push(successor);
real_to_pre_order[successor] = Some(pre_order_idx);

View File

@ -48,7 +48,7 @@ fn post_order_walk<G: DirectedGraph + Successors>(
let node = frame.node;
visited[node] = true;
while let Some(successor) = frame.iter.next() {
for successor in frame.iter.by_ref() {
if !visited[successor] {
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
continue 'recurse;
@ -112,7 +112,7 @@ where
/// This is equivalent to just invoking `next` repeatedly until
/// you get a `None` result.
pub fn complete_search(&mut self) {
while let Some(_) = self.next() {}
for _ in self.by_ref() {}
}
/// Returns true if node has been visited thus far.

View File

@ -40,7 +40,7 @@ pub struct SccData<S: Idx> {
}
impl<N: Idx, S: Idx + Ord> Sccs<N, S> {
pub fn new(graph: &(impl DirectedGraph<Node = N> + Successors)) -> Self {
pub fn new(graph: &impl Successors<Node = N>) -> Self {
SccsConstruction::construct(graph)
}

View File

@ -562,7 +562,7 @@ impl SelfProfiler {
// ASLR is disabled and the heap is otherwise deterministic.
let pid: u32 = process::id();
let filename = format!("{crate_name}-{pid:07}.rustc_profile");
let path = output_directory.join(&filename);
let path = output_directory.join(filename);
let profiler =
Profiler::with_counter(&path, measureme::counters::Counter::by_name(counter_name)?)?;

View File

@ -125,13 +125,13 @@ impl<K: Ord, V> SortedMap<K, V> {
/// Iterate over the keys, sorted
#[inline]
pub fn keys(&self) -> impl Iterator<Item = &K> + ExactSizeIterator + DoubleEndedIterator {
pub fn keys(&self) -> impl ExactSizeIterator<Item = &K> + DoubleEndedIterator {
self.data.iter().map(|(k, _)| k)
}
/// Iterate over values, sorted by key
#[inline]
pub fn values(&self) -> impl Iterator<Item = &V> + ExactSizeIterator + DoubleEndedIterator {
pub fn values(&self) -> impl ExactSizeIterator<Item = &V> + DoubleEndedIterator {
self.data.iter().map(|(_, v)| v)
}

View File

@ -69,7 +69,7 @@ mod maybe_sync {
match self.mode {
Mode::NoSync => {
let cell = unsafe { &self.lock.mode_union.no_sync };
debug_assert_eq!(cell.get(), true);
debug_assert!(cell.get());
cell.set(false);
}
// SAFETY (unlock): We know that the lock is locked as this type is a proof of that.

View File

@ -865,6 +865,7 @@ impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Resolver<'cx, 'tcx> {
self.handle_term(ct, ty::Const::outer_exclusive_binder, |tcx, guar| {
ty::Const::new_error(tcx, guar, ct.ty())
})
.super_fold_with(self)
}
fn fold_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {

View File

@ -157,6 +157,63 @@ impl BranchInfoBuilder {
}
impl<'tcx> Builder<'_, 'tcx> {
/// If condition coverage is enabled, inject extra blocks and marker statements
/// that will let us track the value of the condition in `place`.
pub(crate) fn visit_coverage_standalone_condition(
&mut self,
mut expr_id: ExprId, // Expression giving the span of the condition
place: mir::Place<'tcx>, // Already holds the boolean condition value
block: &mut BasicBlock,
) {
// Bail out if condition coverage is not enabled for this function.
let Some(branch_info) = self.coverage_branch_info.as_mut() else { return };
if !self.tcx.sess.instrument_coverage_condition() {
return;
};
// Remove any wrappers, so that we can inspect the real underlying expression.
while let ExprKind::Use { source: inner } | ExprKind::Scope { value: inner, .. } =
self.thir[expr_id].kind
{
expr_id = inner;
}
// If the expression is a lazy logical op, it will naturally get branch
// coverage as part of its normal lowering, so we can disregard it here.
if let ExprKind::LogicalOp { .. } = self.thir[expr_id].kind {
return;
}
let source_info = SourceInfo { span: self.thir[expr_id].span, scope: self.source_scope };
// Using the boolean value that has already been stored in `place`, set up
// control flow in the shape of a diamond, so that we can place separate
// marker statements in the true and false blocks. The coverage MIR pass
// will use those markers to inject coverage counters as appropriate.
//
//          block
//         /     \
// true_block   false_block
//  (marker)     (marker)
//         \     /
//        join_block
let true_block = self.cfg.start_new_block();
let false_block = self.cfg.start_new_block();
self.cfg.terminate(
*block,
source_info,
mir::TerminatorKind::if_(mir::Operand::Copy(place), true_block, false_block),
);
branch_info.add_two_way_branch(&mut self.cfg, source_info, true_block, false_block);
let join_block = self.cfg.start_new_block();
self.cfg.goto(true_block, source_info, join_block);
self.cfg.goto(false_block, source_info, join_block);
// Any subsequent codegen in the caller should use the new join block.
*block = join_block;
}
/// If branch coverage is enabled, inject marker statements into `then_block`
/// and `else_block`, and record their IDs in the table of branch spans.
pub(crate) fn visit_coverage_branch_condition(

View File

@ -183,9 +183,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
const_: Const::from_bool(this.tcx, constant),
},
);
let rhs = unpack!(this.expr_into_dest(destination, continuation, rhs));
let mut rhs_block = unpack!(this.expr_into_dest(destination, continuation, rhs));
// Instrument the lowered RHS's value for condition coverage.
// (Does nothing if condition coverage is not enabled.)
this.visit_coverage_standalone_condition(rhs, destination, &mut rhs_block);
let target = this.cfg.start_new_block();
this.cfg.goto(rhs, source_info, target);
this.cfg.goto(rhs_block, source_info, target);
this.cfg.goto(short_circuit, source_info, target);
target.unit()
}

View File

@ -123,7 +123,7 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
/// Transform `&(*a)` ==> `a`.
fn simplify_ref_deref(&self, source_info: &SourceInfo, rvalue: &mut Rvalue<'tcx>) {
if let Rvalue::Ref(_, _, place) = rvalue {
if let Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) = rvalue {
if let Some((base, ProjectionElem::Deref)) = place.as_ref().last_projection() {
if rvalue.ty(self.local_decls, self.tcx) != base.ty(self.local_decls, self.tcx).ty {
return;

View File

@ -286,13 +286,11 @@ impl<'a> Iterator for Parser<'a> {
lbrace_byte_pos.to(InnerOffset(rbrace_byte_pos.0 + width)),
);
}
} else {
if let Some(&(_, maybe)) = self.cur.peek() {
match maybe {
'?' => self.suggest_format_debug(),
'<' | '^' | '>' => self.suggest_format_align(maybe),
_ => self.suggest_positional_arg_instead_of_captured_arg(arg),
}
} else if let Some(&(_, maybe)) = self.cur.peek() {
match maybe {
'?' => self.suggest_format_debug(),
'<' | '^' | '>' => self.suggest_format_align(maybe),
_ => self.suggest_positional_arg_instead_of_captured_arg(arg),
}
}
Some(NextArgument(Box::new(arg)))
@ -1028,7 +1026,7 @@ fn find_width_map_from_snippet(
if next_c == '{' {
// consume up to 6 hexanumeric chars
let digits_len =
s.clone().take(6).take_while(|(_, c)| c.is_digit(16)).count();
s.clone().take(6).take_while(|(_, c)| c.is_ascii_hexdigit()).count();
let len_utf8 = s
.as_str()
@ -1047,14 +1045,14 @@ fn find_width_map_from_snippet(
width += required_skips + 2;
s.nth(digits_len);
} else if next_c.is_digit(16) {
} else if next_c.is_ascii_hexdigit() {
width += 1;
// We suggest adding `{` and `}` when appropriate, accept it here as if
// it were correct
let mut i = 0; // consume up to 6 hexanumeric chars
while let (Some((_, c)), _) = (s.next(), i < 6) {
if c.is_digit(16) {
if c.is_ascii_hexdigit() {
width += 1;
} else {
break;

View File

@ -252,7 +252,7 @@ impl<S: Encoder> Encodable<S> for () {
}
impl<D: Decoder> Decodable<D> for () {
fn decode(_: &mut D) -> () {}
fn decode(_: &mut D) {}
}
impl<S: Encoder, T> Encodable<S> for PhantomData<T> {

View File

@ -159,7 +159,23 @@ pub enum CoverageLevel {
Block,
/// Also instrument branch points (includes block coverage).
Branch,
/// Instrument for MC/DC. Mostly a superset of branch coverage, but might
/// Same as branch coverage, but also adds branch instrumentation for
/// certain boolean expressions that are not directly used for branching.
///
/// For example, in the following code, `b` does not directly participate
/// in a branch, but condition coverage will instrument it as its own
/// artificial branch:
/// ```
/// # let (a, b) = (false, true);
/// let x = a && b;
/// // ^ last operand
/// ```
///
/// This level is mainly intended to be a stepping-stone towards full MC/DC
/// instrumentation, so it might be removed in the future when MC/DC is
/// sufficiently complete, or if it is making MC/DC changes difficult.
Condition,
/// Instrument for MC/DC. Mostly a superset of condition coverage, but might
/// differ in some corner cases.
Mcdc,
}

View File

@ -395,7 +395,7 @@ mod desc {
pub const parse_optimization_fuel: &str = "crate=integer";
pub const parse_dump_mono_stats: &str = "`markdown` (default) or `json`";
pub const parse_instrument_coverage: &str = parse_bool;
pub const parse_coverage_options: &str = "`block` | `branch` | `mcdc`";
pub const parse_coverage_options: &str = "`block` | `branch` | `condition` | `mcdc`";
pub const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`";
pub const parse_unpretty: &str = "`string` or `string=string`";
pub const parse_treat_err_as_bug: &str = "either no value or a non-negative number";
@ -961,6 +961,7 @@ mod parse {
match option {
"block" => slot.level = CoverageLevel::Block,
"branch" => slot.level = CoverageLevel::Branch,
"condition" => slot.level = CoverageLevel::Condition,
"mcdc" => slot.level = CoverageLevel::Mcdc,
_ => return false,
}

View File

@ -353,6 +353,11 @@ impl Session {
&& self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Branch
}
pub fn instrument_coverage_condition(&self) -> bool {
self.instrument_coverage()
&& self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Condition
}
pub fn instrument_coverage_mcdc(&self) -> bool {
self.instrument_coverage()
&& self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Mcdc

View File

@ -12,7 +12,6 @@ use rustc_middle::bug;
use rustc_middle::traits::solve::{
inspect, CanonicalInput, CanonicalResponse, Certainty, PredefinedOpaquesData, QueryResult,
};
use rustc_middle::traits::specialization_graph;
use rustc_middle::ty::AliasRelationDirection;
use rustc_middle::ty::TypeFolder;
use rustc_middle::ty::{
@ -900,16 +899,6 @@ impl<'tcx> EvalCtxt<'_, InferCtxt<'tcx>> {
args
}
pub(super) fn translate_args(
&self,
param_env: ty::ParamEnv<'tcx>,
source_impl: DefId,
source_args: ty::GenericArgsRef<'tcx>,
target_node: specialization_graph::Node,
) -> ty::GenericArgsRef<'tcx> {
crate::traits::translate_args(self.infcx, param_env, source_impl, source_args, target_node)
}
pub(super) fn register_ty_outlives(&self, ty: Ty<'tcx>, lt: ty::Region<'tcx>) {
self.infcx.register_region_obligation_with_cause(ty, lt, &ObligationCause::dummy());
}

View File

@ -1,4 +1,4 @@
use crate::traits::specialization_graph;
use crate::traits::specialization_graph::{self, LeafDef, Node};
use super::assembly::structural_traits::AsyncCallableRelevantTypes;
use super::assembly::{self, structural_traits, Candidate};
@ -9,7 +9,6 @@ use rustc_infer::infer::InferCtxt;
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::solve::inspect::ProbeKind;
use rustc_infer::traits::solve::MaybeCause;
use rustc_infer::traits::specialization_graph::LeafDef;
use rustc_infer::traits::Reveal;
use rustc_middle::traits::solve::{CandidateSource, Certainty, Goal, QueryResult};
use rustc_middle::traits::BuiltinImplSource;
@ -189,8 +188,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
// In case the associated item is hidden due to specialization, we have to
// return ambiguity, as this would otherwise be incomplete, resulting in
// unsoundness during coherence (#105782).
let Some(assoc_def) = fetch_eligible_assoc_item_def(
ecx,
let Some(assoc_def) = ecx.fetch_eligible_assoc_item_def(
goal.param_env,
goal_trait_ref,
goal.predicate.def_id(),
@ -235,16 +233,10 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
//
// And then map these args to the args of the defining impl of `Assoc`, going
// from `[u32, u64]` to `[u32, i32, u64]`.
let impl_args_with_gat =
goal.predicate.alias.args.rebase_onto(tcx, goal_trait_ref.def_id, impl_args);
let args = ecx.translate_args(
goal.param_env,
impl_def_id,
impl_args_with_gat,
assoc_def.defining_node,
);
let associated_item_args =
ecx.translate_args(&assoc_def, goal, impl_def_id, impl_args, impl_trait_ref)?;
if !tcx.check_args_compatible(assoc_def.item.def_id, args) {
if !tcx.check_args_compatible(assoc_def.item.def_id, associated_item_args) {
return error_response(
ecx,
"associated item has mismatched generic item arguments",
@ -272,7 +264,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
ty::AssocKind::Fn => unreachable!("we should never project to a fn"),
};
ecx.instantiate_normalizes_to_term(goal, term.instantiate(tcx, args));
ecx.instantiate_normalizes_to_term(goal, term.instantiate(tcx, associated_item_args));
ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
})
}
@ -889,38 +881,79 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
}
}
/// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code.
///
/// FIXME: We should merge these 3 implementations as it's likely that they otherwise
/// diverge.
#[instrument(level = "trace", skip(ecx, param_env), ret)]
fn fetch_eligible_assoc_item_def<'tcx>(
ecx: &EvalCtxt<'_, InferCtxt<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
goal_trait_ref: ty::TraitRef<'tcx>,
trait_assoc_def_id: DefId,
impl_def_id: DefId,
) -> Result<Option<LeafDef>, NoSolution> {
let node_item =
specialization_graph::assoc_def(ecx.interner(), impl_def_id, trait_assoc_def_id)
.map_err(|ErrorGuaranteed { .. }| NoSolution)?;
impl<'tcx> EvalCtxt<'_, InferCtxt<'tcx>> {
fn translate_args(
&mut self,
assoc_def: &LeafDef,
goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
impl_def_id: DefId,
impl_args: ty::GenericArgsRef<'tcx>,
impl_trait_ref: rustc_type_ir::TraitRef<TyCtxt<'tcx>>,
) -> Result<ty::GenericArgsRef<'tcx>, NoSolution> {
let tcx = self.interner();
Ok(match assoc_def.defining_node {
Node::Trait(_) => goal.predicate.alias.args,
Node::Impl(target_impl_def_id) => {
if target_impl_def_id == impl_def_id {
// Same impl, no need to fully translate, just a rebase from
// the trait is sufficient.
goal.predicate.alias.args.rebase_onto(tcx, impl_trait_ref.def_id, impl_args)
} else {
let target_args = self.fresh_args_for_item(target_impl_def_id);
let target_trait_ref = tcx
.impl_trait_ref(target_impl_def_id)
.unwrap()
.instantiate(tcx, target_args);
// Relate source impl to target impl by equating trait refs.
self.eq(goal.param_env, impl_trait_ref, target_trait_ref)?;
// Also add predicates since they may be needed to constrain the
// target impl's params.
self.add_goals(
GoalSource::Misc,
tcx.predicates_of(target_impl_def_id)
.instantiate(tcx, target_args)
.into_iter()
.map(|(pred, _)| goal.with(tcx, pred)),
);
goal.predicate.alias.args.rebase_onto(tcx, impl_trait_ref.def_id, target_args)
}
}
})
}
let eligible = if node_item.is_final() {
// Non-specializable items are always projectable.
true
} else {
// Only reveal a specializable default if we're past type-checking
// and the obligation is monomorphic, otherwise passes such as
// transmute checking and polymorphic MIR optimizations could
// get a result which isn't correct for all monomorphizations.
if param_env.reveal() == Reveal::All {
let poly_trait_ref = ecx.resolve_vars_if_possible(goal_trait_ref);
!poly_trait_ref.still_further_specializable()
/// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code.
///
/// FIXME: We should merge these 3 implementations as it's likely that they otherwise
/// diverge.
#[instrument(level = "trace", skip(self, param_env), ret)]
fn fetch_eligible_assoc_item_def(
&self,
param_env: ty::ParamEnv<'tcx>,
goal_trait_ref: ty::TraitRef<'tcx>,
trait_assoc_def_id: DefId,
impl_def_id: DefId,
) -> Result<Option<LeafDef>, NoSolution> {
let node_item =
specialization_graph::assoc_def(self.interner(), impl_def_id, trait_assoc_def_id)
.map_err(|ErrorGuaranteed { .. }| NoSolution)?;
let eligible = if node_item.is_final() {
// Non-specializable items are always projectable.
true
} else {
trace!(?node_item.item.def_id, "not eligible due to default");
false
}
};
// Only reveal a specializable default if we're past type-checking
// and the obligation is monomorphic, otherwise passes such as
// transmute checking and polymorphic MIR optimizations could
// get a result which isn't correct for all monomorphizations.
if param_env.reveal() == Reveal::All {
let poly_trait_ref = self.resolve_vars_if_possible(goal_trait_ref);
!poly_trait_ref.still_further_specializable()
} else {
trace!(?node_item.item.def_id, "not eligible due to default");
false
}
};
if eligible { Ok(Some(node_item)) } else { Ok(None) }
if eligible { Ok(Some(node_item)) } else { Ok(None) }
}
}

View File

@ -156,7 +156,7 @@ fn pretty_terminator<W: Write>(writer: &mut W, terminator: &TerminatorKind) -> i
fn pretty_terminator_head<W: Write>(writer: &mut W, terminator: &TerminatorKind) -> io::Result<()> {
use self::TerminatorKind::*;
const INDENT: &'static str = " ";
const INDENT: &str = " ";
match terminator {
Goto { .. } => write!(writer, "{INDENT}goto"),
SwitchInt { discr, .. } => {
@ -315,7 +315,7 @@ fn pretty_operand(operand: &Operand) -> String {
}
fn pretty_const(literal: &Const) -> String {
with(|cx| cx.const_pretty(&literal))
with(|cx| cx.const_pretty(literal))
}
fn pretty_rvalue<W: Write>(writer: &mut W, rval: &Rvalue) -> io::Result<()> {

View File

@ -526,7 +526,7 @@ pub enum IntTy {
impl IntTy {
pub fn num_bytes(self) -> usize {
match self {
IntTy::Isize => crate::target::MachineInfo::target_pointer_width().bytes().into(),
IntTy::Isize => crate::target::MachineInfo::target_pointer_width().bytes(),
IntTy::I8 => 1,
IntTy::I16 => 2,
IntTy::I32 => 4,
@ -549,7 +549,7 @@ pub enum UintTy {
impl UintTy {
pub fn num_bytes(self) -> usize {
match self {
UintTy::Usize => crate::target::MachineInfo::target_pointer_width().bytes().into(),
UintTy::Usize => crate::target::MachineInfo::target_pointer_width().bytes(),
UintTy::U8 => 1,
UintTy::U16 => 2,
UintTy::U32 => 4,
@ -1185,7 +1185,7 @@ impl Allocation {
match self.read_int()? {
0 => Ok(false),
1 => Ok(true),
val @ _ => Err(error!("Unexpected value for bool: `{val}`")),
val => Err(error!("Unexpected value for bool: `{val}`")),
}
}

View File

@ -69,7 +69,7 @@ impl Hasher for FxHasher {
hash.add_to_hash(u16::from_ne_bytes(bytes[..2].try_into().unwrap()) as usize);
bytes = &bytes[2..];
}
if (size_of::<usize>() > 1) && bytes.len() >= 1 {
if (size_of::<usize>() > 1) && !bytes.is_empty() {
hash.add_to_hash(bytes[0] as usize);
}
self.hash = hash.hash;

View File

@ -264,9 +264,9 @@ impl From<Box<dyn Any + Send>> for PanicMessage {
}
}
impl Into<Box<dyn Any + Send>> for PanicMessage {
fn into(self) -> Box<dyn Any + Send> {
match self {
impl From<PanicMessage> for Box<dyn Any + Send> {
fn from(val: PanicMessage) -> Self {
match val {
PanicMessage::StaticStr(s) => Box::new(s),
PanicMessage::String(s) => Box::new(s),
PanicMessage::Unknown => {

View File

@ -200,7 +200,7 @@ Test Attributes:
pub fn parse_opts(args: &[String]) -> Option<OptRes> {
// Parse matches.
let opts = optgroups();
let binary = args.get(0).map(|c| &**c).unwrap_or("...");
let binary = args.first().map(|c| &**c).unwrap_or("...");
let args = args.get(1..).unwrap_or(args);
let matches = match opts.parse(args) {
Ok(m) => m,

View File

@ -524,7 +524,7 @@ fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8>, String> {
} else {
let mut s_ = Vec::with_capacity(flags.width);
s_.extend(repeat(b' ').take(n));
s_.extend(s.into_iter());
s_.extend(s);
s = s_;
}
}

View File

@ -1781,25 +1781,11 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
.arg(builder.ensure(tool::JsonDocLint { compiler: json_compiler, target }));
}
if mode == "coverage-map" {
let coverage_dump = builder.ensure(tool::CoverageDump {
compiler: compiler.with_stage(0),
target: compiler.host,
});
if matches!(mode, "coverage-map" | "coverage-run") {
let coverage_dump = builder.tool_exe(Tool::CoverageDump);
cmd.arg("--coverage-dump-path").arg(coverage_dump);
}
if mode == "coverage-run" {
// The demangler doesn't need the current compiler, so we can avoid
// unnecessary rebuilds by using the bootstrap compiler instead.
let rust_demangler = builder.ensure(tool::RustDemangler {
compiler: compiler.with_stage(0),
target: compiler.host,
extra_features: Vec::new(),
});
cmd.arg("--rust-demangler-path").arg(rust_demangler);
}
cmd.arg("--src-base").arg(builder.src.join("tests").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));

View File

@ -5,13 +5,16 @@ This option controls details of the coverage instrumentation performed by `-C instrument-coverage`.
Multiple options can be passed, separated by commas. Valid options are:
- `block`, `branch`, `mcdc`:
- `block`, `branch`, `condition`, `mcdc`:
Sets the level of coverage instrumentation.
Setting the level will override any previously-specified level.
- `block` (default):
Blocks in the control-flow graph will be instrumented for coverage.
- `branch`:
In addition to block coverage, also enables branch coverage instrumentation.
- `condition`:
In addition to branch coverage, also instruments some boolean expressions
as branches, even if they are not directly used as branch conditions.
- `mcdc`:
In addition to block and branch coverage, also enables MC/DC instrumentation.
In addition to condition coverage, also enables MC/DC instrumentation.
(Branch coverage instrumentation may differ in some cases.)
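
As a concrete sketch (closely mirroring the `conditions.rs` coverage test added in this PR), the program below compiled with `-C instrument-coverage -Z coverage-options=condition` gets an artificial branch for `b`: it is the last operand of a lazy `&&` and never directly drives control flow, yet the report shows separate true/false counts for it.

```rust
// Minimal illustration of the `condition` coverage level; build with:
//   rustc -C instrument-coverage -Z coverage-options=condition example.rs
use std::hint::black_box;

fn assign_and(a: bool, b: bool) {
    // Under the `condition` level, `b` is instrumented as its own artificial
    // branch even though only the resulting value `x` is ever used.
    let x = a && b;
    black_box(x);
}

fn main() {
    assign_and(true, false);
    assign_and(true, true);
    assign_and(false, false);
}
```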

View File

@ -187,9 +187,6 @@ pub struct Config {
/// The rustdoc executable.
pub rustdoc_path: Option<PathBuf>,
/// The rust-demangler executable.
pub rust_demangler_path: Option<PathBuf>,
/// The coverage-dump executable.
pub coverage_dump_path: Option<PathBuf>,

View File

@ -46,7 +46,6 @@ pub fn parse_config(args: Vec<String>) -> Config {
.reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
.reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
.optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
.optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
.optopt("", "coverage-dump-path", "path to coverage-dump to use in tests", "PATH")
.reqopt("", "python", "path to python to use for doc tests", "PATH")
.optopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH")
@ -232,7 +231,6 @@ pub fn parse_config(args: Vec<String>) -> Config {
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
rustc_path: opt_path(matches, "rustc-path"),
rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
coverage_dump_path: matches.opt_str("coverage-dump-path").map(PathBuf::from),
python: matches.opt_str("python").unwrap(),
jsondocck_path: matches.opt_str("jsondocck-path"),
@ -337,7 +335,6 @@ pub fn log_config(config: &Config) {
logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
logv(c, format!("src_base: {:?}", config.src_base.display()));
logv(c, format!("build_base: {:?}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));

View File

@ -3561,10 +3561,6 @@ impl<'test> TestCx<'test> {
cmd.env("RUSTDOC", cwd.join(rustdoc));
}
if let Some(ref rust_demangler) = self.config.rust_demangler_path {
cmd.env("RUST_DEMANGLER", cwd.join(rust_demangler));
}
if let Some(ref node) = self.config.nodejs {
cmd.env("NODE", node);
}

View File

@ -10,10 +10,15 @@ use crate::common::{UI_COVERAGE, UI_COVERAGE_MAP};
use crate::runtest::{static_regex, Emit, ProcRes, TestCx, WillExecute};
impl<'test> TestCx<'test> {
fn coverage_dump_path(&self) -> &Path {
self.config
.coverage_dump_path
.as_deref()
.unwrap_or_else(|| self.fatal("missing --coverage-dump"))
}
pub(crate) fn run_coverage_map_test(&self) {
let Some(coverage_dump_path) = &self.config.coverage_dump_path else {
self.fatal("missing --coverage-dump");
};
let coverage_dump_path = self.coverage_dump_path();
let (proc_res, llvm_ir_path) = self.compile_test_and_save_ir();
if !proc_res.status.success() {
@ -102,8 +107,10 @@ impl<'test> TestCx<'test> {
let proc_res = self.run_llvm_tool("llvm-cov", |cmd| {
cmd.args(["show", "--format=text", "--show-line-counts-or-regions"]);
cmd.arg("--Xdemangler");
cmd.arg(self.config.rust_demangler_path.as_ref().unwrap());
// Specify the demangler binary and its arguments.
let coverage_dump_path = self.coverage_dump_path();
cmd.arg("--Xdemangler").arg(coverage_dump_path);
cmd.arg("--Xdemangler").arg("--demangle");
cmd.arg("--instr-profile");
cmd.arg(&profdata_path);

View File

@ -6,3 +6,8 @@ The output format is mostly arbitrary, so it's OK to change the output as long
as any affected tests are also re-blessed. However, the output should be
consistent across different executions on different platforms, so avoid
printing any information that is platform-specific or non-deterministic.
## Demangle mode
When run as `coverage-dump --demangle`, this tool instead functions as a
command-line demangler that can be invoked by `llvm-cov`.
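
For illustration, the protocol is line-oriented: `llvm-cov` writes mangled symbol names to the tool's stdin, one per line, and reads the demangled forms back from stdout. The hypothetical driver below (the binary path and the example symbol are made up for this sketch) exercises the same interface that `llvm-cov --Xdemangler coverage-dump --Xdemangler --demangle` uses.

```rust
// Hypothetical driver: pipe one mangled name into `coverage-dump --demangle`
// and print the demangled line, the same way `llvm-cov` drives the tool.
use std::io::Write;
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    let mut child = Command::new("coverage-dump") // assumed to be on PATH
        .arg("--demangle")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    // One mangled symbol per input line; this v0 symbol is just an example.
    child.stdin.take().unwrap().write_all(b"_RNvCs1234_7example4main\n")?;

    // Each output line is the demangled (`{:#}`) form of the matching input line.
    let output = child.wait_with_output()?;
    print!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```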

View File

@ -7,6 +7,13 @@ fn main() -> anyhow::Result<()> {
let args = std::env::args().collect::<Vec<_>>();
// The coverage-dump tool already needs `rustc_demangle` in order to read
// coverage metadata, so it's very easy to also have a separate mode that
// turns it into a command-line demangler for use by coverage-run tests.
if &args[1..] == &["--demangle"] {
return demangle();
}
let llvm_ir_path = args.get(1).context("LLVM IR file not specified")?;
let llvm_ir = std::fs::read_to_string(llvm_ir_path).context("couldn't read LLVM IR file")?;
@ -15,3 +22,15 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
fn demangle() -> anyhow::Result<()> {
use std::fmt::Write as _;
let stdin = std::io::read_to_string(std::io::stdin())?;
let mut output = String::with_capacity(stdin.len());
for line in stdin.lines() {
writeln!(output, "{:#}", rustc_demangle::demangle(line))?;
}
print!("{output}");
Ok(())
}

View File

@ -0,0 +1,152 @@
Function name: conditions::assign_3_and_or
Raw bytes (69): 0x[01, 01, 07, 07, 11, 09, 0d, 01, 05, 05, 09, 16, 1a, 05, 09, 01, 05, 09, 01, 1c, 01, 00, 2f, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 1a, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 16, 00, 12, 00, 13, 13, 00, 17, 00, 18, 20, 0d, 11, 00, 17, 00, 18, 03, 01, 05, 01, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 7
- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(4)
- expression 1 operands: lhs = Counter(2), rhs = Counter(3)
- expression 2 operands: lhs = Counter(0), rhs = Counter(1)
- expression 3 operands: lhs = Counter(1), rhs = Counter(2)
- expression 4 operands: lhs = Expression(5, Sub), rhs = Expression(6, Sub)
- expression 5 operands: lhs = Counter(1), rhs = Counter(2)
- expression 6 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 9
- Code(Counter(0)) at (prev + 28, 1) to (start + 0, 47)
- Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
= ((c2 + c3) + c4)
- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
- Branch { true: Counter(1), false: Expression(6, Sub) } at (prev + 0, 13) to (start + 0, 14)
true = c1
false = (c0 - c1)
- Code(Counter(1)) at (prev + 0, 18) to (start + 0, 19)
- Branch { true: Counter(2), false: Expression(5, Sub) } at (prev + 0, 18) to (start + 0, 19)
true = c2
false = (c1 - c2)
- Code(Expression(4, Add)) at (prev + 0, 23) to (start + 0, 24)
= ((c1 - c2) + (c0 - c1))
- Branch { true: Counter(3), false: Counter(4) } at (prev + 0, 23) to (start + 0, 24)
true = c3
false = c4
- Code(Expression(0, Add)) at (prev + 1, 5) to (start + 1, 2)
= ((c2 + c3) + c4)
Function name: conditions::assign_3_or_and
Raw bytes (73): 0x[01, 01, 09, 05, 07, 0b, 11, 09, 0d, 01, 05, 01, 05, 22, 11, 01, 05, 22, 11, 01, 05, 09, 01, 17, 01, 00, 2f, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 22, 00, 0d, 00, 0e, 22, 00, 12, 00, 13, 20, 1e, 11, 00, 12, 00, 13, 1e, 00, 17, 00, 18, 20, 09, 0d, 00, 17, 00, 18, 03, 01, 05, 01, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 9
- expression 0 operands: lhs = Counter(1), rhs = Expression(1, Add)
- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(4)
- expression 2 operands: lhs = Counter(2), rhs = Counter(3)
- expression 3 operands: lhs = Counter(0), rhs = Counter(1)
- expression 4 operands: lhs = Counter(0), rhs = Counter(1)
- expression 5 operands: lhs = Expression(8, Sub), rhs = Counter(4)
- expression 6 operands: lhs = Counter(0), rhs = Counter(1)
- expression 7 operands: lhs = Expression(8, Sub), rhs = Counter(4)
- expression 8 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 9
- Code(Counter(0)) at (prev + 23, 1) to (start + 0, 47)
- Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
= (c1 + ((c2 + c3) + c4))
- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
- Branch { true: Counter(1), false: Expression(8, Sub) } at (prev + 0, 13) to (start + 0, 14)
true = c1
false = (c0 - c1)
- Code(Expression(8, Sub)) at (prev + 0, 18) to (start + 0, 19)
= (c0 - c1)
- Branch { true: Expression(7, Sub), false: Counter(4) } at (prev + 0, 18) to (start + 0, 19)
true = ((c0 - c1) - c4)
false = c4
- Code(Expression(7, Sub)) at (prev + 0, 23) to (start + 0, 24)
= ((c0 - c1) - c4)
- Branch { true: Counter(2), false: Counter(3) } at (prev + 0, 23) to (start + 0, 24)
true = c2
false = c3
- Code(Expression(0, Add)) at (prev + 1, 5) to (start + 1, 2)
= (c1 + ((c2 + c3) + c4))
Function name: conditions::assign_and
Raw bytes (51): 0x[01, 01, 04, 07, 0e, 09, 0d, 01, 05, 01, 05, 07, 01, 0d, 01, 00, 21, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 0e, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 20, 09, 0d, 00, 12, 00, 13, 03, 01, 05, 01, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 4
- expression 0 operands: lhs = Expression(1, Add), rhs = Expression(3, Sub)
- expression 1 operands: lhs = Counter(2), rhs = Counter(3)
- expression 2 operands: lhs = Counter(0), rhs = Counter(1)
- expression 3 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 7
- Code(Counter(0)) at (prev + 13, 1) to (start + 0, 33)
- Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
= ((c2 + c3) + (c0 - c1))
- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
- Branch { true: Counter(1), false: Expression(3, Sub) } at (prev + 0, 13) to (start + 0, 14)
true = c1
false = (c0 - c1)
- Code(Counter(1)) at (prev + 0, 18) to (start + 0, 19)
- Branch { true: Counter(2), false: Counter(3) } at (prev + 0, 18) to (start + 0, 19)
true = c2
false = c3
- Code(Expression(0, Add)) at (prev + 1, 5) to (start + 1, 2)
= ((c2 + c3) + (c0 - c1))
Function name: conditions::assign_or
Raw bytes (51): 0x[01, 01, 04, 07, 0d, 05, 09, 01, 05, 01, 05, 07, 01, 12, 01, 00, 20, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 0e, 00, 0d, 00, 0e, 0e, 00, 12, 00, 13, 20, 09, 0d, 00, 12, 00, 13, 03, 01, 05, 01, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 4
- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3)
- expression 1 operands: lhs = Counter(1), rhs = Counter(2)
- expression 2 operands: lhs = Counter(0), rhs = Counter(1)
- expression 3 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 7
- Code(Counter(0)) at (prev + 18, 1) to (start + 0, 32)
- Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
= ((c1 + c2) + c3)
- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
- Branch { true: Counter(1), false: Expression(3, Sub) } at (prev + 0, 13) to (start + 0, 14)
true = c1
false = (c0 - c1)
- Code(Expression(3, Sub)) at (prev + 0, 18) to (start + 0, 19)
= (c0 - c1)
- Branch { true: Counter(2), false: Counter(3) } at (prev + 0, 18) to (start + 0, 19)
true = c2
false = c3
- Code(Expression(0, Add)) at (prev + 1, 5) to (start + 1, 2)
= ((c1 + c2) + c3)
Function name: conditions::foo
Raw bytes (9): 0x[01, 01, 00, 01, 01, 21, 01, 02, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 0
Number of file 0 mappings: 1
- Code(Counter(0)) at (prev + 33, 1) to (start + 2, 2)
Function name: conditions::func_call
Raw bytes (39): 0x[01, 01, 03, 01, 05, 0b, 02, 09, 0d, 05, 01, 25, 01, 01, 0a, 20, 05, 02, 01, 09, 00, 0a, 05, 00, 0e, 00, 0f, 20, 09, 0d, 00, 0e, 00, 0f, 07, 01, 01, 00, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 3
- expression 0 operands: lhs = Counter(0), rhs = Counter(1)
- expression 1 operands: lhs = Expression(2, Add), rhs = Expression(0, Sub)
- expression 2 operands: lhs = Counter(2), rhs = Counter(3)
Number of file 0 mappings: 5
- Code(Counter(0)) at (prev + 37, 1) to (start + 1, 10)
- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 1, 9) to (start + 0, 10)
true = c1
false = (c0 - c1)
- Code(Counter(1)) at (prev + 0, 14) to (start + 0, 15)
- Branch { true: Counter(2), false: Counter(3) } at (prev + 0, 14) to (start + 0, 15)
true = c2
false = c3
- Code(Expression(1, Add)) at (prev + 1, 1) to (start + 0, 2)
= ((c2 + c3) + (c0 - c1))
Function name: conditions::simple_assign
Raw bytes (9): 0x[01, 01, 00, 01, 01, 08, 01, 03, 02]
Number of files: 1
- file 0 => global file 1
Number of expressions: 0
Number of file 0 mappings: 1
- Code(Counter(0)) at (prev + 8, 1) to (start + 3, 2)

View File

@ -0,0 +1,95 @@
LL| |#![feature(coverage_attribute)]
LL| |//@ edition: 2021
LL| |//@ compile-flags: -Zcoverage-options=condition
LL| |//@ llvm-cov-flags: --show-branches=count
LL| |
LL| |use core::hint::black_box;
LL| |
LL| 2|fn simple_assign(a: bool) {
LL| 2| let x = a;
LL| 2| black_box(x);
LL| 2|}
LL| |
LL| 3|fn assign_and(a: bool, b: bool) {
LL| 3| let x = a && b;
^2
------------------
| Branch (LL:13): [True: 2, False: 1]
| Branch (LL:18): [True: 1, False: 1]
------------------
LL| 3| black_box(x);
LL| 3|}
LL| |
LL| 3|fn assign_or(a: bool, b: bool) {
LL| 3| let x = a || b;
^1
------------------
| Branch (LL:13): [True: 2, False: 1]
| Branch (LL:18): [True: 0, False: 1]
------------------
LL| 3| black_box(x);
LL| 3|}
LL| |
LL| 4|fn assign_3_or_and(a: bool, b: bool, c: bool) {
LL| 4| let x = a || b && c;
^2 ^1
------------------
| Branch (LL:13): [True: 2, False: 2]
| Branch (LL:18): [True: 1, False: 1]
| Branch (LL:23): [True: 1, False: 0]
------------------
LL| 4| black_box(x);
LL| 4|}
LL| |
LL| 4|fn assign_3_and_or(a: bool, b: bool, c: bool) {
LL| 4| let x = a && b || c;
^2 ^3
------------------
| Branch (LL:13): [True: 2, False: 2]
| Branch (LL:18): [True: 1, False: 1]
| Branch (LL:23): [True: 2, False: 1]
------------------
LL| 4| black_box(x);
LL| 4|}
LL| |
LL| 3|fn foo(a: bool) -> bool {
LL| 3| black_box(a)
LL| 3|}
LL| |
LL| 3|fn func_call(a: bool, b: bool) {
LL| 3| foo(a && b);
^2
------------------
| Branch (LL:9): [True: 2, False: 1]
| Branch (LL:14): [True: 1, False: 1]
------------------
LL| 3|}
LL| |
LL| |#[coverage(off)]
LL| |fn main() {
LL| | simple_assign(true);
LL| | simple_assign(false);
LL| |
LL| | assign_and(true, false);
LL| | assign_and(true, true);
LL| | assign_and(false, false);
LL| |
LL| | assign_or(true, false);
LL| | assign_or(true, true);
LL| | assign_or(false, false);
LL| |
LL| | assign_3_or_and(true, false, false);
LL| | assign_3_or_and(true, true, false);
LL| | assign_3_or_and(false, false, true);
LL| | assign_3_or_and(false, true, true);
LL| |
LL| | assign_3_and_or(true, false, false);
LL| | assign_3_and_or(true, true, false);
LL| | assign_3_and_or(false, false, true);
LL| | assign_3_and_or(false, true, true);
LL| |
LL| | func_call(true, false);
LL| | func_call(true, true);
LL| | func_call(false, false);
LL| |}

View File

@ -0,0 +1,67 @@
#![feature(coverage_attribute)]
//@ edition: 2021
//@ compile-flags: -Zcoverage-options=condition
//@ llvm-cov-flags: --show-branches=count
use core::hint::black_box;
fn simple_assign(a: bool) {
let x = a;
black_box(x);
}
fn assign_and(a: bool, b: bool) {
let x = a && b;
black_box(x);
}
fn assign_or(a: bool, b: bool) {
let x = a || b;
black_box(x);
}
fn assign_3_or_and(a: bool, b: bool, c: bool) {
let x = a || b && c;
black_box(x);
}
fn assign_3_and_or(a: bool, b: bool, c: bool) {
let x = a && b || c;
black_box(x);
}
fn foo(a: bool) -> bool {
black_box(a)
}
fn func_call(a: bool, b: bool) {
foo(a && b);
}
#[coverage(off)]
fn main() {
simple_assign(true);
simple_assign(false);
assign_and(true, false);
assign_and(true, true);
assign_and(false, false);
assign_or(true, false);
assign_or(true, true);
assign_or(false, false);
assign_3_or_and(true, false, false);
assign_3_or_and(true, true, false);
assign_3_or_and(false, false, true);
assign_3_or_and(false, true, true);
assign_3_and_or(true, false, false);
assign_3_and_or(true, true, false);
assign_3_and_or(false, false, true);
assign_3_and_or(false, true, true);
func_call(true, false);
func_call(true, true);
func_call(false, false);
}

View File

@ -0,0 +1,58 @@
- // MIR for `pointers` before InstSimplify
+ // MIR for `pointers` after InstSimplify
fn pointers(_1: *const [i32], _2: *mut i32) -> () {
debug const_ptr => _1;
debug mut_ptr => _2;
let mut _0: ();
let _3: &[i32];
scope 1 {
debug _a => _3;
let _4: &i32;
scope 2 {
debug _b => _4;
let _5: &mut i32;
scope 3 {
debug _c => _5;
let _6: *const [i32];
scope 4 {
debug _d => _6;
let _7: *const i32;
scope 5 {
debug _e => _7;
let _8: *mut i32;
scope 6 {
debug _f => _8;
}
}
}
}
}
}
bb0: {
StorageLive(_3);
_3 = &(*_1);
StorageLive(_4);
_4 = &(*_2);
StorageLive(_5);
_5 = &mut (*_2);
StorageLive(_6);
- _6 = &raw const (*_1);
+ _6 = _1;
StorageLive(_7);
_7 = &raw const (*_2);
StorageLive(_8);
- _8 = &raw mut (*_2);
+ _8 = _2;
_0 = const ();
StorageDead(_8);
StorageDead(_7);
StorageDead(_6);
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
return;
}
}

View File

@ -0,0 +1,58 @@
- // MIR for `references` before InstSimplify
+ // MIR for `references` after InstSimplify
fn references(_1: &i32, _2: &mut [i32]) -> () {
debug const_ref => _1;
debug mut_ref => _2;
let mut _0: ();
let _3: &i32;
scope 1 {
debug _a => _3;
let _4: &[i32];
scope 2 {
debug _b => _4;
let _5: &mut [i32];
scope 3 {
debug _c => _5;
let _6: *const i32;
scope 4 {
debug _d => _6;
let _7: *const [i32];
scope 5 {
debug _e => _7;
let _8: *mut [i32];
scope 6 {
debug _f => _8;
}
}
}
}
}
}
bb0: {
StorageLive(_3);
- _3 = &(*_1);
+ _3 = _1;
StorageLive(_4);
_4 = &(*_2);
StorageLive(_5);
- _5 = &mut (*_2);
+ _5 = _2;
StorageLive(_6);
_6 = &raw const (*_1);
StorageLive(_7);
_7 = &raw const (*_2);
StorageLive(_8);
_8 = &raw mut (*_2);
_0 = const ();
StorageDead(_8);
StorageDead(_7);
StorageDead(_6);
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
return;
}
}

View File

@ -0,0 +1,40 @@
//@ test-mir-pass: InstSimplify
#![crate_type = "lib"]
#![feature(raw_ref_op)]
// For each of these, only 2 of the 6 should simplify,
// as the others have the wrong types.
// EMIT_MIR ref_of_deref.references.InstSimplify.diff
// CHECK-LABEL: references
pub fn references(const_ref: &i32, mut_ref: &mut [i32]) {
// CHECK: _3 = _1;
let _a = &*const_ref;
// CHECK: _4 = &(*_2);
let _b = &*mut_ref;
// CHECK: _5 = _2;
let _c = &mut *mut_ref;
// CHECK: _6 = &raw const (*_1);
let _d = &raw const *const_ref;
// CHECK: _7 = &raw const (*_2);
let _e = &raw const *mut_ref;
// CHECK: _8 = &raw mut (*_2);
let _f = &raw mut *mut_ref;
}
// EMIT_MIR ref_of_deref.pointers.InstSimplify.diff
// CHECK-LABEL: pointers
pub unsafe fn pointers(const_ptr: *const [i32], mut_ptr: *mut i32) {
// CHECK: _3 = &(*_1);
let _a = &*const_ptr;
// CHECK: _4 = &(*_2);
let _b = &*mut_ptr;
// CHECK: _5 = &mut (*_2);
let _c = &mut *mut_ptr;
// CHECK: _6 = _1;
let _d = &raw const *const_ptr;
// CHECK: _7 = &raw const (*_2);
let _e = &raw const *mut_ptr;
// CHECK: _8 = _2;
let _f = &raw mut *mut_ptr;
}

View File

@ -1,2 +1,2 @@
error: incorrect value `bad` for unstable option `coverage-options` - `block` | `branch` | `mcdc` was expected
error: incorrect value `bad` for unstable option `coverage-options` - `block` | `branch` | `condition` | `mcdc` was expected

View File

@ -1,5 +1,5 @@
//@ needs-profiler-support
//@ revisions: block branch mcdc bad
//@ revisions: block branch condition mcdc bad
//@ compile-flags -Cinstrument-coverage
//@ [block] check-pass
@ -8,6 +8,9 @@
//@ [branch] check-pass
//@ [branch] compile-flags: -Zcoverage-options=branch
//@ [condition] check-pass
//@ [condition] compile-flags: -Zcoverage-options=condition
//@ [mcdc] check-pass
//@ [mcdc] compile-flags: -Zcoverage-options=mcdc

View File

@ -0,0 +1,12 @@
warning: the feature `specialization` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/source-impl-requires-constraining-predicates-ambig.rs:14:12
|
LL | #![feature(specialization)]
| ^^^^^^^^^^^^^^
|
= note: see issue #31844 <https://github.com/rust-lang/rust/issues/31844> for more information
= help: consider using `min_specialization` instead, which is more stable and complete
= note: `#[warn(incomplete_features)]` on by default
warning: 1 warning emitted

View File

@ -0,0 +1,29 @@
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
//@[next] check-pass
//@[current] known-bug: unknown
//@[current] failure-status: 101
//@[current] dont-check-compiler-stderr
// Tests that rebasing from the concrete impl to the default impl also processes the
// `[u32; 0]: IntoIterator<Item = ?U>` predicate to constrain the `?U` impl arg.
// This test also makes sure that we don't do anything weird when rebasing the args
// is ambiguous.
#![feature(specialization)]
//[next]~^ WARN the feature `specialization` is incomplete
trait Spec {
type Assoc;
}
default impl<T, U> Spec for T where T: IntoIterator<Item = U> {
type Assoc = U;
}
impl<T> Spec for [T; 0] {}
fn main() {
let x: <[_; 0] as Spec>::Assoc = 1;
}

View File

@ -0,0 +1,12 @@
warning: the feature `specialization` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/source-impl-requires-constraining-predicates.rs:9:12
|
LL | #![feature(specialization)]
| ^^^^^^^^^^^^^^
|
= note: see issue #31844 <https://github.com/rust-lang/rust/issues/31844> for more information
= help: consider using `min_specialization` instead, which is more stable and complete
= note: `#[warn(incomplete_features)]` on by default
warning: 1 warning emitted

View File

@ -0,0 +1,12 @@
warning: the feature `specialization` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/source-impl-requires-constraining-predicates.rs:9:12
|
LL | #![feature(specialization)]
| ^^^^^^^^^^^^^^
|
= note: see issue #31844 <https://github.com/rust-lang/rust/issues/31844> for more information
= help: consider using `min_specialization` instead, which is more stable and complete
= note: `#[warn(incomplete_features)]` on by default
warning: 1 warning emitted

View File

@ -0,0 +1,24 @@
//@ check-pass
//@ revisions: current next
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver
// Tests that rebasing from the concrete impl to the default impl also processes the
// `[u32; 0]: IntoIterator<Item = ?U>` predicate to constrain the `?U` impl arg.
#![feature(specialization)]
//~^ WARN the feature `specialization` is incomplete
trait Spec {
type Assoc;
}
default impl<T, U> Spec for T where T: IntoIterator<Item = U> {
type Assoc = U;
}
impl<T> Spec for [T; 0] {}
fn main() {
let x: <[u32; 0] as Spec>::Assoc = 1;
}

View File

@ -0,0 +1,10 @@
error: `Bar` is forbidden as the type of a const generic parameter
--> $DIR/const_generic_type.rs:7:24
|
LL | async fn test<const N: crate::Bar>() {
| ^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`
error: aborting due to 1 previous error

View File

@ -0,0 +1,19 @@
error[E0283]: type annotations needed
--> $DIR/const_generic_type.rs:7:1
|
LL | async fn test<const N: crate::Bar>() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type
|
= note: cannot satisfy `_: std::fmt::Display`
error: `Bar` is forbidden as the type of a const generic parameter
--> $DIR/const_generic_type.rs:7:24
|
LL | async fn test<const N: crate::Bar>() {
| ^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0283`.

View File

@ -0,0 +1,14 @@
//@edition: 2021
//@revisions: infer no_infer
#![feature(type_alias_impl_trait)]
type Bar = impl std::fmt::Display;
async fn test<const N: crate::Bar>() {
//[no_infer]~^ ERROR: type annotations needed
//~^^ ERROR: `Bar` is forbidden as the type of a const generic parameter
#[cfg(infer)]
let x: u32 = N;
}
fn main() {}