Auto merge of #111984 - matthiaskrgr:rollup-6u7ynyv, r=matthiaskrgr
Rollup of 5 pull requests

Successful merges:

- #111384 (Fix linking Mac Catalyst by including LC_BUILD_VERSION in object files)
- #111899 (CGU cleanups)
- #111940 (Clarify safety concern of `io::Read::read` is only relevant in unsafe code)
- #111947 (Add test for RPIT defined with different hidden types with different substs)
- #111951 (Correct comment on privately uninhabited pattern.)

Failed merges:

- #111954 (improve error message for calling a method on a raw pointer with an unknown pointee)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 1221e43bdf
@@ -188,6 +188,11 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
};

let mut file = write::Object::new(binary_format, architecture, endianness);
if sess.target.is_like_osx {
if let Some(build_version) = macho_object_build_version_for_target(&sess.target) {
file.set_macho_build_version(build_version)
}
}
let e_flags = match architecture {
Architecture::Mips => {
let arch = match sess.target.options.cpu.as_ref() {
@@ -258,6 +263,33 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
Some(file)
}

/// Apple's LD, when linking for Mac Catalyst, requires object files to
/// contain information about what they were built for (LC_BUILD_VERSION):
/// the platform (macOS/watchOS etc), minimum OS version, and SDK version.
/// This returns a `MachOBuildVersion` if necessary for the target.
fn macho_object_build_version_for_target(
target: &Target,
) -> Option<object::write::MachOBuildVersion> {
if !target.llvm_target.ends_with("-macabi") {
return None;
}
/// The `object` crate demands "X.Y.Z encoded in nibbles as xxxx.yy.zz"
/// e.g. minOS 14.0 = 0x000E0000, or SDK 16.2 = 0x00100200
fn pack_version((major, minor): (u32, u32)) -> u32 {
(major << 16) | (minor << 8)
}

let platform = object::macho::PLATFORM_MACCATALYST;
let min_os = (14, 0);
let sdk = (16, 2);

let mut build_version = object::write::MachOBuildVersion::default();
build_version.platform = platform;
build_version.minos = pack_version(min_os);
build_version.sdk = pack_version(sdk);
Some(build_version)
}

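For reference, the nibble packing described in the doc comment above can be sanity-checked with a tiny standalone program (an illustration of the encoding, not part of the patch):

fn pack_version((major, minor): (u32, u32)) -> u32 {
    (major << 16) | (minor << 8)
}

fn main() {
    // Values taken from the doc comment: minOS 14.0 and SDK 16.2.
    assert_eq!(pack_version((14, 0)), 0x000E_0000);
    assert_eq!(pack_version((16, 2)), 0x0010_0200);
}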
pub enum MetadataPosition {
First,
Last,
@@ -334,10 +334,7 @@ impl<'tcx> CodegenUnit<'tcx> {
}

pub fn modify_size_estimate(&mut self, delta: usize) {
assert!(self.size_estimate.is_some());
if let Some(size_estimate) = self.size_estimate {
self.size_estimate = Some(size_estimate + delta);
}
*self.size_estimate.as_mut().unwrap() += delta;
}

pub fn contains_item(&self, item: &MonoItem<'tcx>) -> bool {
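The hunk above replaces the assert-plus-`if let` body with a single unwrap-and-add; both forms require the size estimate to already be set. A minimal standalone illustration of the new shape (hypothetical `Cgu` type used for the sketch, not the real one):

struct Cgu {
    size_estimate: Option<usize>,
}

impl Cgu {
    fn modify_size_estimate(&mut self, delta: usize) {
        // Panics if the size estimate was never created, just like the old assert did.
        *self.size_estimate.as_mut().unwrap() += delta;
    }
}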
@@ -491,8 +491,8 @@ impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
AdtDefinedHere { adt_def_span, ty, variants }
};

// Emit an extra note if the first uncovered witness is
// visibly uninhabited anywhere in the current crate.
// Emit an extra note if the first uncovered witness would be uninhabited
// if we disregard visibility.
let witness_1_is_privately_uninhabited =
if cx.tcx.features().exhaustive_patterns
&& let Some(witness_1) = witnesses.get(0)
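The situation the corrected comment describes is exercised by the `empty-match.rs` test updated further down in this commit: a struct from the auxiliary `empty` crate whose fields are private and uninhabited, so `Some(_)` is only uninhabited if visibility is disregarded. A minimal sketch of the caller side, mirroring that test:

fn empty_foreign_enum_private(x: Option<empty::SecretlyUninhabitedForeignStruct>) {
    let None = x; //~ ERROR refutable pattern in local binding
    // note: pattern `Some(_)` is currently uninhabited, but this variant
    // contains private fields which may become inhabited in the future
}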
@@ -1,3 +1,4 @@
use std::cmp;
use std::collections::hash_map::Entry;

use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -14,10 +15,7 @@ use rustc_span::symbol::Symbol;

use super::PartitioningCx;
use crate::collector::InliningMap;
use crate::partitioning::merging;
use crate::partitioning::{
MonoItemPlacement, Partition, PostInliningPartitioning, PreInliningPartitioning,
};
use crate::partitioning::{MonoItemPlacement, Partition, PlacedRootMonoItems};

pub struct DefaultPartitioning;

@@ -26,7 +24,7 @@ impl<'tcx> Partition<'tcx> for DefaultPartitioning {
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
mono_items: &mut I,
) -> PreInliningPartitioning<'tcx>
) -> PlacedRootMonoItems<'tcx>
where
I: Iterator<Item = MonoItem<'tcx>>,
{
@@ -91,38 +89,120 @@ impl<'tcx> Partition<'tcx> for DefaultPartitioning {
codegen_units.insert(codegen_unit_name, CodegenUnit::new(codegen_unit_name));
}

PreInliningPartitioning {
codegen_units: codegen_units.into_values().collect(),
roots,
internalization_candidates,
}
let codegen_units = codegen_units.into_values().collect();
PlacedRootMonoItems { codegen_units, roots, internalization_candidates }
}

fn merge_codegen_units(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: &mut PreInliningPartitioning<'tcx>,
codegen_units: &mut Vec<CodegenUnit<'tcx>>,
) {
merging::merge_codegen_units(cx, initial_partitioning);
assert!(cx.target_cgu_count >= 1);

// Note that at this point in time the `codegen_units` here may not be
// in a deterministic order (but we know they're deterministically the
// same set). We want this merging to produce a deterministic ordering
// of codegen units from the input.
//
// Due to basically how we've implemented the merging below (merge the
// two smallest into each other) we're sure to start off with a
// deterministic order (sorted by name). This'll mean that if two cgus
// have the same size the stable sort below will keep everything nice
// and deterministic.
codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));

// This map keeps track of what got merged into what.
let mut cgu_contents: FxHashMap<Symbol, Vec<Symbol>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name()])).collect();

// Merge the two smallest codegen units until the target size is
// reached.
while codegen_units.len() > cx.target_cgu_count {
// Sort small cgus to the back
codegen_units.sort_by_cached_key(|cgu| cmp::Reverse(cgu.size_estimate()));
let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();

// Move the mono-items from `smallest` to `second_smallest`
second_smallest.modify_size_estimate(smallest.size_estimate());
for (k, v) in smallest.items_mut().drain() {
second_smallest.items_mut().insert(k, v);
}

// Record that `second_smallest` now contains all the stuff that was
// in `smallest` before.
let mut consumed_cgu_names = cgu_contents.remove(&smallest.name()).unwrap();
cgu_contents.get_mut(&second_smallest.name()).unwrap().append(&mut consumed_cgu_names);

debug!(
"CodegenUnit {} merged into CodegenUnit {}",
smallest.name(),
second_smallest.name()
);
}

let cgu_name_builder = &mut CodegenUnitNameBuilder::new(cx.tcx);

if cx.tcx.sess.opts.incremental.is_some() {
// If we are doing incremental compilation, we want CGU names to
// reflect the path of the source level module they correspond to.
// For CGUs that contain the code of multiple modules because of the
// merging done above, we use a concatenation of the names of all
// contained CGUs.
let new_cgu_names: FxHashMap<Symbol, String> = cgu_contents
.into_iter()
// This `filter` makes sure we only update the name of CGUs that
// were actually modified by merging.
.filter(|(_, cgu_contents)| cgu_contents.len() > 1)
.map(|(current_cgu_name, cgu_contents)| {
let mut cgu_contents: Vec<&str> =
cgu_contents.iter().map(|s| s.as_str()).collect();

// Sort the names, so things are deterministic and easy to
// predict. We are sorting primitive `&str`s here so we can
// use unstable sort.
cgu_contents.sort_unstable();

(current_cgu_name, cgu_contents.join("--"))
})
.collect();

for cgu in codegen_units.iter_mut() {
if let Some(new_cgu_name) = new_cgu_names.get(&cgu.name()) {
if cx.tcx.sess.opts.unstable_opts.human_readable_cgu_names {
cgu.set_name(Symbol::intern(&new_cgu_name));
} else {
// If we don't require CGU names to be human-readable,
// we use a fixed length hash of the composite CGU name
// instead.
let new_cgu_name = CodegenUnit::mangle_name(&new_cgu_name);
cgu.set_name(Symbol::intern(&new_cgu_name));
}
}
}
} else {
// If we are compiling non-incrementally we just generate simple CGU
// names containing an index.
for (index, cgu) in codegen_units.iter_mut().enumerate() {
let numbered_codegen_unit_name =
cgu_name_builder.build_cgu_name_no_mangle(LOCAL_CRATE, &["cgu"], Some(index));
cgu.set_name(numbered_codegen_unit_name);
}
}
}

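// Aside, not part of the patch: on plain size estimates, the merging loop above
// amounts to this standalone sketch (repeatedly fold the smallest unit into the
// second smallest until the target count is reached).
fn merge_smallest_sketch(mut sizes: Vec<usize>, target_count: usize) -> Vec<usize> {
    while sizes.len() > target_count {
        // Sort small entries to the back, pop the smallest, fold it into the next smallest.
        sizes.sort_by_key(|&s| std::cmp::Reverse(s));
        let smallest = sizes.pop().unwrap();
        *sizes.last_mut().unwrap() += smallest;
    }
    sizes // e.g. merge_smallest_sketch(vec![8, 1, 3, 2], 2) == vec![8, 6]
}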
fn place_inlined_mono_items(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: PreInliningPartitioning<'tcx>,
) -> PostInliningPartitioning<'tcx> {
let mut new_partitioning = Vec::new();
codegen_units: &mut [CodegenUnit<'tcx>],
roots: FxHashSet<MonoItem<'tcx>>,
) -> FxHashMap<MonoItem<'tcx>, MonoItemPlacement> {
let mut mono_item_placements = FxHashMap::default();

let PreInliningPartitioning {
codegen_units: initial_cgus,
roots,
internalization_candidates,
} = initial_partitioning;
let single_codegen_unit = codegen_units.len() == 1;

let single_codegen_unit = initial_cgus.len() == 1;

for old_codegen_unit in initial_cgus {
for old_codegen_unit in codegen_units.iter_mut() {
// Collect all items that need to be available in this codegen unit.
let mut reachable = FxHashSet::default();
for root in old_codegen_unit.items().keys() {
@@ -174,14 +254,10 @@ impl<'tcx> Partition<'tcx> for DefaultPartitioning {
}
}

new_partitioning.push(new_codegen_unit);
*old_codegen_unit = new_codegen_unit;
}

return PostInliningPartitioning {
codegen_units: new_partitioning,
mono_item_placements,
internalization_candidates,
};
return mono_item_placements;

fn follow_inlining<'tcx>(
mono_item: MonoItem<'tcx>,
@@ -201,14 +277,16 @@ impl<'tcx> Partition<'tcx> for DefaultPartitioning {
fn internalize_symbols(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
partitioning: &mut PostInliningPartitioning<'tcx>,
codegen_units: &mut [CodegenUnit<'tcx>],
mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
) {
if partitioning.codegen_units.len() == 1 {
if codegen_units.len() == 1 {
// Fast path for when there is only one codegen unit. In this case we
// can internalize all candidates, since there is nowhere else they
// could be accessed from.
for cgu in &mut partitioning.codegen_units {
for candidate in &partitioning.internalization_candidates {
for cgu in codegen_units {
for candidate in &internalization_candidates {
cgu.items_mut().insert(*candidate, (Linkage::Internal, Visibility::Default));
}
}
@@ -225,15 +303,13 @@ impl<'tcx> Partition<'tcx> for DefaultPartitioning {
}
});

let mono_item_placements = &partitioning.mono_item_placements;

// For each internalization candidates in each codegen unit, check if it is
// accessed from outside its defining codegen unit.
for cgu in &mut partitioning.codegen_units {
for cgu in codegen_units {
let home_cgu = MonoItemPlacement::SingleCgu { cgu_name: cgu.name() };

for (accessee, linkage_and_visibility) in cgu.items_mut() {
if !partitioning.internalization_candidates.contains(accessee) {
if !internalization_candidates.contains(accessee) {
// This item is no candidate for internalizing, so skip it.
continue;
}
@@ -1,111 +0,0 @@
use std::cmp;

use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder};
use rustc_span::symbol::Symbol;

use super::PartitioningCx;
use crate::partitioning::PreInliningPartitioning;

pub fn merge_codegen_units<'tcx>(
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: &mut PreInliningPartitioning<'tcx>,
) {
assert!(cx.target_cgu_count >= 1);
let codegen_units = &mut initial_partitioning.codegen_units;

// Note that at this point in time the `codegen_units` here may not be in a
// deterministic order (but we know they're deterministically the same set).
// We want this merging to produce a deterministic ordering of codegen units
// from the input.
//
// Due to basically how we've implemented the merging below (merge the two
// smallest into each other) we're sure to start off with a deterministic
// order (sorted by name). This'll mean that if two cgus have the same size
// the stable sort below will keep everything nice and deterministic.
codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));

// This map keeps track of what got merged into what.
let mut cgu_contents: FxHashMap<Symbol, Vec<Symbol>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name()])).collect();

// Merge the two smallest codegen units until the target size is reached.
while codegen_units.len() > cx.target_cgu_count {
// Sort small cgus to the back
codegen_units.sort_by_cached_key(|cgu| cmp::Reverse(cgu.size_estimate()));
let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();

// Move the mono-items from `smallest` to `second_smallest`
second_smallest.modify_size_estimate(smallest.size_estimate());
for (k, v) in smallest.items_mut().drain() {
second_smallest.items_mut().insert(k, v);
}

// Record that `second_smallest` now contains all the stuff that was in
// `smallest` before.
let mut consumed_cgu_names = cgu_contents.remove(&smallest.name()).unwrap();
cgu_contents.get_mut(&second_smallest.name()).unwrap().append(&mut consumed_cgu_names);

debug!(
"CodegenUnit {} merged into CodegenUnit {}",
smallest.name(),
second_smallest.name()
);
}

let cgu_name_builder = &mut CodegenUnitNameBuilder::new(cx.tcx);

if cx.tcx.sess.opts.incremental.is_some() {
// If we are doing incremental compilation, we want CGU names to
// reflect the path of the source level module they correspond to.
// For CGUs that contain the code of multiple modules because of the
// merging done above, we use a concatenation of the names of
// all contained CGUs.
let new_cgu_names: FxHashMap<Symbol, String> = cgu_contents
.into_iter()
// This `filter` makes sure we only update the name of CGUs that
// were actually modified by merging.
.filter(|(_, cgu_contents)| cgu_contents.len() > 1)
.map(|(current_cgu_name, cgu_contents)| {
let mut cgu_contents: Vec<&str> = cgu_contents.iter().map(|s| s.as_str()).collect();

// Sort the names, so things are deterministic and easy to
// predict.

// We are sorting primitive &strs here so we can use unstable sort
cgu_contents.sort_unstable();

(current_cgu_name, cgu_contents.join("--"))
})
.collect();

for cgu in codegen_units.iter_mut() {
if let Some(new_cgu_name) = new_cgu_names.get(&cgu.name()) {
if cx.tcx.sess.opts.unstable_opts.human_readable_cgu_names {
cgu.set_name(Symbol::intern(&new_cgu_name));
} else {
// If we don't require CGU names to be human-readable, we
// use a fixed length hash of the composite CGU name
// instead.
let new_cgu_name = CodegenUnit::mangle_name(&new_cgu_name);
cgu.set_name(Symbol::intern(&new_cgu_name));
}
}
}
} else {
// If we are compiling non-incrementally we just generate simple CGU
// names containing an index.
for (index, cgu) in codegen_units.iter_mut().enumerate() {
cgu.set_name(numbered_codegen_unit_name(cgu_name_builder, index));
}
}
}

fn numbered_codegen_unit_name(
name_builder: &mut CodegenUnitNameBuilder<'_>,
index: usize,
) -> Symbol {
name_builder.build_cgu_name_no_mangle(LOCAL_CRATE, &["cgu"], Some(index))
}
@@ -93,7 +93,6 @@
//! inlining, even when they are not marked `#[inline]`.

mod default;
mod merging;

use std::cmp;
use std::fs::{self, File};
@@ -129,7 +128,7 @@ impl<'tcx> Partition<'tcx> for Partitioner {
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
mono_items: &mut I,
) -> PreInliningPartitioning<'tcx>
) -> PlacedRootMonoItems<'tcx>
where
I: Iterator<Item = MonoItem<'tcx>>,
{
@@ -142,12 +141,10 @@ impl<'tcx> Partition<'tcx> for Partitioner {
fn merge_codegen_units(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: &mut PreInliningPartitioning<'tcx>,
codegen_units: &mut Vec<CodegenUnit<'tcx>>,
) {
match self {
Partitioner::Default(partitioner) => {
partitioner.merge_codegen_units(cx, initial_partitioning)
}
Partitioner::Default(partitioner) => partitioner.merge_codegen_units(cx, codegen_units),
Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
}
}
@@ -155,11 +152,12 @@ impl<'tcx> Partition<'tcx> for Partitioner {
fn place_inlined_mono_items(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: PreInliningPartitioning<'tcx>,
) -> PostInliningPartitioning<'tcx> {
codegen_units: &mut [CodegenUnit<'tcx>],
roots: FxHashSet<MonoItem<'tcx>>,
) -> FxHashMap<MonoItem<'tcx>, MonoItemPlacement> {
match self {
Partitioner::Default(partitioner) => {
partitioner.place_inlined_mono_items(cx, initial_partitioning)
partitioner.place_inlined_mono_items(cx, codegen_units, roots)
}
Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
}
@@ -168,48 +166,62 @@ impl<'tcx> Partition<'tcx> for Partitioner {
fn internalize_symbols(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
post_inlining_partitioning: &mut PostInliningPartitioning<'tcx>,
codegen_units: &mut [CodegenUnit<'tcx>],
mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
) {
match self {
Partitioner::Default(partitioner) => {
partitioner.internalize_symbols(cx, post_inlining_partitioning)
}
Partitioner::Default(partitioner) => partitioner.internalize_symbols(
cx,
codegen_units,
mono_item_placements,
internalization_candidates,
),
Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
}
}
}

pub struct PartitioningCx<'a, 'tcx> {
struct PartitioningCx<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
target_cgu_count: usize,
inlining_map: &'a InliningMap<'tcx>,
}

pub struct PlacedRootMonoItems<'tcx> {
codegen_units: Vec<CodegenUnit<'tcx>>,
roots: FxHashSet<MonoItem<'tcx>>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
}

trait Partition<'tcx> {
fn place_root_mono_items<I>(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
mono_items: &mut I,
) -> PreInliningPartitioning<'tcx>
) -> PlacedRootMonoItems<'tcx>
where
I: Iterator<Item = MonoItem<'tcx>>;

fn merge_codegen_units(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: &mut PreInliningPartitioning<'tcx>,
codegen_units: &mut Vec<CodegenUnit<'tcx>>,
);

fn place_inlined_mono_items(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
initial_partitioning: PreInliningPartitioning<'tcx>,
) -> PostInliningPartitioning<'tcx>;
codegen_units: &mut [CodegenUnit<'tcx>],
roots: FxHashSet<MonoItem<'tcx>>,
) -> FxHashMap<MonoItem<'tcx>, MonoItemPlacement>;

fn internalize_symbols(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
partitioning: &mut PostInliningPartitioning<'tcx>,
codegen_units: &mut [CodegenUnit<'tcx>],
mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
);
}

@@ -225,7 +237,7 @@ fn get_partitioner(tcx: TyCtxt<'_>) -> Partitioner {
}
}

pub fn partition<'tcx, I>(
fn partition<'tcx, I>(
tcx: TyCtxt<'tcx>,
mono_items: &mut I,
max_cgu_count: usize,
@@ -241,44 +253,51 @@ where
// In the first step, we place all regular monomorphizations into their
// respective 'home' codegen unit. Regular monomorphizations are all
// functions and statics defined in the local crate.
let mut initial_partitioning = {
let PlacedRootMonoItems { mut codegen_units, roots, internalization_candidates } = {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_roots");
partitioner.place_root_mono_items(cx, mono_items)
};

for cgu in &mut initial_partitioning.codegen_units {
for cgu in &mut codegen_units {
cgu.create_size_estimate(tcx);
}

debug_dump(tcx, "INITIAL PARTITIONING", &initial_partitioning.codegen_units);
debug_dump(tcx, "INITIAL PARTITIONING", &codegen_units);

// Merge until we have at most `max_cgu_count` codegen units.
// `merge_codegen_units` is responsible for updating the CGU size
// estimates.
{
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus");
partitioner.merge_codegen_units(cx, &mut initial_partitioning);
debug_dump(tcx, "POST MERGING", &initial_partitioning.codegen_units);
partitioner.merge_codegen_units(cx, &mut codegen_units);
debug_dump(tcx, "POST MERGING", &codegen_units);
}

// In the next step, we use the inlining map to determine which additional
// monomorphizations have to go into each codegen unit. These additional
// monomorphizations can be drop-glue, functions from external crates, and
// local functions the definition of which is marked with `#[inline]`.
let mut post_inlining = {
let mono_item_placements = {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_inline_items");
partitioner.place_inlined_mono_items(cx, initial_partitioning)
partitioner.place_inlined_mono_items(cx, &mut codegen_units, roots)
};

for cgu in &mut post_inlining.codegen_units {
for cgu in &mut codegen_units {
cgu.create_size_estimate(tcx);
}

debug_dump(tcx, "POST INLINING", &post_inlining.codegen_units);
debug_dump(tcx, "POST INLINING", &codegen_units);

// Next we try to make as many symbols "internal" as possible, so LLVM has
// more freedom to optimize.
if !tcx.sess.link_dead_code() {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
partitioner.internalize_symbols(cx, &mut post_inlining);
partitioner.internalize_symbols(
cx,
&mut codegen_units,
mono_item_placements,
internalization_candidates,
);
}

let instrument_dead_code =
@@ -286,7 +305,7 @@ where

if instrument_dead_code {
assert!(
post_inlining.codegen_units.len() > 0,
codegen_units.len() > 0,
"There must be at least one CGU that code coverage data can be generated in."
);

@@ -297,7 +316,7 @@ where
// the object file (CGU) containing the dead function stubs is included
// in the final binary. This will probably require forcing these
// function symbols to be included via `-u` or `/include` linker args.
let mut cgus: Vec<_> = post_inlining.codegen_units.iter_mut().collect();
let mut cgus: Vec<_> = codegen_units.iter_mut().collect();
cgus.sort_by_key(|cgu| cgu.size_estimate());

let dead_code_cgu =
@@ -308,29 +327,17 @@ where
} else {
// If there are no CGUs that have externally linked items,
// then we just pick the first CGU as a fallback.
&mut post_inlining.codegen_units[0]
&mut codegen_units[0]
};
dead_code_cgu.make_code_coverage_dead_code_cgu();
}

// Finally, sort by codegen unit name, so that we get deterministic results.
let PostInliningPartitioning {
codegen_units: mut result,
mono_item_placements: _,
internalization_candidates: _,
} = post_inlining;
codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));

result.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
debug_dump(tcx, "FINAL", &codegen_units);

debug_dump(tcx, "FINAL", &result);

result
}

pub struct PreInliningPartitioning<'tcx> {
codegen_units: Vec<CodegenUnit<'tcx>>,
roots: FxHashSet<MonoItem<'tcx>>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
codegen_units
}

/// For symbol internalization, we need to know whether a symbol/mono-item is
@@ -342,12 +349,6 @@ enum MonoItemPlacement {
MultipleCgus,
}

struct PostInliningPartitioning<'tcx> {
codegen_units: Vec<CodegenUnit<'tcx>>,
mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
}

fn debug_dump<'a, 'tcx: 'a>(tcx: TyCtxt<'tcx>, label: &str, cgus: &[CodegenUnit<'tcx>]) {
let dump = move || {
use std::fmt::Write;
@@ -2,7 +2,7 @@ use super::apple_base::{opts, Arch};
use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, Target, TargetOptions};

pub fn target() -> Target {
let llvm_target = "arm64-apple-ios-macabi";
let llvm_target = "arm64-apple-ios14.0-macabi";

let arch = Arch::Arm64_macabi;
let mut base = opts("ios", arch);
@@ -2,7 +2,7 @@ use super::apple_base::{opts, Arch};
use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions};

pub fn target() -> Target {
let llvm_target = "x86_64-apple-ios-macabi";
let llvm_target = "x86_64-apple-ios14.0-macabi";

let arch = Arch::X86_64_macabi;
let mut base = opts("ios", arch);
@@ -593,7 +593,8 @@ pub trait Read {
/// This may happen for example because fewer bytes are actually available right now
/// (e. g. being close to end-of-file) or because read() was interrupted by a signal.
///
/// As this trait is safe to implement, callers cannot rely on `n <= buf.len()` for safety.
/// As this trait is safe to implement, callers in unsafe code cannot rely on
/// `n <= buf.len()` for safety.
/// Extra care needs to be taken when `unsafe` functions are used to access the read bytes.
/// Callers have to ensure that no unchecked out-of-bounds accesses are possible even if
/// `n > buf.len()`.
@@ -603,8 +604,8 @@ pub trait Read {
/// contents of `buf` being true. It is recommended that *implementations*
/// only write data to `buf` instead of reading its contents.
///
/// Correspondingly, however, *callers* of this method must not assume any guarantees
/// about how the implementation uses `buf`. The trait is safe to implement,
/// Correspondingly, however, *callers* of this method in unsafe code must not assume
/// any guarantees about how the implementation uses `buf`. The trait is safe to implement,
/// so it is possible that the code that's supposed to write to the buffer might also read
/// from it. It is your responsibility to make sure that `buf` is initialized
/// before calling `read`. Calling `read` with an uninitialized `buf` (of the kind one
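To illustrate the concern the clarified docs describe: because `Read` is safe to implement, an implementation could in principle report `n > buf.len()`, so callers that go on to use `unsafe` indexing must bound-check `n` themselves. A hedged sketch of a defensive caller (hypothetical helper, not from the standard library):

use std::io::Read;

fn read_prefix<R: Read>(mut src: R) -> std::io::Result<Vec<u8>> {
    let mut buf = [0u8; 64];
    let n = src.read(&mut buf)?;
    // Clamp `n` before any slicing or unchecked access that assumes `n <= buf.len()`.
    let n = n.min(buf.len());
    Ok(buf[..n].to_vec())
}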
tests/ui/impl-trait/multiple-defining-usages-in-body.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
trait Trait {}
impl Trait for () {}

fn foo<T: Trait, U: Trait>() -> impl Trait {
//~^ WARN function cannot return without recursing [unconditional_recursion]
let a: T = foo::<T, U>();
//~^ ERROR concrete type differs from previous defining opaque type use
loop {}
let _: T = foo::<U, T>();
}

fn main() {}
tests/ui/impl-trait/multiple-defining-usages-in-body.stderr (new file, 26 lines)
@@ -0,0 +1,26 @@
warning: function cannot return without recursing
--> $DIR/multiple-defining-usages-in-body.rs:4:1
|
LL | fn foo<T: Trait, U: Trait>() -> impl Trait {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot return without recursing
LL |
LL | let a: T = foo::<T, U>();
| ------------- recursive call site
|
= help: a `loop` may express intention better if this is on purpose
= note: `#[warn(unconditional_recursion)]` on by default

error: concrete type differs from previous defining opaque type use
--> $DIR/multiple-defining-usages-in-body.rs:6:16
|
LL | let a: T = foo::<T, U>();
| ^^^^^^^^^^^^^ expected `U`, got `T`
|
note: previous use here
--> $DIR/multiple-defining-usages-in-body.rs:9:16
|
LL | let _: T = foo::<U, T>();
| ^^^^^^^^^^^^^
|
error: aborting due to previous error; 1 warning emitted
@ -1,5 +1,5 @@
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:37:9
|
||||
--> $DIR/empty-match.rs:58:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
@ -11,37 +11,52 @@ LL | #![deny(unreachable_patterns)]
|
||||
| ^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:40:9
|
||||
--> $DIR/empty-match.rs:61:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:47:9
|
||||
--> $DIR/empty-match.rs:68:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:50:9
|
||||
--> $DIR/empty-match.rs:71:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error[E0005]: refutable pattern in local binding
|
||||
--> $DIR/empty-match.rs:76:9
|
||||
|
|
||||
LL | let None = x;
|
||||
| ^^^^ pattern `Some(_)` not covered
|
||||
|
|
||||
= note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
|
||||
= note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
|
||||
= note: pattern `Some(_)` is currently uninhabited, but this variant contains private fields which may become inhabited in the future
|
||||
= note: the matched value is of type `Option<SecretlyUninhabitedForeignStruct>`
|
||||
help: you might want to use `if let` to ignore the variant that isn't matched
|
||||
|
|
||||
LL | if let None = x { todo!() };
|
||||
| ++ +++++++++++
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:57:9
|
||||
--> $DIR/empty-match.rs:88:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:60:9
|
||||
--> $DIR/empty-match.rs:91:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `u8` is non-empty
|
||||
--> $DIR/empty-match.rs:78:20
|
||||
--> $DIR/empty-match.rs:109:20
|
||||
|
|
||||
LL | match_no_arms!(0u8);
|
||||
| ^^^
|
||||
@ -50,13 +65,13 @@ LL | match_no_arms!(0u8);
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyStruct1` is non-empty
|
||||
--> $DIR/empty-match.rs:79:20
|
||||
--> $DIR/empty-match.rs:111:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyStruct1);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyStruct1` defined here
|
||||
--> $DIR/empty-match.rs:14:8
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
|
|
||||
LL | struct NonEmptyStruct1;
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -64,13 +79,13 @@ LL | struct NonEmptyStruct1;
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyStruct2` is non-empty
|
||||
--> $DIR/empty-match.rs:80:20
|
||||
--> $DIR/empty-match.rs:113:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyStruct2(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyStruct2` defined here
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
--> $DIR/empty-match.rs:18:8
|
||||
|
|
||||
LL | struct NonEmptyStruct2(bool);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -78,13 +93,13 @@ LL | struct NonEmptyStruct2(bool);
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyUnion1` is non-empty
|
||||
--> $DIR/empty-match.rs:81:20
|
||||
--> $DIR/empty-match.rs:115:20
|
||||
|
|
||||
LL | match_no_arms!((NonEmptyUnion1 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyUnion1` defined here
|
||||
--> $DIR/empty-match.rs:16:7
|
||||
--> $DIR/empty-match.rs:21:7
|
||||
|
|
||||
LL | union NonEmptyUnion1 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -92,13 +107,13 @@ LL | union NonEmptyUnion1 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyUnion2` is non-empty
|
||||
--> $DIR/empty-match.rs:82:20
|
||||
--> $DIR/empty-match.rs:117:20
|
||||
|
|
||||
LL | match_no_arms!((NonEmptyUnion2 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyUnion2` defined here
|
||||
--> $DIR/empty-match.rs:19:7
|
||||
--> $DIR/empty-match.rs:26:7
|
||||
|
|
||||
LL | union NonEmptyUnion2 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -106,13 +121,13 @@ LL | union NonEmptyUnion2 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
|
||||
--> $DIR/empty-match.rs:83:20
|
||||
--> $DIR/empty-match.rs:119:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum1::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
|
|
||||
note: `NonEmptyEnum1` defined here
|
||||
--> $DIR/empty-match.rs:24:5
|
||||
--> $DIR/empty-match.rs:33:5
|
||||
|
|
||||
LL | enum NonEmptyEnum1 {
|
||||
| -------------
|
||||
@ -122,31 +137,32 @@ LL | Foo(bool),
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
--> $DIR/empty-match.rs:84:20
|
||||
--> $DIR/empty-match.rs:122:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum2::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
|
|
||||
note: `NonEmptyEnum2` defined here
|
||||
--> $DIR/empty-match.rs:27:5
|
||||
--> $DIR/empty-match.rs:40:5
|
||||
|
|
||||
LL | enum NonEmptyEnum2 {
|
||||
| -------------
|
||||
LL | Foo(bool),
|
||||
| ^^^ not covered
|
||||
...
|
||||
LL | Bar,
|
||||
| ^^^ not covered
|
||||
= note: the matched value is of type `NonEmptyEnum2`
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or multiple match arms
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
--> $DIR/empty-match.rs:85:20
|
||||
--> $DIR/empty-match.rs:125:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum5::V1);
|
||||
| ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
|
|
||||
note: `NonEmptyEnum5` defined here
|
||||
--> $DIR/empty-match.rs:30:6
|
||||
--> $DIR/empty-match.rs:49:6
|
||||
|
|
||||
LL | enum NonEmptyEnum5 {
|
||||
| ^^^^^^^^^^^^^
|
||||
@ -154,7 +170,7 @@ LL | enum NonEmptyEnum5 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or multiple match arms
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `_` not covered
|
||||
--> $DIR/empty-match.rs:87:24
|
||||
--> $DIR/empty-match.rs:129:24
|
||||
|
|
||||
LL | match_guarded_arm!(0u8);
|
||||
| ^^^ pattern `_` not covered
|
||||
@ -167,13 +183,13 @@ LL + _ => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyStruct1` not covered
|
||||
--> $DIR/empty-match.rs:88:24
|
||||
--> $DIR/empty-match.rs:133:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyStruct1);
|
||||
| ^^^^^^^^^^^^^^^ pattern `NonEmptyStruct1` not covered
|
||||
|
|
||||
note: `NonEmptyStruct1` defined here
|
||||
--> $DIR/empty-match.rs:14:8
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
|
|
||||
LL | struct NonEmptyStruct1;
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -185,13 +201,13 @@ LL + NonEmptyStruct1 => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyStruct2(_)` not covered
|
||||
--> $DIR/empty-match.rs:89:24
|
||||
--> $DIR/empty-match.rs:137:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyStruct2(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyStruct2(_)` not covered
|
||||
|
|
||||
note: `NonEmptyStruct2` defined here
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
--> $DIR/empty-match.rs:18:8
|
||||
|
|
||||
LL | struct NonEmptyStruct2(bool);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -203,13 +219,13 @@ LL + NonEmptyStruct2(_) => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyUnion1 { .. }` not covered
|
||||
--> $DIR/empty-match.rs:90:24
|
||||
--> $DIR/empty-match.rs:141:24
|
||||
|
|
||||
LL | match_guarded_arm!((NonEmptyUnion1 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion1 { .. }` not covered
|
||||
|
|
||||
note: `NonEmptyUnion1` defined here
|
||||
--> $DIR/empty-match.rs:16:7
|
||||
--> $DIR/empty-match.rs:21:7
|
||||
|
|
||||
LL | union NonEmptyUnion1 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -221,13 +237,13 @@ LL + NonEmptyUnion1 { .. } => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyUnion2 { .. }` not covered
|
||||
--> $DIR/empty-match.rs:91:24
|
||||
--> $DIR/empty-match.rs:145:24
|
||||
|
|
||||
LL | match_guarded_arm!((NonEmptyUnion2 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion2 { .. }` not covered
|
||||
|
|
||||
note: `NonEmptyUnion2` defined here
|
||||
--> $DIR/empty-match.rs:19:7
|
||||
--> $DIR/empty-match.rs:26:7
|
||||
|
|
||||
LL | union NonEmptyUnion2 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -239,13 +255,13 @@ LL + NonEmptyUnion2 { .. } => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
|
||||
--> $DIR/empty-match.rs:92:24
|
||||
--> $DIR/empty-match.rs:149:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum1::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
|
|
||||
note: `NonEmptyEnum1` defined here
|
||||
--> $DIR/empty-match.rs:24:5
|
||||
--> $DIR/empty-match.rs:33:5
|
||||
|
|
||||
LL | enum NonEmptyEnum1 {
|
||||
| -------------
|
||||
@ -259,18 +275,19 @@ LL + NonEmptyEnum1::Foo(_) => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
--> $DIR/empty-match.rs:93:24
|
||||
--> $DIR/empty-match.rs:153:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum2::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
|
|
||||
note: `NonEmptyEnum2` defined here
|
||||
--> $DIR/empty-match.rs:27:5
|
||||
--> $DIR/empty-match.rs:40:5
|
||||
|
|
||||
LL | enum NonEmptyEnum2 {
|
||||
| -------------
|
||||
LL | Foo(bool),
|
||||
| ^^^ not covered
|
||||
...
|
||||
LL | Bar,
|
||||
| ^^^ not covered
|
||||
= note: the matched value is of type `NonEmptyEnum2`
|
||||
@ -281,13 +298,13 @@ LL + NonEmptyEnum2::Foo(_) | NonEmptyEnum2::Bar => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
--> $DIR/empty-match.rs:94:24
|
||||
--> $DIR/empty-match.rs:157:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum5::V1);
|
||||
| ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
|
|
||||
note: `NonEmptyEnum5` defined here
|
||||
--> $DIR/empty-match.rs:30:6
|
||||
--> $DIR/empty-match.rs:49:6
|
||||
|
|
||||
LL | enum NonEmptyEnum5 {
|
||||
| ^^^^^^^^^^^^^
|
||||
@ -298,6 +315,7 @@ LL ~ _ if false => {},
|
||||
LL + _ => todo!()
|
||||
|
|
||||
|
||||
error: aborting due to 22 previous errors
|
||||
error: aborting due to 23 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0004`.
|
||||
Some errors have detailed explanations: E0004, E0005.
|
||||
For more information about an error, try `rustc --explain E0004`.
|
||||
|
@ -1,5 +1,5 @@
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:37:9
|
||||
--> $DIR/empty-match.rs:58:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
@ -11,37 +11,51 @@ LL | #![deny(unreachable_patterns)]
|
||||
| ^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:40:9
|
||||
--> $DIR/empty-match.rs:61:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:47:9
|
||||
--> $DIR/empty-match.rs:68:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:50:9
|
||||
--> $DIR/empty-match.rs:71:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error[E0005]: refutable pattern in local binding
|
||||
--> $DIR/empty-match.rs:76:9
|
||||
|
|
||||
LL | let None = x;
|
||||
| ^^^^ pattern `Some(_)` not covered
|
||||
|
|
||||
= note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
|
||||
= note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
|
||||
= note: the matched value is of type `Option<SecretlyUninhabitedForeignStruct>`
|
||||
help: you might want to use `if let` to ignore the variant that isn't matched
|
||||
|
|
||||
LL | if let None = x { todo!() };
|
||||
| ++ +++++++++++
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:57:9
|
||||
--> $DIR/empty-match.rs:88:9
|
||||
|
|
||||
LL | _ => {},
|
||||
| ^
|
||||
|
||||
error: unreachable pattern
|
||||
--> $DIR/empty-match.rs:60:9
|
||||
--> $DIR/empty-match.rs:91:9
|
||||
|
|
||||
LL | _ if false => {},
|
||||
| ^
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `u8` is non-empty
|
||||
--> $DIR/empty-match.rs:78:20
|
||||
--> $DIR/empty-match.rs:109:20
|
||||
|
|
||||
LL | match_no_arms!(0u8);
|
||||
| ^^^
|
||||
@ -50,13 +64,13 @@ LL | match_no_arms!(0u8);
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyStruct1` is non-empty
|
||||
--> $DIR/empty-match.rs:79:20
|
||||
--> $DIR/empty-match.rs:111:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyStruct1);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyStruct1` defined here
|
||||
--> $DIR/empty-match.rs:14:8
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
|
|
||||
LL | struct NonEmptyStruct1;
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -64,13 +78,13 @@ LL | struct NonEmptyStruct1;
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyStruct2` is non-empty
|
||||
--> $DIR/empty-match.rs:80:20
|
||||
--> $DIR/empty-match.rs:113:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyStruct2(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyStruct2` defined here
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
--> $DIR/empty-match.rs:18:8
|
||||
|
|
||||
LL | struct NonEmptyStruct2(bool);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -78,13 +92,13 @@ LL | struct NonEmptyStruct2(bool);
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyUnion1` is non-empty
|
||||
--> $DIR/empty-match.rs:81:20
|
||||
--> $DIR/empty-match.rs:115:20
|
||||
|
|
||||
LL | match_no_arms!((NonEmptyUnion1 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyUnion1` defined here
|
||||
--> $DIR/empty-match.rs:16:7
|
||||
--> $DIR/empty-match.rs:21:7
|
||||
|
|
||||
LL | union NonEmptyUnion1 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -92,13 +106,13 @@ LL | union NonEmptyUnion1 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: type `NonEmptyUnion2` is non-empty
|
||||
--> $DIR/empty-match.rs:82:20
|
||||
--> $DIR/empty-match.rs:117:20
|
||||
|
|
||||
LL | match_no_arms!((NonEmptyUnion2 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
note: `NonEmptyUnion2` defined here
|
||||
--> $DIR/empty-match.rs:19:7
|
||||
--> $DIR/empty-match.rs:26:7
|
||||
|
|
||||
LL | union NonEmptyUnion2 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -106,13 +120,13 @@ LL | union NonEmptyUnion2 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
|
||||
--> $DIR/empty-match.rs:83:20
|
||||
--> $DIR/empty-match.rs:119:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum1::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
|
|
||||
note: `NonEmptyEnum1` defined here
|
||||
--> $DIR/empty-match.rs:24:5
|
||||
--> $DIR/empty-match.rs:33:5
|
||||
|
|
||||
LL | enum NonEmptyEnum1 {
|
||||
| -------------
|
||||
@ -122,31 +136,32 @@ LL | Foo(bool),
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
--> $DIR/empty-match.rs:84:20
|
||||
--> $DIR/empty-match.rs:122:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum2::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
|
|
||||
note: `NonEmptyEnum2` defined here
|
||||
--> $DIR/empty-match.rs:27:5
|
||||
--> $DIR/empty-match.rs:40:5
|
||||
|
|
||||
LL | enum NonEmptyEnum2 {
|
||||
| -------------
|
||||
LL | Foo(bool),
|
||||
| ^^^ not covered
|
||||
...
|
||||
LL | Bar,
|
||||
| ^^^ not covered
|
||||
= note: the matched value is of type `NonEmptyEnum2`
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or multiple match arms
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
--> $DIR/empty-match.rs:85:20
|
||||
--> $DIR/empty-match.rs:125:20
|
||||
|
|
||||
LL | match_no_arms!(NonEmptyEnum5::V1);
|
||||
| ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
|
|
||||
note: `NonEmptyEnum5` defined here
|
||||
--> $DIR/empty-match.rs:30:6
|
||||
--> $DIR/empty-match.rs:49:6
|
||||
|
|
||||
LL | enum NonEmptyEnum5 {
|
||||
| ^^^^^^^^^^^^^
|
||||
@ -154,7 +169,7 @@ LL | enum NonEmptyEnum5 {
|
||||
= help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or multiple match arms
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `_` not covered
|
||||
--> $DIR/empty-match.rs:87:24
|
||||
--> $DIR/empty-match.rs:129:24
|
||||
|
|
||||
LL | match_guarded_arm!(0u8);
|
||||
| ^^^ pattern `_` not covered
|
||||
@ -167,13 +182,13 @@ LL + _ => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyStruct1` not covered
|
||||
--> $DIR/empty-match.rs:88:24
|
||||
--> $DIR/empty-match.rs:133:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyStruct1);
|
||||
| ^^^^^^^^^^^^^^^ pattern `NonEmptyStruct1` not covered
|
||||
|
|
||||
note: `NonEmptyStruct1` defined here
|
||||
--> $DIR/empty-match.rs:14:8
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
|
|
||||
LL | struct NonEmptyStruct1;
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -185,13 +200,13 @@ LL + NonEmptyStruct1 => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyStruct2(_)` not covered
|
||||
--> $DIR/empty-match.rs:89:24
|
||||
--> $DIR/empty-match.rs:137:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyStruct2(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyStruct2(_)` not covered
|
||||
|
|
||||
note: `NonEmptyStruct2` defined here
|
||||
--> $DIR/empty-match.rs:15:8
|
||||
--> $DIR/empty-match.rs:18:8
|
||||
|
|
||||
LL | struct NonEmptyStruct2(bool);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
@ -203,13 +218,13 @@ LL + NonEmptyStruct2(_) => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyUnion1 { .. }` not covered
|
||||
--> $DIR/empty-match.rs:90:24
|
||||
--> $DIR/empty-match.rs:141:24
|
||||
|
|
||||
LL | match_guarded_arm!((NonEmptyUnion1 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion1 { .. }` not covered
|
||||
|
|
||||
note: `NonEmptyUnion1` defined here
|
||||
--> $DIR/empty-match.rs:16:7
|
||||
--> $DIR/empty-match.rs:21:7
|
||||
|
|
||||
LL | union NonEmptyUnion1 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -221,13 +236,13 @@ LL + NonEmptyUnion1 { .. } => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyUnion2 { .. }` not covered
|
||||
--> $DIR/empty-match.rs:91:24
|
||||
--> $DIR/empty-match.rs:145:24
|
||||
|
|
||||
LL | match_guarded_arm!((NonEmptyUnion2 { foo: () }));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion2 { .. }` not covered
|
||||
|
|
||||
note: `NonEmptyUnion2` defined here
|
||||
--> $DIR/empty-match.rs:19:7
|
||||
--> $DIR/empty-match.rs:26:7
|
||||
|
|
||||
LL | union NonEmptyUnion2 {
|
||||
| ^^^^^^^^^^^^^^
|
||||
@ -239,13 +254,13 @@ LL + NonEmptyUnion2 { .. } => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
|
||||
--> $DIR/empty-match.rs:92:24
|
||||
--> $DIR/empty-match.rs:149:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum1::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
|
|
||||
note: `NonEmptyEnum1` defined here
|
||||
--> $DIR/empty-match.rs:24:5
|
||||
--> $DIR/empty-match.rs:33:5
|
||||
|
|
||||
LL | enum NonEmptyEnum1 {
|
||||
| -------------
|
||||
@ -259,18 +274,19 @@ LL + NonEmptyEnum1::Foo(_) => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
--> $DIR/empty-match.rs:93:24
|
||||
--> $DIR/empty-match.rs:153:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum2::Foo(true));
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
|
|
||||
note: `NonEmptyEnum2` defined here
|
||||
--> $DIR/empty-match.rs:27:5
|
||||
--> $DIR/empty-match.rs:40:5
|
||||
|
|
||||
LL | enum NonEmptyEnum2 {
|
||||
| -------------
|
||||
LL | Foo(bool),
|
||||
| ^^^ not covered
|
||||
...
|
||||
LL | Bar,
|
||||
| ^^^ not covered
|
||||
= note: the matched value is of type `NonEmptyEnum2`
|
||||
@ -281,13 +297,13 @@ LL + NonEmptyEnum2::Foo(_) | NonEmptyEnum2::Bar => todo!()
|
||||
|
|
||||
|
||||
error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
--> $DIR/empty-match.rs:94:24
|
||||
--> $DIR/empty-match.rs:157:24
|
||||
|
|
||||
LL | match_guarded_arm!(NonEmptyEnum5::V1);
|
||||
| ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
|
|
||||
note: `NonEmptyEnum5` defined here
|
||||
--> $DIR/empty-match.rs:30:6
|
||||
--> $DIR/empty-match.rs:49:6
|
||||
|
|
||||
LL | enum NonEmptyEnum5 {
|
||||
| ^^^^^^^^^^^^^
|
||||
@ -298,6 +314,7 @@ LL ~ _ if false => {},
|
||||
LL + _ => todo!()
|
||||
|
|
||||
|
||||
error: aborting due to 22 previous errors
|
||||
error: aborting due to 23 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0004`.
|
||||
Some errors have detailed explanations: E0004, E0005.
|
||||
For more information about an error, try `rustc --explain E0004`.
|
||||
|
@ -6,28 +6,49 @@
|
||||
#![feature(never_type_fallback)]
|
||||
#![cfg_attr(exhaustive_patterns, feature(exhaustive_patterns))]
|
||||
#![deny(unreachable_patterns)]
|
||||
//~^ NOTE the lint level is defined here
|
||||
|
||||
extern crate empty;
|
||||
|
||||
enum EmptyEnum {}
|
||||
|
||||
struct NonEmptyStruct1;
|
||||
//~^ NOTE `NonEmptyStruct1` defined here
|
||||
//~| NOTE `NonEmptyStruct1` defined here
|
||||
struct NonEmptyStruct2(bool);
|
||||
//~^ NOTE `NonEmptyStruct2` defined here
|
||||
//~| NOTE `NonEmptyStruct2` defined here
|
||||
union NonEmptyUnion1 {
|
||||
//~^ NOTE `NonEmptyUnion1` defined here
|
||||
//~| NOTE `NonEmptyUnion1` defined here
|
||||
foo: (),
|
||||
}
|
||||
union NonEmptyUnion2 {
|
||||
//~^ NOTE `NonEmptyUnion2` defined here
|
||||
//~| NOTE `NonEmptyUnion2` defined here
|
||||
foo: (),
|
||||
bar: (),
|
||||
}
|
||||
enum NonEmptyEnum1 {
|
||||
Foo(bool),
|
||||
//~^ NOTE `NonEmptyEnum1` defined here
|
||||
//~| NOTE `NonEmptyEnum1` defined here
|
||||
//~| NOTE not covered
|
||||
//~| NOTE not covered
|
||||
}
|
||||
enum NonEmptyEnum2 {
|
||||
Foo(bool),
|
||||
//~^ NOTE `NonEmptyEnum2` defined here
|
||||
//~| NOTE `NonEmptyEnum2` defined here
|
||||
//~| NOTE not covered
|
||||
//~| NOTE not covered
|
||||
Bar,
|
||||
//~^ NOTE not covered
|
||||
//~| NOTE not covered
|
||||
}
|
||||
enum NonEmptyEnum5 {
|
||||
//~^ NOTE `NonEmptyEnum5` defined here
|
||||
//~| NOTE `NonEmptyEnum5` defined here
|
||||
V1, V2, V3, V4, V5,
|
||||
}
|
||||
|
||||
@ -51,6 +72,16 @@ fn empty_foreign_enum(x: empty::EmptyForeignEnum) {
|
||||
}
|
||||
}
|
||||
|
||||
fn empty_foreign_enum_private(x: Option<empty::SecretlyUninhabitedForeignStruct>) {
|
||||
let None = x;
|
||||
//~^ ERROR refutable pattern in local binding
|
||||
//~| NOTE `let` bindings require an "irrefutable pattern"
|
||||
//~| NOTE for more information, visit
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `Some(_)` not covered
|
||||
//[exhaustive_patterns]~| NOTE currently uninhabited, but this variant contains private fields
|
||||
}
|
||||
|
||||
fn never(x: !) {
|
||||
match x {} // ok
|
||||
match x {
|
||||
@ -76,20 +107,55 @@ macro_rules! match_guarded_arm {
|
||||
|
||||
fn main() {
|
||||
match_no_arms!(0u8); //~ ERROR type `u8` is non-empty
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!(NonEmptyStruct1); //~ ERROR type `NonEmptyStruct1` is non-empty
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!(NonEmptyStruct2(true)); //~ ERROR type `NonEmptyStruct2` is non-empty
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!((NonEmptyUnion1 { foo: () })); //~ ERROR type `NonEmptyUnion1` is non-empty
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!((NonEmptyUnion2 { foo: () })); //~ ERROR type `NonEmptyUnion2` is non-empty
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!(NonEmptyEnum1::Foo(true)); //~ ERROR `NonEmptyEnum1::Foo(_)` not covered
|
||||
//~| NOTE pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!(NonEmptyEnum2::Foo(true)); //~ ERROR `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
//~| NOTE patterns `NonEmptyEnum2::Foo(_)` and
|
||||
//~| NOTE the matched value is of type
|
||||
match_no_arms!(NonEmptyEnum5::V1); //~ ERROR `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
//~| NOTE patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`
|
||||
//~| NOTE the matched value is of type
|
||||
|
||||
match_guarded_arm!(0u8); //~ ERROR `_` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `_` not covered
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!(NonEmptyStruct1); //~ ERROR `NonEmptyStruct1` not covered
|
||||
//~| NOTE pattern `NonEmptyStruct1` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!(NonEmptyStruct2(true)); //~ ERROR `NonEmptyStruct2(_)` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `NonEmptyStruct2(_)` not covered
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!((NonEmptyUnion1 { foo: () })); //~ ERROR `NonEmptyUnion1 { .. }` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `NonEmptyUnion1 { .. }` not covered
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!((NonEmptyUnion2 { foo: () })); //~ ERROR `NonEmptyUnion2 { .. }` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `NonEmptyUnion2 { .. }` not covered
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!(NonEmptyEnum1::Foo(true)); //~ ERROR `NonEmptyEnum1::Foo(_)` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE pattern `NonEmptyEnum1::Foo(_)` not covered
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!(NonEmptyEnum2::Foo(true)); //~ ERROR `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE patterns `NonEmptyEnum2::Foo(_)` and
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
match_guarded_arm!(NonEmptyEnum5::V1); //~ ERROR `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
|
||||
//~| NOTE the matched value is of type
|
||||
//~| NOTE patterns `NonEmptyEnum5::V1`,
|
||||
//~| NOTE in this expansion of match_guarded_arm!
|
||||
}
|
||||
|