Rollup merge of #79072 - oli-obk:byte_str_pat, r=estebank

Fix exhaustiveness checking when a byte string literal is used at slice type

fixes #79048
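
For context, this is roughly the situation the fix addresses (adapted from the test added in this PR): when a byte string literal pattern is used against a `&[u8]` scrutinee, exhaustiveness checking must treat it as a slice pattern, not as an array pattern.

    fn parse_data(data: &[u8]) -> u32 {
        match data {
            // `b""` has type `&[u8; 0]`, but is used here at type `&[u8]`,
            // so it only covers the empty slice ...
            b"" => 1,
            // ... and this arm is required; without it the match is now
            // correctly reported as non-exhaustive (`&[_, ..]` not covered).
            _ => 2,
        }
    }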
Mara Bos 2020-11-17 16:13:53 +01:00 committed by GitHub
commit b6f52410bb
7 changed files with 113 additions and 12 deletions


@@ -422,6 +422,12 @@ pub struct TypeckResults<'tcx> {
     /// Stores the type, expression, span and optional scope span of all types
     /// that are live across the yield of this generator (if a generator).
     pub generator_interior_types: Vec<GeneratorInteriorTypeCause<'tcx>>,
+
+    /// We sometimes treat byte string literals (which are of type `&[u8; N]`)
+    /// as `&[u8]`, depending on the pattern in which they are used.
+    /// This hashset records all instances where we behave
+    /// like this to allow `const_to_pat` to reliably handle this situation.
+    pub treat_byte_string_as_slice: ItemLocalSet,
 }
 
 impl<'tcx> TypeckResults<'tcx> {
@@ -448,6 +454,7 @@ pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> {
             closure_captures: Default::default(),
             closure_min_captures: Default::default(),
             generator_interior_types: Default::default(),
+            treat_byte_string_as_slice: Default::default(),
         }
     }
@@ -683,6 +690,7 @@ fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHas
             ref closure_captures,
             ref closure_min_captures,
             ref generator_interior_types,
+            ref treat_byte_string_as_slice,
         } = *self;
 
         hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
@@ -717,6 +725,7 @@ fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHas
             closure_captures.hash_stable(hcx, hasher);
             closure_min_captures.hash_stable(hcx, hasher);
             generator_interior_types.hash_stable(hcx, hasher);
+            treat_byte_string_as_slice.hash_stable(hcx, hasher);
         })
     }
 }
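
The new `treat_byte_string_as_slice` set is the hand-off point between type checking and pattern lowering. Below is a minimal, self-contained sketch of that hand-off, using hypothetical `ItemLocalId` and `TypeckResultsSketch` types rather than the real rustc ones:

    use std::collections::HashSet;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct ItemLocalId(u32);

    #[derive(Default)]
    struct TypeckResultsSketch {
        // Ids of byte string literal patterns that were used at slice type.
        treat_byte_string_as_slice: HashSet<ItemLocalId>,
    }

    impl TypeckResultsSketch {
        // Called during pattern type checking when the expected type is `&[u8]`.
        fn record_slice_use(&mut self, id: ItemLocalId) {
            self.treat_byte_string_as_slice.insert(id);
        }

        // Called during constant-to-pattern lowering to decide between an
        // array pattern and a slice pattern for the literal.
        fn used_at_slice_type(&self, id: ItemLocalId) -> bool {
            self.treat_byte_string_as_slice.contains(&id)
        }
    }

    fn main() {
        let mut results = TypeckResultsSketch::default();
        results.record_slice_use(ItemLocalId(7));
        assert!(results.used_at_slice_type(ItemLocalId(7)));
        assert!(!results.used_at_slice_type(ItemLocalId(8)));
    }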


@@ -18,6 +18,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
     /// Converts an evaluated constant to a pattern (if possible).
     /// This means aggregate values (like structs and enums) are converted
     /// to a pattern that matches the value (as if you'd compared via structural equality).
+    #[instrument(skip(self))]
     pub(super) fn const_to_pat(
         &self,
         cv: &'tcx ty::Const<'tcx>,
@@ -25,15 +26,12 @@ pub(super) fn const_to_pat(
         span: Span,
         mir_structural_match_violation: bool,
     ) -> Pat<'tcx> {
-        debug!("const_to_pat: cv={:#?} id={:?}", cv, id);
-        debug!("const_to_pat: cv.ty={:?} span={:?}", cv.ty, span);
-
         let pat = self.tcx.infer_ctxt().enter(|infcx| {
             let mut convert = ConstToPat::new(self, id, span, infcx);
             convert.to_pat(cv, mir_structural_match_violation)
         });
-        debug!("const_to_pat: pat={:?}", pat);
+        debug!(?pat);
         pat
     }
 }
@@ -61,6 +59,8 @@ struct ConstToPat<'a, 'tcx> {
     infcx: InferCtxt<'a, 'tcx>,
 
     include_lint_checks: bool,
+
+    treat_byte_string_as_slice: bool,
 }
 
 mod fallback_to_const_ref {
@@ -88,6 +88,7 @@ fn new(
         span: Span,
         infcx: InferCtxt<'a, 'tcx>,
     ) -> Self {
+        trace!(?pat_ctxt.typeck_results.hir_owner);
         ConstToPat {
             id,
             span,
@@ -97,6 +98,10 @@ fn new(
             saw_const_match_error: Cell::new(false),
             saw_const_match_lint: Cell::new(false),
             behind_reference: Cell::new(false),
+            treat_byte_string_as_slice: pat_ctxt
+                .typeck_results
+                .treat_byte_string_as_slice
+                .contains(&id.local_id),
         }
     }
@@ -153,6 +158,7 @@ fn to_pat(
         cv: &'tcx ty::Const<'tcx>,
         mir_structural_match_violation: bool,
     ) -> Pat<'tcx> {
+        trace!(self.treat_byte_string_as_slice);
         // This method is just a wrapper handling a validity check; the heavy lifting is
         // performed by the recursive `recur` method, which is not meant to be
         // invoked except by this method.
@@ -384,7 +390,7 @@ fn recur(
                 }
                 PatKind::Wild
             }
-            // `&str` and `&[u8]` are represented as `ConstValue::Slice`, let's keep using this
+            // `&str` is represented as `ConstValue::Slice`, let's keep using this
             // optimization for now.
             ty::Str => PatKind::Constant { value: cv },
             // `b"foo"` produces a `&[u8; 3]`, but you can't use constants of array type when
@@ -393,11 +399,33 @@ fn recur(
             // as slices. This means we turn `&[T; N]` constants into slice patterns, which
             // has no negative effects on pattern matching, even if we're actually matching on
             // arrays.
-            ty::Array(..) |
+            ty::Array(..) if !self.treat_byte_string_as_slice => {
+                let old = self.behind_reference.replace(true);
+                let array = tcx.deref_const(self.param_env.and(cv));
+                let val = PatKind::Deref {
+                    subpattern: Pat {
+                        kind: Box::new(PatKind::Array {
+                            prefix: tcx
+                                .destructure_const(param_env.and(array))
+                                .fields
+                                .iter()
+                                .map(|val| self.recur(val, false))
+                                .collect::<Result<_, _>>()?,
+                            slice: None,
+                            suffix: vec![],
+                        }),
+                        span,
+                        ty: pointee_ty,
+                    },
+                };
+                self.behind_reference.set(old);
+                val
+            }
+            ty::Array(elem_ty, _) |
             // Cannot merge this with the catch all branch below, because the `const_deref`
             // changes the type from slice to array, we need to keep the original type in the
             // pattern.
-            ty::Slice(..) => {
+            ty::Slice(elem_ty) => {
                 let old = self.behind_reference.replace(true);
                 let array = tcx.deref_const(self.param_env.and(cv));
                 let val = PatKind::Deref {
@@ -413,7 +441,7 @@ fn recur(
                             suffix: vec![],
                         }),
                         span,
-                        ty: pointee_ty,
+                        ty: tcx.mk_slice(elem_ty),
                     },
                 };
                 self.behind_reference.set(old);
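
In user terms, the lowering change above means a byte string constant used at slice type becomes a slice pattern typed as `&[u8]` (via `tcx.mk_slice(elem_ty)`) instead of an array pattern, so it composes with explicit slice patterns as expected. An illustrative example (not taken from the PR's test suite):

    fn classify(data: &[u8]) -> u8 {
        match data {
            // `b"abc"` has type `&[u8; 3]`, but at slice type it is lowered to
            // the slice pattern `&[b'a', b'b', b'c']`, not an array pattern.
            b"abc" => 0,
            // Covers every other slice of length 3.
            [_, _, _] => 1,
            // Still required: slices of other lengths are not covered above.
            _ => 2,
        }
    }

    fn main() {
        assert_eq!(classify(b"abc"), 0);
        assert_eq!(classify(b"xyz"), 1);
        assert_eq!(classify(b"hello"), 2);
    }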


@@ -149,6 +149,7 @@ pub fn check_pat_top(
     ///
     /// Outside of this module, `check_pat_top` should always be used.
     /// Conversely, inside this module, `check_pat_top` should never be used.
+    #[instrument(skip(self, ti))]
     fn check_pat(
         &self,
         pat: &'tcx Pat<'tcx>,
@@ -156,8 +157,6 @@ fn check_pat(
         def_bm: BindingMode,
         ti: TopInfo<'tcx>,
     ) {
-        debug!("check_pat(pat={:?},expected={:?},def_bm={:?})", pat, expected, def_bm);
-
         let path_res = match &pat.kind {
             PatKind::Path(qpath) => Some(self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span)),
             _ => None,
@@ -398,6 +397,11 @@ fn check_pat_lit(
         if let ty::Ref(_, inner_ty, _) = expected.kind() {
             if matches!(inner_ty.kind(), ty::Slice(_)) {
                 let tcx = self.tcx;
+                trace!(?lt.hir_id.local_id, "polymorphic byte string lit");
+                self.typeck_results
+                    .borrow_mut()
+                    .treat_byte_string_as_slice
+                    .insert(lt.hir_id.local_id);
                 pat_ty = tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.mk_slice(tcx.types.u8));
             }
         }
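
This hunk is where the decision is recorded: a byte string literal pattern keeps its natural `&[u8; N]` type unless the expected type is a reference to a slice, in which case its type becomes `&'static [u8]` and its `ItemLocalId` is noted for `const_to_pat`. Roughly, only the second of these two functions produces an entry in the set (illustrative example, not part of the PR):

    // Checked against an array reference: the literal keeps its `&[u8; 2]`
    // type and nothing is recorded.
    fn is_hi_array(x: &[u8; 2]) -> bool {
        matches!(x, b"hi")
    }

    // Checked against a slice reference: the pattern's type is adjusted to
    // `&'static [u8]` and its id is inserted into `treat_byte_string_as_slice`.
    fn is_hi_slice(x: &[u8]) -> bool {
        matches!(x, b"hi")
    }

    fn main() {
        assert!(is_hi_array(b"hi"));
        assert!(is_hi_slice(b"hi"));
        assert!(!is_hi_slice(b"hello"));
    }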


@@ -70,6 +70,9 @@ pub fn resolve_type_vars_in_body(
         debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
         wbcx.typeck_results.used_trait_imports = used_trait_imports;
 
+        wbcx.typeck_results.treat_byte_string_as_slice =
+            mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);
+
         wbcx.typeck_results.closure_captures =
             mem::take(&mut self.typeck_results.borrow_mut().closure_captures);
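
Writeback moves the set from the in-flight inference results into the final `TypeckResults` with the same `mem::take` idiom used for the neighbouring fields. A minimal stand-alone illustration of that idiom, with a plain `HashSet` standing in for the real table:

    use std::cell::RefCell;
    use std::collections::HashSet;
    use std::mem;

    fn main() {
        // The in-progress results live behind a RefCell while checking runs.
        let in_progress: RefCell<HashSet<u32>> = RefCell::new(HashSet::from([1, 2, 3]));

        // Writeback takes ownership of the set, leaving an empty one behind.
        let finalized: HashSet<u32> = mem::take(&mut *in_progress.borrow_mut());

        assert_eq!(finalized.len(), 3);
        assert!(in_progress.borrow().is_empty());
    }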


@@ -0,0 +1,36 @@
+#[deny(unreachable_patterns)]
+fn parse_data1(data: &[u8]) -> u32 {
+    match data {
+        b"" => 1,
+        _ => 2,
+    }
+}
+
+fn parse_data2(data: &[u8]) -> u32 {
+    match data { //~ ERROR non-exhaustive patterns: `&[_, ..]` not covered
+        b"" => 1,
+    }
+}
+
+fn parse_data3(data: &[u8; 0]) -> u8 {
+    match data {
+        b"" => 1,
+    }
+}
+
+fn parse_data4(data: &[u8]) -> u8 {
+    match data { //~ ERROR non-exhaustive patterns
+        b"aaa" => 0,
+        [_, _, _] => 1,
+    }
+}
+
+fn parse_data5(data: &[u8; 3]) -> u8 {
+    match data {
+        b"aaa" => 0,
+        [_, _, _] => 1,
+    }
+}
+
+fn main() {}


@@ -0,0 +1,21 @@
+error[E0004]: non-exhaustive patterns: `&[_, ..]` not covered
+  --> $DIR/type_polymorphic_byte_str_literals.rs:11:11
+   |
+LL |     match data {
+   |           ^^^^ pattern `&[_, ..]` not covered
+   |
+   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
+   = note: the matched value is of type `&[u8]`
+
+error[E0004]: non-exhaustive patterns: `&[]`, `&[_]`, `&[_, _]` and 1 more not covered
+  --> $DIR/type_polymorphic_byte_str_literals.rs:23:11
+   |
+LL |     match data {
+   |           ^^^^ patterns `&[]`, `&[_]`, `&[_, _]` and 1 more not covered
+   |
+   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
+   = note: the matched value is of type `&[u8]`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0004`.


@@ -7,11 +7,11 @@ LL | match buf {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
    = note: the matched value is of type `&[u8; 4]`
 
-error[E0004]: non-exhaustive patterns: `&[0_u8..=64_u8, _, _, _]` and `&[66_u8..=u8::MAX, _, _, _]` not covered
+error[E0004]: non-exhaustive patterns: `&[]`, `&[_]`, `&[_, _]` and 2 more not covered
   --> $DIR/match-byte-array-patterns-2.rs:10:11
    |
 LL |     match buf {
-   |           ^^^ patterns `&[0_u8..=64_u8, _, _, _]` and `&[66_u8..=u8::MAX, _, _, _]` not covered
+   |           ^^^ patterns `&[]`, `&[_]`, `&[_, _]` and 2 more not covered
    |
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
    = note: the matched value is of type `&[u8]`