Handle ranges of float consistently
compiler/rustc_mir_build/src/thir/pattern/_match.rs
index eddd2882406ba38cb40a586cc9873fec8f62a102..6a37dd896b6c3648d073f064f7be3024958642c7 100644
 //!
 //!    It is computed as follows. We look at the pattern `p_1` on top of the stack,
 //!    and we have three cases:
-//!         1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
-//!         1.2. `p_1 = _`. We return the rest of the stack:
+//!         2.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
+//!         2.2. `p_1 = _`. We return the rest of the stack:
 //!                 p_2, .., p_n
-//!         1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
+//!         2.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
 //!           stack.
 //!                 D((r_1, p_2, .., p_n))
 //!                 D((r_2, p_2, .., p_n))
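
For intuition, the renumbered `D(p)` rules above can be modelled on a drastically simplified pattern type. This is an illustrative sketch only; `Pat` and `default_specialize` are invented names and do not correspond to the compiler's data structures.

```rust
/// Toy pattern type: a constructor, a wildcard, and an or-pattern.
#[derive(Clone)]
enum Pat {
    Ctor(Vec<Pat>),         // `c(r_1, .., r_a)`
    Wild,                   // `_`
    Or(Box<Pat>, Box<Pat>), // `r_1 | r_2`
}

/// `D((p_1, .., p_n))` for a single, non-empty pattern stack, following rules 2.1-2.3.
fn default_specialize(stack: &[Pat]) -> Vec<Vec<Pat>> {
    match &stack[0] {
        // 2.1: a constructor pattern contributes nothing.
        Pat::Ctor(_) => vec![],
        // 2.2: a wildcard contributes the rest of the stack.
        Pat::Wild => vec![stack[1..].to_vec()],
        // 2.3: expand the or-pattern and recurse on each resulting stack.
        Pat::Or(r1, r2) => {
            let mut out = Vec::new();
            for r in [r1.as_ref(), r2.as_ref()] {
                let mut s = vec![r.clone()];
                s.extend_from_slice(&stack[1..]);
                out.extend(default_specialize(&s));
            }
            out
        }
    }
}
```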
 use self::WitnessPreference::*;
 
 use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_index::vec::Idx;
 
 use super::{compare_const_vals, PatternFoldable, PatternFolder};
 
 use rustc_arena::TypedArena;
 use rustc_attr::{SignedInt, UnsignedInt};
-use rustc_errors::ErrorReported;
 use rustc_hir::def_id::DefId;
 use rustc_hir::{HirId, RangeEnd};
-use rustc_middle::mir::interpret::{truncate, AllocId, ConstValue, Pointer, Scalar};
+use rustc_middle::mir::interpret::{truncate, ConstValue};
 use rustc_middle::mir::Field;
 use rustc_middle::ty::layout::IntegerExt;
 use rustc_middle::ty::{self, Const, Ty, TyCtxt};
 use rustc_target::abi::{Integer, Size, VariantIdx};
 
 use smallvec::{smallvec, SmallVec};
-use std::borrow::Cow;
 use std::cmp::{self, max, min, Ordering};
-use std::convert::TryInto;
 use std::fmt;
 use std::iter::{FromIterator, IntoIterator};
 use std::ops::RangeInclusive;
 
-crate fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> Pat<'tcx> {
-    LiteralExpander { tcx: cx.tcx, param_env: cx.param_env }.fold_pattern(&pat)
+crate fn expand_pattern<'tcx>(pat: Pat<'tcx>) -> Pat<'tcx> {
+    LiteralExpander.fold_pattern(&pat)
 }
 
-struct LiteralExpander<'tcx> {
-    tcx: TyCtxt<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-}
-
-impl<'tcx> LiteralExpander<'tcx> {
-    /// Derefs `val` and potentially unsizes the value if `crty` is an array and `rty` a slice.
-    ///
-    /// `crty` and `rty` can differ because you can use array constants in the presence of slice
-    /// patterns. So the pattern may end up being a slice, but the constant is an array. We convert
-    /// the array to a slice in that case.
-    fn fold_const_value_deref(
-        &mut self,
-        val: ConstValue<'tcx>,
-        // the pattern's pointee type
-        rty: Ty<'tcx>,
-        // the constant's pointee type
-        crty: Ty<'tcx>,
-    ) -> ConstValue<'tcx> {
-        debug!("fold_const_value_deref {:?} {:?} {:?}", val, rty, crty);
-        match (val, &crty.kind(), &rty.kind()) {
-            // the easy case, deref a reference
-            (ConstValue::Scalar(p), x, y) if x == y => {
-                match p {
-                    Scalar::Ptr(p) => {
-                        let alloc = self.tcx.global_alloc(p.alloc_id).unwrap_memory();
-                        ConstValue::ByRef { alloc, offset: p.offset }
-                    }
-                    Scalar::Raw { .. } => {
-                        let layout = self.tcx.layout_of(self.param_env.and(rty)).unwrap();
-                        if layout.is_zst() {
-                            // Deref of a reference to a ZST is a nop.
-                            ConstValue::Scalar(Scalar::zst())
-                        } else {
-                            // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;`
-                            bug!("cannot deref {:#?}, {} -> {}", val, crty, rty);
-                        }
-                    }
-                }
-            }
-            // unsize array to slice if pattern is array but match value or other patterns are slice
-            (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => {
-                assert_eq!(t, u);
-                ConstValue::Slice {
-                    data: self.tcx.global_alloc(p.alloc_id).unwrap_memory(),
-                    start: p.offset.bytes().try_into().unwrap(),
-                    end: n.eval_usize(self.tcx, ty::ParamEnv::empty()).try_into().unwrap(),
-                }
-            }
-            // fat pointers stay the same
-            (ConstValue::Slice { .. }, _, _)
-            | (_, ty::Slice(_), ty::Slice(_))
-            | (_, ty::Str, ty::Str) => val,
-            // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;` being used
-            _ => bug!("cannot deref {:#?}, {} -> {}", val, crty, rty),
-        }
-    }
-}
+struct LiteralExpander;
 
-impl<'tcx> PatternFolder<'tcx> for LiteralExpander<'tcx> {
+impl<'tcx> PatternFolder<'tcx> for LiteralExpander {
     fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> {
         debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind(), pat.kind);
         match (pat.ty.kind(), &*pat.kind) {
-            (&ty::Ref(_, rty, _), &PatKind::Constant { value: Const { val, ty: const_ty } })
-                if const_ty.is_ref() =>
-            {
-                let crty =
-                    if let ty::Ref(_, crty, _) = const_ty.kind() { crty } else { unreachable!() };
-                if let ty::ConstKind::Value(val) = val {
-                    Pat {
-                        ty: pat.ty,
-                        span: pat.span,
-                        kind: box PatKind::Deref {
-                            subpattern: Pat {
-                                ty: rty,
-                                span: pat.span,
-                                kind: box PatKind::Constant {
-                                    value: Const::from_value(
-                                        self.tcx,
-                                        self.fold_const_value_deref(*val, rty, crty),
-                                        rty,
-                                    ),
-                                },
-                            },
-                        },
-                    }
-                } else {
-                    bug!("cannot deref {:#?}, {} -> {}", val, crty, rty)
-                }
-            }
-
             (_, &PatKind::Binding { subpattern: Some(ref s), .. }) => s.fold_with(self),
             (_, &PatKind::AscribeUserType { subpattern: ref s, .. }) => s.fold_with(self),
             _ => pat.super_fold_with(self),
@@ -416,7 +328,7 @@ pub(super) fn is_wildcard(&self) -> bool {
 
 /// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec<[_; 2]>`
 /// works well.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
 crate struct PatStack<'p, 'tcx>(SmallVec<[&'p Pat<'tcx>; 2]>);
 
 impl<'p, 'tcx> PatStack<'p, 'tcx> {
@@ -504,13 +416,36 @@ fn from_iter<T>(iter: T) -> Self
     }
 }
 
+/// Depending on the match patterns, the specialization process might be able to use a fast path.
+/// Tracks whether we can use the fast path and the lookup table needed in those cases.
+#[derive(Clone, Debug, PartialEq)]
+enum SpecializationCache {
+    /// Patterns consist of only enum variants.
+    /// Variant patterns do not intersect with each other (in contrast to range patterns), so
+    /// the result of `Matrix::specialize_constructor` can be precomputed with lower
+    /// computational complexity.
+    /// `lookup` holds the precomputed result of `Matrix::specialize_constructor`, while `wilds`
+    /// serves two purposes: it is the precomputed result of `Matrix::specialize_wildcard`, and
+    /// it is the fallback for `Matrix::specialize_constructor` when the constructor being
+    /// applied has not been seen in the `Matrix`. See `update_cache` for further explanations.
+    Variants { lookup: FxHashMap<DefId, SmallVec<[usize; 1]>>, wilds: SmallVec<[usize; 1]> },
+    /// Does not belong to the cases above, use the slow path.
+    Incompatible,
+}
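
For intuition, the role of `lookup` and `wilds` can be sketched with a stripped-down, self-contained model. The names `VariantId`, `RowIdx`, `VariantsCache` and its methods are invented stand-ins for this illustration (a real `DefId` and the actual `Matrix` methods are not used):

```rust
use std::collections::HashMap;

type VariantId = u32; // stand-in for `DefId`
type RowIdx = usize; // index into `Matrix::patterns`

/// Simplified model of `SpecializationCache::Variants`.
struct VariantsCache {
    /// Rows whose head can match a given variant (wildcard rows included).
    lookup: HashMap<VariantId, Vec<RowIdx>>,
    /// Rows whose head is a wildcard.
    wilds: Vec<RowIdx>,
}

impl VariantsCache {
    /// Precomputed `specialize_constructor`: use the entry for the variant if it has
    /// been seen; otherwise only wildcard rows can match, so fall back to `wilds`.
    fn rows_for_variant(&self, id: VariantId) -> &[RowIdx] {
        self.lookup.get(&id).map(|v| v.as_slice()).unwrap_or(self.wilds.as_slice())
    }

    /// Precomputed `specialize_wildcard`: exactly the wildcard rows.
    fn rows_for_wildcard(&self) -> &[RowIdx] {
        &self.wilds
    }
}
```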
+
 /// A 2D matrix.
-#[derive(Clone)]
-crate struct Matrix<'p, 'tcx>(Vec<PatStack<'p, 'tcx>>);
+#[derive(Clone, PartialEq)]
+crate struct Matrix<'p, 'tcx> {
+    patterns: Vec<PatStack<'p, 'tcx>>,
+    cache: SpecializationCache,
+}
 
 impl<'p, 'tcx> Matrix<'p, 'tcx> {
     crate fn empty() -> Self {
-        Matrix(vec![])
+        // Use `SpecializationCache::Incompatible` as a placeholder; we will initialize it on the
+        // first call to `push`. See the first half of `update_cache`.
+        Matrix { patterns: vec![], cache: SpecializationCache::Incompatible }
     }
 
     /// Pushes a new row to the matrix. If the row starts with an or-pattern, this expands it.
@@ -522,18 +457,101 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
                 self.push(row)
             }
         } else {
-            self.0.push(row);
+            self.patterns.push(row);
+            self.update_cache(self.patterns.len() - 1);
+        }
+    }
+
+    fn update_cache(&mut self, idx: usize) {
+        let row = &self.patterns[idx];
+        // We don't know which kind of cache can be used until we see the first row; therefore an
+        // empty `Matrix` is initialized with `SpecializationCache::Incompatible` as a placeholder,
+        // and the cache is assigned the appropriate variant below on the first call to `push`.
+        if self.patterns.is_empty() {
+            self.cache = if row.is_empty() {
+                SpecializationCache::Incompatible
+            } else {
+                match *row.head().kind {
+                    PatKind::Variant { .. } => SpecializationCache::Variants {
+                        lookup: FxHashMap::default(),
+                        wilds: SmallVec::new(),
+                    },
+                    // Note: if the first pattern is a wildcard, then all patterns after it are
+                    // not useful. That check is simple enough, so we treat this case the same as
+                    // unsupported patterns.
+                    _ => SpecializationCache::Incompatible,
+                }
+            };
+        }
+        // Update the cache.
+        match &mut self.cache {
+            SpecializationCache::Variants { ref mut lookup, ref mut wilds } => {
+                let head = row.head();
+                match *head.kind {
+                    _ if head.is_wildcard() => {
+                        // Per rule 1.3 in the top-level comments, a wildcard pattern is included in
+                        // the result of `specialize_constructor` for *any* `Constructor`.
+                        // We push the wildcard pattern to the precomputed result for constructors
+                        // that we have seen before; results for constructors we have not yet seen
+                        // default to `wilds`, which is updated right below.
+                        for (_, v) in lookup.iter_mut() {
+                            v.push(idx);
+                        }
+                        // Per rules 2.1 and 2.2 in the top-level comments, only wildcard patterns
+                        // are included in the result of `specialize_wildcard`.
+                        // Here we track the wildcards we have seen, so in addition to acting as the
+                        // precomputed result of `specialize_wildcard`, `wilds` also serves as the
+                        // default value of `specialize_constructor` for constructors that are not in
+                        // `lookup`.
+                        wilds.push(idx);
+                    }
+                    PatKind::Variant { adt_def, variant_index, .. } => {
+                        // Handle the cases of rules 1.1 and 1.2 in the top-level comments.
+                        // A variant pattern is only included in the result of
+                        // `specialize_constructor` for the matching constructor, so we track it in
+                        // a `FxHashMap` keyed by the variant's `DefId`.
+                        lookup
+                            .entry(adt_def.variants[variant_index].def_id)
+                            // Default to `wilds` for absent keys. See above for an explanation.
+                            .or_insert_with(|| wilds.clone())
+                            .push(idx);
+                    }
+                    _ => {
+                        self.cache = SpecializationCache::Incompatible;
+                    }
+                }
+            }
+            SpecializationCache::Incompatible => {}
         }
     }
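
To make the bookkeeping above concrete, here is a hypothetical trace (not taken from the source) of the cache state as three rows are pushed:

```rust
// Hypothetical trace for
//     match x { E::A(_) => .., E::B => .., _ => .. }
//
// push idx 0, head `E::A(_)`: lookup = { A: [0] }                 wilds = []
// push idx 1, head `E::B`   : lookup = { A: [0], B: [1] }         wilds = []
// push idx 2, head `_`      : lookup = { A: [0, 2], B: [1, 2] }   wilds = [2]
//
// `specialize_wildcard` then reads `wilds` directly, and
// `specialize_constructor(Variant(C))` for a variant `C` that never appears in the
// matrix falls back to `wilds`, i.e. only row 2 can match it.
```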
 
     /// Iterate over the first component of each row
     fn heads<'a>(&'a self) -> impl Iterator<Item = &'a Pat<'tcx>> + Captures<'p> {
-        self.0.iter().map(|r| r.head())
+        self.patterns.iter().map(|r| r.head())
     }
 
     /// This computes `D(self)`. See top of the file for explanations.
     fn specialize_wildcard(&self) -> Self {
-        self.0.iter().filter_map(|r| r.specialize_wildcard()).collect()
+        match &self.cache {
+            SpecializationCache::Variants { wilds, .. } => {
+                let result =
+                    wilds.iter().filter_map(|&i| self.patterns[i].specialize_wildcard()).collect();
+                // When debug assertions are enabled, check the results against the "slow path"
+                // result.
+                debug_assert_eq!(
+                    result,
+                    Self {
+                        patterns: self.patterns.clone(),
+                        cache: SpecializationCache::Incompatible
+                    }
+                    .specialize_wildcard()
+                );
+                result
+            }
+            SpecializationCache::Incompatible => {
+                self.patterns.iter().filter_map(|r| r.specialize_wildcard()).collect()
+            }
+        }
     }
 
     /// This computes `S(constructor, self)`. See top of the file for explanations.
@@ -543,10 +561,47 @@ fn specialize_constructor(
         constructor: &Constructor<'tcx>,
         ctor_wild_subpatterns: &Fields<'p, 'tcx>,
     ) -> Matrix<'p, 'tcx> {
-        self.0
-            .iter()
-            .filter_map(|r| r.specialize_constructor(cx, constructor, ctor_wild_subpatterns))
-            .collect()
+        match &self.cache {
+            SpecializationCache::Variants { lookup, wilds } => {
+                let result: Self = if let Constructor::Variant(id) = constructor {
+                    lookup
+                        .get(id)
+                        // Default to `wilds` for absent keys. See `update_cache` for an explanation.
+                        .unwrap_or(&wilds)
+                        .iter()
+                        .filter_map(|&i| {
+                            self.patterns[i].specialize_constructor(
+                                cx,
+                                constructor,
+                                ctor_wild_subpatterns,
+                            )
+                        })
+                        .collect()
+                } else {
+                    unreachable!()
+                };
+                // When debug assertions are enabled, check the results against the "slow path"
+                // result.
+                debug_assert_eq!(
+                    result,
+                    Matrix {
+                        patterns: self.patterns.clone(),
+                        cache: SpecializationCache::Incompatible
+                    }
+                    .specialize_constructor(
+                        cx,
+                        constructor,
+                        ctor_wild_subpatterns
+                    )
+                );
+                result
+            }
+            SpecializationCache::Incompatible => self
+                .patterns
+                .iter()
+                .filter_map(|r| r.specialize_constructor(cx, constructor, ctor_wild_subpatterns))
+                .collect(),
+        }
     }
 }
 
@@ -564,11 +619,12 @@ fn specialize_constructor(
 /// +++++++++++++++++++++++++++++
 /// + _     + [_, _, tail @ ..] +
 /// +++++++++++++++++++++++++++++
+/// ```
 impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "\n")?;
 
-        let &Matrix(ref m) = self;
+        let Matrix { patterns: m, .. } = self;
         let pretty_printed_matrix: Vec<Vec<String>> =
             m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect();
 
@@ -778,13 +834,6 @@ enum Constructor<'tcx> {
 }
 
 impl<'tcx> Constructor<'tcx> {
-    fn is_slice(&self) -> bool {
-        match self {
-            Slice(_) => true,
-            _ => false,
-        }
-    }
-
     fn variant_index_for_adt<'a>(
         &self,
         cx: &MatchCheckCtxt<'a, 'tcx>,
@@ -822,8 +871,6 @@ fn subtract_ctors(&self, other_ctors: &Vec<Constructor<'tcx>>) -> Vec<Constructo
                     .iter()
                     .filter_map(|c: &Constructor<'_>| match c {
                         Slice(slice) => Some(*slice),
-                        // FIXME(oli-obk): implement `deref` for `ConstValue`
-                        ConstantValue(..) => None,
                         _ => bug!("bad slice pattern constructor {:?}", c),
                     })
                     .map(Slice::value_kind);
@@ -1043,12 +1090,6 @@ fn from_single_pattern(pat: &'p Pat<'tcx>) -> Self {
         Fields::Slice(std::slice::from_ref(pat))
     }
 
-    /// Construct a new `Fields` from the given patterns. You must be sure those patterns can't
-    /// contain fields that need to be filtered out. When in doubt, prefer `replace_fields`.
-    fn from_slice_unfiltered(pats: &'p [Pat<'tcx>]) -> Self {
-        Fields::Slice(pats)
-    }
-
     /// Convenience; internal use.
     fn wildcards_from_tys(
         cx: &MatchCheckCtxt<'p, 'tcx>,
@@ -1438,9 +1479,7 @@ fn all_constructors<'a, 'tcx>(
         )
     };
     match *pcx.ty.kind() {
-        ty::Bool => {
-            [true, false].iter().map(|&b| ConstantValue(ty::Const::from_bool(cx.tcx, b))).collect()
-        }
+        ty::Bool => vec![make_range(0, 1)],
         ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
             let len = len.eval_usize(cx.tcx, cx.param_env);
             if len != 0 && cx.is_uninhabited(sub_ty) {
@@ -1553,7 +1592,7 @@ impl<'tcx> IntRange<'tcx> {
     #[inline]
     fn is_integral(ty: Ty<'_>) -> bool {
         match ty.kind() {
-            ty::Char | ty::Int(_) | ty::Uint(_) => true,
+            ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool => true,
             _ => false,
         }
     }
@@ -1575,6 +1614,7 @@ fn treat_exhaustively(&self, tcx: TyCtxt<'tcx>) -> bool {
     #[inline]
     fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> {
         match *ty.kind() {
+            ty::Bool => Some((Size::from_bytes(1), 0)),
             ty::Char => Some((Size::from_bytes(4), 0)),
             ty::Int(ity) => {
                 let size = Integer::from_attr(&tcx, SignedInt(ity)).size();
@@ -1824,7 +1864,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     is_under_guard: bool,
     is_top_level: bool,
 ) -> Usefulness<'tcx> {
-    let &Matrix(ref rows) = matrix;
+    let Matrix { patterns: rows, .. } = matrix;
     debug!("is_useful({:#?}, {:#?})", matrix, v);
 
     // The base case. We are pattern-matching on () and the return value is
@@ -2064,17 +2104,8 @@ fn pat_constructor<'tcx>(
             if let Some(int_range) = IntRange::from_const(tcx, param_env, value, pat.span) {
                 Some(IntRange(int_range))
             } else {
-                match (value.val, &value.ty.kind()) {
-                    (_, ty::Array(_, n)) => {
-                        let len = n.eval_usize(tcx, param_env);
-                        Some(Slice(Slice { array_len: Some(len), kind: FixedLen(len) }))
-                    }
-                    (ty::ConstKind::Value(ConstValue::Slice { start, end, .. }), ty::Slice(_)) => {
-                        let len = (end - start) as u64;
-                        Some(Slice(Slice { array_len: None, kind: FixedLen(len) }))
-                    }
-                    // FIXME(oli-obk): implement `deref` for `ConstValue`
-                    // (ty::ConstKind::Value(ConstValue::ByRef { .. }), ty::Slice(_)) => { ... }
+                match value.ty.kind() {
+                    ty::Float(_) => Some(FloatRange(value, value, RangeEnd::Included)),
                     _ => Some(ConstantValue(value)),
                 }
             }
@@ -2111,75 +2142,6 @@ fn pat_constructor<'tcx>(
     }
 }
 
-// checks whether a constant is equal to a user-written slice pattern. Only supports byte slices,
-// meaning all other types will compare unequal and thus equal patterns often do not cause the
-// second pattern to lint about unreachable match arms.
-fn slice_pat_covered_by_const<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    _span: Span,
-    const_val: &'tcx ty::Const<'tcx>,
-    prefix: &[Pat<'tcx>],
-    slice: &Option<Pat<'tcx>>,
-    suffix: &[Pat<'tcx>],
-    param_env: ty::ParamEnv<'tcx>,
-) -> Result<bool, ErrorReported> {
-    let const_val_val = if let ty::ConstKind::Value(val) = const_val.val {
-        val
-    } else {
-        bug!(
-            "slice_pat_covered_by_const: {:#?}, {:#?}, {:#?}, {:#?}",
-            const_val,
-            prefix,
-            slice,
-            suffix,
-        )
-    };
-
-    let data: &[u8] = match (const_val_val, &const_val.ty.kind()) {
-        (ConstValue::ByRef { offset, alloc, .. }, ty::Array(t, n)) => {
-            assert_eq!(*t, tcx.types.u8);
-            let n = n.eval_usize(tcx, param_env);
-            let ptr = Pointer::new(AllocId(0), offset);
-            alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap()
-        }
-        (ConstValue::Slice { data, start, end }, ty::Slice(t)) => {
-            assert_eq!(*t, tcx.types.u8);
-            let ptr = Pointer::new(AllocId(0), Size::from_bytes(start));
-            data.get_bytes(&tcx, ptr, Size::from_bytes(end - start)).unwrap()
-        }
-        // FIXME(oli-obk): create a way to extract fat pointers from ByRef
-        (_, ty::Slice(_)) => return Ok(false),
-        _ => bug!(
-            "slice_pat_covered_by_const: {:#?}, {:#?}, {:#?}, {:#?}",
-            const_val,
-            prefix,
-            slice,
-            suffix,
-        ),
-    };
-
-    let pat_len = prefix.len() + suffix.len();
-    if data.len() < pat_len || (slice.is_none() && data.len() > pat_len) {
-        return Ok(false);
-    }
-
-    for (ch, pat) in data[..prefix.len()]
-        .iter()
-        .zip(prefix)
-        .chain(data[data.len() - suffix.len()..].iter().zip(suffix))
-    {
-        if let box PatKind::Constant { value } = pat.kind {
-            let b = value.eval_bits(tcx, param_env, pat.ty);
-            assert_eq!(b as u8 as u128, b);
-            if b as u8 != *ch {
-                return Ok(false);
-            }
-        }
-    }
-
-    Ok(true)
-}
-
 /// For exhaustive integer matching, some constructors are grouped within other constructors
 /// (namely integer typed values are grouped within ranges). However, when specialising these
 /// constructors, we want to be specialising for the underlying constructors (the integers), not
@@ -2266,7 +2228,7 @@ fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
                 // `borders` is the set of borders between equivalence classes: each equivalence
                 // class lies between 2 borders.
                 let row_borders = matrix
-                    .0
+                    .patterns
                     .iter()
                     .flat_map(|row| {
                         IntRange::from_pat(tcx, param_env, row.head()).map(|r| (r, row.len()))
@@ -2477,35 +2439,6 @@ fn lint_overlapping_patterns<'tcx>(
     }
 }
 
-fn constructor_covered_by_range<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-    ctor: &Constructor<'tcx>,
-    pat: &Pat<'tcx>,
-) -> Option<()> {
-    if let Single = ctor {
-        return Some(());
-    }
-
-    let (pat_from, pat_to, pat_end, ty) = match *pat.kind {
-        PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty),
-        PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty),
-        _ => bug!("`constructor_covered_by_range` called with {:?}", pat),
-    };
-    let (ctor_from, ctor_to, ctor_end) = match *ctor {
-        ConstantValue(value) => (value, value, RangeEnd::Included),
-        FloatRange(from, to, ctor_end) => (from, to, ctor_end),
-        _ => bug!("`constructor_covered_by_range` called with {:?}", ctor),
-    };
-    trace!("constructor_covered_by_range {:#?}, {:#?}, {:#?}, {}", ctor, pat_from, pat_to, ty);
-
-    let to = compare_const_vals(tcx, ctor_to, pat_to, param_env, ty)?;
-    let from = compare_const_vals(tcx, ctor_from, pat_from, param_env, ty)?;
-    let intersects = (from == Ordering::Greater || from == Ordering::Equal)
-        && (to == Ordering::Less || (pat_end == ctor_end && to == Ordering::Equal));
-    if intersects { Some(()) } else { None }
-}
-
 /// This is the main specialization step. It expands the pattern
 /// into `arity` patterns based on the constructor. For most patterns, the step is trivial,
 /// for instance tuple patterns are flattened and box patterns expand into their inner pattern.
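
In surface-syntax terms, specialization behaves roughly as follows (illustration only, not the compiler's representation):

```rust
// Specializing a pattern with a constructor of arity `a` either yields `a`
// subpatterns or drops the pattern entirely:
//
//   `(Some(x), y)` with the 2-tuple constructor  =>  Some(x), y
//   `Some(x)`      with the `Some` constructor   =>  x
//   `_`            with `Some` (arity 1)         =>  _
//   `None`         with the `Some` constructor   =>  no rows (pattern filtered out)
```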
@@ -2550,93 +2483,51 @@ fn specialize_one_pattern<'p, 'tcx>(
 
         PatKind::Deref { ref subpattern } => Some(Fields::from_single_pattern(subpattern)),
 
-        PatKind::Constant { value } if constructor.is_slice() => {
-            // We extract an `Option` for the pointer because slices of zero
-            // elements don't necessarily point to memory, they are usually
-            // just integers. The only time they should be pointing to memory
-            // is when they are subslices of nonzero slices.
-            let (alloc, offset, n, ty) = match value.ty.kind() {
-                ty::Array(t, n) => {
-                    let n = n.eval_usize(cx.tcx, cx.param_env);
-                    // Shortcut for `n == 0` where no matter what `alloc` and `offset` we produce,
-                    // the result would be exactly what we early return here.
-                    if n == 0 {
-                        if ctor_wild_subpatterns.len() as u64 != n {
-                            return None;
-                        }
-                        return Some(Fields::empty());
-                    }
-                    match value.val {
-                        ty::ConstKind::Value(ConstValue::ByRef { offset, alloc, .. }) => {
-                            (Cow::Borrowed(alloc), offset, n, t)
-                        }
-                        _ => span_bug!(pat.span, "array pattern is {:?}", value,),
+        PatKind::Constant { .. } | PatKind::Range { .. } => {
+            match constructor {
+                Single => {}
+                IntRange(ctor) => {
+                    let pat = IntRange::from_pat(cx.tcx, cx.param_env, pat)?;
+                    ctor.intersection(cx.tcx, &pat)?;
+                    // Constructor splitting should ensure that all intersections we encounter
+                    // are actually inclusions.
+                    assert!(ctor.is_subrange(&pat));
+                }
+                FloatRange(ctor_from, ctor_to, ctor_end) => {
+                    let (pat_from, pat_to, pat_end, ty) = match *pat.kind {
+                        PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty),
+                        PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty),
+                        _ => unreachable!(), // This is ensured by the branch we're in
+                    };
+                    let to = compare_const_vals(cx.tcx, ctor_to, pat_to, cx.param_env, ty)?;
+                    let from = compare_const_vals(cx.tcx, ctor_from, pat_from, cx.param_env, ty)?;
+                    let intersects = (from == Ordering::Greater || from == Ordering::Equal)
+                        && (to == Ordering::Less
+                            || (pat_end == *ctor_end && to == Ordering::Equal));
+                    if !intersects {
+                        return None;
                     }
                 }
-                ty::Slice(t) => {
-                    match value.val {
-                        ty::ConstKind::Value(ConstValue::Slice { data, start, end }) => {
-                            let offset = Size::from_bytes(start);
-                            let n = (end - start) as u64;
-                            (Cow::Borrowed(data), offset, n, t)
-                        }
-                        ty::ConstKind::Value(ConstValue::ByRef { .. }) => {
-                            // FIXME(oli-obk): implement `deref` for `ConstValue`
-                            return None;
-                        }
+                ConstantValue(ctor_value) => {
+                    let pat_value = match *pat.kind {
+                        PatKind::Constant { value } => value,
                         _ => span_bug!(
                             pat.span,
-                            "slice pattern constant must be scalar pair but is {:?}",
-                            value,
+                            "unexpected range pattern {:?} for constant value ctor",
+                            pat
                         ),
+                    };
+
+                    // FIXME: there's probably a more direct way of comparing for equality
+                    if compare_const_vals(cx.tcx, ctor_value, pat_value, cx.param_env, pat.ty)?
+                        != Ordering::Equal
+                    {
+                        return None;
                     }
                 }
-                _ => span_bug!(
-                    pat.span,
-                    "unexpected const-val {:?} with ctor {:?}",
-                    value,
-                    constructor,
-                ),
-            };
-            if ctor_wild_subpatterns.len() as u64 != n {
-                return None;
-            }
-
-            // Convert a constant slice/array pattern to a list of patterns.
-            let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?;
-            let ptr = Pointer::new(AllocId(0), offset);
-            let pats = cx.pattern_arena.alloc_from_iter((0..n).filter_map(|i| {
-                let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?;
-                let scalar = alloc.read_scalar(&cx.tcx, ptr, layout.size).ok()?;
-                let scalar = scalar.check_init().ok()?;
-                let value = ty::Const::from_scalar(cx.tcx, scalar, ty);
-                let pattern = Pat { ty, span: pat.span, kind: box PatKind::Constant { value } };
-                Some(pattern)
-            }));
-            // Ensure none of the dereferences failed.
-            if pats.len() as u64 != n {
-                return None;
-            }
-            Some(Fields::from_slice_unfiltered(pats))
-        }
-
-        PatKind::Constant { .. } | PatKind::Range { .. } => {
-            // If the constructor is a:
-            // - Single value: add a row if the pattern contains the constructor.
-            // - Range: add a row if the constructor intersects the pattern.
-            if let IntRange(ctor) = constructor {
-                let pat = IntRange::from_pat(cx.tcx, cx.param_env, pat)?;
-                ctor.intersection(cx.tcx, &pat)?;
-                // Constructor splitting should ensure that all intersections we encounter
-                // are actually inclusions.
-                assert!(ctor.is_subrange(&pat));
-            } else {
-                // Fallback for non-ranges and ranges that involve
-                // floating-point numbers, which are not conveniently handled
-                // by `IntRange`. For these cases, the constructor may not be a
-                // range so intersection actually devolves into being covered
-                // by the pattern.
-                constructor_covered_by_range(cx.tcx, cx.param_env, constructor, pat)?;
+                _ => {
+                    span_bug!(pat.span, "unexpected pattern {:?} with ctor {:?}", pat, constructor)
+                }
             }
             Some(Fields::empty())
         }
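
The `FloatRange` arm above boils down to an endpoint comparison; the condition named `intersects` effectively checks that the constructor's range lies within the pattern's range. A minimal sketch of the same check on plain `f64` endpoints (illustration only; the compiler compares `ty::Const` values through `compare_const_vals`):

```rust
/// Mirrors the `intersects` condition in the `FloatRange` arm.
#[derive(Clone, Copy, PartialEq)]
enum End {
    Included,
    Excluded,
}

fn ctor_covered_by_pat(
    (ctor_from, ctor_to, ctor_end): (f64, f64, End),
    (pat_from, pat_to, pat_end): (f64, f64, End),
) -> bool {
    // `from == Greater || from == Equal`
    let lower_ok = ctor_from >= pat_from;
    // `to == Less || (pat_end == ctor_end && to == Equal)`
    let upper_ok = ctor_to < pat_to || (pat_end == ctor_end && ctor_to == pat_to);
    lower_ok && upper_ok
}
```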
@@ -2659,21 +2550,6 @@ fn specialize_one_pattern<'p, 'tcx>(
                 let suffix = suffix.iter().enumerate().map(|(i, p)| (arity - suffix.len() + i, p));
                 Some(ctor_wild_subpatterns.replace_fields_indexed(prefix.chain(suffix)))
             }
-            ConstantValue(cv) => {
-                match slice_pat_covered_by_const(
-                    cx.tcx,
-                    pat.span,
-                    cv,
-                    prefix,
-                    slice,
-                    suffix,
-                    cx.param_env,
-                ) {
-                    Ok(true) => Some(Fields::empty()),
-                    Ok(false) => None,
-                    Err(ErrorReported) => None,
-                }
-            }
             _ => span_bug!(pat.span, "unexpected ctor {:?} for slice pat", constructor),
         },