1 //! Code related to match expressions. These are sufficiently complex
2 //! to warrant their own module and submodules. :) This main module
3 //! includes the high-level algorithm, the submodules contain the
6 use crate::build::scope::{CachedBlock, DropKind};
7 use crate::build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
8 use crate::build::{BlockAnd, BlockAndExtension, Builder};
9 use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
10 use crate::hair::{self, *};
12 use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty};
13 use rustc::ty::layout::VariantIdx;
14 use rustc_data_structures::bit_set::BitSet;
15 use rustc_data_structures::fx::FxHashMap;
16 use syntax::ast::{Name, NodeId};
19 // helper functions, broken out by category:
24 use std::convert::TryFrom;
26 /// ArmHasGuard is isomorphic to a boolean flag. It indicates whether
27 /// a match arm has a guard expression attached to it.
// Kept as a newtype rather than a bare `bool` so call sites like
// `ArmHasGuard(arm.guard.is_some())` remain self-documenting.
28 #[derive(Copy, Clone, Debug)]
29 pub(crate) struct ArmHasGuard(pub bool);
31 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// NOTE(review): this listing is elided -- the `match_expr` signature line and
// several statements are missing from view. Comments below describe only what
// the visible code demonstrates; confirm details against the full source.
34 destination: &Place<'tcx>,
36 mut block: BasicBlock,
37 discriminant: ExprRef<'tcx>,
// Lower the scrutinee ("discriminant") expression into a place that the
// generated decision tree can repeatedly inspect.
40 let tcx = self.hir.tcx();
41 let discriminant_span = discriminant.span();
42 let discriminant_place = unpack!(block = self.as_place(block, discriminant));
44 // Matching on a `discriminant_place` with an uninhabited type doesn't
45 // generate any memory reads by itself, and so if the place "expression"
46 // contains unsafe operations like raw pointer dereferences or union
47 // field projections, we wouldn't know to require an `unsafe` block
48 // around a `match` equivalent to `std::intrinsics::unreachable()`.
49 // See issue #47412 for this hole being discovered in the wild.
51 // HACK(eddyb) Work around the above issue by adding a dummy inspection
52 // of `discriminant_place`, specifically by applying `ReadForMatch`.
54 // NOTE: ReadForMatch also checks that the discriminant is initialized.
55 // This is currently needed to not allow matching on an uninitialized,
56 // uninhabited value. If we get never patterns, those will check that
57 // the place is initialized, and so this read would only be used to
60 let source_info = self.source_info(discriminant_span);
61 self.cfg.push(block, Statement {
63 kind: StatementKind::FakeRead(
64 FakeReadCause::ForMatchedPlace,
65 discriminant_place.clone(),
// Allocate one fresh basic block per arm; candidates branch to these
// after bindings and guards succeed.
69 let mut arm_blocks = ArmBlocks {
70 blocks: arms.iter().map(|_| self.cfg.start_new_block()).collect(),
73 // Get the arm bodies and their scopes, while declaring bindings.
74 let arm_bodies: Vec<_> = arms.iter()
76 // BUG: use arm lint level
77 let body = self.hir.mirror(arm.body.clone());
78 let scope = self.declare_bindings(
83 ArmHasGuard(arm.guard.is_some()),
84 Some((Some(&discriminant_place), discriminant_span)),
86 (body, scope.unwrap_or(self.source_scope))
90 // Create the pre-binding blocks for the candidates, which are linked
90 // to each other by false edges.
91 let candidate_count = arms.iter().map(|c| c.patterns.len()).sum::<usize>();
// `0..=candidate_count` is inclusive: one block per candidate plus one
// trailing block, which is terminated with `Unreachable` below.
92 let pre_binding_blocks: Vec<_> = (0..=candidate_count)
93 .map(|_| self.cfg.start_new_block())
96 let mut has_guard = false;
98 // assemble a list of candidates: there is one candidate per
99 // pattern, which means there may be more than one candidate
100 // *per arm*. These candidates are kept sorted such that the
101 // highest priority candidate comes first in the list.
102 // (i.e., same order as in source)
104 let candidates: Vec<_> = arms.iter()
106 .flat_map(|(arm_index, arm)| {
110 .map(move |(pat_index, pat)| (arm_index, pat_index, pat, arm.guard.clone()))
// Pair each candidate with its own pre-binding block and the *next*
// candidate's pre-binding block (used as the false-edge target).
115 .zip(pre_binding_blocks.iter().skip(1)),
119 (arm_index, pat_index, pattern, guard),
120 (pre_binding_block, next_candidate_pre_binding_block)
122 has_guard |= guard.is_some();
124 // One might ask: why not build up the match pair such that it
125 // matches via `borrowed_input_temp.deref()` instead of
126 // using the `discriminant_place` directly, as it is doing here?
128 // The basic answer is that if you do that, then you end up with
129 // accesses to a shared borrow of the input and that conflicts with
130 // any arms that look like e.g.
134 // ... /* mutate `foo` in arm body */ ...
138 // (Perhaps we could further revise the MIR
139 // construction here so that it only does a
140 // shared borrow at the outset and delays doing
141 // the mutable borrow until after the pattern is
142 // matched *and* the guard (if any) for the arm
147 match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
153 pre_binding_block: *pre_binding_block,
154 next_candidate_pre_binding_block: *next_candidate_pre_binding_block,
// The extra trailing pre-binding block allocated above has no candidate
// and can never actually be reached.
160 let outer_source_info = self.source_info(span);
162 *pre_binding_blocks.last().unwrap(),
164 TerminatorKind::Unreachable,
167 // Maps a place to the kind of Fake borrow that we want to perform on
168 // it: either Shallow or Shared, depending on whether the place is
169 // bound in the match, or just switched on.
170 // If there are no match guards then we don't need any fake borrows,
171 // so don't track them.
172 let mut fake_borrows = if has_guard && tcx.generate_borrow_of_any_match_input() {
173 Some(FxHashMap::default())
178 let pre_binding_blocks: Vec<_> = candidates
180 .map(|cand| (cand.pre_binding_block, cand.span))
183 // this will generate code to test discriminant_place and
184 // branch to the appropriate arm block
185 let otherwise = self.match_candidates(
193 if !otherwise.is_empty() {
194 // All matches are exhaustive. However, because some matches
195 // only have exponentially-large exhaustive decision trees, we
196 // sometimes generate an inexhaustive decision tree.
198 // In that case, the inexhaustive tips of the decision tree
199 // can't be reached - terminate them with an `unreachable`.
200 let source_info = self.source_info(span);
202 let mut otherwise = otherwise;
204 otherwise.dedup(); // variant switches can introduce duplicate target blocks
205 for block in otherwise {
207 .terminate(block, source_info, TerminatorKind::Unreachable);
211 if let Some(fake_borrows) = fake_borrows {
212 self.add_fake_borrows(&pre_binding_blocks, fake_borrows, source_info, block);
215 // all the arm blocks will rejoin here
216 let end_block = self.cfg.start_new_block();
218 let outer_source_info = self.source_info(span);
219 for (arm_index, (body, source_scope)) in arm_bodies.into_iter().enumerate() {
220 let mut arm_block = arm_blocks.blocks[arm_index];
221 // Re-enter the source scope we created the bindings in.
222 self.source_scope = source_scope;
223 unpack!(arm_block = self.into(destination, arm_block, body));
// Each arm body, once lowered into `destination`, jumps to the shared
// join block.
227 TerminatorKind::Goto { target: end_block },
// Restore the scope we entered the match with.
230 self.source_scope = outer_source_info.scope;
/// Lowers an irrefutable pattern binding (`let <pat> = <init>`). Simple
/// by-value bindings (with or without a user type ascription) are
/// special-cased to write the initializer directly into the bound local;
/// everything else falls through to `place_into_pattern`.
// NOTE(review): some lines of this function are elided from this listing.
235 pub(super) fn expr_into_pattern(
237 mut block: BasicBlock,
238 irrefutable_pat: Pattern<'tcx>,
239 initializer: ExprRef<'tcx>,
241 match *irrefutable_pat.kind {
242 // Optimize the case of `let x = ...` to write directly into `x`
243 PatternKind::Binding {
244 mode: BindingMode::ByValue,
250 self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
251 unpack!(block = self.into(&place, block, initializer));
254 // Inject a fake read, see comments on `FakeReadCause::ForLet`.
255 let source_info = self.source_info(irrefutable_pat.span);
260 kind: StatementKind::FakeRead(FakeReadCause::ForLet, place),
264 self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
268 // Optimize the case of `let x: T = ...` to write directly
269 // into `x` and then require that `T == typeof(x)`.
271 // Weirdly, this is needed to prevent the
272 // `intrinsic-move-val.rs` test case from crashing. That
273 // test works with uninitialized values in a rather
274 // dubious way, so it may be that the test is kind of
276 PatternKind::AscribeUserType {
277 subpattern: Pattern {
278 kind: box PatternKind::Binding {
279 mode: BindingMode::ByValue,
286 ascription: hair::pattern::Ascription {
287 user_ty: pat_ascription_ty,
293 self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
294 unpack!(block = self.into(&place, block, initializer));
296 // Inject a fake read, see comments on `FakeReadCause::ForLet`.
297 let pattern_source_info = self.source_info(irrefutable_pat.span);
301 source_info: pattern_source_info,
302 kind: StatementKind::FakeRead(FakeReadCause::ForLet, place.clone()),
306 let ty_source_info = self.source_info(user_ty_span);
// Canonicalize the user-written ascription against the inferred type of
// the binding's place so borrowck can relate the two.
307 let user_ty = box pat_ascription_ty.user_ty(
308 &mut self.canonical_user_type_annotations,
309 place.ty(&self.local_decls, self.hir.tcx()).to_ty(self.hir.tcx()),
315 source_info: ty_source_info,
316 kind: StatementKind::AscribeUserType(
318 // We always use invariant as the variance here. This is because the
319 // variance field from the ascription refers to the variance to use
320 // when applying the type to the value being matched, but this
321 // ascription applies rather to the type of the binding. e.g., in this
328 // We are creating an ascription that defines the type of `x` to be
329 // exactly `T` (i.e., with invariance). The variance field, in
330 // contrast, is intended to be used to relate `T` to the type of
332 ty::Variance::Invariant,
338 self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
// General case: evaluate the initializer into a temporary place, then
// match the irrefutable pattern against that place.
342 let place = unpack!(block = self.as_place(block, initializer));
343 self.place_into_pattern(block, irrefutable_pat, &place, true)
/// Binds an irrefutable pattern against an already-evaluated place by
/// building a single dummy `Candidate`, simplifying it fully into
/// bindings, and then applying those bindings.
348 pub fn place_into_pattern(
351 irrefutable_pat: Pattern<'tcx>,
352 initializer: &Place<'tcx>,
353 set_match_place: bool,
355 // create a dummy candidate
356 let mut candidate = Candidate {
357 span: irrefutable_pat.span,
358 match_pairs: vec![MatchPair::new(initializer.clone(), &irrefutable_pat)],
363 // since we don't call `match_candidates`, the next fields are unused
366 pre_binding_block: block,
367 next_candidate_pre_binding_block: block,
370 // Simplify the candidate. Since the pattern is irrefutable, this should
371 // always convert all match-pairs into bindings.
372 self.simplify_candidate(&mut candidate);
374 if !candidate.match_pairs.is_empty() {
// A leftover match pair means the pattern was refutable after all --
// an internal compiler error, not a user error.
376 candidate.match_pairs[0].pattern.span,
377 "match pairs {:?} remaining after simplifying \
378 irrefutable pattern",
379 candidate.match_pairs
383 // for matches and function arguments, the place that is being matched
384 // can be set when creating the variables. But the place for
385 // let PATTERN = ... might not even exist until we do the assignment.
386 // so we set it here instead
388 for binding in &candidate.bindings {
389 let local = self.var_local_id(binding.var_id, OutsideGuard);
391 if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
392 opt_match_place: Some((ref mut match_place, _)),
394 }))) = self.local_decls[local].is_user_variable
396 *match_place = Some(initializer.clone());
398 bug!("Let binding to non-user variable.")
// Emit any type ascriptions the simplification collected, then bind.
403 self.ascribe_types(block, &candidate.ascriptions);
405 // now apply the bindings, which will also declare the variables
406 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
411 /// Declares the bindings of the given patterns and returns the visibility
412 /// scope for the bindings in these patterns, if such a scope had to be
413 /// created. NOTE: Declaring the bindings should always be done in their
// NOTE(review): the doc sentence above is cut short by elision in this
// listing; confirm its tail against the full source.
415 pub fn declare_bindings(
417 mut visibility_scope: Option<SourceScope>,
419 lint_level: LintLevel,
420 patterns: &[Pattern<'tcx>],
421 has_guard: ArmHasGuard,
422 opt_match_place: Option<(Option<&Place<'tcx>>, Span)>,
423 ) -> Option<SourceScope> {
425 !(visibility_scope.is_some() && lint_level.is_explicit()),
426 "can't have both a visibility and a lint scope at the same time"
428 let mut scope = self.source_scope;
429 let num_patterns = patterns.len();
430 debug!("declare_bindings: patterns={:?}", patterns);
433 UserTypeProjections::none(),
434 &mut |this, mutability, name, mode, var, span, ty, user_ty| {
// Lazily create the visibility scope the first time the visitor
// reports a binding; reuse it for all subsequent bindings.
435 if visibility_scope.is_none() {
437 Some(this.new_source_scope(scope_span, LintLevel::Inherited, None));
438 // If we have lints, create a new source scope
439 // that marks the lints for the locals. See the comment
440 // on the `source_info` field for why this is needed.
441 if lint_level.is_explicit() {
442 scope = this.new_source_scope(scope_span, lint_level, None);
445 let source_info = SourceInfo { span, scope };
446 let visibility_scope = visibility_scope.unwrap();
447 this.declare_binding(
458 opt_match_place.map(|(x, y)| (x.cloned(), y)),
/// Emits a `StorageLive` statement for the local backing `var` and
/// schedules the matching *storage* deallocation at the end of the
/// variable's region scope; builds the local's `Place`.
// NOTE(review): the parameter list is elided in this listing -- parameter
// names (`var`, `span`, `for_guard`, ...) are inferred from the body
// below; confirm against the full source.
466 pub fn storage_live_binding(
473 let local_id = self.var_local_id(var, for_guard);
474 let source_info = self.source_info(span);
479 kind: StatementKind::StorageLive(local_id),
482 let place = Place::Local(local_id);
483 let var_ty = self.local_decls[local_id].ty;
484 let hir_id = self.hir.tcx().hir().node_to_hir_id(var);
485 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
// DropKind::Storage pairs with the StorageLive pushed above; the value
// drop is scheduled separately (see `schedule_drop_for_binding`).
486 self.schedule_drop(span, region_scope, &place, var_ty, DropKind::Storage);
/// Schedules a drop for the local backing `var` at the end of the
/// variable's region scope (with a fresh drop-block cache).
// NOTE(review): the DropKind argument lines are elided in this listing;
// the visible `cached_block` field suggests a value drop -- confirm.
490 pub fn schedule_drop_for_binding(&mut self, var: NodeId, span: Span, for_guard: ForGuard) {
491 let local_id = self.var_local_id(var, for_guard);
492 let var_ty = self.local_decls[local_id].ty;
493 let hir_id = self.hir.tcx().hir().node_to_hir_id(var);
494 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
498 &Place::Local(local_id),
501 cached_block: CachedBlock::default(),
/// Recursively walks `pattern`, invoking `f` once per variable binding
/// while threading the accumulated user-type projections
/// (`pattern_user_ty`) down to the subpattern each binding sits in.
506 pub(super) fn visit_bindings(
508 pattern: &Pattern<'tcx>,
509 pattern_user_ty: UserTypeProjections<'tcx>,
518 UserTypeProjections<'tcx>,
521 debug!("visit_bindings: pattern={:?} pattern_user_ty={:?}", pattern, pattern_user_ty);
522 match *pattern.kind {
523 PatternKind::Binding {
532 f(self, mutability, name, mode, var, pattern.span, ty, pattern_user_ty.clone());
533 if let Some(subpattern) = subpattern.as_ref() {
// `x @ subpat`: after reporting the binding for `x`, recurse into
// the subpattern with the same projections.
534 self.visit_bindings(subpattern, pattern_user_ty, f);
542 | PatternKind::Slice {
// Prefix/suffix elements get per-index projections; the middle
// "rest" part gets a subslice projection bounded by the prefix and
// suffix lengths.
547 let from = u32::try_from(prefix.len()).unwrap();
548 let to = u32::try_from(suffix.len()).unwrap();
549 for subpattern in prefix {
550 self.visit_bindings(subpattern, pattern_user_ty.clone().index(), f);
552 for subpattern in slice {
553 self.visit_bindings(subpattern, pattern_user_ty.clone().subslice(from, to), f);
555 for subpattern in suffix {
556 self.visit_bindings(subpattern, pattern_user_ty.clone().index(), f);
// Leaves with no bindings: nothing to visit.
559 PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {}
560 PatternKind::Deref { ref subpattern } => {
561 self.visit_bindings(subpattern, pattern_user_ty.deref(), f);
563 PatternKind::AscribeUserType {
565 ascription: hair::pattern::Ascription {
571 // This corresponds to something like
574 // let A::<'a>(_): A<'static> = ...;
577 // Note that the variance doesn't apply here, as we are tracking the effect
578 // of `user_ty` on any bindings contained with subpattern.
579 let annotation = CanonicalUserTypeAnnotation {
581 user_ty: user_ty.user_ty,
582 inferred_ty: subpattern.ty,
584 let projection = UserTypeProjection {
585 base: self.canonical_user_type_annotations.push(annotation),
588 let subpattern_user_ty = pattern_user_ty.push_projection(&projection, user_ty_span);
589 self.visit_bindings(subpattern, subpattern_user_ty, f)
592 PatternKind::Leaf { ref subpatterns } => {
593 for subpattern in subpatterns {
594 let subpattern_user_ty = pattern_user_ty.clone().leaf(subpattern.field);
595 debug!("visit_bindings: subpattern_user_ty={:?}", subpattern_user_ty);
596 self.visit_bindings(&subpattern.pattern, subpattern_user_ty, f);
600 PatternKind::Variant { adt_def, substs: _, variant_index, ref subpatterns } => {
601 for subpattern in subpatterns {
602 let subpattern_user_ty = pattern_user_ty.clone().variant(
603 adt_def, variant_index, subpattern.field);
604 self.visit_bindings(&subpattern.pattern, subpattern_user_ty, f);
611 /// List of blocks for each arm (and potentially other metadata in the
// NOTE(review): the struct header line is elided in this listing; usage
// elsewhere (`arm_blocks.blocks[arm_index]`) shows `blocks` is indexed
// by arm number.
614 blocks: Vec<BasicBlock>,
// A candidate is one pattern of one arm: everything that must hold for
// control to branch to that arm via this pattern.
617 #[derive(Clone, Debug)]
618 pub struct Candidate<'pat, 'tcx: 'pat> {
619 // span of the original pattern that gave rise to this candidate
622 // all of these must be satisfied...
623 match_pairs: Vec<MatchPair<'pat, 'tcx>>,
625 // ...these bindings established...
626 bindings: Vec<Binding<'tcx>>,
628 // ...these types asserted...
629 ascriptions: Vec<Ascription<'tcx>>,
631 // ...and the guard must be evaluated...
632 guard: Option<Guard<'tcx>>,
634 // ...and then we branch to arm with this index.
637 // ...and the blocks for add false edges between candidates
638 pre_binding_block: BasicBlock,
639 next_candidate_pre_binding_block: BasicBlock,
641 // This uniquely identifies this candidate *within* the arm.
// A single variable binding extracted from a pattern during
// candidate simplification.
645 #[derive(Clone, Debug)]
646 struct Binding<'tcx> {
652 mutability: Mutability,
653 binding_mode: BindingMode,
656 /// Indicates that the type of `source` must be a subtype of the
657 /// user-given type `user_ty`; this is basically a no-op but can
658 /// influence region inference.
659 #[derive(Clone, Debug)]
660 struct Ascription<'tcx> {
663 user_ty: PatternTypeProjection<'tcx>,
// Variance with which `user_ty` is related to the matched value's type.
664 variance: ty::Variance,
// A (place, pattern) pair: the pattern that a given place still has to
// be tested against for a candidate to apply.
// NOTE(review): the `place` field line is elided in this listing.
667 #[derive(Clone, Debug)]
668 pub struct MatchPair<'pat, 'tcx: 'pat> {
672 // ... must match this pattern.
673 pattern: &'pat Pattern<'tcx>,
675 // HACK(eddyb) This is used to toggle whether a Slice pattern
676 // has had its length checked. This is only necessary because
677 // the "rest" part of the pattern right now has type &[T] and
678 // as such, it requires an Rvalue::Slice to be generated.
679 // See RFC 495 / issue #23121 for the eventual (proper) solution.
680 slice_len_checked: bool,
// The kinds of runtime tests the decision tree can emit for a place.
683 #[derive(Clone, Debug, PartialEq)]
684 enum TestKind<'tcx> {
685 // test the branches of enum
687 adt_def: &'tcx ty::AdtDef,
688 variants: BitSet<VariantIdx>,
// switch over a set of integer-like constant values; `indices` maps each
// tested constant to its switch-target index.
// NOTE(review): the original comment here repeated "test the branches of
// enum" -- an upstream copy-paste; the variant header is elided in this
// listing, so confirm against the full source.
695 indices: FxHashMap<ty::Const<'tcx>, usize>,
700 value: ty::Const<'tcx>,
704 // test whether the value falls within an inclusive or exclusive range
705 Range(PatternRange<'tcx>),
707 // test length of the slice is equal to len
// A concrete test to perform on a place (plus elided metadata).
715 pub struct Test<'tcx> {
// The kind of test to perform; see `TestKind`.
717 kind: TestKind<'tcx>,
720 ///////////////////////////////////////////////////////////////////////////
721 // Main matching algorithm
723 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
724 /// The main match algorithm. It begins with a set of candidates
725 /// `candidates` and has the job of generating code to determine
726 /// which of these candidates, if any, is the correct one. The
727 /// candidates are sorted such that the first item in the list
728 /// has the highest priority. When a candidate is found to match
729 /// the value, we will generate a branch to the appropriate
730 /// block found in `arm_blocks`.
732 /// The return value is a list of "otherwise" blocks. These are
733 /// points in execution where we found that *NONE* of the
734 /// candidates apply. In principle, this means that the input
735 /// list was not exhaustive, though at present we sometimes are
736 /// not smart enough to recognize all exhaustive inputs.
738 /// It might be surprising that the input can be inexhaustive.
739 /// Indeed, initially, it is not, because all matches are
740 /// exhaustive in Rust. But during processing we sometimes divide
741 /// up the list of candidates and recurse with a non-exhaustive
742 /// list. This is important to keep the size of the generated code
743 /// under control. See `test_candidates` for more details.
745 /// If `add_fake_borrows` is true, then places which need fake borrows
746 /// will be added to it.
747 fn match_candidates<'pat>(
750 arm_blocks: &mut ArmBlocks,
751 mut candidates: Vec<Candidate<'pat, 'tcx>>,
752 mut block: BasicBlock,
753 fake_borrows: &mut Option<FxHashMap<Place<'tcx>, BorrowKind>>,
754 ) -> Vec<BasicBlock> {
756 "matched_candidate(span={:?}, block={:?}, candidates={:?})",
757 span, block, candidates
760 // Start by simplifying candidates. Once this process is
761 // complete, all the match pairs which remain require some
762 // form of test, whether it be a switch or pattern comparison.
763 for candidate in &mut candidates {
764 self.simplify_candidate(candidate);
767 // The candidates are sorted by priority. Check to see
768 // whether the higher priority candidates (and hence at
769 // the front of the vec) have satisfied all their match
771 let fully_matched = candidates
773 .take_while(|c| c.match_pairs.is_empty())
776 "match_candidates: {:?} candidates fully matched",
// After the split, `candidates` holds only the fully-matched prefix and
// `unmatched_candidates` everything that still needs testing.
779 let mut unmatched_candidates = candidates.split_off(fully_matched);
781 // Insert a *Shared* borrow of any places that are bound.
782 if let Some(fake_borrows) = fake_borrows {
783 for Binding { source, .. }
784 in candidates.iter().flat_map(|candidate| &candidate.bindings)
786 fake_borrows.insert(source.clone(), BorrowKind::Shared);
// Keep the guarded prefix plus the first unguarded fully-matched
// candidate (which, having no guard and no match pairs left, always
// succeeds); anything after it is unreachable along this path.
790 let fully_matched_with_guard = candidates.iter().take_while(|c| c.guard.is_some()).count();
792 let unreachable_candidates = if fully_matched_with_guard + 1 < candidates.len() {
793 candidates.split_off(fully_matched_with_guard + 1)
798 for candidate in candidates {
799 // If so, apply any bindings, test the guard (if any), and
800 // branch to the arm.
801 if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
804 // if None is returned, then any remaining candidates
805 // are unreachable (at least not through this path).
806 // Link them with false edges.
808 "match_candidates: add false edges for unreachable {:?} and unmatched {:?}",
809 unreachable_candidates, unmatched_candidates
811 for candidate in unreachable_candidates {
812 let source_info = self.source_info(candidate.span);
813 let target = self.cfg.start_new_block();
// Still lower the unreachable candidate (for borrowck's benefit),
// then seal its guard-failure block with `Unreachable`.
814 if let Some(otherwise) =
815 self.bind_and_guard_matched_candidate(target, arm_blocks, candidate)
818 .terminate(otherwise, source_info, TerminatorKind::Unreachable);
822 if unmatched_candidates.is_empty() {
825 let target = self.cfg.start_new_block();
826 return self.match_candidates(
829 unmatched_candidates,
837 // If there are no candidates that still need testing, we're done.
838 // Since all matches are exhaustive, execution should never reach this point.
839 if unmatched_candidates.is_empty() {
843 // Test candidates where possible.
844 let (otherwise, tested_candidates) =
845 self.test_candidates(span, arm_blocks, &unmatched_candidates, block, fake_borrows);
847 // If the target candidates were exhaustive, then we are done.
848 // But for borrowck continue build decision tree.
850 // If all candidates were sorted into `target_candidates` somewhere, then
851 // the initial set was inexhaustive.
852 let untested_candidates = unmatched_candidates.split_off(tested_candidates);
853 if untested_candidates.len() == 0 {
857 // Otherwise, let's process those remaining candidates.
858 let join_block = self.join_otherwise_blocks(span, otherwise);
// Note: fake-borrow tracking is dropped (`&mut None`) for the recursive
// continuation here.
859 self.match_candidates(span, arm_blocks, untested_candidates, join_block, &mut None)
/// Funnels a list of "otherwise" blocks into a single block that the
/// remaining candidates can be tested from.
862 fn join_otherwise_blocks(&mut self, span: Span, mut otherwise: Vec<BasicBlock>) -> BasicBlock {
863 let source_info = self.source_info(span);
865 otherwise.dedup(); // variant switches can introduce duplicate target blocks
866 if otherwise.len() == 1 {
// (single-target fast path: body elided in this listing)
869 let join_block = self.cfg.start_new_block();
870 for block in otherwise {
874 TerminatorKind::Goto { target: join_block },
881 /// This is the most subtle part of the matching algorithm. At
882 /// this point, the input candidates have been fully simplified,
883 /// and so we know that all remaining match-pairs require some
884 /// sort of test. To decide what test to do, we take the highest
885 /// priority candidate (last one in the list) and extract the
886 /// first match-pair from the list. From this we decide what kind
887 /// of test is needed using `test`, defined in the `test` module.
889 /// *Note:* taking the first match pair is somewhat arbitrary, and
890 /// we might do better here by choosing more carefully what to
893 /// For example, consider the following possible match-pairs:
895 /// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has
896 /// 2. `x @ 22` -- we will do a `SwitchInt`
897 /// 3. `x @ 3..5` -- we will do a range test
900 /// Once we know what sort of test we are going to perform, this
901 /// Tests may also help us with other candidates. So we walk over
902 /// the candidates (from high to low priority) and check. This
903 /// gives us, for each outcome of the test, a transformed list of
904 /// candidates. For example, if we are testing the current
905 /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
906 /// @ 22}`, then we would have a resulting candidate of `{(x.0 as
907 /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
908 /// simpler (and, in fact, irrefutable).
910 /// But there may also be candidates that the test just doesn't
911 /// apply to. The classical example involves wildcards:
914 /// # let (x, y, z) = (true, true, true);
915 /// match (x, y, z) {
916 ///     (true, _, true) => true,    // (0)
917 ///     (_, true, _) => true,       // (1)
918 ///     (false, false, _) => false, // (2)
919 ///     (true, _, false) => false,  // (3)
923 /// In that case, after we test on `x`, there are 2 overlapping candidate
926 /// - If the outcome is that `x` is true, candidates 0, 1, and 3
927 /// - If the outcome is that `x` is false, candidates 1 and 2
929 /// Here, the traditional "decision tree" method would generate 2
930 /// separate code-paths for the 2 separate cases.
932 /// In some cases, this duplication can create an exponential amount of
933 /// code. This is most easily seen by noticing that this method terminates
934 /// with precisely the reachable arms being reachable - but that problem
935 /// is trivially NP-complete:
938 ///     match (var0, var1, var2, var3, ..) {
939 ///         (true, _, _, false, true, ...) => false,
940 ///         (_, true, true, false, _, ...) => false,
941 ///         (false, _, false, false, _, ...) => false,
947 /// Here the last arm is reachable only if there is an assignment to
948 /// the variables that does not match any of the literals. Therefore,
949 /// compilation would take an exponential amount of time in some cases.
951 /// That kind of exponential worst-case might not occur in practice, but
952 /// our simplistic treatment of constants and guards would make it occur
953 /// in very common situations - for example #29740:
957 ///     "foo" if foo_guard => ...,
958 ///     "bar" if bar_guard => ...,
959 ///     "baz" if baz_guard => ...,
964 /// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test.
966 /// It might seem that we would end up with 2 disjoint candidate
967 /// sets, consisting of the first candidate or the other 3, but our
968 /// algorithm doesn't reason about "foo" being distinct from the other
969 /// constants; it considers the latter arms to potentially match after
970 /// both outcomes, which obviously leads to an exponential amount
973 /// To avoid these kinds of problems, our algorithm tries to ensure
974 /// the amount of generated tests is linear. When we do a k-way test,
975 /// we return an additional "unmatched" set alongside the obvious `k`
976 /// sets. When we encounter a candidate that would be present in more
977 /// than one of the sets, we put it and all candidates below it into the
978 /// "unmatched" set. This ensures these `k+1` sets are disjoint.
980 /// After we perform our test, we branch into the appropriate candidate
981 /// set and recurse with `match_candidates`. These sub-matches are
982 /// obviously inexhaustive - as we discarded our otherwise set - so
983 /// we set their continuation to do `match_candidates` on the
984 /// "unmatched" set (which is again inexhaustive).
986 /// If you apply this to the above test, you basically wind up
987 /// with an if-else-if chain, testing each candidate in turn,
988 /// which is precisely what we want.
990 /// In addition to avoiding exponential-time blowups, this algorithm
991 /// also has nice property that each guard and arm is only generated
993 fn test_candidates<'pat>(
996 arm_blocks: &mut ArmBlocks,
997 candidates: &[Candidate<'pat, 'tcx>],
999 fake_borrows: &mut Option<FxHashMap<Place<'tcx>, BorrowKind>>,
1000 ) -> (Vec<BasicBlock>, usize) {
1001 // extract the match-pair from the highest priority candidate
// `candidates` is non-empty here: the caller only invokes this when
// unmatched candidates remain, so `first().unwrap()` cannot fail.
1002 let match_pair = &candidates.first().unwrap().match_pairs[0];
1003 let mut test = self.test(match_pair);
1005 // most of the time, the test to perform is simply a function
1006 // of the main candidate; but for a test like SwitchInt, we
1007 // may want to add cases based on the candidates that are
1010 TestKind::SwitchInt {
// Widen the switch with constants from lower-priority candidates,
// stopping at the first candidate the switch cannot absorb.
1015 for candidate in candidates.iter() {
1016 if !self.add_cases_to_switch(
1031 for candidate in candidates.iter() {
1032 if !self.add_variants_to_switch(&match_pair.place, candidate, variants) {
1040 // Insert a Shallow borrow of any places that is switched on.
1041 fake_borrows.as_mut().map(|fb| {
1042 fb.entry(match_pair.place.clone()).or_insert(BorrowKind::Shallow)
1045 // perform the test, branching to one of N blocks. For each of
1046 // those N possible outcomes, create a (initially empty)
1047 // vector of candidates. Those are the candidates that still
1048 // apply if the test has that particular outcome.
1050 "match_candidates: test={:?} match_pair={:?}",
1053 let target_blocks = self.perform_test(block, &match_pair.place, &test);
1054 let mut target_candidates = vec![vec![]; target_blocks.len()];
1056 // Sort the candidates into the appropriate vector in
1057 // `target_candidates`. Note that at some point we may
1058 // encounter a candidate where the test is not relevant; at
1059 // that point, we stop sorting.
1060 let tested_candidates = candidates
1063 self.sort_candidate(&match_pair.place, &test, c, &mut target_candidates)
1066 assert!(tested_candidates > 0); // at least the last candidate ought to be tested
1067 debug!("tested_candidates: {}", tested_candidates);
1069 "untested_candidates: {}",
1070 candidates.len() - tested_candidates
1073 // For each outcome of test, process the candidates that still
1074 // apply. Collect a list of blocks where control flow will
1075 // branch if one of the `target_candidate` sets is not
1077 let otherwise: Vec<_> = target_blocks
1079 .zip(target_candidates)
1080 .flat_map(|(target_block, target_candidates)| {
1081 self.match_candidates(
// Return the combined "otherwise" blocks plus how many candidates this
// test consumed, so the caller can recurse on the rest.
1091 (otherwise, tested_candidates)
1094 /// Initializes each of the bindings from the candidate by
1095 /// moving/copying/ref'ing the source as appropriate. Tests the
1096 /// guard, if any, and then branches to the arm. Returns the block
1097 /// for the case where the guard fails.
1099 /// Note: we check earlier that if there is a guard, there cannot
1100 /// be move bindings. This isn't really important for the
1101 /// self-consistency of this fn, but the reason for it should be
1102 /// clear: after we've done the assignments, if there were move
1103 /// bindings, further tests would be a use-after-move (which would
1104 /// in turn be detected by the borrowck code that runs on the
1106 fn bind_and_guard_matched_candidate<'pat>(
1108 mut block: BasicBlock,
1109 arm_blocks: &mut ArmBlocks,
1110 candidate: Candidate<'pat, 'tcx>,
1111 ) -> Option<BasicBlock> {
1113 "bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
// By this point every match pair has been simplified away; only the
// candidate's bindings, type ascriptions, and (optional) guard remain.
1117 debug_assert!(candidate.match_pairs.is_empty());
1119 self.ascribe_types(block, &candidate.ascriptions);
// `arm_block` is where the arm body lives; every successful path
// below ends in a `Goto { target: arm_block }`.
1121 let arm_block = arm_blocks.blocks[candidate.arm_index];
1122 let candidate_source_info = self.source_info(candidate.span);
1126 candidate_source_info,
1127 TerminatorKind::Goto {
1128 target: candidate.pre_binding_block,
1132 block = self.cfg.start_new_block();
1134 candidate.pre_binding_block,
1135 candidate_source_info,
// FalseEdges keeps an "imaginary" edge to the next candidate's
// pre-binding block so later analyses still consider it reachable.
// NOTE(review): semantics inferred from the variant/field names —
// confirm against the `TerminatorKind::FalseEdges` documentation.
1136 TerminatorKind::FalseEdges {
1138 imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
1142 // rust-lang/rust#27282: The `autoref` business deserves some
1143 // explanation here.
1145 // The intent of the `autoref` flag is that when it is true,
1146 // then any pattern bindings of type T will map to a `&T`
1147 // within the context of the guard expression, but will
1148 // continue to map to a `T` in the context of the arm body. To
1149 // avoid surfacing this distinction in the user source code
1150 // (which would be a severe change to the language and require
1151 // far more revision to the compiler), when `autoref` is true,
1152 // then any occurrence of the identifier in the guard
1153 // expression will automatically get a deref op applied to it.
1155 // So an input like:
1158 // let place = Foo::new();
1159 // match place { foo if inspect(foo)
1160 // => feed(foo), ... }
1163 // will be treated as if it were really something like:
1166 // let place = Foo::new();
1167 // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
1168 // => { let tmp2 = place; feed(tmp2) }, ... }
1170 // And an input like:
1173 // let place = Foo::new();
1174 // match place { ref mut foo if inspect(foo)
1175 // => feed(foo), ... }
1178 // will be treated as if it were really something like:
1181 // let place = Foo::new();
1182 // match place { Foo { .. } if { let tmp1 = & &mut place; inspect(*tmp1) }
1183 // => { let tmp2 = &mut place; feed(tmp2) }, ... }
1186 // In short, any pattern binding will always look like *some*
1187 // kind of `&T` within the guard at least in terms of how the
1188 // MIR-borrowck views it, and this will ensure that guard
1189 // expressions cannot mutate the match inputs via such
1190 // bindings. (It also ensures that guard expressions can at
1191 // most *copy* values from such bindings; non-Copy things
1192 // cannot be moved via pattern bindings in guard expressions.)
1196 // Implementation notes (under assumption `autoref` is true).
1198 // To encode the distinction above, we must inject the
1199 // temporaries `tmp1` and `tmp2`.
1201 // There are two cases of interest: binding by-value, and binding by-ref.
1203 // 1. Binding by-value: Things are simple.
1205 // * Establishing `tmp1` creates a reference into the
1206 // matched place. This code is emitted by
1207 // bind_matched_candidate_for_guard.
1209 // * `tmp2` is only initialized "lazily", after we have
1210 // checked the guard. Thus, the code that can trigger
1211 // moves out of the candidate can only fire after the
1212 // guard evaluated to true. This initialization code is
1213 // emitted by bind_matched_candidate_for_arm.
1215 // 2. Binding by-reference: Things are tricky.
1217 // * Here, the guard expression wants a `&&` or `&&mut`
1218 // into the original input. This means we need to borrow
1219 // a reference that we do not immediately have at hand
1220 // (because all we have is the places associated with the
1221 // match input itself; it is up to us to create a place
1222 // holding a `&` or `&mut` that we can then borrow).
// `autoref` is the feature switch for the implicit-refs-in-guards
// scheme explained in the long comment above.
1224 let autoref = self.hir
1226 .all_pat_vars_are_implicit_refs_within_guards();
// Guarded arm: first create the guard-visible bindings (references),
// evaluate the guard, and only bind for the arm body once the guard
// has passed. Unguarded arm (else branch below): bind immediately.
1227 if let Some(guard) = candidate.guard {
1229 self.bind_matched_candidate_for_guard(
1231 candidate.pat_index,
1232 &candidate.bindings,
1234 let guard_frame = GuardFrame {
1238 .map(|b| GuardFrameLocal::new(b.var_id, b.binding_mode))
1241 debug!("Entering guard building context: {:?}", guard_frame);
1242 self.guard_context.push(guard_frame);
1244 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
1247 // the block to branch to if the guard fails; if there is no
1248 // guard, this block is simply unreachable
1249 let guard = match guard {
1250 Guard::If(e) => self.hir.mirror(e),
1252 let source_info = self.source_info(guard.span);
1253 let cond = unpack!(block = self.as_local_operand(block, guard));
// Pop the guard frame pushed above — we are done lowering the
// guard expression itself.
1255 let guard_frame = self.guard_context.pop().unwrap();
1257 "Exiting guard building context with locals: {:?}",
1262 let false_edge_block = self.cfg.start_new_block();
1264 // We want to ensure that the matched candidates are bound
1265 // after we have confirmed this candidate *and* any
1266 // associated guard; Binding them on `block` is too soon,
1267 // because that would be before we've checked the result
1270 // But binding them on `arm_block` is *too late*, because
1271 // then all of the candidates for a single arm would be
1272 // bound in the same place, that would cause a case like:
1276 // (mut x, 1) | (2, mut x) if { true } => { ... }
1277 // ... // ^^^^^^^ (this is `arm_block`)
1281 // would yield a `arm_block` something like:
1284 // StorageLive(_4); // _4 is `x`
1285 // _4 = &mut (_1.0: i32); // this is handling `(mut x, 1)` case
1286 // _4 = &mut (_1.1: i32); // this is handling `(2, mut x)` case
1289 // and that is clearly not correct.
// Branch on the guard result: true -> `post_guard_block` (bind for
// the arm body, then jump to the arm), false -> `false_edge_block`.
1290 let post_guard_block = self.cfg.start_new_block();
1294 TerminatorKind::if_(self.hir.tcx(), cond, post_guard_block, false_edge_block),
1298 self.bind_matched_candidate_for_arm_body(post_guard_block, &candidate.bindings);
1304 TerminatorKind::Goto { target: arm_block },
// Guard-failure continuation: `otherwise` is the real target of the
// false-edge path. NOTE(review): presumably this block is returned
// (`Some(otherwise)`) so the caller resumes matching the remaining
// candidates from it — confirm at the (not shown here) return site.
1307 let otherwise = self.cfg.start_new_block();
1312 TerminatorKind::FalseEdges {
1313 real_target: otherwise,
1314 imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
1319 // (Here, it is not too early to bind the matched
1320 // candidate on `block`, because there is no guard result
1321 // that we have to inspect before we bind them.)
1322 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
1325 candidate_source_info,
1326 TerminatorKind::Goto { target: arm_block },
1332 /// Append `AscribeUserType` statements onto the end of `block`
1333 /// for each ascription.
1334 ///
1335 /// NOTE(review): `AscribeUserType` records user-written type
1336 /// annotations for later type/borrow checking and should have no
1337 /// runtime effect — confirm against the `StatementKind` docs.
1334 fn ascribe_types<'pat>(
1337 ascriptions: &[Ascription<'tcx>],
1339 for ascription in ascriptions {
1340 let source_info = self.source_info(ascription.span);
1343 "adding user ascription at span {:?} of place {:?} and {:?}",
// Canonicalize the user-written annotation against the actual type
// of the ascription's source place, registering it in the builder's
// `canonical_user_type_annotations` table.
1349 let user_ty = box ascription.user_ty.clone().user_ty(
1350 &mut self.canonical_user_type_annotations,
1351 ascription.source.ty(&self.local_decls, self.hir.tcx()).to_ty(self.hir.tcx()),
1358 kind: StatementKind::AscribeUserType(
1359 ascription.source.clone(),
1360 ascription.variance,
1368 // Only called when all_pat_vars_are_implicit_refs_within_guards,
1369 // and thus all code/comments assume we are in that context.
//
// Creates the guard-visible bindings for one candidate pattern:
// every pattern variable is exposed to the guard as a shared
// reference (`RefWithinGuard`), and by-ref bindings additionally get
// an intermediate per-pattern borrow temp (`ValWithinGuard`).
1370 fn bind_matched_candidate_for_guard(
1374 bindings: &[Binding<'tcx>],
1377 "bind_matched_candidate_for_guard(block={:?}, pat_index={:?}, bindings={:?})",
1378 block, pat_index, bindings
1381 // Assign each of the bindings. Since we are binding for a
1382 // guard expression, this will never trigger moves out of the
1384 let re_erased = self.hir.tcx().types.re_erased;
1385 for binding in bindings {
1386 let source_info = self.source_info(binding.span);
1388 // For each pattern ident P of type T, `ref_for_guard` is
1389 // a reference R: &T pointing to the location matched by
1390 // the pattern, and every occurrence of P within a guard
1393 self.storage_live_binding(block, binding.var_id, binding.span, RefWithinGuard);
1394 // Question: Why schedule drops if bindings are all
1395 // shared-&'s? Answer: Because schedule_drop_for_binding
1396 // also emits StorageDead's for those locals.
1397 self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
1398 match binding.binding_mode {
// By-value binding: the guard sees a plain shared borrow of the
// matched place.
1399 BindingMode::ByValue => {
1400 let rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, binding.source.clone());
1402 .push_assign(block, source_info, &ref_for_guard, rvalue);
1404 BindingMode::ByRef(borrow_kind) => {
1405 // Tricky business: For `ref id` and `ref mut id`
1406 // patterns, we want `id` within the guard to
1407 // correspond to a temp of type `& &T` or `& &mut
1408 // T` (i.e., a "borrow of a borrow") that is
1409 // implicitly dereferenced.
1411 // To borrow a borrow, we need that inner borrow
1412 // to point to. So, create a temp for the inner
1413 // borrow, and then take a reference to it.
1415 // Note: the temp created here is *not* the one
1416 // used by the arm body itself. This eases
1417 // observing two-phase borrow restrictions.
1418 let val_for_guard = self.storage_live_binding(
1422 ValWithinGuard(pat_index),
1424 self.schedule_drop_for_binding(
1427 ValWithinGuard(pat_index),
1430 // rust-lang/rust#27282: We reuse the two-phase
1431 // borrow infrastructure so that the mutable
1432 // borrow (whose mutability is *unusable* within
1433 // the guard) does not conflict with the implicit
1434 // borrow of the whole match input. See additional
1435 // discussion on rust-lang/rust#49870.
1436 let borrow_kind = match borrow_kind {
1438 | BorrowKind::Shallow
1439 | BorrowKind::Unique => borrow_kind,
1440 BorrowKind::Mut { .. } => BorrowKind::Mut {
1441 allow_two_phase_borrow: true,
// Inner borrow first (`val_for_guard`), then a shared borrow of it
// (`ref_for_guard`) — the "borrow of a borrow" described above.
1444 let rvalue = Rvalue::Ref(re_erased, borrow_kind, binding.source.clone());
1446 .push_assign(block, source_info, &val_for_guard, rvalue);
1447 let rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, val_for_guard);
1449 .push_assign(block, source_info, &ref_for_guard, rvalue);
// Binds each pattern variable for use in the arm body itself
// (`OutsideGuard` locals): by-value bindings copy/move out of the
// matched place, by-ref bindings borrow it with the requested kind.
1455 fn bind_matched_candidate_for_arm_body(
1458 bindings: &[Binding<'tcx>],
// NOTE(review): this debug format string is missing its closing ")"
// — harmless for logging, but could be tidied upstream.
1461 "bind_matched_candidate_for_arm_body(block={:?}, bindings={:?}",
1466 let re_erased = self.hir.tcx().types.re_erased;
1467 // Assign each of the bindings. This may trigger moves out of the candidate.
1468 for binding in bindings {
1469 let source_info = self.source_info(binding.span);
// Declare storage and schedule the eventual StorageDead/drop for the
// arm-body local before initializing it.
1471 self.storage_live_binding(block, binding.var_id, binding.span, OutsideGuard);
1472 self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
1473 let rvalue = match binding.binding_mode {
1474 BindingMode::ByValue => {
1475 Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
1477 BindingMode::ByRef(borrow_kind) => {
1478 Rvalue::Ref(re_erased, borrow_kind, binding.source.clone())
1481 self.cfg.push_assign(block, source_info, &local, rvalue);
1485 /// Each binding (`ref mut var`/`ref var`/`mut var`/`var`, where
1486 /// the bound `var` has type `T` in the arm body) in a pattern
1487 /// maps to `2+N` locals. The first local is a binding for
1488 /// occurrences of `var` in the guard, which will all have type
1489 /// `&T`. The N locals are bindings for the `T` that is referenced
1490 /// by the first local; they are not used outside of the
1491 /// guard. The last local is a binding for occurrences of `var` in
1492 /// the arm body, which will have type `T`.
1494 /// The reason we have N locals rather than just 1 is to
1495 /// accommodate rust-lang/rust#51348: If the arm has N candidate
1496 /// patterns, then in general they can correspond to distinct
1497 /// parts of the matched data, and we want them to be distinct
1498 /// temps in order to simplify checks performed by our internal
1499 /// leveraging of two-phase borrows).
1502 source_info: SourceInfo,
1503 visibility_scope: SourceScope,
1504 mutability: Mutability,
1507 num_patterns: usize,
1510 user_ty: UserTypeProjections<'tcx>,
1511 has_guard: ArmHasGuard,
1512 opt_match_place: Option<(Option<Place<'tcx>>, Span)>,
1516 "declare_binding(var_id={:?}, name={:?}, mode={:?}, var_ty={:?}, \
1517 visibility_scope={:?}, source_info={:?})",
1518 var_id, name, mode, var_ty, visibility_scope, source_info
1521 let tcx = self.hir.tcx();
// Translate the HAIR binding mode into the `ty::BindingMode`
// recorded on the local's `VarBindingForm`.
1522 let binding_mode = match mode {
1523 BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
1524 BindingMode::ByRef(_) => ty::BindingMode::BindByReference(mutability.into()),
1526 debug!("declare_binding: user_ty={:?}", user_ty);
// The "real" local: the `T`-typed variable the arm body reads as
// `var`.
1527 let local = LocalDecl::<'tcx> {
1535 is_block_tail: None,
1536 is_user_variable: Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
1538 // hypothetically, `visit_bindings` could try to unzip
1539 // an outermost hir::Ty as we descend, matching up
1540 // idents in pat; but complex w/ unclear UI payoff.
1541 // Instead, just abandon providing diagnostic info.
1547 let for_arm_body = self.local_decls.push(local.clone());
// With a guard (under the implicit-refs-in-guards rules), also
// allocate one value temp per candidate pattern plus a single `&T`
// local for guard-visible occurrences of `var`; otherwise a single
// local suffices.
1548 let locals = if has_guard.0 && tcx.all_pat_vars_are_implicit_refs_within_guards() {
1549 let mut vals_for_guard = Vec::with_capacity(num_patterns);
1550 for _ in 0..num_patterns {
1551 let val_for_guard_idx = self.local_decls.push(LocalDecl {
1552 // This variable isn't mutated but has a name, so has to be
1553 // immutable to avoid the unused mut lint.
1554 mutability: Mutability::Not,
1557 vals_for_guard.push(val_for_guard_idx);
1559 let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
1560 // See previous comment.
1561 mutability: Mutability::Not,
1562 ty: tcx.mk_imm_ref(tcx.types.re_erased, var_ty),
1563 user_ty: UserTypeProjections::none(),
1567 // FIXME: should these secretly injected ref_for_guard's be marked as `internal`?
1569 is_block_tail: None,
1570 is_user_variable: Some(ClearCrossCrate::Set(BindingForm::RefForGuard)),
1572 LocalsForNode::ForGuard {
1578 LocalsForNode::One(for_arm_body)
1580 debug!("declare_binding: vars={:?}", locals);
// Record the var -> locals mapping so later lookups (guard vs arm
// body) resolve to the right local.
1581 self.var_indices.insert(var_id, locals);
1584 // Determine the fake borrows that are needed to ensure that the place
1585 // will evaluate to the same thing until an arm has been chosen.
1586 fn add_fake_borrows<'pat>(
1588 pre_binding_blocks: &[(BasicBlock, Span)],
1589 fake_borrows: FxHashMap<Place<'tcx>, BorrowKind>,
1590 source_info: SourceInfo,
1591 start_block: BasicBlock,
1593 let tcx = self.hir.tcx();
1595 debug!("add_fake_borrows pre_binding_blocks = {:?}, fake_borrows = {:?}",
1596 pre_binding_blocks, fake_borrows);
1598 let mut all_fake_borrows = Vec::with_capacity(fake_borrows.len());
1600 // Insert a Shallow borrow of the prefixes of any fake borrows.
1601 for (place, borrow_kind) in fake_borrows
1604 let mut prefix_cursor = &place;
1605 while let Place::Projection(box Projection { base, elem }) = prefix_cursor {
1606 if let ProjectionElem::Deref = elem {
1607 // Insert a shallow borrow after a deref. For other
1608 // projections the borrow of prefix_cursor will
1609 // conflict with any mutation of base.
1610 all_fake_borrows.push((base.clone(), BorrowKind::Shallow));
1612 prefix_cursor = base;
1616 all_fake_borrows.push((place, borrow_kind));
1619 // Deduplicate and ensure a deterministic order.
1620 all_fake_borrows.sort();
1621 all_fake_borrows.dedup();
1623 debug!("add_fake_borrows all_fake_borrows = {:?}", all_fake_borrows);
1625 // Add fake borrows to the start of the match and reads of them before
1626 // the start of each arm.
1627 let mut borrowed_input_temps = Vec::with_capacity(all_fake_borrows.len());
1629 for (matched_place, borrow_kind) in all_fake_borrows {
1630 let borrowed_input =
1631 Rvalue::Ref(tcx.types.re_erased, borrow_kind, matched_place.clone());
1632 let borrowed_input_ty = borrowed_input.ty(&self.local_decls, tcx);
1633 let borrowed_input_temp = self.temp(borrowed_input_ty, source_info.span);
1634 self.cfg.push_assign(
1637 &borrowed_input_temp,
1640 borrowed_input_temps.push(borrowed_input_temp);
1643 // FIXME: This could be a lot of reads (#fake borrows * #patterns).
1644 // The false edges that we currently generate would allow us to only do
1645 // this on the last Candidate, but it's possible that there might not be
1646 // so many false edges in the future, so we read for all Candidates for
1648 // Another option would be to make our own block and add our own false
1650 if tcx.emit_read_for_match() {
1651 for &(pre_binding_block, span) in pre_binding_blocks {
1652 let pattern_source_info = self.source_info(span);
1653 for temp in &borrowed_input_temps {
1654 self.cfg.push(pre_binding_block, Statement {
1655 source_info: pattern_source_info,
1656 kind: StatementKind::FakeRead(
1657 FakeReadCause::ForMatchGuard,