1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code related to match expressions. These are sufficiently complex
12 //! to warrant their own module and submodules. :) This main module
13 //! includes the high-level algorithm, the submodules contain the logic for handling specific kinds of tests.
16 use build::{BlockAnd, BlockAndExtension, Builder};
17 use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
18 use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
19 use build::scope::{CachedBlock, DropKind};
20 use rustc_data_structures::fx::FxHashMap;
21 use rustc_data_structures::bitvec::BitVector;
22 use rustc::ty::{self, Ty};
26 use syntax::ast::{Name, NodeId};
29 // helper functions, broken out by category:
34 /// ArmHasGuard is isomorphic to a boolean flag. It indicates whether
35 /// a match arm has a guard expression attached to it.
// A newtype is used instead of a bare `bool` so call sites read as
// `ArmHasGuard(true/false)` rather than an anonymous boolean argument.
36 #[derive(Copy, Clone, Debug)]
37 pub(crate) struct ArmHasGuard(pub bool);
39 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Lowers a `match` expression into MIR: evaluates the discriminant into a
/// place, builds the decision tree over all arms' patterns (one candidate per
/// pattern), lowers each arm body into `destination`, and joins every arm on
/// a single `end_block`, which becomes the continuation of the `match`.
40 pub fn match_expr(&mut self,
41 destination: &Place<'tcx>,
43 mut block: BasicBlock,
44 discriminant: ExprRef<'tcx>,
47 let tcx = self.hir.tcx();
48 let discriminant_span = discriminant.span();
49 let discriminant_place = unpack!(block = self.as_place(block, discriminant));
51 // Matching on a `discriminant_place` with an uninhabited type doesn't
52 // generate any memory reads by itself, and so if the place "expression"
53 // contains unsafe operations like raw pointer dereferences or union
54 // field projections, we wouldn't know to require an `unsafe` block
55 // around a `match` equivalent to `std::intrinsics::unreachable()`.
56 // See issue #47412 for this hole being discovered in the wild.
58 // HACK(eddyb) Work around the above issue by adding a dummy inspection
59 // of `discriminant_place`, specifically by applying `Rvalue::Discriminant`
60 // (which will work regardless of type) and storing the result in a temp.
62 // NOTE: Under NLL, the above issue should no longer occur because it
63 // injects a borrow of the matched input, which should have the same effect
64 // as eddyb's hack. Once NLL is the default, we can remove the hack.
66 let dummy_source_info = self.source_info(span);
67 let dummy_access = Rvalue::Discriminant(discriminant_place.clone());
68 let dummy_ty = dummy_access.ty(&self.local_decls, tcx);
69 let dummy_temp = self.temp(dummy_ty, dummy_source_info.span);
70 self.cfg.push_assign(block, dummy_source_info, &dummy_temp, dummy_access);
72 let source_info = self.source_info(span);
// Optionally take a shared borrow of the whole match input up front, so that
// the NLL borrow checker sees the input as borrowed for the duration of the
// match (used below for the `ReadForMatch` fake reads).
73 let borrowed_input_temp = if tcx.generate_borrow_of_any_match_input() {
74 // The region is unknown at this point; we rely on NLL
75 // inference to find an appropriate one. Therefore you can
76 // only use this when NLL is turned on.
77 assert!(tcx.use_mir_borrowck());
79 Rvalue::Ref(tcx.types.re_empty, BorrowKind::Shared, discriminant_place.clone());
80 let borrowed_input_ty = borrowed_input.ty(&self.local_decls, tcx);
81 let borrowed_input_temp = self.temp(borrowed_input_ty, span);
82 self.cfg.push_assign(block, source_info, &borrowed_input_temp, borrowed_input);
83 Some(borrowed_input_temp)
88 let mut arm_blocks = ArmBlocks {
90 .map(|_| self.cfg.start_new_block())
94 // Get the arm bodies and their scopes, while declaring bindings.
95 let arm_bodies: Vec<_> = arms.iter().map(|arm| {
96 // BUG: use arm lint level
97 let body = self.hir.mirror(arm.body.clone());
98 let scope = self.declare_bindings(None, body.span,
101 ArmHasGuard(arm.guard.is_some()),
102 Some((Some(&discriminant_place), discriminant_span)));
103 (body, scope.unwrap_or(self.source_scope))
106 // create one pre-binding block per candidate (plus a trailing sentinel),
// so candidates can be linked to each other via false edges
107 let candidate_count = arms.iter().fold(0, |ac, c| ac + c.patterns.len());
108 let pre_binding_blocks: Vec<_> = (0..candidate_count + 1)
109 .map(|_| self.cfg.start_new_block()).collect();
111 // assemble a list of candidates: there is one candidate per
112 // pattern, which means there may be more than one candidate
113 // *per arm*. These candidates are kept sorted such that the
114 // highest priority candidate comes first in the list.
115 // (i.e. same order as in source)
117 let candidates: Vec<_> =
120 .flat_map(|(arm_index, arm)| {
122 .map(move |pat| (arm_index, pat, arm.guard.clone()))
124 .zip(pre_binding_blocks.iter().zip(pre_binding_blocks.iter().skip(1)))
125 .map(|((arm_index, pattern, guard),
126 (pre_binding_block, next_candidate_pre_binding_block))| {
128 if let (true, Some(borrow_temp)) = (tcx.emit_read_for_match(),
129 borrowed_input_temp.clone()) {
130 // inject a fake read of the borrowed input at
131 // the start of each arm's pattern testing
134 // This should ensure that you cannot change
135 // the variant for an enum while you are in
136 // the midst of matching on it.
138 self.cfg.push(*pre_binding_block, Statement {
140 kind: StatementKind::ReadForMatch(borrow_temp.clone()),
144 // One might ask: why not build up the match pair such that it
145 // matches via `borrowed_input_temp.deref()` instead of
146 // using the `discriminant_place` directly, as it is doing here?
148 // The basic answer is that if you do that, then you end up with
149 // accesses to a shared borrow of the input and that conflicts with
150 // any arms that look like e.g.
154 // ... /* mutate `foo` in arm body */ ...
158 // (Perhaps we could further revise the MIR
159 // construction here so that it only does a
160 // shared borrow at the outset and delays doing
161 // the mutable borrow until after the pattern is
162 // matched *and* the guard (if any) for the arm
167 match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
171 pre_binding_block: *pre_binding_block,
172 next_candidate_pre_binding_block: *next_candidate_pre_binding_block,
// The trailing sentinel pre-binding block is never entered normally;
// terminate it as unreachable.
177 let outer_source_info = self.source_info(span);
178 self.cfg.terminate(*pre_binding_blocks.last().unwrap(),
179 outer_source_info, TerminatorKind::Unreachable);
181 // this will generate code to test discriminant_place and
182 // branch to the appropriate arm block
183 let otherwise = self.match_candidates(span, &mut arm_blocks, candidates, block);
185 if !otherwise.is_empty() {
186 // All matches are exhaustive. However, because some matches
187 // only have exponentially-large exhaustive decision trees, we
188 // sometimes generate an inexhaustive decision tree.
190 // In that case, the inexhaustive tips of the decision tree
191 // can't be reached - terminate them with an `unreachable`.
192 let source_info = self.source_info(span);
194 let mut otherwise = otherwise;
196 otherwise.dedup(); // variant switches can introduce duplicate target blocks
197 for block in otherwise {
198 self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);
202 // all the arm blocks will rejoin here
203 let end_block = self.cfg.start_new_block();
205 let outer_source_info = self.source_info(span);
206 for (arm_index, (body, source_scope)) in arm_bodies.into_iter().enumerate() {
207 let mut arm_block = arm_blocks.blocks[arm_index];
208 // Re-enter the source scope we created the bindings in.
209 self.source_scope = source_scope;
210 unpack!(arm_block = self.into(destination, arm_block, body));
211 self.cfg.terminate(arm_block, outer_source_info,
212 TerminatorKind::Goto { target: end_block });
// Restore the scope of the `match` itself after lowering all arms.
214 self.source_scope = outer_source_info.scope;
/// Emits a `UserAssertTy` statement that ties the user-written type
/// ascription found in the typeck tables under `hir_id` to the local
/// backing the binding `var`. Does nothing when the assertion has been
/// disabled via `-Z disable-nll-user-type-assert`, or when the tables
/// record no user-provided type for `hir_id`.
219 pub fn user_assert_ty(&mut self, block: BasicBlock, hir_id: hir::HirId,
220 var: NodeId, span: Span) {
221 if self.hir.tcx().sess.opts.debugging_opts.disable_nll_user_type_assert { return; }
// The assertion is attached to the binding's `OutsideGuard` local.
223 let local_id = self.var_local_id(var, OutsideGuard);
224 let source_info = self.source_info(span);
226 debug!("user_assert_ty: local_id={:?}", hir_id.local_id);
227 if let Some(c_ty) = self.hir.tables.user_provided_tys().get(hir_id) {
228 debug!("user_assert_ty: c_ty={:?}", c_ty);
229 self.cfg.push(block, Statement {
231 kind: StatementKind::UserAssertTy(*c_ty, local_id),
/// Lowers `let <irrefutable_pat> = <initializer>`. The common case of a
/// simple by-value binding (`let x = ...`) is special-cased: the initializer
/// is written directly into the binding's place, avoiding a temporary.
/// All other irrefutable patterns evaluate the initializer to a place and
/// defer to `place_into_pattern`.
236 pub fn expr_into_pattern(&mut self,
237 mut block: BasicBlock,
238 ty: Option<hir::HirId>,
239 irrefutable_pat: Pattern<'tcx>,
240 initializer: ExprRef<'tcx>)
242 // optimize the case of `let x = ...`
243 match *irrefutable_pat.kind {
244 PatternKind::Binding { mode: BindingMode::ByValue,
246 subpattern: None, .. } => {
247 let place = self.storage_live_binding(block, var, irrefutable_pat.span,
// If the `let` carried a user-written type ascription, assert it
// against the binding before initialization.
250 if let Some(ty) = ty {
251 self.user_assert_ty(block, ty, var, irrefutable_pat.span);
254 unpack!(block = self.into(&place, block, initializer));
255 self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
// General case: evaluate the initializer into a place, then bind the
// pattern against that place.
259 let place = unpack!(block = self.as_place(block, initializer));
260 self.place_into_pattern(block, irrefutable_pat, &place, true)
/// Binds an irrefutable pattern against an already-evaluated place.
/// Builds a single dummy candidate, simplifies it fully into bindings
/// (no tests can remain since the pattern is irrefutable), optionally
/// records the matched place on the user-variable metadata
/// (`set_match_place`), and then emits the binding assignments.
265 pub fn place_into_pattern(&mut self,
266 mut block: BasicBlock,
267 irrefutable_pat: Pattern<'tcx>,
268 initializer: &Place<'tcx>,
269 set_match_place: bool)
271 // create a dummy candidate
272 let mut candidate = Candidate {
273 span: irrefutable_pat.span,
274 match_pairs: vec![MatchPair::new(initializer.clone(), &irrefutable_pat)],
278 // since we don't call `match_candidates`, the "next" fields are unused
280 pre_binding_block: block,
281 next_candidate_pre_binding_block: block
284 // Simplify the candidate. Since the pattern is irrefutable, this should
285 // always convert all match-pairs into bindings.
286 unpack!(block = self.simplify_candidate(block, &mut candidate));
288 if !candidate.match_pairs.is_empty() {
289 span_bug!(candidate.match_pairs[0].pattern.span,
290 "match pairs {:?} remaining after simplifying \
291 irrefutable pattern",
292 candidate.match_pairs);
295 // for matches and function arguments, the place that is being matched
296 // can be set when creating the variables. But the place for
297 // let PATTERN = ... might not even exist until we do the assignment.
298 // so we set it here instead
300 for binding in &candidate.bindings {
301 let local = self.var_local_id(binding.var_id, OutsideGuard);
303 if let Some(ClearCrossCrate::Set(BindingForm::Var(
304 VarBindingForm {opt_match_place: Some((ref mut match_place, _)), .. }
305 ))) = self.local_decls[local].is_user_variable
307 *match_place = Some(initializer.clone());
// `let` bindings must always correspond to user variables.
309 bug!("Let binding to non-user variable.")
314 // now apply the bindings, which will also declare the variables
315 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
320 /// Declares the bindings of the given pattern and returns the visibility scope
321 /// for the bindings in this pattern, if such a scope had to be created.
322 /// NOTE: Declaring the bindings should always be done in their drop scope.
323 pub fn declare_bindings(&mut self,
324 mut visibility_scope: Option<SourceScope>,
326 lint_level: LintLevel,
327 pattern: &Pattern<'tcx>,
328 has_guard: ArmHasGuard,
329 opt_match_place: Option<(Option<&Place<'tcx>>, Span)>)
330 -> Option<SourceScope> {
331 assert!(!(visibility_scope.is_some() && lint_level.is_explicit()),
332 "can't have both a visibility and a lint scope at the same time");
333 let mut scope = self.source_scope;
// Walk every binding in the pattern; the first binding encountered
// lazily creates the shared visibility scope.
334 self.visit_bindings(pattern, &mut |this, mutability, name, mode, var, span, ty| {
335 if visibility_scope.is_none() {
336 visibility_scope = Some(this.new_source_scope(scope_span,
337 LintLevel::Inherited,
339 // If we have lints, create a new source scope
340 // that marks the lints for the locals. See the comment
341 // on the `source_info` field for why this is needed.
342 if lint_level.is_explicit() {
344 this.new_source_scope(scope_span, lint_level, None);
347 let source_info = SourceInfo {
351 let visibility_scope = visibility_scope.unwrap();
352 this.declare_binding(source_info, visibility_scope, mutability, name, mode, var,
353 ty, has_guard, opt_match_place.map(|(x, y)| (x.cloned(), y)));
/// Emits a `StorageLive` for the local backing the binding `var`
/// (selected per `for_guard`), schedules its `StorageDead` for the
/// variable's region scope, and returns the binding's place.
358 pub fn storage_live_binding(&mut self,
365 let local_id = self.var_local_id(var, for_guard);
366 let source_info = self.source_info(span);
367 self.cfg.push(block, Statement {
369 kind: StatementKind::StorageLive(local_id)
371 let place = Place::Local(local_id);
372 let var_ty = self.local_decls[local_id].ty;
// The storage-dead point is the variable's scope from the region
// scope tree, keyed by HIR id.
373 let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
374 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
376 span, region_scope, &place, var_ty,
/// Schedules a `Drop` for the local backing the binding `var`
/// (selected per `for_guard`) at the end of the variable's region
/// scope, as determined by the region scope tree.
382 pub fn schedule_drop_for_binding(&mut self,
385 for_guard: ForGuard) {
386 let local_id = self.var_local_id(var, for_guard);
387 let var_ty = self.local_decls[local_id].ty;
388 let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
389 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
391 span, region_scope, &Place::Local(local_id), var_ty,
// No cached unwind/coroutine blocks yet for this drop.
393 cached_block: CachedBlock::default(),
/// Recursively walks `pattern`, invoking `f` once for every
/// `PatternKind::Binding` encountered (depth-first, in source order).
/// Non-binding pattern kinds merely recurse into their subpatterns;
/// leaves (constants, ranges, wildcards) contribute nothing.
398 pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
399 where F: FnMut(&mut Self, Mutability, Name, BindingMode, NodeId, Span, Ty<'tcx>)
401 match *pattern.kind {
402 PatternKind::Binding { mutability, name, mode, var, ty, ref subpattern, .. } => {
// Visit the binding itself, then anything bound via `x @ subpat`.
403 f(self, mutability, name, mode, var, pattern.span, ty);
404 if let Some(subpattern) = subpattern.as_ref() {
405 self.visit_bindings(subpattern, f);
408 PatternKind::Array { ref prefix, ref slice, ref suffix } |
409 PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
410 for subpattern in prefix.iter().chain(slice).chain(suffix) {
411 self.visit_bindings(subpattern, f);
414 PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {
416 PatternKind::Deref { ref subpattern } => {
417 self.visit_bindings(subpattern, f);
419 PatternKind::Leaf { ref subpatterns } |
420 PatternKind::Variant { ref subpatterns, .. } => {
421 for subpattern in subpatterns {
422 self.visit_bindings(&subpattern.pattern, f);
430 /// List of blocks for each arm (and potentially other metadata in the
/// future). Indexed by the candidate's `arm_index`.
433 blocks: Vec<BasicBlock>,
// A single pattern from one match arm, together with everything needed
// to decide whether it applies and where to branch when it does.
436 #[derive(Clone, Debug)]
437 pub struct Candidate<'pat, 'tcx:'pat> {
438 // span of the original pattern that gave rise to this candidate
441 // all of these must be satisfied...
442 match_pairs: Vec<MatchPair<'pat, 'tcx>>,
444 // ...these bindings established...
445 bindings: Vec<Binding<'tcx>>,
447 // ...and the guard must be evaluated...
448 guard: Option<ExprRef<'tcx>>,
450 // ...and then we branch to arm with this index.
453 // ...and the blocks for adding false edges between candidates
454 pre_binding_block: BasicBlock,
455 next_candidate_pre_binding_block: BasicBlock,
// A variable binding discovered while simplifying a candidate's pattern.
458 #[derive(Clone, Debug)]
459 struct Binding<'tcx> {
// Mutability of the bound variable as written in the pattern
// (presumably `mut` vs. not — confirm against `declare_binding` callers).
465 mutability: Mutability,
// By-value vs. by-reference binding mode for this variable.
466 binding_mode: BindingMode<'tcx>,
// One unit of remaining matching work for a candidate: some value
// (a place) that must match the given pattern.
469 #[derive(Clone, Debug)]
470 pub struct MatchPair<'pat, 'tcx:'pat> {
474 // ... must match this pattern.
475 pattern: &'pat Pattern<'tcx>,
477 // HACK(eddyb) This is used to toggle whether a Slice pattern
478 // has had its length checked. This is only necessary because
479 // the "rest" part of the pattern right now has type &[T] and
480 // as such, it requires an Rvalue::Slice to be generated.
481 // See RFC 495 / issue #23121 for the eventual (proper) solution.
482 slice_len_checked: bool
// The kind of runtime check a decision-tree node performs on a place.
485 #[derive(Clone, Debug, PartialEq)]
486 enum TestKind<'tcx> {
487 // test the branches of enum
489 adt_def: &'tcx ty::AdtDef,
493 // test which of a set of constant values the scrutinee equals
// (`indices` maps each constant to its switch-target index)
497 indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
502 value: &'tcx ty::Const<'tcx>,
506 // test whether the value falls within an inclusive or exclusive range
514 // test length of the slice is equal to len
// A test to perform at a node of the decision tree.
522 pub struct Test<'tcx> {
// Which kind of check this is; see `TestKind` above.
524 kind: TestKind<'tcx>,
527 ///////////////////////////////////////////////////////////////////////////
528 // Main matching algorithm
530 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
531 /// The main match algorithm. It begins with a set of candidates
532 /// `candidates` and has the job of generating code to determine
533 /// which of these candidates, if any, is the correct one. The
534 /// candidates are sorted such that the first item in the list
535 /// has the highest priority. When a candidate is found to match
536 /// the value, we will generate a branch to the appropriate
537 /// block found in `arm_blocks`.
539 /// The return value is a list of "otherwise" blocks. These are
540 /// points in execution where we found that *NONE* of the
541 /// candidates apply. In principle, this means that the input
542 /// list was not exhaustive, though at present we sometimes are
543 /// not smart enough to recognize all exhaustive inputs.
545 /// It might be surprising that the input can be inexhaustive.
546 /// Indeed, initially, it is not, because all matches are
547 /// exhaustive in Rust. But during processing we sometimes divide
548 /// up the list of candidates and recurse with a non-exhaustive
549 /// list. This is important to keep the size of the generated code
550 /// under control. See `test_candidates` for more details.
551 fn match_candidates<'pat>(&mut self,
553 arm_blocks: &mut ArmBlocks,
554 mut candidates: Vec<Candidate<'pat, 'tcx>>,
555 mut block: BasicBlock)
558 debug!("matched_candidate(span={:?}, block={:?}, candidates={:?})",
559 span, block, candidates);
561 // Start by simplifying candidates. Once this process is
562 // complete, all the match pairs which remain require some
563 // form of test, whether it be a switch or pattern comparison.
564 for candidate in &mut candidates {
565 unpack!(block = self.simplify_candidate(block, candidate));
568 // The candidates are sorted by priority. Check to see
569 // whether the higher priority candidates (and hence at
570 // the front of the vec) have satisfied all their match
573 candidates.iter().take_while(|c| c.match_pairs.is_empty()).count();
574 debug!("match_candidates: {:?} candidates fully matched", fully_matched);
575 let mut unmatched_candidates = candidates.split_off(fully_matched);
// A fully-matched candidate *without* a guard always takes the arm, so
// any fully-matched candidates after the first guardless one can never
// be reached through this path.
577 let fully_matched_with_guard =
578 candidates.iter().take_while(|c| c.guard.is_some()).count();
580 let unreachable_candidates = if fully_matched_with_guard + 1 < candidates.len() {
581 candidates.split_off(fully_matched_with_guard + 1)
586 for candidate in candidates {
587 // If so, apply any bindings, test the guard (if any), and
588 // branch to the arm.
589 if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
592 // if None is returned, then any remaining candidates
593 // are unreachable (at least not through this path).
594 // Link them with false edges.
595 debug!("match_candidates: add false edges for unreachable {:?} and unmatched {:?}",
596 unreachable_candidates, unmatched_candidates);
597 for candidate in unreachable_candidates {
598 let source_info = self.source_info(candidate.span);
599 let target = self.cfg.start_new_block();
600 if let Some(otherwise) = self.bind_and_guard_matched_candidate(target,
603 self.cfg.terminate(otherwise, source_info, TerminatorKind::Unreachable);
607 if unmatched_candidates.is_empty() {
610 let target = self.cfg.start_new_block();
611 return self.match_candidates(span, arm_blocks, unmatched_candidates, target);
616 // If there are no candidates that still need testing, we're done.
617 // Since all matches are exhaustive, execution should never reach this point.
618 if unmatched_candidates.is_empty() {
622 // Test candidates where possible.
623 let (otherwise, tested_candidates) =
624 self.test_candidates(span, arm_blocks, &unmatched_candidates, block);
626 // If the target candidates were exhaustive, then we are done.
627 // But for borrowck, continue building the decision tree.
629 // If all candidates were sorted into `target_candidates` somewhere, then
630 // the initial set was inexhaustive.
631 let untested_candidates = unmatched_candidates.split_off(tested_candidates);
632 if untested_candidates.len() == 0 {
636 // Otherwise, let's process those remaining candidates.
637 let join_block = self.join_otherwise_blocks(span, otherwise);
638 self.match_candidates(span, arm_blocks, untested_candidates, join_block)
/// Funnels a set of "otherwise" blocks into a single continuation block:
/// deduplicates the list, reuses a lone block as-is, and otherwise makes
/// every block `Goto` a fresh join block.
641 fn join_otherwise_blocks(&mut self,
643 mut otherwise: Vec<BasicBlock>)
646 let source_info = self.source_info(span);
648 otherwise.dedup(); // variant switches can introduce duplicate target blocks
// Single block: no join needed, return it directly.
649 if otherwise.len() == 1 {
652 let join_block = self.cfg.start_new_block();
653 for block in otherwise {
654 self.cfg.terminate(block, source_info,
655 TerminatorKind::Goto { target: join_block });
661 /// This is the most subtle part of the matching algorithm. At
662 /// this point, the input candidates have been fully simplified,
663 /// and so we know that all remaining match-pairs require some
664 /// sort of test. To decide what test to do, we take the highest
665 /// priority candidate (first one in the list) and extract the
666 /// first match-pair from the list. From this we decide what kind
667 /// of test is needed using `test`, defined in the `test` module.
669 /// *Note:* taking the first match pair is somewhat arbitrary, and
670 /// we might do better here by choosing more carefully what to
673 /// For example, consider the following possible match-pairs:
675 /// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has
676 /// 2. `x @ 22` -- we will do a `SwitchInt`
677 /// 3. `x @ 3..5` -- we will do a range test
680 /// Once we know what sort of test we are going to perform, this
681 /// test may also help us with other candidates. So we walk over
682 /// the candidates (from high to low priority) and check. This
683 /// gives us, for each outcome of the test, a transformed list of
684 /// candidates. For example, if we are testing the current
685 /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
686 /// @ 22}`, then we would have a resulting candidate of `{(x.0 as
687 /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
688 /// simpler (and, in fact, irrefutable).
690 /// But there may also be candidates that the test just doesn't
691 /// apply to. The classical example involves wildcards:
694 /// # let (x, y, z) = (true, true, true);
695 /// match (x, y, z) {
696 /// (true, _, true) => true, // (0)
697 /// (_, true, _) => true, // (1)
698 /// (false, false, _) => false, // (2)
699 /// (true, _, false) => false, // (3)
703 /// In that case, after we test on `x`, there are 2 overlapping candidate
706 /// - If the outcome is that `x` is true, candidates 0, 1, and 3
707 /// - If the outcome is that `x` is false, candidates 1 and 2
709 /// Here, the traditional "decision tree" method would generate 2
710 /// separate code-paths for the 2 separate cases.
712 /// In some cases, this duplication can create an exponential amount of
713 /// code. This is most easily seen by noticing that this method terminates
714 /// with precisely the reachable arms being reachable - but that problem
715 /// is trivially NP-complete:
718 /// match (var0, var1, var2, var3, ..) {
719 /// (true, _, _, false, true, ...) => false,
720 /// (_, true, true, false, _, ...) => false,
721 /// (false, _, false, false, _, ...) => false,
727 /// Here the last arm is reachable only if there is an assignment to
728 /// the variables that does not match any of the literals. Therefore,
729 /// compilation would take an exponential amount of time in some cases.
731 /// That kind of exponential worst-case might not occur in practice, but
732 /// our simplistic treatment of constants and guards would make it occur
733 /// in very common situations - for example #29740:
737 /// "foo" if foo_guard => ...,
738 /// "bar" if bar_guard => ...,
739 /// "baz" if baz_guard => ...,
744 /// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test.
746 /// It might seem that we would end up with 2 disjoint candidate
747 /// sets, consisting of the first candidate or the other 3, but our
748 /// algorithm doesn't reason about "foo" being distinct from the other
749 /// constants; it considers the latter arms to potentially match after
750 /// both outcomes, which obviously leads to an exponential amount
753 /// To avoid these kinds of problems, our algorithm tries to ensure
754 /// the amount of generated tests is linear. When we do a k-way test,
755 /// we return an additional "unmatched" set alongside the obvious `k`
756 /// sets. When we encounter a candidate that would be present in more
757 /// than one of the sets, we put it and all candidates below it into the
758 /// "unmatched" set. This ensures these `k+1` sets are disjoint.
760 /// After we perform our test, we branch into the appropriate candidate
761 /// set and recurse with `match_candidates`. These sub-matches are
762 /// obviously inexhaustive - as we discarded our otherwise set - so
763 /// we set their continuation to do `match_candidates` on the
764 /// "unmatched" set (which is again inexhaustive).
766 /// If you apply this to the above test, you basically wind up
767 /// with an if-else-if chain, testing each candidate in turn,
768 /// which is precisely what we want.
770 /// In addition to avoiding exponential-time blowups, this algorithm
771 /// also has the nice property that each guard and arm is only generated
773 fn test_candidates<'pat>(&mut self,
775 arm_blocks: &mut ArmBlocks,
776 candidates: &[Candidate<'pat, 'tcx>],
778 -> (Vec<BasicBlock>, usize)
780 // extract the match-pair from the highest priority candidate
781 let match_pair = &candidates.first().unwrap().match_pairs[0];
782 let mut test = self.test(match_pair);
784 // most of the time, the test to perform is simply a function
785 // of the main candidate; but for a test like SwitchInt, we
786 // may want to add cases based on the candidates that are
789 TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
790 for candidate in candidates.iter() {
791 if !self.add_cases_to_switch(&match_pair.place,
800 TestKind::Switch { adt_def: _, ref mut variants} => {
801 for candidate in candidates.iter() {
802 if !self.add_variants_to_switch(&match_pair.place,
812 // perform the test, branching to one of N blocks. For each of
813 // those N possible outcomes, create a (initially empty)
814 // vector of candidates. Those are the candidates that still
815 // apply if the test has that particular outcome.
816 debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair);
817 let target_blocks = self.perform_test(block, &match_pair.place, &test);
818 let mut target_candidates: Vec<_> = (0..target_blocks.len()).map(|_| vec![]).collect();
820 // Sort the candidates into the appropriate vector in
821 // `target_candidates`. Note that at some point we may
822 // encounter a candidate where the test is not relevant; at
823 // that point, we stop sorting.
824 let tested_candidates =
826 .take_while(|c| self.sort_candidate(&match_pair.place,
829 &mut target_candidates))
831 assert!(tested_candidates > 0); // at least the last candidate ought to be tested
832 debug!("tested_candidates: {}", tested_candidates);
833 debug!("untested_candidates: {}", candidates.len() - tested_candidates);
835 // For each outcome of test, process the candidates that still
836 // apply. Collect a list of blocks where control flow will
837 // branch if one of the `target_candidate` sets is not
839 let otherwise: Vec<_> =
840 target_blocks.into_iter()
841 .zip(target_candidates)
842 .flat_map(|(target_block, target_candidates)| {
843 self.match_candidates(span,
// Return the accumulated "otherwise" blocks plus how many candidates
// this test actually consumed (the caller recurses on the rest).
850 (otherwise, tested_candidates)
853 /// Initializes each of the bindings from the candidate by
854 /// moving/copying/ref'ing the source as appropriate. Tests the
855 /// guard, if any, and then branches to the arm. Returns the block
856 /// for the case where the guard fails.
858 /// Note: we check earlier that if there is a guard, there cannot
859 /// be move bindings. This isn't really important for the
860 /// self-consistency of this fn, but the reason for it should be
861 /// clear: after we've done the assignments, if there were move
862 /// bindings, further tests would be a use-after-move (which would
863 /// in turn be detected by the borrowck code that runs on the
865 fn bind_and_guard_matched_candidate<'pat>(&mut self,
866 mut block: BasicBlock,
867 arm_blocks: &mut ArmBlocks,
868 candidate: Candidate<'pat, 'tcx>)
869 -> Option<BasicBlock> {
870 debug!("bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
873 debug_assert!(candidate.match_pairs.is_empty());
875 let arm_block = arm_blocks.blocks[candidate.arm_index];
876 let candidate_source_info = self.source_info(candidate.span);
878 self.cfg.terminate(block, candidate_source_info,
879 TerminatorKind::Goto { target: candidate.pre_binding_block });
881 block = self.cfg.start_new_block();
882 self.cfg.terminate(candidate.pre_binding_block, candidate_source_info,
883 TerminatorKind::FalseEdges {
886 vec![candidate.next_candidate_pre_binding_block],
890 // rust-lang/rust#27282: The `autoref` business deserves some
893 // The intent of the `autoref` flag is that when it is true,
894 // then any pattern bindings of type T will map to a `&T`
895 // within the context of the guard expression, but will
896 // continue to map to a `T` in the context of the arm body. To
897 // avoid surfacing this distinction in the user source code
898 // (which would be a severe change to the language and require
899 // far more revision to the compiler), when `autoref` is true,
900 // then any occurrence of the identifier in the guard
901 // expression will automatically get a deref op applied to it.
906 // let place = Foo::new();
907 // match place { foo if inspect(foo)
908 // => feed(foo), ... }
911 // will be treated as if it were really something like:
914 // let place = Foo::new();
915 // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
916 // => { let tmp2 = place; feed(tmp2) }, ... }
918 // And an input like:
921 // let place = Foo::new();
922 // match place { ref mut foo if inspect(foo)
923 // => feed(foo), ... }
926 // will be treated as if it were really something like:
929 // let place = Foo::new();
930 // match place { Foo { .. } if { let tmp1 = & &mut place; inspect(*tmp1) }
931 // => { let tmp2 = &mut place; feed(tmp2) }, ... }
934 // In short, any pattern binding will always look like *some*
935 // kind of `&T` within the guard at least in terms of how the
936 // MIR-borrowck views it, and this will ensure that guard
937 // expressions cannot mutate their the match inputs via such
938 // bindings. (It also ensures that guard expressions can at
939 // most *copy* values from such bindings; non-Copy things
940 // cannot be moved via pattern bindings in guard expressions.)
944 // Implementation notes (under assumption `autoref` is true).
946 // To encode the distinction above, we must inject the
947 // temporaries `tmp1` and `tmp2`.
949 // There are two cases of interest: binding by-value, and binding by-ref.
951 // 1. Binding by-value: Things are simple.
953 // * Establishing `tmp1` creates a reference into the
954 // matched place. This code is emitted by
955 // bind_matched_candidate_for_guard.
957 // * `tmp2` is only initialized "lazily", after we have
958 // checked the guard. Thus, the code that can trigger
959 // moves out of the candidate can only fire after the
960 // guard evaluated to true. This initialization code is
961 // emitted by bind_matched_candidate_for_arm.
963 // 2. Binding by-reference: Things are tricky.
965 // * Here, the guard expression wants a `&&` or `&&mut`
966 // into the original input. This means we need to borrow
967 // a reference that we do not immediately have at hand
968 // (because all we have is the places associated with the
969 // match input itself; it is up to us to create a place
970 // holding a `&` or `&mut` that we can then borrow).
972 let autoref = self.hir.tcx().all_pat_vars_are_implicit_refs_within_guards();
973 if let Some(guard) = candidate.guard {
975 self.bind_matched_candidate_for_guard(block, &candidate.bindings);
976 let guard_frame = GuardFrame {
977 locals: candidate.bindings.iter()
978 .map(|b| GuardFrameLocal::new(b.var_id, b.binding_mode))
981 debug!("Entering guard building context: {:?}", guard_frame);
982 self.guard_context.push(guard_frame);
984 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
987 // the block to branch to if the guard fails; if there is no
988 // guard, this block is simply unreachable
989 let guard = self.hir.mirror(guard);
990 let source_info = self.source_info(guard.span);
991 let cond = unpack!(block = self.as_local_operand(block, guard));
993 let guard_frame = self.guard_context.pop().unwrap();
994 debug!("Exiting guard building context with locals: {:?}", guard_frame);
997 let false_edge_block = self.cfg.start_new_block();
999 // We want to ensure that the matched candidates are bound
1000 // after we have confirmed this candidate *and* any
1001 // associated guard; Binding them on `block` is too soon,
1002 // because that would be before we've checked the result
1005 // But binding them on `arm_block` is *too late*, because
1006 // then all of the candidates for a single arm would be
1007 // bound in the same place, which would cause a case like:
1011 // (mut x, 1) | (2, mut x) if { true } => { ... }
1012 // ... // ^^^^^^^ (this is `arm_block`)
1016 // would yield a `arm_block` something like:
1019 // StorageLive(_4); // _4 is `x`
1020 // _4 = &mut (_1.0: i32); // this is handling `(mut x, 1)` case
1021 // _4 = &mut (_1.1: i32); // this is handling `(2, mut x)` case
1024 // and that is clearly not correct.
1025 let post_guard_block = self.cfg.start_new_block();
1026 self.cfg.terminate(block, source_info,
1027 TerminatorKind::if_(self.hir.tcx(), cond, post_guard_block,
1031 self.bind_matched_candidate_for_arm_body(post_guard_block, &candidate.bindings);
1034 self.cfg.terminate(post_guard_block, source_info,
1035 TerminatorKind::Goto { target: arm_block });
1037 let otherwise = self.cfg.start_new_block();
1039 self.cfg.terminate(false_edge_block, source_info,
1040 TerminatorKind::FalseEdges {
1041 real_target: otherwise,
1043 vec![candidate.next_candidate_pre_binding_block],
1047 // (Here, it is not too early to bind the matched
1048 // candidate on `block`, because there is no guard result
1049 // that we have to inspect before we bind them.)
1050 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
1051 self.cfg.terminate(block, candidate_source_info,
1052 TerminatorKind::Goto { target: arm_block });
1057 // Only called when all_pat_vars_are_implicit_refs_within_guards,
1058 // and thus all code/comments assume we are in that context.
//
// Emits, into `block`, the bindings a *guard expression* will see.
// Each pattern variable is exposed to the guard only through a shared
// reference (`ref_for_guard`), never as the value itself, so the guard
// can neither move out of nor mutate the match input (see the long
// discussion of `tmp1`/`tmp2` earlier in this file).
//
// NOTE(review): this extract appears to elide some original lines
// (e.g. a `block: BasicBlock` parameter and several closing braces),
// so the text below is not the complete function as compiled.
1059 fn bind_matched_candidate_for_guard(&mut self,
1061 bindings: &[Binding<'tcx>]) {
1062 debug!("bind_matched_candidate_for_guard(block={:?}, bindings={:?})",
1065 // Assign each of the bindings. Since we are binding for a
1066 // guard expression, this will never trigger moves out of the
// candidate: the guard only ever receives references.
//
// `re_empty` is the region used for the synthetic shared borrows
// handed to the guard; precise regions are computed later by borrowck.
1068 let re_empty = self.hir.tcx().types.re_empty;
1069 for binding in bindings {
1070 let source_info = self.source_info(binding.span);
1072 // For each pattern ident P of type T, `ref_for_guard` is
1073 // a reference R: &T pointing to the location matched by
1074 // the pattern, and every occurrence of P within a guard
// resolves to (an implicit deref of) this reference R.
1076 let ref_for_guard = self.storage_live_binding(
1077 block, binding.var_id, binding.span, RefWithinGuard);
1078 // Question: Why schedule drops if bindings are all
1079 // shared-&'s? Answer: Because schedule_drop_for_binding
1080 // also emits StorageDead's for those locals.
1081 self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
1082 match binding.binding_mode {
1083 BindingMode::ByValue => {
// By-value binding (`x` / `mut x`): the guard simply gets
// `ref_for_guard = &place` (the `tmp1` of the earlier comment).
1084 let rvalue = Rvalue::Ref(re_empty, BorrowKind::Shared, binding.source.clone());
1085 self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
1087 BindingMode::ByRef(region, borrow_kind) => {
1088 // Tricky business: For `ref id` and `ref mut id`
1089 // patterns, we want `id` within the guard to
1090 // correspond to a temp of type `& &T` or `& &mut
1091 // T` (i.e. a "borrow of a borrow") that is
1092 // implicitly dereferenced.
1094 // To borrow a borrow, we need that inner borrow
1095 // to point to. So, create a temp for the inner
1096 // borrow, and then take a reference to it.
1098 // Note: the temp created here is *not* the one
1099 // used by the arm body itself. This eases
1100 // observing two-phase borrow restrictions.
1101 let val_for_guard = self.storage_live_binding(
1102 block, binding.var_id, binding.span, ValWithinGuard);
1103 self.schedule_drop_for_binding(binding.var_id, binding.span, ValWithinGuard);
1105 // rust-lang/rust#27282: We reuse the two-phase
1106 // borrow infrastructure so that the mutable
1107 // borrow (whose mutability is *unusable* within
1108 // the guard) does not conflict with the implicit
1109 // borrow of the whole match input. See additional
1110 // discussion on rust-lang/rust#49870.
1111 let borrow_kind = match borrow_kind {
1112 BorrowKind::Shared | BorrowKind::Unique => borrow_kind,
1113 BorrowKind::Mut { .. } => BorrowKind::Mut { allow_two_phase_borrow: true },
// First, the inner borrow: `val_for_guard = &[mut] place` ...
1115 let rvalue = Rvalue::Ref(region, borrow_kind, binding.source.clone());
1116 self.cfg.push_assign(block, source_info, &val_for_guard, rvalue);
// ... then the outer one the guard reads through:
// `ref_for_guard = &val_for_guard` (the "borrow of a borrow").
1117 let rvalue = Rvalue::Ref(region, BorrowKind::Shared, val_for_guard);
1118 self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
// Binds pattern variables for use in the arm *body* (outside any
// guard): each variable gets its real local, initialized by moving,
// copying, or borrowing from the matched place. Unlike the guard
// bindings, this can move out of the match input, so it must only run
// once the candidate (and any guard) has been confirmed.
//
// NOTE(review): this extract appears to elide some original lines
// (e.g. a `block: BasicBlock` parameter and the `OutsideGuard`
// argument line of `storage_live_binding`), so the text below is not
// the complete function as compiled.
// NOTE(review): the debug! format string below lacks a closing ')'
// after `bindings={:?}` — harmless, but inconsistent with the sibling
// debug! in bind_matched_candidate_for_guard.
1124 fn bind_matched_candidate_for_arm_body(&mut self,
1126 bindings: &[Binding<'tcx>]) {
1127 debug!("bind_matched_candidate_for_arm_body(block={:?}, bindings={:?}", block, bindings);
1129 // Assign each of the bindings. This may trigger moves out of the candidate.
1130 for binding in bindings {
1131 let source_info = self.source_info(binding.span);
// Allocate (and StorageLive) the arm-body local for this variable,
// and schedule its StorageDead/drop for scope exit.
1132 let local = self.storage_live_binding(block, binding.var_id, binding.span,
1134 self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
1135 let rvalue = match binding.binding_mode {
1136 BindingMode::ByValue => {
// `x` / `mut x`: move or copy the value out of the matched place.
1137 Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
1139 BindingMode::ByRef(region, borrow_kind) => {
// `ref x` / `ref mut x`: borrow the matched place in place.
1140 Rvalue::Ref(region, borrow_kind, binding.source.clone())
1143 self.cfg.push_assign(block, source_info, &local, rvalue);
1147 /// Each binding (`ref mut var`/`ref var`/`mut var`/`var`, where
1148 /// the bound `var` has type `T` in the arm body) in a pattern
1149 /// maps to *two* locals. The first local is a binding for
1150 /// occurrences of `var` in the guard, which will all have type
1151 /// `&T`. The second local is a binding for occurrences of `var`
1152 /// in the arm body, which will have type `T`.
//
// NOTE(review): this extract appears to elide some original lines
// (e.g. the `mode`/`var_id`/`name`/`var_ty` parameters that the
// debug! below references, and several struct-literal fields of the
// `LocalDecl`s), so the text below is not the complete function as
// compiled.
1153 fn declare_binding(&mut self,
1154 source_info: SourceInfo,
1155 visibility_scope: SourceScope,
1156 mutability: Mutability,
1161 has_guard: ArmHasGuard,
1162 opt_match_place: Option<(Option<Place<'tcx>>, Span)>)
1164 debug!("declare_binding(var_id={:?}, name={:?}, mode={:?}, var_ty={:?}, \
1165 visibility_scope={:?}, source_info={:?})",
1166 var_id, name, mode, var_ty, visibility_scope, source_info);
1168 let tcx = self.hir.tcx();
// Translate the HAIR binding mode into the ty-level binding mode
// recorded on the user-variable metadata.
1169 let binding_mode = match mode {
1170 BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
1171 BindingMode::ByRef { .. } => ty::BindingMode::BindByReference(mutability.into()),
// The "real" local for the variable, used by the arm body; it is
// cloned below when an extra guard-local (`val_for_guard`) is needed.
1173 let local = LocalDecl::<'tcx> {
1180 is_user_variable: Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
1182 // hypothetically, `visit_bindings` could try to unzip
1183 // an outermost hir::Ty as we descend, matching up
1184 // idents in pat; but complex w/ unclear UI payoff.
1185 // Instead, just abandon providing diagnostic info.
1190 let for_arm_body = self.local_decls.push(local.clone());
// With a guard (and the implicit-refs-within-guards rule active),
// declare two extra locals: `val_for_guard` (same type as the arm
// local) and `ref_for_guard` (type `&T`, what the guard actually
// reads through) — matching bind_matched_candidate_for_guard above.
1191 let locals = if has_guard.0 && tcx.all_pat_vars_are_implicit_refs_within_guards() {
1192 let val_for_guard = self.local_decls.push(local);
1193 let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
1195 ty: tcx.mk_imm_ref(tcx.types.re_empty, var_ty),
1199 // FIXME: should these secretly injected ref_for_guard's be marked as `internal`?
1201 is_user_variable: None,
1203 LocalsForNode::Three { val_for_guard, ref_for_guard, for_arm_body }
1205 LocalsForNode::One(for_arm_body)
// Record the local(s) so later lookups by HIR NodeId (plus a
// ForGuard discriminator) find the right MIR local.
1207 debug!("declare_binding: vars={:?}", locals);
1208 self.var_indices.insert(var_id, locals);