1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Code related to match expressions. These are sufficiently complex
12 //! to warrant their own module and submodules. :) This main module
13 //! includes the high-level algorithm, the submodules contain the
16 use build::{BlockAnd, BlockAndExtension, Builder};
17 use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
18 use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
19 use build::scope::{CachedBlock, DropKind};
20 use rustc_data_structures::fx::FxHashMap;
21 use rustc_data_structures::bitvec::BitArray;
22 use rustc::ty::{self, Ty};
26 use syntax::ast::{Name, NodeId};
29 // helper functions, broken out by category:
34 /// ArmHasGuard is isomorphic to a boolean flag. It indicates whether
35 /// a match arm has a guard expression attached to it.
/// The wrapped `bool` is `true` when the arm has a guard; a newtype is
/// used so that call sites read more clearly than a bare `bool`.
36 #[derive(Copy, Clone, Debug)]
37 pub(crate) struct ArmHasGuard(pub bool);
39 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// NOTE(review): the embedded original line numbers in this listing are
// non-contiguous, so several statements of this function are elided here;
// the comments below describe only what the visible code shows.
//
// Lowers a `match` expression into MIR: evaluates the discriminant into a
// place, declares the bindings of every arm, assembles one `Candidate` per
// arm pattern, builds the decision tree via `match_candidates`, and joins
// all arm bodies at a single freshly created end block.
40 pub fn match_expr(&mut self,
41 destination: &Place<'tcx>,
43 mut block: BasicBlock,
44 discriminant: ExprRef<'tcx>,
47 let tcx = self.hir.tcx();
48 let discriminant_span = discriminant.span();
49 let discriminant_place = unpack!(block = self.as_place(block, discriminant));
51 // Matching on a `discriminant_place` with an uninhabited type doesn't
52 // generate any memory reads by itself, and so if the place "expression"
53 // contains unsafe operations like raw pointer dereferences or union
54 // field projections, we wouldn't know to require an `unsafe` block
55 // around a `match` equivalent to `std::intrinsics::unreachable()`.
56 // See issue #47412 for this hole being discovered in the wild.
58 // HACK(eddyb) Work around the above issue by adding a dummy inspection
59 // of `discriminant_place`, specifically by applying `Rvalue::Discriminant`
60 // (which will work regardless of type) and storing the result in a temp.
62 // NOTE: Under NLL, the above issue should no longer occur because it
63 // injects a borrow of the matched input, which should have the same effect
64 // as eddyb's hack. Once NLL is the default, we can remove the hack.
66 let dummy_source_info = self.source_info(discriminant_span);
67 let dummy_access = Rvalue::Discriminant(discriminant_place.clone());
68 let dummy_ty = dummy_access.ty(&self.local_decls, tcx);
69 let dummy_temp = self.temp(dummy_ty, dummy_source_info.span);
70 self.cfg.push_assign(block, dummy_source_info, &dummy_temp, dummy_access);
72 let source_info = self.source_info(discriminant_span);
// Optionally take a shared borrow of the whole match input; used below to
// emit `ReadForMatch` statements that keep borrowck honest about the
// discriminant being read by each arm's pattern tests.
73 let borrowed_input_temp = if tcx.generate_borrow_of_any_match_input() {
74 // The region is unknown at this point; we rely on NLL
75 // inference to find an appropriate one. Therefore you can
76 // only use this when NLL is turned on.
77 assert!(tcx.use_mir_borrowck());
79 Rvalue::Ref(tcx.types.re_empty, BorrowKind::Shared, discriminant_place.clone());
80 let borrowed_input_ty = borrowed_input.ty(&self.local_decls, tcx);
81 let borrowed_input_temp = self.temp(borrowed_input_ty, span);
82 self.cfg.push_assign(block, source_info, &borrowed_input_temp, borrowed_input);
83 Some(borrowed_input_temp)
88 let mut arm_blocks = ArmBlocks {
90 .map(|_| self.cfg.start_new_block())
94 // Get the arm bodies and their scopes, while declaring bindings.
95 let arm_bodies: Vec<_> = arms.iter().map(|arm| {
96 // BUG: use arm lint level
97 let body = self.hir.mirror(arm.body.clone());
98 let scope = self.declare_bindings(None, body.span,
101 ArmHasGuard(arm.guard.is_some()),
102 Some((Some(&discriminant_place), discriminant_span)));
103 (body, scope.unwrap_or(self.source_scope))
106 // create the candidates' pre-binding start blocks and link them by false edges
107 let candidate_count = arms.iter().fold(0, |ac, c| ac + c.patterns.len());
108 let pre_binding_blocks: Vec<_> = (0..candidate_count + 1)
109 .map(|_| self.cfg.start_new_block()).collect();
111 // assemble a list of candidates: there is one candidate per
112 // pattern, which means there may be more than one candidate
113 // *per arm*. These candidates are kept sorted such that the
114 // highest priority candidate comes first in the list.
115 // (i.e. same order as in source)
117 let candidates: Vec<_> =
120 .flat_map(|(arm_index, arm)| {
121 arm.patterns.iter().enumerate()
122 .map(move |(pat_index, pat)| {
123 (arm_index, pat_index, pat, arm.guard.clone())
126 .zip(pre_binding_blocks.iter().zip(pre_binding_blocks.iter().skip(1)))
127 .map(|((arm_index, pat_index, pattern, guard),
128 (pre_binding_block, next_candidate_pre_binding_block))| {
130 if let (true, Some(borrow_temp)) = (tcx.emit_read_for_match(),
131 borrowed_input_temp.clone()) {
132 // inject a fake read of the borrowed input at
133 // the start of each arm's pattern testing
136 // This should ensure that you cannot change
137 // the variant for an enum while you are in
138 // the midst of matching on it.
139 let pattern_source_info = self.source_info(pattern.span);
140 self.cfg.push(*pre_binding_block, Statement {
141 source_info: pattern_source_info,
142 kind: StatementKind::ReadForMatch(borrow_temp.clone()),
146 // One might ask: why not build up the match pair such that it
147 // matches via `borrowed_input_temp.deref()` instead of
148 // using the `discriminant_place` directly, as it is doing here?
150 // The basic answer is that if you do that, then you end up with
151 // accesses to a shared borrow of the input and that conflicts with
152 // any arms that look like e.g.
156 // ... /* mutate `foo` in arm body */ ...
160 // (Perhaps we could further revise the MIR
161 // construction here so that it only does a
162 // shared borrow at the outset and delays doing
163 // the mutable borrow until after the pattern is
164 // matched *and* the guard (if any) for the arm
169 match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
174 pre_binding_block: *pre_binding_block,
175 next_candidate_pre_binding_block: *next_candidate_pre_binding_block,
// The extra (count+1)-th pre-binding block is the fall-off-the-end
// target; no candidate follows it, so it is unreachable.
180 let outer_source_info = self.source_info(span);
181 self.cfg.terminate(*pre_binding_blocks.last().unwrap(),
182 outer_source_info, TerminatorKind::Unreachable);
184 // this will generate code to test discriminant_place and
185 // branch to the appropriate arm block
186 let otherwise = self.match_candidates(span, &mut arm_blocks, candidates, block);
188 if !otherwise.is_empty() {
189 // All matches are exhaustive. However, because some matches
190 // only have exponentially-large exhaustive decision trees, we
191 // sometimes generate an inexhaustive decision tree.
193 // In that case, the inexhaustive tips of the decision tree
194 // can't be reached - terminate them with an `unreachable`.
195 let source_info = self.source_info(span);
197 let mut otherwise = otherwise;
199 otherwise.dedup(); // variant switches can introduce duplicate target blocks
200 for block in otherwise {
201 self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);
205 // all the arm blocks will rejoin here
206 let end_block = self.cfg.start_new_block();
208 let outer_source_info = self.source_info(span);
209 for (arm_index, (body, source_scope)) in arm_bodies.into_iter().enumerate() {
210 let mut arm_block = arm_blocks.blocks[arm_index];
211 // Re-enter the source scope we created the bindings in.
212 self.source_scope = source_scope;
213 unpack!(arm_block = self.into(destination, arm_block, body));
214 self.cfg.terminate(arm_block, outer_source_info,
215 TerminatorKind::Goto { target: end_block });
// Restore the outer source scope after lowering all arm bodies.
217 self.source_scope = outer_source_info.scope;
/// Emits a `UserAssertTy` statement pinning the user-provided type
/// annotation (looked up by `hir_id`) onto the local for `var`, so NLL can
/// check it. Does nothing when `-Z disable-nll-user-type-assert` is set or
/// when no user-provided type is recorded for `hir_id`.
222 pub fn user_assert_ty(&mut self, block: BasicBlock, hir_id: hir::HirId,
223 var: NodeId, span: Span) {
224 if self.hir.tcx().sess.opts.debugging_opts.disable_nll_user_type_assert { return; }
226 let local_id = self.var_local_id(var, OutsideGuard);
227 let source_info = self.source_info(span);
229 debug!("user_assert_ty: local_id={:?}", hir_id.local_id);
230 if let Some(c_ty) = self.hir.tables.user_provided_tys().get(hir_id) {
231 debug!("user_assert_ty: c_ty={:?}", c_ty);
232 self.cfg.push(block, Statement {
234 kind: StatementKind::UserAssertTy(*c_ty, local_id),
/// Lowers an irrefutable `let <pat> = <expr>`. For the common case of a
/// plain by-value binding with no subpattern, evaluates the initializer
/// directly into the binding's place (avoiding a temporary); otherwise it
/// evaluates the initializer into a place and defers to
/// `place_into_pattern` for the general irrefutable-pattern machinery.
239 pub fn expr_into_pattern(&mut self,
240 mut block: BasicBlock,
241 ty: Option<hir::HirId>,
242 irrefutable_pat: Pattern<'tcx>,
243 initializer: ExprRef<'tcx>)
245 // optimize the case of `let x = ...`
246 match *irrefutable_pat.kind {
247 PatternKind::Binding { mode: BindingMode::ByValue,
249 subpattern: None, .. } => {
250 let place = self.storage_live_binding(block, var, irrefutable_pat.span,
// If the `let` carried an explicit type annotation, assert it for NLL.
253 if let Some(ty) = ty {
254 self.user_assert_ty(block, ty, var, irrefutable_pat.span);
257 unpack!(block = self.into(&place, block, initializer));
258 self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
262 let place = unpack!(block = self.as_place(block, initializer));
263 self.place_into_pattern(block, irrefutable_pat, &place, true)
/// Binds an irrefutable pattern against an already-evaluated place: builds
/// a single dummy `Candidate`, simplifies it down to pure bindings (which
/// must fully succeed — the pattern is irrefutable), optionally records the
/// matched place on each user variable for borrowck diagnostics, and then
/// emits the binding assignments.
268 pub fn place_into_pattern(&mut self,
269 mut block: BasicBlock,
270 irrefutable_pat: Pattern<'tcx>,
271 initializer: &Place<'tcx>,
272 set_match_place: bool)
274 // create a dummy candidate
275 let mut candidate = Candidate {
276 span: irrefutable_pat.span,
277 match_pairs: vec![MatchPair::new(initializer.clone(), &irrefutable_pat)],
281 // since we don't call `match_candidates`, the `next` fields are unused
284 pre_binding_block: block,
285 next_candidate_pre_binding_block: block
288 // Simplify the candidate. Since the pattern is irrefutable, this should
289 // always convert all match-pairs into bindings.
290 unpack!(block = self.simplify_candidate(block, &mut candidate));
292 if !candidate.match_pairs.is_empty() {
293 span_bug!(candidate.match_pairs[0].pattern.span,
294 "match pairs {:?} remaining after simplifying \
295 irrefutable pattern",
296 candidate.match_pairs);
299 // for matches and function arguments, the place that is being matched
300 // can be set when creating the variables. But the place for
301 // let PATTERN = ... might not even exist until we do the assignment.
302 // so we set it here instead
304 for binding in &candidate.bindings {
305 let local = self.var_local_id(binding.var_id, OutsideGuard);
307 if let Some(ClearCrossCrate::Set(BindingForm::Var(
308 VarBindingForm {opt_match_place: Some((ref mut match_place, _)), .. }
309 ))) = self.local_decls[local].is_user_variable
311 *match_place = Some(initializer.clone());
// A `let` can only bind user variables; anything else is a compiler bug.
313 bug!("Let binding to non-user variable.")
318 // now apply the bindings, which will also declare the variables
319 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
324 /// Declares the bindings of the given pattern and returns the visibility scope
325 /// for the bindings in these patterns, if such a scope had to be created.
326 /// NOTE: Declaring the bindings should always be done in their drop scope.
/// Walks the first pattern's bindings via `visit_bindings`, lazily creating
/// a fresh source scope on the first binding encountered; an explicit lint
/// level gets its own additional scope so the lints apply to the locals.
327 pub fn declare_bindings(&mut self,
328 mut visibility_scope: Option<SourceScope>,
330 lint_level: LintLevel,
331 patterns: &[Pattern<'tcx>],
332 has_guard: ArmHasGuard,
333 opt_match_place: Option<(Option<&Place<'tcx>>, Span)>)
334 -> Option<SourceScope> {
335 assert!(!(visibility_scope.is_some() && lint_level.is_explicit()),
336 "can't have both a visibility and a lint scope at the same time");
337 let mut scope = self.source_scope;
338 let num_patterns = patterns.len();
339 self.visit_bindings(&patterns[0], &mut |this, mutability, name, mode, var, span, ty| {
340 if visibility_scope.is_none() {
341 visibility_scope = Some(this.new_source_scope(scope_span,
342 LintLevel::Inherited,
344 // If we have lints, create a new source scope
345 // that marks the lints for the locals. See the comment
346 // on the `source_info` field for why this is needed.
347 if lint_level.is_explicit() {
349 this.new_source_scope(scope_span, lint_level, None);
352 let source_info = SourceInfo {
356 let visibility_scope = visibility_scope.unwrap();
357 this.declare_binding(source_info, visibility_scope, mutability, name, mode,
358 num_patterns, var, ty, has_guard,
359 opt_match_place.map(|(x, y)| (x.cloned(), y)));
/// Emits a `StorageLive` statement for the local backing `var` (resolved
/// through `for_guard`, since guard and arm-body copies of a binding use
/// distinct locals) and returns the binding's place. The trailing lines
/// compute the variable's region scope — presumably to schedule its drop;
/// the call itself is elided from this listing (line numbers skip 381).
364 pub fn storage_live_binding(&mut self,
371 let local_id = self.var_local_id(var, for_guard);
372 let source_info = self.source_info(span);
373 self.cfg.push(block, Statement {
375 kind: StatementKind::StorageLive(local_id)
377 let place = Place::Local(local_id);
378 let var_ty = self.local_decls[local_id].ty;
379 let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
380 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
382 span, region_scope, &place, var_ty,
/// Schedules a drop for the local backing `var` (resolved through
/// `for_guard`) at its HIR region scope, with an empty `CachedBlock` so the
/// drop's unwind block is built on demand.
388 pub fn schedule_drop_for_binding(&mut self,
391 for_guard: ForGuard) {
392 let local_id = self.var_local_id(var, for_guard);
393 let var_ty = self.local_decls[local_id].ty;
394 let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
395 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
397 span, region_scope, &Place::Local(local_id), var_ty,
399 cached_block: CachedBlock::default(),
/// Recursively walks `pattern`, invoking `f` once per `Binding` node with
/// the binding's mutability, name, mode, variable id, span, and type.
/// Non-binding pattern kinds simply recurse into their subpatterns;
/// constants, ranges, and wildcards contain no bindings.
404 pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
405 where F: FnMut(&mut Self, Mutability, Name, BindingMode, NodeId, Span, Ty<'tcx>)
407 match *pattern.kind {
408 PatternKind::Binding { mutability, name, mode, var, ty, ref subpattern, .. } => {
409 f(self, mutability, name, mode, var, pattern.span, ty);
// A binding like `x @ <subpattern>` may contain further bindings.
410 if let Some(subpattern) = subpattern.as_ref() {
411 self.visit_bindings(subpattern, f);
414 PatternKind::Array { ref prefix, ref slice, ref suffix } |
415 PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
416 for subpattern in prefix.iter().chain(slice).chain(suffix) {
417 self.visit_bindings(subpattern, f);
420 PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {
422 PatternKind::Deref { ref subpattern } => {
423 self.visit_bindings(subpattern, f);
425 PatternKind::Leaf { ref subpatterns } |
426 PatternKind::Variant { ref subpatterns, .. } => {
427 for subpattern in subpatterns {
428 self.visit_bindings(&subpattern.pattern, f);
436 /// List of blocks for each arm (and potentially other metadata in the
// One entry per match arm; `match_expr` allocates a fresh block per arm and
// `bind_and_guard_matched_candidate` branches into `blocks[arm_index]`.
439 blocks: Vec<BasicBlock>,
// One pattern of one arm, together with everything that must hold for the
// arm to be taken via this pattern. Candidates are kept sorted by source
// order, highest priority first.
442 #[derive(Clone, Debug)]
443 pub struct Candidate<'pat, 'tcx:'pat> {
444 // span of the original pattern that gave rise to this candidate
447 // all of these must be satisfied...
448 match_pairs: Vec<MatchPair<'pat, 'tcx>>,
450 // ...these bindings established...
451 bindings: Vec<Binding<'tcx>>,
453 // ...and the guard must be evaluated...
454 guard: Option<ExprRef<'tcx>>,
456 // ...and then we branch to arm with this index.
459 // ...and the blocks used to add false edges between candidates
460 pre_binding_block: BasicBlock,
461 next_candidate_pre_binding_block: BasicBlock,
463 // This uniquely identifies this candidate *within* the arm.
// A single pattern binding discovered while simplifying a candidate.
// (Several fields — e.g. the variable id and type — are elided from this
// listing; line numbers skip 469-473.)
467 #[derive(Clone, Debug)]
468 struct Binding<'tcx> {
474 mutability: Mutability,
475 binding_mode: BindingMode<'tcx>,
// A pairing of "some place in the matched value" with "the pattern that
// place must satisfy"; the place field itself is elided from this listing.
478 #[derive(Clone, Debug)]
479 pub struct MatchPair<'pat, 'tcx:'pat> {
483 // ... must match this pattern.
484 pattern: &'pat Pattern<'tcx>,
486 // HACK(eddyb) This is used to toggle whether a Slice pattern
487 // has had its length checked. This is only necessary because
488 // the "rest" part of the pattern right now has type &[T] and
489 // as such, it requires an Rvalue::Slice to be generated.
490 // See RFC 495 / issue #23121 for the eventual (proper) solution.
491 slice_len_checked: bool
// The kinds of runtime tests the decision tree can perform on a place.
// Several variant headers are elided from this listing; the fields shown
// belong to (in order) `Switch`, `SwitchInt`, an equality test, a range
// test, and a slice-length test — matching the variants consumed in
// `test_candidates`.
494 #[derive(Clone, Debug, PartialEq)]
495 enum TestKind<'tcx> {
496 // test the branches of enum
498 adt_def: &'tcx ty::AdtDef,
499 variants: BitArray<usize>,
502 // test which of a set of constant values the scrutinee equals (SwitchInt)
506 indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
511 value: &'tcx ty::Const<'tcx>,
515 // test whether the value falls within an inclusive or exclusive range
517 lo: &'tcx ty::Const<'tcx>,
518 hi: &'tcx ty::Const<'tcx>,
523 // test length of the slice is equal to len
// A test to perform, pairing a `TestKind` with (elided here) the span it
// originates from.
531 pub struct Test<'tcx> {
533 kind: TestKind<'tcx>,
536 ///////////////////////////////////////////////////////////////////////////
537 // Main matching algorithm
539 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
540 /// The main match algorithm. It begins with a set of candidates
541 /// `candidates` and has the job of generating code to determine
542 /// which of these candidates, if any, is the correct one. The
543 /// candidates are sorted such that the first item in the list
544 /// has the highest priority. When a candidate is found to match
545 /// the value, we will generate a branch to the appropriate
546 /// block found in `arm_blocks`.
548 /// The return value is a list of "otherwise" blocks. These are
549 /// points in execution where we found that *NONE* of the
550 /// candidates apply. In principle, this means that the input
551 /// list was not exhaustive, though at present we sometimes are
552 /// not smart enough to recognize all exhaustive inputs.
554 /// It might be surprising that the input can be inexhaustive.
555 /// Indeed, initially, it is not, because all matches are
556 /// exhaustive in Rust. But during processing we sometimes divide
557 /// up the list of candidates and recurse with a non-exhaustive
558 /// list. This is important to keep the size of the generated code
559 /// under control. See `test_candidates` for more details.
560 fn match_candidates<'pat>(&mut self,
562 arm_blocks: &mut ArmBlocks,
563 mut candidates: Vec<Candidate<'pat, 'tcx>>,
564 mut block: BasicBlock)
567 debug!("matched_candidate(span={:?}, block={:?}, candidates={:?})",
568 span, block, candidates);
570 // Start by simplifying candidates. Once this process is
571 // complete, all the match pairs which remain require some
572 // form of test, whether it be a switch or pattern comparison.
573 for candidate in &mut candidates {
574 unpack!(block = self.simplify_candidate(block, candidate));
577 // The candidates are sorted by priority. Check to see
578 // whether the higher priority candidates (and hence at
579 // the front of the vec) have satisfied all their match
582 candidates.iter().take_while(|c| c.match_pairs.is_empty()).count();
583 debug!("match_candidates: {:?} candidates fully matched", fully_matched);
584 let mut unmatched_candidates = candidates.split_off(fully_matched);
// Among the fully-matched candidates, only a leading run of guarded ones
// plus the first unguarded one can actually be reached; anything after
// that is unreachable and only kept for false edges.
586 let fully_matched_with_guard =
587 candidates.iter().take_while(|c| c.guard.is_some()).count();
589 let unreachable_candidates = if fully_matched_with_guard + 1 < candidates.len() {
590 candidates.split_off(fully_matched_with_guard + 1)
595 for candidate in candidates {
596 // If so, apply any bindings, test the guard (if any), and
597 // branch to the arm.
598 if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
601 // if None is returned, then any remaining candidates
602 // are unreachable (at least not through this path).
603 // Link them with false edges.
604 debug!("match_candidates: add false edges for unreachable {:?} and unmatched {:?}",
605 unreachable_candidates, unmatched_candidates);
606 for candidate in unreachable_candidates {
607 let source_info = self.source_info(candidate.span);
608 let target = self.cfg.start_new_block();
609 if let Some(otherwise) = self.bind_and_guard_matched_candidate(target,
612 self.cfg.terminate(otherwise, source_info, TerminatorKind::Unreachable);
616 if unmatched_candidates.is_empty() {
619 let target = self.cfg.start_new_block();
620 return self.match_candidates(span, arm_blocks, unmatched_candidates, target);
625 // If there are no candidates that still need testing, we're done.
626 // Since all matches are exhaustive, execution should never reach this point.
627 if unmatched_candidates.is_empty() {
631 // Test candidates where possible.
632 let (otherwise, tested_candidates) =
633 self.test_candidates(span, arm_blocks, &unmatched_candidates, block);
635 // If the target candidates were exhaustive, then we are done.
636 // But for borrowck, continue building the decision tree.
638 // If all candidates were sorted into `target_candidates` somewhere, then
639 // the initial set was inexhaustive.
640 let untested_candidates = unmatched_candidates.split_off(tested_candidates);
641 if untested_candidates.len() == 0 {
645 // Otherwise, let's process those remaining candidates.
646 let join_block = self.join_otherwise_blocks(span, otherwise);
647 self.match_candidates(span, arm_blocks, untested_candidates, join_block)
/// Funnels a list of "otherwise" blocks into a single block: deduplicates
/// the list, and (in the general case shown here) creates one fresh join
/// block that every otherwise block jumps to via `Goto`. The single-block
/// short-circuit's body is elided from this listing (line numbers skip
/// 659-660).
650 fn join_otherwise_blocks(&mut self,
652 mut otherwise: Vec<BasicBlock>)
655 let source_info = self.source_info(span);
657 otherwise.dedup(); // variant switches can introduce duplicate target blocks
658 if otherwise.len() == 1 {
661 let join_block = self.cfg.start_new_block();
662 for block in otherwise {
663 self.cfg.terminate(block, source_info,
664 TerminatorKind::Goto { target: join_block });
670 /// This is the most subtle part of the matching algorithm. At
671 /// this point, the input candidates have been fully simplified,
672 /// and so we know that all remaining match-pairs require some
673 /// sort of test. To decide what test to do, we take the highest
674 /// priority candidate (first one in the list) and extract the
675 /// first match-pair from the list. From this we decide what kind
676 /// of test is needed using `test`, defined in the `test` module.
678 /// *Note:* taking the first match pair is somewhat arbitrary, and
679 /// we might do better here by choosing more carefully what to
682 /// For example, consider the following possible match-pairs:
684 /// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has
685 /// 2. `x @ 22` -- we will do a `SwitchInt`
686 /// 3. `x @ 3..5` -- we will do a range test
689 /// Once we know what sort of test we are going to perform, this
690 /// test may also help us with other candidates. So we walk over
691 /// the candidates (from high to low priority) and check. This
692 /// gives us, for each outcome of the test, a transformed list of
693 /// candidates. For example, if we are testing the current
694 /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
695 /// @ 22}`, then we would have a resulting candidate of `{(x.0 as
696 /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
697 /// simpler (and, in fact, irrefutable).
699 /// But there may also be candidates that the test just doesn't
700 /// apply to. The classical example involves wildcards:
703 /// # let (x, y, z) = (true, true, true);
704 /// match (x, y, z) {
705 /// (true, _, true) => true, // (0)
706 /// (_, true, _) => true, // (1)
707 /// (false, false, _) => false, // (2)
708 /// (true, _, false) => false, // (3)
712 /// In that case, after we test on `x`, there are 2 overlapping candidate
715 /// - If the outcome is that `x` is true, candidates 0, 1, and 3
716 /// - If the outcome is that `x` is false, candidates 1 and 2
718 /// Here, the traditional "decision tree" method would generate 2
719 /// separate code-paths for the 2 separate cases.
721 /// In some cases, this duplication can create an exponential amount of
722 /// code. This is most easily seen by noticing that this method terminates
723 /// with precisely the reachable arms being reachable - but that problem
724 /// is trivially NP-complete:
727 /// match (var0, var1, var2, var3, ..) {
728 /// (true, _, _, false, true, ...) => false,
729 /// (_, true, true, false, _, ...) => false,
730 /// (false, _, false, false, _, ...) => false,
736 /// Here the last arm is reachable only if there is an assignment to
737 /// the variables that does not match any of the literals. Therefore,
738 /// compilation would take an exponential amount of time in some cases.
740 /// That kind of exponential worst-case might not occur in practice, but
741 /// our simplistic treatment of constants and guards would make it occur
742 /// in very common situations - for example #29740:
746 /// "foo" if foo_guard => ...,
747 /// "bar" if bar_guard => ...,
748 /// "baz" if baz_guard => ...,
753 /// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test.
755 /// It might seem that we would end up with 2 disjoint candidate
756 /// sets, consisting of the first candidate or the other 3, but our
757 /// algorithm doesn't reason about "foo" being distinct from the other
758 /// constants; it considers the latter arms to potentially match after
759 /// both outcomes, which obviously leads to an exponential amount
762 /// To avoid these kinds of problems, our algorithm tries to ensure
763 /// the amount of generated tests is linear. When we do a k-way test,
764 /// we return an additional "unmatched" set alongside the obvious `k`
765 /// sets. When we encounter a candidate that would be present in more
766 /// than one of the sets, we put it and all candidates below it into the
767 /// "unmatched" set. This ensures these `k+1` sets are disjoint.
769 /// After we perform our test, we branch into the appropriate candidate
770 /// set and recurse with `match_candidates`. These sub-matches are
771 /// obviously inexhaustive - as we discarded our otherwise set - so
772 /// we set their continuation to do `match_candidates` on the
773 /// "unmatched" set (which is again inexhaustive).
775 /// If you apply this to the above test, you basically wind up
776 /// with an if-else-if chain, testing each candidate in turn,
777 /// which is precisely what we want.
779 /// In addition to avoiding exponential-time blowups, this algorithm
780 /// also has the nice property that each guard and arm is only generated
782 fn test_candidates<'pat>(&mut self,
784 arm_blocks: &mut ArmBlocks,
785 candidates: &[Candidate<'pat, 'tcx>],
787 -> (Vec<BasicBlock>, usize)
789 // extract the match-pair from the highest priority candidate
790 let match_pair = &candidates.first().unwrap().match_pairs[0];
791 let mut test = self.test(match_pair);
793 // most of the time, the test to perform is simply a function
794 // of the main candidate; but for a test like SwitchInt, we
795 // may want to add cases based on the candidates that are
798 TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
799 for candidate in candidates.iter() {
800 if !self.add_cases_to_switch(&match_pair.place,
809 TestKind::Switch { adt_def: _, ref mut variants} => {
810 for candidate in candidates.iter() {
811 if !self.add_variants_to_switch(&match_pair.place,
821 // perform the test, branching to one of N blocks. For each of
822 // those N possible outcomes, create a (initially empty)
823 // vector of candidates. Those are the candidates that still
824 // apply if the test has that particular outcome.
825 debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair);
826 let target_blocks = self.perform_test(block, &match_pair.place, &test);
827 let mut target_candidates: Vec<_> = (0..target_blocks.len()).map(|_| vec![]).collect();
829 // Sort the candidates into the appropriate vector in
830 // `target_candidates`. Note that at some point we may
831 // encounter a candidate where the test is not relevant; at
832 // that point, we stop sorting.
833 let tested_candidates =
835 .take_while(|c| self.sort_candidate(&match_pair.place,
838 &mut target_candidates))
840 assert!(tested_candidates > 0); // at least the last candidate ought to be tested
841 debug!("tested_candidates: {}", tested_candidates);
842 debug!("untested_candidates: {}", candidates.len() - tested_candidates);
844 // For each outcome of test, process the candidates that still
845 // apply. Collect a list of blocks where control flow will
846 // branch if one of the `target_candidate` sets is not
848 let otherwise: Vec<_> =
849 target_blocks.into_iter()
850 .zip(target_candidates)
851 .flat_map(|(target_block, target_candidates)| {
852 self.match_candidates(span,
// Return the otherwise blocks plus how many candidates this test consumed;
// the caller recurses on the remainder.
859 (otherwise, tested_candidates)
862 /// Initializes each of the bindings from the candidate by
863 /// moving/copying/ref'ing the source as appropriate. Tests the
864 /// guard, if any, and then branches to the arm. Returns the block
865 /// for the case where the guard fails.
867 /// Note: we check earlier that if there is a guard, there cannot
868 /// be move bindings. This isn't really important for the
869 /// self-consistency of this fn, but the reason for it should be
870 /// clear: after we've done the assignments, if there were move
871 /// bindings, further tests would be a use-after-move (which would
872 /// in turn be detected by the borrowck code that runs on the
874 fn bind_and_guard_matched_candidate<'pat>(&mut self,
875 mut block: BasicBlock,
876 arm_blocks: &mut ArmBlocks,
877 candidate: Candidate<'pat, 'tcx>)
878 -> Option<BasicBlock> {
879 debug!("bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
882 debug_assert!(candidate.match_pairs.is_empty());
884 let arm_block = arm_blocks.blocks[candidate.arm_index];
885 let candidate_source_info = self.source_info(candidate.span);
887 self.cfg.terminate(block, candidate_source_info,
888 TerminatorKind::Goto { target: candidate.pre_binding_block });
890 block = self.cfg.start_new_block();
891 self.cfg.terminate(candidate.pre_binding_block, candidate_source_info,
892 TerminatorKind::FalseEdges {
895 vec![candidate.next_candidate_pre_binding_block],
899 // rust-lang/rust#27282: The `autoref` business deserves some
902 // The intent of the `autoref` flag is that when it is true,
903 // then any pattern bindings of type T will map to a `&T`
904 // within the context of the guard expression, but will
905 // continue to map to a `T` in the context of the arm body. To
906 // avoid surfacing this distinction in the user source code
907 // (which would be a severe change to the language and require
908 // far more revision to the compiler), when `autoref` is true,
909 // then any occurrence of the identifier in the guard
910 // expression will automatically get a deref op applied to it.
915 // let place = Foo::new();
916 // match place { foo if inspect(foo)
917 // => feed(foo), ... }
920 // will be treated as if it were really something like:
923 // let place = Foo::new();
924 // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
925 // => { let tmp2 = place; feed(tmp2) }, ... }
927 // And an input like:
930 // let place = Foo::new();
931 // match place { ref mut foo if inspect(foo)
932 // => feed(foo), ... }
935 // will be treated as if it were really something like:
938 // let place = Foo::new();
939 // match place { Foo { .. } if { let tmp1 = & &mut place; inspect(*tmp1) }
940 // => { let tmp2 = &mut place; feed(tmp2) }, ... }
943 // In short, any pattern binding will always look like *some*
944 // kind of `&T` within the guard at least in terms of how the
945 // MIR-borrowck views it, and this will ensure that guard
946 // expressions cannot mutate their the match inputs via such
947 // bindings. (It also ensures that guard expressions can at
948 // most *copy* values from such bindings; non-Copy things
949 // cannot be moved via pattern bindings in guard expressions.)
953 // Implementation notes (under assumption `autoref` is true).
955 // To encode the distinction above, we must inject the
956 // temporaries `tmp1` and `tmp2`.
958 // There are two cases of interest: binding by-value, and binding by-ref.
960 // 1. Binding by-value: Things are simple.
962 // * Establishing `tmp1` creates a reference into the
963 // matched place. This code is emitted by
964 // bind_matched_candidate_for_guard.
966 // * `tmp2` is only initialized "lazily", after we have
967 // checked the guard. Thus, the code that can trigger
968 // moves out of the candidate can only fire after the
969 // guard evaluated to true. This initialization code is
970 // emitted by bind_matched_candidate_for_arm.
972 // 2. Binding by-reference: Things are tricky.
974 // * Here, the guard expression wants a `&&` or `&&mut`
975 // into the original input. This means we need to borrow
976 // a reference that we do not immediately have at hand
977 // (because all we have is the places associated with the
978 // match input itself; it is up to us to create a place
979 // holding a `&` or `&mut` that we can then borrow).
981 let autoref = self.hir.tcx().all_pat_vars_are_implicit_refs_within_guards();
982 if let Some(guard) = candidate.guard {
984 self.bind_matched_candidate_for_guard(
985 block, candidate.pat_index, &candidate.bindings);
986 let guard_frame = GuardFrame {
987 locals: candidate.bindings.iter()
988 .map(|b| GuardFrameLocal::new(b.var_id, b.binding_mode))
991 debug!("Entering guard building context: {:?}", guard_frame);
992 self.guard_context.push(guard_frame);
994 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
997 // the block to branch to if the guard fails; if there is no
998 // guard, this block is simply unreachable
999 let guard = self.hir.mirror(guard);
1000 let source_info = self.source_info(guard.span);
1001 let cond = unpack!(block = self.as_local_operand(block, guard));
1003 let guard_frame = self.guard_context.pop().unwrap();
1004 debug!("Exiting guard building context with locals: {:?}", guard_frame);
1007 let false_edge_block = self.cfg.start_new_block();
1009 // We want to ensure that the matched candidates are bound
1010 // after we have confirmed this candidate *and* any
1011 // associated guard; Binding them on `block` is too soon,
1012 // because that would be before we've checked the result
1015 // But binding them on `arm_block` is *too late*, because
1016 // then all of the candidates for a single arm would be
1017 // bound in the same place, that would cause a case like:
1021 // (mut x, 1) | (2, mut x) if { true } => { ... }
1022 // ... // ^^^^^^^ (this is `arm_block`)
1026 // would yield a `arm_block` something like:
1029 // StorageLive(_4); // _4 is `x`
1030 // _4 = &mut (_1.0: i32); // this is handling `(mut x, 1)` case
1031 // _4 = &mut (_1.1: i32); // this is handling `(2, mut x)` case
1034 // and that is clearly not correct.
1035 let post_guard_block = self.cfg.start_new_block();
1036 self.cfg.terminate(block, source_info,
1037 TerminatorKind::if_(self.hir.tcx(), cond, post_guard_block,
1041 self.bind_matched_candidate_for_arm_body(post_guard_block, &candidate.bindings);
1044 self.cfg.terminate(post_guard_block, source_info,
1045 TerminatorKind::Goto { target: arm_block });
1047 let otherwise = self.cfg.start_new_block();
1049 self.cfg.terminate(false_edge_block, source_info,
1050 TerminatorKind::FalseEdges {
1051 real_target: otherwise,
1053 vec![candidate.next_candidate_pre_binding_block],
1057 // (Here, it is not too early to bind the matched
1058 // candidate on `block`, because there is no guard result
1059 // that we have to inspect before we bind them.)
1060 self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
1061 self.cfg.terminate(block, candidate_source_info,
1062 TerminatorKind::Goto { target: arm_block });
1067 // Only called when all_pat_vars_are_implicit_refs_within_guards,
1068 // and thus all code/comments assume we are in that context.
// Emits the bindings a pattern's variables take on *within the guard
// expression*: each variable becomes a shared reference (`&T`), so the
// guard can read the matched data but can neither move out of it nor
// mutate it before the guard has evaluated to true.
1069 fn bind_matched_candidate_for_guard(&mut self,
1072 bindings: &[Binding<'tcx>]) {
1073 debug!("bind_matched_candidate_for_guard(block={:?}, pat_index={:?}, bindings={:?})",
1074 block, pat_index, bindings);
1076 // Assign each of the bindings. Since we are binding for a
1077 // guard expression, this will never trigger moves out of the
// candidate: only borrows are emitted below.
1079 let re_empty = self.hir.tcx().types.re_empty;
1080 for binding in bindings {
1081 let source_info = self.source_info(binding.span);
1083 // For each pattern ident P of type T, `ref_for_guard` is
1084 // a reference R: &T pointing to the location matched by
1085 // the pattern, and every occurrence of P within a guard
// goes through R (implicitly dereferenced), per the
// `let tmp1 = &place; inspect(*tmp1)` scheme described above.
1087 let ref_for_guard = self.storage_live_binding(
1088 block, binding.var_id, binding.span, RefWithinGuard);
1089 // Question: Why schedule drops if bindings are all
1090 // shared-&'s? Answer: Because schedule_drop_for_binding
1091 // also emits StorageDead's for those locals.
1092 self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
1093 match binding.binding_mode {
1094 BindingMode::ByValue => {
// By-value binding: a single shared borrow of the matched
// place is all the guard needs.
1095 let rvalue = Rvalue::Ref(re_empty, BorrowKind::Shared, binding.source.clone());
1096 self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
1098 BindingMode::ByRef(region, borrow_kind) => {
1099 // Tricky business: For `ref id` and `ref mut id`
1100 // patterns, we want `id` within the guard to
1101 // correspond to a temp of type `& &T` or `& &mut
1102 // T` (i.e. a "borrow of a borrow") that is
1103 // implicitly dereferenced.
// To borrow a borrow, we first need a place that
// holds the inner borrow. So, create a temp for the
// inner borrow, and then take a reference to it.
1109 // Note: the temp created here is *not* the one
1110 // used by the arm body itself. This eases
1111 // observing two-phase borrow restrictions.
1112 let val_for_guard = self.storage_live_binding(
1113 block, binding.var_id, binding.span, ValWithinGuard(pat_index));
1114 self.schedule_drop_for_binding(
1115 binding.var_id, binding.span, ValWithinGuard(pat_index));
1117 // rust-lang/rust#27282: We reuse the two-phase
1118 // borrow infrastructure so that the mutable
1119 // borrow (whose mutability is *unusable* within
1120 // the guard) does not conflict with the implicit
1121 // borrow of the whole match input. See additional
1122 // discussion on rust-lang/rust#49870.
1123 let borrow_kind = match borrow_kind {
1124 BorrowKind::Shared | BorrowKind::Unique => borrow_kind,
1125 BorrowKind::Mut { .. } => BorrowKind::Mut { allow_two_phase_borrow: true },
// First: the inner borrow (`&T`/`&mut T`) of the matched place...
1127 let rvalue = Rvalue::Ref(region, borrow_kind, binding.source.clone());
1128 self.cfg.push_assign(block, source_info, &val_for_guard, rvalue);
// ...then the shared borrow of that temp, which is what the
// guard actually reads through.
1129 let rvalue = Rvalue::Ref(region, BorrowKind::Shared, val_for_guard);
1130 self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
// Emits the bindings a pattern's variables take on in the *arm body*.
// Unlike the guard bindings above, this may move values out of the
// matched place, so it must only run once the candidate (and any
// guard) has definitely matched.
1136 fn bind_matched_candidate_for_arm_body(&mut self,
1138 bindings: &[Binding<'tcx>]) {
// NOTE(review): format string below is missing a closing ')' —
// debug-only output, left as-is to preserve behavior.
1139 debug!("bind_matched_candidate_for_arm_body(block={:?}, bindings={:?}", block, bindings);
1141 // Assign each of the bindings. This may trigger moves out of the candidate.
1142 for binding in bindings {
1143 let source_info = self.source_info(binding.span);
// Activate storage for the binding's arm-body local and schedule
// its StorageDead/drop.
1144 let local = self.storage_live_binding(block, binding.var_id, binding.span,
1146 self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
1147 let rvalue = match binding.binding_mode {
1148 BindingMode::ByValue => {
// By-value: copy or move the matched value into the local.
1149 Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
1151 BindingMode::ByRef(region, borrow_kind) => {
// By-reference: borrow the matched place with the
// requested borrow kind.
1152 Rvalue::Ref(region, borrow_kind, binding.source.clone())
1155 self.cfg.push_assign(block, source_info, &local, rvalue);
1159 /// Each binding (`ref mut var`/`ref var`/`mut var`/`var`, where
1160 /// the bound `var` has type `T` in the arm body) in a pattern
1161 /// maps to `2+N` locals. The first local is a binding for
1162 /// occurrences of `var` in the guard, which will all have type
1163 /// `&T`. The N locals are bindings for the `T` that is referenced
1164 /// by the first local; they are not used outside of the
1165 /// guard. The last local is a binding for occurrences of `var` in
1166 /// the arm body, which will have type `T`.
1168 /// The reason we have N locals rather than just 1 is to
1169 /// accommodate rust-lang/rust#51348: If the arm has N candidate
1170 /// patterns, then in general they can correspond to distinct
1171 /// parts of the matched data, and we want them to be distinct
1172 /// temps in order to simplify checks performed by our internal
1173 /// leveraging of two-phase borrows.
1174 fn declare_binding(&mut self,
1175 source_info: SourceInfo,
1176 visibility_scope: SourceScope,
1177 mutability: Mutability,
1180 num_patterns: usize,
1183 has_guard: ArmHasGuard,
1184 opt_match_place: Option<(Option<Place<'tcx>>, Span)>)
1186 debug!("declare_binding(var_id={:?}, name={:?}, mode={:?}, var_ty={:?}, \
1187 visibility_scope={:?}, source_info={:?})",
1188 var_id, name, mode, var_ty, visibility_scope, source_info);
1190 let tcx = self.hir.tcx();
// Translate the lowering-level binding mode into the `ty`-level form
// recorded on the local's `VarBindingForm`.
1191 let binding_mode = match mode {
1192 BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
1193 BindingMode::ByRef { .. } => ty::BindingMode::BindByReference(mutability.into()),
// The local holding `var` for the arm body (type `T`).
1195 let local = LocalDecl::<'tcx> {
1202 is_user_variable: Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
1204 // hypothetically, `visit_bindings` could try to unzip
1205 // an outermost hir::Ty as we descend, matching up
1206 // idents in pat; but complex w/ unclear UI payoff.
1207 // Instead, just abandon providing diagnostic info.
1212 let for_arm_body = self.local_decls.push(local.clone());
// When the arm has a guard (and pattern vars are implicit refs within
// guards), additionally declare the N `ValWithinGuard` temps and the
// single `RefWithinGuard` local described in the doc comment above;
// otherwise the one arm-body local suffices.
1213 let locals = if has_guard.0 && tcx.all_pat_vars_are_implicit_refs_within_guards() {
1214 let mut vals_for_guard = Vec::with_capacity(num_patterns);
1215 for _ in 0..num_patterns {
1216 let val_for_guard_idx = self.local_decls.push(LocalDecl {
1217 // This variable isn't mutated but has a name, so has to be
1218 // immutable to avoid the unused mut lint.
1219 mutability: Mutability::Not,
1222 vals_for_guard.push(val_for_guard_idx);
1224 let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
1225 // See previous comment.
1226 mutability: Mutability::Not,
// Guard occurrences of `var` see `&T` (in the empty region).
1227 ty: tcx.mk_imm_ref(tcx.types.re_empty, var_ty),
1231 // FIXME: should these secretly injected ref_for_guard's be marked as `internal`?
1233 is_user_variable: Some(ClearCrossCrate::Set(BindingForm::RefForGuard)),
1235 LocalsForNode::ForGuard { vals_for_guard, ref_for_guard, for_arm_body }
1237 LocalsForNode::One(for_arm_body)
1239 debug!("declare_binding: vars={:?}", locals);
// Record the HIR-variable -> MIR-local(s) mapping for later lookups.
1240 self.var_indices.insert(var_id, locals);