// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Compilation of match statements
//!
//! I will endeavor to explain the code as best I can. I have only a loose
//! understanding of some parts of it.
//!
//! ## Matching
//!
//! The basic state of the code is maintained in an array `m` of `Match`
//! objects. Each `Match` describes some list of patterns, all of which must
//! match against the current list of values. If those patterns match, then
//! the arm listed in the match is the correct arm. A given arm may have
//! multiple corresponding match entries, one for each alternative that
//! remains. As we proceed these sets of matches are adjusted by the various
//! `enter_XXX()` functions, each of which adjusts the set of options given
//! some information about the value which has been matched.
//!
//! So, initially, there is one value and N matches, each of which has one
//! constituent pattern. N here is usually the number of arms but may be
//! greater, if some arms have multiple alternatives. For example, here:
//!
//!     enum Foo { A, B(int), C(usize, usize) }
//!     match foo {
//!         A => ...,
//!         B(x) if x > 10 => ...,
//!         C(1, 2) => ...,
//!         C(_) => ...
//!     }
//!
//! The value would be `foo`. There would be four matches, each of which
//! contains one pattern (and, in one case, a guard). We could collect the
//! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1`
//! and `2`, and in the second case the `_` pattern would be expanded into
//! `_` and `_`. The two values are of course the arguments to `C`.
//!
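//! For illustration, here is a standalone sketch of that expansion step
//! (the `Pat` type and `specialize` function are invented for this sketch;
//! the real code works on `hir::Pat` via `check_match::specialize` and the
//! `enter_XXX()` functions below):
//!
//!     #[derive(Clone)]
//!     enum Pat {
//!         Wild,                          // `_`
//!         Lit(u64),                      // e.g. `1`
//!         Ctor(&'static str, Vec<Pat>),  // e.g. `C(1, 2)`
//!     }
//!
//!     /// Keep the rows whose first pattern could match constructor `name`,
//!     /// replacing that pattern with its `arity` sub-patterns (a wildcard
//!     /// expands into `arity` fresh wildcards).
//!     fn specialize(rows: &[Vec<Pat>], name: &str, arity: usize) -> Vec<Vec<Pat>> {
//!         rows.iter().filter_map(|row| {
//!             let expanded = match row[0] {
//!                 Pat::Ctor(n, ref args) if n == name => args.clone(),
//!                 Pat::Wild => vec![Pat::Wild; arity],
//!                 _ => return None,
//!             };
//!             Some(expanded.into_iter().chain(row[1..].iter().cloned()).collect())
//!         }).collect()
//!     }
//!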
//! Here is a quick guide to the various functions:
//!
//! - `compile_submatch()`: The main workhorse. It takes a list of values and
//!   a list of matches and finds the various possibilities that could occur.
//!
//! - `enter_XXX()`: modifies the list of matches based on some information
//!   about the value that has been matched. For example,
//!   `enter_rec_or_struct()` adjusts the values given that a record or struct
//!   has been matched. This is an infallible pattern, so *all* of the matches
//!   must be either wildcards or record/struct patterns. `enter_opt()`
//!   handles the fallible cases, and it is correspondingly more complex.
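//!
//! For example (a sketch in the style of the examples above), entering a
//! struct pattern is infallible while entering an enum variant is not:
//!
//!     struct Point { x: u8, y: u8 }
//!     // infallible: every remaining match must be `Point { .. }` or `_`
//!     match p { Point { x, y } => ... }
//!     // fallible: rows naming a different variant are dropped
//!     match opt { Some(v) => ..., None => ... }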
//!
//! ## Bindings
//!
//! We store information about the bound variables for each arm as part of the
//! per-arm `ArmData` struct. There is a mapping from identifiers to
//! `BindingInfo` structs. These structs contain the mode/id/type of the
//! binding, but they also contain an LLVM value which points at an alloca
//! called `llmatch`. For by-value bindings that are Copy, we also create
//! an extra alloca that we copy the matched value to, so that any changes
//! we make to our copy are not reflected in the original and vice versa.
//! We don't do this if it's a move, since the original value can't be used
//! afterwards anyway, which allows us to cheat by not creating an extra
//! alloca.
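//!
//! For example (sketch), with a Copy type:
//!
//!     let mut pair = (1, 2);
//!     match pair {
//!         (x, _) => {
//!             // `x` gets its own alloca: writing to `pair.0` afterwards
//!             // must not change `x`, and vice versa.
//!         }
//!     }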
//!
//! The `llmatch` binding always stores a pointer into the value being matched
//! which points at the data for the binding. If the value being matched has
//! type `T`, then `llmatch` will point at an alloca of type `T*` (and hence
//! `llmatch` has type `T**`). So, if you have a pattern like:
//!
//!     match (a, b) { (ref c, d) => { ... } }
//!
//! For `c` and `d`, we would generate allocas of type `C*` and `D*`
//! respectively. These are called the `llmatch`. As we match, when we come
//! up against an identifier, we store the current pointer into the
//! corresponding alloca.
//!
//! Once a pattern is completely matched, and assuming that there is no guard
//! pattern, we will branch to a block that leads to the body itself. For any
//! by-value bindings, this block will first load the ptr from `llmatch` (the
//! one of type `D*`) and then load a second time to get the actual value (the
//! one of type `D`). For by-ref bindings, the value of the local variable is
//! simply the first alloca.
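//!
//! In pointer terms, the setup is roughly this (pseudo-code with invented
//! names, not actual compiler output):
//!
//!     let tuple: (C, D) = ...;
//!     let llmatch_c: *const C = &tuple.0;  // stored into the `C*` alloca
//!     let llmatch_d: *const D = &tuple.1;  // stored into the `D*` alloca
//!     let d: D = *llmatch_d;               // by-value `d`: one extra load
//!     let c: &C = &*llmatch_c;             // by-ref `c`: the pointer itself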
//!
//! So, for the example above, we would generate a setup kind of like this:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +--------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)  |
//!        | llmatch_d = (addr of second half of tuple) |
//!        +--------------------------------------------+
//!            |
//!        +--------------------------------------+
//!        | *llbinding_d = **llmatch_d           |
//!        +--------------------------------------+
//!
//! If there is a guard, the situation is slightly different, because we must
//! execute the guard code. Moreover, we need to do so once for each of the
//! alternatives that lead to the arm, because if the guard fails, they may
//! have different points from which to continue the search. Therefore, in
//! that case, we generate code that looks more like:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +-------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple) |
//!        | llmatch_d = (addr of second half of tuple)|
//!        +-------------------------------------------+
//!            |
//!        +-------------------------------------------------+
//!        | *llbinding_d = **llmatch_d                      |
//!        | check condition                                 |
//!        | if false { goto next case }                     |
//!        | if true { goto body }                           |
//!        +-------------------------------------------------+
//!
//! The handling for the cleanups is a bit... sensitive. Basically, the body
//! is the one that invokes `add_clean()` for each binding. During the guard
//! evaluation, we add temporary cleanups and revoke them after the guard is
//! evaluated (it could fail, after all). Note that guards and moves are
//! just plain incompatible.
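//!
//! For example (sketch), in an arm like
//!
//!     B(x) if x > 10 => { ... }
//!
//! the guard `x > 10` may fail, so the cleanup for `x` must be revocable,
//! and binding `x` by move out of the matched value is not allowed.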
//!
//! Some relevant helper functions that manage bindings:
//! - `create_bindings_map()`
//! - `insert_lllocals()`
//!
//! ## Notes on vector pattern matching.
//!
//! Vector pattern matching is surprisingly tricky. The problem is that
//! the structure of the vector isn't fully known, and slice matches
//! can be done on subparts of it.
//!
//! The way that vector pattern matches are dealt with, then, is as
//! follows. First, we make the actual condition associated with a
//! vector pattern simply a vector length comparison. So the pattern
//! [1, .. x] gets the condition "vec len >= 1", and the pattern
//! [.. x] gets the condition "vec len >= 0". The problem here is that
//! having the condition "vec len >= 1" hold clearly does not mean that
//! only a pattern that has exactly that condition will match. This
//! means that it may well be the case that a condition holds, but none
//! of the patterns matching that condition match; to deal with this,
//! when doing vector length matches, we have match failures proceed to
//! the next condition to check.
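//!
//! As a sketch, the length condition attached to a vector pattern depends
//! only on how many elements it names at the front and back and on whether
//! it has a `..` slice part (compare `SliceLengthEqual` and
//! `SliceLengthGreaterOrEqual` below; this helper is invented for
//! illustration):
//!
//!     fn length_condition(prefix: usize, has_slice: bool, suffix: usize) -> String {
//!         if has_slice {
//!             format!("vec len >= {}", prefix + suffix) // e.g. [1, .. x] => "vec len >= 1"
//!         } else {
//!             format!("vec len == {}", prefix + suffix) // e.g. [a, b] => "vec len == 2"
//!         }
//!     }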
//!
//! There are a couple more subtleties to deal with. While the "actual"
//! condition associated with vector length tests is simply a test on
//! the vector length, the actual vec_len Opt entry contains more
//! information used to restrict which matches are associated with it.
//! So that all matches in a submatch are matching against the same
//! values from inside the vector, they are split up by how many
//! elements they match at the front and at the back of the vector. In
//! order to make sure that arms are properly checked in order, even
//! with the overmatching conditions, each vec_len Opt entry is
//! associated with a range of matches.
//! Consider the following:
//!
//!     match &[1, 2, 3] {
//!         [1, 1, .. _] => 0,
//!         [1, 2, 2, .. _] => 1,
//!         [1, 2, 3, .. _] => 2,
//!         [1, 2, .. _] => 3,
//!     }
//!
//! The proper arm to match is arm 2, but arms 0 and 3 both have the
//! condition "len >= 2". If arm 3 was lumped in with arm 0, then the
//! wrong branch would be taken. Instead, vec_len Opts are associated
//! with a contiguous range of matches that have the same "shape".
//! This is sort of ugly and requires a bunch of special handling of
//! fallthrough cases, where a failed length test proceeds to the next
//! condition to check.
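//!
//! Concretely, for the example above:
//!
//!     // The (front, back) "shape" of each arm (sketch):
//!     let shapes = [(2, 0), (3, 0), (3, 0), (2, 0)];
//!     // Arms 1 and 2 are contiguous and share one vec_len Opt; arms 0 and
//!     // 3 have the same shape but are not contiguous, so each gets its own
//!     // Opt entry covering its own range of matches.
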
pub use self::BranchKind::*;
pub use self::OptResult::*;
pub use self::TransBindingMode::*;
use self::Opt::*;
use self::FailureHandler::*;

use llvm::{ValueRef, BasicBlockRef};
use middle::check_match::StaticInliner;
use middle::check_match;
use middle::const_eval;
use middle::def::{self, DefMap};
use middle::def_id::DefId;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::lang_items::StrEqFnLangItem;
use middle::mem_categorization as mc;
use middle::pat_util::*;
use trans::adt;
use trans::base::*;
use trans::build::{AddCase, And, Br, CondBr, GEPi, InBoundsGEP, Load, PointerCast};
use trans::build::{Not, Store, Sub, add_comment};
use trans::build;
use trans::callee;
use trans::cleanup::{self, CleanupMethods, DropHintMethods};
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::{self, DebugLoc, ToDebugLoc};
use trans::expr::{self, Dest};
use trans::monomorphize;
use trans::tvec;
use trans::type_of;
use middle::ty::{self, Ty};
use session::config::NoDebugInfo;
use util::common::indenter;
use util::nodemap::FnvHashMap;
use util::ppaux;

use std;
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;

use rustc_front::hir;
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
use syntax::codemap::Span;
use rustc_front::fold::Folder;
use syntax::ptr::P;

#[derive(Copy, Clone, Debug)]
struct ConstantExpr<'a>(&'a hir::Expr);

impl<'a> ConstantExpr<'a> {
    fn eq(self, other: ConstantExpr<'a>, tcx: &ty::ctxt) -> bool {
        match const_eval::compare_lit_exprs(tcx, self.0, other.0) {
            Some(result) => result == Ordering::Equal,
            None => panic!("compare_lit_exprs: type mismatch"),
        }
    }
}

// An option identifying a branch (either a literal, an enum variant or a range)
#[derive(Debug)]
enum Opt<'a, 'tcx> {
    ConstantValue(ConstantExpr<'a>, DebugLoc),
    ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>, DebugLoc),
    Variant(ty::Disr, Rc<adt::Repr<'tcx>>, DefId, DebugLoc),
    SliceLengthEqual(usize, DebugLoc),
    SliceLengthGreaterOrEqual(/* prefix length */ usize,
                              /* suffix length */ usize,
                              DebugLoc),
}

impl<'a, 'tcx> Opt<'a, 'tcx> {
    fn eq(&self, other: &Opt<'a, 'tcx>, tcx: &ty::ctxt<'tcx>) -> bool {
        match (self, other) {
            (&ConstantValue(a, _), &ConstantValue(b, _)) => a.eq(b, tcx),
            (&ConstantRange(a1, a2, _), &ConstantRange(b1, b2, _)) => {
                a1.eq(b1, tcx) && a2.eq(b2, tcx)
            }
            (&Variant(a_disr, ref a_repr, a_def, _),
             &Variant(b_disr, ref b_repr, b_def, _)) => {
                a_disr == b_disr && *a_repr == *b_repr && a_def == b_def
            }
            (&SliceLengthEqual(a, _), &SliceLengthEqual(b, _)) => a == b,
            (&SliceLengthGreaterOrEqual(a1, a2, _),
             &SliceLengthGreaterOrEqual(b1, b2, _)) => {
                a1 == b1 && a2 == b2
            }
            _ => false
        }
    }

    fn trans<'blk>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> {
        let _icx = push_ctxt("match::trans_opt");
        let ccx = bcx.ccx();
        match *self {
            ConstantValue(ConstantExpr(lit_expr), _) => {
                let lit_ty = bcx.tcx().node_id_to_type(lit_expr.id);
                let (llval, _) = consts::const_expr(ccx, &*lit_expr, bcx.fcx.param_substs, None);
                let lit_datum = immediate_rvalue(llval, lit_ty);
                let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
                SingleResult(Result::new(bcx, lit_datum.val))
            }
            ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2), _) => {
                let (l1, _) = consts::const_expr(ccx, &**l1, bcx.fcx.param_substs, None);
                let (l2, _) = consts::const_expr(ccx, &**l2, bcx.fcx.param_substs, None);
                RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
            }
            Variant(disr_val, ref repr, _, _) => {
                adt::trans_case(bcx, &**repr, disr_val)
            }
            SliceLengthEqual(length, _) => {
                SingleResult(Result::new(bcx, C_uint(ccx, length)))
            }
            SliceLengthGreaterOrEqual(prefix, suffix, _) => {
                LowerBound(Result::new(bcx, C_uint(ccx, prefix + suffix)))
            }
        }
    }
    fn debug_loc(&self) -> DebugLoc {
        match *self {
            ConstantValue(_, debug_loc) |
            ConstantRange(_, _, debug_loc) |
            Variant(_, _, _, debug_loc) |
            SliceLengthEqual(_, debug_loc) |
            SliceLengthGreaterOrEqual(_, _, debug_loc) => debug_loc
        }
    }
}

#[derive(Copy, Clone, PartialEq)]
pub enum BranchKind {
    NoBranch,
    Single,
    Switch,
    Compare,
    CompareSliceLength
}

pub enum OptResult<'blk, 'tcx: 'blk> {
    SingleResult(Result<'blk, 'tcx>),
    RangeResult(Result<'blk, 'tcx>, Result<'blk, 'tcx>),
    LowerBound(Result<'blk, 'tcx>)
}

#[derive(Clone, Copy, PartialEq)]
pub enum TransBindingMode {
    /// By-value binding for a copy type: copies from matched data
    /// into a fresh LLVM alloca.
    TrByCopy(/* llbinding */ ValueRef),

    /// By-value binding for a non-copy type where we copy into a
    /// fresh LLVM alloca; this most accurately reflects the language
    /// semantics (e.g. it properly handles overwrites of the matched
    /// input), but potentially injects an unwanted copy.
    TrByMoveIntoCopy(/* llbinding */ ValueRef),

    /// Binding a non-copy type by reference under the hood; this is
    /// a codegen optimization to avoid unnecessary memory traffic.
    TrByMoveRef,

    /// By-ref binding exposed in the original source input.
    TrByRef,
}

impl TransBindingMode {
    /// If binding by making a fresh copy, returns the alloca that it
    /// will copy into; otherwise returns None.
    fn alloca_if_copy(&self) -> Option<ValueRef> {
        match *self {
            TrByCopy(llbinding) | TrByMoveIntoCopy(llbinding) => Some(llbinding),
            TrByMoveRef | TrByRef => None,
        }
    }
}

/// Information about a pattern binding:
/// - `llmatch` is a pointer to a stack slot. The stack slot contains a
///   pointer into the value being matched. Hence, llmatch has type `T**`
///   where `T` is the value being matched.
/// - `trmode` is the trans binding mode
/// - `id` is the node id of the binding
/// - `ty` is the Rust type of the binding
#[derive(Clone, Copy)]
pub struct BindingInfo<'tcx> {
    pub llmatch: ValueRef,
    pub trmode: TransBindingMode,
    pub id: ast::NodeId,
    pub span: Span,
    pub ty: Ty<'tcx>,
}

type BindingsMap<'tcx> = FnvHashMap<ast::Ident, BindingInfo<'tcx>>;

struct ArmData<'p, 'blk, 'tcx: 'blk> {
    bodycx: Block<'blk, 'tcx>,
    arm: &'p hir::Arm,
    bindings_map: BindingsMap<'tcx>
}

/// Info about Match.
/// If all `pats` are matched then arm `data` will be executed.
/// As we proceed `bound_ptrs` are filled with pointers to values to be bound,
/// these pointers are stored in `llmatch` variables just before executing the `data` arm.
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
    pats: Vec<&'p hir::Pat>,
    data: &'a ArmData<'p, 'blk, 'tcx>,
    bound_ptrs: Vec<(ast::Ident, ValueRef)>,
    // Thread along renamings done by the check_match::StaticInliner, so we can
    // map back to original NodeIds
    pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
}

impl<'a, 'p, 'blk, 'tcx> fmt::Debug for Match<'a, 'p, 'blk, 'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if ppaux::verbose() {
            // for many programs, this just takes too long to serialize
            write!(f, "{:?}", self.pats)
        } else {
            write!(f, "{} pats", self.pats.len())
        }
    }
}

fn has_nested_bindings(m: &[Match], col: usize) -> bool {
    for br in m {
        match br.pats[col].node {
            hir::PatIdent(_, _, Some(_)) => return true,
            _ => ()
        }
    }
    false
}

// As noted in `fn match_datum`, we should eventually pass around a
// `Datum<Lvalue>` for the `val`; but until we get to that point, this
// `MatchInput` struct will serve -- it has everything `Datum<Lvalue>`
// does except for the type field.
#[derive(Copy, Clone)]
pub struct MatchInput { val: ValueRef, lval: Lvalue }

impl<'tcx> Datum<'tcx, Lvalue> {
    pub fn match_input(&self) -> MatchInput {
        MatchInput {
            val: self.val,
            lval: self.kind,
        }
    }
}

impl MatchInput {
    fn from_val(val: ValueRef) -> MatchInput {
        MatchInput {
            val: val,
            lval: Lvalue::new("MatchInput::from_val"),
        }
    }

    fn to_datum<'tcx>(self, ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
        Datum::new(self.val, ty, self.lval)
    }
}

fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                              m: &[Match<'a, 'p, 'blk, 'tcx>],
                                              col: usize,
                                              val: MatchInput)
                                              -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("expand_nested_bindings(bcx={}, m={:?}, col={}, val={})",
           bcx.to_str(),
           m,
           col,
           bcx.val_to_string(val.val));
    let _indenter = indenter();

    m.iter().map(|br| {
        let mut bound_ptrs = br.bound_ptrs.clone();
        let mut pat = br.pats[col];
        loop {
            pat = match pat.node {
                hir::PatIdent(_, ref path, Some(ref inner)) => {
                    bound_ptrs.push((path.node, val.val));
                    &**inner
                },
                _ => break
            }
        }

        let mut pats = br.pats.clone();
        pats[col] = pat;
        Match {
            pats: pats,
            data: &*br.data,
            bound_ptrs: bound_ptrs,
            pat_renaming_map: br.pat_renaming_map,
        }
    }).collect()
}

fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                                          dm: &DefMap,
                                          m: &[Match<'a, 'p, 'blk, 'tcx>],
                                          col: usize,
                                          val: MatchInput,
                                          mut e: F)
                                          -> Vec<Match<'a, 'p, 'blk, 'tcx>> where
    F: FnMut(&[&'p hir::Pat]) -> Option<Vec<&'p hir::Pat>>,
{
    debug!("enter_match(bcx={}, m={:?}, col={}, val={})",
           bcx.to_str(),
           m,
           col,
           bcx.val_to_string(val.val));
    let _indenter = indenter();

    m.iter().filter_map(|br| {
        e(&br.pats).map(|pats| {
            let this = br.pats[col];
            let mut bound_ptrs = br.bound_ptrs.clone();
            match this.node {
                hir::PatIdent(_, ref path, None) => {
                    if pat_is_binding(dm, &*this) {
                        bound_ptrs.push((path.node, val.val));
                    }
                }
                hir::PatVec(ref before, Some(ref slice), ref after) => {
                    if let hir::PatIdent(_, ref path, None) = slice.node {
                        let subslice_val = bind_subslice_pat(
                            bcx, this.id, val,
                            before.len(), after.len());
                        bound_ptrs.push((path.node, subslice_val));
                    }
                }
                _ => {}
            }

            Match {
                pats: pats,
                data: br.data,
                bound_ptrs: bound_ptrs,
                pat_renaming_map: br.pat_renaming_map,
            }
        })
    }).collect()
}

fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     dm: &DefMap,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     col: usize,
                                     val: MatchInput)
                                     -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_default(bcx={}, m={:?}, col={}, val={})",
           bcx.to_str(),
           m,
           col,
           bcx.val_to_string(val.val));
    let _indenter = indenter();

    // Collect all of the matches that can match against anything.
    enter_match(bcx, dm, m, col, val, |pats| {
        if pat_is_binding_or_wild(dm, &*pats[col]) {
            let mut r = pats[..col].to_vec();
            r.push_all(&pats[col + 1..]);
            Some(r)
        } else {
            None
        }
    })
}

// <pcwalton> nmatsakis: what does enter_opt do?
// <pcwalton> in trans/match
// <pcwalton> trans/match.rs is like stumbling around in a dark cave
// <nmatsakis> pcwalton: the enter family of functions adjust the set of
//             patterns as needed
// <nmatsakis> yeah, at some point I kind of achieved some level of
//             understanding
// <nmatsakis> anyhow, they adjust the patterns given that something of that
//             kind has been found
// <nmatsakis> pcwalton: ok, right, so enter_XXX() adjusts the patterns, as I
//             said
// <nmatsakis> enter_match() kind of embodies the generic code
// <nmatsakis> it is provided with a function that tests each pattern to see
//             if it might possibly apply and so forth
// <nmatsakis> so, if you have a pattern like {a: _, b: _, _} and one like _
// <nmatsakis> then _ would be expanded to (_, _)
// <nmatsakis> one spot for each of the sub-patterns
// <nmatsakis> enter_opt() is one of the more complex; it covers the fallible
//             cases
// <nmatsakis> enter_rec_or_struct() or enter_tuple() are simpler, since they
//             are infallible patterns
// <nmatsakis> so all patterns must either be records (resp. tuples) or
//             wildcards

/// The above is now outdated in that enter_match() now takes a function that
/// takes the complete row of patterns rather than just the first one.
/// Also, most of the enter_() family functions have been unified with
/// the check_match specialization step.
fn enter_opt<'a, 'p, 'blk, 'tcx>(
             bcx: Block<'blk, 'tcx>,
             _: ast::NodeId,
             dm: &DefMap,
             m: &[Match<'a, 'p, 'blk, 'tcx>],
             opt: &Opt,
             col: usize,
             variant_size: usize,
             val: MatchInput)
             -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_opt(bcx={}, m={:?}, opt={:?}, col={}, val={})",
           bcx.to_str(),
           m,
           *opt,
           col,
           bcx.val_to_string(val.val));
    let _indenter = indenter();

    let ctor = match opt {
        &ConstantValue(ConstantExpr(expr), _) => check_match::ConstantValue(
            const_eval::eval_const_expr(bcx.tcx(), &*expr)
        ),
        &ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => check_match::ConstantRange(
            const_eval::eval_const_expr(bcx.tcx(), &*lo),
            const_eval::eval_const_expr(bcx.tcx(), &*hi)
        ),
        &SliceLengthEqual(n, _) =>
            check_match::Slice(n),
        &SliceLengthGreaterOrEqual(before, after, _) =>
            check_match::SliceWithSubslice(before, after),
        &Variant(_, _, def_id, _) =>
            check_match::Constructor::Variant(def_id)
    };

    let param_env = bcx.tcx().empty_parameter_environment();
    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: param_env,
    };
    enter_match(bcx, dm, m, col, val, |pats|
        check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
    )
}

// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    m: &[Match<'a, 'p, 'blk, 'tcx>],
                                    col: usize)
                                    -> Vec<Opt<'p, 'tcx>> {
    let tcx = bcx.tcx();

    let mut found: Vec<Opt> = vec![];
    for br in m {
        let cur = br.pats[col];
        let debug_loc = match br.pat_renaming_map {
            Some(pat_renaming_map) => {
                match pat_renaming_map.get(&(cur.id, cur.span)) {
                    Some(&id) => DebugLoc::At(id, cur.span),
                    None => DebugLoc::At(cur.id, cur.span),
                }
            }
            None => DebugLoc::None
        };

        let opt = match cur.node {
            hir::PatLit(ref l) => {
                ConstantValue(ConstantExpr(&**l), debug_loc)
            }
            hir::PatIdent(..) | hir::PatEnum(..) | hir::PatStruct(..) => {
                // This is either an enum variant or a variable binding.
                let opt_def = tcx.def_map.borrow().get(&cur.id).map(|d| d.full_def());
                match opt_def {
                    Some(def::DefVariant(enum_id, var_id, _)) => {
                        let variant = tcx.lookup_adt_def(enum_id).variant_with_id(var_id);
                        Variant(variant.disr_val,
                                adt::represent_node(bcx, cur.id),
                                var_id,
                                debug_loc)
                    }
                    _ => continue
                }
            }
            hir::PatRange(ref l1, ref l2) => {
                ConstantRange(ConstantExpr(&**l1), ConstantExpr(&**l2), debug_loc)
            }
            hir::PatVec(ref before, None, ref after) => {
                SliceLengthEqual(before.len() + after.len(), debug_loc)
            }
            hir::PatVec(ref before, Some(_), ref after) => {
                SliceLengthGreaterOrEqual(before.len(), after.len(), debug_loc)
            }
            _ => continue
        };

        if !found.iter().any(|x| x.eq(&opt, tcx)) {
            found.push(opt);
        }
    }
    found
}

struct ExtractedBlock<'blk, 'tcx: 'blk> {
    vals: Vec<ValueRef>,
    bcx: Block<'blk, 'tcx>,
}

fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    repr: &adt::Repr<'tcx>,
                                    disr_val: ty::Disr,
                                    val: MatchInput)
                                    -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_variant_args");
    let args = (0..adt::num_args(repr, disr_val)).map(|i| {
        adt::trans_field_ptr(bcx, repr, val.val, disr_val, i)
    }).collect();

    ExtractedBlock { vals: args, bcx: bcx }
}

/// Helper for converting from the ValueRef that we pass around in the match code, which is always
/// an lvalue, into a Datum. Eventually we should just pass around a Datum and be done with it.
fn match_datum<'tcx>(val: MatchInput, left_ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
    val.to_datum(left_ty)
}

fn bind_subslice_pat(bcx: Block,
                     pat_id: ast::NodeId,
                     val: MatchInput,
                     offset_left: usize,
                     offset_right: usize) -> ValueRef {
    let _icx = push_ctxt("match::bind_subslice_pat");
    let vec_ty = node_id_type(bcx, pat_id);
    let vec_ty_contents = match vec_ty.sty {
        ty::TyBox(ty) => ty,
        ty::TyRef(_, mt) | ty::TyRawPtr(mt) => mt.ty,
        _ => vec_ty
    };
    let unit_ty = vec_ty_contents.sequence_element_type(bcx.tcx());
    let vec_datum = match_datum(val, vec_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);

    let slice_begin = InBoundsGEP(bcx, base, &[C_uint(bcx.ccx(), offset_left)]);
    let slice_len_offset = C_uint(bcx.ccx(), offset_left + offset_right);
    let slice_len = Sub(bcx, len, slice_len_offset, DebugLoc::None);
    let slice_ty = bcx.tcx().mk_imm_ref(bcx.tcx().mk_region(ty::ReStatic),
                                        bcx.tcx().mk_slice(unit_ty));
    let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
    Store(bcx, slice_begin, expr::get_dataptr(bcx, scratch.val));
    Store(bcx, slice_len, expr::get_meta(bcx, scratch.val));
    scratch.val
}

fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 left_ty: Ty<'tcx>,
                                 before: usize,
                                 after: usize,
                                 val: MatchInput)
                                 -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_vec_elems");
    let vec_datum = match_datum(val, left_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);
    let mut elems = vec![];
    elems.extend((0..before).map(|i| GEPi(bcx, base, &[i])));
    elems.extend((0..after).rev().map(|i| {
        InBoundsGEP(bcx, base, &[
            Sub(bcx, len, C_uint(bcx.ccx(), i + 1), DebugLoc::None)
        ])
    }));
    ExtractedBlock { vals: elems, bcx: bcx }
}

// Macro for deciding whether any of the remaining matches fit a given kind of
// pattern. Note that, because the macro is well-typed, either ALL of the
// matches should fit that sort of pattern or NONE (however, some of the
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat {
    ($m:expr, $col:expr, $pattern:pat) => (
        ($m).iter().any(|br| {
            match br.pats[$col].node {
                $pattern => true,
                _ => false
            }
        })
    )
}

fn any_uniq_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, hir::PatBox(_))
}

fn any_region_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, hir::PatRegion(..))
}

fn any_irrefutable_adt_pat(tcx: &ty::ctxt, m: &[Match], col: usize) -> bool {
    m.iter().any(|br| {
        let pat = br.pats[col];
        match pat.node {
            hir::PatTup(_) => true,
            hir::PatStruct(..) => {
                match tcx.def_map.borrow().get(&pat.id).map(|d| d.full_def()) {
                    Some(def::DefVariant(..)) => false,
                    _ => true,
                }
            }
            hir::PatEnum(..) | hir::PatIdent(_, _, None) => {
                match tcx.def_map.borrow().get(&pat.id).map(|d| d.full_def()) {
                    Some(def::DefStruct(..)) => true,
                    _ => false
                }
            }
            _ => false
        }
    })
}

/// What to do when the pattern match fails.
enum FailureHandler {
    Infallible,
    JumpToBasicBlock(BasicBlockRef),
    Unreachable
}

impl FailureHandler {
    fn is_fallible(&self) -> bool {
        match *self {
            Infallible => false,
            _ => true
        }
    }

    fn is_infallible(&self) -> bool {
        !self.is_fallible()
    }

    fn handle_fail(&self, bcx: Block) {
        match *self {
            Infallible =>
                panic!("attempted to panic in a non-panicking panic handler!"),
            JumpToBasicBlock(basic_block) =>
                Br(bcx, basic_block, DebugLoc::None),
            Unreachable =>
                build::Unreachable(bcx)
        }
    }
}

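// Picks which column of patterns to test next. Columns whose patterns are
// all irrefutable are preferred (testing them first avoids duplicating their
// work into every branch); otherwise the column with the most refutable
// patterns wins.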
fn pick_column_to_specialize(def_map: &DefMap, m: &[Match]) -> Option<usize> {
    fn pat_score(def_map: &DefMap, pat: &hir::Pat) -> usize {
        match pat.node {
            hir::PatIdent(_, _, Some(ref inner)) => pat_score(def_map, &**inner),
            _ if pat_is_refutable(def_map, pat) => 1,
            _ => 0
        }
    }

    let column_score = |m: &[Match], col: usize| -> usize {
        let total_score = m.iter()
            .map(|row| row.pats[col])
            .map(|pat| pat_score(def_map, pat))
            .sum();

        // Irrefutable columns always go first, they'd only be duplicated in the branches.
        if total_score == 0 {
            std::usize::MAX
        } else {
            total_score
        }
    };

    let column_contains_any_nonwild_patterns = |&col: &usize| -> bool {
        m.iter().any(|row| match row.pats[col].node {
            hir::PatWild(_) => false,
            _ => true
        })
    };

    (0..m[0].pats.len())
        .filter(column_contains_any_nonwild_patterns)
        .map(|col| (col, column_score(m, col)))
        .max_by(|&(_, score)| score)
        .map(|(col, _)| col)
}

// Compiles a comparison between two things.
fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                              lhs: ValueRef,
                              rhs: ValueRef,
                              rhs_t: Ty<'tcx>,
                              debug_loc: DebugLoc)
                              -> Result<'blk, 'tcx> {
    fn compare_str<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                               lhs_data: ValueRef,
                               lhs_len: ValueRef,
                               rhs_data: ValueRef,
                               rhs_len: ValueRef,
                               rhs_t: Ty<'tcx>,
                               debug_loc: DebugLoc)
                               -> Result<'blk, 'tcx> {
        let did = langcall(cx,
                           None,
                           &format!("comparison of `{}`", rhs_t),
                           StrEqFnLangItem);
        callee::trans_lang_call(cx, did, &[lhs_data, lhs_len, rhs_data, rhs_len], None, debug_loc)
    }

    let _icx = push_ctxt("compare_values");
    if rhs_t.is_scalar() {
        let cmp = compare_scalar_types(cx, lhs, rhs, rhs_t, hir::BiEq, debug_loc);
        return Result::new(cx, cmp);
    }

    match rhs_t.sty {
        ty::TyRef(_, mt) => match mt.ty.sty {
            ty::TyStr => {
                let lhs_data = Load(cx, expr::get_dataptr(cx, lhs));
                let lhs_len = Load(cx, expr::get_meta(cx, lhs));
                let rhs_data = Load(cx, expr::get_dataptr(cx, rhs));
                let rhs_len = Load(cx, expr::get_meta(cx, rhs));
                compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
            }
            ty::TyArray(ty, _) | ty::TySlice(ty) => match ty.sty {
                ty::TyUint(hir::TyU8) => {
                    // NOTE: cast &[u8] and &[u8; N] to &str and abuse the str_eq lang item,
                    // which calls memcmp().
                    let pat_len = val_ty(rhs).element_type().array_length();
                    let ty_str_slice = cx.tcx().mk_static_str();

                    let rhs_data = GEPi(cx, rhs, &[0, 0]);
                    let rhs_len = C_uint(cx.ccx(), pat_len);

                    let lhs_data;
                    let lhs_len;
                    if val_ty(lhs) == val_ty(rhs) {
                        // Both the discriminant and the pattern are thin pointers
                        lhs_data = GEPi(cx, lhs, &[0, 0]);
                        lhs_len = C_uint(cx.ccx(), pat_len);
                    } else {
                        // The discriminant is a fat pointer
                        let llty_str_slice = type_of::type_of(cx.ccx(), ty_str_slice).ptr_to();
                        let lhs_str = PointerCast(cx, lhs, llty_str_slice);
                        lhs_data = Load(cx, expr::get_dataptr(cx, lhs_str));
                        lhs_len = Load(cx, expr::get_meta(cx, lhs_str));
                    }

                    compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
                }
                _ => cx.sess().bug("only byte strings supported in compare_values"),
            },
            _ => cx.sess().bug("only string and byte strings supported in compare_values"),
        },
        _ => cx.sess().bug("only scalars, byte strings, and strings supported in compare_values"),
    }
}

/// For each binding in `data.bindings_map`, adds an appropriate entry into the `fcx.lllocals` map
fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                               bindings_map: &BindingsMap<'tcx>,
                               cs: Option<cleanup::ScopeId>)
                               -> Block<'blk, 'tcx> {
    for (&ident, &binding_info) in bindings_map {
        let (llval, aliases_other_state) = match binding_info.trmode {
            // By value mut binding for a copy type: load from the ptr
            // into the matched value and copy to our alloca
            TrByCopy(llbinding) |
            TrByMoveIntoCopy(llbinding) => {
                let llval = Load(bcx, binding_info.llmatch);
                let lvalue = match binding_info.trmode {
                    TrByCopy(..) =>
                        Lvalue::new("_match::insert_lllocals"),
                    TrByMoveIntoCopy(..) => {
                        // match_input moves from the input into a
                        // separate stack slot.
                        //
                        // E.g. consider moving the value `D(A)` out
                        // of the tuple `(D(A), D(B))` and into the
                        // local variable `x` via the pattern `(x,_)`,
                        // leaving the remainder of the tuple `(_,
                        // D(B))` still to be dropped in the future.
                        //
                        // Thus, here we must zero the place that
                        // we are moving *from*, because we do not yet
                        // track drop flags for a fragmented parent
                        // match input expression.
                        //
                        // Longer term we will be able to map the move
                        // into `(x, _)` up to the parent path that
                        // owns the whole tuple, and mark the
                        // corresponding stack-local drop-flag
                        // tracking the first component of the tuple.
                        let hint_kind = HintKind::ZeroAndMaintain;
                        Lvalue::new_with_hint("_match::insert_lllocals (match_input)",
                                              bcx, binding_info.id, hint_kind)
                    }
                    _ => unreachable!(),
                };
                let datum = Datum::new(llval, binding_info.ty, lvalue);
                call_lifetime_start(bcx, llbinding);
                bcx = datum.store_to(bcx, llbinding);
                if let Some(cs) = cs {
                    bcx.fcx.schedule_lifetime_end(cs, llbinding);
                }

                (llbinding, false)
            },

            // By value move bindings: load from the ptr into the matched value
            TrByMoveRef => (Load(bcx, binding_info.llmatch), true),

            // By ref binding: use the ptr into the matched value
            TrByRef => (binding_info.llmatch, true),
        };

        // A local that aliases some other state must be zeroed, since
        // the other state (e.g. some parent data that we matched
        // into) will still have its subcomponents (such as this
        // local) destructed at the end of the parent's scope. Longer
        // term, we will properly map such parents to the set of
        // unique drop flags for its fragments.
        let hint_kind = if aliases_other_state {
            HintKind::ZeroAndMaintain
        } else {
            HintKind::DontZeroJustUse
        };
        let lvalue = Lvalue::new_with_hint("_match::insert_lllocals (local)",
                                           bcx,
                                           binding_info.id,
                                           hint_kind);
        let datum = Datum::new(llval, binding_info.ty, lvalue);
        if let Some(cs) = cs {
            let opt_datum = lvalue.dropflag_hint(bcx);
            bcx.fcx.schedule_lifetime_end(cs, binding_info.llmatch);
            bcx.fcx.schedule_drop_and_fill_mem(cs, llval, binding_info.ty, opt_datum);
        }

        debug!("binding {} to {}", binding_info.id, bcx.val_to_string(llval));
        bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
        debuginfo::create_match_binding_metadata(bcx, ident.name, binding_info);
    }
    bcx
}

fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     guard_expr: &hir::Expr,
                                     data: &ArmData<'p, 'blk, 'tcx>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     vals: &[MatchInput],
                                     chk: &FailureHandler,
                                     has_genuine_default: bool)
                                     -> Block<'blk, 'tcx> {
    debug!("compile_guard(bcx={}, guard_expr={:?}, m={:?}, vals=[{}])",
           bcx.to_str(),
           guard_expr,
           m,
           vals.iter().map(|v| bcx.val_to_string(v.val)).collect::<Vec<_>>().join(", "));
    let _indenter = indenter();

    let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);

    let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
    let val = val.to_llbool(bcx);

    for (_, &binding_info) in &data.bindings_map {
        if let Some(llbinding) = binding_info.trmode.alloca_if_copy() {
            call_lifetime_end(bcx, llbinding)
        }
    }

    for (_, &binding_info) in &data.bindings_map {
        bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
    }

    with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
        for (_, &binding_info) in &data.bindings_map {
            call_lifetime_end(bcx, binding_info.llmatch);
        }
        match chk {
            // If the default arm is the only one left, move on to the next
            // condition explicitly rather than (possibly) falling back to
            // the default arm.
            &JumpToBasicBlock(_) if m.len() == 1 && has_genuine_default => {
                chk.handle_fail(bcx);
            }
            _ => {
                compile_submatch(bcx, m, vals, chk, has_genuine_default);
            }
        }
        bcx
    })
}

fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        m: &[Match<'a, 'p, 'blk, 'tcx>],
                                        vals: &[MatchInput],
                                        chk: &FailureHandler,
                                        has_genuine_default: bool) {
    debug!("compile_submatch(bcx={}, m={:?}, vals=[{}])",
           bcx.to_str(),
           m,
           vals.iter().map(|v| bcx.val_to_string(v.val)).collect::<Vec<_>>().join(", "));
    let _indenter = indenter();
    let _icx = push_ctxt("match::compile_submatch");
    let mut bcx = bcx;
    if m.is_empty() {
        if chk.is_fallible() {
            chk.handle_fail(bcx);
        }
        return;
    }

    let tcx = bcx.tcx();
    let def_map = &tcx.def_map;
    match pick_column_to_specialize(def_map, m) {
        Some(col) => {
            let val = vals[col];
            if has_nested_bindings(m, col) {
                let expanded = expand_nested_bindings(bcx, m, col, val);
                compile_submatch_continue(bcx,
                                          &expanded[..],
                                          vals,
                                          chk,
                                          col,
                                          val,
                                          has_genuine_default)
            } else {
                compile_submatch_continue(bcx, m, vals, chk, col, val, has_genuine_default)
            }
        }
        None => {
            let data = &m[0].data;
            for &(ref ident, ref value_ptr) in &m[0].bound_ptrs {
                let binfo = *data.bindings_map.get(ident).unwrap();
                call_lifetime_start(bcx, binfo.llmatch);
                if binfo.trmode == TrByRef && type_is_fat_ptr(bcx.tcx(), binfo.ty) {
                    expr::copy_fat_ptr(bcx, *value_ptr, binfo.llmatch);
                } else {
                    Store(bcx, *value_ptr, binfo.llmatch);
                }
            }
            match data.arm.guard {
                Some(ref guard_expr) => {
                    bcx = compile_guard(bcx,
                                        &**guard_expr,
                                        m[0].data,
                                        &m[1..],
                                        vals,
                                        chk,
                                        has_genuine_default);
                }
                _ => ()
            }
            Br(bcx, data.bodycx.llbb, DebugLoc::None);
        }
    }
}

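// Specializes the match on the column chosen by `pick_column_to_specialize`:
// either destructures an irrefutable constructor (struct, tuple, box,
// reference or fixed-length array) and recurses directly, or emits a
// switch/comparison over the column's `Opt`s and recurses on each branch as
// well as on the default matches.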
fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                                 m: &[Match<'a, 'p, 'blk, 'tcx>],
                                                 vals: &[MatchInput],
                                                 chk: &FailureHandler,
                                                 col: usize,
                                                 val: MatchInput,
                                                 has_genuine_default: bool) {
    let fcx = bcx.fcx;
    let tcx = bcx.tcx();
    let dm = &tcx.def_map;

    let mut vals_left = vals[0..col].to_vec();
    vals_left.push_all(&vals[col + 1..]);
    let ccx = bcx.fcx.ccx;

    // Find a real id (we're adding placeholder wildcard patterns, but
    // each column is guaranteed to have at least one real pattern)
    let pat_id = m.iter().map(|br| br.pats[col].id)
                         .find(|&id| id != DUMMY_NODE_ID)
                         .unwrap_or(DUMMY_NODE_ID);

    let left_ty = if pat_id == DUMMY_NODE_ID {
        tcx.mk_nil()
    } else {
        node_id_type(bcx, pat_id)
    };

    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: bcx.tcx().empty_parameter_environment(),
    };
    let adt_vals = if any_irrefutable_adt_pat(bcx.tcx(), m, col) {
        let repr = adt::represent_type(bcx.ccx(), left_ty);
        let arg_count = adt::num_args(&*repr, 0);
        let (arg_count, struct_val) = if type_is_sized(bcx.tcx(), left_ty) {
            (arg_count, val.val)
        } else {
            // For an unsized ADT (i.e. DST struct), we need to treat
            // the last field specially: instead of simply passing a
            // ValueRef pointing to that field, as with all the others,
            // we skip it and instead construct a 'fat ptr' below.
            (arg_count - 1, Load(bcx, expr::get_dataptr(bcx, val.val)))
        };
        let mut field_vals: Vec<ValueRef> = (0..arg_count).map(|ix|
            adt::trans_field_ptr(bcx, &*repr, struct_val, 0, ix)
        ).collect();

        match left_ty.sty {
            ty::TyStruct(def, substs) if !type_is_sized(bcx.tcx(), left_ty) => {
                // The last field is technically unsized but
                // since we can only ever match that field behind
                // a reference we construct a fat ptr here.
                let unsized_ty = def.struct_variant().fields.last().map(|field| {
                    monomorphize::field_ty(bcx.tcx(), substs, field)
                }).unwrap();
                let scratch = alloc_ty(bcx, unsized_ty, "__struct_field_fat_ptr");
                let data = adt::trans_field_ptr(bcx, &*repr, struct_val, 0, arg_count);
                let len = Load(bcx, expr::get_meta(bcx, val.val));
                Store(bcx, data, expr::get_dataptr(bcx, scratch));
                Store(bcx, len, expr::get_meta(bcx, scratch));
                field_vals.push(scratch);
            }
            _ => {}
        }
        Some(field_vals)
    } else if any_uniq_pat(m, col) || any_region_pat(m, col) {
        Some(vec!(Load(bcx, val.val)))
    } else {
        match left_ty.sty {
            ty::TyArray(_, n) => {
                let args = extract_vec_elems(bcx, left_ty, n, 0, val);
                Some(args.vals)
            }
            _ => None
        }
    };
    match adt_vals {
        Some(field_vals) => {
            let pats = enter_match(bcx, dm, m, col, val, |pats|
                check_match::specialize(&mcx, pats,
                                        &check_match::Single, col,
                                        field_vals.len())
            );
            let mut vals: Vec<_> = field_vals.into_iter()
                .map(|v| MatchInput::from_val(v))
                .collect();
            vals.push_all(&vals_left);
            compile_submatch(bcx, &pats, &vals, chk, has_genuine_default);
            return;
        }
        _ => ()
    }

    // Decide what kind of branch we need
    let opts = get_branches(bcx, m, col);
    debug!("options={:?}", opts);
    let mut kind = NoBranch;
    let mut test_val = val.val;
    debug!("test_val={}", bcx.val_to_string(test_val));
    if !opts.is_empty() {
        match opts[0] {
            ConstantValue(..) | ConstantRange(..) => {
                test_val = load_if_immediate(bcx, val.val, left_ty);
                kind = if left_ty.is_integral() {
                    Switch
                } else {
                    Compare
                };
            }
            Variant(_, ref repr, _, _) => {
                let (the_kind, val_opt) = adt::trans_switch(bcx, &**repr, val.val);
                kind = the_kind;
                if let Some(tval) = val_opt { test_val = tval; }
            }
            SliceLengthEqual(..) | SliceLengthGreaterOrEqual(..) => {
                let (_, len) = tvec::get_base_and_len(bcx, val.val, left_ty);
                test_val = len;
                kind = Switch;
            }
        }
    }
    for o in &opts {
        match *o {
            ConstantRange(..) => { kind = Compare; break },
            SliceLengthGreaterOrEqual(..) => { kind = CompareSliceLength; break },
            _ => ()
        }
    }

    let else_cx = match kind {
        NoBranch | Single => bcx,
        _ => bcx.fcx.new_temp_block("match_else")
    };
    let sw = if kind == Switch {
        build::Switch(bcx, test_val, else_cx.llbb, opts.len())
    } else {
        C_int(ccx, 0) // Placeholder for when not using a switch
    };

    let defaults = enter_default(else_cx, dm, m, col, val);
    let exhaustive = chk.is_infallible() && defaults.is_empty();
    let len = opts.len();

    // Compile subtrees for each option
    for (i, opt) in opts.iter().enumerate() {
        // In some cases of range and vector pattern matching, we need to
        // override the failure case so that instead of failing, it proceeds
        // to try more matching. branch_chk, then, is the proper failure case
        // for the current conditional branch.
        let mut branch_chk = None;
        let mut opt_cx = else_cx;
        let debug_loc = opt.debug_loc();

        if !exhaustive || i + 1 < len {
            opt_cx = bcx.fcx.new_temp_block("match_case");
            match kind {
                Single => Br(bcx, opt_cx.llbb, debug_loc),
                Switch => {
                    match opt.trans(bcx) {
                        SingleResult(r) => {
                            AddCase(sw, r.val, opt_cx.llbb);
                        }
                        _ => {
                            bcx.sess().bug(
                                "in compile_submatch, expected \
                                 opt.trans() to return a SingleResult")
                        }
                    }
                }
                Compare | CompareSliceLength => {
                    let t = if kind == Compare {
                        left_ty
                    } else {
                        tcx.types.usize // vector length
                    };
                    let Result { bcx: after_cx, val: matches } = {
                        match opt.trans(bcx) {
                            SingleResult(Result { bcx, val }) => {
                                compare_values(bcx, test_val, val, t, debug_loc)
                            }
                            RangeResult(Result { val: vbegin, .. },
                                        Result { bcx, val: vend }) => {
                                let llge = compare_scalar_types(bcx, test_val, vbegin,
                                                                t, hir::BiGe, debug_loc);
                                let llle = compare_scalar_types(bcx, test_val, vend,
                                                                t, hir::BiLe, debug_loc);
                                Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
                            }
                            LowerBound(Result { bcx, val }) => {
                                Result::new(bcx, compare_scalar_types(bcx, test_val,
                                                                      val, t, hir::BiGe,
                                                                      debug_loc))
                            }
                        }
                    };
                    bcx = fcx.new_temp_block("compare_next");

                    // If none of the sub-cases match, and the current condition
                    // is guarded or has multiple patterns, move on to the next
                    // condition, if there is any, rather than falling back to
                    // the default.
                    let guarded = m[i].data.arm.guard.is_some();
                    let multi_pats = m[i].pats.len() > 1;
                    if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
                        branch_chk = Some(JumpToBasicBlock(bcx.llbb));
                    }
                    CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb, debug_loc);
                }
                _ => ()
            }
        } else if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, debug_loc);
        }

        let mut size = 0;
        let mut unpacked = Vec::new();
        match *opt {
            Variant(disr_val, ref repr, _, _) => {
                let ExtractedBlock {vals: argvals, bcx: new_bcx} =
                    extract_variant_args(opt_cx, &**repr, disr_val, val);
                size = argvals.len();
                unpacked = argvals;
                opt_cx = new_bcx;
            }
            SliceLengthEqual(len, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, len, 0, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            SliceLengthGreaterOrEqual(before, after, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, before, after, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            ConstantValue(..) | ConstantRange(..) => ()
        }
        let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
        let mut opt_vals: Vec<_> = unpacked.into_iter()
            .map(|v| MatchInput::from_val(v))
            .collect();
        opt_vals.push_all(&vals_left[..]);
        compile_submatch(opt_cx,
                         &opt_ms[..],
                         &opt_vals[..],
                         branch_chk.as_ref().unwrap_or(chk),
                         has_genuine_default);
    }

    // Compile the fall-through case, if any
    if !exhaustive && kind != Single {
        if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, DebugLoc::None);
        }
        match chk {
            // If there is only one default arm left, move on to the next
            // condition explicitly rather than (eventually) falling back to
            // the last default arm.
            &JumpToBasicBlock(_) if defaults.len() == 1 && has_genuine_default => {
                chk.handle_fail(else_cx);
            }
            _ => {
                compile_submatch(else_cx,
                                 &defaults[..],
                                 &vals_left[..],
                                 chk,
                                 has_genuine_default);
            }
        }
    }
}

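/// Translates a `match` expression, delegating to `trans_match_inner` with
/// the node id of the `match` expression itself.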
pub fn trans_match<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               match_expr: &hir::Expr,
                               discr_expr: &hir::Expr,
                               arms: &[hir::Arm],
                               dest: Dest)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match");
    trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}

/// Checks whether the binding in `discr` is assigned to anywhere in the expression `body`
fn is_discr_reassigned(bcx: Block, discr: &hir::Expr, body: &hir::Expr) -> bool {
    let (vid, field) = match discr.node {
        hir::ExprPath(..) => match bcx.def(discr.id) {
            def::DefLocal(vid) | def::DefUpvar(vid, _, _) => (vid, None),
            _ => return false
        },
        hir::ExprField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id).map(|d| d.full_def()) {
                Some(def::DefLocal(vid)) | Some(def::DefUpvar(vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::NamedField(field.node.name)))
        },
        hir::ExprTupField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id).map(|d| d.full_def()) {
                Some(def::DefLocal(vid)) | Some(def::DefUpvar(vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::PositionalField(field.node)))
        },
        _ => return false
    };

    let mut rc = ReassignmentChecker {
        node: vid,
        field: field,
        reassigned: false
    };
    {
        let infcx = infer::normalizing_infer_ctxt(bcx.tcx(), &bcx.tcx().tables);
        let mut visitor = euv::ExprUseVisitor::new(&mut rc, &infcx);
        visitor.walk_expr(body);
    }
    rc.reassigned
}

struct ReassignmentChecker {
    node: ast::NodeId,
    field: Option<mc::FieldName>,
    reassigned: bool
}

// Determine if the expression we're matching on is reassigned to within
// the body of the match's arm.
// We only care for the `mutate` callback since this check only matters
// for cases where the matched value is moved.
impl<'tcx> euv::Delegate<'tcx> for ReassignmentChecker {
    fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: euv::ConsumeMode) {}
    fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::MatchMode) {}
    fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::ConsumeMode) {}
    fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
              _: ty::BorrowKind, _: euv::LoanCause) {}
    fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}

    fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
        match cmt.cat {
            mc::cat_upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
            mc::cat_local(vid) => self.reassigned |= self.node == vid,
            mc::cat_interior(ref base_cmt, mc::InteriorField(field)) => {
                match base_cmt.cat {
                    mc::cat_upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
                    mc::cat_local(vid) => {
                        self.reassigned |= self.node == vid &&
                            (self.field.is_none() || Some(field) == self.field)
                    }
                    _ => {}
                }
            }
            _ => {}
        }
    }
}

fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
                                   discr: &hir::Expr, body: &hir::Expr)
                                   -> BindingsMap<'tcx> {
    // Create the bindings map, which is a mapping from each binding name
    // to an alloca() that will be the value for that local variable.
    // Note that we use the names because each binding will have many ids
    // from the various alternatives.
    let ccx = bcx.ccx();
    let tcx = bcx.tcx();
    let reassigned = is_discr_reassigned(bcx, discr, body);
    let mut bindings_map = FnvHashMap();
    pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
        let ident = path1.node;
        let name = ident.name;
        let variable_ty = node_id_type(bcx, p_id);
        let llvariable_ty = type_of::type_of(ccx, variable_ty);
        let tcx = bcx.tcx();
        let param_env = tcx.empty_parameter_environment();

        let llmatch;
        let trmode;
        let moves_by_default = variable_ty.moves_by_default(&param_env, span);
        match bm {
            hir::BindByValue(_) if !moves_by_default || reassigned =>
            {
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
                let llcopy = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = if moves_by_default {
                    TrByMoveIntoCopy(llcopy)
                } else {
                    TrByCopy(llcopy)
                };
            }
            hir::BindByValue(_) => {
                // in this case, the final type of the variable will be T,
                // but during matching we need to store a *T as explained
                // above
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), &bcx.name(name));
                trmode = TrByMoveRef;
            }
            hir::BindByRef(_) => {
                llmatch = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = TrByRef;
            }
        };
        bindings_map.insert(ident, BindingInfo {
            llmatch: llmatch,
            trmode: trmode,
            id: p_id,
            span: span,
            ty: variable_ty,
        });
    });
    return bindings_map;
}

fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
                                 match_id: ast::NodeId,
                                 discr_expr: &hir::Expr,
                                 arms: &[hir::Arm],
                                 dest: Dest) -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match_inner");
    let fcx = scope_cx.fcx;
    let mut bcx = scope_cx;
    let tcx = bcx.tcx();

    let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
                                                               "match"));
    if bcx.unreachable.get() {
        return bcx;
    }

    let t = node_id_type(bcx, discr_expr.id);
    let chk = if t.is_empty(tcx) {
        Unreachable
    } else {
        Infallible
    };

    let arm_datas: Vec<ArmData> = arms.iter().map(|arm| ArmData {
        bodycx: fcx.new_id_block("case_body", arm.body.id),
        arm: arm,
        bindings_map: create_bindings_map(bcx, &*arm.pats[0], discr_expr, &*arm.body)
    }).collect();

    let mut pat_renaming_map = if scope_cx.sess().opts.debuginfo != NoDebugInfo {
        Some(FnvHashMap())
    } else {
        None
    };

    let arm_pats: Vec<Vec<P<hir::Pat>>> = {
        let mut static_inliner = StaticInliner::new(scope_cx.tcx(),
                                                    pat_renaming_map.as_mut());
        arm_datas.iter().map(|arm_data| {
            arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect()
        }).collect()
    };

    let mut matches = Vec::new();
    for (arm_data, pats) in arm_datas.iter().zip(&arm_pats) {
        matches.extend(pats.iter().map(|p| Match {
            pats: vec![&**p],
            data: arm_data,
            bound_ptrs: Vec::new(),
            pat_renaming_map: pat_renaming_map.as_ref()
        }));
    }

    // `compile_submatch` works on one column of arm patterns at a time and
    // then peels that column off. So as we progress, it may become
    // impossible to tell whether we have a genuine default arm, i.e.
    // `_ => foo` or not. Sometimes it is important to know that in order
    // to decide whether to move on to the next condition or fall back
    // to the default arm.
    let has_default = arms.last().map_or(false, |arm| {
        arm.pats.len() == 1
        && arm.pats.last().unwrap().node == hir::PatWild(hir::PatWildSingle)
    });

    compile_submatch(bcx, &matches[..], &[discr_datum.match_input()], &chk, has_default);

    let mut arm_cxs = Vec::new();
    for arm_data in &arm_datas {
        let mut bcx = arm_data.bodycx;

        // insert bindings into the lllocals map and add cleanups
        let cs = fcx.push_custom_cleanup_scope();
        bcx = insert_lllocals(bcx, &arm_data.bindings_map, Some(cleanup::CustomScope(cs)));
        bcx = expr::trans_into(bcx, &*arm_data.arm.body, dest);
        bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cs);
        arm_cxs.push(bcx);
    }

    bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
    return bcx;
}

/// Generates code for a local variable declaration like `let <pat>;` or
/// `let <pat> = <opt_init_expr>`.
pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               local: &hir::Local)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_local");

    let tcx = bcx.tcx();
    let pat = &*local.pat;

    fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       pat: &hir::Pat)
                                       -> Block<'blk, 'tcx> {
        let _icx = push_ctxt("create_dummy_locals");
        // create dummy memory for the variables if we have no
        // value to store into them immediately
        let tcx = bcx.tcx();
        pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| {
            let scope = cleanup::var_scope(tcx, p_id);
            bcx = mk_binding_alloca(
                bcx, p_id, path1.node.name, scope, (),
                "_match::store_local::create_dummy_locals",
                |(), bcx, Datum { val: llval, ty, kind }| {
                    // Dummy-locals start out uninitialized, so set their
                    // drop-flag hints (if any) to "moved."
                    if let Some(hint) = kind.dropflag_hint(bcx) {
                        let moved_hint = adt::DTOR_MOVED_HINT;
                        debug!("store moved_hint={} for hint={:?}, uninitialized dummy",
                               moved_hint, hint);
                        Store(bcx, C_u8(bcx.fcx.ccx, moved_hint), hint.to_value().value());
                    }

                    if kind.drop_flag_info.must_zero() {
                        // if no drop-flag hint, or the hint requires
                        // we maintain the embedded drop-flag, then
                        // mark embedded drop-flag(s) as moved
                        // (i.e. "already dropped").
                        drop_done_fill_mem(bcx, llval, ty);
                    }
                    bcx
                });
        });
        bcx
    }

    match local.init {
        Some(ref init_expr) => {
            // Optimize the "let x = expr" case. This just writes
            // the result of evaluating `expr` directly into the alloca
            // for `x`. Often the general path results in similar or the
            // same code post-optimization, but not always. In particular,
            // in unsafe code, you can have expressions like
            //
            //    let x = intrinsics::uninit();
            //
            // In such cases, the more general path is unsafe, because
            // it assumes it is matching against a valid value.
            match simple_identifier(&*pat) {
                Some(ident) => {
                    let var_scope = cleanup::var_scope(tcx, local.id);
                    return mk_binding_alloca(
                        bcx, pat.id, ident.name, var_scope, (),
                        "_match::store_local",
                        |(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &**init_expr,
                                                                         expr::SaveIn(v)));
                }
                _ => {}
            }

            // General path.
            let init_datum =
                unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &**init_expr, "let"));
            if bcx.sess().asm_comments() {
                add_comment(bcx, "creating zeroable ref llval");
            }
            let var_scope = cleanup::var_scope(tcx, local.id);
            bind_irrefutable_pat(bcx, pat, init_datum.match_input(), var_scope)
        }
        None => {
            create_dummy_locals(bcx, pat)
        }
    }
}

fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
                                       p_id: ast::NodeId,
                                       name: ast::Name,
                                       cleanup_scope: cleanup::ScopeId,
                                       arg: A,
                                       caller_name: &'static str,
                                       populate: F)
                                       -> Block<'blk, 'tcx> where
    F: FnOnce(A, Block<'blk, 'tcx>, Datum<'tcx, Lvalue>) -> Block<'blk, 'tcx>,
{
    let var_ty = node_id_type(bcx, p_id);

    // Allocate memory on stack for the binding.
    let llval = alloc_ty(bcx, var_ty, &bcx.name(name));
    let lvalue = Lvalue::new_with_hint(caller_name, bcx, p_id, HintKind::DontZeroJustUse);
    let datum = Datum::new(llval, var_ty, lvalue);

    // Subtle: be sure that we *populate* the memory *before*
    // we schedule the cleanup.
    call_lifetime_start(bcx, llval);
    let bcx = populate(arg, bcx, datum);
    bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
    bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty, lvalue.dropflag_hint(bcx));

    // Now that memory is initialized and has cleanup scheduled,
    // insert datum into the local variable map.
    bcx.fcx.lllocals.borrow_mut().insert(p_id, datum);
    bcx
}

/// A simple version of the pattern matching code that only handles
/// irrefutable patterns. This is used in let/argument patterns,
/// not in match statements. Unifying this code with the code above
/// sounds nice, but in practice it produces very inefficient code,
/// since the match code is so much more general. In most cases,
/// LLVM is able to optimize the code, but it causes longer compile
/// times and makes the generated code nigh impossible to read.
///
/// # Arguments
/// - bcx: starting basic block context
/// - pat: the irrefutable pattern being matched.
/// - val: the value being matched -- must be an lvalue (by ref, with cleanup)
pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        pat: &hir::Pat,
                                        val: MatchInput,
                                        cleanup_scope: cleanup::ScopeId)
                                        -> Block<'blk, 'tcx> {
1771 debug!("bind_irrefutable_pat(bcx={}, pat={:?})",
1775 if bcx.sess().asm_comments() {
1776 add_comment(bcx, &format!("bind_irrefutable_pat(pat={:?})",
1780 let _indenter = indenter();
1782 let _icx = push_ctxt("match::bind_irrefutable_pat");
1784 let tcx = bcx.tcx();
1785 let ccx = bcx.ccx();
        hir::PatIdent(pat_binding_mode, ref path1, ref inner) => {
            if pat_is_binding(&tcx.def_map, &*pat) {
                // Allocate the stack slot where the value of this
                // binding will live and place it into the appropriate
                // map.
                bcx = mk_binding_alloca(
                    bcx, pat.id, path1.node.name, cleanup_scope, (),
                    "_match::bind_irrefutable_pat",
                    |(), bcx, Datum { val: llval, ty, kind: _ }| {
                        match pat_binding_mode {
                            hir::BindByValue(_) => {
                                // By value binding: move the value that `val`
                                // points at into the binding's stack slot.
                                let d = val.to_datum(ty);
                                d.store_to(bcx, llval)
                            }

                            hir::BindByRef(_) => {
                                // By ref binding: the value of the variable
                                // is the pointer `val` itself or fat pointer referenced by `val`
                                if type_is_fat_ptr(bcx.tcx(), ty) {
                                    expr::copy_fat_ptr(bcx, val.val, llval);
                                } else {
                                    Store(bcx, val.val, llval);
                                }
                                bcx
                            }
                        }
                    });
            }

            if let Some(ref inner_pat) = *inner {
                bcx = bind_irrefutable_pat(bcx, &**inner_pat, val, cleanup_scope);
            }
        }
        hir::PatEnum(_, ref sub_pats) => {
            let opt_def = bcx.tcx().def_map.borrow().get(&pat.id).map(|d| d.full_def());
            match opt_def {
                Some(def::DefVariant(enum_id, var_id, _)) => {
                    let repr = adt::represent_node(bcx, pat.id);
                    let vinfo = ccx.tcx().lookup_adt_def(enum_id).variant_with_id(var_id);
                    let args = extract_variant_args(bcx,
                                                    &*repr,
                                                    vinfo.disr_val,
                                                    val);
                    if let Some(ref sub_pat) = *sub_pats {
                        for (i, &argval) in args.vals.iter().enumerate() {
                            bcx = bind_irrefutable_pat(
                                bcx,
                                &*sub_pat[i],
                                MatchInput::from_val(argval),
                                cleanup_scope);
                        }
                    }
                }
                Some(def::DefStruct(..)) => {
                    match *sub_pats {
                        None => {
                            // This is a unit-like struct. Nothing to do here.
                        }
                        Some(ref elems) => {
                            // This is the tuple struct case.
                            let repr = adt::represent_node(bcx, pat.id);
                            for (i, elem) in elems.iter().enumerate() {
                                let fldptr = adt::trans_field_ptr(bcx, &*repr,
                                                                  val.val, 0, i);
                                bcx = bind_irrefutable_pat(
                                    bcx,
                                    &*elem,
                                    MatchInput::from_val(fldptr),
                                    cleanup_scope);
                            }
                        }
                    }
                }
                _ => {
                    // Nothing to do here.
                }
            }
        }
        hir::PatStruct(_, ref fields, _) => {
            let tcx = bcx.tcx();
            let pat_ty = node_id_type(bcx, pat.id);
            let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
            let pat_v = VariantInfo::of_node(tcx, pat_ty, pat.id);
            for f in fields {
                let name = f.node.ident.name;
                let fldptr = adt::trans_field_ptr(
                    bcx,
                    &*pat_repr,
                    val.val,
                    pat_v.discr,
                    pat_v.field_index(name));
                bcx = bind_irrefutable_pat(bcx,
                                           &*f.node.pat,
                                           MatchInput::from_val(fldptr),
                                           cleanup_scope);
            }
        }
        hir::PatTup(ref elems) => {
            let repr = adt::represent_node(bcx, pat.id);
            for (i, elem) in elems.iter().enumerate() {
                let fldptr = adt::trans_field_ptr(bcx, &*repr, val.val, 0, i);
                bcx = bind_irrefutable_pat(
                    bcx,
                    &**elem,
                    MatchInput::from_val(fldptr),
                    cleanup_scope);
            }
        }
        hir::PatBox(ref inner) => {
            let llbox = Load(bcx, val.val);
            bcx = bind_irrefutable_pat(
                bcx, &**inner, MatchInput::from_val(llbox), cleanup_scope);
        }
        hir::PatRegion(ref inner, _) => {
            let loaded_val = Load(bcx, val.val);
            bcx = bind_irrefutable_pat(
                bcx,
                &**inner,
                MatchInput::from_val(loaded_val),
                cleanup_scope);
        }
        hir::PatVec(ref before, ref slice, ref after) => {
            let pat_ty = node_id_type(bcx, pat.id);
            let mut extracted = extract_vec_elems(bcx, pat_ty, before.len(), after.len(), val);
            match slice {
                &Some(_) => {
                    extracted.vals.insert(
                        before.len(),
                        bind_subslice_pat(bcx, pat.id, val, before.len(), after.len())
                    );
                }
                &None => ()
            }
            bcx = before
                .iter()
                .chain(slice.iter())
                .chain(after.iter())
                .zip(extracted.vals)
                .fold(bcx, |bcx, (inner, elem)| {
                    bind_irrefutable_pat(
                        bcx,
                        &**inner,
                        MatchInput::from_val(elem),
                        cleanup_scope)
                });
        }
        hir::PatQPath(..) | hir::PatWild(_) | hir::PatLit(_) |
        hir::PatRange(_, _) => ()
    }
    return bcx;
}