1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use rustc_data_structures::graph;
// Builder that walks a function body's HIR and incrementally emits the
// control-flow graph. NOTE(review): this paste is fragmentary (stray
// line numbers, skipped lines) — other fields (e.g. the graph itself,
// `fn_exit`) and the closing brace are not visible here.
21 struct CFGBuilder<'a, 'tcx: 'a> {
// Type context; used below for method-call and region-scope queries.
22 tcx: &'a ty::ctxt<'tcx>,
// Stack of enclosing loops; `break`/`continue` resolve against the
// innermost matching entry (see `find_scope`).
25 loop_scopes: Vec<LoopScope>,
// Per-loop record pushed while building a `loop`/`while` body, so that
// `break`/`continue` expressions know which CFG nodes to target.
// NOTE(review): the `struct LoopScope {` header line is missing from
// this fragmentary paste.
28 #[derive(Copy, Clone)]
30 loop_id: ast::NodeId, // id of loop/while node
31 continue_index: CFGIndex, // where `continue` jumps (the loop head)
32 break_index: CFGIndex, // where `break` jumps (the loop's exit node)
// Entry point: builds and returns the CFG for `blk`, a function body.
// Creates distinguished Entry/Exit nodes, runs the builder over the
// block, then wires the block's normal exit into the function exit.
35 pub fn construct(tcx: &ty::ctxt,
36 blk: &hir::Block) -> CFG {
37 let mut graph = graph::Graph::new();
38 let entry = graph.add_node(CFGNodeData::Entry);
40 // `fn_exit` is target of return exprs, which lies somewhere
41 // outside input `blk`. (Distinguishing `fn_exit` and `block_exit`
42 // also resolves chicken-and-egg problem that arises if you try to
43 // have return exprs jump to `block_exit` during construction.)
44 let fn_exit = graph.add_node(CFGNodeData::Exit);
// NOTE(review): builder initialization is truncated in this paste —
// the `graph`/`fn_exit` field initializers, the closing brace, and the
// `let block_exit` binding are missing between these lines.
47 let mut cfg_builder = CFGBuilder {
51 loop_scopes: Vec::new()
53 block_exit = cfg_builder.block(blk, entry);
// Normal fall-through from the body reaches the function exit.
54 cfg_builder.add_contained_edge(block_exit, fn_exit);
// Destructure the builder to recover the finished graph.
55 let CFGBuilder {graph, ..} = cfg_builder;
61 impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
// CFG for a block: each statement in order, then the optional tail
// expression, then one AST node for the block itself. Returns the
// block's exit node.
62 fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex {
63 let mut stmts_exit = pred;
// Thread each statement's exit into the next statement's entry.
64 for stmt in &blk.stmts {
65 stmts_exit = self.stmt(&**stmt, stmts_exit);
// Tail expression (if any) evaluates after all statements.
68 let expr_exit = self.opt_expr(&blk.expr, stmts_exit);
70 self.add_ast_node(blk.id, &[expr_exit])
// CFG for a single statement; returns its exit node.
// NOTE(review): the `match stmt.node {` dispatch line and the arms'
// closing braces are missing from this fragmentary paste.
73 fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex {
// Declaration statement (`let` / item): delegate to `decl`.
75 hir::StmtDecl(ref decl, id) => {
76 let exit = self.decl(&**decl, pred);
77 self.add_ast_node(id, &[exit])
// Expression statements, with or without a trailing semicolon.
80 hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) => {
81 let exit = self.expr(&**expr, pred);
82 self.add_ast_node(id, &[exit])
// CFG for a declaration: a `let` first evaluates its initializer (if
// any), then binds the pattern. NOTE(review): match header, other arms
// (presumably `DeclItem`), and closing braces are missing in this paste.
87 fn decl(&mut self, decl: &hir::Decl, pred: CFGIndex) -> CFGIndex {
89 hir::DeclLocal(ref local) => {
90 let init_exit = self.opt_expr(&local.init, pred);
91 self.pat(&*local.pat, init_exit)
// CFG for a pattern: subpatterns are visited in source order, then one
// AST node is added for the pattern itself. NOTE(review): the match
// header, some leaf arms (wildcard/literal), and closing braces are
// missing from this fragmentary paste.
100 fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
// Leaf patterns with no subpattern: a single pass-through node.
102 hir::PatIdent(_, _, None) |
103 hir::PatEnum(_, None) |
108 self.add_ast_node(pat.id, &[pred])
// Patterns wrapping exactly one subpattern: visit it first.
111 hir::PatBox(ref subpat) |
112 hir::PatRegion(ref subpat, _) |
113 hir::PatIdent(_, _, Some(ref subpat)) => {
114 let subpat_exit = self.pat(&**subpat, pred);
115 self.add_ast_node(pat.id, &[subpat_exit])
// Patterns with a list of subpatterns, all of which must match.
118 hir::PatEnum(_, Some(ref subpats)) |
119 hir::PatTup(ref subpats) => {
120 let pats_exit = self.pats_all(subpats.iter(), pred);
121 self.add_ast_node(pat.id, &[pats_exit])
// Struct pattern: visit each field's pattern. NOTE(review): the
// `let pats_exit =` binding line is missing before this call.
124 hir::PatStruct(_, ref subpats, _) => {
126 self.pats_all(subpats.iter().map(|f| &f.node.pat), pred);
127 self.add_ast_node(pat.id, &[pats_exit])
// Slice pattern: prefix, then slice middle, then suffix, in order.
130 hir::PatVec(ref pre, ref vec, ref post) => {
131 let pre_exit = self.pats_all(pre.iter(), pred);
132 let vec_exit = self.pats_all(vec.iter(), pre_exit);
133 let post_exit = self.pats_all(post.iter(), vec_exit);
134 self.add_ast_node(pat.id, &[post_exit])
// Chains a sequence of patterns linearly: each pattern's exit becomes
// the next one's predecessor; returns the last exit (or `pred` if the
// iterator is empty). NOTE(review): the `pats: I` parameter line is
// missing from this paste.
139 fn pats_all<'b, I: Iterator<Item=&'b P<hir::Pat>>>(&mut self,
141 pred: CFGIndex) -> CFGIndex {
142 //! Handles case where all of the patterns must match.
143 pats.fold(pred, |pred, pat| self.pat(&**pat, pred))
// The heart of CFG construction: one arm per expression kind, each
// returning the expression's exit node. NOTE(review): the
// `match expr.node {` dispatch line, the large ASCII-art comments, and
// many arm closing braces are missing from this fragmentary paste; the
// `// 1`, `// 2`, ... markers refer to nodes in those elided diagrams.
146 fn expr(&mut self, expr: &hir::Expr, pred: CFGIndex) -> CFGIndex {
148 hir::ExprBlock(ref blk) => {
149 let blk_exit = self.block(&**blk, pred);
150 self.add_ast_node(expr.id, &[blk_exit])
// `if` without `else`: both the cond's exit (false path) and the then
// block's exit flow into the `if` node.
153 hir::ExprIf(ref cond, ref then, None) => {
168 let cond_exit = self.expr(&**cond, pred); // 1
169 let then_exit = self.block(&**then, cond_exit); // 2
170 self.add_ast_node(expr.id, &[cond_exit, then_exit]) // 3,4
// `if`/`else`: both branches start from the cond's exit and merge.
173 hir::ExprIf(ref cond, ref then, Some(ref otherwise)) => {
188 let cond_exit = self.expr(&**cond, pred); // 1
189 let then_exit = self.block(&**then, cond_exit); // 2
190 let else_exit = self.expr(&**otherwise, cond_exit); // 3
191 self.add_ast_node(expr.id, &[then_exit, else_exit]) // 4, 5
// `while`: dummy loopback node -> cond; cond's exit feeds both the
// loop's exit node and the body; body loops back.
194 hir::ExprWhile(ref cond, ref body, _) => {
209 // Note that `break` and `continue` statements
210 // may cause additional edges.
212 // Is the condition considered part of the loop?
213 let loopback = self.add_dummy_node(&[pred]); // 1
214 let cond_exit = self.expr(&**cond, loopback); // 2
215 let expr_exit = self.add_ast_node(expr.id, &[cond_exit]); // 3
216 self.loop_scopes.push(LoopScope {
218 continue_index: loopback,
219 break_index: expr_exit
221 let body_exit = self.block(&**body, cond_exit); // 4
222 self.add_contained_edge(body_exit, loopback); // 5
223 self.loop_scopes.pop();
// `loop`: exit node has NO intrinsic predecessors — only `break`
// edges can reach it.
227 hir::ExprLoop(ref body, _) => {
239 // Note that `break` and `loop` statements
240 // may cause additional edges.
242 let loopback = self.add_dummy_node(&[pred]); // 1
243 let expr_exit = self.add_ast_node(expr.id, &[]); // 2
244 self.loop_scopes.push(LoopScope {
246 continue_index: loopback,
247 break_index: expr_exit,
249 let body_exit = self.block(&**body, loopback); // 3
250 self.add_contained_edge(body_exit, loopback); // 4
251 self.loop_scopes.pop();
255 hir::ExprMatch(ref discr, ref arms, _) => {
256 self.match_(expr.id, &discr, &arms, pred)
// Short-circuiting `&&` / `||`: the rhs may be skipped, so both the
// lhs exit and the rhs exit are predecessors of the result node.
259 hir::ExprBinary(op, ref l, ref r) if ::rustc_front::util::lazy_binop(op.node) => {
274 let l_exit = self.expr(&**l, pred); // 1
275 let r_exit = self.expr(&**r, l_exit); // 2
276 self.add_ast_node(expr.id, &[l_exit, r_exit]) // 3,4
// `return`: edge to fn_exit; successor node is unreachable.
279 hir::ExprRet(ref v) => {
280 let v_exit = self.opt_expr(v, pred);
281 let b = self.add_ast_node(expr.id, &[v_exit]);
282 self.add_returning_edge(expr, b);
283 self.add_unreachable_node()
// `break`: exiting edge to the target loop's break_index.
286 hir::ExprBreak(label) => {
287 let loop_scope = self.find_scope(expr, label);
288 let b = self.add_ast_node(expr.id, &[pred]);
289 self.add_exiting_edge(expr, b,
290 loop_scope, loop_scope.break_index);
291 self.add_unreachable_node()
// `continue` (ExprAgain): exiting edge to the loop head.
294 hir::ExprAgain(label) => {
295 let loop_scope = self.find_scope(expr, label);
296 let a = self.add_ast_node(expr.id, &[pred]);
297 self.add_exiting_edge(expr, a,
298 loop_scope, loop_scope.continue_index);
299 self.add_unreachable_node()
// The remaining arms are straight-line evaluation of subexpressions,
// or calls (which may diverge) when overloaded operators are involved.
302 hir::ExprVec(ref elems) => {
303 self.straightline(expr, pred, elems.iter().map(|e| &**e))
306 hir::ExprCall(ref func, ref args) => {
307 self.call(expr, pred, &**func, args.iter().map(|e| &**e))
// Method call: receiver is args[0], then the remaining arguments.
310 hir::ExprMethodCall(_, _, ref args) => {
311 self.call(expr, pred, &*args[0], args[1..].iter().map(|e| &**e))
// Overloaded index / binary operator: treated as a method call.
314 hir::ExprIndex(ref l, ref r) |
315 hir::ExprBinary(_, ref l, ref r) if self.tcx.is_method_call(expr.id) => {
316 self.call(expr, pred, &**l, Some(&**r).into_iter())
319 hir::ExprRange(ref start, ref end) => {
320 let fields = start.as_ref().map(|e| &**e).into_iter()
321 .chain(end.as_ref().map(|e| &**e));
322 self.straightline(expr, pred, fields)
// Overloaded unary operator: also a method call on the operand.
325 hir::ExprUnary(_, ref e) if self.tcx.is_method_call(expr.id) => {
326 self.call(expr, pred, &**e, None::<hir::Expr>.iter())
329 hir::ExprTup(ref exprs) => {
330 self.straightline(expr, pred, exprs.iter().map(|e| &**e))
// Struct literal: fields first, then the functional-update base.
333 hir::ExprStruct(_, ref fields, ref base) => {
334 let field_cfg = self.straightline(expr, pred, fields.iter().map(|f| &*f.expr));
335 self.opt_expr(base, field_cfg)
338 hir::ExprRepeat(ref elem, ref count) => {
339 self.straightline(expr, pred, [elem, count].iter().map(|&e| &**e))
// Assignment: note rhs is evaluated BEFORE lhs ([r, l] order).
342 hir::ExprAssign(ref l, ref r) |
343 hir::ExprAssignOp(_, ref l, ref r) => {
344 self.straightline(expr, pred, [r, l].iter().map(|&e| &**e))
347 hir::ExprBox(Some(ref l), ref r) |
348 hir::ExprIndex(ref l, ref r) |
349 hir::ExprBinary(_, ref l, ref r) => { // NB: && and || handled earlier
350 self.straightline(expr, pred, [l, r].iter().map(|&e| &**e))
// Single-subexpression forms.
353 hir::ExprBox(None, ref e) |
354 hir::ExprAddrOf(_, ref e) |
355 hir::ExprCast(ref e, _) |
356 hir::ExprUnary(_, ref e) |
357 hir::ExprParen(ref e) |
358 hir::ExprField(ref e, _) |
359 hir::ExprTupField(ref e, _) => {
360 self.straightline(expr, pred, Some(&**e).into_iter())
// Inline asm: inputs evaluated, then outputs, then the asm node.
// NOTE(review): the closures' tail expressions and closing delimiters
// are missing from this paste.
363 hir::ExprInlineAsm(ref inline_asm) => {
364 let inputs = inline_asm.inputs.iter();
365 let outputs = inline_asm.outputs.iter();
366 let post_inputs = self.exprs(inputs.map(|a| {
367 debug!("cfg::construct InlineAsm id:{} input:{:?}", expr.id, a);
368 let &(_, ref expr) = a;
371 let post_outputs = self.exprs(outputs.map(|a| {
372 debug!("cfg::construct InlineAsm id:{} output:{:?}", expr.id, a);
373 let &(_, ref expr, _) = a;
376 self.add_ast_node(expr.id, &[post_outputs])
// Leaf expressions with no control-flow-relevant subexpressions.
// NOTE(review): other leaf variants (e.g. literals) between these
// lines are missing from this paste.
379 hir::ExprClosure(..) |
381 hir::ExprPath(..) => {
382 self.straightline(expr, pred, None::<hir::Expr>.iter())
// CFG for a call: evaluate the callee/receiver, then the arguments in
// order; if the callee's return type diverges (`!`), the call's
// successor is an unreachable node. NOTE(review): the `pred` parameter
// line and the tail `else { ret }` path are missing from this paste.
387 fn call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
388 call_expr: &hir::Expr,
390 func_or_rcvr: &hir::Expr,
391 args: I) -> CFGIndex {
// Overloaded-operator/method calls get their fn type from the method
// map; plain calls from the (adjusted) callee expression type.
392 let method_call = ty::MethodCall::expr(call_expr.id);
393 let fn_ty = match self.tcx.tables.borrow().method_map.get(&method_call) {
394 Some(method) => method.ty,
395 None => self.tcx.expr_ty_adjusted(func_or_rcvr)
398 let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
399 let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
400 if fn_ty.fn_ret().diverges() {
401 self.add_unreachable_node()
// Chains a sequence of expressions linearly, each one's exit feeding
// the next; returns the final exit (or `pred` for an empty sequence).
// NOTE(review): the `exprs: I` parameter line is missing in this paste.
407 fn exprs<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
409 pred: CFGIndex) -> CFGIndex {
410 //! Constructs graph for `exprs` evaluated in order
411 exprs.fold(pred, |p, e| self.expr(e, p))
// CFG for an optional expression: visit it if `Some`, otherwise pass
// `pred` through unchanged (`iter().fold` visits 0 or 1 elements).
414 fn opt_expr(&mut self,
415 opt_expr: &Option<P<hir::Expr>>,
416 pred: CFGIndex) -> CFGIndex {
417 //! Constructs graph for `opt_expr` evaluated, if Some
418 opt_expr.iter().fold(pred, |p, e| self.expr(&**e, p))
// Common case: evaluate `subexprs` left to right, then add one AST
// node for `expr` itself whose sole predecessor is the last subexpr's
// exit. NOTE(review): the `expr`/`pred` parameter lines are missing
// from this paste.
421 fn straightline<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
424 subexprs: I) -> CFGIndex {
425 //! Handles case of an expression that evaluates `subexprs` in order
427 let subexprs_exit = self.exprs(subexprs, pred);
428 self.add_ast_node(expr.id, &[subexprs_exit])
// CFG for a `match` expression: discriminant first, then per-arm
// pattern/guard nodes chained as described in the inline comments
// below, with each arm's body flowing into the shared exit node.
// NOTE(review): the `for arm in arms {` loop header, several closing
// braces, and the final `expr_exit` return are missing in this paste.
431 fn match_(&mut self, id: ast::NodeId, discr: &hir::Expr,
432 arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex {
433 // The CFG for match expression is quite complex, so no ASCII
436 // The CFG generated below matches roughly what trans puts
437 // out. Each pattern and guard is visited in parallel, with
438 // arms containing multiple patterns generating multiple nodes
439 // for the same guard expression. The guard expressions chain
440 // into each other from top to bottom, with a specific
441 // exception to allow some additional valid programs
442 // (explained below). Trans differs slightly in that the
443 // pattern matching may continue after a guard but the visible
444 // behaviour should be the same.
446 // What is going on is explained in further comments.
448 // Visit the discriminant expression
449 let discr_exit = self.expr(discr, pred);
451 // Add a node for the exit of the match expression as a whole.
452 let expr_exit = self.add_ast_node(id, &[]);
454 // Keep track of the previous guard expressions
455 let mut prev_guards = Vec::new();
456 // Track if the previous pattern contained bindings or wildcards
457 let mut prev_has_bindings = false;
460 // Add an exit node for when we've visited all the
461 // patterns and the guard (if there is one) in the arm.
462 let arm_exit = self.add_dummy_node(&[]);
464 for pat in &arm.pats {
465 // Visit the pattern, coming from the discriminant exit
466 let mut pat_exit = self.pat(&**pat, discr_exit);
468 // If there is a guard expression, handle it here
469 if let Some(ref guard) = arm.guard {
470 // Add a dummy node for the previous guard
471 // expression to target
472 let guard_start = self.add_dummy_node(&[pat_exit]);
473 // Visit the guard expression
474 let guard_exit = self.expr(&**guard, guard_start);
476 let this_has_bindings = pat_util::pat_contains_bindings_or_wild(
477 &self.tcx.def_map, &**pat);
479 // If both this pattern and the previous pattern
480 // were free of bindings, they must consist only
481 // of "constant" patterns. Note we cannot match an
482 // all-constant pattern, fail the guard, and then
483 // match *another* all-constant pattern. This is
484 // because if the previous pattern matches, then
485 // we *cannot* match this one, unless all the
486 // constants are the same (which is rejected by
489 // We can use this to be smarter about the flow
490 // along guards. If the previous pattern matched,
491 // then we know we will not visit the guard in
492 // this one (whether or not the guard succeeded),
493 // if the previous pattern failed, then we know
494 // the guard for that pattern will not have been
495 // visited. Thus, it is not possible to visit both
496 // the previous guard and the current one when
497 // both patterns consist only of constant
500 // However, if the above does not hold, then all
501 // previous guards need to be wired to visit the
502 // current guard pattern.
503 if prev_has_bindings || this_has_bindings {
504 while let Some(prev) = prev_guards.pop() {
505 self.add_contained_edge(prev, guard_start);
509 prev_has_bindings = this_has_bindings;
511 // Push the guard onto the list of previous guards
512 prev_guards.push(guard_exit);
514 // Update the exit node for the pattern
515 pat_exit = guard_exit;
518 // Add an edge from the exit of this pattern to the
520 self.add_contained_edge(pat_exit, arm_exit);
523 // Visit the body of this arm
524 let body_exit = self.expr(&arm.body, arm_exit);
526 // Link the body to the exit of the expression
527 self.add_contained_edge(body_exit, expr_exit);
// Adds a synthetic node (no associated AST id) with the given
// predecessors; used for loop heads and match-arm join points.
533 fn add_dummy_node(&mut self, preds: &[CFGIndex]) -> CFGIndex {
534 self.add_node(CFGNodeData::Dummy, preds)
// Adds a node tied to a real AST node id; the assert guards against
// accidentally recording the placeholder DUMMY_NODE_ID.
537 fn add_ast_node(&mut self, id: ast::NodeId, preds: &[CFGIndex]) -> CFGIndex {
538 assert!(id != ast::DUMMY_NODE_ID);
539 self.add_node(CFGNodeData::AST(id), preds)
// Node with no predecessors, returned as the "exit" of diverging
// expressions (`return`, `break`, `continue`).
542 fn add_unreachable_node(&mut self) -> CFGIndex {
543 self.add_node(CFGNodeData::Unreachable, &[])
// Creates a node and wires an ordinary edge from each predecessor.
// NOTE(review): the loop header iterating over `preds` (and the
// trailing `node` return) are missing from this fragmentary paste.
546 fn add_node(&mut self, data: CFGNodeData, preds: &[CFGIndex]) -> CFGIndex {
547 let node = self.graph.add_node(data);
549 self.add_contained_edge(pred, node);
// Ordinary intra-scope edge: no scopes are exited, so the edge data's
// `exiting_scopes` list is empty. NOTE(review): the `source`/`target`
// parameter lines are missing from this fragmentary paste.
554 fn add_contained_edge(&mut self,
557 let data = CFGEdgeData {exiting_scopes: vec!() };
558 self.graph.add_edge(source, target, data);
// Edge for `break`/`continue`: records every region scope popped on
// the way from the jump site up to (but not including) the target
// loop's scope, so later analyses know which scopes are exited.
// NOTE(review): the `to_loop: LoopScope` parameter line is missing
// from this fragmentary paste.
561 fn add_exiting_edge(&mut self,
562 from_expr: &hir::Expr,
563 from_index: CFGIndex,
565 to_index: CFGIndex) {
566 let mut data = CFGEdgeData {exiting_scopes: vec!() };
567 let mut scope = self.tcx.region_maps.node_extent(from_expr.id);
568 let target_scope = self.tcx.region_maps.node_extent(to_loop.loop_id);
// Walk outward through enclosing region scopes until the loop's own
// scope is reached, recording each one exited.
569 while scope != target_scope {
570 data.exiting_scopes.push(scope.node_id(&self.tcx.region_maps));
571 scope = self.tcx.region_maps.encl_scope(scope);
573 self.graph.add_edge(from_index, to_index, data);
// Edge for `return`: jumps straight to `fn_exit`, exiting every loop
// scope currently on the stack (innermost first, via `.rev()`).
576 fn add_returning_edge(&mut self,
577 _from_expr: &hir::Expr,
578 from_index: CFGIndex) {
579 let mut data = CFGEdgeData {
580 exiting_scopes: vec!(),
582 for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
583 data.exiting_scopes.push(id);
585 self.graph.add_edge(from_index, self.fn_exit, data);
// NOTE(review): the `fn find_scope(...)` signature start and the
// unlabeled/labeled branching lines are missing from this fragmentary
// paste. What is visible: with no label, the innermost loop scope is
// returned; with a label, def_map resolves it to a loop id which is
// searched for in `loop_scopes`, and failures are compiler bugs.
590 label: Option<ast::Ident>) -> LoopScope {
// Unlabeled break/continue: innermost loop on the stack.
592 return *self.loop_scopes.last().unwrap();
595 match self.tcx.def_map.borrow().get(&expr.id).map(|d| d.full_def()) {
596 Some(def::DefLabel(loop_id)) => {
// Labeled: scan the stack for the loop with the resolved id.
597 for l in &self.loop_scopes {
598 if l.loop_id == loop_id {
// A resolved label with no matching scope is an internal error.
602 self.tcx.sess.span_bug(expr.span,
603 &format!("no loop scope for id {}", loop_id));
// A non-label definition attached to a break/continue is also a bug.
607 self.tcx.sess.span_bug(expr.span,
608 &format!("bad entry `{:?}` in def_map for label", r));