1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use rustc_data_structures::graph;
13 use ty::{self, TyCtxt};
17 use hir::{self, PatKind};
// Walks a `hir::Body` and incrementally builds its control-flow graph.
// NOTE(review): this listing is gappy — fields used later (e.g. `graph`,
// `fn_exit`) are declared on lines not shown here; confirm against the
// full file before relying on this field list.
19 struct CFGBuilder<'a, 'tcx: 'a> {
20 tcx: TyCtxt<'a, 'tcx, 'tcx>, // type context: HIR map, region maps, queries
21 tables: &'a ty::TypeckTables<'tcx>, // typeck results for the body being lowered
24 loop_scopes: Vec<LoopScope>, // stack of enclosing loops, innermost last
// Per-loop book-keeping: records where `continue` and `break`
// expressions inside the loop must jump. `Copy` so scopes can be
// returned by value from lookups (see `find_scope`).
27 #[derive(Copy, Clone)]
29 loop_id: ast::NodeId, // id of loop/while node
30 continue_index: CFGIndex, // where to go on a `continue`
31 break_index: CFGIndex, // where to go on a `break`
// Entry point: builds and returns the CFG for `body`. Creates the
// Entry/Exit nodes, looks up the typeck tables for the body's owner,
// then drives `CFGBuilder::expr` over the body's value expression.
// NOTE(review): several lines are missing from this listing (the
// struct-literal fields of `CFGBuilder`, the `body_exit` binding, and
// the final `CFG { .. }` return) — verify against the full file.
34 pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
35 body: &hir::Body) -> CFG {
36 let mut graph = graph::Graph::new();
37 let entry = graph.add_node(CFGNodeData::Entry);
39 // `fn_exit` is target of return exprs, which lies somewhere
40 // outside input `body`. (Distinguishing `fn_exit` and `body_exit`
41 // also resolves chicken-and-egg problem that arises if you try to
42 // have return exprs jump to `body_exit` during construction.)
43 let fn_exit = graph.add_node(CFGNodeData::Exit);
46 // Find the tables for this body.
47 let owner_def_id = tcx.hir.local_def_id(tcx.hir.body_owner(body.id()));
48 let tables = tcx.item_tables(owner_def_id);
50 let mut cfg_builder = CFGBuilder {
55 loop_scopes: Vec::new(),
// Build the graph for the whole body, then wire its exit to `fn_exit`.
57 body_exit = cfg_builder.expr(&body.value, entry);
58 cfg_builder.add_contained_edge(body_exit, fn_exit);
// Destructure the builder to move the finished graph out.
59 let CFGBuilder { graph, .. } = cfg_builder;
67 impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
// Builds the CFG for a block: each statement is threaded in order from
// `pred`, then the optional tail expression, and finally a node for the
// block itself is added with the tail's exit as predecessor.
68 fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex {
69 let mut stmts_exit = pred;
70 for stmt in &blk.stmts {
71 stmts_exit = self.stmt(stmt, stmts_exit);
// `opt_expr` returns `stmts_exit` unchanged when there is no tail expr.
74 let expr_exit = self.opt_expr(&blk.expr, stmts_exit);
76 self.add_ast_node(blk.id, &[expr_exit])
// Builds the CFG for one statement and returns its exit node.
// Each statement kind gets an AST node keyed by the statement's id.
79 fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex {
// `let` declarations (and item declarations) go through `decl`.
81 hir::StmtDecl(ref decl, id) => {
82 let exit = self.decl(&decl, pred);
83 self.add_ast_node(id, &[exit])
// Expression statements, with or without trailing semicolon, are the
// same from a control-flow perspective.
86 hir::StmtExpr(ref expr, id) |
87 hir::StmtSemi(ref expr, id) => {
88 let exit = self.expr(&expr, pred);
89 self.add_ast_node(id, &[exit])
// Builds the CFG for a declaration. A `let` evaluates its initializer
// (if any) and then binds the pattern; nested item declarations have no
// effect on the enclosing body's control flow, so they pass `pred` through.
94 fn decl(&mut self, decl: &hir::Decl, pred: CFGIndex) -> CFGIndex {
96 hir::DeclLocal(ref local) => {
97 let init_exit = self.opt_expr(&local.init, pred);
98 self.pat(&local.pat, init_exit)
101 hir::DeclItem(_) => pred,
// Builds the CFG for a pattern: leaf patterns get a single node; compound
// patterns first visit their subpatterns (left to right), then add a node
// for the pattern itself. Pattern matching is modeled as straight-line
// flow here — alternation between arms is handled in `match_`.
105 fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
// Leaf patterns: a single AST node hanging off `pred`.
// NOTE(review): more leaf arms (e.g. literals/paths) exist on lines
// missing from this listing.
107 PatKind::Binding(.., None) |
111 PatKind::Wild => self.add_ast_node(pat.id, &[pred]),
// Single-subpattern wrappers: visit the subpattern, then the pattern.
113 PatKind::Box(ref subpat) |
114 PatKind::Ref(ref subpat, _) |
115 PatKind::Binding(.., Some(ref subpat)) => {
116 let subpat_exit = self.pat(&subpat, pred);
117 self.add_ast_node(pat.id, &[subpat_exit])
// Tuple-like patterns: all subpatterns must match, visited in order.
120 PatKind::TupleStruct(_, ref subpats, _) |
121 PatKind::Tuple(ref subpats, _) => {
122 let pats_exit = self.pats_all(subpats.iter(), pred);
123 self.add_ast_node(pat.id, &[pats_exit])
// Struct patterns: visit each field's pattern.
126 PatKind::Struct(_, ref subpats, _) => {
127 let pats_exit = self.pats_all(subpats.iter().map(|f| &f.node.pat), pred);
128 self.add_ast_node(pat.id, &[pats_exit])
// Slice patterns: prefix, optional middle binding, then suffix, in order.
131 PatKind::Slice(ref pre, ref vec, ref post) => {
132 let pre_exit = self.pats_all(pre.iter(), pred);
133 let vec_exit = self.pats_all(vec.iter(), pre_exit);
134 let post_exit = self.pats_all(post.iter(), vec_exit);
135 self.add_ast_node(pat.id, &[post_exit])
140 fn pats_all<'b, I: Iterator<Item=&'b P<hir::Pat>>>(&mut self,
142 pred: CFGIndex) -> CFGIndex {
143 //! Handles case where all of the patterns must match.
// Threads `pred` through each pattern in iterator order; returns the
// exit of the last one (or `pred` itself if the iterator is empty).
144 pats.fold(pred, |pred, pat| self.pat(&pat, pred))
// The core dispatcher: builds the CFG for one expression, returning its
// exit node. The numbered `// 1`, `// 2`, ... comments label the node
// creation order referenced by ASCII-art diagrams elided from this
// listing. NOTE(review): several match arms and closing delimiters are
// on lines missing from this view.
147 fn expr(&mut self, expr: &hir::Expr, pred: CFGIndex) -> CFGIndex {
// A block expression: flow through the block, then a node for the expr.
149 hir::ExprBlock(ref blk) => {
150 let blk_exit = self.block(&blk, pred);
151 self.add_ast_node(expr.id, &[blk_exit])
// `if` without `else`: the expr node has two predecessors — the
// condition (false branch skips the body) and the then-block.
154 hir::ExprIf(ref cond, ref then, None) => {
169 let cond_exit = self.expr(&cond, pred); // 1
170 let then_exit = self.block(&then, cond_exit); // 2
171 self.add_ast_node(expr.id, &[cond_exit, then_exit]) // 3,4
// `if`/`else`: both branches start from the condition's exit and both
// feed the expr node; the bare condition edge is replaced by `else`.
174 hir::ExprIf(ref cond, ref then, Some(ref otherwise)) => {
189 let cond_exit = self.expr(&cond, pred); // 1
190 let then_exit = self.block(&then, cond_exit); // 2
191 let else_exit = self.expr(&otherwise, cond_exit); // 3
192 self.add_ast_node(expr.id, &[then_exit, else_exit]) // 4, 5
// `while`: loopback dummy -> condition -> (exit | body -> loopback).
195 hir::ExprWhile(ref cond, ref body, _) => {
210 // Note that `break` and `continue` statements
211 // may cause additional edges.
213 let loopback = self.add_dummy_node(&[pred]); // 1
215 // Create expr_exit without pred (cond_exit)
216 let expr_exit = self.add_ast_node(expr.id, &[]); // 3
218 // The LoopScope needs to be on the loop_scopes stack while evaluating the
219 // condition and the body of the loop (both can break out of the loop)
220 self.loop_scopes.push(LoopScope {
222 continue_index: loopback,
223 break_index: expr_exit
226 let cond_exit = self.expr(&cond, loopback); // 2
228 // Add pred (cond_exit) to expr_exit
229 self.add_contained_edge(cond_exit, expr_exit);
231 let body_exit = self.block(&body, cond_exit); // 4
232 self.add_contained_edge(body_exit, loopback); // 5
233 self.loop_scopes.pop();
// `loop`: like `while` but with no condition — the only way to reach
// `expr_exit` is via a `break` edge added through the LoopScope.
237 hir::ExprLoop(ref body, _, _) => {
249 // Note that `break` and `continue` statements
250 // may cause additional edges.
252 let loopback = self.add_dummy_node(&[pred]); // 1
253 let expr_exit = self.add_ast_node(expr.id, &[]); // 2
254 self.loop_scopes.push(LoopScope {
256 continue_index: loopback,
257 break_index: expr_exit,
259 let body_exit = self.block(&body, loopback); // 3
260 self.add_contained_edge(body_exit, loopback); // 4
261 self.loop_scopes.pop();
// `match`: complex enough to get its own helper.
265 hir::ExprMatch(ref discr, ref arms, _) => {
266 self.match_(expr.id, &discr, &arms, pred)
// Lazy `&&`/`||`: the rhs may be skipped, so the expr node has both
// the lhs exit (short-circuit) and the rhs exit as predecessors.
269 hir::ExprBinary(op, ref l, ref r) if op.node.is_lazy() => {
284 let l_exit = self.expr(&l, pred); // 1
285 let r_exit = self.expr(&r, l_exit); // 2
286 self.add_ast_node(expr.id, &[l_exit, r_exit]) // 3,4
// `return`: evaluate the value, jump to fn_exit, and continue from an
// unreachable node (nothing follows a return in normal flow).
289 hir::ExprRet(ref v) => {
290 let v_exit = self.opt_expr(v, pred);
291 let b = self.add_ast_node(expr.id, &[v_exit]);
292 self.add_returning_edge(expr, b);
293 self.add_unreachable_node()
// `break`: evaluate optional value, then an exiting edge to the target
// loop's break_index; flow after the break is unreachable.
296 hir::ExprBreak(destination, ref opt_expr) => {
297 let v = self.opt_expr(opt_expr, pred);
298 let loop_scope = self.find_scope(expr, destination);
299 let b = self.add_ast_node(expr.id, &[v]);
300 self.add_exiting_edge(expr, b,
301 loop_scope, loop_scope.break_index);
302 self.add_unreachable_node()
// `continue` (HIR calls it `Again`): exiting edge to continue_index.
305 hir::ExprAgain(destination) => {
306 let loop_scope = self.find_scope(expr, destination);
307 let a = self.add_ast_node(expr.id, &[pred]);
308 self.add_exiting_edge(expr, a,
309 loop_scope, loop_scope.continue_index);
310 self.add_unreachable_node()
// The remaining arms are straight-line evaluation of subexpressions.
313 hir::ExprArray(ref elems) => {
314 self.straightline(expr, pred, elems.iter().map(|e| &*e))
315 hir::ExprCall(ref func, ref args) => {
318 self.call(expr, pred, &func, args.iter().map(|e| &*e))
// Method calls: receiver is args[0], remaining args follow.
321 hir::ExprMethodCall(.., ref args) => {
322 self.call(expr, pred, &args[0], args[1..].iter().map(|e| &*e))
// Overloaded index/binary operators are really method calls.
325 hir::ExprIndex(ref l, ref r) |
326 hir::ExprBinary(_, ref l, ref r) if self.tables.is_method_call(expr.id) => {
327 self.call(expr, pred, &l, Some(&**r).into_iter())
330 hir::ExprUnary(_, ref e) if self.tables.is_method_call(expr.id) => {
331 self.call(expr, pred, &e, None::<hir::Expr>.iter())
334 hir::ExprTup(ref exprs) => {
335 self.straightline(expr, pred, exprs.iter().map(|e| &*e))
// Struct literal: fields in order, then the optional `..base` expr.
338 hir::ExprStruct(_, ref fields, ref base) => {
339 let field_cfg = self.straightline(expr, pred, fields.iter().map(|f| &*f.expr));
340 self.opt_expr(base, field_cfg)
// Assignment evaluates the rhs before the lhs place expression.
343 hir::ExprAssign(ref l, ref r) |
344 hir::ExprAssignOp(_, ref l, ref r) => {
345 self.straightline(expr, pred, [r, l].iter().map(|&e| &**e))
348 hir::ExprIndex(ref l, ref r) |
349 hir::ExprBinary(_, ref l, ref r) => { // NB: && and || handled earlier
350 self.straightline(expr, pred, [l, r].iter().map(|&e| &**e))
// Single-subexpression wrappers.
353 hir::ExprBox(ref e) |
354 hir::ExprAddrOf(_, ref e) |
355 hir::ExprCast(ref e, _) |
356 hir::ExprType(ref e, _) |
357 hir::ExprUnary(_, ref e) |
358 hir::ExprField(ref e, _) |
359 hir::ExprTupField(ref e, _) |
360 hir::ExprRepeat(ref e, _) => {
361 self.straightline(expr, pred, Some(&**e).into_iter())
// Inline asm: outputs are evaluated before inputs.
364 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
365 let post_outputs = self.exprs(outputs.iter().map(|e| &*e), pred);
366 let post_inputs = self.exprs(inputs.iter().map(|e| &*e), post_outputs);
367 self.add_ast_node(expr.id, &[post_inputs])
// Leaf expressions: closures (their body is a separate CFG) and paths
// contribute a single node with no subexpressions.
370 hir::ExprClosure(..) |
372 hir::ExprPath(_) => {
373 self.straightline(expr, pred, None::<hir::Expr>.iter())
// Builds the CFG for a call-like expression: the callee/receiver is
// evaluated first, then the arguments in order. If the callee's return
// type is `!`, execution cannot continue, so the exit is an
// unreachable node instead of the straight-line exit.
378 fn call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
379 call_expr: &hir::Expr,
381 func_or_rcvr: &hir::Expr,
382 args: I) -> CFGIndex {
// Overloaded operators/method calls record their callee type in the
// method map; plain calls use the (adjusted) type of the callee expr.
383 let method_call = ty::MethodCall::expr(call_expr.id);
384 let fn_ty = match self.tables.method_map.get(&method_call) {
385 Some(method) => method.ty,
386 None => self.tables.expr_ty_adjusted(func_or_rcvr),
389 let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
390 let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
391 // FIXME(canndrew): This is_never should probably be an is_uninhabited.
392 if fn_ty.fn_ret().0.is_never() {
393 self.add_unreachable_node()
399 fn exprs<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
401 pred: CFGIndex) -> CFGIndex {
402 //! Constructs graph for `exprs` evaluated in order
// Threads `pred` through each expression; empty iterator yields `pred`.
403 exprs.fold(pred, |p, e| self.expr(e, p))
406 fn opt_expr(&mut self,
407 opt_expr: &Option<P<hir::Expr>>,
408 pred: CFGIndex) -> CFGIndex {
409 //! Constructs graph for `opt_expr` evaluated, if Some
// `None` folds zero times and simply returns `pred` unchanged.
410 opt_expr.iter().fold(pred, |p, e| self.expr(&e, p))
413 fn straightline<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
416 subexprs: I) -> CFGIndex {
417 //! Handles case of an expression that evaluates `subexprs` in order
// Visit all subexpressions left to right, then add the node for the
// enclosing expression itself.
419 let subexprs_exit = self.exprs(subexprs, pred);
420 self.add_ast_node(expr.id, &[subexprs_exit])
// Builds the CFG for a `match` expression: discriminant first, then each
// arm's patterns and guard in parallel, with all arm bodies feeding a
// shared exit node. See the long comments below for the guard-chaining
// subtlety. NOTE(review): the per-arm loop header and the final return
// of `expr_exit` are on lines missing from this listing.
423 fn match_(&mut self, id: ast::NodeId, discr: &hir::Expr,
424 arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex {
425 // The CFG for match expression is quite complex, so no ASCII
428 // The CFG generated below matches roughly what trans puts
429 // out. Each pattern and guard is visited in parallel, with
430 // arms containing multiple patterns generating multiple nodes
431 // for the same guard expression. The guard expressions chain
432 // into each other from top to bottom, with a specific
433 // exception to allow some additional valid programs
434 // (explained below). Trans differs slightly in that the
435 // pattern matching may continue after a guard but the visible
436 // behaviour should be the same.
438 // What is going on is explained in further comments.
440 // Visit the discriminant expression
441 let discr_exit = self.expr(discr, pred);
443 // Add a node for the exit of the match expression as a whole.
444 let expr_exit = self.add_ast_node(id, &[]);
446 // Keep track of the previous guard expressions
447 let mut prev_guards = Vec::new();
448 // Track if the previous pattern contained bindings or wildcards
449 let mut prev_has_bindings = false;
452 // Add an exit node for when we've visited all the
453 // patterns and the guard (if there is one) in the arm.
454 let arm_exit = self.add_dummy_node(&[]);
456 for pat in &arm.pats {
457 // Visit the pattern, coming from the discriminant exit
458 let mut pat_exit = self.pat(&pat, discr_exit);
460 // If there is a guard expression, handle it here
461 if let Some(ref guard) = arm.guard {
462 // Add a dummy node for the previous guard
463 // expression to target
464 let guard_start = self.add_dummy_node(&[pat_exit]);
465 // Visit the guard expression
466 let guard_exit = self.expr(&guard, guard_start);
468 let this_has_bindings = pat.contains_bindings_or_wild();
470 // If both this pattern and the previous pattern
471 // were free of bindings, they must consist only
472 // of "constant" patterns. Note we cannot match an
473 // all-constant pattern, fail the guard, and then
474 // match *another* all-constant pattern. This is
475 // because if the previous pattern matches, then
476 // we *cannot* match this one, unless all the
477 // constants are the same (which is rejected by
480 // We can use this to be smarter about the flow
481 // along guards. If the previous pattern matched,
482 // then we know we will not visit the guard in
483 // this one (whether or not the guard succeeded),
484 // if the previous pattern failed, then we know
485 // the guard for that pattern will not have been
486 // visited. Thus, it is not possible to visit both
487 // the previous guard and the current one when
488 // both patterns consist only of constant
491 // However, if the above does not hold, then all
492 // previous guards need to be wired to visit the
493 // current guard pattern.
494 if prev_has_bindings || this_has_bindings {
495 while let Some(prev) = prev_guards.pop() {
496 self.add_contained_edge(prev, guard_start);
500 prev_has_bindings = this_has_bindings;
502 // Push the guard onto the list of previous guards
503 prev_guards.push(guard_exit);
505 // Update the exit node for the pattern
506 pat_exit = guard_exit;
509 // Add an edge from the exit of this pattern to the
511 self.add_contained_edge(pat_exit, arm_exit);
514 // Visit the body of this arm
515 let body_exit = self.expr(&arm.body, arm_exit);
517 // Link the body to the exit of the expression
518 self.add_contained_edge(body_exit, expr_exit);
// Adds an anonymous node (no AST id) with the given predecessors; used
// for join points like loopbacks and arm exits.
524 fn add_dummy_node(&mut self, preds: &[CFGIndex]) -> CFGIndex {
525 self.add_node(CFGNodeData::Dummy, preds)
// Adds a node tied to a real AST node id; dummy ids are a caller bug.
528 fn add_ast_node(&mut self, id: ast::NodeId, preds: &[CFGIndex]) -> CFGIndex {
529 assert!(id != ast::DUMMY_NODE_ID);
530 self.add_node(CFGNodeData::AST(id), preds)
// Adds a node with no predecessors, used as the "exit" of diverging
// expressions (`return`, `break`, `continue`, calls returning `!`).
533 fn add_unreachable_node(&mut self) -> CFGIndex {
534 self.add_node(CFGNodeData::Unreachable, &[])
// Common node-creation helper: adds the node, then a contained edge
// from each predecessor. NOTE(review): the `for &pred in preds` loop
// header and the returned node are on lines missing from this listing.
537 fn add_node(&mut self, data: CFGNodeData, preds: &[CFGIndex]) -> CFGIndex {
538 let node = self.graph.add_node(data);
540 self.add_contained_edge(pred, node);
// Adds an ordinary edge that stays within the current scope: it exits
// no scopes, hence the empty `exiting_scopes`. (The `source`/`target`
// parameter lines are missing from this listing.)
545 fn add_contained_edge(&mut self,
548 let data = CFGEdgeData {exiting_scopes: vec![] };
549 self.graph.add_edge(source, target, data);
// Adds an edge for `break`/`continue`: walks outward from the scope of
// `from_expr` to the target loop's scope, recording every scope exited
// along the way in the edge data (used later for cleanup analysis).
552 fn add_exiting_edge(&mut self,
553 from_expr: &hir::Expr,
554 from_index: CFGIndex,
556 to_index: CFGIndex) {
557 let mut data = CFGEdgeData { exiting_scopes: vec![] };
558 let mut scope = self.tcx.region_maps.node_extent(from_expr.id);
559 let target_scope = self.tcx.region_maps.node_extent(to_loop.loop_id);
// Collect each enclosing scope from the jump site up to (but not
// including) the loop being targeted.
560 while scope != target_scope {
561 data.exiting_scopes.push(scope.node_id(&self.tcx.region_maps));
562 scope = self.tcx.region_maps.encl_scope(scope);
564 self.graph.add_edge(from_index, to_index, data);
// Adds the edge for a `return`: it targets the function-level exit node
// and exits every loop scope currently on the stack, innermost first.
567 fn add_returning_edge(&mut self,
568 _from_expr: &hir::Expr,
569 from_index: CFGIndex) {
570 let mut data = CFGEdgeData {
571 exiting_scopes: vec![],
573 for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
574 data.exiting_scopes.push(id);
576 self.graph.add_edge(from_index, self.fn_exit, data);
581 destination: hir::Destination) -> LoopScope {
583 match destination.loop_id.into() {
585 for l in &self.loop_scopes {
586 if l.loop_id == loop_id {
590 span_bug!(expr.span, "no loop scope for id {}", loop_id);
592 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),