1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 * A module for propagating forward dataflow information. The analysis
14 * assumes that the items to be propagated can be represented as bits
15 * and thus uses bitvectors. Your job is simply to specify the so-called
16 * GEN and KILL bits for each expression.
26 use syntax::ast_util::IdRange;
27 use syntax::print::{pp, pprust};
30 use util::ppaux::Repr;
31 use util::nodemap::NodeMap;
// NOTE(review): the embedded line numbers jump (36 -> 38, 38 -> 41, ...), so some
// field lines (notably the `oper: O` field implied by the "data flow operator"
// doc comment and by `self.oper` uses below) are elided from this view.
// Code is reproduced verbatim; only comments are added.
//
// Core state for a forward bitvector dataflow analysis: per-node GEN/KILL
// sets plus the propagated on-entry sets, all packed into flat Vec<uint>
// bitvectors indexed via `nodeid_to_bitset`.
34 pub struct DataFlowContext<'a, O> {
35 priv tcx: &'a ty::ctxt,
36 priv method_map: typeck::MethodMap,
38 /// the data flow operator
41 /// number of bits to propagate per id
42 priv bits_per_id: uint,
44 /// number of words we will use to store bits_per_id.
45 /// equal to bits_per_id/uint::BITS rounded up.
46 priv words_per_id: uint,
48 // mapping from node to bitset index.
49 priv nodeid_to_bitset: NodeMap<uint>,
51 // Bit sets per id. The following three fields (`gens`, `kills`,
52 // and `on_entry`) all have the same structure. For each id in
53 // `id_range`, there is a range of words equal to `words_per_id`.
54 // So, to access the bits for any given id, you take a slice of
55 // the full vector (see the method `compute_id_range()`).
57 /// bits generated as we exit the scope `id`. Updated by `add_gen()`.
58 priv gens: Vec<uint> ,
60 /// bits killed as we exit the scope `id`. Updated by `add_kill()`.
61 priv kills: Vec<uint> ,
63 /// bits that are valid on entry to the scope `id`. Updated by
65 priv on_entry: Vec<uint> }
// Trait that a client implements to select the concrete dataflow problem:
// the initial bit value and the confluence operator used when control-flow
// paths merge. (Closing brace is elided from this view.)
67 /// Parameterization for the precise form of data flow that is used.
68 pub trait DataFlowOperator {
69 /// Specifies the initial value for each bit in the `on_entry` set
70 fn initial_value(&self) -> bool;
72 /// Joins two predecessor bits together, typically either `|` or `&`
73 fn join(&self, succ: uint, pred: uint) -> uint;
// Transient walker state used by `propagate()`. A `changed` flag field is
// referenced by the fixed-point loop (`propcx.changed`) but its declaration
// is elided from this view.
76 struct PropagationContext<'a, 'b, O> {
77 dfcx: &'a mut DataFlowContext<'b, O>,
// Per-loop bookkeeping pushed while walking a loop body. Fields are elided
// here, but usage elsewhere shows at least `loop_id: ast::NodeId` and
// `break_bits: Vec<uint>` (bits flowing out via `break`).
81 struct LoopScope<'a> {
// Pretty-printer annotation hook: when dumping an AST, emit the dataflow
// sets (on-entry / gen / kill) as a synthesized comment next to each node
// that has an allocated bitset. Several arms (else-branches of the two
// `if`s, match scrutinee line, closings) are elided from this view; code
// is reproduced verbatim.
86 impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
88 ps: &mut pprust::State<DataFlowContext<'a, O>>,
89 node: pprust::AnnNode) -> io::IoResult<()> {
// Map the printed node to its NodeId; items use the sentinel 0.
91 pprust::NodeExpr(expr) => expr.id,
92 pprust::NodeBlock(blk) => blk.id,
93 pprust::NodeItem(_) => 0,
94 pprust::NodePat(pat) => pat.id
// Only annotate nodes that actually have a bitset slot.
97 if self.nodeid_to_bitset.contains_key(&id) {
98 let (start, end) = self.compute_id_range_frozen(id);
99 let on_entry = self.on_entry.slice(start, end);
100 let entry_str = bits_to_str(on_entry);
// Show gen/kill only when some bit is set, to keep output compact.
102 let gens = self.gens.slice(start, end);
103 let gens_str = if gens.iter().any(|&u| u != 0) {
104 format!(" gen: {}", bits_to_str(gens))
109 let kills = self.kills.slice(start, end);
110 let kills_str = if kills.iter().any(|&u| u != 0) {
111 format!(" kill: {}", bits_to_str(kills))
116 try!(ps.synth_comment(format!("id {}: {}{}{}", id, entry_str,
117 gens_str, kills_str)));
118 try!(pp::space(&mut ps.s));
// Construction and bitset-manipulation API for the dataflow context.
// NOTE(review): many interior lines (struct-literal fields of `new`,
// `set_bit` calls in add_gen/add_kill, else-arms, early returns, closing
// braces) are elided from this view; code is reproduced verbatim with
// comments only.
124 impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
// Creates an empty context; bitsets grow lazily via `compute_id_range`.
125 pub fn new(tcx: &'a ty::ctxt,
126 method_map: typeck::MethodMap,
129 bits_per_id: uint) -> DataFlowContext<'a, O> {
// Round up to whole words: ceil(bits_per_id / uint::BITS).
130 let words_per_id = (bits_per_id + uint::BITS - 1) / uint::BITS;
132 debug!("DataFlowContext::new(id_range={:?}, bits_per_id={:?}, words_per_id={:?})",
133 id_range, bits_per_id, words_per_id);
135 let gens = Vec::new();
136 let kills = Vec::new();
137 let on_entry = Vec::new();
141 method_map: method_map,
142 words_per_id: words_per_id,
143 nodeid_to_bitset: NodeMap::new(),
144 bits_per_id: bits_per_id,
152 pub fn add_gen(&mut self, id: ast::NodeId, bit: uint) {
153 //! Indicates that `id` generates `bit`
155 debug!("add_gen(id={:?}, bit={:?})", id, bit);
156 let (start, end) = self.compute_id_range(id);
158 let gens = self.gens.mut_slice(start, end);
163 pub fn add_kill(&mut self, id: ast::NodeId, bit: uint) {
164 //! Indicates that `id` kills `bit`
166 debug!("add_kill(id={:?}, bit={:?})", id, bit);
167 let (start, end) = self.compute_id_range(id);
169 let kills = self.kills.mut_slice(start, end);
// Transfer function for one node: bits := (bits | gen) & !kill.
174 fn apply_gen_kill(&mut self, id: ast::NodeId, bits: &mut [uint]) {
175 //! Applies the gen and kill sets for `id` to `bits`
177 debug!("apply_gen_kill(id={:?}, bits={}) [before]",
178 id, mut_bits_to_str(bits));
179 let (start, end) = self.compute_id_range(id);
180 let gens = self.gens.slice(start, end);
181 bitwise(bits, gens, |a, b| a | b);
182 let kills = self.kills.slice(start, end);
183 bitwise(bits, kills, |a, b| a & !b);
185 debug!("apply_gen_kill(id={:?}, bits={}) [after]",
186 id, mut_bits_to_str(bits));
// Kill-only variant, used when flow exits a scope (see `pop_scopes`).
189 fn apply_kill(&mut self, id: ast::NodeId, bits: &mut [uint]) {
190 debug!("apply_kill(id={:?}, bits={}) [before]",
191 id, mut_bits_to_str(bits));
192 let (start, end) = self.compute_id_range(id);
193 let kills = self.kills.slice(start, end);
194 bitwise(bits, kills, |a, b| a & !b);
195 debug!("apply_kill(id={:?}, bits={}) [after]",
196 id, mut_bits_to_str(bits));
// Read-only lookup: panics (via `get`) if `id` has no bitset slot, so
// callers must check `contains_key` first (see the `_frozen` iterators).
199 fn compute_id_range_frozen(&self, id: ast::NodeId) -> (uint, uint) {
200 let n = *self.nodeid_to_bitset.get(&id);
201 let start = n * self.words_per_id;
202 let end = start + self.words_per_id;
// Mutable lookup: allocates a fresh slot (gens/kills zeroed, on_entry
// seeded from `oper.initial_value()`) the first time `id` is seen.
206 fn compute_id_range(&mut self, id: ast::NodeId) -> (uint, uint) {
207 let mut expanded = false;
208 let len = self.nodeid_to_bitset.len();
209 let n = self.nodeid_to_bitset.find_or_insert_with(id, |_| {
214 let entry = if self.oper.initial_value() { uint::MAX } else {0};
215 for _ in range(0, self.words_per_id) {
218 self.on_entry.push(entry);
221 let start = *n * self.words_per_id;
222 let end = start + self.words_per_id;
// Sanity-check the three parallel vectors stay in lockstep.
224 assert!(start < self.gens.len());
225 assert!(end <= self.gens.len());
226 assert!(self.gens.len() == self.kills.len());
227 assert!(self.gens.len() == self.on_entry.len());
// `_frozen` variants take &self and simply skip ids with no bitset,
// rather than allocating one as the &mut self variants do.
233 pub fn each_bit_on_entry_frozen(&self,
237 //! Iterates through each bit that is set on entry to `id`.
238 //! Only useful after `propagate()` has been called.
239 if !self.nodeid_to_bitset.contains_key(&id) {
242 let (start, end) = self.compute_id_range_frozen(id);
243 let on_entry = self.on_entry.slice(start, end);
244 debug!("each_bit_on_entry_frozen(id={:?}, on_entry={})",
245 id, bits_to_str(on_entry));
246 self.each_bit(on_entry, f)
249 pub fn each_bit_on_entry(&mut self,
253 //! Iterates through each bit that is set on entry to `id`.
254 //! Only useful after `propagate()` has been called.
256 let (start, end) = self.compute_id_range(id);
257 let on_entry = self.on_entry.slice(start, end);
258 debug!("each_bit_on_entry(id={:?}, on_entry={})",
259 id, bits_to_str(on_entry));
260 self.each_bit(on_entry, f)
263 pub fn each_gen_bit(&mut self, id: ast::NodeId, f: |uint| -> bool)
265 //! Iterates through each bit in the gen set for `id`.
267 let (start, end) = self.compute_id_range(id);
268 let gens = self.gens.slice(start, end);
269 debug!("each_gen_bit(id={:?}, gens={})",
270 id, bits_to_str(gens));
271 self.each_bit(gens, f)
274 pub fn each_gen_bit_frozen(&self, id: ast::NodeId, f: |uint| -> bool)
276 //! Iterates through each bit in the gen set for `id`.
277 if !self.nodeid_to_bitset.contains_key(&id) {
280 let (start, end) = self.compute_id_range_frozen(id);
281 let gens = self.gens.slice(start, end);
282 debug!("each_gen_bit(id={:?}, gens={})",
283 id, bits_to_str(gens));
284 self.each_bit(gens, f)
// Word-by-word, bit-by-bit enumeration; `f` returning false stops early.
287 fn each_bit(&self, words: &[uint], f: |uint| -> bool) -> bool {
288 //! Helper for iterating over the bits in a bit set.
290 for (word_index, &word) in words.iter().enumerate() {
292 let base_index = word_index * uint::BITS;
293 for offset in range(0u, uint::BITS) {
294 let bit = 1 << offset;
295 if (word & bit) != 0 {
296 // NB: we round up the total number of bits
297 // that we store in any given bit set so that
298 // it is an even multiple of uint::BITS. This
299 // means that there may be some stray bits at
300 // the end that do not correspond to any
301 // actual value. So before we callback, check
302 // whether the bit_index is greater than the
303 // actual value the user specified and stop
305 let bit_index = base_index + offset;
306 if bit_index >= self.bits_per_id {
308 } else if !f(bit_index) {
// Fixed-point driver: repeatedly walks the block until no entry set
// changes, then (presumably under a debug flag — the guard line is elided)
// pretty-prints the result to stderr. Interior lines (struct-literal
// fields, early return, closings) are elided; code reproduced verbatim.
319 impl<'a, O:DataFlowOperator+Clone+'static> DataFlowContext<'a, O> {
320 // ^^^^^^^^^^^^^ only needed for pretty printing
321 pub fn propagate(&mut self, blk: &ast::Block) {
322 //! Performs the data flow analysis.
324 if self.bits_per_id == 0 {
325 // Optimize the surprisingly common degenerate case.
330 let mut propcx = PropagationContext {
// Scratch bitvector reused across iterations of the fixed-point loop.
335 let mut temp = vec::from_elem(self.words_per_id, 0u);
336 let mut loop_scopes = Vec::new();
// Iterate to a fixed point: each walk sets `changed` when any entry
// set grows; termination follows from the monotone join operator.
338 while propcx.changed {
339 propcx.changed = false;
341 propcx.walk_block(blk, temp, &mut loop_scopes);
345 debug!("Dataflow result:");
347 self.pretty_print_to(~io::stderr(), blk).unwrap();
// Dumps the block with dataflow annotations via the PpAnn impl above.
352 fn pretty_print_to(&self, wr: ~io::Writer,
353 blk: &ast::Block) -> io::IoResult<()> {
354 let mut ps = pprust::rust_printer_annotated(wr, self);
355 try!(ps.cbox(pprust::indent_unit));
357 try!(ps.print_block(blk));
// AST walker that threads the `in_out` bitvector through the program in
// evaluation order, merging into each node's entry set and applying its
// gen/kill transfer function. This is the intricate heart of the analysis
// and many interior lines (match scrutinees, else-arms, reset() calls,
// closing braces) are elided from this view — code reproduced verbatim,
// comments only added.
362 impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
363 fn tcx(&self) -> &'b ty::ctxt {
// Block: merge entry, walk statements then tail expr, apply gen/kill.
367 fn walk_block(&mut self,
370 loop_scopes: &mut Vec<LoopScope> ) {
371 debug!("DataFlowContext::walk_block(blk.id={}, in_out={})",
372 blk.id, bits_to_str(in_out));
374 self.merge_with_entry_set(blk.id, in_out);
376 for &stmt in blk.stmts.iter() {
377 self.walk_stmt(stmt, in_out, loop_scopes);
380 self.walk_opt_expr(blk.expr, in_out, loop_scopes);
382 self.dfcx.apply_gen_kill(blk.id, in_out);
385 fn walk_stmt(&mut self,
388 loop_scopes: &mut Vec<LoopScope> ) {
390 ast::StmtDecl(decl, _) => {
391 self.walk_decl(decl, in_out, loop_scopes);
394 ast::StmtExpr(expr, _) | ast::StmtSemi(expr, _) => {
395 self.walk_expr(expr, in_out, loop_scopes);
// Macros must be expanded before dataflow runs.
398 ast::StmtMac(..) => {
399 self.tcx().sess.span_bug(stmt.span, "unexpanded macro");
404 fn walk_decl(&mut self,
407 loop_scopes: &mut Vec<LoopScope> ) {
// `let pat = init`: initializer is evaluated before the pattern binds.
409 ast::DeclLocal(local) => {
410 self.walk_opt_expr(local.init, in_out, loop_scopes);
411 self.walk_pat(local.pat, in_out, loop_scopes);
// Nested items have their own dataflow; nothing to do here.
414 ast::DeclItem(_) => {}
418 fn walk_expr(&mut self,
421 loop_scopes: &mut Vec<LoopScope> ) {
422 debug!("DataFlowContext::walk_expr(expr={}, in_out={})",
423 expr.repr(self.dfcx.tcx), bits_to_str(in_out));
425 self.merge_with_entry_set(expr.id, in_out);
// Closures are not walked into: their bodies execute at another time.
428 ast::ExprFnBlock(..) | ast::ExprProc(..) => {
// if: walk cond, fork bits for then-branch, walk else on the original,
// then join the two exits.
431 ast::ExprIf(cond, then, els) => {
445 self.walk_expr(cond, in_out, loop_scopes);
447 let mut then_bits = in_out.to_owned();
448 self.walk_block(then, then_bits, loop_scopes);
450 self.walk_opt_expr(els, in_out, loop_scopes);
451 join_bits(&self.dfcx.oper, then_bits, in_out);
// while: body bits feed back into the loop entry; exit bits come from
// the accumulated break_bits of the popped scope.
454 ast::ExprWhile(cond, blk) => {
467 self.walk_expr(cond, in_out, loop_scopes);
469 let mut body_bits = in_out.to_owned();
470 loop_scopes.push(LoopScope {
472 break_bits: Vec::from_slice(in_out)
474 self.walk_block(blk, body_bits, loop_scopes);
475 self.add_to_entry_set(expr.id, body_bits);
476 let new_loop_scope = loop_scopes.pop().unwrap();
477 copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
// for-loops are desugared earlier in the pipeline.
480 ast::ExprForLoop(..) => fail!("non-desugared expr_for_loop"),
// loop: like while but only `break` can exit.
482 ast::ExprLoop(blk, _) => {
492 let mut body_bits = in_out.to_owned();
494 loop_scopes.push(LoopScope {
496 break_bits: Vec::from_slice(in_out)
498 self.walk_block(blk, body_bits, loop_scopes);
499 self.add_to_entry_set(expr.id, body_bits);
501 let new_loop_scope = loop_scopes.pop().unwrap();
502 assert_eq!(new_loop_scope.loop_id, expr.id);
503 copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
506 ast::ExprMatch(discr, ref arms) => {
518 self.walk_expr(discr, in_out, loop_scopes);
// `guards` threads discr + all guards seen so far through each arm.
520 let mut guards = in_out.to_owned();
522 // We know that exactly one arm will be taken, so we
523 // can start out with a blank slate and just union
524 // together the bits from each arm:
527 for arm in arms.iter() {
528 // in_out reflects the discr and all guards to date
529 self.walk_opt_expr(arm.guard, guards, loop_scopes);
531 // determine the bits for the body and then union
532 // them into `in_out`, which reflects all bodies to date
533 let mut body = guards.to_owned();
534 self.walk_pat_alternatives(arm.pats.as_slice(),
537 self.walk_expr(arm.body, body, loop_scopes);
538 join_bits(&self.dfcx.oper, body, in_out);
// return: walk the value; the fall-through reset is elided from view.
542 ast::ExprRet(o_e) => {
543 self.walk_opt_expr(o_e, in_out, loop_scopes);
// break: merge bits into the target scope's break_bits.
547 ast::ExprBreak(label) => {
548 let scope = self.find_scope(expr, label, loop_scopes);
549 self.break_from_to(expr, scope, in_out);
// continue: bits flow back to the loop head's entry set.
553 ast::ExprAgain(label) => {
554 let scope = self.find_scope(expr, label, loop_scopes);
555 self.pop_scopes(expr, scope, in_out);
556 self.add_to_entry_set(scope.loop_id, in_out);
// Assignment: rhs evaluated before lhs.
560 ast::ExprAssign(l, r) |
561 ast::ExprAssignOp(_, l, r) => {
562 self.walk_expr(r, in_out, loop_scopes);
563 self.walk_expr(l, in_out, loop_scopes);
566 ast::ExprVec(ref exprs, _) => {
567 self.walk_exprs(exprs.as_slice(), in_out, loop_scopes)
570 ast::ExprRepeat(l, r, _) => {
571 self.walk_expr(l, in_out, loop_scopes);
572 self.walk_expr(r, in_out, loop_scopes);
575 ast::ExprStruct(_, ref fields, with_expr) => {
576 for field in fields.iter() {
577 self.walk_expr(field.expr, in_out, loop_scopes);
579 self.walk_opt_expr(with_expr, in_out, loop_scopes);
582 ast::ExprCall(f, ref args) => {
583 self.walk_expr(f, in_out, loop_scopes);
584 self.walk_call(expr.id, args.as_slice(), in_out, loop_scopes);
587 ast::ExprMethodCall(_, _, ref args) => {
588 self.walk_call(expr.id, args.as_slice(), in_out, loop_scopes);
// Overloaded index/binary/unary operators are really method calls.
591 ast::ExprIndex(l, r) |
592 ast::ExprBinary(_, l, r) if self.is_method_call(expr) => {
593 self.walk_call(expr.id, [l, r], in_out, loop_scopes);
596 ast::ExprUnary(_, e) if self.is_method_call(expr) => {
597 self.walk_call(expr.id, [e], in_out, loop_scopes);
600 ast::ExprTup(ref exprs) => {
601 self.walk_exprs(exprs.as_slice(), in_out, loop_scopes);
// && / ||: the rhs may not run, so join post-lhs bits with post-rhs.
604 ast::ExprBinary(op, l, r) if ast_util::lazy_binop(op) => {
605 self.walk_expr(l, in_out, loop_scopes);
606 let temp = in_out.to_owned();
607 self.walk_expr(r, in_out, loop_scopes);
608 join_bits(&self.dfcx.oper, temp, in_out);
611 ast::ExprIndex(l, r) |
612 ast::ExprBinary(_, l, r) => {
613 self.walk_exprs([l, r], in_out, loop_scopes);
617 ast::ExprPath(..) => {}
// Single-subexpression forms.
619 ast::ExprAddrOf(_, e) |
620 ast::ExprCast(e, _) |
621 ast::ExprUnary(_, e) |
623 ast::ExprVstore(e, _) |
624 ast::ExprField(e, _, _) => {
625 self.walk_expr(e, in_out, loop_scopes);
628 ast::ExprBox(s, e) => {
629 self.walk_expr(s, in_out, loop_scopes);
630 self.walk_expr(e, in_out, loop_scopes);
633 ast::ExprInlineAsm(ref inline_asm) => {
634 for &(_, expr) in inline_asm.inputs.iter() {
635 self.walk_expr(expr, in_out, loop_scopes);
637 for &(_, expr) in inline_asm.outputs.iter() {
638 self.walk_expr(expr, in_out, loop_scopes);
642 ast::ExprBlock(blk) => {
643 self.walk_block(blk, in_out, loop_scopes);
646 ast::ExprMac(..) => {
647 self.tcx().sess.span_bug(expr.span, "unexpanded macro");
// Finally apply this expression's own gen/kill sets.
651 self.dfcx.apply_gen_kill(expr.id, in_out);
654 fn pop_scopes(&mut self,
655 from_expr: &ast::Expr,
656 to_scope: &mut LoopScope,
657 in_out: &mut [uint]) {
658 //! Whenever you have a `break` or a `loop` statement, flow
659 //! exits through any number of enclosing scopes on its
660 //! way to the new destination. This function applies the kill
661 //! sets of those enclosing scopes to `in_out` (those kill sets
662 //! concern items that are going out of scope).
664 let tcx = self.tcx();
666 debug!("pop_scopes(from_expr={}, to_scope={:?}, in_out={})",
667 from_expr.repr(tcx), to_scope.loop_id,
668 bits_to_str(in_out));
// Walk outward through enclosing region scopes, applying each kill set,
// until the destination loop is reached; bug out if never enclosed.
670 let mut id = from_expr.id;
671 while id != to_scope.loop_id {
672 self.dfcx.apply_kill(id, in_out);
674 match tcx.region_maps.opt_encl_scope(id) {
675 Some(i) => { id = i; }
679 format!("pop_scopes(from_expr={}, to_scope={:?}) \
680 to_scope does not enclose from_expr",
681 from_expr.repr(tcx), to_scope.loop_id));
// break: pop scopes, apply the break expr's own kills, then merge the
// bits into the loop's accumulated break_bits.
687 fn break_from_to(&mut self,
688 from_expr: &ast::Expr,
689 to_scope: &mut LoopScope,
690 in_out: &mut [uint]) {
691 self.pop_scopes(from_expr, to_scope, in_out);
692 self.dfcx.apply_kill(from_expr.id, in_out);
693 join_bits(&self.dfcx.oper,
695 to_scope.break_bits.as_mut_slice());
696 debug!("break_from_to(from_expr={}, to_scope={}) final break_bits={}",
697 from_expr.repr(self.tcx()),
699 bits_to_str(in_out));
702 fn walk_exprs(&mut self,
703 exprs: &[@ast::Expr],
705 loop_scopes: &mut Vec<LoopScope> ) {
706 for &expr in exprs.iter() {
707 self.walk_expr(expr, in_out, loop_scopes);
711 fn walk_opt_expr(&mut self,
712 opt_expr: Option<@ast::Expr>,
714 loop_scopes: &mut Vec<LoopScope> ) {
// Option-as-iterator: walks zero or one expression.
715 for &expr in opt_expr.iter() {
716 self.walk_expr(expr, in_out, loop_scopes);
720 fn walk_call(&mut self,
721 call_id: ast::NodeId,
724 loop_scopes: &mut Vec<LoopScope> ) {
725 self.walk_exprs(args, in_out, loop_scopes);
727 // FIXME(#6268) nested method calls
728 // self.merge_with_entry_set(in_out);
729 // self.dfcx.apply_gen_kill(in_out);
// Calls returning `!` never fall through (use of `fails` is elided here).
731 let return_ty = ty::node_id_to_type(self.tcx(), call_id);
732 let fails = ty::type_is_bot(return_ty);
738 fn walk_pat(&mut self,
741 _loop_scopes: &mut Vec<LoopScope> ) {
742 debug!("DataFlowContext::walk_pat(pat={}, in_out={})",
743 pat.repr(self.dfcx.tcx), bits_to_str(in_out));
// Each subpattern node merges entry and applies its own gen/kill.
745 ast_util::walk_pat(pat, |p| {
746 debug!(" p.id={} in_out={}", p.id, bits_to_str(in_out));
747 self.merge_with_entry_set(p.id, in_out);
748 self.dfcx.apply_gen_kill(p.id, in_out);
753 fn walk_pat_alternatives(&mut self,
756 loop_scopes: &mut Vec<LoopScope> ) {
758 // Common special case:
759 return self.walk_pat(pats[0], in_out, loop_scopes);
762 // In the general case, the patterns in `pats` are
763 // alternatives, so we must treat this like an N-way select
765 let initial_state = in_out.to_owned();
766 for &pat in pats.iter() {
767 let mut temp = initial_state.clone();
768 self.walk_pat(pat, temp, loop_scopes);
769 join_bits(&self.dfcx.oper, temp, in_out);
// Resolves a break/continue target: innermost loop when unlabeled,
// otherwise look up the label in def_map. (Several arms elided.)
773 fn find_scope<'a,'b>(
776 label: Option<ast::Ident>,
777 loop_scopes: &'a mut Vec<LoopScope<'b>>)
778 -> &'a mut LoopScope<'b> {
779 let index = match label {
781 let len = loop_scopes.len();
786 let def_map = self.tcx().def_map.borrow();
787 match def_map.get().find(&expr.id) {
788 Some(&ast::DefLabel(loop_id)) => {
789 match loop_scopes.iter().position(|l| l.loop_id == loop_id) {
792 self.tcx().sess.span_bug(
794 format!("no loop scope for id {:?}", loop_id));
800 self.tcx().sess.span_bug(
802 format!("bad entry `{:?}` in def_map for label", r));
808 loop_scopes.get_mut(index)
// True when typeck recorded an overloaded-operator method for this expr.
811 fn is_method_call(&self, expr: &ast::Expr) -> bool {
812 let method_call = typeck::MethodCall::expr(expr.id);
813 self.dfcx.method_map.borrow().get().contains_key(&method_call)
// Resets a bitvector to the operator's initial value (all-ones or zero).
816 fn reset(&mut self, bits: &mut [uint]) {
817 let e = if self.dfcx.oper.initial_value() {uint::MAX} else {0};
818 for b in bits.mut_iter() { *b = e; }
// Joins `pred_bits` into the stored entry set for `id` (one-way merge).
821 fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
822 debug!("add_to_entry_set(id={:?}, pred_bits={})",
823 id, bits_to_str(pred_bits));
824 let (start, end) = self.dfcx.compute_id_range(id);
825 let changed = { // FIXME(#5074) awkward construction
826 let on_entry = self.dfcx.on_entry.mut_slice(start, end);
827 join_bits(&self.dfcx.oper, pred_bits, on_entry)
830 debug!("changed entry set for {:?} to {}",
831 id, bits_to_str(self.dfcx.on_entry.slice(start, end)));
// Two-way merge: joins into the stored entry set AND copies the merged
// result back out into `pred_bits` for the continuing walk.
836 fn merge_with_entry_set(&mut self,
838 pred_bits: &mut [uint]) {
839 debug!("merge_with_entry_set(id={:?}, pred_bits={})",
840 id, mut_bits_to_str(pred_bits));
841 let (start, end) = self.dfcx.compute_id_range(id);
842 let changed = { // FIXME(#5074) awkward construction
843 let on_entry = self.dfcx.on_entry.mut_slice(start, end);
844 let changed = join_bits(&self.dfcx.oper, pred_bits, on_entry);
845 copy_bits(on_entry, pred_bits);
849 debug!("changed entry set for {:?} to {}",
850 id, bits_to_str(self.dfcx.on_entry.slice(start, end)));
// Debug helper: renders a mutable bitvector as a hex string (body elided
// from this view; presumably delegates to `bits_to_str` — confirm).
856 fn mut_bits_to_str(words: &mut [uint]) -> ~str {
// Debug helper: renders a bitvector as bracketed hex, byte by byte.
// NOTE(review): the definitions of `v` (the word being shifted down) and
// `sep` (separator char) are elided from this view; code reproduced verbatim.
860 fn bits_to_str(words: &[uint]) -> ~str {
861 let mut result = ~"";
864 // Note: this is a little endian printout of bytes.
866 for &word in words.iter() {
868 for _ in range(0u, uint::BYTES) {
869 result.push_char(sep);
870 result.push_str(format!("{:02x}", v & 0xFF));
875 result.push_char(']');
// Overwrites `out_vec` with `in_vec` word-for-word; returns true if any
// word changed (the `|_, b| b` op simply selects the incoming word).
879 fn copy_bits(in_vec: &[uint], out_vec: &mut [uint]) -> bool {
880 bitwise(out_vec, in_vec, |_, b| b)
// Merges `in_vec` into `out_vec` using the client's confluence operator;
// returns true if `out_vec` changed. (The `in_vec: &[uint]` parameter line
// is elided from this view.)
883 fn join_bits<O:DataFlowOperator>(oper: &O,
885 out_vec: &mut [uint]) -> bool {
886 bitwise(out_vec, in_vec, |a, b| oper.join(a, b))
// Core word-wise combinator: out_vec[i] = op(out_vec[i], in_vec[i]) for all
// i, tracking whether anything changed. (The write-back of `new_val` and the
// `changed` return are elided from this view; code reproduced verbatim.)
890 fn bitwise(out_vec: &mut [uint], in_vec: &[uint], op: |uint, uint| -> uint)
892 assert_eq!(out_vec.len(), in_vec.len());
893 let mut changed = false;
894 for (out_elt, in_elt) in out_vec.mut_iter().zip(in_vec.iter()) {
895 let old_val = *out_elt;
896 let new_val = op(old_val, *in_elt);
898 changed |= old_val != new_val;
903 fn set_bit(words: &mut [uint], bit: uint) -> bool {
904 debug!("set_bit: words={} bit={}",
905 mut_bits_to_str(words), bit_str(bit));
906 let word = bit / uint::BITS;
907 let bit_in_word = bit % uint::BITS;
908 let bit_mask = 1 << bit_in_word;
909 debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, word);
910 let oldv = words[word];
911 let newv = oldv | bit_mask;
916 fn bit_str(bit: uint) -> ~str {
918 let lobits = 1 << (bit & 0xFF);
919 format!("[{}:{}-{:02x}]", bit, byte, lobits)