git.lizzy.rs Git - rust.git/commitdiff
libsyntax: Remove all non-`proc` `do` syntax.
author     Patrick Walton <pcwalton@mimiga.net>
           Thu, 21 Nov 2013 00:23:04 +0000 (16:23 -0800)
committer  Patrick Walton <pcwalton@mimiga.net>
           Tue, 26 Nov 2013 16:24:18 +0000 (08:24 -0800)
26 files changed:
src/libsyntax/abi.rs
src/libsyntax/ast_map.rs
src/libsyntax/ast_util.rs
src/libsyntax/attr.rs
src/libsyntax/diagnostic.rs
src/libsyntax/ext/build.rs
src/libsyntax/ext/deriving/clone.rs
src/libsyntax/ext/deriving/decodable.rs
src/libsyntax/ext/deriving/default.rs
src/libsyntax/ext/deriving/encodable.rs
src/libsyntax/ext/deriving/generic.rs
src/libsyntax/ext/deriving/iter_bytes.rs
src/libsyntax/ext/deriving/mod.rs
src/libsyntax/ext/deriving/rand.rs
src/libsyntax/ext/deriving/ty.rs
src/libsyntax/ext/deriving/zero.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/format.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/comments.rs
src/libsyntax/parse/lexer.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pprust.rs

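For context on the sugar being removed: `do f(a) |x| { ... }` was pre-1.0 shorthand for calling `f` with a stack closure as its final argument, i.e. `f(a, |x| { ... })`. The hunks below apply that rewrite mechanically across libsyntax; per the commit title, `do` stays valid only for `proc` closures. A minimal sketch of the pattern in the 2013-era dialect, using a hypothetical `each_even` helper (it will not compile on a modern Rust toolchain, where `do`, `uint`, and `|T| -> U` stack-closure types are gone):

    // Hypothetical helper in the period's syntax: call `f` on each even
    // number below `limit`, stopping early if `f` returns false.
    fn each_even(limit: uint, f: |uint| -> bool) -> bool {
        let mut i = 0u;
        while i < limit {
            if !f(i) { return false; }
            i += 2;
        }
        true
    }

    fn main() {
        let mut sum = 0u;
        // Before this commit (the form being removed):
        //     do each_even(10) |n| { sum += n; true };
        // After this commit, the closure is passed as an ordinary argument:
        each_even(10, |n| { sum += n; true });
        println!("sum of evens below 10: {}", sum); // prints 20
    }
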
src/libsyntax/abi.rs
index 60d49b4c9edb7e0d7f38efd7de7c755090e8c603..9f69379f3c58cbd42169b918959d2d9ff4803086 100644 (file)
@@ -104,14 +104,14 @@ pub fn lookup(name: &str) -> Option<Abi> {
 
     let mut res = None;
 
-    do each_abi |abi| {
+    each_abi(|abi| {
         if name == abi.data().name {
             res = Some(abi);
             false
         } else {
             true
         }
-    };
+    });
     res
 }
 
@@ -217,21 +217,21 @@ pub fn for_target(&self, os: Os, arch: Architecture) -> Option<Abi> {
 
         let mut res = None;
 
-        do self.each |abi| {
+        self.each(|abi| {
             let data = abi.data();
             match data.abi_arch {
                 Archs(a) if (a & arch.bit()) != 0 => { res = Some(abi); false }
                 Archs(_) => { true }
                 RustArch | AllArch => { res = Some(abi); false }
             }
-        };
+        });
 
         res.map(|r| r.for_target(os, arch))
     }
 
     pub fn check_valid(&self) -> Option<(Abi, Abi)> {
         let mut abis = ~[];
-        do self.each |abi| { abis.push(abi); true };
+        self.each(|abi| { abis.push(abi); true });
 
         for (i, abi) in abis.iter().enumerate() {
             let data = abi.data();
@@ -285,10 +285,10 @@ fn to_str(&self) -> ~str {
 impl ToStr for AbiSet {
     fn to_str(&self) -> ~str {
         let mut strs = ~[];
-        do self.each |abi| {
+        self.each(|abi| {
             strs.push(abi.data().name);
             true
-        };
+        });
         format!("\"{}\"", strs.connect(" "))
     }
 }
src/libsyntax/ast_map.rs
index f0be7057c70bf5dcc5ccc0f21514a9e816a1ab6e..32b270643afd23069ee273db7730dd1cf4c90369 100644 (file)
@@ -52,13 +52,13 @@ pub fn ident(&self) -> Ident {
 
 pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
                          -> ~str {
-    let strs = do p.map |e| {
+    let strs = p.map(|e| {
         match *e {
             path_mod(s) | path_name(s) | path_pretty_name(s, _) => {
                 itr.get(s.name)
             }
         }
-    };
+    });
     strs.connect(sep)
 }
 
src/libsyntax/ast_util.rs
index c14375604b1e2aaca64976ca0ff450eb08495e68..ef3c23f515380c1f3b2b230cd49e37b8ae8ebefc 100644 (file)
@@ -254,12 +254,12 @@ pub fn unguarded_pat(a: &Arm) -> Option<~[@Pat]> {
 }
 
 pub fn public_methods(ms: ~[@method]) -> ~[@method] {
-    do ms.move_iter().filter |m| {
+    ms.move_iter().filter(|m| {
         match m.vis {
             public => true,
             _   => false
         }
-    }.collect()
+    }).collect()
 }
 
 // extract a TypeMethod from a trait_method. if the trait_method is
src/libsyntax/attr.rs
index 7a5a326add40f8ac2010d48b188ede7a31d80e81..ecde00aa3021141a9c973978146d284e76ab0133 100644 (file)
@@ -169,18 +169,18 @@ pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
 pub fn contains(haystack: &[@ast::MetaItem],
                 needle: @ast::MetaItem) -> bool {
     debug!("attr::contains (name={})", needle.name());
-    do haystack.iter().any |item| {
+    haystack.iter().any(|item| {
         debug!("  testing: {}", item.name());
         item.node == needle.node
-    }
+    })
 }
 
 pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
     debug!("attr::contains_name (name={})", name);
-    do metas.iter().any |item| {
+    metas.iter().any(|item| {
         debug!("  testing: {}", item.name());
         name == item.name()
-    }
+    })
 }
 
 pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
@@ -204,12 +204,10 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
         .map(|&mi| (mi.name(), mi))
         .collect::<~[(@str, @MetaItem)]>();
 
-    do extra::sort::quick_sort(v) |&(a, _), &(b, _)| {
-        a <= b
-    }
+    extra::sort::quick_sort(v, |&(a, _), &(b, _)| a <= b);
 
     // There doesn't seem to be a more optimal way to do this
-    do v.move_iter().map |(_, m)| {
+    v.move_iter().map(|(_, m)| {
         match m.node {
             MetaList(n, ref mis) => {
                 @Spanned {
@@ -219,7 +217,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
             }
             _ => m
         }
-    }.collect()
+    }).collect()
 }
 
 /**
@@ -248,7 +246,7 @@ pub enum InlineAttr {
 /// True if something like #[inline] is found in the list of attrs.
 pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
     // FIXME (#2809)---validate the usage of #[inline] and #[inline]
-    do attrs.iter().fold(InlineNone) |ia,attr| {
+    attrs.iter().fold(InlineNone, |ia,attr| {
         match attr.node.value.node {
           MetaWord(n) if "inline" == n => InlineHint,
           MetaList(n, ref items) if "inline" == n => {
@@ -262,7 +260,7 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
           }
           _ => ia
         }
-    }
+    })
 }
 
 /// Tests if any `cfg(...)` meta items in `metas` match `cfg`. e.g.
@@ -278,7 +276,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
 
     // this would be much nicer as a chain of iterator adaptors, but
     // this doesn't work.
-    let some_cfg_matches = do metas.any |mi| {
+    let some_cfg_matches = metas.any(|mi| {
         debug!("testing name: {}", mi.name());
         if "cfg" == mi.name() { // it is a #[cfg()] attribute
             debug!("is cfg");
@@ -287,7 +285,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
             match mi.meta_item_list() {
                 Some(cfg_meta) => {
                     debug!("is cfg(...)");
-                    do cfg_meta.iter().all |cfg_mi| {
+                    cfg_meta.iter().all(|cfg_mi| {
                         debug!("cfg({}[...])", cfg_mi.name());
                         match cfg_mi.node {
                             ast::MetaList(s, ref not_cfgs) if "not" == s => {
@@ -301,14 +299,14 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
                             }
                             _ => contains(cfg, *cfg_mi)
                         }
-                    }
+                    })
                 }
                 None => false
             }
         } else {
             false
         }
-    };
+    });
     debug!("test_cfg (no_cfgs={}, some_cfg_matches={})", no_cfgs, some_cfg_matches);
     no_cfgs || some_cfg_matches
 }
src/libsyntax/diagnostic.rs
index bb908919678b8f110dacf471af80af79463190dd..36e6bf32451f9199b92c9fbbc952d46be2452646 100644 (file)
@@ -314,9 +314,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
         // Skip is the number of characters we need to skip because they are
         // part of the 'filename:line ' part of the previous line.
         let skip = fm.name.len() + digits + 3u;
-        do skip.times() {
-            s.push_char(' ');
-        }
+        skip.times(|| s.push_char(' '));
         let orig = fm.get_line(lines.lines[0] as int);
         for pos in range(0u, left-skip) {
             let curChar = (orig[pos] as char);
@@ -335,9 +333,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
         if hi.col != lo.col {
             // the ^ already takes up one space
             let num_squigglies = hi.col.to_uint()-lo.col.to_uint()-1u;
-            do num_squigglies.times() {
-                s.push_char('~')
-            }
+            num_squigglies.times(|| s.push_char('~'));
         }
         print_maybe_styled(s + "\n", term::attr::ForegroundColor(diagnosticcolor(lvl)));
     }
src/libsyntax/ext/build.rs
index 5ae158045e0e72e289430f48732e84ef8d52a4a1..2a7f6dfe2d20a112b23cd7ae2e542bf8b94ec397 100644 (file)
@@ -370,9 +370,9 @@ fn ty_vars_global(&self, ty_params: &OptVec<ast::TyParam>) -> ~[ast::Ty] {
     }
 
     fn strip_bounds(&self, generics: &Generics) -> Generics {
-        let new_params = do generics.ty_params.map |ty_param| {
+        let new_params = generics.ty_params.map(|ty_param| {
             ast::TyParam { bounds: opt_vec::Empty, ..*ty_param }
-        };
+        });
         Generics {
             ty_params: new_params,
             .. (*generics).clone()
@@ -883,9 +883,9 @@ fn view_use(&self, sp: Span,
 
     fn view_use_list(&self, sp: Span, vis: ast::visibility,
                      path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::view_item {
-        let imports = do imports.map |id| {
+        let imports = imports.map(|id| {
             respan(sp, ast::path_list_ident_ { name: *id, id: ast::DUMMY_NODE_ID })
-        };
+        });
 
         self.view_use(sp, vis,
                       ~[@respan(sp,
src/libsyntax/ext/deriving/clone.rs
index 118ada116d9eb4779fd042d0588b380dac7b4134..6dd358144a474e37a0b5a935f65febc3e9cfeda9 100644 (file)
@@ -103,7 +103,7 @@ fn cs_clone(
         },
         _ => {
             // struct-like
-            let fields = do all_fields.map |field| {
+            let fields = all_fields.map(|field| {
                 let ident = match field.name {
                     Some(i) => i,
                     None => cx.span_bug(span,
@@ -111,7 +111,7 @@ fn cs_clone(
                                              name))
                 };
                 cx.field_imm(span, ident, subcall(field.self_))
-            };
+            });
 
             if fields.is_empty() {
                 // no fields, so construct like `None`
src/libsyntax/ext/deriving/decodable.rs
index 2f9222ccb56f63e7b1daacd7c2dcd25c06dac5ff..4ab0cc3bc67f59e6dd977bd95e310ac5f0e83309 100644 (file)
@@ -70,13 +70,16 @@ fn decodable_substructure(cx: @ExtCtxt, span: Span,
             };
             let read_struct_field = cx.ident_of("read_struct_field");
 
-            let result = do decode_static_fields(cx, span, substr.type_ident,
-                                                 summary) |span, name, field| {
+            let result = decode_static_fields(cx,
+                                              span,
+                                              substr.type_ident,
+                                              summary,
+                                              |span, name, field| {
                 cx.expr_method_call(span, blkdecoder, read_struct_field,
                                     ~[cx.expr_str(span, name),
                                       cx.expr_uint(span, field),
                                       lambdadecode])
-            };
+            });
             cx.expr_method_call(span, decoder, cx.ident_of("read_struct"),
                                 ~[cx.expr_str(span, cx.str_of(substr.type_ident)),
                                   cx.expr_uint(span, nfields),
@@ -93,12 +96,15 @@ fn decodable_substructure(cx: @ExtCtxt, span: Span,
                 let (name, parts) = match *f { (i, ref p) => (i, p) };
                 variants.push(cx.expr_str(span, cx.str_of(name)));
 
-                let decoded = do decode_static_fields(cx, span, name,
-                                                      parts) |span, _, field| {
+                let decoded = decode_static_fields(cx,
+                                                   span,
+                                                   name,
+                                                   parts,
+                                                   |span, _, field| {
                     cx.expr_method_call(span, blkdecoder, rvariant_arg,
                                         ~[cx.expr_uint(span, field),
                                           lambdadecode])
-                };
+                });
 
                 arms.push(cx.arm(span,
                                  ~[cx.pat_lit(span, cx.expr_uint(span, i))],
@@ -135,18 +141,18 @@ fn decode_static_fields(cx: @ExtCtxt,
             if fields.is_empty() {
                 cx.expr_ident(outer_span, outer_pat_ident)
             } else {
-                let fields = do fields.iter().enumerate().map |(i, &span)| {
+                let fields = fields.iter().enumerate().map(|(i, &span)| {
                     getarg(span, format!("_field{}", i).to_managed(), i)
-                }.collect();
+                }).collect();
 
                 cx.expr_call_ident(outer_span, outer_pat_ident, fields)
             }
         }
         Named(ref fields) => {
             // use the field's span to get nicer error messages.
-            let fields = do fields.iter().enumerate().map |(i, &(name, span))| {
+            let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
                 cx.field_imm(span, name, getarg(span, cx.str_of(name), i))
-            }.collect();
+            }).collect();
             cx.expr_struct_ident(outer_span, outer_pat_ident, fields)
         }
     }
src/libsyntax/ext/deriving/default.rs
index 866df36bba4ad301f09b29e52215b0e9b82085fb..015083f11d35ba4128746547854390fe0b91a68c 100644 (file)
@@ -60,9 +60,9 @@ fn default_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Exp
                     }
                 }
                 Named(ref fields) => {
-                    let default_fields = do fields.map |&(ident, span)| {
+                    let default_fields = fields.map(|&(ident, span)| {
                         cx.field_imm(span, ident, default_call(span))
-                    };
+                    });
                     cx.expr_struct_ident(span, substr.type_ident, default_fields)
                 }
             }
src/libsyntax/ext/deriving/encodable.rs
index 899100f14ba77f041e656221e962a4b2403f53c1..96b77c4c16246f32a6d14ecdaac1be5242982275 100644 (file)
@@ -24,19 +24,19 @@ struct Node {id: uint}
 
 impl<S:extra::serialize::Encoder> Encodable<S> for Node {
     fn encode(&self, s: &S) {
-        do s.emit_struct("Node", 1) {
+        s.emit_struct("Node", 1, || {
             s.emit_field("id", 0, || s.emit_uint(self.id))
-        }
+        })
     }
 }
 
 impl<D:Decoder> Decodable for node_id {
     fn decode(d: &D) -> Node {
-        do d.read_struct("Node", 1) {
+        d.read_struct("Node", 1, || {
             Node {
                 id: d.read_field(~"x", 0, || decode(d))
             }
-        }
+        })
     }
 }
 
@@ -53,10 +53,10 @@ impl<
         T: Encodable<S>
     > spanned<T>: Encodable<S> {
         fn encode<S:Encoder>(s: &S) {
-            do s.emit_rec {
+            s.emit_rec(|| {
                 s.emit_field("node", 0, || self.node.encode(s));
                 s.emit_field("span", 1, || self.span.encode(s));
-            }
+            })
         }
     }
 
@@ -65,12 +65,12 @@ impl<
         T: Decodable<D>
     > spanned<T>: Decodable<D> {
         fn decode(d: &D) -> spanned<T> {
-            do d.read_rec {
+            d.read_rec(|| {
                 {
                     node: d.read_field(~"node", 0, || decode(d)),
                     span: d.read_field(~"span", 1, || decode(d)),
                 }
-            }
+            })
         }
     }
 */
src/libsyntax/ext/deriving/generic.rs
index 2f6cfeb24da1f12ccf05fb500561d46d517c69ef..614c719e0a2be2a33020c790b7b9c923f7d8bda9 100644 (file)
@@ -362,9 +362,9 @@ fn create_derived_impl(&self, cx: @ExtCtxt, trait_span: Span,
             // a TyParamBound requires an ast id
             let mut bounds = opt_vec::from(
                 // extra restrictions on the generics parameters to the type being derived upon
-                do self.additional_bounds.map |p| {
+                self.additional_bounds.map(|p| {
                     cx.typarambound(p.to_path(cx, trait_span, type_ident, generics))
-                });
+                }));
             // require the current trait
             bounds.push(cx.typarambound(trait_path.clone()));
 
@@ -375,9 +375,9 @@ fn create_derived_impl(&self, cx: @ExtCtxt, trait_span: Span,
         let trait_ref = cx.trait_ref(trait_path);
 
         // Create the type parameters on the `self` path.
-        let self_ty_params = do generics.ty_params.map |ty_param| {
+        let self_ty_params = generics.ty_params.map(|ty_param| {
             cx.ty_ident(trait_span, ty_param.ident)
-        };
+        });
 
         let self_lifetimes = generics.lifetimes.clone();
 
@@ -405,7 +405,7 @@ fn expand_struct_def(&self, cx: @ExtCtxt,
                          struct_def: &struct_def,
                          type_ident: Ident,
                          generics: &Generics) -> @ast::item {
-        let methods = do self.methods.map |method_def| {
+        let methods = self.methods.map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(cx, trait_span, type_ident, generics);
 
@@ -426,7 +426,7 @@ fn expand_struct_def(&self, cx: @ExtCtxt,
                                      type_ident, generics,
                                      explicit_self, tys,
                                      body)
-        };
+        });
 
         self.create_derived_impl(cx, trait_span, type_ident, generics, methods)
     }
@@ -436,7 +436,7 @@ fn expand_enum_def(&self,
                        enum_def: &enum_def,
                        type_ident: Ident,
                        generics: &Generics) -> @ast::item {
-        let methods = do self.methods.map |method_def| {
+        let methods = self.methods.map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(cx, trait_span, type_ident, generics);
 
@@ -457,7 +457,7 @@ fn expand_enum_def(&self,
                                      type_ident, generics,
                                      explicit_self, tys,
                                      body)
-        };
+        });
 
         self.create_derived_impl(cx, trait_span, type_ident, generics, methods)
     }
@@ -547,9 +547,9 @@ fn create_method(&self, cx: @ExtCtxt, trait_span: Span,
         // create the generics that aren't for Self
         let fn_generics = self.generics.to_generics(cx, trait_span, type_ident, generics);
 
-        let args = do arg_types.move_iter().map |(name, ty)| {
+        let args = arg_types.move_iter().map(|(name, ty)| {
             cx.arg(trait_span, name, ty)
-        }.collect();
+        }).collect();
 
         let ret_type = self.get_ret_ty(cx, trait_span, generics, type_ident);
 
@@ -624,19 +624,19 @@ fn expand_struct_method_body(&self,
         // transpose raw_fields
         let fields = match raw_fields {
             [ref self_arg, .. rest] => {
-                do self_arg.iter().enumerate().map |(i, &(span, opt_id, field))| {
-                    let other_fields = do rest.map |l| {
+                self_arg.iter().enumerate().map(|(i, &(span, opt_id, field))| {
+                    let other_fields = rest.map(|l| {
                         match &l[i] {
                             &(_, _, ex) => ex
                         }
-                    };
+                    });
                     FieldInfo {
                         span: span,
                         name: opt_id,
                         self_: field,
                         other: other_fields
                     }
-                }.collect()
+                }).collect()
             }
             [] => { cx.span_bug(trait_span, "No self arguments to non-static \
                                        method in generic `deriving`") }
@@ -787,16 +787,16 @@ fn build_enum_match(&self,
                         }
                     }
                     let field_tuples =
-                        do self_vec.iter()
-                           .zip(enum_matching_fields.iter())
-                           .map |(&(span, id, self_f), other)| {
+                        self_vec.iter()
+                                .zip(enum_matching_fields.iter())
+                                .map(|(&(span, id, self_f), other)| {
                         FieldInfo {
                             span: span,
                             name: id,
                             self_: self_f,
                             other: (*other).clone()
                         }
-                    }.collect();
+                    }).collect();
                     substructure = EnumMatching(variant_index, variant, field_tuples);
                 }
                 None => {
@@ -901,7 +901,7 @@ fn expand_static_enum_method_body(&self,
                                       self_args: &[@Expr],
                                       nonself_args: &[@Expr])
         -> @Expr {
-        let summary = do enum_def.variants.map |v| {
+        let summary = enum_def.variants.map(|v| {
             let ident = v.node.name;
             let summary = match v.node.kind {
                 ast::tuple_variant_kind(ref args) => Unnamed(args.map(|va| va.ty.span)),
@@ -910,7 +910,7 @@ fn expand_static_enum_method_body(&self,
                 }
             };
             (ident, summary)
-        };
+        });
         self.call_substructure_method(cx,
                                       trait_span, type_ident,
                                       self_args, nonself_args,
@@ -944,10 +944,10 @@ pub fn create_subpatterns(cx: @ExtCtxt,
                           field_paths: ~[ast::Path],
                           mutbl: ast::Mutability)
                    -> ~[@ast::Pat] {
-    do field_paths.map |path| {
+    field_paths.map(|path| {
         cx.pat(path.span,
                ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
-    }
+    })
 }
 
 #[deriving(Eq)] // dogfooding!
@@ -1003,10 +1003,10 @@ fn create_struct_pattern(cx: @ExtCtxt,
     // struct_type is definitely not Unknown, since struct_def.fields
     // must be nonempty to reach here
     let pattern = if struct_type == Record {
-        let field_pats = do subpats.iter().zip(ident_expr.iter()).map |(&pat, &(_, id, _))| {
+        let field_pats = subpats.iter().zip(ident_expr.iter()).map(|(&pat, &(_, id, _))| {
             // id is guaranteed to be Some
             ast::FieldPat { ident: id.unwrap(), pat: pat }
-        }.collect();
+        }).collect();
         cx.pat_struct(trait_span, matching_path, field_pats)
     } else {
         cx.pat_enum(trait_span, matching_path, subpats)
@@ -1075,13 +1075,13 @@ pub fn cs_fold(use_foldl: bool,
     match *substructure.fields {
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             if use_foldl {
-                do all_fields.iter().fold(base) |old, field| {
+                all_fields.iter().fold(base, |old, field| {
                     f(cx, field.span, old, field.self_, field.other)
-                }
+                })
             } else {
-                do all_fields.rev_iter().fold(base) |old, field| {
+                all_fields.rev_iter().fold(base, |old, field| {
                     f(cx, field.span, old, field.self_, field.other)
-                }
+                })
             }
         },
         EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, trait_span,
@@ -1113,12 +1113,12 @@ pub fn cs_same_method(f: |@ExtCtxt, Span, ~[@Expr]| -> @Expr,
     match *substructure.fields {
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             // call self_n.method(other_1_n, other_2_n, ...)
-            let called = do all_fields.map |field| {
+            let called = all_fields.map(|field| {
                 cx.expr_method_call(field.span,
                                     field.self_,
                                     substructure.method_ident,
                                     field.other.clone())
-            };
+            });
 
             f(cx, trait_span, called)
         },
@@ -1148,13 +1148,13 @@ pub fn cs_same_method_fold(use_foldl: bool,
     cs_same_method(
         |cx, span, vals| {
             if use_foldl {
-                do vals.iter().fold(base) |old, &new| {
+                vals.iter().fold(base, |old, &new| {
                     f(cx, span, old, new)
-                }
+                })
             } else {
-                do vals.rev_iter().fold(base) |old, &new| {
+                vals.rev_iter().fold(base, |old, &new| {
                     f(cx, span, old, new)
-                }
+                })
             }
         },
         enum_nonmatch_f,
src/libsyntax/ext/deriving/iter_bytes.rs
index fed630cc6681b66080efc90b59195dcf1c2565fd..7e3debd7967f60f48ce977ab25aa8dc23f67bdf4 100644 (file)
@@ -90,7 +90,7 @@ fn iter_bytes_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @
         cx.span_bug(span, "#[deriving(IterBytes)] needs at least one field");
     }
 
-    do exprs.slice(1, exprs.len()).iter().fold(exprs[0]) |prev, me| {
+    exprs.slice(1, exprs.len()).iter().fold(exprs[0], |prev, me| {
         cx.expr_binary(span, BiAnd, prev, *me)
-    }
+    })
 }
src/libsyntax/ext/deriving/mod.rs
index 8729382141b8ad2d89491f8404674707dcdf7bf8..9c611d7e7b2844fffd08b09ce289edc6434514d0 100644 (file)
@@ -74,7 +74,7 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
             in_items
         }
         MetaList(_, ref titems) => {
-            do titems.rev_iter().fold(in_items) |in_items, &titem| {
+            titems.rev_iter().fold(in_items, |in_items, &titem| {
                 match titem.node {
                     MetaNameValue(tname, _) |
                     MetaList(tname, _) |
@@ -112,7 +112,7 @@ macro_rules! expand(($func:path) => ($func(cx, titem.span,
                         }
                     }
                 }
-            }
+            })
         }
     }
 }
src/libsyntax/ext/deriving/rand.rs
index 1877a6eb85bef277ed89211fa16a05ac9343376f..40fdee481edd98d90e942bc04c88a7e93308299a 100644 (file)
@@ -104,7 +104,7 @@ fn rand_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
                                               value_ref,
                                               variant_count);
 
-            let mut arms = do variants.iter().enumerate().map |(i, id_sum)| {
+            let mut arms = variants.iter().enumerate().map(|(i, id_sum)| {
                 let i_expr = cx.expr_uint(span, i);
                 let pat = cx.pat_lit(span, i_expr);
 
@@ -115,7 +115,7 @@ fn rand_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
                                rand_thing(cx, span, ident, summary, |sp| rand_call(sp)))
                     }
                 }
-            }.collect::<~[ast::Arm]>();
+            }).collect::<~[ast::Arm]>();
 
             // _ => {} at the end. Should never occur
             arms.push(cx.arm_unreachable(span));
@@ -144,9 +144,9 @@ fn rand_thing(cx: @ExtCtxt,
                 }
             }
             Named(ref fields) => {
-                let rand_fields = do fields.map |&(ident, span)| {
+                let rand_fields = fields.map(|&(ident, span)| {
                     cx.field_imm(span, ident, rand_call(span))
-                };
+                });
                 cx.expr_struct_ident(span, ctor_ident, rand_fields)
             }
         }
src/libsyntax/ext/deriving/ty.rs
index d1a5af5f7e8911102811fa26ed2c04a0569bd848..eb957e80835ffedc96b34d18d876a2b9bbe95e84 100644 (file)
@@ -171,9 +171,9 @@ pub fn to_path(&self,
                    -> ast::Path {
         match *self {
             Self => {
-                let self_params = do self_generics.ty_params.map |ty_param| {
+                let self_params = self_generics.ty_params.map(|ty_param| {
                     cx.ty_ident(span, ty_param.ident)
-                };
+                });
                 let lifetimes = self_generics.lifetimes.clone();
 
                 cx.path_all(span, false, ~[self_ty], lifetimes,
@@ -192,10 +192,10 @@ pub fn to_path(&self,
 fn mk_ty_param(cx: @ExtCtxt, span: Span, name: &str, bounds: &[Path],
                self_ident: Ident, self_generics: &Generics) -> ast::TyParam {
     let bounds = opt_vec::from(
-        do bounds.map |b| {
+        bounds.map(|b| {
             let path = b.to_path(cx, span, self_ident, self_generics);
             cx.typarambound(path)
-        });
+        }));
     cx.typaram(cx.ident_of(name), bounds)
 }
 
@@ -224,16 +224,16 @@ pub fn to_generics(&self,
                        self_ty: Ident,
                        self_generics: &Generics)
                        -> Generics {
-        let lifetimes = do self.lifetimes.map |lt| {
+        let lifetimes = self.lifetimes.map(|lt| {
             cx.lifetime(span, cx.ident_of(*lt))
-        };
-        let ty_params = do self.bounds.map |t| {
+        });
+        let ty_params = self.bounds.map(|t| {
             match t {
                 &(ref name, ref bounds) => {
                     mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics)
                 }
             }
-        };
+        });
         mk_generics(lifetimes, ty_params)
     }
 }
src/libsyntax/ext/deriving/zero.rs
index 939c7b5584432d3a4560bcaf06a5c1c61740daa6..a37cb586f59d457308283d69ae308a5e98949eab 100644 (file)
@@ -76,9 +76,9 @@ fn zero_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
                     }
                 }
                 Named(ref fields) => {
-                    let zero_fields = do fields.map |&(ident, span)| {
+                    let zero_fields = fields.map(|&(ident, span)| {
                         cx.field_imm(span, ident, zero_call(span))
-                    };
+                    });
                     cx.expr_struct_ident(span, substr.type_ident, zero_fields)
                 }
             }
src/libsyntax/ext/expand.rs
index 9b6f2a275eebb2ce21f56f9d593bf4bb307c7487..f23e13b89311c25f644321f16dd5f25b2dc0ca3f 100644 (file)
@@ -261,8 +261,8 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
     // For each item, look through the attributes.  If any of them are
     // decorated with "item decorators", then use that function to transform
     // the item into a new set of items.
-    let new_items = do vec::flat_map(module_.items) |item| {
-        do item.attrs.rev_iter().fold(~[*item]) |items, attr| {
+    let new_items = vec::flat_map(module_.items, |item| {
+        item.attrs.rev_iter().fold(~[*item], |items, attr| {
             let mname = attr.name();
 
             match (*extsbox).find(&intern(mname)) {
@@ -280,8 +280,8 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
               },
               _ => items,
             }
-        }
-    };
+        })
+    });
 
     ast::_mod {
         items: new_items,
src/libsyntax/ext/format.rs
index 00919fce5db6cf79ddf1451630c24dc0e757113f..1c0930f984a71343ce80282d98c19accd04eeb9a 100644 (file)
@@ -742,12 +742,12 @@ pub fn expand_args(ecx: @ExtCtxt, sp: Span,
                                "format argument must be a string literal.");
 
     let mut err = false;
-    do parse::parse_error::cond.trap(|m| {
+    parse::parse_error::cond.trap(|m| {
         if !err {
             err = true;
             ecx.span_err(efmt.span, m);
         }
-    }).inside {
+    }).inside(|| {
         for piece in parse::Parser::new(fmt) {
             if !err {
                 cx.verify_piece(&piece);
@@ -755,7 +755,7 @@ pub fn expand_args(ecx: @ExtCtxt, sp: Span,
                 cx.pieces.push(piece);
             }
         }
-    }
+    });
     if err { return MRExpr(efmt) }
 
     // Make sure that all arguments were used and all arguments have types.
src/libsyntax/ext/tt/macro_parser.rs
index 50688afc56a5fe3ba3302bda13ae3e40cc07dda5..3e877d29300dab03766b903d6325e287d97cc6dd 100644 (file)
@@ -127,12 +127,12 @@ pub fn copy_up(mpu: &matcher_pos_up) -> ~MatcherPos {
 }
 
 pub fn count_names(ms: &[matcher]) -> uint {
-    do ms.iter().fold(0) |ct, m| {
+    ms.iter().fold(0, |ct, m| {
         ct + match m.node {
           match_tok(_) => 0u,
           match_seq(ref more_ms, _, _, _, _) => count_names((*more_ms)),
           match_nonterminal(_,_,_) => 1u
-        }}
+        }})
 }
 
 pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
@@ -416,9 +416,9 @@ pub fn parse(
                 }
                 cur_eis.push(ei);
 
-                do rust_parser.tokens_consumed.times() || {
-                    rdr.next_token();
-                }
+                rust_parser.tokens_consumed.times(|| {
+                    let _ = rdr.next_token();
+                });
             }
         }
 
src/libsyntax/ext/tt/transcribe.rs
index f8d48d00db9185d1fa7e3b06fbf1d38b984867d3..0f7b92b5b064050a962692380c0895941040d892 100644 (file)
@@ -151,10 +151,10 @@ fn lis_merge(lhs: lis, rhs: lis) -> lis {
     }
     match *t {
       tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => {
-        do tts.iter().fold(lis_unconstrained) |lis, tt| {
+        tts.iter().fold(lis_unconstrained, |lis, tt| {
             let lis2 = lockstep_iter_size(tt, r);
             lis_merge(lis, lis2)
-        }
+        })
       }
       tt_tok(*) => lis_unconstrained,
       tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
src/libsyntax/fold.rs
index e81973835fe55fe05491c70dd600c047e229a9c9..b268988c3c567ec407dfea4d929f0fccbac6bece 100644 (file)
@@ -187,12 +187,12 @@ fn fold_pat(&self, p: @Pat) -> @Pat {
             }
             PatStruct(ref pth, ref fields, etc) => {
                 let pth_ = self.fold_path(pth);
-                let fs = do fields.map |f| {
+                let fs = fields.map(|f| {
                     ast::FieldPat {
                         ident: f.ident,
                         pat: self.fold_pat(f.pat)
                     }
-                };
+                });
                 PatStruct(pth_, fs, etc)
             }
             PatTup(ref elts) => PatTup(elts.map(|x| self.fold_pat(*x))),
@@ -455,7 +455,7 @@ fn fold_arg_<T:ast_fold>(a: &arg, fld: &T) -> arg {
 // build a new vector of tts by appling the ast_fold's fold_ident to
 // all of the identifiers in the token trees.
 pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
-    do tts.map |tt| {
+    tts.map(|tt| {
         match *tt {
             tt_tok(span, ref tok) =>
             tt_tok(span,maybe_fold_ident(tok,fld)),
@@ -468,7 +468,7 @@ pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
             tt_nonterminal(sp,ref ident) =>
             tt_nonterminal(sp,fld.fold_ident(*ident))
         }
-    }
+    })
 }
 
 // apply ident folder if it's an ident, otherwise leave it alone
@@ -601,11 +601,11 @@ fn fold_field<T:ast_fold>(f: TypeField, folder: &T) -> TypeField {
 
 fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T)
                                -> Option<OptVec<TyParamBound>> {
-    do b.as_ref().map |bounds| {
-        do bounds.map |bound| {
+    b.as_ref().map(|bounds| {
+        bounds.map(|bound| {
             fold_ty_param_bound(bound, folder)
-        }
-    }
+        })
+    })
 }
 
 fn fold_variant_arg_<T:ast_fold>(va: &variant_arg, folder: &T)
@@ -660,9 +660,9 @@ pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
         item_enum(ref enum_definition, ref generics) => {
             item_enum(
                 ast::enum_def {
-                    variants: do enum_definition.variants.map |x| {
+                    variants: enum_definition.variants.map(|x| {
                         folder.fold_variant(x)
-                    },
+                    }),
                 },
                 fold_generics(generics, folder))
         }
@@ -678,12 +678,12 @@ pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
             )
         }
         item_trait(ref generics, ref traits, ref methods) => {
-            let methods = do methods.map |method| {
+            let methods = methods.map(|method| {
                 match *method {
                     required(ref m) => required(folder.fold_type_method(m)),
                     provided(method) => provided(folder.fold_method(method))
                 }
-            };
+            });
             item_trait(fold_generics(generics, folder),
                        traits.map(|p| fold_trait_ref(p, folder)),
                        methods)
src/libsyntax/parse/comments.rs
index 8defd8a7b6cb6a117f9342696c387780fc5802f1..d8f2d8a53807a6050900cb8c822249b65b5f70bd 100644 (file)
@@ -106,9 +106,7 @@ fn horizontal_trim(lines: ~[~str]) -> ~[~str] {
         }
 
         if can_trim {
-            do lines.map |line| {
-                line.slice(i + 1, line.len()).to_owned()
-            }
+            lines.map(|line| line.slice(i + 1, line.len()).to_owned())
         } else {
             lines
         }
@@ -377,10 +375,10 @@ pub fn gather_comments_and_literals(span_diagnostic:
         //discard, and look ahead; we're working with internal state
         let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
         if token::is_lit(&tok) {
-            do with_str_from(rdr, bstart) |s| {
+            with_str_from(rdr, bstart, |s| {
                 debug!("tok lit: {}", s);
                 literals.push(lit {lit: s.to_owned(), pos: sp.lo});
-            }
+            })
         } else {
             debug!("tok: {}", token::to_str(get_ident_interner(), &tok));
         }
src/libsyntax/parse/lexer.rs
index 6aa3962a0e7de5b30de8f5b6d8606855dda11ddb..e4b93c3b4d5b567035234955d8e3a5c7f96cdddd 100644 (file)
@@ -337,7 +337,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
                 while rdr.curr != '\n' && !is_eof(rdr) {
                     bump(rdr);
                 }
-                let ret = do with_str_from(rdr, start_bpos) |string| {
+                let ret = with_str_from(rdr, start_bpos, |string| {
                     // but comments with only more "/"s are not
                     if !is_line_non_doc_comment(string) {
                         Some(TokenAndSpan{
@@ -347,7 +347,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
                     } else {
                         None
                     }
-                };
+                });
 
                 if ret.is_some() {
                     return ret;
@@ -412,7 +412,7 @@ fn consume_block_comment(rdr: @mut StringReader)
     }
 
     let res = if is_doc_comment {
-        do with_str_from(rdr, start_bpos) |string| {
+        with_str_from(rdr, start_bpos, |string| {
             // but comments with only "*"s between two "/"s are not
             if !is_block_non_doc_comment(string) {
                 Some(TokenAndSpan{
@@ -422,7 +422,7 @@ fn consume_block_comment(rdr: @mut StringReader)
             } else {
                 None
             }
-        }
+        })
     } else {
         None
     };
@@ -652,7 +652,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
             bump(rdr);
         }
 
-        return do with_str_from(rdr, start) |string| {
+        return with_str_from(rdr, start, |string| {
             if string == "_" {
                 token::UNDERSCORE
             } else {
@@ -661,7 +661,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
                 // FIXME: perform NFKC normalization here. (Issue #2253)
                 token::IDENT(str_to_ident(string), is_mod_name)
             }
-        }
+        })
     }
     if is_dec_digit(c) {
         return scan_number(c, rdr);
@@ -775,9 +775,9 @@ fn binop(rdr: @mut StringReader, op: token::binop) -> token::Token {
             while ident_continue(rdr.curr) {
                 bump(rdr);
             }
-            return do with_str_from(rdr, start) |lifetime_name| {
+            return with_str_from(rdr, start, |lifetime_name| {
                 token::LIFETIME(str_to_ident(lifetime_name))
-            }
+            })
         }
 
         // Otherwise it is a character constant:
src/libsyntax/parse/parser.rs
index 5db26dd99dda68adc29f9d39ef23e7639872481d..b9a7ec33ee4ffcd8af5358196c8a820b7a45d289 100644 (file)
@@ -1031,11 +1031,11 @@ pub fn parse_ty_fn_decl(&self, allow_variadic: bool) -> (fn_decl, OptVec<ast::Li
 
     // parse the methods in a trait declaration
     pub fn parse_trait_methods(&self) -> ~[trait_method] {
-        do self.parse_unspanned_seq(
+        self.parse_unspanned_seq(
             &token::LBRACE,
             &token::RBRACE,
-            seq_sep_none()
-        ) |p| {
+            seq_sep_none(),
+            |p| {
             let attrs = p.parse_outer_attributes();
             let lo = p.span.lo;
 
@@ -1048,11 +1048,11 @@ pub fn parse_trait_methods(&self) -> ~[trait_method] {
 
             let generics = p.parse_generics();
 
-            let (explicit_self, d) = do self.parse_fn_decl_with_self() |p| {
+            let (explicit_self, d) = self.parse_fn_decl_with_self(|p| {
                 // This is somewhat dubious; We don't want to allow argument
                 // names to be left off if there is a definition...
                 p.parse_arg_general(false)
-            };
+            });
 
             let hi = p.last_span.hi;
             debug!("parse_trait_methods(): trait method signature ends in \
@@ -1108,7 +1108,7 @@ pub fn parse_trait_methods(&self) -> ~[trait_method] {
                     );
                 }
             }
-        }
+        })
     }
 
     // parse a possibly mutable type
@@ -3000,13 +3000,13 @@ pub fn parse_pat(&self) -> @Pat {
             let mutbl = self.parse_mutability();
             pat = self.parse_pat_ident(BindByRef(mutbl));
         } else {
-            let can_be_enum_or_struct = do self.look_ahead(1) |t| {
+            let can_be_enum_or_struct = self.look_ahead(1, |t| {
                 match *t {
                     token::LPAREN | token::LBRACKET | token::LT |
                     token::LBRACE | token::MOD_SEP => true,
                     _ => false,
                 }
-            };
+            });
 
             if self.look_ahead(1, |t| *t == token::DOTDOT) {
                 let start = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@@ -3040,18 +3040,18 @@ pub fn parse_pat(&self) -> @Pat {
                         let mut args: ~[@Pat] = ~[];
                         match *self.token {
                           token::LPAREN => {
-                            let is_star = do self.look_ahead(1) |t| {
+                            let is_star = self.look_ahead(1, |t| {
                                 match *t {
                                     token::BINOP(token::STAR) => true,
                                     _ => false,
                                 }
-                            };
-                            let is_dotdot = do self.look_ahead(1) |t| {
+                            });
+                            let is_dotdot = self.look_ahead(1, |t| {
                                 match *t {
                                     token::DOTDOT => true,
                                     _ => false,
                                 }
-                            };
+                            });
                             if is_star | is_dotdot {
                                 // This is a "top constructor only" pat
                                 self.bump();
@@ -3884,9 +3884,9 @@ fn parse_method(&self, already_parsed_attrs: Option<~[Attribute]>) -> @method {
         let pur = self.parse_fn_purity();
         let ident = self.parse_ident();
         let generics = self.parse_generics();
-        let (explicit_self, decl) = do self.parse_fn_decl_with_self() |p| {
+        let (explicit_self, decl) = self.parse_fn_decl_with_self(|p| {
             p.parse_arg()
-        };
+        });
 
         let (inner_attrs, body) = self.parse_inner_attrs_and_block();
         let hi = body.span.hi;
@@ -4027,11 +4027,11 @@ fn parse_item_struct(&self) -> item_info {
         } else if *self.token == token::LPAREN {
             // It's a tuple-like struct.
             is_tuple_like = true;
-            fields = do self.parse_unspanned_seq(
+            fields = self.parse_unspanned_seq(
                 &token::LPAREN,
                 &token::RPAREN,
-                seq_sep_trailing_allowed(token::COMMA)
-            ) |p| {
+                seq_sep_trailing_allowed(token::COMMA),
+                |p| {
                 let attrs = self.parse_outer_attributes();
                 let lo = p.span.lo;
                 let struct_field_ = ast::struct_field_ {
@@ -4041,7 +4041,7 @@ fn parse_item_struct(&self) -> item_info {
                     attrs: attrs,
                 };
                 @spanned(lo, p.span.hi, struct_field_)
-            };
+            });
             self.expect(&token::SEMI);
         } else if self.eat(&token::SEMI) {
             // It's a unit-like struct.
@@ -4259,20 +4259,16 @@ fn eval_src_mod_from_path(&self,
                               path: Path,
                               outer_attrs: ~[ast::Attribute],
                               id_sp: Span) -> (ast::item_, ~[ast::Attribute]) {
-        let maybe_i = do self.sess.included_mod_stack.iter().position |p| { *p == path };
+        let maybe_i = self.sess.included_mod_stack.iter().position(|p| *p == path);
         match maybe_i {
             Some(i) => {
                 let stack = &self.sess.included_mod_stack;
                 let mut err = ~"circular modules: ";
                 for p in stack.slice(i, stack.len()).iter() {
-                    do p.display().with_str |s| {
-                        err.push_str(s);
-                    }
+                    p.display().with_str(|s| err.push_str(s));
                     err.push_str(" -> ");
                 }
-                do path.display().with_str |s| {
-                    err.push_str(s);
-                }
+                path.display().with_str(|s| err.push_str(s));
                 self.span_fatal(id_sp, err);
             }
             None => ()
src/libsyntax/parse/token.rs
index 63f4f97889c6dfd82ef6ed011222c0cf4f68cafc..870c1bd74b1b0b13ea5ba4802479a9a7511803a4 100644 (file)
@@ -169,9 +169,9 @@ pub fn to_str(input: @ident_interner, t: &Token) -> ~str {
       /* Literals */
       LIT_CHAR(c) => {
           let mut res = ~"'";
-          do char::from_u32(c).unwrap().escape_default |c| {
+          char::from_u32(c).unwrap().escape_default(|c| {
               res.push_char(c);
-          }
+          });
           res.push_char('\'');
           res
       }
src/libsyntax/print/pprust.rs
index c41460485da67cb3b58ffbec43255626a55e6e05..a8f82221fa199abf30ff7cb245d9e63516db2dfe 100644 (file)
@@ -704,7 +704,7 @@ pub fn print_struct(s: @ps,
     if ast_util::struct_def_is_tuple_like(struct_def) {
         if !struct_def.fields.is_empty() {
             popen(s);
-            do commasep(s, inconsistent, struct_def.fields) |s, field| {
+            commasep(s, inconsistent, struct_def.fields, |s, field| {
                 match field.node.kind {
                     ast::named_field(*) => fail!("unexpected named field"),
                     ast::unnamed_field => {
@@ -712,7 +712,7 @@ pub fn print_struct(s: @ps,
                         print_type(s, &field.node.ty);
                     }
                 }
-            }
+            });
             pclose(s);
         }
         word(s.s, ";");
@@ -1699,9 +1699,7 @@ fn print_field(s: @ps, f: &ast::FieldPat) {
       }
       ast::PatVec(ref before, slice, ref after) => {
         word(s.s, "[");
-        do commasep(s, inconsistent, *before) |s, &p| {
-            print_pat(s, p);
-        }
+        commasep(s, inconsistent, *before, |s, &p| print_pat(s, p));
         for &p in slice.iter() {
             if !before.is_empty() { word_space(s, ","); }
             match p {
@@ -1713,9 +1711,7 @@ fn print_field(s: @ps, f: &ast::FieldPat) {
             print_pat(s, p);
             if !after.is_empty() { word_space(s, ","); }
         }
-        do commasep(s, inconsistent, *after) |s, &p| {
-            print_pat(s, p);
-        }
+        commasep(s, inconsistent, *after, |s, &p| print_pat(s, p));
         word(s.s, "]");
       }
     }
@@ -1937,9 +1933,9 @@ pub fn print_view_path(s: @ps, vp: &ast::view_path) {
       ast::view_path_list(ref path, ref idents, _) => {
         print_path(s, path, false);
         word(s.s, "::{");
-        do commasep(s, inconsistent, (*idents)) |s, w| {
+        commasep(s, inconsistent, (*idents), |s, w| {
             print_ident(s, w.node.name);
-        }
+        });
         word(s.s, "}");
       }
     }
@@ -2053,9 +2049,7 @@ pub fn print_ty_fn(s: @ps,
     match id { Some(id) => { word(s.s, " "); print_ident(s, id); } _ => () }
 
     if opt_sigil != Some(ast::BorrowedSigil) {
-        do opt_bounds.as_ref().map |bounds| {
-            print_bounds(s, bounds, true);
-        };
+        opt_bounds.as_ref().map(|bounds| print_bounds(s, bounds, true));
     }
 
     match generics { Some(g) => print_generics(s, g), _ => () }
@@ -2157,9 +2151,7 @@ pub fn print_literal(s: @ps, lit: &ast::lit) {
       ast::lit_str(st, style) => print_string(s, st, style),
       ast::lit_char(ch) => {
           let mut res = ~"'";
-          do char::from_u32(ch).unwrap().escape_default |c| {
-              res.push_char(c);
-          }
+          char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c));
           res.push_char('\'');
           word(s.s, res);
       }