git.lizzy.rs Git - rust.git/commitdiff
ICH: Hash MacroDefs in a mostly stable way.
author    Michael Woerister <michaelwoerister@posteo.net>
          Tue, 15 Nov 2016 20:20:39 +0000 (15:20 -0500)
committer Michael Woerister <michaelwoerister@posteo.net>
          Fri, 18 Nov 2016 21:45:59 +0000 (16:45 -0500)
src/librustc_incremental/calculate_svh/mod.rs
src/librustc_incremental/calculate_svh/svh_visitor.rs

index 0339f488272574750c5bfdedf8d0550f099aeb0d..f98e698a1c9d416b740457a4cfb8f0150977c0da 100644
--- a/src/librustc_incremental/calculate_svh/mod.rs
+++ b/src/librustc_incremental/calculate_svh/mod.rs
@@ -46,6 +46,7 @@
 use self::hasher::IchHasher;
 use ich::Fingerprint;
 
+
 mod def_path_hash;
 mod svh_visitor;
 mod caching_codemap_view;
@@ -113,8 +114,12 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
         visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX),
                                  |v| visit::walk_crate(v, krate));
-        // FIXME(#37713) if foreign items were item likes, could use ItemLikeVisitor
         krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
+
+        for macro_def in krate.exported_macros.iter() {
+            visitor.calculate_node_id(macro_def.id,
+                                      |v| v.visit_macro_def(macro_def));
+        }
     });
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
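
With this hunk, compute_incremental_hashes_map records a separate fingerprint for every exported macro, keyed by the macro's node id, in addition to the per-item hashes it already computed. Below is a minimal, self-contained sketch of that per-item pattern; the MacroDef/NodeId types and the use of DefaultHasher are stand-ins for illustration, not rustc's IchHasher or HIR types.

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

type NodeId = u32;

// Stand-in for hir::MacroDef: just an id and a body to hash.
struct MacroDef {
    id: NodeId,
    body: Vec<String>,
}

// One fingerprint per exported macro, so a change to one macro's body only
// affects the hash entry recorded for that macro.
fn hash_exported_macros(exported_macros: &[MacroDef]) -> HashMap<NodeId, u64> {
    let mut hashes = HashMap::new();
    for macro_def in exported_macros {
        let mut hasher = DefaultHasher::new();
        macro_def.body.hash(&mut hasher);
        hashes.insert(macro_def.id, hasher.finish());
    }
    hashes
}

fn main() {
    let macros = [MacroDef { id: 1, body: vec!["( ) => { 42 }".to_string()] }];
    println!("{:?}", hash_exported_macros(&macros));
}
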
index fa2eff817eaa8e33685cfe282d7cd255c4a6af55..e8608b187d84cb4241fca4e0eb09a091e2359382 100644
--- a/src/librustc_incremental/calculate_svh/svh_visitor.rs
+++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs
@@ -24,6 +24,7 @@
 use syntax::attr;
 use syntax::parse::token;
 use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
+use syntax::tokenstream;
 use rustc::hir;
 use rustc::hir::*;
 use rustc::hir::def::{Def, PathResolution};
@@ -769,9 +770,10 @@ fn visit_macro_def(&mut self, macro_def: &'tcx MacroDef) {
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
+        for tt in &macro_def.body {
+            self.hash_token_tree(tt);
+        }
         visit::walk_macro_def(self, macro_def)
-        // FIXME(mw): We should hash the body of the macro too but we don't
-        //            have a stable way of doing so yet.
     }
 }
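
Before this change, visit_macro_def hashed only the SawMacroDef marker and the macro's attributes; the body was skipped because there was no stable way to hash it (see the removed FIXME). Now every token tree of the body is fed into the hasher. A hypothetical example of what this buys:

// Hypothetical example (not part of the commit): editing the body of an
// exported macro now changes its incremental-compilation hash.
#[macro_export]
macro_rules! answer {
    () => { 42 };   // changing `42` to `43` previously left the macro's
                    // ICH entry untouched; with this patch it changes it
}

fn main() {
    println!("{}", answer!());
}
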
 
@@ -941,4 +943,137 @@ fn maybe_enable_overflow_checks(&mut self, item_attrs: &[ast::Attribute]) {
             self.overflow_checks_enabled = true;
         }
     }
+
+    fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
+        self.hash_discriminant(tt);
+        match *tt {
+            tokenstream::TokenTree::Token(span, ref token) => {
+                hash_span!(self, span);
+                self.hash_token(token);
+            }
+            tokenstream::TokenTree::Delimited(span, ref delimited) => {
+                hash_span!(self, span);
+                let tokenstream::Delimited {
+                    ref delim,
+                    open_span,
+                    ref tts,
+                    close_span,
+                } = **delimited;
+
+                delim.hash(self.st);
+                hash_span!(self, open_span);
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                hash_span!(self, close_span);
+            }
+            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
+                hash_span!(self, span);
+                let tokenstream::SequenceRepetition {
+                    ref tts,
+                    ref separator,
+                    op,
+                    num_captures,
+                } = **sequence_repetition;
+
+                tts.len().hash(self.st);
+                for sub_tt in tts {
+                    self.hash_token_tree(sub_tt);
+                }
+                self.hash_discriminant(separator);
+                if let Some(ref separator) = *separator {
+                    self.hash_token(separator);
+                }
+                op.hash(self.st);
+                num_captures.hash(self.st);
+            }
+        }
+    }
+
+    fn hash_token(&mut self, token: &token::Token) {
+        self.hash_discriminant(token);
+        match *token {
+            token::Token::Eq |
+            token::Token::Lt |
+            token::Token::Le |
+            token::Token::EqEq |
+            token::Token::Ne |
+            token::Token::Ge |
+            token::Token::Gt |
+            token::Token::AndAnd |
+            token::Token::OrOr |
+            token::Token::Not |
+            token::Token::Tilde |
+            token::Token::At |
+            token::Token::Dot |
+            token::Token::DotDot |
+            token::Token::DotDotDot |
+            token::Token::Comma |
+            token::Token::Semi |
+            token::Token::Colon |
+            token::Token::ModSep |
+            token::Token::RArrow |
+            token::Token::LArrow |
+            token::Token::FatArrow |
+            token::Token::Pound |
+            token::Token::Dollar |
+            token::Token::Question |
+            token::Token::Underscore |
+            token::Token::Whitespace |
+            token::Token::Comment |
+            token::Token::Eof => {}
+
+            token::Token::BinOp(bin_op_token) |
+            token::Token::BinOpEq(bin_op_token) => bin_op_token.hash(self.st),
+
+            token::Token::OpenDelim(delim_token) |
+            token::Token::CloseDelim(delim_token) => delim_token.hash(self.st),
+
+            token::Token::Literal(ref lit, ref opt_name) => {
+                self.hash_discriminant(lit);
+                match *lit {
+                    token::Lit::Byte(val) |
+                    token::Lit::Char(val) |
+                    token::Lit::Integer(val) |
+                    token::Lit::Float(val) |
+                    token::Lit::Str_(val) |
+                    token::Lit::ByteStr(val) => val.as_str().hash(self.st),
+                    token::Lit::StrRaw(val, n) |
+                    token::Lit::ByteStrRaw(val, n) => {
+                        val.as_str().hash(self.st);
+                        n.hash(self.st);
+                    }
+                };
+                opt_name.map(ast::Name::as_str).hash(self.st);
+            }
+
+            token::Token::Ident(ident) |
+            token::Token::Lifetime(ident) |
+            token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
+            token::Token::MatchNt(ident1, ident2) => {
+                ident1.name.as_str().hash(self.st);
+                ident2.name.as_str().hash(self.st);
+            }
+
+            token::Token::Interpolated(ref non_terminal) => {
+                // FIXME(mw): This could be implemented properly. It's just a
+                //            lot of work, since we would need to hash the AST
+                //            in a stable way, in addition to the HIR.
+                //            Since this is hardly used anywhere, just emit a
+                //            warning for now.
+                if self.tcx.sess.opts.debugging_opts.incremental.is_some() {
+                    let msg = format!("Quasi-quoting might make incremental \
+                                       compilation very inefficient: {:?}",
+                                      non_terminal);
+                    self.tcx.sess.warn(&msg[..]);
+                }
+
+                non_terminal.hash(self.st);
+            }
+
+            token::Token::DocComment(val) |
+            token::Token::Shebang(val) => val.as_str().hash(self.st),
+        }
+    }
 }
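
Two details make the new hash_token_tree / hash_token pair "mostly stable": each enum value is hashed as its variant discriminant followed by its fields (with collection lengths hashed before their elements), and interned names (identifiers, literals, doc comments) are hashed by their string contents via as_str() rather than by their interned index, which can differ from one compilation session to the next. Token::Interpolated remains the unstable case, hence the warning. The sketch below illustrates both ideas with a simplified token type and DefaultHasher; it is not rustc's IchHasher.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

// Simplified stand-in for a token tree.
enum Tok {
    Ident(String),       // hashed by contents, not by an interned index
    Delimited(Vec<Tok>), // hashed as length + recursively hashed children
}

fn hash_tok<H: Hasher>(tok: &Tok, st: &mut H) {
    // Hash which variant this is first, so different variants with
    // coincidentally equal field data cannot collide.
    mem::discriminant(tok).hash(st);
    match *tok {
        Tok::Ident(ref name) => name.hash(st),
        Tok::Delimited(ref tts) => {
            tts.len().hash(st);
            for sub_tt in tts {
                hash_tok(sub_tt, st);
            }
        }
    }
}

fn main() {
    let tt = Tok::Delimited(vec![Tok::Ident("foo".to_string())]);
    let mut h = DefaultHasher::new();
    hash_tok(&tt, &mut h);
    println!("{:x}", h.finish());
}
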