DepGraphQuery::new(&nodes[..], &edges[..])
}
- pub fn in_ignore<'graph>(&'graph self) -> Option<raii::IgnoreTask<'graph>> {
- self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.current))
+ /// Asserts that we are currently inside an "ignore" context: the top of
+ /// the task stack must be an `OpenTask::Ignore`, or the stack must be
+ /// empty. Panics otherwise. When `self.data` is `None` (dep-graph
+ /// tracking disabled) there is nothing to check, so this is a no-op.
+ pub fn assert_ignored(&self)
+ {
+ if let Some(ref data) = self.data {
+ match data.current.borrow().task_stack.last() {
+ Some(&OpenTask::Ignore) | None => {
+ // ignored — an empty task stack also counts as an ignore context
+ }
+ _ => panic!("expected an ignore context")
+ }
+ }
}
pub fn with_ignore<OP,R>(&self, op: OP) -> R
where OP: FnOnce() -> R
{
- let _task = self.in_ignore();
+ // RAII guard: when tracking is enabled, `IgnoreTask::new` presumably
+ // pushes an ignore task onto `data.current`'s stack — TODO confirm.
+ // `_task` is dropped at the end of this function, so `op` runs
+ // entirely under the guard.
+ let _task = self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.current));
op()
}
// We're constructing the HIR here; we don't care what we will
// read, since we haven't even constructed the *input* to
// incr. comp. yet.
- let _ignore = dep_graph.in_ignore();
+ dep_graph.assert_ignored();
LoweringContext {
crate_root: std_inject::injected_crate_name(),
errors: vec![],
};
- hir_map.dep_graph.with_ignore(|| {
- hir_map.krate().visit_all_item_likes(&mut outer_visitor);
- if !outer_visitor.errors.is_empty() {
- let message = outer_visitor
- .errors
- .iter()
- .fold(String::new(), |s1, s2| s1 + "\n" + s2);
- bug!("{}", message);
- }
- });
+ hir_map.dep_graph.assert_ignored();
+
+ hir_map.krate().visit_all_item_likes(&mut outer_visitor);
+ if !outer_visitor.errors.is_empty() {
+ let message = outer_visitor
+ .errors
+ .iter()
+ .fold(String::new(), |s1, s2| s1 + "\n" + s2);
+ bug!("{}", message);
+ }
}
struct HirIdValidator<'a, 'hir: 'a> {
where E: ty_codec::TyEncoder
{
// Serializing the DepGraph should not modify it:
- let _in_ignore = tcx.dep_graph.in_ignore();
-
- // Allocate FileMapIndices
- let (file_to_file_index, file_index_to_stable_id) = {
- let mut file_to_file_index = FxHashMap();
- let mut file_index_to_stable_id = FxHashMap();
-
- for (index, file) in tcx.sess.codemap().files().iter().enumerate() {
- let index = FileMapIndex(index as u32);
- let file_ptr: *const FileMap = &**file as *const _;
- file_to_file_index.insert(file_ptr, index);
- file_index_to_stable_id.insert(index, StableFilemapId::new(&file));
- }
-
- (file_to_file_index, file_index_to_stable_id)
- };
-
- let mut encoder = CacheEncoder {
- tcx,
- encoder,
- type_shorthands: FxHashMap(),
- predicate_shorthands: FxHashMap(),
- expn_info_shorthands: FxHashMap(),
- codemap: CachingCodemapView::new(tcx.sess.codemap()),
- file_to_file_index,
- };
-
- // Load everything into memory so we can write it out to the on-disk
- // cache. The vast majority of cacheable query results should already
- // be in memory, so this should be a cheap operation.
- tcx.dep_graph.exec_cache_promotions(tcx);
-
- // Encode query results
- let mut query_result_index = EncodedQueryResultIndex::new();
-
- {
- use ty::maps::queries::*;
- let enc = &mut encoder;
- let qri = &mut query_result_index;
-
- // Encode TypeckTables
- encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?;
- encode_query_results::<optimized_mir, _>(tcx, enc, qri)?;
- encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?;
- encode_query_results::<borrowck, _>(tcx, enc, qri)?;
- encode_query_results::<mir_borrowck, _>(tcx, enc, qri)?;
- encode_query_results::<mir_const_qualif, _>(tcx, enc, qri)?;
- encode_query_results::<def_symbol_name, _>(tcx, enc, qri)?;
- encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?;
- encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?;
- encode_query_results::<symbol_name, _>(tcx, enc, qri)?;
- encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
- encode_query_results::<check_match, _>(tcx, enc, qri)?;
- }
+ tcx.dep_graph.with_ignore(|| {
+ // Allocate FileMapIndices
+ let (file_to_file_index, file_index_to_stable_id) = {
+ let mut file_to_file_index = FxHashMap();
+ let mut file_index_to_stable_id = FxHashMap();
+
+ for (index, file) in tcx.sess.codemap().files().iter().enumerate() {
+ let index = FileMapIndex(index as u32);
+ let file_ptr: *const FileMap = &**file as *const _;
+ file_to_file_index.insert(file_ptr, index);
+ file_index_to_stable_id.insert(index, StableFilemapId::new(&file));
+ }
- // Encode diagnostics
- let diagnostics_index = {
- let mut diagnostics_index = EncodedDiagnosticsIndex::new();
-
- for (dep_node_index, diagnostics) in self.current_diagnostics
- .borrow()
- .iter() {
- let pos = AbsoluteBytePos::new(encoder.position());
- // Let's make sure we get the expected type here:
- let diagnostics: &EncodedDiagnostics = diagnostics;
- let dep_node_index =
- SerializedDepNodeIndex::new(dep_node_index.index());
- encoder.encode_tagged(dep_node_index, diagnostics)?;
- diagnostics_index.push((dep_node_index, pos));
+ (file_to_file_index, file_index_to_stable_id)
+ };
+
+ let mut encoder = CacheEncoder {
+ tcx,
+ encoder,
+ type_shorthands: FxHashMap(),
+ predicate_shorthands: FxHashMap(),
+ expn_info_shorthands: FxHashMap(),
+ codemap: CachingCodemapView::new(tcx.sess.codemap()),
+ file_to_file_index,
+ };
+
+ // Load everything into memory so we can write it out to the on-disk
+ // cache. The vast majority of cacheable query results should already
+ // be in memory, so this should be a cheap operation.
+ tcx.dep_graph.exec_cache_promotions(tcx);
+
+ // Encode query results
+ let mut query_result_index = EncodedQueryResultIndex::new();
+
+ {
+ use ty::maps::queries::*;
+ let enc = &mut encoder;
+ let qri = &mut query_result_index;
+
+ // Encode TypeckTables
+ encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?;
+ encode_query_results::<optimized_mir, _>(tcx, enc, qri)?;
+ encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?;
+ encode_query_results::<borrowck, _>(tcx, enc, qri)?;
+ encode_query_results::<mir_borrowck, _>(tcx, enc, qri)?;
+ encode_query_results::<mir_const_qualif, _>(tcx, enc, qri)?;
+ encode_query_results::<def_symbol_name, _>(tcx, enc, qri)?;
+ encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?;
+ encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?;
+ encode_query_results::<symbol_name, _>(tcx, enc, qri)?;
+ encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
+ encode_query_results::<check_match, _>(tcx, enc, qri)?;
}
- diagnostics_index
- };
+ // Encode diagnostics
+ let diagnostics_index = {
+ let mut diagnostics_index = EncodedDiagnosticsIndex::new();
+
+ for (dep_node_index, diagnostics) in self.current_diagnostics
+ .borrow()
+ .iter() {
+ let pos = AbsoluteBytePos::new(encoder.position());
+ // Let's make sure we get the expected type here:
+ let diagnostics: &EncodedDiagnostics = diagnostics;
+ let dep_node_index =
+ SerializedDepNodeIndex::new(dep_node_index.index());
+ encoder.encode_tagged(dep_node_index, diagnostics)?;
+ diagnostics_index.push((dep_node_index, pos));
+ }
- let sorted_cnums = sorted_cnums_including_local_crate(tcx);
- let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
- let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
- let crate_disambiguator = tcx.crate_disambiguator(cnum);
- (cnum.as_u32(), crate_name, crate_disambiguator)
- }).collect();
-
- // Encode the file footer
- let footer_pos = encoder.position() as u64;
- encoder.encode_tagged(TAG_FILE_FOOTER, &Footer {
- file_index_to_stable_id,
- prev_cnums,
- query_result_index,
- diagnostics_index,
- })?;
-
- // Encode the position of the footer as the last 8 bytes of the
- // file so we know where to look for it.
- IntEncodedWithFixedSize(footer_pos).encode(encoder.encoder)?;
-
- // DO NOT WRITE ANYTHING TO THE ENCODER AFTER THIS POINT! The address
- // of the footer must be the last thing in the data stream.
-
- return Ok(());
-
- fn sorted_cnums_including_local_crate(tcx: TyCtxt) -> Vec<CrateNum> {
- let mut cnums = vec![LOCAL_CRATE];
- cnums.extend_from_slice(&tcx.crates()[..]);
- cnums.sort_unstable();
- // Just to be sure...
- cnums.dedup();
- cnums
- }
+ diagnostics_index
+ };
+
+ let sorted_cnums = sorted_cnums_including_local_crate(tcx);
+ let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
+ let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
+ let crate_disambiguator = tcx.crate_disambiguator(cnum);
+ (cnum.as_u32(), crate_name, crate_disambiguator)
+ }).collect();
+
+ // Encode the file footer
+ let footer_pos = encoder.position() as u64;
+ encoder.encode_tagged(TAG_FILE_FOOTER, &Footer {
+ file_index_to_stable_id,
+ prev_cnums,
+ query_result_index,
+ diagnostics_index,
+ })?;
+
+ // Encode the position of the footer as the last 8 bytes of the
+ // file so we know where to look for it.
+ IntEncodedWithFixedSize(footer_pos).encode(encoder.encoder)?;
+
+ // DO NOT WRITE ANYTHING TO THE ENCODER AFTER THIS POINT! The address
+ // of the footer must be the last thing in the data stream.
+
+ return Ok(());
+
+ fn sorted_cnums_including_local_crate(tcx: TyCtxt) -> Vec<CrateNum> {
+ let mut cnums = vec![LOCAL_CRATE];
+ cnums.extend_from_slice(&tcx.crates()[..]);
+ cnums.sort_unstable();
+ // Just to be sure...
+ cnums.dedup();
+ cnums
+ }
+ })
}
/// Load a diagnostic emitted during the previous compilation session.
prev_cnums: &[(u32, String, CrateDisambiguator)])
-> IndexVec<CrateNum, Option<CrateNum>>
{
- let _in_ignore = tcx.dep_graph.in_ignore();
-
- let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| {
- let crate_name = tcx.original_crate_name(cnum)
- .as_str()
- .to_string();
- let crate_disambiguator = tcx.crate_disambiguator(cnum);
- ((crate_name, crate_disambiguator), cnum)
- }).collect::<FxHashMap<_,_>>();
-
- let map_size = prev_cnums.iter()
- .map(|&(cnum, ..)| cnum)
- .max()
- .unwrap_or(0) + 1;
- let mut map = IndexVec::new();
- map.resize(map_size as usize, None);
-
- for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums {
- let key = (crate_name.clone(), crate_disambiguator);
- map[CrateNum::from_u32(prev_cnum)] = current_cnums.get(&key).cloned();
- }
+ tcx.dep_graph.with_ignore(|| {
+ let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| {
+ let crate_name = tcx.original_crate_name(cnum)
+ .as_str()
+ .to_string();
+ let crate_disambiguator = tcx.crate_disambiguator(cnum);
+ ((crate_name, crate_disambiguator), cnum)
+ }).collect::<FxHashMap<_,_>>();
+
+ let map_size = prev_cnums.iter()
+ .map(|&(cnum, ..)| cnum)
+ .max()
+ .unwrap_or(0) + 1;
+ let mut map = IndexVec::new();
+ map.resize(map_size as usize, None);
+
+ for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums {
+ let key = (crate_name.clone(), crate_disambiguator);
+ map[CrateNum::from_u32(prev_cnum)] = current_cnums.get(&key).cloned();
+ }
- map[LOCAL_CRATE] = Some(LOCAL_CRATE);
- map
+ map[LOCAL_CRATE] = Some(LOCAL_CRATE);
+ map
+ })
}
}
|| hir_map::map_crate(sess, cstore, &mut hir_forest, &defs));
{
- let _ignore = hir_map.dep_graph.in_ignore();
+ hir_map.dep_graph.assert_ignored();
controller_entry_point!(after_hir_lowering,
sess,
CompileState::state_after_hir_lowering(input,
|tcx, analysis, rx, result| {
{
// Eventually, we will want to track plugins.
- let _ignore = tcx.dep_graph.in_ignore();
-
- let mut state = CompileState::state_after_analysis(input,
- sess,
- outdir,
- output,
- opt_crate,
- tcx.hir.krate(),
- &analysis,
- tcx,
- &crate_name);
- (control.after_analysis.callback)(&mut state);
+ tcx.dep_graph.with_ignore(|| {
+ let mut state = CompileState::state_after_analysis(input,
+ sess,
+ outdir,
+ output,
+ opt_crate,
+ tcx.hir.krate(),
+ &analysis,
+ tcx,
+ &crate_name);
+ (control.after_analysis.callback)(&mut state);
+ });
if control.after_analysis.stop == Compilation::Stop {
return result.and_then(|_| Err(CompileIncomplete::Stopped));
tcx,
tables: Cell::new(&empty_tables)
};
- let _ignore = tcx.dep_graph.in_ignore();
- f(&annotation, hir_map.forest.krate())
+ tcx.dep_graph.with_ignore(|| {
+ f(&annotation, hir_map.forest.krate())
+ })
}),
sess)
}
use syntax_pos::Span;
pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let _ignore = tcx.dep_graph.in_ignore();
-
- if tcx.sess.opts.debugging_opts.dump_dep_graph {
- dump_graph(tcx);
- }
-
- // if the `rustc_attrs` feature is not enabled, then the
- // attributes we are interested in cannot be present anyway, so
- // skip the walk.
- if !tcx.sess.features.borrow().rustc_attrs {
- return;
- }
+ // The whole walk runs inside `with_ignore` so that the reads performed
+ // while dumping/checking the graph are not themselves recorded as
+ // dependencies.
+ tcx.dep_graph.with_ignore(|| {
+ if tcx.sess.opts.debugging_opts.dump_dep_graph {
+ dump_graph(tcx);
+ }
- // Find annotations supplied by user (if any).
- let (if_this_changed, then_this_would_need) = {
- let mut visitor = IfThisChanged { tcx,
- if_this_changed: vec![],
- then_this_would_need: vec![] };
- visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.hir.krate().attrs);
- tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
- (visitor.if_this_changed, visitor.then_this_would_need)
- };
+ // if the `rustc_attrs` feature is not enabled, then the
+ // attributes we are interested in cannot be present anyway, so
+ // skip the walk.
+ if !tcx.sess.features.borrow().rustc_attrs {
+ // NB: this `return` exits the `with_ignore` closure, which here
+ // is the remainder of the function.
+ return;
+ }
- if !if_this_changed.is_empty() || !then_this_would_need.is_empty() {
- assert!(tcx.sess.opts.debugging_opts.query_dep_graph,
- "cannot use the `#[{}]` or `#[{}]` annotations \
- without supplying `-Z query-dep-graph`",
- ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED);
- }
+ // Find annotations supplied by user (if any).
+ let (if_this_changed, then_this_would_need) = {
+ let mut visitor = IfThisChanged { tcx,
+ if_this_changed: vec![],
+ then_this_would_need: vec![] };
+ visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.hir.krate().attrs);
+ tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
+ (visitor.if_this_changed, visitor.then_this_would_need)
+ };
+
+ if !if_this_changed.is_empty() || !then_this_would_need.is_empty() {
+ assert!(tcx.sess.opts.debugging_opts.query_dep_graph,
+ "cannot use the `#[{}]` or `#[{}]` annotations \
+ without supplying `-Z query-dep-graph`",
+ ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED);
+ }
+ // Check paths.
+ check_paths(tcx, &if_this_changed, &then_this_would_need);
- // Check paths.
- check_paths(tcx, &if_this_changed, &then_this_would_need);
+ })
}
type Sources = Vec<(Span, DefId, DepNode)>;
return;
}
- let _ignore = tcx.dep_graph.in_ignore();
- let krate = tcx.hir.krate();
- let mut dirty_clean_visitor = DirtyCleanVisitor {
- tcx,
- checked_attrs: FxHashSet(),
- };
- krate.visit_all_item_likes(&mut dirty_clean_visitor);
-
- let mut all_attrs = FindAllAttrs {
- tcx,
- attr_names: vec![ATTR_DIRTY, ATTR_CLEAN],
- found_attrs: vec![],
- };
- intravisit::walk_crate(&mut all_attrs, krate);
-
- // Note that we cannot use the existing "unused attribute"-infrastructure
- // here, since that is running before trans. This is also the reason why
- // all trans-specific attributes are `Whitelisted` in syntax::feature_gate.
- all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs);
+ tcx.dep_graph.with_ignore(|| {
+ let krate = tcx.hir.krate();
+ let mut dirty_clean_visitor = DirtyCleanVisitor {
+ tcx,
+ checked_attrs: FxHashSet(),
+ };
+ krate.visit_all_item_likes(&mut dirty_clean_visitor);
+
+ let mut all_attrs = FindAllAttrs {
+ tcx,
+ attr_names: vec![ATTR_DIRTY, ATTR_CLEAN],
+ found_attrs: vec![],
+ };
+ intravisit::walk_crate(&mut all_attrs, krate);
+
+ // Note that we cannot use the existing "unused attribute"-infrastructure
+ // here, since that is running before trans. This is also the reason why
+ // all trans-specific attributes are `Whitelisted` in syntax::feature_gate.
+ all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs);
+ })
}
pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
debug!("save_dep_graph()");
- let _ignore = tcx.dep_graph.in_ignore();
- let sess = tcx.sess;
- if sess.opts.incremental.is_none() {
- return;
- }
-
- time(sess.time_passes(), "persist query result cache", || {
- save_in(sess,
- query_cache_path(sess),
- |e| encode_query_cache(tcx, e));
- });
+ // All persistence work runs with dep-graph tracking ignored.
+ tcx.dep_graph.with_ignore(|| {
+ let sess = tcx.sess;
+ if sess.opts.incremental.is_none() {
+ // Nothing to save unless we are compiling incrementally; the
+ // `return` exits the closure, i.e. the rest of this function.
+ return;
+ }
- if tcx.sess.opts.debugging_opts.incremental_queries {
- time(sess.time_passes(), "persist dep-graph", || {
+ // The query result cache is saved first, then (optionally) the
+ // dep-graph itself.
+ time(sess.time_passes(), "persist query result cache", || {
save_in(sess,
- dep_graph_path(sess),
- |e| encode_dep_graph(tcx, e));
+ query_cache_path(sess),
+ |e| encode_query_cache(tcx, e));
});
- }
- dirty_clean::check_dirty_clean_annotations(tcx);
+ if tcx.sess.opts.debugging_opts.incremental_queries {
+ time(sess.time_passes(), "persist dep-graph", || {
+ save_in(sess,
+ dep_graph_path(sess),
+ |e| encode_dep_graph(tcx, e));
+ });
+ }
+
+ // Finally, verify the test-only dirty/clean annotations.
+ dirty_clean::check_dirty_clean_annotations(tcx);
+ })
}
pub fn save_work_products(sess: &Session, dep_graph: &DepGraph) {
}
debug!("save_work_products()");
- let _ignore = dep_graph.in_ignore();
+ dep_graph.assert_ignored();
let path = work_products_path(sess);
save_in(sess, path, |e| encode_work_products(dep_graph, e));
fn encode_fn_arg_names_for_body(&mut self, body_id: hir::BodyId)
-> LazySeq<ast::Name> {
- let _ignore = self.tcx.dep_graph.in_ignore();
- let body = self.tcx.hir.body(body_id);
- self.lazy_seq(body.arguments.iter().map(|arg| {
- match arg.pat.node {
- PatKind::Binding(_, _, name, _) => name.node,
- _ => Symbol::intern("")
- }
- }))
+ // The HIR body is read under `with_ignore`, presumably so the read is
+ // not recorded in the dep graph — TODO confirm that is intended here.
+ self.tcx.dep_graph.with_ignore(|| {
+ let body = self.tcx.hir.body(body_id);
+ self.lazy_seq(body.arguments.iter().map(|arg| {
+ // An argument bound by a plain identifier keeps its name; any
+ // other pattern is encoded as the empty symbol.
+ match arg.pat.node {
+ PatKind::Binding(_, _, name, _) => name.node,
+ _ => Symbol::intern("")
+ }
+ }))
+ })
}
fn encode_fn_arg_names(&mut self, names: &[Spanned<ast::Name>])
where DATA: DepGraphRead
{
assert!(id.is_local());
- let tcx: TyCtxt<'b, 'tcx, 'tcx> = self.ecx.tcx;
// We don't track this since we are explicitly computing the incr. comp.
// hashes anyway. In theory we could do some tracking here and use it to
// avoid rehashing things (and instead cache the hashes) but it's
// unclear whether that would be a win since hashing is cheap enough.
- let _task = tcx.dep_graph.in_ignore();
+ self.ecx.tcx.dep_graph.with_ignore(move || {
+ let mut entry_builder = IsolatedEncoder::new(self.ecx);
+ let entry = op(&mut entry_builder, data);
+ let entry = entry_builder.lazy(&entry);
- let mut entry_builder = IsolatedEncoder::new(self.ecx);
- let entry = op(&mut entry_builder, data);
- let entry = entry_builder.lazy(&entry);
-
- self.items.record(id, entry);
+ self.items.record(id, entry);
+ })
}
pub fn into_items(self) -> Index {
config: Option<Config>,
mut handler: H,
) {
- let _ignore = tcx.dep_graph.in_ignore();
+ tcx.dep_graph.with_ignore(|| {
+ assert!(analysis.glob_map.is_some());
- assert!(analysis.glob_map.is_some());
+ info!("Dumping crate {}", cratename);
- info!("Dumping crate {}", cratename);
-
- let save_ctxt = SaveContext {
- tcx,
- tables: &ty::TypeckTables::empty(None),
- analysis,
- span_utils: SpanUtils::new(&tcx.sess),
- config: find_config(config),
- };
+ let save_ctxt = SaveContext {
+ tcx,
+ tables: &ty::TypeckTables::empty(None),
+ analysis,
+ span_utils: SpanUtils::new(&tcx.sess),
+ config: find_config(config),
+ };
- handler.save(save_ctxt, krate, cratename)
+ handler.save(save_ctxt, krate, cratename)
+ })
}
fn find_config(supplied: Option<Config>) -> Config {
enum Disposition { Reused, Translated }
pub(crate) fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let _ignore = tcx.dep_graph.in_ignore();
-
- if tcx.sess.opts.incremental.is_none() {
- return;
- }
+ tcx.dep_graph.with_ignore(|| {
+ if tcx.sess.opts.incremental.is_none() {
+ // No-op unless compiling incrementally; the `return` exits the
+ // closure, which is the rest of this function.
+ return;
+ }
- let ams = AssertModuleSource { tcx };
- for attr in &tcx.hir.krate().attrs {
- ams.check_attr(attr);
- }
+ // Check every crate-level attribute via `AssertModuleSource`.
+ let ams = AssertModuleSource { tcx };
+ for attr in &tcx.hir.krate().attrs {
+ ams.check_attr(attr);
+ }
+ })
}
struct AssertModuleSource<'a, 'tcx: 'a> {
return;
}
- let _ignore = tcx.dep_graph.in_ignore();
- let mut visitor = SymbolNamesTest { tcx: tcx };
- // FIXME(#37712) could use ItemLikeVisitor if trait items were item-like
- tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
+ tcx.dep_graph.with_ignore(|| {
+ let mut visitor = SymbolNamesTest { tcx: tcx };
+ // FIXME(#37712) could use ItemLikeVisitor if trait items were item-like
+ tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
+ })
}
struct SymbolNamesTest<'a, 'tcx:'a> {