git.lizzy.rs Git - rust.git/commitdiff
Make PerfStats thread-safe and remove unused fields
authorJohn Kåre Alsaker <john.kare.alsaker@gmail.com>
Sun, 1 Apr 2018 06:17:25 +0000 (08:17 +0200)
committerJohn Kåre Alsaker <john.kare.alsaker@gmail.com>
Tue, 10 Apr 2018 12:40:25 +0000 (14:40 +0200)
src/librustc/infer/canonical.rs
src/librustc/session/mod.rs
src/librustc/util/common.rs
src/librustc_traits/normalize_erasing_regions.rs
src/librustc_traits/normalize_projection_ty.rs

index 4357c9a5a776acd3cdec0084200f0e8337b3972f..8ea6eb005a140b0eb105e7fbbc839074fccd2837 100644 (file)
 use serialize::UseSpecializedDecodable;
 use std::fmt::Debug;
 use std::ops::Index;
+use std::sync::atomic::Ordering;
 use syntax::codemap::Span;
 use traits::{Obligation, ObligationCause, PredicateObligation};
 use ty::{self, CanonicalVar, Lift, Region, Slice, Ty, TyCtxt, TypeFlags};
 use ty::subst::{Kind, UnpackedKind};
 use ty::fold::{TypeFoldable, TypeFolder};
 use util::captures::Captures;
-use util::common::CellUsizeExt;
 
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_data_structures::fx::FxHashMap;
@@ -473,7 +473,7 @@ pub fn canonicalize_query<V>(&self, value: &V) -> (V::Canonicalized, CanonicalVa
     where
         V: Canonicalize<'gcx, 'tcx>,
     {
-        self.tcx.sess.perf_stats.queries_canonicalized.increment();
+        self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
 
         Canonicalizer::canonicalize(
             value,
index afb62aca582feb084833c4f997a1dd18f53cbdaa..c084c8684817390234a88a1d101f2305ae78b557 100644 (file)
@@ -55,6 +55,7 @@
 use std::path::{Path, PathBuf};
 use std::time::Duration;
 use std::sync::mpsc;
+use std::sync::atomic::{AtomicUsize, Ordering};
 
 mod code_stats;
 pub mod config;
@@ -165,27 +166,16 @@ pub struct Session {
 }
 
 pub struct PerfStats {
-    /// The accumulated time needed for computing the SVH of the crate
-    pub svh_time: Cell<Duration>,
-    /// The accumulated time spent on computing incr. comp. hashes
-    pub incr_comp_hashes_time: Cell<Duration>,
-    /// The number of incr. comp. hash computations performed
-    pub incr_comp_hashes_count: Cell<u64>,
-    /// The number of bytes hashed when computing ICH values
-    pub incr_comp_bytes_hashed: Cell<u64>,
     /// The accumulated time spent on computing symbol hashes
-    pub symbol_hash_time: Cell<Duration>,
+    pub symbol_hash_time: Lock<Duration>,
     /// The accumulated time spent decoding def path tables from metadata
-    pub decode_def_path_tables_time: Cell<Duration>,
+    pub decode_def_path_tables_time: Lock<Duration>,
     /// Total number of values canonicalized queries constructed.
-    pub queries_canonicalized: Cell<usize>,
-    /// Number of times we canonicalized a value and found that the
-    /// result had already been canonicalized.
-    pub canonicalized_values_allocated: Cell<usize>,
+    pub queries_canonicalized: AtomicUsize,
     /// Number of times this query is invoked.
-    pub normalize_ty_after_erasing_regions: Cell<usize>,
+    pub normalize_ty_after_erasing_regions: AtomicUsize,
     /// Number of times this query is invoked.
-    pub normalize_projection_ty: Cell<usize>,
+    pub normalize_projection_ty: AtomicUsize,
 }
 
 /// Enum to support dispatch of one-time diagnostics (in Session.diag_once)
@@ -838,47 +828,20 @@ pub fn incr_comp_session_dir_opt(&self) -> Option<cell::Ref<PathBuf>> {
     }
 
     pub fn print_perf_stats(&self) {
-        println!(
-            "Total time spent computing SVHs:               {}",
-            duration_to_secs_str(self.perf_stats.svh_time.get())
-        );
-        println!(
-            "Total time spent computing incr. comp. hashes: {}",
-            duration_to_secs_str(self.perf_stats.incr_comp_hashes_time.get())
-        );
-        println!(
-            "Total number of incr. comp. hashes computed:   {}",
-            self.perf_stats.incr_comp_hashes_count.get()
-        );
-        println!(
-            "Total number of bytes hashed for incr. comp.:  {}",
-            self.perf_stats.incr_comp_bytes_hashed.get()
-        );
-        if self.perf_stats.incr_comp_hashes_count.get() != 0 {
-            println!(
-                "Average bytes hashed per incr. comp. HIR node: {}",
-                self.perf_stats.incr_comp_bytes_hashed.get()
-                    / self.perf_stats.incr_comp_hashes_count.get()
-            );
-        } else {
-            println!("Average bytes hashed per incr. comp. HIR node: N/A");
-        }
         println!(
             "Total time spent computing symbol hashes:      {}",
-            duration_to_secs_str(self.perf_stats.symbol_hash_time.get())
+            duration_to_secs_str(*self.perf_stats.symbol_hash_time.lock())
         );
         println!(
             "Total time spent decoding DefPath tables:      {}",
-            duration_to_secs_str(self.perf_stats.decode_def_path_tables_time.get())
+            duration_to_secs_str(*self.perf_stats.decode_def_path_tables_time.lock())
         );
         println!("Total queries canonicalized:                   {}",
-                 self.perf_stats.queries_canonicalized.get());
-        println!("Total canonical values interned:               {}",
-                 self.perf_stats.canonicalized_values_allocated.get());
+                 self.perf_stats.queries_canonicalized.load(Ordering::Relaxed));
         println!("normalize_ty_after_erasing_regions:            {}",
-                 self.perf_stats.normalize_ty_after_erasing_regions.get());
+                 self.perf_stats.normalize_ty_after_erasing_regions.load(Ordering::Relaxed));
         println!("normalize_projection_ty:                       {}",
-                 self.perf_stats.normalize_projection_ty.get());
+                 self.perf_stats.normalize_projection_ty.load(Ordering::Relaxed));
     }
 
     /// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n.
@@ -1160,16 +1123,11 @@ pub fn build_session_(
         ignored_attr_names: ich::compute_ignored_attr_names(),
         profile_channel: Lock::new(None),
         perf_stats: PerfStats {
-            svh_time: Cell::new(Duration::from_secs(0)),
-            incr_comp_hashes_time: Cell::new(Duration::from_secs(0)),
-            incr_comp_hashes_count: Cell::new(0),
-            incr_comp_bytes_hashed: Cell::new(0),
-            symbol_hash_time: Cell::new(Duration::from_secs(0)),
-            decode_def_path_tables_time: Cell::new(Duration::from_secs(0)),
-            queries_canonicalized: Cell::new(0),
-            canonicalized_values_allocated: Cell::new(0),
-            normalize_ty_after_erasing_regions: Cell::new(0),
-            normalize_projection_ty: Cell::new(0),
+            symbol_hash_time: Lock::new(Duration::from_secs(0)),
+            decode_def_path_tables_time: Lock::new(Duration::from_secs(0)),
+            queries_canonicalized: AtomicUsize::new(0),
+            normalize_ty_after_erasing_regions: AtomicUsize::new(0),
+            normalize_projection_ty: AtomicUsize::new(0),
         },
         code_stats: RefCell::new(CodeStats::new()),
         optimization_fuel_crate,
index 32ec837f031bfad02368fdd26831aead7377c46a..bb6aa654c296008015e75ab0dca47e8d9ab127bc 100644 (file)
@@ -10,6 +10,8 @@
 
 #![allow(non_camel_case_types)]
 
+use rustc_data_structures::sync::Lock;
+
 use std::cell::{RefCell, Cell};
 use std::collections::HashMap;
 use std::ffi::CString;
@@ -236,13 +238,14 @@ pub fn to_readable_str(mut val: usize) -> String {
     groups.join("_")
 }
 
-pub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where
+pub fn record_time<T, F>(accu: &Lock<Duration>, f: F) -> T where
     F: FnOnce() -> T,
 {
     let start = Instant::now();
     let rv = f();
     let duration = start.elapsed();
-    accu.set(duration + accu.get());
+    let mut accu = accu.lock();
+    *accu = *accu + duration;
     rv
 }
 
@@ -382,13 +385,3 @@ fn test_to_readable_str() {
     assert_eq!("1_000_000", to_readable_str(1_000_000));
     assert_eq!("1_234_567", to_readable_str(1_234_567));
 }
-
-pub trait CellUsizeExt {
-    fn increment(&self);
-}
-
-impl CellUsizeExt for Cell<usize> {
-    fn increment(&self) {
-        self.set(self.get() + 1);
-    }
-}
index 14f8694dbf72a0a2e9acf82681f3bb2e2e210cc2..1857df5717bbac2c773d9e623118530ffd96aa83 100644 (file)
 use rustc::traits::{Normalized, ObligationCause};
 use rustc::traits::query::NoSolution;
 use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
-use rustc::util::common::CellUsizeExt;
+use std::sync::atomic::Ordering;
 
 crate fn normalize_ty_after_erasing_regions<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
     goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
 ) -> Ty<'tcx> {
     let ParamEnvAnd { param_env, value } = goal;
-    tcx.sess
-        .perf_stats
-        .normalize_ty_after_erasing_regions
-        .increment();
+    tcx.sess.perf_stats.normalize_ty_after_erasing_regions.fetch_add(1, Ordering::Relaxed);
     tcx.infer_ctxt().enter(|infcx| {
         let cause = ObligationCause::dummy();
         match infcx.at(&cause, param_env).normalize(&value) {
index 62d5ef11551c0a044f3da4c499957bd51c911fac..8fc00c937e69c18fb393d40e3721ad55e9a481c5 100644 (file)
                     SelectionContext};
 use rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};
 use rustc::ty::{ParamEnvAnd, TyCtxt};
-use rustc::util::common::CellUsizeExt;
 use rustc_data_structures::sync::Lrc;
 use syntax::ast::DUMMY_NODE_ID;
 use syntax_pos::DUMMY_SP;
 use util;
+use std::sync::atomic::Ordering;
 
 crate fn normalize_projection_ty<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
@@ -25,7 +25,7 @@
 ) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
     debug!("normalize_provider(goal={:#?})", goal);
 
-    tcx.sess.perf_stats.normalize_projection_ty.increment();
+    tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
     tcx.infer_ctxt().enter(|ref infcx| {
         let (
             ParamEnvAnd {