git.lizzy.rs Git - rust.git/commitdiff
Replace FnvHasher use with FxHasher.
author     Nicholas Nethercote <nnethercote@mozilla.com>
Tue, 8 Nov 2016 03:02:55 +0000 (14:02 +1100)
committer  Nicholas Nethercote <nnethercote@mozilla.com>
Tue, 8 Nov 2016 04:14:59 +0000 (15:14 +1100)
This speeds up compilation by 3--6% across most of rustc-benchmarks.
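
Background on the change (not part of the original commit message): FxHasher is the non-cryptographic, multiply-and-rotate hash used inside Firefox, and it typically does much less work per key than FNV's byte-at-a-time loop for the small, word-sized keys (NodeId, DefId, interned pointers) that dominate these compiler-internal maps. The commit does not include the hasher itself, so what follows is only an illustrative Rust sketch of an Fx-style hasher; the seed constant, rotation amount, and all names are assumptions, not code quoted from rustc.

use std::convert::TryInto;
use std::hash::{BuildHasherDefault, Hasher};

// Assumed 64-bit mixing constant and rotation, in the style of FxHasher.
const SEED: u64 = 0x51_7c_c1_b7_27_22_0a_95;

#[derive(Default)]
struct SketchFxHasher {
    hash: u64,
}

impl SketchFxHasher {
    #[inline]
    fn add_to_hash(&mut self, word: u64) {
        // One xor, one rotate and one multiply per word of input.
        self.hash = (self.hash.rotate_left(5) ^ word).wrapping_mul(SEED);
    }
}

impl Hasher for SketchFxHasher {
    fn write(&mut self, bytes: &[u8]) {
        // Fold the input eight bytes at a time, then the remainder.
        let mut chunks = bytes.chunks_exact(8);
        for chunk in &mut chunks {
            self.add_to_hash(u64::from_le_bytes(chunk.try_into().unwrap()));
        }
        for &byte in chunks.remainder() {
            self.add_to_hash(u64::from(byte));
        }
    }

    fn finish(&self) -> u64 {
        self.hash
    }
}

// Hypothetical alias mirroring the FxHashMap type used throughout the diff.
type SketchFxHashMap<K, V> =
    std::collections::HashMap<K, V, BuildHasherDefault<SketchFxHasher>>;

fn main() {
    let mut map: SketchFxHashMap<u32, &str> = SketchFxHashMap::default();
    map.insert(42, "forty-two");
    assert_eq!(map.get(&42), Some(&"forty-two"));
}

For comparison, FNV-style hashing folds input one byte at a time (for FNV-1a, hash = (hash ^ byte).wrapping_mul(PRIME)), so an 8-byte key costs eight multiplications instead of one; that per-key difference is a plausible source of the 3--6% compile-time improvement reported above.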

91 files changed:
src/librustc/dep_graph/dep_tracking_map.rs
src/librustc/dep_graph/edges.rs
src/librustc/dep_graph/graph.rs
src/librustc/dep_graph/query.rs
src/librustc/hir/map/definitions.rs
src/librustc/hir/mod.rs
src/librustc/infer/freshen.rs
src/librustc/infer/higher_ranked/mod.rs
src/librustc/infer/mod.rs
src/librustc/infer/region_inference/graphviz.rs
src/librustc/infer/region_inference/mod.rs
src/librustc/lint/context.rs
src/librustc/middle/dead.rs
src/librustc/middle/dependency_format.rs
src/librustc/middle/lang_items.rs
src/librustc/middle/privacy.rs
src/librustc/middle/reachable.rs
src/librustc/middle/region.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/middle/stability.rs
src/librustc/session/mod.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/fulfill.rs
src/librustc/traits/select.rs
src/librustc/traits/specialize/mod.rs
src/librustc/traits/specialize/specialization_graph.rs
src/librustc/traits/util.rs
src/librustc/ty/contents.rs
src/librustc/ty/context.rs
src/librustc/ty/fold.rs
src/librustc/ty/mod.rs
src/librustc/ty/trait_def.rs
src/librustc/ty/util.rs
src/librustc/util/nodemap.rs
src/librustc_borrowck/borrowck/mir/elaborate_drops.rs
src/librustc_borrowck/borrowck/mir/gather_moves.rs
src/librustc_borrowck/borrowck/move_data.rs
src/librustc_const_eval/_match.rs
src/librustc_data_structures/lib.rs
src/librustc_data_structures/obligation_forest/mod.rs
src/librustc_data_structures/snapshot_map/mod.rs
src/librustc_incremental/assert_dep_graph.rs
src/librustc_incremental/calculate_svh/mod.rs
src/librustc_incremental/persist/data.rs
src/librustc_incremental/persist/dirty_clean.rs
src/librustc_incremental/persist/fs.rs
src/librustc_incremental/persist/hash.rs
src/librustc_incremental/persist/load.rs
src/librustc_incremental/persist/preds.rs
src/librustc_incremental/persist/save.rs
src/librustc_lint/types.rs
src/librustc_lint/unused.rs
src/librustc_metadata/creader.rs
src/librustc_metadata/cstore.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/locator.rs
src/librustc_mir/build/expr/as_rvalue.rs
src/librustc_mir/build/matches/mod.rs
src/librustc_mir/build/matches/test.rs
src/librustc_mir/build/scope.rs
src/librustc_mir/pretty.rs
src/librustc_mir/transform/instcombine.rs
src/librustc_passes/hir_stats.rs
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/lib.rs
src/librustc_trans/base.rs
src/librustc_trans/builder.rs
src/librustc_trans/collector.rs
src/librustc_trans/context.rs
src/librustc_trans/debuginfo/metadata.rs
src/librustc_trans/debuginfo/mod.rs
src/librustc_trans/mir/block.rs
src/librustc_trans/partitioning.rs
src/librustc_trans/symbol_map.rs
src/librustc_trans/type_.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/_match.rs
src/librustc_typeck/check/dropck.rs
src/librustc_typeck/check/intrinsic.rs
src/librustc_typeck/check/method/probe.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/check/wfcheck.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/constrained_type_params.rs
src/librustdoc/clean/inline.rs
src/librustdoc/clean/mod.rs
src/librustdoc/core.rs
src/librustdoc/html/render.rs
src/librustdoc/visit_ast.rs
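
One note on the call syntax used in the hunks below: maps and sets are created by calling FxHashMap() and FxHashSet() as functions (for example `map: FxHashMap()` in the first hunk), rather than via HashMap::default(). The fx module itself is not part of this diff, so the helpers below are only a sketch of how such function-style constructors can be defined; the simplified hasher stand-in, the exact names, and the trait bounds are assumptions.

use std::collections::{HashMap, HashSet};
use std::hash::{BuildHasherDefault, Hash, Hasher};

// Simplified byte-at-a-time stand-in for FxHasher (see the sketch above);
// included only so this example is self-contained.
#[derive(Default)]
pub struct FxHasherSketch { hash: u64 }

impl Hasher for FxHasherSketch {
    fn write(&mut self, bytes: &[u8]) {
        for &b in bytes {
            self.hash = (self.hash.rotate_left(5) ^ u64::from(b))
                .wrapping_mul(0x51_7c_c1_b7_27_22_0a_95);
        }
    }
    fn finish(&self) -> u64 { self.hash }
}

pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasherSketch>>;
pub type FxHashSet<V> = HashSet<V, BuildHasherDefault<FxHasherSketch>>;

// Functions sharing the names of the type aliases (types and values live in
// separate namespaces), which is what lets call sites write `FxHashMap()`.
#[allow(non_snake_case)]
pub fn FxHashMap<K: Hash + Eq, V>() -> FxHashMap<K, V> {
    HashMap::default()
}

#[allow(non_snake_case)]
pub fn FxHashSet<V: Hash + Eq>() -> FxHashSet<V> {
    HashSet::default()
}

fn main() {
    // Mirrors the pattern at the call sites in the hunks below.
    let mut indices: FxHashMap<u32, usize> = FxHashMap();
    indices.insert(7, 0);
    let mut edges: FxHashSet<(u32, u32)> = FxHashSet();
    edges.insert((7, 8));
    assert!(indices.contains_key(&7) && edges.contains(&(7, 8)));
}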

src/librustc/dep_graph/dep_tracking_map.rs
index 51f7890c7a2f4c8e13d31b97e557a26cc0b1c8f3..50a478fcc2fd91d8da5b6e6bd6ecdbbe01de571f 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use hir::def_id::DefId;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use std::cell::RefCell;
 use std::ops::Index;
 use std::hash::Hash;
@@ -24,7 +24,7 @@
 pub struct DepTrackingMap<M: DepTrackingMapConfig> {
     phantom: PhantomData<M>,
     graph: DepGraph,
-    map: FnvHashMap<M::Key, M::Value>,
+    map: FxHashMap<M::Key, M::Value>,
 }
 
 pub trait DepTrackingMapConfig {
@@ -38,7 +38,7 @@ pub fn new(graph: DepGraph) -> DepTrackingMap<M> {
         DepTrackingMap {
             phantom: PhantomData,
             graph: graph,
-            map: FnvHashMap()
+            map: FxHashMap()
         }
     }
 
src/librustc/dep_graph/edges.rs
index 10f3d21f2af6d37eeb265f293278785543b99dfb..8657a3e5a587899f80ebb536847bcb8f8a9a9f0c 100644 (file)
@@ -8,15 +8,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use std::fmt::Debug;
 use std::hash::Hash;
 use super::{DepGraphQuery, DepNode};
 
 pub struct DepGraphEdges<D: Clone + Debug + Eq + Hash> {
     nodes: Vec<DepNode<D>>,
-    indices: FnvHashMap<DepNode<D>, IdIndex>,
-    edges: FnvHashSet<(IdIndex, IdIndex)>,
+    indices: FxHashMap<DepNode<D>, IdIndex>,
+    edges: FxHashSet<(IdIndex, IdIndex)>,
     open_nodes: Vec<OpenNode>,
 }
 
@@ -46,8 +46,8 @@ impl<D: Clone + Debug + Eq + Hash> DepGraphEdges<D> {
     pub fn new() -> DepGraphEdges<D> {
         DepGraphEdges {
             nodes: vec![],
-            indices: FnvHashMap(),
-            edges: FnvHashSet(),
+            indices: FxHashMap(),
+            edges: FxHashSet(),
             open_nodes: Vec::new()
         }
     }
src/librustc/dep_graph/graph.rs
index fac3586afc7b965f80f5e7e24c778f7cd82ca579..2637d34c5c56e569d91a3e1bcff2ad47310efa50 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use hir::def_id::DefId;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use session::config::OutputType;
 use std::cell::{Ref, RefCell};
 use std::rc::Rc;
@@ -34,10 +34,10 @@ struct DepGraphData {
     /// things available to us. If we find that they are not dirty, we
     /// load the path to the file storing those work-products here into
     /// this map. We can later look for and extract that data.
-    previous_work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
+    previous_work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,
 
     /// Work-products that we generate in this run.
-    work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
+    work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,
 }
 
 impl DepGraph {
@@ -45,8 +45,8 @@ pub fn new(enabled: bool) -> DepGraph {
         DepGraph {
             data: Rc::new(DepGraphData {
                 thread: DepGraphThreadData::new(enabled),
-                previous_work_products: RefCell::new(FnvHashMap()),
-                work_products: RefCell::new(FnvHashMap()),
+                previous_work_products: RefCell::new(FxHashMap()),
+                work_products: RefCell::new(FxHashMap()),
             })
         }
     }
@@ -117,7 +117,7 @@ pub fn previous_work_product(&self, v: &Arc<WorkProductId>) -> Option<WorkProduc
 
     /// Access the map of work-products created during this run. Only
     /// used during saving of the dep-graph.
-    pub fn work_products(&self) -> Ref<FnvHashMap<Arc<WorkProductId>, WorkProduct>> {
+    pub fn work_products(&self) -> Ref<FxHashMap<Arc<WorkProductId>, WorkProduct>> {
         self.data.work_products.borrow()
     }
 }
src/librustc/dep_graph/query.rs
index 7a780c1d4ae2478bc6d80d2e9c932bb0063e4b1e..4c791f9655342e8fae1ad00a057766e67037f6f4 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
 use std::fmt::Debug;
 use std::hash::Hash;
@@ -17,7 +17,7 @@
 
 pub struct DepGraphQuery<D: Clone + Debug + Hash + Eq> {
     pub graph: Graph<DepNode<D>, ()>,
-    pub indices: FnvHashMap<DepNode<D>, NodeIndex>,
+    pub indices: FxHashMap<DepNode<D>, NodeIndex>,
 }
 
 impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> {
@@ -25,7 +25,7 @@ pub fn new(nodes: &[DepNode<D>],
                edges: &[(DepNode<D>, DepNode<D>)])
                -> DepGraphQuery<D> {
         let mut graph = Graph::new();
-        let mut indices = FnvHashMap();
+        let mut indices = FxHashMap();
         for node in nodes {
             indices.insert(node.clone(), graph.next_node_index());
             graph.add_node(node.clone());
src/librustc/hir/map/definitions.rs
index e8b3714bbe3b885bbaa57035bd4a9ed344f8d30d..38157c7e565646c45b21a5dc985ad32606c56998 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use std::fmt::Write;
 use std::hash::{Hash, Hasher};
 use std::collections::hash_map::DefaultHasher;
@@ -22,7 +22,7 @@
 #[derive(Clone)]
 pub struct Definitions {
     data: Vec<DefData>,
-    key_map: FnvHashMap<DefKey, DefIndex>,
+    key_map: FxHashMap<DefKey, DefIndex>,
     node_map: NodeMap<DefIndex>,
 }
 
@@ -219,7 +219,7 @@ impl Definitions {
     pub fn new() -> Definitions {
         Definitions {
             data: vec![],
-            key_map: FnvHashMap(),
+            key_map: FxHashMap(),
             node_map: NodeMap(),
         }
     }
src/librustc/hir/mod.rs
index 5f57ceac353cc62e27d8438d60dd020f48278257..cbd3e39f8703aa9735fc433a13870fcc1785af61 100644 (file)
@@ -33,7 +33,7 @@
 
 use hir::def::Def;
 use hir::def_id::DefId;
-use util::nodemap::{NodeMap, FnvHashSet};
+use util::nodemap::{NodeMap, FxHashSet};
 
 use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
 use syntax::codemap::{self, respan, Spanned};
@@ -1605,4 +1605,4 @@ pub struct TraitCandidate {
 
 // Map from the NodeId of a glob import to a list of items which are actually
 // imported.
-pub type GlobMap = NodeMap<FnvHashSet<Name>>;
+pub type GlobMap = NodeMap<FxHashSet<Name>>;
src/librustc/infer/freshen.rs
index 828f9f32baac8d3446b066ed01f7e20dbc20ed49..30e18a4c569b2fa7093c9b53b66aeb7c8cea8e81 100644 (file)
@@ -32,7 +32,7 @@
 
 use ty::{self, Ty, TyCtxt, TypeFoldable};
 use ty::fold::TypeFolder;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 use std::collections::hash_map::Entry;
 
 use super::InferCtxt;
@@ -41,7 +41,7 @@
 pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
     freshen_count: u32,
-    freshen_map: FnvHashMap<ty::InferTy, Ty<'tcx>>,
+    freshen_map: FxHashMap<ty::InferTy, Ty<'tcx>>,
 }
 
 impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
@@ -50,7 +50,7 @@ pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
         TypeFreshener {
             infcx: infcx,
             freshen_count: 0,
-            freshen_map: FnvHashMap(),
+            freshen_map: FxHashMap(),
         }
     }
 
src/librustc/infer/higher_ranked/mod.rs
index 25b899b3c56cd1bae9d3a454e3d732e68434adce..737ce8bdf681daf25cd0b3dc30e3888b9d1b3623 100644 (file)
@@ -24,7 +24,7 @@
 use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
 use syntax_pos::Span;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 
 pub struct HrMatchResult<U> {
     pub value: U,
@@ -135,7 +135,7 @@ pub fn higher_ranked_match<T, U>(&mut self,
             // Map each skolemized region to a vector of other regions that it
             // must be equated with. (Note that this vector may include other
             // skolemized regions from `skol_map`.)
-            let skol_resolution_map: FnvHashMap<_, _> =
+            let skol_resolution_map: FxHashMap<_, _> =
                 skol_map
                 .iter()
                 .map(|(&br, &skol)| {
@@ -158,7 +158,7 @@ pub fn higher_ranked_match<T, U>(&mut self,
             // `skol_map`. There should always be a representative if things
             // are properly well-formed.
             let mut unconstrained_regions = vec![];
-            let skol_representatives: FnvHashMap<_, _> =
+            let skol_representatives: FxHashMap<_, _> =
                 skol_resolution_map
                 .iter()
                 .map(|(&skol, &(br, ref regions))| {
@@ -268,7 +268,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                              snapshot: &CombinedSnapshot,
                                              debruijn: ty::DebruijnIndex,
                                              new_vars: &[ty::RegionVid],
-                                             a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>,
+                                             a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
                                              r0: &'tcx ty::Region)
                                              -> &'tcx ty::Region {
             // Regions that pre-dated the LUB computation stay as they are.
@@ -364,8 +364,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                              snapshot: &CombinedSnapshot,
                                              debruijn: ty::DebruijnIndex,
                                              new_vars: &[ty::RegionVid],
-                                             a_map: &FnvHashMap<ty::BoundRegion,
-                                                                &'tcx ty::Region>,
+                                             a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
                                              a_vars: &[ty::RegionVid],
                                              b_vars: &[ty::RegionVid],
                                              r0: &'tcx ty::Region)
@@ -434,7 +433,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
 
         fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                       span: Span,
-                                      a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>,
+                                      a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
                                       r: &'tcx ty::Region) -> &'tcx ty::Region
         {
             for (a_br, a_r) in a_map {
@@ -457,7 +456,7 @@ fn fresh_bound_variable<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
 }
 
 fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
-                           map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>)
+                           map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
                            -> Vec<ty::RegionVid> {
     map.iter()
        .map(|(_, &r)| match *r {
@@ -504,7 +503,7 @@ fn tainted_regions(&self,
                        snapshot: &CombinedSnapshot,
                        r: &'tcx ty::Region,
                        directions: TaintDirections)
-                       -> FnvHashSet<&'tcx ty::Region> {
+                       -> FxHashSet<&'tcx ty::Region> {
         self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
     }
 
@@ -568,7 +567,7 @@ fn region_vars_confined_to_snapshot(&self,
         let escaping_types =
             self.type_variables.borrow_mut().types_escaping_snapshot(&snapshot.type_snapshot);
 
-        let mut escaping_region_vars = FnvHashSet();
+        let mut escaping_region_vars = FxHashSet();
         for ty in &escaping_types {
             self.tcx.collect_regions(ty, &mut escaping_region_vars);
         }
@@ -764,7 +763,7 @@ pub fn plug_leaks<T>(&self,
         // region back to the `ty::BoundRegion` that it originally
         // represented. Because `leak_check` passed, we know that
         // these taint sets are mutually disjoint.
-        let inv_skol_map: FnvHashMap<&'tcx ty::Region, ty::BoundRegion> =
+        let inv_skol_map: FxHashMap<&'tcx ty::Region, ty::BoundRegion> =
             skol_map
             .iter()
             .flat_map(|(&skol_br, &skol)| {
@@ -837,7 +836,7 @@ pub fn pop_skolemized(&self,
                           snapshot: &CombinedSnapshot)
     {
         debug!("pop_skolemized({:?})", skol_map);
-        let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect();
+        let skol_regions: FxHashSet<_> = skol_map.values().cloned().collect();
         self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
         if !skol_map.is_empty() {
             self.projection_cache.borrow_mut().rollback_skolemized(
src/librustc/infer/mod.rs
index 21820ca07192103c3d55b1cc98ef3935f6a52148..ebafd206e26e293e1dff442946ecd5efcaaa2d0e 100644 (file)
@@ -39,7 +39,7 @@
 use syntax::ast;
 use errors::DiagnosticBuilder;
 use syntax_pos::{self, Span, DUMMY_SP};
-use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
+use util::nodemap::{FxHashMap, FxHashSet, NodeMap};
 
 use self::combine::CombineFields;
 use self::higher_ranked::HrMatchResult;
@@ -134,7 +134,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 
     // the set of predicates on which errors have been reported, to
     // avoid reporting the same error twice.
-    pub reported_trait_errors: RefCell<FnvHashSet<traits::TraitErrorKey<'tcx>>>,
+    pub reported_trait_errors: RefCell<FxHashSet<traits::TraitErrorKey<'tcx>>>,
 
     // Sadly, the behavior of projection varies a bit depending on the
     // stage of compilation. The specifics are given in the
@@ -170,7 +170,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 
 /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized
 /// region that each late-bound region was replaced with.
-pub type SkolemizationMap<'tcx> = FnvHashMap<ty::BoundRegion, &'tcx ty::Region>;
+pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, &'tcx ty::Region>;
 
 /// Why did we require that the two types be related?
 ///
@@ -492,7 +492,7 @@ pub fn borrowck_fake_infer_ctxt(self, param_env: ty::ParameterEnvironment<'gcx>)
             selection_cache: traits::SelectionCache::new(),
             evaluation_cache: traits::EvaluationCache::new(),
             projection_cache: RefCell::new(traits::ProjectionCache::new()),
-            reported_trait_errors: RefCell::new(FnvHashSet()),
+            reported_trait_errors: RefCell::new(FxHashSet()),
             projection_mode: Reveal::NotSpecializable,
             tainted_by_errors_flag: Cell::new(false),
             err_count_on_creation: self.sess.err_count(),
@@ -531,7 +531,7 @@ pub fn enter<F, R>(&'tcx mut self, f: F) -> R
             parameter_environment: param_env,
             selection_cache: traits::SelectionCache::new(),
             evaluation_cache: traits::EvaluationCache::new(),
-            reported_trait_errors: RefCell::new(FnvHashSet()),
+            reported_trait_errors: RefCell::new(FxHashSet()),
             projection_mode: projection_mode,
             tainted_by_errors_flag: Cell::new(false),
             err_count_on_creation: tcx.sess.err_count(),
@@ -1530,7 +1530,7 @@ pub fn replace_late_bound_regions_with_fresh_var<T>(
         span: Span,
         lbrct: LateBoundRegionConversionTime,
         value: &ty::Binder<T>)
-        -> (T, FnvHashMap<ty::BoundRegion, &'tcx ty::Region>)
+        -> (T, FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
         where T : TypeFoldable<'tcx>
     {
         self.tcx.replace_late_bound_regions(
src/librustc/infer/region_inference/graphviz.rs
index 289f7d6c7380088ba4a8bb6f25d346a864f4f284..95ce8d39ff488b0c7fc099d60528988f94b4ed37 100644 (file)
@@ -23,7 +23,7 @@
 use super::Constraint;
 use infer::SubregionOrigin;
 use infer::region_inference::RegionVarBindings;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 
 use std::borrow::Cow;
 use std::collections::hash_map::Entry::Vacant;
@@ -122,8 +122,8 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>(
 struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     graph_name: String,
-    map: &'a FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
-    node_ids: FnvHashMap<Node, usize>,
+    map: &'a FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
+    node_ids: FxHashMap<Node, usize>,
 }
 
 #[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)]
@@ -145,7 +145,7 @@ fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
            map: &'a ConstraintMap<'tcx>)
            -> ConstraintGraph<'a, 'gcx, 'tcx> {
         let mut i = 0;
-        let mut node_ids = FnvHashMap();
+        let mut node_ids = FxHashMap();
         {
             let mut add_node = |node| {
                 if let Vacant(e) = node_ids.entry(node) {
@@ -235,7 +235,7 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
     type Node = Node;
     type Edge = Edge<'tcx>;
     fn nodes(&self) -> dot::Nodes<Node> {
-        let mut set = FnvHashSet();
+        let mut set = FxHashSet();
         for node in self.node_ids.keys() {
             set.insert(*node);
         }
@@ -261,7 +261,7 @@ fn target(&self, edge: &Edge<'tcx>) -> Node {
     }
 }
 
-pub type ConstraintMap<'tcx> = FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;
+pub type ConstraintMap<'tcx> = FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;
 
 fn dump_region_constraints_to<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                               map: &ConstraintMap<'tcx>,
src/librustc/infer/region_inference/mod.rs
index ef36ffa83192117e10a2435e79b378e479214e90..af6f2c50e72fc2b0ebfbeefb59f76c99811ec4b3 100644 (file)
@@ -19,7 +19,7 @@
 use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable};
 use super::unify_key;
 
-use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
 use rustc_data_structures::unify::{self, UnificationTable};
 use middle::free_region::FreeRegionMap;
@@ -213,7 +213,7 @@ pub fn push(&mut self, other: BoundRegion) {
     }
 }
 
-pub type CombineMap<'tcx> = FnvHashMap<TwoRegions<'tcx>, RegionVid>;
+pub type CombineMap<'tcx> = FxHashMap<TwoRegions<'tcx>, RegionVid>;
 
 pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
@@ -222,7 +222,7 @@ pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     // Constraints of the form `A <= B` introduced by the region
     // checker.  Here at least one of `A` and `B` must be a region
     // variable.
-    constraints: RefCell<FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>>,
+    constraints: RefCell<FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>>,
 
     // A "verify" is something that we need to verify after inference is
     // done, but which does not directly affect inference in any way.
@@ -248,7 +248,7 @@ pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     // record the fact that `'a <= 'b` is implied by the fn signature,
     // and then ignore the constraint when solving equations. This is
     // a bit of a hack but seems to work.
-    givens: RefCell<FnvHashSet<(ty::FreeRegion, ty::RegionVid)>>,
+    givens: RefCell<FxHashSet<(ty::FreeRegion, ty::RegionVid)>>,
 
     lubs: RefCell<CombineMap<'tcx>>,
     glbs: RefCell<CombineMap<'tcx>>,
@@ -305,14 +305,14 @@ pub fn both() -> Self {
 
 struct TaintSet<'tcx> {
     directions: TaintDirections,
-    regions: FnvHashSet<&'tcx ty::Region>
+    regions: FxHashSet<&'tcx ty::Region>
 }
 
 impl<'a, 'gcx, 'tcx> TaintSet<'tcx> {
     fn new(directions: TaintDirections,
            initial_region: &'tcx ty::Region)
            -> Self {
-        let mut regions = FnvHashSet();
+        let mut regions = FxHashSet();
         regions.insert(initial_region);
         TaintSet { directions: directions, regions: regions }
     }
@@ -362,7 +362,7 @@ fn fixed_point(&mut self,
         }
     }
 
-    fn into_set(self) -> FnvHashSet<&'tcx ty::Region> {
+    fn into_set(self) -> FxHashSet<&'tcx ty::Region> {
         self.regions
     }
 
@@ -393,11 +393,11 @@ pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> RegionVarBindings<'a, 'gcx, 'tcx> {
             tcx: tcx,
             var_origins: RefCell::new(Vec::new()),
             values: RefCell::new(None),
-            constraints: RefCell::new(FnvHashMap()),
+            constraints: RefCell::new(FxHashMap()),
             verifys: RefCell::new(Vec::new()),
-            givens: RefCell::new(FnvHashSet()),
-            lubs: RefCell::new(FnvHashMap()),
-            glbs: RefCell::new(FnvHashMap()),
+            givens: RefCell::new(FxHashSet()),
+            lubs: RefCell::new(FxHashMap()),
+            glbs: RefCell::new(FxHashMap()),
             skolemization_count: Cell::new(0),
             bound_count: Cell::new(0),
             undo_log: RefCell::new(Vec::new()),
@@ -547,7 +547,7 @@ pub fn push_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot)
     /// completes to remove all trace of the skolemized regions
     /// created in that time.
     pub fn pop_skolemized(&self,
-                          skols: &FnvHashSet<&'tcx ty::Region>,
+                          skols: &FxHashSet<&'tcx ty::Region>,
                           snapshot: &RegionSnapshot) {
         debug!("pop_skolemized_regions(skols={:?})", skols);
 
@@ -601,7 +601,7 @@ pub fn pop_skolemized(&self,
         self.skolemization_count.set(snapshot.skolemization_count);
         return;
 
-        fn kill_constraint<'tcx>(skols: &FnvHashSet<&'tcx ty::Region>,
+        fn kill_constraint<'tcx>(skols: &FxHashSet<&'tcx ty::Region>,
                                  undo_entry: &UndoLogEntry<'tcx>)
                                  -> bool {
             match undo_entry {
@@ -905,7 +905,7 @@ pub fn tainted(&self,
                    mark: &RegionSnapshot,
                    r0: &'tcx Region,
                    directions: TaintDirections)
-                   -> FnvHashSet<&'tcx ty::Region> {
+                   -> FxHashSet<&'tcx ty::Region> {
         debug!("tainted(mark={:?}, r0={:?}, directions={:?})",
                mark, r0, directions);
 
@@ -1414,13 +1414,13 @@ fn collect_concrete_regions(&self,
                                 dup_vec: &mut [u32])
                                 -> (Vec<RegionAndOrigin<'tcx>>, bool) {
         struct WalkState<'tcx> {
-            set: FnvHashSet<RegionVid>,
+            set: FxHashSet<RegionVid>,
             stack: Vec<RegionVid>,
             result: Vec<RegionAndOrigin<'tcx>>,
             dup_found: bool,
         }
         let mut state = WalkState {
-            set: FnvHashSet(),
+            set: FxHashSet(),
             stack: vec![orig_node_idx],
             result: Vec::new(),
             dup_found: false,
src/librustc/lint/context.rs
index f08aa2eb49f72bd0b8a5701ad7450a7249faf5ea..9cc2337e3dd1ede9f0d16879f0d978a35ad00fe4 100644 (file)
@@ -33,7 +33,7 @@
 use lint::{EarlyLintPassObject, LateLintPassObject};
 use lint::{Default, CommandLine, Node, Allow, Warn, Deny, Forbid};
 use lint::builtin;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use std::cmp;
 use std::default::Default as StdDefault;
@@ -64,18 +64,18 @@ pub struct LintStore {
     late_passes: Option<Vec<LateLintPassObject>>,
 
     /// Lints indexed by name.
-    by_name: FnvHashMap<String, TargetLint>,
+    by_name: FxHashMap<String, TargetLint>,
 
     /// Current levels of each lint, and where they were set.
-    levels: FnvHashMap<LintId, LevelSource>,
+    levels: FxHashMap<LintId, LevelSource>,
 
     /// Map of registered lint groups to what lints they expand to. The bool
     /// is true if the lint group was added by a plugin.
-    lint_groups: FnvHashMap<&'static str, (Vec<LintId>, bool)>,
+    lint_groups: FxHashMap<&'static str, (Vec<LintId>, bool)>,
 
     /// Extra info for future incompatibility lints, descibing the
     /// issue or RFC that caused the incompatibility.
-    future_incompatible: FnvHashMap<LintId, FutureIncompatibleInfo>,
+    future_incompatible: FxHashMap<LintId, FutureIncompatibleInfo>,
 
     /// Maximum level a lint can be
     lint_cap: Option<Level>,
@@ -171,10 +171,10 @@ pub fn new() -> LintStore {
             lints: vec![],
             early_passes: Some(vec![]),
             late_passes: Some(vec![]),
-            by_name: FnvHashMap(),
-            levels: FnvHashMap(),
-            future_incompatible: FnvHashMap(),
-            lint_groups: FnvHashMap(),
+            by_name: FxHashMap(),
+            levels: FxHashMap(),
+            future_incompatible: FxHashMap(),
+            lint_groups: FxHashMap(),
             lint_cap: None,
         }
     }
@@ -304,8 +304,8 @@ pub fn process_command_line(&mut self, sess: &Session) {
                 Err(FindLintError::Removed) => { }
                 Err(_) => {
                     match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
-                                                 .collect::<FnvHashMap<&'static str,
-                                                                       Vec<LintId>>>()
+                                                 .collect::<FxHashMap<&'static str,
+                                                                      Vec<LintId>>>()
                                                  .get(&lint_name[..]) {
                         Some(v) => {
                             v.iter()
src/librustc/middle/dead.rs
index 4212b1fb05ee3a6eeffc5ff277ac84a964f0cf36..7fc698fdbebf5150da582891e22fd75920686862 100644 (file)
@@ -22,7 +22,7 @@
 use hir::def::Def;
 use hir::def_id::{DefId};
 use lint;
-use util::nodemap::FnvHashSet;
+use util::nodemap::FxHashSet;
 
 use syntax::{ast, codemap};
 use syntax::attr;
@@ -48,7 +48,7 @@ fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 struct MarkSymbolVisitor<'a, 'tcx: 'a> {
     worklist: Vec<ast::NodeId>,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    live_symbols: Box<FnvHashSet<ast::NodeId>>,
+    live_symbols: Box<FxHashSet<ast::NodeId>>,
     struct_has_extern_repr: bool,
     ignore_non_const_paths: bool,
     inherited_pub_visibility: bool,
@@ -61,7 +61,7 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         MarkSymbolVisitor {
             worklist: worklist,
             tcx: tcx,
-            live_symbols: box FnvHashSet(),
+            live_symbols: box FxHashSet(),
             struct_has_extern_repr: false,
             ignore_non_const_paths: false,
             inherited_pub_visibility: false,
@@ -163,7 +163,7 @@ fn handle_field_pattern_match(&mut self, lhs: &hir::Pat,
     }
 
     fn mark_live_symbols(&mut self) {
-        let mut scanned = FnvHashSet();
+        let mut scanned = FxHashSet();
         while !self.worklist.is_empty() {
             let id = self.worklist.pop().unwrap();
             if scanned.contains(&id) {
@@ -396,7 +396,7 @@ fn create_and_seed_worklist<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                        access_levels: &privacy::AccessLevels,
                        krate: &hir::Crate)
-                       -> Box<FnvHashSet<ast::NodeId>> {
+                       -> Box<FxHashSet<ast::NodeId>> {
     let worklist = create_and_seed_worklist(tcx, access_levels, krate);
     let mut symbol_visitor = MarkSymbolVisitor::new(tcx, worklist);
     symbol_visitor.mark_live_symbols();
@@ -414,7 +414,7 @@ fn get_struct_ctor_id(item: &hir::Item) -> Option<ast::NodeId> {
 
 struct DeadVisitor<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    live_symbols: Box<FnvHashSet<ast::NodeId>>,
+    live_symbols: Box<FxHashSet<ast::NodeId>>,
 }
 
 impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
src/librustc/middle/dependency_format.rs
index 656d3146fe5d124c314f664e6444fd06efe6df2a..c658f47ec1be0dbbfa58913ec02427b23d754d1b 100644 (file)
@@ -66,7 +66,7 @@
 use session;
 use session::config;
 use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic};
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 use rustc_back::PanicStrategy;
 
 /// A list of dependencies for a certain crate type.
@@ -80,7 +80,7 @@
 /// A mapping of all required dependencies for a particular flavor of output.
 ///
 /// This is local to the tcx, and is generally relevant to one session.
-pub type Dependencies = FnvHashMap<config::CrateType, DependencyList>;
+pub type Dependencies = FxHashMap<config::CrateType, DependencyList>;
 
 #[derive(Copy, Clone, PartialEq, Debug)]
 pub enum Linkage {
@@ -149,7 +149,7 @@ fn calculate_type(sess: &session::Session,
         config::CrateTypeProcMacro => {},
     }
 
-    let mut formats = FnvHashMap();
+    let mut formats = FxHashMap();
 
     // Sweep all crates for found dylibs. Add all dylibs, as well as their
     // dependencies, ensuring there are no conflicts. The only valid case for a
@@ -240,7 +240,7 @@ fn calculate_type(sess: &session::Session,
 fn add_library(sess: &session::Session,
                cnum: CrateNum,
                link: LinkagePreference,
-               m: &mut FnvHashMap<CrateNum, LinkagePreference>) {
+               m: &mut FxHashMap<CrateNum, LinkagePreference>) {
     match m.get(&cnum) {
         Some(&link2) => {
             // If the linkages differ, then we'd have two copies of the library
src/librustc/middle/lang_items.rs
index 3175230ab6a5e43694a9731ee2bbbdf0443546f3..3e7de79246b667b59b41ac7a0f3fdbd9d573757f 100644 (file)
@@ -27,7 +27,7 @@
 use hir::def_id::DefId;
 use ty;
 use middle::weak_lang_items;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use syntax::ast;
 use syntax::parse::token::InternedString;
@@ -146,7 +146,7 @@ struct LanguageItemCollector<'a, 'tcx: 'a> {
 
     session: &'a Session,
 
-    item_refs: FnvHashMap<&'static str, usize>,
+    item_refs: FxHashMap<&'static str, usize>,
 }
 
 impl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> {
@@ -169,7 +169,7 @@ fn visit_item(&mut self, item: &hir::Item) {
 impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {
     pub fn new(session: &'a Session, ast_map: &'a hir_map::Map<'tcx>)
                -> LanguageItemCollector<'a, 'tcx> {
-        let mut item_refs = FnvHashMap();
+        let mut item_refs = FxHashMap();
 
         $( item_refs.insert($name, $variant as usize); )*
 
src/librustc/middle/privacy.rs
index 189150d426463f4835af37c86cde53e43245dfbf..1376886968f74a4b9535569bbe8133f774a32e5d 100644 (file)
@@ -12,7 +12,7 @@
 //! outside their scopes. This pass will also generate a set of exported items
 //! which are available for use externally when compiled as a library.
 
-use util::nodemap::{DefIdSet, FnvHashMap};
+use util::nodemap::{DefIdSet, FxHashMap};
 
 use std::hash::Hash;
 use std::fmt;
@@ -35,7 +35,7 @@ pub enum AccessLevel {
 // Accessibility levels for reachable HIR nodes
 #[derive(Clone)]
 pub struct AccessLevels<Id = NodeId> {
-    pub map: FnvHashMap<Id, AccessLevel>
+    pub map: FxHashMap<Id, AccessLevel>
 }
 
 impl<Id: Hash + Eq> AccessLevels<Id> {
src/librustc/middle/reachable.rs
index 1a50d7aa0adc7f34107bf6f5d05ee20541c4f624..9898ec7597d90ca9d493a1a4876e7072278d6283 100644 (file)
@@ -22,7 +22,7 @@
 use ty::{self, TyCtxt};
 use middle::privacy;
 use session::config;
-use util::nodemap::{NodeSet, FnvHashSet};
+use util::nodemap::{NodeSet, FxHashSet};
 
 use syntax::abi::Abi;
 use syntax::ast;
@@ -204,7 +204,7 @@ fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool {
 
     // Step 2: Mark all symbols that the symbols on the worklist touch.
     fn propagate(&mut self) {
-        let mut scanned = FnvHashSet();
+        let mut scanned = FxHashSet();
         loop {
             let search_item = match self.worklist.pop() {
                 Some(item) => item,
src/librustc/middle/region.rs
index 30b735b9c24e359c9561a2e5f0b19ce4b8a01841..8d51fda0cf2b1fa27dbcd1fba91764a9a6f8e86e 100644 (file)
@@ -19,7 +19,7 @@
 use dep_graph::DepNode;
 use hir::map as ast_map;
 use session::Session;
-use util::nodemap::{FnvHashMap, NodeMap, NodeSet};
+use util::nodemap::{FxHashMap, NodeMap, NodeSet};
 use ty;
 
 use std::cell::RefCell;
@@ -251,7 +251,7 @@ pub fn span(&self, region_maps: &RegionMaps, ast_map: &ast_map::Map) -> Option<S
 /// The region maps encode information about region relationships.
 pub struct RegionMaps {
     code_extents: RefCell<Vec<CodeExtentData>>,
-    code_extent_interner: RefCell<FnvHashMap<CodeExtentData, CodeExtent>>,
+    code_extent_interner: RefCell<FxHashMap<CodeExtentData, CodeExtent>>,
     /// `scope_map` maps from a scope id to the enclosing scope id;
     /// this is usually corresponding to the lexical nesting, though
     /// in the case of closures the parent scope is the innermost
@@ -1217,7 +1217,7 @@ pub fn resolve_crate(sess: &Session, map: &ast_map::Map) -> RegionMaps {
 
     let maps = RegionMaps {
         code_extents: RefCell::new(vec![]),
-        code_extent_interner: RefCell::new(FnvHashMap()),
+        code_extent_interner: RefCell::new(FxHashMap()),
         scope_map: RefCell::new(vec![]),
         var_map: RefCell::new(NodeMap()),
         rvalue_scopes: RefCell::new(NodeMap()),
src/librustc/middle/resolve_lifetime.rs
index 2d93c33afb4095e743978841418b33b9ce65dc2b..e6d960735299c069aac21a156bac913b4dc2ce0b 100644 (file)
@@ -31,7 +31,7 @@
 use syntax_pos::Span;
 use util::nodemap::NodeMap;
 
-use rustc_data_structures::fnv::FnvHashSet;
+use rustc_data_structures::fx::FxHashSet;
 use hir;
 use hir::print::lifetime_to_string;
 use hir::intravisit::{self, Visitor, FnKind};
@@ -847,13 +847,13 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
                                generics: &hir::Generics) {
     debug!("insert_late_bound_lifetimes(decl={:?}, generics={:?})", decl, generics);
 
-    let mut constrained_by_input = ConstrainedCollector { regions: FnvHashSet() };
+    let mut constrained_by_input = ConstrainedCollector { regions: FxHashSet() };
     for arg in &decl.inputs {
         constrained_by_input.visit_ty(&arg.ty);
     }
 
     let mut appears_in_output = AllCollector {
-        regions: FnvHashSet(),
+        regions: FxHashSet(),
         impl_trait: false
     };
     intravisit::walk_fn_ret_ty(&mut appears_in_output, &decl.output);
@@ -866,7 +866,7 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
     // Subtle point: because we disallow nested bindings, we can just
     // ignore binders here and scrape up all names we see.
     let mut appears_in_where_clause = AllCollector {
-        regions: FnvHashSet(),
+        regions: FxHashSet(),
         impl_trait: false
     };
     for ty_param in generics.ty_params.iter() {
@@ -926,7 +926,7 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
     return;
 
     struct ConstrainedCollector {
-        regions: FnvHashSet<ast::Name>,
+        regions: FxHashSet<ast::Name>,
     }
 
     impl<'v> Visitor<'v> for ConstrainedCollector {
@@ -961,7 +961,7 @@ fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
     }
 
     struct AllCollector {
-        regions: FnvHashSet<ast::Name>,
+        regions: FxHashSet<ast::Name>,
         impl_trait: bool
     }
 
src/librustc/middle/stability.rs
index fd17e378787a59008fd9415c5c93cf783f26e269..f1755c82b8cbd3f59b7fa2851556b613ac758ef6 100644 (file)
@@ -27,7 +27,7 @@
 use syntax::ast::{NodeId, Attribute};
 use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version};
 use syntax::attr::{self, Stability, Deprecation};
-use util::nodemap::{DefIdMap, FnvHashSet, FnvHashMap};
+use util::nodemap::{DefIdMap, FxHashSet, FxHashMap};
 
 use hir;
 use hir::{Item, Generics, StructField, Variant, PatKind};
@@ -102,7 +102,7 @@ pub struct Index<'tcx> {
     depr_map: DefIdMap<Option<DeprecationEntry>>,
 
     /// Maps for each crate whether it is part of the staged API.
-    staged_api: FnvHashMap<CrateNum, bool>
+    staged_api: FxHashMap<CrateNum, bool>
 }
 
 // A private tree-walker for producing an Index.
@@ -343,7 +343,7 @@ pub fn new(hir_map: &hir_map::Map) -> Index<'tcx> {
             }
         }
 
-        let mut staged_api = FnvHashMap();
+        let mut staged_api = FxHashMap();
         staged_api.insert(LOCAL_CRATE, is_staged_api);
         Index {
             staged_api: staged_api,
@@ -357,7 +357,7 @@ pub fn new(hir_map: &hir_map::Map) -> Index<'tcx> {
 /// features and possibly prints errors. Returns a list of all
 /// features used.
 pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-                                          -> FnvHashMap<InternedString, attr::StabilityLevel> {
+                                          -> FxHashMap<InternedString, attr::StabilityLevel> {
     let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck);
     let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;
 
@@ -367,7 +367,7 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     let mut checker = Checker {
         tcx: tcx,
         active_features: active_features,
-        used_features: FnvHashMap(),
+        used_features: FxHashMap(),
         in_skip_block: 0,
     };
     intravisit::walk_crate(&mut checker, tcx.map.krate());
@@ -377,8 +377,8 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
 
 struct Checker<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    active_features: FnvHashSet<InternedString>,
-    used_features: FnvHashMap<InternedString, attr::StabilityLevel>,
+    active_features: FxHashSet<InternedString>,
+    used_features: FxHashMap<InternedString, attr::StabilityLevel>,
     // Within a block where feature gate checking can be skipped.
     in_skip_block: u32,
 }
@@ -746,10 +746,10 @@ fn lookup_deprecation_uncached(self, id: DefId) -> Option<DeprecationEntry> {
 /// were expected to be library features), and the list of features used from
 /// libraries, identify activated features that don't exist and error about them.
 pub fn check_unused_or_stable_features(sess: &Session,
-                                       lib_features_used: &FnvHashMap<InternedString,
-                                                                      attr::StabilityLevel>) {
+                                       lib_features_used: &FxHashMap<InternedString,
+                                                                     attr::StabilityLevel>) {
     let ref declared_lib_features = sess.features.borrow().declared_lib_features;
-    let mut remaining_lib_features: FnvHashMap<InternedString, Span>
+    let mut remaining_lib_features: FxHashMap<InternedString, Span>
         = declared_lib_features.clone().into_iter().collect();
 
     fn format_stable_since_msg(version: &str) -> String {
src/librustc/session/mod.rs
index b4dadbf7961fb46593dacd228cabef4a327d572e..724b32d2cd7150e5aa377555921f04dc034fccd7 100644 (file)
@@ -17,7 +17,7 @@
 use session::search_paths::PathKind;
 use session::config::DebugInfoLevel;
 use ty::tls;
-use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet};
+use util::nodemap::{NodeMap, FxHashMap, FxHashSet};
 use util::common::duration_to_secs_str;
 use mir::transform as mir_pass;
 
@@ -78,7 +78,7 @@ pub struct Session {
     /// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
     /// that have been set once, but should not be set again, in order to avoid
     /// redundantly verbose output (Issue #24690).
-    pub one_time_diagnostics: RefCell<FnvHashSet<(lint::LintId, Span, String)>>,
+    pub one_time_diagnostics: RefCell<FxHashSet<(lint::LintId, Span, String)>>,
     pub plugin_llvm_passes: RefCell<Vec<String>>,
     pub mir_passes: RefCell<mir_pass::Passes>,
     pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
@@ -603,12 +603,12 @@ pub fn build_session_(sopts: config::Options,
         working_dir: env::current_dir().unwrap(),
         lint_store: RefCell::new(lint::LintStore::new()),
         lints: RefCell::new(NodeMap()),
-        one_time_diagnostics: RefCell::new(FnvHashSet()),
+        one_time_diagnostics: RefCell::new(FxHashSet()),
         plugin_llvm_passes: RefCell::new(Vec::new()),
         mir_passes: RefCell::new(mir_pass::Passes::new()),
         plugin_attributes: RefCell::new(Vec::new()),
         crate_types: RefCell::new(Vec::new()),
-        dependency_formats: RefCell::new(FnvHashMap()),
+        dependency_formats: RefCell::new(FxHashMap()),
         crate_disambiguator: RefCell::new(token::intern("").as_str()),
         features: RefCell::new(feature_gate::Features::new()),
         recursion_limit: Cell::new(64),
src/librustc/traits/error_reporting.rs
index 89c8162456c427e7e80a7de27190bfa33e1ce4b8..3522c738c160c524e562cc1d1466ee21a655265a 100644 (file)
@@ -33,7 +33,7 @@
 use ty::fast_reject;
 use ty::fold::TypeFolder;
 use ty::subst::Subst;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 
 use std::cmp;
 use std::fmt;
@@ -252,7 +252,7 @@ fn on_unimplemented_note(&self,
                     let generic_map = def.generics.types.iter().map(|param| {
                         (param.name.as_str().to_string(),
                          trait_ref.substs.type_for_def(param).to_string())
-                    }).collect::<FnvHashMap<String, String>>();
+                    }).collect::<FxHashMap<String, String>>();
                     let parser = Parser::new(&istring);
                     let mut errored = false;
                     let err: String = parser.filter_map(|p| {
@@ -647,7 +647,7 @@ pub fn report_object_safety_error(self,
             "the trait `{}` cannot be made into an object", trait_str
         ));
 
-        let mut reported_violations = FnvHashSet();
+        let mut reported_violations = FxHashSet();
         for violation in violations {
             if !reported_violations.insert(violation.clone()) {
                 continue;
@@ -786,7 +786,7 @@ fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) {
     fn predicate_can_apply(&self, pred: ty::PolyTraitRef<'tcx>) -> bool {
         struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
             infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
-            var_map: FnvHashMap<Ty<'tcx>, Ty<'tcx>>
+            var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>
         }
 
         impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> {
@@ -807,7 +807,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
 
             let cleaned_pred = pred.fold_with(&mut ParamToVarFolder {
                 infcx: self,
-                var_map: FnvHashMap()
+                var_map: FxHashMap()
             });
 
             let cleaned_pred = super::project::normalize(
src/librustc/traits/fulfill.rs
index 906da4290361e75c0c507b0c2e10c1b096d8b049..6de93adce3f8302ee80a23f9e6dcb424b11ca520 100644 (file)
@@ -18,7 +18,7 @@
 use std::mem;
 use syntax::ast;
 use util::common::ErrorReported;
-use util::nodemap::{FnvHashSet, NodeMap};
+use util::nodemap::{FxHashSet, NodeMap};
 
 use super::CodeAmbiguity;
 use super::CodeProjectionError;
@@ -37,7 +37,7 @@ fn as_predicate(&self) -> &Self::Predicate { &self.obligation.predicate }
 }
 
 pub struct GlobalFulfilledPredicates<'tcx> {
-    set: FnvHashSet<ty::PolyTraitPredicate<'tcx>>,
+    set: FxHashSet<ty::PolyTraitPredicate<'tcx>>,
     dep_graph: DepGraph,
 }
 
@@ -673,7 +673,7 @@ fn register_region_obligation<'tcx>(t_a: Ty<'tcx>,
 impl<'a, 'gcx, 'tcx> GlobalFulfilledPredicates<'gcx> {
     pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'gcx> {
         GlobalFulfilledPredicates {
-            set: FnvHashSet(),
+            set: FxHashSet(),
             dep_graph: dep_graph,
         }
     }
src/librustc/traits/select.rs
index e75c8bd43340482372ecd0e8dc4520075f78dd8c..5e3f78b1208d51d096d1c360065787b9743ed091 100644 (file)
@@ -51,7 +51,7 @@
 use std::rc::Rc;
 use syntax::abi::Abi;
 use hir;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 struct InferredObligationsSnapshotVecDelegate<'tcx> {
     phantom: PhantomData<&'tcx i32>,
@@ -104,8 +104,8 @@ struct TraitObligationStack<'prev, 'tcx: 'prev> {
 
 #[derive(Clone)]
 pub struct SelectionCache<'tcx> {
-    hashmap: RefCell<FnvHashMap<ty::TraitRef<'tcx>,
-                                SelectionResult<'tcx, SelectionCandidate<'tcx>>>>,
+    hashmap: RefCell<FxHashMap<ty::TraitRef<'tcx>,
+                               SelectionResult<'tcx, SelectionCandidate<'tcx>>>>,
 }
 
 pub enum MethodMatchResult {
@@ -306,7 +306,7 @@ enum EvaluationResult {
 
 #[derive(Clone)]
 pub struct EvaluationCache<'tcx> {
-    hashmap: RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, EvaluationResult>>
+    hashmap: RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, EvaluationResult>>
 }
 
 impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
@@ -2937,7 +2937,7 @@ pub fn derived_cause(&self,
 impl<'tcx> SelectionCache<'tcx> {
     pub fn new() -> SelectionCache<'tcx> {
         SelectionCache {
-            hashmap: RefCell::new(FnvHashMap())
+            hashmap: RefCell::new(FxHashMap())
         }
     }
 }
@@ -2945,7 +2945,7 @@ pub fn new() -> SelectionCache<'tcx> {
 impl<'tcx> EvaluationCache<'tcx> {
     pub fn new() -> EvaluationCache<'tcx> {
         EvaluationCache {
-            hashmap: RefCell::new(FnvHashMap())
+            hashmap: RefCell::new(FxHashMap())
         }
     }
 }
src/librustc/traits/specialize/mod.rs
index 909247d1cb245b45ebd5b20f7d1a801afa506e12..4eef6944974c0a6962b41f12b96ee8baa050fe32 100644 (file)
@@ -20,7 +20,7 @@
 use super::{SelectionContext, FulfillmentContext};
 use super::util::impl_trait_ref_and_oblig;
 
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use hir::def_id::DefId;
 use infer::{InferCtxt, InferOk, TypeOrigin};
 use middle::region;
@@ -270,13 +270,13 @@ fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
 }
 
 pub struct SpecializesCache {
-    map: FnvHashMap<(DefId, DefId), bool>
+    map: FxHashMap<(DefId, DefId), bool>
 }
 
 impl SpecializesCache {
     pub fn new() -> Self {
         SpecializesCache {
-            map: FnvHashMap()
+            map: FxHashMap()
         }
     }
 
src/librustc/traits/specialize/specialization_graph.rs
index 1374719ef49c4875bd7eafe97a4fdd348a414a35..c746145474c758fa497aeace12e1efe9bd31c8a3 100644 (file)
@@ -17,7 +17,7 @@
 use ty::{self, TyCtxt, ImplOrTraitItem, TraitDef, TypeFoldable};
 use ty::fast_reject::{self, SimplifiedType};
 use syntax::ast::Name;
-use util::nodemap::{DefIdMap, FnvHashMap};
+use util::nodemap::{DefIdMap, FxHashMap};
 
 /// A per-trait graph of impls in specialization order. At the moment, this
 /// graph forms a tree rooted with the trait itself, with all other nodes
@@ -57,7 +57,7 @@ struct Children {
     // the specialization graph.
 
     /// Impls of the trait.
-    nonblanket_impls: FnvHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
+    nonblanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
 
     /// Blanket impls associated with the trait.
     blanket_impls: Vec<DefId>,
@@ -78,7 +78,7 @@ enum Inserted {
 impl<'a, 'gcx, 'tcx> Children {
     fn new() -> Children {
         Children {
-            nonblanket_impls: FnvHashMap(),
+            nonblanket_impls: FxHashMap(),
             blanket_impls: vec![],
         }
     }
src/librustc/traits/util.rs
index a3d974216b6e00d882ed6702d933df405da66960..52830164d1d91d3ad77b137c3d664065b0bee367 100644 (file)
@@ -13,7 +13,7 @@
 use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef};
 use ty::outlives::Component;
 use util::common::ErrorReported;
-use util::nodemap::FnvHashSet;
+use util::nodemap::FxHashSet;
 
 use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized};
 
@@ -50,12 +50,12 @@ fn anonymize_predicate<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
 
 struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    set: FnvHashSet<ty::Predicate<'tcx>>,
+    set: FxHashSet<ty::Predicate<'tcx>>,
 }
 
 impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> {
     fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> {
-        PredicateSet { tcx: tcx, set: FnvHashSet() }
+        PredicateSet { tcx: tcx, set: FxHashSet() }
     }
 
     fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool {
@@ -272,7 +272,7 @@ pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
 pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     stack: Vec<DefId>,
-    visited: FnvHashSet<DefId>,
+    visited: FxHashSet<DefId>,
 }
 
 pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
src/librustc/ty/contents.rs
index b499e1346e73c12afbffa50ac82be68cbce50b85..7ed4de38be97eef26ceedafc418491b0e27a2859 100644 (file)
@@ -11,7 +11,7 @@
 use hir::def_id::{DefId};
 use ty::{self, Ty, TyCtxt};
 use util::common::MemoizationMap;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use std::fmt;
 use std::ops;
@@ -141,11 +141,11 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 
 impl<'a, 'tcx> ty::TyS<'tcx> {
     pub fn type_contents(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> TypeContents {
-        return tcx.tc_cache.memoize(self, || tc_ty(tcx, self, &mut FnvHashMap()));
+        return tcx.tc_cache.memoize(self, || tc_ty(tcx, self, &mut FxHashMap()));
 
         fn tc_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                            ty: Ty<'tcx>,
-                           cache: &mut FnvHashMap<Ty<'tcx>, TypeContents>) -> TypeContents
+                           cache: &mut FxHashMap<Ty<'tcx>, TypeContents>) -> TypeContents
         {
             // Subtle: Note that we are *not* using tcx.tc_cache here but rather a
             // private cache for this walk.  This is needed in the case of cyclic
src/librustc/ty/context.rs
index 7e5e10435d51683d520fc299a801a67b082eeb58..b19f935123519be1fd0893d29e8e1e700dd5a6ca 100644 (file)
@@ -36,7 +36,7 @@
 use ty::maps;
 use util::common::MemoizationMap;
 use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 use rustc_data_structures::accumulate_vec::AccumulateVec;
 
 use arena::TypedArena;
@@ -96,26 +96,26 @@ pub struct CtxtInterners<'tcx> {
 
     /// Specifically use a speedy hash algorithm for these hash sets,
     /// they're accessed quite often.
-    type_: RefCell<FnvHashSet<Interned<'tcx, TyS<'tcx>>>>,
-    type_list: RefCell<FnvHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
-    substs: RefCell<FnvHashSet<Interned<'tcx, Substs<'tcx>>>>,
-    bare_fn: RefCell<FnvHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
-    region: RefCell<FnvHashSet<Interned<'tcx, Region>>>,
-    stability: RefCell<FnvHashSet<&'tcx attr::Stability>>,
-    layout: RefCell<FnvHashSet<&'tcx Layout>>,
+    type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
+    type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
+    substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
+    bare_fn: RefCell<FxHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
+    region: RefCell<FxHashSet<Interned<'tcx, Region>>>,
+    stability: RefCell<FxHashSet<&'tcx attr::Stability>>,
+    layout: RefCell<FxHashSet<&'tcx Layout>>,
 }
 
 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
     fn new(arenas: &'tcx CtxtArenas<'tcx>) -> CtxtInterners<'tcx> {
         CtxtInterners {
             arenas: arenas,
-            type_: RefCell::new(FnvHashSet()),
-            type_list: RefCell::new(FnvHashSet()),
-            substs: RefCell::new(FnvHashSet()),
-            bare_fn: RefCell::new(FnvHashSet()),
-            region: RefCell::new(FnvHashSet()),
-            stability: RefCell::new(FnvHashSet()),
-            layout: RefCell::new(FnvHashSet())
+            type_: RefCell::new(FxHashSet()),
+            type_list: RefCell::new(FxHashSet()),
+            substs: RefCell::new(FxHashSet()),
+            bare_fn: RefCell::new(FxHashSet()),
+            region: RefCell::new(FxHashSet()),
+            stability: RefCell::new(FxHashSet()),
+            layout: RefCell::new(FxHashSet())
         }
     }
 
@@ -244,11 +244,11 @@ pub struct Tables<'tcx> {
 impl<'a, 'gcx, 'tcx> Tables<'tcx> {
     pub fn empty() -> Tables<'tcx> {
         Tables {
-            node_types: FnvHashMap(),
+            node_types: FxHashMap(),
             item_substs: NodeMap(),
             adjustments: NodeMap(),
-            method_map: FnvHashMap(),
-            upvar_capture_map: FnvHashMap(),
+            method_map: FxHashMap(),
+            upvar_capture_map: FxHashMap(),
             closure_tys: DefIdMap(),
             closure_kinds: DefIdMap(),
             liberated_fn_sigs: NodeMap(),
@@ -451,16 +451,16 @@ pub struct GlobalCtxt<'tcx> {
     pub tcache: RefCell<DepTrackingMap<maps::Tcache<'tcx>>>,
 
     // Internal cache for metadata decoding. No need to track deps on this.
-    pub rcache: RefCell<FnvHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
+    pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
 
     // Cache for the type-contents routine. FIXME -- track deps?
-    pub tc_cache: RefCell<FnvHashMap<Ty<'tcx>, ty::contents::TypeContents>>,
+    pub tc_cache: RefCell<FxHashMap<Ty<'tcx>, ty::contents::TypeContents>>,
 
     // FIXME no dep tracking, but we should be able to remove this
     pub ty_param_defs: RefCell<NodeMap<ty::TypeParameterDef<'tcx>>>,
 
     // FIXME dep tracking -- should be harmless enough
-    pub normalized_cache: RefCell<FnvHashMap<Ty<'tcx>, Ty<'tcx>>>,
+    pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,
 
     pub lang_items: middle::lang_items::LanguageItems,
 
@@ -571,7 +571,7 @@ pub struct GlobalCtxt<'tcx> {
     pub data_layout: TargetDataLayout,
 
     /// Cache for layouts computed from types.
-    pub layout_cache: RefCell<FnvHashMap<Ty<'tcx>, &'tcx Layout>>,
+    pub layout_cache: RefCell<FxHashMap<Ty<'tcx>, &'tcx Layout>>,
 
     /// Used to prevent layout from recursing too deeply.
     pub layout_depth: Cell<usize>,
@@ -801,7 +801,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             types: common_types,
             named_region_map: named_region_map,
             region_maps: region_maps,
-            free_region_maps: RefCell::new(FnvHashMap()),
+            free_region_maps: RefCell::new(FxHashMap()),
             item_variance_map: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             variance_computed: Cell::new(false),
             sess: s,
@@ -820,13 +820,13 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             freevars: RefCell::new(freevars),
             maybe_unused_trait_imports: maybe_unused_trait_imports,
             tcache: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
-            rcache: RefCell::new(FnvHashMap()),
-            tc_cache: RefCell::new(FnvHashMap()),
+            rcache: RefCell::new(FxHashMap()),
+            tc_cache: RefCell::new(FxHashMap()),
             impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             impl_or_trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             ty_param_defs: RefCell::new(NodeMap()),
-            normalized_cache: RefCell::new(FnvHashMap()),
+            normalized_cache: RefCell::new(FxHashMap()),
             lang_items: lang_items,
             inherent_impls: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             used_unsafe: RefCell::new(NodeSet()),
@@ -846,7 +846,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             fragment_infos: RefCell::new(DefIdMap()),
             crate_name: token::intern_and_get_ident(crate_name),
             data_layout: data_layout,
-            layout_cache: RefCell::new(FnvHashMap()),
+            layout_cache: RefCell::new(FxHashMap()),
             layout_depth: Cell::new(0),
             derive_macros: RefCell::new(NodeMap()),
        }, f)
index b79ebdb14f552ba0977135013ba10f6782a9b0b3..354658ec4397f3cd284e1a4a580f6db642616b87 100644 (file)
@@ -45,7 +45,7 @@
 use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
 
 use std::fmt;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 
 /// The TypeFoldable trait is implemented for every type that can be folded.
 /// Basically, every type that has a corresponding method in TypeFolder.
@@ -225,7 +225,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// whether any late-bound regions were skipped
     pub fn collect_regions<T>(self,
         value: &T,
-        region_set: &mut FnvHashSet<&'tcx ty::Region>)
+        region_set: &mut FxHashSet<&'tcx ty::Region>)
         -> bool
         where T : TypeFoldable<'tcx>
     {
@@ -319,14 +319,14 @@ struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     current_depth: u32,
     fld_r: &'a mut (FnMut(ty::BoundRegion) -> &'tcx ty::Region + 'a),
-    map: FnvHashMap<ty::BoundRegion, &'tcx ty::Region>
+    map: FxHashMap<ty::BoundRegion, &'tcx ty::Region>
 }
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     pub fn replace_late_bound_regions<T,F>(self,
         value: &Binder<T>,
         mut f: F)
-        -> (T, FnvHashMap<ty::BoundRegion, &'tcx ty::Region>)
+        -> (T, FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
         where F : FnMut(ty::BoundRegion) -> &'tcx ty::Region,
               T : TypeFoldable<'tcx>,
     {
@@ -390,7 +390,7 @@ pub fn no_late_bound_regions<T>(self, value: &Binder<T>) -> Option<T>
     /// variables and equate `value` with something else, those
     /// variables will also be equated.
     pub fn collect_constrained_late_bound_regions<T>(&self, value: &Binder<T>)
-                                                     -> FnvHashSet<ty::BoundRegion>
+                                                     -> FxHashSet<ty::BoundRegion>
         where T : TypeFoldable<'tcx>
     {
         self.collect_late_bound_regions(value, true)
@@ -398,14 +398,14 @@ pub fn collect_constrained_late_bound_regions<T>(&self, value: &Binder<T>)
 
     /// Returns a set of all late-bound regions that appear in `value` anywhere.
     pub fn collect_referenced_late_bound_regions<T>(&self, value: &Binder<T>)
-                                                    -> FnvHashSet<ty::BoundRegion>
+                                                    -> FxHashSet<ty::BoundRegion>
         where T : TypeFoldable<'tcx>
     {
         self.collect_late_bound_regions(value, false)
     }
 
     fn collect_late_bound_regions<T>(&self, value: &Binder<T>, just_constraint: bool)
-                                     -> FnvHashSet<ty::BoundRegion>
+                                     -> FxHashSet<ty::BoundRegion>
         where T : TypeFoldable<'tcx>
     {
         let mut collector = LateBoundRegionsCollector::new(just_constraint);
@@ -450,7 +450,7 @@ fn new<F>(tcx: TyCtxt<'a, 'gcx, 'tcx>, fld_r: &'a mut F)
             tcx: tcx,
             current_depth: 1,
             fld_r: fld_r,
-            map: FnvHashMap()
+            map: FxHashMap()
         }
     }
 }
@@ -650,7 +650,7 @@ fn visit_region(&mut self, r: &'tcx ty::Region) -> bool {
 /// Collects all the late-bound regions it finds into a hash set.
 struct LateBoundRegionsCollector {
     current_depth: u32,
-    regions: FnvHashSet<ty::BoundRegion>,
+    regions: FxHashSet<ty::BoundRegion>,
     just_constrained: bool,
 }
 
@@ -658,7 +658,7 @@ impl LateBoundRegionsCollector {
     fn new(just_constrained: bool) -> Self {
         LateBoundRegionsCollector {
             current_depth: 1,
-            regions: FnvHashSet(),
+            regions: FxHashSet(),
             just_constrained: just_constrained,
         }
     }
index 2c15f08e89822667ad9fd9cdab7cce33fb9ca20d..fcf9b5ff2730cbd5b4bd5285e179685b52696318 100644 (file)
@@ -31,7 +31,7 @@
 use ty::walk::TypeWalker;
 use util::common::MemoizationMap;
 use util::nodemap::NodeSet;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use serialize::{self, Encodable, Encoder};
 use std::borrow::Cow;
@@ -418,7 +418,7 @@ pub fn autoderef(expr_id: NodeId, autoderef: u32) -> MethodCall {
 
 // maps from an expression id that corresponds to a method call to the details
 // of the method to be invoked
-pub type MethodMap<'tcx> = FnvHashMap<MethodCall, MethodCallee<'tcx>>;
+pub type MethodMap<'tcx> = FxHashMap<MethodCall, MethodCallee<'tcx>>;
 
 // Contains information needed to resolve types and (in the future) look up
 // the types of AST nodes.
@@ -650,7 +650,7 @@ pub struct UpvarBorrow<'tcx> {
     pub region: &'tcx ty::Region,
 }
 
-pub type UpvarCaptureMap<'tcx> = FnvHashMap<UpvarId, UpvarCapture<'tcx>>;
+pub type UpvarCaptureMap<'tcx> = FxHashMap<UpvarId, UpvarCapture<'tcx>>;
 
 #[derive(Copy, Clone)]
 pub struct ClosureUpvar<'tcx> {
@@ -1251,10 +1251,10 @@ pub struct ParameterEnvironment<'tcx> {
     pub free_id_outlive: CodeExtent,
 
     /// A cache for `moves_by_default`.
-    pub is_copy_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
+    pub is_copy_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
 
     /// A cache for `type_is_sized`
-    pub is_sized_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
+    pub is_sized_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
 }
 
 impl<'a, 'tcx> ParameterEnvironment<'tcx> {
@@ -1267,8 +1267,8 @@ pub fn with_caller_bounds(&self,
             implicit_region_bound: self.implicit_region_bound,
             caller_bounds: caller_bounds,
             free_id_outlive: self.free_id_outlive,
-            is_copy_cache: RefCell::new(FnvHashMap()),
-            is_sized_cache: RefCell::new(FnvHashMap()),
+            is_copy_cache: RefCell::new(FxHashMap()),
+            is_sized_cache: RefCell::new(FxHashMap()),
         }
     }
 
@@ -2752,8 +2752,8 @@ pub fn empty_parameter_environment(self) -> ParameterEnvironment<'tcx> {
             caller_bounds: Vec::new(),
             implicit_region_bound: self.mk_region(ty::ReEmpty),
             free_id_outlive: free_id_outlive,
-            is_copy_cache: RefCell::new(FnvHashMap()),
-            is_sized_cache: RefCell::new(FnvHashMap()),
+            is_copy_cache: RefCell::new(FxHashMap()),
+            is_sized_cache: RefCell::new(FxHashMap()),
         }
     }
 
@@ -2824,8 +2824,8 @@ pub fn construct_parameter_environment(self,
             implicit_region_bound: tcx.mk_region(ty::ReScope(free_id_outlive)),
             caller_bounds: predicates,
             free_id_outlive: free_id_outlive,
-            is_copy_cache: RefCell::new(FnvHashMap()),
-            is_sized_cache: RefCell::new(FnvHashMap()),
+            is_copy_cache: RefCell::new(FxHashMap()),
+            is_sized_cache: RefCell::new(FxHashMap()),
         };
 
         let cause = traits::ObligationCause::misc(span, free_id_outlive.node_id(&self.region_maps));
index 3ff2ed76e571e3a8b308657186d9a069331e57cf..fc32029948388d3046d37672b55e440e12d3a225 100644 (file)
@@ -16,7 +16,7 @@
 use ty::{Ty, TyCtxt, TraitRef};
 use std::cell::{Cell, RefCell};
 use hir;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 /// As `TypeScheme` but for a trait ref.
 pub struct TraitDef<'tcx> {
@@ -55,7 +55,7 @@ pub struct TraitDef<'tcx> {
 
     /// Impls of the trait.
     nonblanket_impls: RefCell<
-        FnvHashMap<fast_reject::SimplifiedType, Vec<DefId>>
+        FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>
     >,
 
     /// Blanket impls associated with the trait.
@@ -84,7 +84,7 @@ pub fn new(unsafety: hir::Unsafety,
             unsafety: unsafety,
             generics: generics,
             trait_ref: trait_ref,
-            nonblanket_impls: RefCell::new(FnvHashMap()),
+            nonblanket_impls: RefCell::new(FxHashMap()),
             blanket_impls: RefCell::new(vec![]),
             flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
             specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()),
index cca4069ba5a1799255be43dfd653e6d6b2f63ff5..b1aeaeb48d144c771aca644a7780a699754d01c8 100644 (file)
@@ -20,7 +20,7 @@
 use ty::fold::TypeVisitor;
 use ty::layout::{Layout, LayoutError};
 use ty::TypeVariants::*;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use rustc_const_math::{ConstInt, ConstIsize, ConstUsize};
 
@@ -594,7 +594,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
     fn impls_bound(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                    param_env: &ParameterEnvironment<'tcx>,
                    bound: ty::BuiltinBound,
-                   cache: &RefCell<FnvHashMap<Ty<'tcx>, bool>>,
+                   cache: &RefCell<FxHashMap<Ty<'tcx>, bool>>,
                    span: Span) -> bool
     {
         if self.has_param_types() || self.has_self_ty() {
index 69bcc9cbfffea807018c7fa2c73b2fab2b8f8164..b03011fcb216dbc4e783f695fde007bc384936b5 100644 (file)
 use hir::def_id::DefId;
 use syntax::ast;
 
-pub use rustc_data_structures::fnv::FnvHashMap;
-pub use rustc_data_structures::fnv::FnvHashSet;
+pub use rustc_data_structures::fx::FxHashMap;
+pub use rustc_data_structures::fx::FxHashSet;
 
-pub type NodeMap<T> = FnvHashMap<ast::NodeId, T>;
-pub type DefIdMap<T> = FnvHashMap<DefId, T>;
+pub type NodeMap<T> = FxHashMap<ast::NodeId, T>;
+pub type DefIdMap<T> = FxHashMap<DefId, T>;
 
-pub type NodeSet = FnvHashSet<ast::NodeId>;
-pub type DefIdSet = FnvHashSet<DefId>;
+pub type NodeSet = FxHashSet<ast::NodeId>;
+pub type DefIdSet = FxHashSet<DefId>;
 
-pub fn NodeMap<T>() -> NodeMap<T> { FnvHashMap() }
-pub fn DefIdMap<T>() -> DefIdMap<T> { FnvHashMap() }
-pub fn NodeSet() -> NodeSet { FnvHashSet() }
-pub fn DefIdSet() -> DefIdSet { FnvHashSet() }
+pub fn NodeMap<T>() -> NodeMap<T> { FxHashMap() }
+pub fn DefIdMap<T>() -> DefIdMap<T> { FxHashMap() }
+pub fn NodeSet() -> NodeSet { FxHashSet() }
+pub fn DefIdSet() -> DefIdSet { FxHashSet() }
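
Callers built on these aliases never spell out a hasher, so the swap is confined to the `pub use` lines above; the hunks elsewhere on this page are little more than renames. A hypothetical consumer, with `u32` standing in for `ast::NodeId` so the sketch compiles on its own:

    use std::collections::HashSet;

    type NodeId = u32;                 // stand-in for ast::NodeId
    type NodeSet = HashSet<NodeId>;    // stand-in for the alias defined above

    #[allow(non_snake_case)]
    fn NodeSet() -> NodeSet {
        HashSet::default()
    }

    /// Fixed-point reachability over directed edges. Nothing here mentions
    /// Fnv or Fx, so swapping the hasher behind `NodeSet` never touches it.
    fn reachable(root: NodeId, edges: &[(NodeId, NodeId)]) -> NodeSet {
        let mut seen = NodeSet();
        seen.insert(root);
        let mut changed = true;
        while changed {
            changed = false;
            for &(from, to) in edges {
                if seen.contains(&from) && seen.insert(to) {
                    changed = true;
                }
            }
        }
        seen
    }

    fn main() {
        let expected: NodeSet = [1, 2, 3].iter().cloned().collect();
        assert_eq!(reachable(1, &[(1, 2), (2, 3), (4, 5)]), expected);
    }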
 
index 191cd981b61ebffe62533f1f70aa46cec46b501c..be85069db3135d7b1e2ce6218fc6cd0d5a2b1467 100644 (file)
@@ -21,7 +21,7 @@
 use rustc::mir::transform::{Pass, MirPass, MirSource};
 use rustc::middle::const_val::ConstVal;
 use rustc::middle::lang_items;
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 use rustc_data_structures::indexed_set::IdxSetBuf;
 use rustc_data_structures::indexed_vec::Idx;
 use syntax_pos::Span;
@@ -63,7 +63,7 @@ fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 env: &env,
                 flow_inits: flow_inits,
                 flow_uninits: flow_uninits,
-                drop_flags: FnvHashMap(),
+                drop_flags: FxHashMap(),
                 patch: MirPatch::new(mir),
             }.elaborate()
         };
@@ -118,7 +118,7 @@ struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
     env: &'a MoveDataParamEnv<'tcx>,
     flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
     flow_uninits:  DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
-    drop_flags: FnvHashMap<MovePathIndex, Local>,
+    drop_flags: FxHashMap<MovePathIndex, Local>,
     patch: MirPatch<'tcx>,
 }
 
index 1dc5769e63cf8e1518cef6aa70c27b9399ceb9e0..02064b52cb1fb2c24b264f621bc879294d29e021 100644 (file)
@@ -11,7 +11,7 @@
 
 use rustc::ty::{self, TyCtxt, ParameterEnvironment};
 use rustc::mir::*;
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 use rustc_data_structures::indexed_vec::{IndexVec};
 
 use syntax::codemap::DUMMY_SP;
@@ -181,7 +181,7 @@ pub struct MovePathLookup<'tcx> {
     /// subsequent search so that it is solely relative to that
     /// base-lvalue). For the remaining lookup, we map the projection
     /// elem to the associated MovePathIndex.
-    projections: FnvHashMap<(MovePathIndex, AbstractElem<'tcx>), MovePathIndex>
+    projections: FxHashMap<(MovePathIndex, AbstractElem<'tcx>), MovePathIndex>
 }
 
 struct MoveDataBuilder<'a, 'tcx: 'a> {
@@ -215,7 +215,7 @@ fn new(mir: &'a Mir<'tcx>,
                     locals: mir.local_decls.indices().map(Lvalue::Local).map(|v| {
                         Self::new_move_path(&mut move_paths, &mut path_map, None, v)
                     }).collect(),
-                    projections: FnvHashMap(),
+                    projections: FxHashMap(),
                 },
                 move_paths: move_paths,
                 path_map: path_map,
index ba036f1a8b157a00ee3d7ff415e3338e7260dae9..afc4ccef0cc0fe11278a9a35fc97f78d6ac7c304 100644 (file)
@@ -23,7 +23,7 @@
 use rustc::middle::expr_use_visitor::MutateMode;
 use rustc::middle::mem_categorization as mc;
 use rustc::ty::{self, TyCtxt};
-use rustc::util::nodemap::{FnvHashMap, NodeSet};
+use rustc::util::nodemap::{FxHashMap, NodeSet};
 
 use std::cell::RefCell;
 use std::rc::Rc;
@@ -41,7 +41,7 @@ pub struct MoveData<'tcx> {
     pub paths: RefCell<Vec<MovePath<'tcx>>>,
 
     /// Cache of loan path to move path index, for easy lookup.
-    pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
+    pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
 
     /// Each move or uninitialized variable gets an entry here.
     pub moves: RefCell<Vec<Move>>,
@@ -217,7 +217,7 @@ impl<'a, 'tcx> MoveData<'tcx> {
     pub fn new() -> MoveData<'tcx> {
         MoveData {
             paths: RefCell::new(Vec::new()),
-            path_map: RefCell::new(FnvHashMap()),
+            path_map: RefCell::new(FxHashMap()),
             moves: RefCell::new(Vec::new()),
             path_assignments: RefCell::new(Vec::new()),
             var_assignments: RefCell::new(Vec::new()),
index 7f5eb31612cb394f002a5e1cdfa68d99f562ed77..831d21b831042cd6d5f2a386e70183749c2453b6 100644 (file)
@@ -17,7 +17,7 @@
 
 use rustc_const_math::ConstInt;
 
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::Idx;
 
 use pattern::{FieldPattern, Pattern, PatternKind};
@@ -160,7 +160,7 @@ pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
     /// associated types to get field types.
     pub wild_pattern: &'a Pattern<'tcx>,
     pub pattern_arena: &'a TypedArena<Pattern<'tcx>>,
-    pub byte_array_map: FnvHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>,
+    pub byte_array_map: FxHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>,
 }
 
 impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
@@ -181,7 +181,7 @@ pub fn create_and_enter<F, R>(
             tcx: tcx,
             wild_pattern: &wild_pattern,
             pattern_arena: &pattern_arena,
-            byte_array_map: FnvHashMap(),
+            byte_array_map: FxHashMap(),
         })
     }
 
index fc963dac9495fc1b95d0350d05d99c5024281843..fdcbec6bac11a97f3867f70ce14eababca389755 100644 (file)
@@ -60,6 +60,7 @@
 pub mod transitive_relation;
 pub mod unify;
 pub mod fnv;
+pub mod fx;
 pub mod tuple_slice;
 pub mod veccell;
 pub mod control_flow_graph;
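
lib.rs only registers the new module; fx.rs itself is not among the hunks shown on this page. As a rough illustration only (not the actual implementation), an Fx-style hasher is a rotate/xor/multiply `Hasher`, and `FxHashMap`/`FxHashSet` are then presumably plain `HashMap`/`HashSet` parameterized over `BuildHasherDefault` of it:

    use std::collections::HashMap;
    use std::hash::{BuildHasherDefault, Hasher};

    // Illustrative constant only; the real FxHasher picks its own seed and
    // consumes whole words at a time rather than single bytes.
    const SEED: u64 = 0x9e37_79b9_7f4a_7c15;

    #[derive(Default)]
    struct SketchFxHasher {
        hash: u64,
    }

    impl Hasher for SketchFxHasher {
        fn write(&mut self, bytes: &[u8]) {
            for &byte in bytes {
                // rotate, mix in the new data, then multiply to spread the bits
                self.hash = (self.hash.rotate_left(5) ^ u64::from(byte)).wrapping_mul(SEED);
            }
        }
        fn finish(&self) -> u64 {
            self.hash
        }
    }

    type SketchFxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<SketchFxHasher>>;

    fn main() {
        let mut m: SketchFxHashMap<&str, u32> = HashMap::default();
        m.insert("layout_cache", 1);
        assert_eq!(m["layout_cache"], 1);
    }

Unlike SipHash (the std library default), a mixer like this makes no attempt at HashDoS resistance, which is an acceptable trade for compiler-internal tables keyed by interned ids and pointers.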
index a2bfa784e8aed0725ca6e50034fd096d3b2703a7..a46238309bb46a87e16b16ee72ee1505cbb65b65 100644 (file)
@@ -15,7 +15,7 @@
 //! in the first place). See README.md for a general overview of how
 //! to use this class.
 
-use fnv::{FnvHashMap, FnvHashSet};
+use fx::{FxHashMap, FxHashSet};
 
 use std::cell::Cell;
 use std::collections::hash_map::Entry;
@@ -68,9 +68,9 @@ pub struct ObligationForest<O: ForestObligation> {
     /// backtrace iterator (which uses `split_at`).
     nodes: Vec<Node<O>>,
     /// A cache of predicates that have been successfully completed.
-    done_cache: FnvHashSet<O::Predicate>,
+    done_cache: FxHashSet<O::Predicate>,
     /// A cache of the nodes in `nodes`, indexed by predicate.
-    waiting_cache: FnvHashMap<O::Predicate, NodeIndex>,
+    waiting_cache: FxHashMap<O::Predicate, NodeIndex>,
     /// A list of the obligations added in snapshots, to allow
     /// for their removal.
     cache_list: Vec<O::Predicate>,
@@ -158,8 +158,8 @@ pub fn new() -> ObligationForest<O> {
         ObligationForest {
             nodes: vec![],
             snapshots: vec![],
-            done_cache: FnvHashSet(),
-            waiting_cache: FnvHashMap(),
+            done_cache: FxHashSet(),
+            waiting_cache: FxHashMap(),
             cache_list: vec![],
             scratch: Some(vec![]),
         }
index a4e6166032d81de5900c9f1111fbf419d8e4bbdb..cd7143ad3ce8470d05826d005c44c9ff3efeae3a 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use fnv::FnvHashMap;
+use fx::FxHashMap;
 use std::hash::Hash;
 use std::ops;
 use std::mem;
@@ -19,7 +19,7 @@
 pub struct SnapshotMap<K, V>
     where K: Hash + Clone + Eq
 {
-    map: FnvHashMap<K, V>,
+    map: FxHashMap<K, V>,
     undo_log: Vec<UndoLog<K, V>>,
 }
 
@@ -40,7 +40,7 @@ impl<K, V> SnapshotMap<K, V>
 {
     pub fn new() -> Self {
         SnapshotMap {
-            map: FnvHashMap(),
+            map: FxHashMap(),
             undo_log: vec![],
         }
     }
index 28aab1fdd4167e4fc104ba23ebccd45d271d2a28..37477da755c9fb24226b42936c91aa27fdaba7fe 100644 (file)
@@ -48,7 +48,7 @@
 use rustc::dep_graph::debug::{DepNodeFilter, EdgeFilter};
 use rustc::hir::def_id::DefId;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashSet;
+use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
 use rustc::hir;
 use rustc::hir::intravisit::Visitor;
@@ -244,7 +244,7 @@ fn dump_graph(tcx: TyCtxt) {
     }
 }
 
-pub struct GraphvizDepGraph<'q>(FnvHashSet<&'q DepNode<DefId>>,
+pub struct GraphvizDepGraph<'q>(FxHashSet<&'q DepNode<DefId>>,
                                 Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>);
 
 impl<'a, 'tcx, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {
@@ -288,7 +288,7 @@ fn node_label(&self, n: &&'q DepNode<DefId>) -> dot::LabelText {
 // filter) or the set of nodes whose labels contain all of those
 // substrings.
 fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
-                -> Option<FnvHashSet<&'q DepNode<DefId>>>
+                -> Option<FxHashSet<&'q DepNode<DefId>>>
 {
     debug!("node_set(filter={:?})", filter);
 
@@ -300,9 +300,9 @@ fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
 }
 
 fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
-                    sources: &Option<FnvHashSet<&'q DepNode<DefId>>>,
-                    targets: &Option<FnvHashSet<&'q DepNode<DefId>>>)
-                    -> FnvHashSet<&'q DepNode<DefId>>
+                    sources: &Option<FxHashSet<&'q DepNode<DefId>>>,
+                    targets: &Option<FxHashSet<&'q DepNode<DefId>>>)
+                    -> FxHashSet<&'q DepNode<DefId>>
 {
     if let &Some(ref sources) = sources {
         if let &Some(ref targets) = targets {
@@ -318,11 +318,11 @@ fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
 }
 
 fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
-                  starts: &FnvHashSet<&'q DepNode<DefId>>,
+                  starts: &FxHashSet<&'q DepNode<DefId>>,
                   direction: Direction)
-                  -> FnvHashSet<&'q DepNode<DefId>>
+                  -> FxHashSet<&'q DepNode<DefId>>
 {
-    let mut set = FnvHashSet();
+    let mut set = FxHashSet();
     for &start in starts {
         debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
         if set.insert(start) {
@@ -342,9 +342,9 @@ fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
 }
 
 fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
-                    sources: &FnvHashSet<&'q DepNode<DefId>>,
-                    targets: &FnvHashSet<&'q DepNode<DefId>>)
-                    -> FnvHashSet<&'q DepNode<DefId>>
+                    sources: &FxHashSet<&'q DepNode<DefId>>,
+                    targets: &FxHashSet<&'q DepNode<DefId>>)
+                    -> FxHashSet<&'q DepNode<DefId>>
 {
     // This is a bit tricky. We want to include a node only if it is:
     // (a) reachable from a source and (b) will reach a target. And we
@@ -410,7 +410,7 @@ fn recurse(query: &DepGraphQuery<DefId>,
 }
 
 fn filter_edges<'q>(query: &'q DepGraphQuery<DefId>,
-                    nodes: &FnvHashSet<&'q DepNode<DefId>>)
+                    nodes: &FxHashSet<&'q DepNode<DefId>>)
                     -> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>
 {
     query.edges()
index 3b0b37bb01ce31a57f76890506572b0dbd76d715..58a21529974106bb65c538acda91c9fa7e185a13 100644 (file)
@@ -35,7 +35,7 @@
 use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
 use rustc::hir::intravisit as visit;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc::util::common::record_time;
 use rustc::session::config::DebugInfoLevel::NoDebugInfo;
 
 pub mod hasher;
 
 pub struct IncrementalHashesMap {
-    hashes: FnvHashMap<DepNode<DefId>, Fingerprint>,
+    hashes: FxHashMap<DepNode<DefId>, Fingerprint>,
 
     // These are the metadata hashes for the current crate as they were stored
     // during the last compilation session. They are only loaded if
     // -Z query-dep-graph was specified and are needed for auto-tests using
     // the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to
     // check whether some metadata hash has changed in between two revisions.
-    pub prev_metadata_hashes: RefCell<FnvHashMap<DefId, Fingerprint>>,
+    pub prev_metadata_hashes: RefCell<FxHashMap<DefId, Fingerprint>>,
 }
 
 impl IncrementalHashesMap {
     pub fn new() -> IncrementalHashesMap {
         IncrementalHashesMap {
-            hashes: FnvHashMap(),
-            prev_metadata_hashes: RefCell::new(FnvHashMap()),
+            hashes: FxHashMap(),
+            prev_metadata_hashes: RefCell::new(FxHashMap()),
         }
     }
 
index 734ffe6a94412024405564a6685cc5048ede21da..f0e4f4f99ef08e2977bbf069262e598db9d8bb8a 100644 (file)
@@ -13,7 +13,7 @@
 use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
 use rustc::hir::def_id::DefIndex;
 use std::sync::Arc;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use ich::Fingerprint;
 
 use super::directory::DefPathIndex;
@@ -106,7 +106,7 @@ pub struct SerializedMetadataHashes {
     /// is only populated if -Z query-dep-graph is specified. It will be
     /// empty otherwise. Importing crates are perfectly happy with just having
     /// the DefIndex.
-    pub index_map: FnvHashMap<DefIndex, DefPathIndex>
+    pub index_map: FxHashMap<DefIndex, DefPathIndex>
 }
 
 /// The hash for some metadata that (when saving) will be exported
index 94478f6603a6ee8dcb8d4980490a1f1bd77a8f7e..69b9be12de46c2de35a2438896546a3b81f80db8 100644 (file)
@@ -47,7 +47,7 @@
 use rustc::hir::def_id::DefId;
 use rustc::hir::intravisit::Visitor;
 use syntax::ast::{self, Attribute, NestedMetaItem};
-use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use syntax::parse::token::InternedString;
 use syntax_pos::Span;
 use rustc::ty::TyCtxt;
@@ -67,7 +67,7 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let _ignore = tcx.dep_graph.in_ignore();
-    let dirty_inputs: FnvHashSet<DepNode<DefId>> =
+    let dirty_inputs: FxHashSet<DepNode<DefId>> =
         dirty_inputs.iter()
                    .filter_map(|d| retraced.map(d))
                    .collect();
@@ -84,7 +84,7 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     query: &'a DepGraphQuery<DefId>,
-    dirty_inputs: FnvHashSet<DepNode<DefId>>,
+    dirty_inputs: FxHashSet<DepNode<DefId>>,
 }
 
 impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
@@ -187,8 +187,8 @@ fn visit_item(&mut self, item: &'tcx hir::Item) {
 }
 
 pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                  prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>,
-                                  current_metadata_hashes: &FnvHashMap<DefId, Fingerprint>) {
+                                  prev_metadata_hashes: &FxHashMap<DefId, Fingerprint>,
+                                  current_metadata_hashes: &FxHashMap<DefId, Fingerprint>) {
     if !tcx.sess.opts.debugging_opts.query_dep_graph {
         return;
     }
@@ -205,8 +205,8 @@ pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
 pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    prev_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
-    current_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
+    prev_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
+    current_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
 }
 
 impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
index ff7c3d0512e4f18c30c6e82686d6e0c8bdb539d7..ca9c119202322038911521a217625f81e2fc56c6 100644 (file)
 use rustc::ty::TyCtxt;
 use rustc::util::fs as fs_util;
 use rustc_data_structures::flock;
-use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 
 use std::ffi::OsString;
 use std::fs as std_fs;
@@ -195,7 +195,7 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result<bool, ()> {
     debug!("crate-dir: {}", crate_dir.display());
     try!(create_dir(tcx.sess, &crate_dir, "crate"));
 
-    let mut source_directories_already_tried = FnvHashSet();
+    let mut source_directories_already_tried = FxHashSet();
 
     loop {
         // Generate a session directory of the form:
@@ -490,7 +490,7 @@ fn delete_session_dir_lock_file(sess: &Session,
 /// Find the most recent published session directory that is not in the
 /// ignore-list.
 fn find_source_directory(crate_dir: &Path,
-                         source_directories_already_tried: &FnvHashSet<PathBuf>)
+                         source_directories_already_tried: &FxHashSet<PathBuf>)
                          -> Option<PathBuf> {
     let iter = crate_dir.read_dir()
                         .unwrap() // FIXME
@@ -500,7 +500,7 @@ fn find_source_directory(crate_dir: &Path,
 }
 
 fn find_source_directory_in_iter<I>(iter: I,
-                                    source_directories_already_tried: &FnvHashSet<PathBuf>)
+                                    source_directories_already_tried: &FxHashSet<PathBuf>)
                                     -> Option<PathBuf>
     where I: Iterator<Item=PathBuf>
 {
@@ -704,8 +704,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
 
     // First do a pass over the crate directory, collecting lock files and
     // session directories
-    let mut session_directories = FnvHashSet();
-    let mut lock_files = FnvHashSet();
+    let mut session_directories = FxHashSet();
+    let mut lock_files = FxHashSet();
 
     for dir_entry in try!(crate_directory.read_dir()) {
         let dir_entry = match dir_entry {
@@ -731,7 +731,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     }
 
     // Now map from lock files to session directories
-    let lock_file_to_session_dir: FnvHashMap<String, Option<String>> =
+    let lock_file_to_session_dir: FxHashMap<String, Option<String>> =
         lock_files.into_iter()
                   .map(|lock_file_name| {
                         assert!(lock_file_name.ends_with(LOCK_FILE_EXT));
@@ -774,7 +774,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
     }
 
     // Filter out `None` directories
-    let lock_file_to_session_dir: FnvHashMap<String, String> =
+    let lock_file_to_session_dir: FxHashMap<String, String> =
         lock_file_to_session_dir.into_iter()
                                 .filter_map(|(lock_file_name, directory_name)| {
                                     directory_name.map(|n| (lock_file_name, n))
@@ -898,7 +898,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
 }
 
 fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>)
-                          -> FnvHashMap<PathBuf, Option<flock::Lock>> {
+                          -> FxHashMap<PathBuf, Option<flock::Lock>> {
     let most_recent = deletion_candidates.iter()
                                          .map(|&(timestamp, ..)| timestamp)
                                          .max();
@@ -909,7 +909,7 @@ fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<
                            .map(|(_, path, lock)| (path, lock))
                            .collect()
     } else {
-        FnvHashMap()
+        FxHashMap()
     }
 }
 
@@ -946,19 +946,19 @@ fn test_all_except_most_recent() {
             (UNIX_EPOCH + Duration::new(5, 0), PathBuf::from("5"), None),
             (UNIX_EPOCH + Duration::new(3, 0), PathBuf::from("3"), None),
             (UNIX_EPOCH + Duration::new(2, 0), PathBuf::from("2"), None),
-        ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
+        ]).keys().cloned().collect::<FxHashSet<PathBuf>>(),
         vec![
             PathBuf::from("1"),
             PathBuf::from("2"),
             PathBuf::from("3"),
             PathBuf::from("4"),
-        ].into_iter().collect::<FnvHashSet<PathBuf>>()
+        ].into_iter().collect::<FxHashSet<PathBuf>>()
     );
 
     assert_eq!(all_except_most_recent(
         vec![
-        ]).keys().cloned().collect::<FnvHashSet<PathBuf>>(),
-        FnvHashSet()
+        ]).keys().cloned().collect::<FxHashSet<PathBuf>>(),
+        FxHashSet()
     );
 }
 
@@ -973,7 +973,7 @@ fn test_timestamp_serialization() {
 
 #[test]
 fn test_find_source_directory_in_iter() {
-    let already_visited = FnvHashSet();
+    let already_visited = FxHashSet();
 
     // Find newest
     assert_eq!(find_source_directory_in_iter(
index e365cbbd3a9a10b417a4301cf77f96eaab44e1f9..73311ee96c5308712fae79aa667076efcbb20d63 100644 (file)
@@ -12,7 +12,7 @@
 use rustc::hir::def_id::{CrateNum, DefId};
 use rustc::hir::svh::Svh;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::flock;
 use rustc_serialize::Decodable;
 use rustc_serialize::opaque::Decoder;
@@ -26,8 +26,8 @@
 pub struct HashContext<'a, 'tcx: 'a> {
     pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
     incremental_hashes_map: &'a IncrementalHashesMap,
-    item_metadata_hashes: FnvHashMap<DefId, Fingerprint>,
-    crate_hashes: FnvHashMap<CrateNum, Svh>,
+    item_metadata_hashes: FxHashMap<DefId, Fingerprint>,
+    crate_hashes: FxHashMap<CrateNum, Svh>,
 }
 
 impl<'a, 'tcx> HashContext<'a, 'tcx> {
@@ -37,8 +37,8 @@ pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         HashContext {
             tcx: tcx,
             incremental_hashes_map: incremental_hashes_map,
-            item_metadata_hashes: FnvHashMap(),
-            crate_hashes: FnvHashMap(),
+            item_metadata_hashes: FxHashMap(),
+            crate_hashes: FxHashMap(),
         }
     }
 
index 7cef246b6cb2c5b6dc18c3abda5f0109a4694a70..12bf74c95116d508b197daf711a824071bcd511a 100644 (file)
@@ -15,7 +15,7 @@
 use rustc::hir::svh::Svh;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use rustc_serialize::Decodable as RustcDecodable;
 use rustc_serialize::opaque::Decoder;
 use std::fs;
@@ -30,7 +30,7 @@
 use super::fs::*;
 use super::file_format;
 
-pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>;
+pub type DirtyNodes = FxHashSet<DepNode<DefPathIndex>>;
 
 /// If we are in incremental mode, and a previous dep-graph exists,
 /// then load up those nodes/edges that are still valid into the
@@ -183,7 +183,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     // Compute which work-products have an input that has changed or
     // been removed. Put the dirty ones into a set.
-    let mut dirty_target_nodes = FnvHashSet();
+    let mut dirty_target_nodes = FxHashSet();
     for &(raw_source_node, ref target_node) in &retraced_edges {
         if dirty_raw_source_nodes.contains(raw_source_node) {
             if !dirty_target_nodes.contains(target_node) {
@@ -239,7 +239,7 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          retraced: &RetracedDefIdDirectory)
                          -> DirtyNodes {
     let mut hcx = HashContext::new(tcx, incremental_hashes_map);
-    let mut dirty_nodes = FnvHashSet();
+    let mut dirty_nodes = FxHashSet();
 
     for hash in serialized_hashes {
         if let Some(dep_node) = retraced.map(&hash.dep_node) {
@@ -270,7 +270,7 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 /// otherwise no longer applicable.
 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                      work_products: Vec<SerializedWorkProduct>,
-                                     dirty_target_nodes: &FnvHashSet<DepNode<DefId>>) {
+                                     dirty_target_nodes: &FxHashSet<DepNode<DefId>>) {
     debug!("reconcile_work_products({:?})", work_products);
     for swp in work_products {
         if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
@@ -314,7 +314,7 @@ fn delete_dirty_work_product(tcx: TyCtxt,
 
 fn load_prev_metadata_hashes(tcx: TyCtxt,
                              retraced: &RetracedDefIdDirectory,
-                             output: &mut FnvHashMap<DefId, Fingerprint>) {
+                             output: &mut FxHashMap<DefId, Fingerprint>) {
     if !tcx.sess.opts.debugging_opts.query_dep_graph {
         return
     }
index fe1d627253f286a16c455bae6450be9dfb5cbdfe..e1968ce8d7b6ad541234ed9ff990b36022941633 100644 (file)
@@ -10,7 +10,7 @@
 
 use rustc::dep_graph::{DepGraphQuery, DepNode};
 use rustc::hir::def_id::DefId;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex};
 
 use super::hash::*;
@@ -23,11 +23,11 @@ pub struct Predecessors<'query> {
     //   nodes.
     // - Values: transitive predecessors of the key that are hashable
     //   (e.g., HIR nodes, input meta-data nodes)
-    pub inputs: FnvHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>,
+    pub inputs: FxHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>,
 
     // - Keys: some hashable node
     // - Values: the hash thereof
-    pub hashes: FnvHashMap<&'query DepNode<DefId>, Fingerprint>,
+    pub hashes: FxHashMap<&'query DepNode<DefId>, Fingerprint>,
 }
 
 impl<'q> Predecessors<'q> {
@@ -37,7 +37,7 @@ pub fn new(query: &'q DepGraphQuery<DefId>, hcx: &mut HashContext) -> Self {
         let all_nodes = query.graph.all_nodes();
         let tcx = hcx.tcx;
 
-        let inputs: FnvHashMap<_, _> = all_nodes.iter()
+        let inputs: FxHashMap<_, _> = all_nodes.iter()
             .enumerate()
             .filter(|&(_, node)| match node.data {
                 DepNode::WorkProduct(_) => true,
@@ -60,7 +60,7 @@ pub fn new(query: &'q DepGraphQuery<DefId>, hcx: &mut HashContext) -> Self {
             })
             .collect();
 
-        let mut hashes = FnvHashMap();
+        let mut hashes = FxHashMap();
         for input in inputs.values().flat_map(|v| v.iter().cloned()) {
             hashes.entry(input)
                   .or_insert_with(|| hcx.hash(input).unwrap());
index bc156b0e8913b21a159a1825eb43ecd93858ad82..289eebb2162083a1ee44de841288d95e5b45b918 100644 (file)
@@ -13,7 +13,7 @@
 use rustc::hir::svh::Svh;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_serialize::Encodable as RustcEncodable;
 use rustc_serialize::opaque::Encoder;
 use std::hash::Hash;
@@ -46,7 +46,7 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let query = tcx.dep_graph.query();
     let mut hcx = HashContext::new(tcx, incremental_hashes_map);
     let preds = Predecessors::new(&query, &mut hcx);
-    let mut current_metadata_hashes = FnvHashMap();
+    let mut current_metadata_hashes = FxHashMap();
 
     // IMPORTANT: We are saving the metadata hashes *before* the dep-graph,
     //            since metadata-encoding might add new entries to the
@@ -186,7 +186,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
                               svh: Svh,
                               preds: &Predecessors,
                               builder: &mut DefIdDirectoryBuilder,
-                              current_metadata_hashes: &mut FnvHashMap<DefId, Fingerprint>,
+                              current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
                               encoder: &mut Encoder)
                               -> io::Result<()> {
     // For each `MetaData(X)` node where `X` is local, accumulate a
@@ -198,10 +198,10 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
     // (I initially wrote this with an iterator, but it seemed harder to read.)
     let mut serialized_hashes = SerializedMetadataHashes {
         hashes: vec![],
-        index_map: FnvHashMap()
+        index_map: FxHashMap()
     };
 
-    let mut def_id_hashes = FnvHashMap();
+    let mut def_id_hashes = FxHashMap();
 
     for (&target, sources) in &preds.inputs {
         let def_id = match *target {
index b04759955a956b8d3a43153d9b98b4b89a7da7a5..48471282672adf8563ba7d0de76ad0ca99fa21ab 100644 (file)
@@ -18,7 +18,7 @@
 use middle::const_val::ConstVal;
 use rustc_const_eval::eval_const_expr_partial;
 use rustc_const_eval::EvalHint::ExprTypeChecked;
-use util::nodemap::FnvHashSet;
+use util::nodemap::FxHashSet;
 use lint::{LateContext, LintContext, LintArray};
 use lint::{LintPass, LateLintPass};
 
@@ -428,7 +428,7 @@ fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
     /// Check if the given type is "ffi-safe" (has a stable, well-defined
     /// representation which can be exported to C code).
-    fn check_type_for_ffi(&self, cache: &mut FnvHashSet<Ty<'tcx>>, ty: Ty<'tcx>) -> FfiResult {
+    fn check_type_for_ffi(&self, cache: &mut FxHashSet<Ty<'tcx>>, ty: Ty<'tcx>) -> FfiResult {
         use self::FfiResult::*;
         let cx = self.cx.tcx;
 
@@ -639,7 +639,7 @@ fn check_type_for_ffi_and_report_errors(&mut self, sp: Span, ty: Ty<'tcx>) {
         // any generic types right now:
         let ty = self.cx.tcx.normalize_associated_type(&ty);
 
-        match self.check_type_for_ffi(&mut FnvHashSet(), ty) {
+        match self.check_type_for_ffi(&mut FxHashSet(), ty) {
             FfiResult::FfiSafe => {}
             FfiResult::FfiUnsafe(s) => {
                 self.cx.span_lint(IMPROPER_CTYPES, sp, s);
index 15430a5c9f99d96ca583ae3c2767bb8d51dfd374..a5339f7326a63e6a8a40b132319315a7f3b52ac7 100644 (file)
@@ -11,7 +11,7 @@
 use rustc::hir::pat_util;
 use rustc::ty;
 use rustc::ty::adjustment;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 use lint::{LateContext, EarlyContext, LintContext, LintArray};
 use lint::{LintPass, EarlyLintPass, LateLintPass};
 
@@ -42,7 +42,7 @@ fn check_unused_mut_pat(&self, cx: &LateContext, pats: &[P<hir::Pat>]) {
         // collect all mutable pattern and group their NodeIDs by their Identifier to
         // avoid false warnings in match arms with multiple patterns
 
-        let mut mutables = FnvHashMap();
+        let mut mutables = FxHashMap();
         for p in pats {
             pat_util::pat_bindings(p, |mode, id, _, path1| {
                 let name = path1.node;
index e72ac8419941c12234cae3fe01a60775b1c2a6b4..43c97cbe004b376bbc8e518f47557a80835655fd 100644 (file)
@@ -22,7 +22,7 @@
 use rustc::session::search_paths::PathKind;
 use rustc::middle;
 use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate};
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 use rustc::hir::map::Definitions;
 
 use std::cell::{RefCell, Cell};
@@ -50,7 +50,7 @@ pub struct CrateLoader<'a> {
     pub sess: &'a Session,
     cstore: &'a CStore,
     next_crate_num: CrateNum,
-    foreign_item_map: FnvHashMap<String, Vec<ast::NodeId>>,
+    foreign_item_map: FxHashMap<String, Vec<ast::NodeId>>,
     local_crate_name: String,
 }
 
@@ -148,7 +148,7 @@ pub fn new(sess: &'a Session, cstore: &'a CStore, local_crate_name: &str) -> Sel
             sess: sess,
             cstore: cstore,
             next_crate_num: cstore.next_crate_num(),
-            foreign_item_map: FnvHashMap(),
+            foreign_item_map: FxHashMap(),
             local_crate_name: local_crate_name.to_owned(),
         }
     }
@@ -401,7 +401,7 @@ fn load(&mut self, locate_ctxt: &mut locator::Context) -> Option<LoadResult> {
     fn update_extern_crate(&mut self,
                            cnum: CrateNum,
                            mut extern_crate: ExternCrate,
-                           visited: &mut FnvHashSet<(CrateNum, bool)>)
+                           visited: &mut FxHashSet<(CrateNum, bool)>)
     {
         if !visited.insert((cnum, extern_crate.direct)) { return }
 
@@ -442,7 +442,7 @@ fn resolve_crate_deps(&mut self,
         // The map from crate numbers in the crate we're resolving to local crate
         // numbers
         let deps = crate_root.crate_deps.decode(metadata);
-        let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
+        let map: FxHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
             debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
             let (local_cnum, ..) = self.resolve_crate(root,
                                                         &dep.name.as_str(),
@@ -1021,7 +1021,7 @@ fn process_item(&mut self, item: &ast::Item, definitions: &Definitions, load_mac
 
         let extern_crate =
             ExternCrate { def_id: def_id, span: item.span, direct: true, path_len: len };
-        self.update_extern_crate(cnum, extern_crate, &mut FnvHashSet());
+        self.update_extern_crate(cnum, extern_crate, &mut FxHashSet());
         self.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
 
         loaded_macros
index 58c70f959b7cc35d058c25549bc75a2382367066..f452cc23b7330433d25fd4af3c7a47359a4f5691 100644 (file)
@@ -21,7 +21,7 @@
 use rustc::middle::cstore::ExternCrate;
 use rustc_back::PanicStrategy;
 use rustc_data_structures::indexed_vec::IndexVec;
-use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap};
+use rustc::util::nodemap::{FxHashMap, NodeMap, NodeSet, DefIdMap};
 
 use std::cell::{RefCell, Cell};
 use std::rc::Rc;
@@ -76,7 +76,7 @@ pub struct CrateMetadata {
     /// hashmap, which gives the reverse mapping.  This allows us to
     /// quickly retrace a `DefPath`, which is needed for incremental
     /// compilation support.
-    pub key_map: FnvHashMap<DefKey, DefIndex>,
+    pub key_map: FxHashMap<DefKey, DefIndex>,
 
     /// Flag if this crate is required by an rlib version of this crate, or in
     /// other words whether it was explicitly linked to. An example of a crate
@@ -94,7 +94,7 @@ pub struct CachedInlinedItem {
 
 pub struct CStore {
     pub dep_graph: DepGraph,
-    metas: RefCell<FnvHashMap<CrateNum, Rc<CrateMetadata>>>,
+    metas: RefCell<FxHashMap<CrateNum, Rc<CrateMetadata>>>,
     /// Map from NodeId's of local extern crate statements to crate numbers
     extern_mod_crate_map: RefCell<NodeMap<CrateNum>>,
     used_crate_sources: RefCell<Vec<CrateSource>>,
@@ -110,15 +110,15 @@ impl CStore {
     pub fn new(dep_graph: &DepGraph) -> CStore {
         CStore {
             dep_graph: dep_graph.clone(),
-            metas: RefCell::new(FnvHashMap()),
-            extern_mod_crate_map: RefCell::new(FnvHashMap()),
+            metas: RefCell::new(FxHashMap()),
+            extern_mod_crate_map: RefCell::new(FxHashMap()),
             used_crate_sources: RefCell::new(Vec::new()),
             used_libraries: RefCell::new(Vec::new()),
             used_link_args: RefCell::new(Vec::new()),
             statically_included_foreign_items: RefCell::new(NodeSet()),
-            visible_parent_map: RefCell::new(FnvHashMap()),
-            inlined_item_cache: RefCell::new(FnvHashMap()),
-            defid_for_inlined_node: RefCell::new(FnvHashMap()),
+            visible_parent_map: RefCell::new(FxHashMap()),
+            inlined_item_cache: RefCell::new(FxHashMap()),
+            defid_for_inlined_node: RefCell::new(FxHashMap()),
         }
     }
 
index ccd497860de8a19f2e1c442407d221fba26fd588..630b07744249b425652d02e60116d09acd98d47b 100644 (file)
@@ -17,7 +17,7 @@
 
 use rustc::hir::map as hir_map;
 use rustc::hir::map::{DefKey, DefPathData};
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 use rustc::hir;
 use rustc::hir::intravisit::IdRange;
 
@@ -432,7 +432,7 @@ pub fn get_root(&self) -> CrateRoot {
 
     /// Go through each item in the metadata and create a map from that
     /// item's def-key to the item's DefIndex.
-    pub fn load_key_map(&self, index: LazySeq<Index>) -> FnvHashMap<DefKey, DefIndex> {
+    pub fn load_key_map(&self, index: LazySeq<Index>) -> FxHashMap<DefKey, DefIndex> {
         index.iter_enumerated(self.raw_bytes())
             .map(|(index, item)| (item.decode(self).def_key.decode(self), index))
             .collect()
index fdb117ef81b13e731b638ad70e795466cdcdb0f6..fb4fb507296287d8d8dc84b87bf87dbc0ac37cc2 100644 (file)
@@ -23,7 +23,7 @@
 use rustc::ty::{self, Ty, TyCtxt};
 
 use rustc::session::config::{self, CrateTypeProcMacro};
-use rustc::util::nodemap::{FnvHashMap, NodeSet};
+use rustc::util::nodemap::{FxHashMap, NodeSet};
 
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
 use std::hash::Hash;
@@ -52,8 +52,8 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
     reachable: &'a NodeSet,
 
     lazy_state: LazyState,
-    type_shorthands: FnvHashMap<Ty<'tcx>, usize>,
-    predicate_shorthands: FnvHashMap<ty::Predicate<'tcx>, usize>,
+    type_shorthands: FxHashMap<Ty<'tcx>, usize>,
+    predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
 }
 
 macro_rules! encoder_methods {
@@ -200,7 +200,7 @@ fn encode_with_shorthand<T, U, M>(&mut self,
                                       variant: &U,
                                       map: M)
                                       -> Result<(), <Self as Encoder>::Error>
-        where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>,
+        where M: for<'b> Fn(&'b mut Self) -> &'b mut FxHashMap<T, usize>,
               T: Clone + Eq + Hash,
               U: Encodable
     {
@@ -1143,7 +1143,7 @@ fn encode_macro_defs(&mut self) -> LazySeq<MacroDef> {
 
 struct ImplVisitor<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    impls: FnvHashMap<DefId, Vec<DefIndex>>,
+    impls: FxHashMap<DefId, Vec<DefIndex>>,
 }
 
 impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> {
@@ -1165,7 +1165,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
         let mut visitor = ImplVisitor {
             tcx: self.tcx,
-            impls: FnvHashMap(),
+            impls: FxHashMap(),
         };
         self.tcx.map.krate().visit_all_items(&mut visitor);
 
index 0461d7ec061d4010bb206f50ccc2367e8af8e7c0..c31b209768c38518441099b4fd164beeaad9a06d 100644 (file)
 use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
 use rustc::session::search_paths::PathKind;
 use rustc::util::common;
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 
 use rustc_llvm as llvm;
 use rustc_llvm::{False, ObjectFile, mk_section_iter};
@@ -430,7 +430,7 @@ fn find_library_crate(&mut self) -> Option<Library> {
         let rlib_prefix = format!("lib{}", self.crate_name);
         let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name);
 
-        let mut candidates = FnvHashMap();
+        let mut candidates = FxHashMap();
         let mut staticlibs = vec![];
 
         // First, find all possible candidate rlibs and dylibs purely based on
@@ -469,7 +469,7 @@ fn find_library_crate(&mut self) -> Option<Library> {
 
             let hash_str = hash.to_string();
             let slot = candidates.entry(hash_str)
-                .or_insert_with(|| (FnvHashMap(), FnvHashMap()));
+                .or_insert_with(|| (FxHashMap(), FxHashMap()));
             let (ref mut rlibs, ref mut dylibs) = *slot;
             fs::canonicalize(path)
                 .map(|p| {
@@ -492,7 +492,7 @@ fn find_library_crate(&mut self) -> Option<Library> {
         // A Library candidate is created if the metadata for the set of
         // libraries corresponds to the crate id and hash criteria that this
         // search is being performed for.
-        let mut libraries = FnvHashMap();
+        let mut libraries = FxHashMap();
         for (_hash, (rlibs, dylibs)) in candidates {
             let mut slot = None;
             let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
@@ -544,7 +544,7 @@ fn find_library_crate(&mut self) -> Option<Library> {
     // be read, it is assumed that the file isn't a valid rust library (no
     // errors are emitted).
     fn extract_one(&mut self,
-                   m: FnvHashMap<PathBuf, PathKind>,
+                   m: FxHashMap<PathBuf, PathKind>,
                    flavor: CrateFlavor,
                    slot: &mut Option<(Svh, MetadataBlob)>)
                    -> Option<(PathBuf, PathKind)> {
@@ -690,8 +690,8 @@ fn find_commandline_library<'b, LOCS>(&mut self, locs: LOCS) -> Option<Library>
         // rlibs/dylibs.
         let sess = self.sess;
         let dylibname = self.dylibname();
-        let mut rlibs = FnvHashMap();
-        let mut dylibs = FnvHashMap();
+        let mut rlibs = FxHashMap();
+        let mut dylibs = FxHashMap();
         {
             let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| {
                 if !loc.exists() {
index 490f675c3d5e3595907d6d0b2d0c7698aade2f42..b75e52fd4b10d99ce80988d1178da245847d443c 100644 (file)
@@ -13,7 +13,7 @@
 use std;
 
 use rustc_const_math::{ConstMathErr, Op};
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::Idx;
 
 use build::{BlockAnd, BlockAndExtension, Builder};
@@ -190,7 +190,7 @@ fn expr_as_rvalue(&mut self,
 
                 // first process the set of fields that were provided
                 // (evaluating them in order given by user)
-                let fields_map: FnvHashMap<_, _> =
+                let fields_map: FxHashMap<_, _> =
                     fields.into_iter()
                           .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))
                           .collect();
index 727e634ef92db2051817ae03911fb73cbf9a6029..786299c370d82f964243d1790413dd3822f39f0d 100644 (file)
@@ -14,7 +14,7 @@
 //! details.
 
 use build::{BlockAnd, BlockAndExtension, Builder};
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::bitvec::BitVector;
 use rustc::middle::const_val::ConstVal;
 use rustc::ty::{AdtDef, Ty};
@@ -309,7 +309,7 @@ enum TestKind<'tcx> {
     SwitchInt {
         switch_ty: Ty<'tcx>,
         options: Vec<ConstVal>,
-        indices: FnvHashMap<ConstVal, usize>,
+        indices: FxHashMap<ConstVal, usize>,
     },
 
     // test for equality
index 5984b0f7893cdf856f8d22f9e83b11ad4743c38a..948ba7338cddb9800335e76aac567e686e05a741 100644 (file)
@@ -18,7 +18,7 @@
 use build::Builder;
 use build::matches::{Candidate, MatchPair, Test, TestKind};
 use hair::*;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::bitvec::BitVector;
 use rustc::middle::const_val::ConstVal;
 use rustc::ty::{self, Ty};
@@ -54,7 +54,7 @@ pub fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> {
                         // these maps are empty to start; cases are
                         // added below in add_cases_to_switch
                         options: vec![],
-                        indices: FnvHashMap(),
+                        indices: FxHashMap(),
                     }
                 }
             }
@@ -110,7 +110,7 @@ pub fn add_cases_to_switch<'pat>(&mut self,
                                      candidate: &Candidate<'pat, 'tcx>,
                                      switch_ty: Ty<'tcx>,
                                      options: &mut Vec<ConstVal>,
-                                     indices: &mut FnvHashMap<ConstVal, usize>)
+                                     indices: &mut FxHashMap<ConstVal, usize>)
                                      -> bool
     {
         let match_pair = match candidate.match_pairs.iter().find(|mp| mp.lvalue == *test_lvalue) {
index af8170a1b8f5504076d74df520e5e6a1c387c899..b5343975a9cdf39f1826fdba24bf4c6fa15048ab 100644 (file)
@@ -94,7 +94,7 @@
 use rustc::mir::*;
 use syntax_pos::Span;
 use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 
 pub struct Scope<'tcx> {
     /// the scope-id within the scope_auxiliary
@@ -140,7 +140,7 @@ pub struct Scope<'tcx> {
     free: Option<FreeData<'tcx>>,
 
     /// The cache for drop chain on “normal” exit into a particular BasicBlock.
-    cached_exits: FnvHashMap<(BasicBlock, CodeExtent), BasicBlock>,
+    cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>,
 }
 
 struct DropData<'tcx> {
@@ -298,7 +298,7 @@ pub fn push_scope(&mut self, extent: CodeExtent, entry: BasicBlock) {
             needs_cleanup: false,
             drops: vec![],
             free: None,
-            cached_exits: FnvHashMap()
+            cached_exits: FxHashMap()
         });
         self.scope_auxiliary.push(ScopeAuxiliary {
             extent: extent,
index d2fc8aeaa2eea743b2497e2da46a0e68bb0909fe..d6f514cfb913616b15d8176a584376529bdef3f7 100644 (file)
@@ -14,7 +14,7 @@
 use rustc::mir::*;
 use rustc::mir::transform::MirSource;
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::{Idx};
 use std::fmt::Display;
 use std::fs;
@@ -122,10 +122,10 @@ enum Annotation {
 }
 
 fn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>)
-                                -> FnvHashMap<Location, Vec<Annotation>>
+                                -> FxHashMap<Location, Vec<Annotation>>
 {
     // compute scope/entry exit annotations
-    let mut annotations = FnvHashMap();
+    let mut annotations = FxHashMap();
     if let Some(auxiliary) = auxiliary {
         for (scope_id, auxiliary) in auxiliary.iter_enumerated() {
             annotations.entry(auxiliary.dom)
@@ -166,7 +166,7 @@ fn write_basic_block(tcx: TyCtxt,
                      block: BasicBlock,
                      mir: &Mir,
                      w: &mut Write,
-                     annotations: &FnvHashMap<Location, Vec<Annotation>>)
+                     annotations: &FxHashMap<Location, Vec<Annotation>>)
                      -> io::Result<()> {
     let data = &mir[block];
 
@@ -217,7 +217,7 @@ fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {
 /// Returns the total number of variables printed.
 fn write_scope_tree(tcx: TyCtxt,
                     mir: &Mir,
-                    scope_tree: &FnvHashMap<VisibilityScope, Vec<VisibilityScope>>,
+                    scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,
                     w: &mut Write,
                     parent: VisibilityScope,
                     depth: usize)
@@ -283,7 +283,7 @@ fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     writeln!(w, " {{")?;
 
     // construct a scope tree and write it out
-    let mut scope_tree: FnvHashMap<VisibilityScope, Vec<VisibilityScope>> = FnvHashMap();
+    let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();
     for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {
         if let Some(parent) = scope_data.parent_scope {
             scope_tree.entry(parent)
index a01724d6d0e9bf9bfc862d93856533602c31e250..c4a8d34bda008c8fa6f4ef2494e793ff519a4c98 100644 (file)
@@ -14,7 +14,7 @@
 use rustc::mir::transform::{MirPass, MirSource, Pass};
 use rustc::mir::visit::{MutVisitor, Visitor};
 use rustc::ty::TyCtxt;
-use rustc::util::nodemap::FnvHashSet;
+use rustc::util::nodemap::FxHashSet;
 use rustc_data_structures::indexed_vec::Idx;
 use std::mem;
 
@@ -107,5 +107,5 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
 
 #[derive(Default)]
 struct OptimizationList {
-    and_stars: FnvHashSet<Location>,
+    and_stars: FxHashSet<Location>,
 }
index 18586715894f5042ab09819f02228f0ddcb9a229..84cf85e2fc4e6d46ccc21a8336ce068220504ad0 100644 (file)
@@ -15,7 +15,7 @@
 use rustc::hir;
 use rustc::hir::intravisit as hir_visit;
 use rustc::util::common::to_readable_str;
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 use syntax::ast::{self, NodeId, AttrId};
 use syntax::visit as ast_visit;
 use syntax_pos::Span;
@@ -34,15 +34,15 @@ struct NodeData {
 
 struct StatCollector<'k> {
     krate: Option<&'k hir::Crate>,
-    data: FnvHashMap<&'static str, NodeData>,
-    seen: FnvHashSet<Id>,
+    data: FxHashMap<&'static str, NodeData>,
+    seen: FxHashSet<Id>,
 }
 
 pub fn print_hir_stats(krate: &hir::Crate) {
     let mut collector = StatCollector {
         krate: Some(krate),
-        data: FnvHashMap(),
-        seen: FnvHashSet(),
+        data: FxHashMap(),
+        seen: FxHashSet(),
     };
     hir_visit::walk_crate(&mut collector, krate);
     collector.print("HIR STATS");
@@ -51,8 +51,8 @@ pub fn print_hir_stats(krate: &hir::Crate) {
 pub fn print_ast_stats(krate: &ast::Crate, title: &str) {
     let mut collector = StatCollector {
         krate: None,
-        data: FnvHashMap(),
-        seen: FnvHashSet(),
+        data: FxHashMap(),
+        seen: FxHashSet(),
     };
     ast_visit::walk_crate(&mut collector, krate);
     collector.print(title);
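
Both import paths seen in these hunks (rustc_data_structures::fx and rustc::util::nodemap) expose the same aliases, which is why files like hir_stats.rs only need the use line and the names changed. A sketch of how such aliases are typically declared (names assumed, not copied from the compiler), showing why the swap amounts to renaming the hasher parameter:

use std::collections::{HashMap, HashSet};
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

// The whole change boils down to swapping the hasher behind two type aliases.
pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxHashSet<T> = HashSet<T, BuildHasherDefault<FxHasher>>;

fn main() {
    // Callers are untouched by the rename: the map API is std's HashMap API.
    let mut data: FxHashMap<&'static str, u64> = FxHashMap::default();
    data.insert("nodes", 42);
    let mut ids: FxHashSet<u64> = FxHashSet::default();
    ids.insert(7);
    assert_eq!(data.get("nodes"), Some(&42));
    assert!(ids.contains(&7));
}
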
index d90fe769caf63a1f89fe863d20d20b718439fa8b..1e998a2a4d5b0e8d9532fa4a0ac0f9634dad97d9 100644 (file)
@@ -25,7 +25,7 @@
 use rustc::hir::def::*;
 use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
 use rustc::ty;
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 
 use std::cell::Cell;
 use std::rc::Rc;
@@ -539,7 +539,7 @@ fn import_extern_crate_macros(&mut self,
                     self.invocations.insert(mark, invocation);
                 }
 
-                let mut macros: FnvHashMap<_, _> = macros.into_iter().map(|mut def| {
+                let mut macros: FxHashMap<_, _> = macros.into_iter().map(|mut def| {
                     def.body = mark_tts(&def.body, mark);
                     let ext = macro_rules::compile(&self.session.parse_sess, &def);
                     (def.ident.name, (def, Rc::new(ext)))
index e7d83a64e03eb97b03530a896346574442e093f9..ef14153232b9fd8864290845b94f8f9b54e672a1 100644 (file)
@@ -51,7 +51,7 @@
 use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
 use rustc::ty;
 use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
-use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet};
 
 use syntax::ext::hygiene::{Mark, SyntaxContext};
 use syntax::ast::{self, FloatTy};
@@ -498,7 +498,7 @@ struct BindingInfo {
 }
 
 // Map from the name in a pattern to its binding mode.
-type BindingMap = FnvHashMap<Ident, BindingInfo>;
+type BindingMap = FxHashMap<Ident, BindingInfo>;
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 enum PatternSource {
@@ -703,14 +703,14 @@ enum ModulePrefixResult<'a> {
 /// One local scope.
 #[derive(Debug)]
 struct Rib<'a> {
-    bindings: FnvHashMap<Ident, Def>,
+    bindings: FxHashMap<Ident, Def>,
     kind: RibKind<'a>,
 }
 
 impl<'a> Rib<'a> {
     fn new(kind: RibKind<'a>) -> Rib<'a> {
         Rib {
-            bindings: FnvHashMap(),
+            bindings: FxHashMap(),
             kind: kind,
         }
     }
@@ -769,7 +769,7 @@ pub struct ModuleS<'a> {
     // is the NodeId of the local `extern crate` item (otherwise, `extern_crate_id` is None).
     extern_crate_id: Option<NodeId>,
 
-    resolutions: RefCell<FnvHashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>,
+    resolutions: RefCell<FxHashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>,
 
     no_implicit_prelude: bool,
 
@@ -794,7 +794,7 @@ fn new(parent: Option<Module<'a>>, kind: ModuleKind) -> Self {
             kind: kind,
             normal_ancestor_id: None,
             extern_crate_id: None,
-            resolutions: RefCell::new(FnvHashMap()),
+            resolutions: RefCell::new(FxHashMap()),
             no_implicit_prelude: false,
             glob_importers: RefCell::new(Vec::new()),
             globs: RefCell::new((Vec::new())),
@@ -950,12 +950,12 @@ fn is_importable(&self) -> bool {
 
 /// Interns the names of the primitive types.
 struct PrimitiveTypeTable {
-    primitive_types: FnvHashMap<Name, PrimTy>,
+    primitive_types: FxHashMap<Name, PrimTy>,
 }
 
 impl PrimitiveTypeTable {
     fn new() -> PrimitiveTypeTable {
-        let mut table = PrimitiveTypeTable { primitive_types: FnvHashMap() };
+        let mut table = PrimitiveTypeTable { primitive_types: FxHashMap() };
 
         table.intern("bool", TyBool);
         table.intern("char", TyChar);
@@ -989,17 +989,17 @@ pub struct Resolver<'a> {
 
     // Maps the node id of a statement to the expansions of the `macro_rules!`s
     // immediately above the statement (if appropriate).
-    macros_at_scope: FnvHashMap<NodeId, Vec<Mark>>,
+    macros_at_scope: FxHashMap<NodeId, Vec<Mark>>,
 
     graph_root: Module<'a>,
 
     prelude: Option<Module<'a>>,
 
-    trait_item_map: FnvHashMap<(Name, DefId), bool /* is static method? */>,
+    trait_item_map: FxHashMap<(Name, DefId), bool /* is static method? */>,
 
     // Names of fields of an item `DefId` accessible with dot syntax.
     // Used for hints during error reporting.
-    field_names: FnvHashMap<DefId, Vec<Name>>,
+    field_names: FxHashMap<DefId, Vec<Name>>,
 
     // All imports known to succeed or fail.
     determined_imports: Vec<&'a ImportDirective<'a>>,
@@ -1061,8 +1061,8 @@ pub struct Resolver<'a> {
     // all imports, but only glob imports are actually interesting).
     pub glob_map: GlobMap,
 
-    used_imports: FnvHashSet<(NodeId, Namespace)>,
-    used_crates: FnvHashSet<CrateNum>,
+    used_imports: FxHashSet<(NodeId, Namespace)>,
+    used_crates: FxHashSet<CrateNum>,
     pub maybe_unused_trait_imports: NodeSet,
 
     privacy_errors: Vec<PrivacyError<'a>>,
@@ -1075,12 +1075,12 @@ pub struct Resolver<'a> {
 
     pub exported_macros: Vec<ast::MacroDef>,
     crate_loader: &'a mut CrateLoader,
-    macro_names: FnvHashSet<Name>,
-    builtin_macros: FnvHashMap<Name, Rc<SyntaxExtension>>,
+    macro_names: FxHashSet<Name>,
+    builtin_macros: FxHashMap<Name, Rc<SyntaxExtension>>,
     lexical_macro_resolutions: Vec<(Name, LegacyScope<'a>)>,
 
     // Maps the `Mark` of an expansion to its containing module or block.
-    invocations: FnvHashMap<Mark, &'a InvocationData<'a>>,
+    invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
 }
 
 pub struct ResolverArenas<'a> {
@@ -1206,7 +1206,7 @@ pub fn new(session: &'a Session,
         let mut definitions = Definitions::new();
         DefCollector::new(&mut definitions).collect_root();
 
-        let mut invocations = FnvHashMap();
+        let mut invocations = FxHashMap();
         invocations.insert(Mark::root(),
                            arenas.alloc_invocation_data(InvocationData::root(graph_root)));
 
@@ -1214,15 +1214,15 @@ pub fn new(session: &'a Session,
             session: session,
 
             definitions: definitions,
-            macros_at_scope: FnvHashMap(),
+            macros_at_scope: FxHashMap(),
 
             // The outermost module has def ID 0; this is not reflected in the
             // AST.
             graph_root: graph_root,
             prelude: None,
 
-            trait_item_map: FnvHashMap(),
-            field_names: FnvHashMap(),
+            trait_item_map: FxHashMap(),
+            field_names: FxHashMap(),
 
             determined_imports: Vec::new(),
             indeterminate_imports: Vec::new(),
@@ -1248,8 +1248,8 @@ pub fn new(session: &'a Session,
             make_glob_map: make_glob_map == MakeGlobMap::Yes,
             glob_map: NodeMap(),
 
-            used_imports: FnvHashSet(),
-            used_crates: FnvHashSet(),
+            used_imports: FxHashSet(),
+            used_crates: FxHashSet(),
             maybe_unused_trait_imports: NodeSet(),
 
             privacy_errors: Vec::new(),
@@ -1266,8 +1266,8 @@ pub fn new(session: &'a Session,
 
             exported_macros: Vec::new(),
             crate_loader: crate_loader,
-            macro_names: FnvHashSet(),
-            builtin_macros: FnvHashMap(),
+            macro_names: FxHashSet(),
+            builtin_macros: FxHashMap(),
             lexical_macro_resolutions: Vec::new(),
             invocations: invocations,
         }
@@ -1340,7 +1340,7 @@ fn record_use(&mut self, name: Name, ns: Namespace, binding: &'a NameBinding<'a>
 
     fn add_to_glob_map(&mut self, id: NodeId, name: Name) {
         if self.make_glob_map {
-            self.glob_map.entry(id).or_insert_with(FnvHashSet).insert(name);
+            self.glob_map.entry(id).or_insert_with(FxHashSet).insert(name);
         }
     }
 
@@ -1803,7 +1803,7 @@ fn with_type_parameter_rib<'b, F>(&'b mut self, type_parameters: TypeParameters<
         match type_parameters {
             HasTypeParameters(generics, rib_kind) => {
                 let mut function_type_rib = Rib::new(rib_kind);
-                let mut seen_bindings = FnvHashMap();
+                let mut seen_bindings = FxHashMap();
                 for type_parameter in &generics.ty_params {
                     let name = type_parameter.ident.name;
                     debug!("with_type_parameter_rib: {}", type_parameter.id);
@@ -1867,7 +1867,7 @@ fn resolve_function(&mut self,
         self.label_ribs.push(Rib::new(rib_kind));
 
         // Add each argument to the rib.
-        let mut bindings_list = FnvHashMap();
+        let mut bindings_list = FxHashMap();
         for argument in &declaration.inputs {
             self.resolve_pattern(&argument.pat, PatternSource::FnParam, &mut bindings_list);
 
@@ -2069,7 +2069,7 @@ fn resolve_local(&mut self, local: &Local) {
         walk_list!(self, visit_expr, &local.init);
 
         // Resolve the pattern.
-        self.resolve_pattern(&local.pat, PatternSource::Let, &mut FnvHashMap());
+        self.resolve_pattern(&local.pat, PatternSource::Let, &mut FxHashMap());
     }
 
     // build a map from pattern identifiers to binding-info's.
@@ -2077,7 +2077,7 @@ fn resolve_local(&mut self, local: &Local) {
     // that expands into an or-pattern where one 'x' was from the
     // user and one 'x' came from the macro.
     fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap {
-        let mut binding_map = FnvHashMap();
+        let mut binding_map = FxHashMap();
 
         pat.walk(&mut |pat| {
             if let PatKind::Ident(binding_mode, ident, ref sub_pat) = pat.node {
@@ -2137,7 +2137,7 @@ fn check_consistent_bindings(&mut self, arm: &Arm) {
     fn resolve_arm(&mut self, arm: &Arm) {
         self.value_ribs.push(Rib::new(NormalRibKind));
 
-        let mut bindings_list = FnvHashMap();
+        let mut bindings_list = FxHashMap();
         for pattern in &arm.pats {
             self.resolve_pattern(&pattern, PatternSource::Match, &mut bindings_list);
         }
@@ -2278,7 +2278,7 @@ fn fresh_binding(&mut self,
                      pat_id: NodeId,
                      outer_pat_id: NodeId,
                      pat_src: PatternSource,
-                     bindings: &mut FnvHashMap<Ident, NodeId>)
+                     bindings: &mut FxHashMap<Ident, NodeId>)
                      -> PathResolution {
         // Add the binding to the local ribs, if it
         // doesn't already exist in the bindings map. (We
@@ -2391,7 +2391,7 @@ fn resolve_pattern(&mut self,
                        pat_src: PatternSource,
                        // Maps idents to the node ID for the
                        // outermost pattern that binds them.
-                       bindings: &mut FnvHashMap<Ident, NodeId>) {
+                       bindings: &mut FxHashMap<Ident, NodeId>) {
         // Visit all direct subpatterns of this pattern.
         let outer_pat_id = pat.id;
         pat.walk(&mut |pat| {
@@ -3048,7 +3048,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                 self.visit_expr(subexpression);
 
                 self.value_ribs.push(Rib::new(NormalRibKind));
-                self.resolve_pattern(pattern, PatternSource::IfLet, &mut FnvHashMap());
+                self.resolve_pattern(pattern, PatternSource::IfLet, &mut FxHashMap());
                 self.visit_block(if_block);
                 self.value_ribs.pop();
 
@@ -3065,7 +3065,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
             ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => {
                 self.visit_expr(subexpression);
                 self.value_ribs.push(Rib::new(NormalRibKind));
-                self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FnvHashMap());
+                self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FxHashMap());
 
                 self.resolve_labeled_block(label, expr.id, block);
 
@@ -3075,7 +3075,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
             ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => {
                 self.visit_expr(subexpression);
                 self.value_ribs.push(Rib::new(NormalRibKind));
-                self.resolve_pattern(pattern, PatternSource::For, &mut FnvHashMap());
+                self.resolve_pattern(pattern, PatternSource::For, &mut FxHashMap());
 
                 self.resolve_labeled_block(label, expr.id, block);
 
@@ -3337,7 +3337,7 @@ fn is_accessible_from(&self, vis: ty::Visibility, module: Module<'a>) -> bool {
 
     fn report_errors(&mut self) {
         self.report_shadowing_errors();
-        let mut reported_spans = FnvHashSet();
+        let mut reported_spans = FxHashSet();
 
         for &AmbiguityError { span, name, b1, b2 } in &self.ambiguity_errors {
             if !reported_spans.insert(span) { continue }
@@ -3369,7 +3369,7 @@ fn report_shadowing_errors(&mut self) {
             self.resolve_macro_name(scope, name);
         }
 
-        let mut reported_errors = FnvHashSet();
+        let mut reported_errors = FxHashSet();
         for binding in replace(&mut self.disallowed_shadowing, Vec::new()) {
             if self.resolve_macro_name(binding.parent, binding.name).is_some() &&
                reported_errors.insert((binding.name, binding.span)) {
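
One detail from the resolver hunks: glob_map.entry(id).or_insert_with(FxHashSet) passes the compiler-internal constructor itself as the closure. With the standalone rustc-hash crate (an assumption here, with invented keys), the same idiom is spelled with ::default:

use rustc_hash::{FxHashMap, FxHashSet};

fn main() {
    let mut glob_map: FxHashMap<u32, FxHashSet<&str>> = FxHashMap::default();

    // Standalone equivalent of `glob_map.entry(id).or_insert_with(FxHashSet).insert(name)`.
    glob_map.entry(1).or_insert_with(FxHashSet::default).insert("Name");
    glob_map.entry(1).or_insert_with(FxHashSet::default).insert("Other");

    assert_eq!(glob_map[&1].len(), 2);
}
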
index bd15035b8a94ed26e5dc9b37d56d082e2c864634..d50669272f726bf0be560f5839cd2975d54b7097 100644 (file)
@@ -79,7 +79,7 @@
 use type_of;
 use value::Value;
 use Disr;
-use util::nodemap::{NodeSet, FnvHashMap, FnvHashSet};
+use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
 
 use arena::TypedArena;
 use libc::c_uint;
@@ -1318,7 +1318,7 @@ enum MetadataKind {
 fn internalize_symbols<'a, 'tcx>(sess: &Session,
                                  ccxs: &CrateContextList<'a, 'tcx>,
                                  symbol_map: &SymbolMap<'tcx>,
-                                 reachable: &FnvHashSet<&str>) {
+                                 reachable: &FxHashSet<&str>) {
     let scx = ccxs.shared();
     let tcx = scx.tcx();
 
@@ -1332,7 +1332,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
     // 'unsafe' because we are holding on to CStr's from the LLVM module within
     // this block.
     unsafe {
-        let mut referenced_somewhere = FnvHashSet();
+        let mut referenced_somewhere = FxHashSet();
 
         // Collect all symbols that need to stay externally visible because they
         // are referenced via a declaration in some other codegen unit.
@@ -1353,7 +1353,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
 
         // Also collect all symbols for which we cannot adjust linkage, because
         // it is fixed by some directive in the source code (e.g. #[no_mangle]).
-        let linkage_fixed_explicitly: FnvHashSet<_> = scx
+        let linkage_fixed_explicitly: FxHashSet<_> = scx
             .translation_items()
             .borrow()
             .iter()
@@ -1862,7 +1862,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
     }
 
     if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
-        let mut item_to_cgus = FnvHashMap();
+        let mut item_to_cgus = FxHashMap();
 
         for cgu in &codegen_units {
             for (&trans_item, &linkage) in cgu.items() {
index 8556e95903c18b51b055a0d9af6aa4c53bbc06a2..0480bb82a998e902396d78f98b282d1f0ec00875 100644 (file)
@@ -19,7 +19,7 @@
 use machine::llalign_of_pref;
 use type_::Type;
 use value::Value;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 use libc::{c_uint, c_char};
 
 use std::borrow::Cow;
@@ -62,7 +62,7 @@ pub fn count_insn(&self, category: &str) {
                 // Build version of path with cycles removed.
 
                 // Pass 1: scan table mapping str -> rightmost pos.
-                let mut mm = FnvHashMap();
+                let mut mm = FxHashMap();
                 let len = v.len();
                 let mut i = 0;
                 while i < len {
index a439d415ede151f16e2f7e1418498e89b4f5a69f..548554af9727f933b3fb2b8242be3bf99c73ab33 100644 (file)
 use common::{fulfill_obligation, type_is_sized};
 use glue::{self, DropGlueKind};
 use monomorphize::{self, Instance};
-use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap};
+use util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
 
 use trans_item::{TransItem, type_to_string, def_id_to_string};
 
@@ -228,7 +228,7 @@ pub struct InliningMap<'tcx> {
     // that are potentially inlined by LLVM into the source.
     // The two numbers in the tuple are the start (inclusive) and
     // end index (exclusive) within the `targets` vecs.
-    index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
+    index: FxHashMap<TransItem<'tcx>, (usize, usize)>,
     targets: Vec<TransItem<'tcx>>,
 }
 
@@ -236,7 +236,7 @@ impl<'tcx> InliningMap<'tcx> {
 
     fn new() -> InliningMap<'tcx> {
         InliningMap {
-            index: FnvHashMap(),
+            index: FxHashMap(),
             targets: Vec::new(),
         }
     }
@@ -269,7 +269,7 @@ pub fn with_inlining_candidates<F>(&self, source: TransItem<'tcx>, mut f: F)
 
 pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                                                  mode: TransItemCollectionMode)
-                                                 -> (FnvHashSet<TransItem<'tcx>>,
+                                                 -> (FxHashSet<TransItem<'tcx>>,
                                                      InliningMap<'tcx>) {
     // We are not tracking dependencies of this pass as it has to be re-executed
     // every time no matter what.
@@ -277,7 +277,7 @@ pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 't
         let roots = collect_roots(scx, mode);
 
         debug!("Building translation item graph, beginning at roots");
-        let mut visited = FnvHashSet();
+        let mut visited = FxHashSet();
         let mut recursion_depths = DefIdMap();
         let mut inlining_map = InliningMap::new();
 
@@ -318,7 +318,7 @@ fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
 // Collect all monomorphized translation items reachable from `starting_point`
 fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
                                    starting_point: TransItem<'tcx>,
-                                   visited: &mut FnvHashSet<TransItem<'tcx>>,
+                                   visited: &mut FxHashSet<TransItem<'tcx>>,
                                    recursion_depths: &mut DefIdMap<usize>,
                                    inlining_map: &mut InliningMap<'tcx>) {
     if !visited.insert(starting_point.clone()) {
@@ -1179,9 +1179,9 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, '
 
             if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
                 let callee_substs = tcx.erase_regions(&trait_ref.substs);
-                let overridden_methods: FnvHashSet<_> = items.iter()
-                                                             .map(|item| item.name)
-                                                             .collect();
+                let overridden_methods: FxHashSet<_> = items.iter()
+                                                            .map(|item| item.name)
+                                                            .collect();
                 for method in tcx.provided_trait_methods(trait_ref.def_id) {
                     if overridden_methods.contains(&method.name) {
                         continue;
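
collect_items_rec above threads a visited FxHashSet through a recursive walk and relies on insert() returning false for already-seen items. A toy standalone version of that idiom (rustc-hash assumed, graph invented for illustration):

use rustc_hash::{FxHashMap, FxHashSet};

fn collect(node: u32, edges: &FxHashMap<u32, Vec<u32>>, visited: &mut FxHashSet<u32>) {
    // insert() returns false when the node was already visited, cutting cycles.
    if !visited.insert(node) {
        return;
    }
    for &next in edges.get(&node).into_iter().flatten() {
        collect(next, edges, visited);
    }
}

fn main() {
    let mut edges: FxHashMap<u32, Vec<u32>> = FxHashMap::default();
    edges.insert(1, vec![2, 3]);
    edges.insert(2, vec![1]); // cycle back to 1
    let mut visited = FxHashSet::default();
    collect(1, &edges, &mut visited);
    assert_eq!(visited.len(), 3);
}
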
index fc75b1018ec35ef4bdd73b5e3718eac19879a3f5..264d4940c17f91d4a4ff3c7c3b5e9b09773587ab 100644 (file)
@@ -32,7 +32,7 @@
 use session::Session;
 use session::config;
 use symbol_map::SymbolMap;
-use util::nodemap::{NodeSet, DefIdMap, FnvHashMap, FnvHashSet};
+use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet};
 
 use std::ffi::{CStr, CString};
 use std::cell::{Cell, RefCell};
@@ -52,7 +52,7 @@ pub struct Stats {
     pub n_inlines: Cell<usize>,
     pub n_closures: Cell<usize>,
     pub n_llvm_insns: Cell<usize>,
-    pub llvm_insns: RefCell<FnvHashMap<String, usize>>,
+    pub llvm_insns: RefCell<FxHashMap<String, usize>>,
     // (ident, llvm-instructions)
     pub fn_stats: RefCell<Vec<(String, usize)> >,
 }
@@ -74,7 +74,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
 
     use_dll_storage_attrs: bool,
 
-    translation_items: RefCell<FnvHashSet<TransItem<'tcx>>>,
+    translation_items: RefCell<FxHashSet<TransItem<'tcx>>>,
     trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
     project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
 }
@@ -89,15 +89,15 @@ pub struct LocalCrateContext<'tcx> {
     previous_work_product: Option<WorkProduct>,
     tn: TypeNames, // FIXME: This seems to be largely unused.
     codegen_unit: CodegenUnit<'tcx>,
-    needs_unwind_cleanup_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
-    fn_pointer_shims: RefCell<FnvHashMap<Ty<'tcx>, ValueRef>>,
-    drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
+    needs_unwind_cleanup_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
+    fn_pointer_shims: RefCell<FxHashMap<Ty<'tcx>, ValueRef>>,
+    drop_glues: RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
     /// Cache instances of monomorphic and polymorphic items
-    instances: RefCell<FnvHashMap<Instance<'tcx>, ValueRef>>,
+    instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
     /// Cache generated vtables
-    vtables: RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, ValueRef>>,
+    vtables: RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, ValueRef>>,
     /// Cache of constant strings,
-    const_cstr_cache: RefCell<FnvHashMap<InternedString, ValueRef>>,
+    const_cstr_cache: RefCell<FxHashMap<InternedString, ValueRef>>,
 
     /// Reverse-direction for const ptrs cast from globals.
     /// Key is a ValueRef holding a *T,
@@ -107,24 +107,24 @@ pub struct LocalCrateContext<'tcx> {
     /// when we ptrcast, and we have to ptrcast during translation
     /// of a [T] const because we form a slice, a (*T,usize) pair, not
     /// a pointer to an LLVM array type. Similar for trait objects.
-    const_unsized: RefCell<FnvHashMap<ValueRef, ValueRef>>,
+    const_unsized: RefCell<FxHashMap<ValueRef, ValueRef>>,
 
     /// Cache of emitted const globals (value -> global)
-    const_globals: RefCell<FnvHashMap<ValueRef, ValueRef>>,
+    const_globals: RefCell<FxHashMap<ValueRef, ValueRef>>,
 
     /// Cache of emitted const values
-    const_values: RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,
+    const_values: RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,
 
     /// Cache of external const values
     extern_const_values: RefCell<DefIdMap<ValueRef>>,
 
     /// Mapping from static definitions to their DefId's.
-    statics: RefCell<FnvHashMap<ValueRef, DefId>>,
+    statics: RefCell<FxHashMap<ValueRef, DefId>>,
 
-    impl_method_cache: RefCell<FnvHashMap<(DefId, ast::Name), DefId>>,
+    impl_method_cache: RefCell<FxHashMap<(DefId, ast::Name), DefId>>,
 
     /// Cache of closure wrappers for bare fn's.
-    closure_bare_wrapper_cache: RefCell<FnvHashMap<ValueRef, ValueRef>>,
+    closure_bare_wrapper_cache: RefCell<FxHashMap<ValueRef, ValueRef>>,
 
     /// List of globals for static variables which need to be passed to the
     /// LLVM function ReplaceAllUsesWith (RAUW) when translation is complete.
@@ -132,15 +132,15 @@ pub struct LocalCrateContext<'tcx> {
     /// to constants.)
     statics_to_rauw: RefCell<Vec<(ValueRef, ValueRef)>>,
 
-    lltypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>,
-    llsizingtypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>,
-    type_hashcodes: RefCell<FnvHashMap<Ty<'tcx>, String>>,
+    lltypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
+    llsizingtypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
+    type_hashcodes: RefCell<FxHashMap<Ty<'tcx>, String>>,
     int_type: Type,
     opaque_vec_type: Type,
     builder: BuilderRef_res,
 
     /// Holds the LLVM values for closure IDs.
-    closure_vals: RefCell<FnvHashMap<Instance<'tcx>, ValueRef>>,
+    closure_vals: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
 
     dbg_cx: Option<debuginfo::CrateDebugContext<'tcx>>,
 
@@ -148,7 +148,7 @@ pub struct LocalCrateContext<'tcx> {
     eh_unwind_resume: Cell<Option<ValueRef>>,
     rust_try_fn: Cell<Option<ValueRef>>,
 
-    intrinsics: RefCell<FnvHashMap<&'static str, ValueRef>>,
+    intrinsics: RefCell<FxHashMap<&'static str, ValueRef>>,
 
     /// Number of LLVM instructions translated into this `LocalCrateContext`.
     /// This is used to perform some basic load-balancing to keep all LLVM
@@ -502,12 +502,12 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
                 n_inlines: Cell::new(0),
                 n_closures: Cell::new(0),
                 n_llvm_insns: Cell::new(0),
-                llvm_insns: RefCell::new(FnvHashMap()),
+                llvm_insns: RefCell::new(FxHashMap()),
                 fn_stats: RefCell::new(Vec::new()),
             },
             check_overflow: check_overflow,
             use_dll_storage_attrs: use_dll_storage_attrs,
-            translation_items: RefCell::new(FnvHashSet()),
+            translation_items: RefCell::new(FxHashSet()),
             trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
             project_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
         }
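
Many of the fields touched in this file (lltypes, llsizingtypes, instances, and so on) wrap FxHashMap in RefCell to get interior-mutable caches behind shared references. A minimal sketch of that shape, with rustc-hash assumed and the struct and field names invented for illustration:

use std::cell::RefCell;

use rustc_hash::FxHashMap;

struct TypeCache {
    // Shared, interior-mutable cache, in the spirit of the lltypes field above.
    lltypes: RefCell<FxHashMap<String, u32>>,
}

impl TypeCache {
    fn new() -> TypeCache {
        TypeCache { lltypes: RefCell::new(FxHashMap::default()) }
    }

    // Return the cached value, computing and storing it on a miss.
    fn get_or_insert<F: FnOnce() -> u32>(&self, key: &str, compute: F) -> u32 {
        if let Some(&v) = self.lltypes.borrow().get(key) {
            return v;
        }
        let v = compute();
        self.lltypes.borrow_mut().insert(key.to_string(), v);
        v
    }
}

fn main() {
    let cache = TypeCache::new();
    assert_eq!(cache.get_or_insert("i32", || 32), 32);
    assert_eq!(cache.get_or_insert("i32", || 99), 32); // cached value wins
}
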
@@ -557,7 +557,7 @@ pub fn use_dll_storage_attrs(&self) -> bool {
         self.use_dll_storage_attrs
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
+    pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
         &self.translation_items
     }
 
@@ -612,32 +612,32 @@ fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
                 previous_work_product: previous_work_product,
                 codegen_unit: codegen_unit,
                 tn: TypeNames::new(),
-                needs_unwind_cleanup_cache: RefCell::new(FnvHashMap()),
-                fn_pointer_shims: RefCell::new(FnvHashMap()),
-                drop_glues: RefCell::new(FnvHashMap()),
-                instances: RefCell::new(FnvHashMap()),
-                vtables: RefCell::new(FnvHashMap()),
-                const_cstr_cache: RefCell::new(FnvHashMap()),
-                const_unsized: RefCell::new(FnvHashMap()),
-                const_globals: RefCell::new(FnvHashMap()),
-                const_values: RefCell::new(FnvHashMap()),
+                needs_unwind_cleanup_cache: RefCell::new(FxHashMap()),
+                fn_pointer_shims: RefCell::new(FxHashMap()),
+                drop_glues: RefCell::new(FxHashMap()),
+                instances: RefCell::new(FxHashMap()),
+                vtables: RefCell::new(FxHashMap()),
+                const_cstr_cache: RefCell::new(FxHashMap()),
+                const_unsized: RefCell::new(FxHashMap()),
+                const_globals: RefCell::new(FxHashMap()),
+                const_values: RefCell::new(FxHashMap()),
                 extern_const_values: RefCell::new(DefIdMap()),
-                statics: RefCell::new(FnvHashMap()),
-                impl_method_cache: RefCell::new(FnvHashMap()),
-                closure_bare_wrapper_cache: RefCell::new(FnvHashMap()),
+                statics: RefCell::new(FxHashMap()),
+                impl_method_cache: RefCell::new(FxHashMap()),
+                closure_bare_wrapper_cache: RefCell::new(FxHashMap()),
                 statics_to_rauw: RefCell::new(Vec::new()),
-                lltypes: RefCell::new(FnvHashMap()),
-                llsizingtypes: RefCell::new(FnvHashMap()),
-                type_hashcodes: RefCell::new(FnvHashMap()),
+                lltypes: RefCell::new(FxHashMap()),
+                llsizingtypes: RefCell::new(FxHashMap()),
+                type_hashcodes: RefCell::new(FxHashMap()),
                 int_type: Type::from_ref(ptr::null_mut()),
                 opaque_vec_type: Type::from_ref(ptr::null_mut()),
                 builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),
-                closure_vals: RefCell::new(FnvHashMap()),
+                closure_vals: RefCell::new(FxHashMap()),
                 dbg_cx: dbg_cx,
                 eh_personality: Cell::new(None),
                 eh_unwind_resume: Cell::new(None),
                 rust_try_fn: Cell::new(None),
-                intrinsics: RefCell::new(FnvHashMap()),
+                intrinsics: RefCell::new(FxHashMap()),
                 n_llvm_insns: Cell::new(0),
                 type_of_depth: Cell::new(0),
                 symbol_map: symbol_map,
@@ -794,16 +794,16 @@ pub fn link_meta<'a>(&'a self) -> &'a LinkMeta {
         &self.shared.link_meta
     }
 
-    pub fn needs_unwind_cleanup_cache(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, bool>> {
+    pub fn needs_unwind_cleanup_cache(&self) -> &RefCell<FxHashMap<Ty<'tcx>, bool>> {
         &self.local().needs_unwind_cleanup_cache
     }
 
-    pub fn fn_pointer_shims(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, ValueRef>> {
+    pub fn fn_pointer_shims(&self) -> &RefCell<FxHashMap<Ty<'tcx>, ValueRef>> {
         &self.local().fn_pointer_shims
     }
 
     pub fn drop_glues<'a>(&'a self)
-                          -> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
+                          -> &'a RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
         &self.local().drop_glues
     }
 
@@ -815,28 +815,28 @@ pub fn defid_for_inlined_node<'a>(&'a self, node_id: ast::NodeId) -> Option<DefI
         self.sess().cstore.defid_for_inlined_node(node_id)
     }
 
-    pub fn instances<'a>(&'a self) -> &'a RefCell<FnvHashMap<Instance<'tcx>, ValueRef>> {
+    pub fn instances<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
         &self.local().instances
     }
 
-    pub fn vtables<'a>(&'a self) -> &'a RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, ValueRef>> {
+    pub fn vtables<'a>(&'a self) -> &'a RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, ValueRef>> {
         &self.local().vtables
     }
 
-    pub fn const_cstr_cache<'a>(&'a self) -> &'a RefCell<FnvHashMap<InternedString, ValueRef>> {
+    pub fn const_cstr_cache<'a>(&'a self) -> &'a RefCell<FxHashMap<InternedString, ValueRef>> {
         &self.local().const_cstr_cache
     }
 
-    pub fn const_unsized<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> {
+    pub fn const_unsized<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
         &self.local().const_unsized
     }
 
-    pub fn const_globals<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> {
+    pub fn const_globals<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
         &self.local().const_globals
     }
 
-    pub fn const_values<'a>(&'a self) -> &'a RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
-                                                                ValueRef>> {
+    pub fn const_values<'a>(&'a self) -> &'a RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
+                                                               ValueRef>> {
         &self.local().const_values
     }
 
@@ -844,16 +844,16 @@ pub fn extern_const_values<'a>(&'a self) -> &'a RefCell<DefIdMap<ValueRef>> {
         &self.local().extern_const_values
     }
 
-    pub fn statics<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, DefId>> {
+    pub fn statics<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, DefId>> {
         &self.local().statics
     }
 
     pub fn impl_method_cache<'a>(&'a self)
-            -> &'a RefCell<FnvHashMap<(DefId, ast::Name), DefId>> {
+            -> &'a RefCell<FxHashMap<(DefId, ast::Name), DefId>> {
         &self.local().impl_method_cache
     }
 
-    pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> {
+    pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
         &self.local().closure_bare_wrapper_cache
     }
 
@@ -861,15 +861,15 @@ pub fn statics_to_rauw<'a>(&'a self) -> &'a RefCell<Vec<(ValueRef, ValueRef)>> {
         &self.local().statics_to_rauw
     }
 
-    pub fn lltypes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, Type>> {
+    pub fn lltypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
         &self.local().lltypes
     }
 
-    pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, Type>> {
+    pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
         &self.local().llsizingtypes
     }
 
-    pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, String>> {
+    pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, String>> {
         &self.local().type_hashcodes
     }
 
@@ -885,7 +885,7 @@ pub fn opaque_vec_type(&self) -> Type {
         self.local().opaque_vec_type
     }
 
-    pub fn closure_vals<'a>(&'a self) -> &'a RefCell<FnvHashMap<Instance<'tcx>, ValueRef>> {
+    pub fn closure_vals<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
         &self.local().closure_vals
     }
 
@@ -905,7 +905,7 @@ pub fn rust_try_fn<'a>(&'a self) -> &'a Cell<Option<ValueRef>> {
         &self.local().rust_try_fn
     }
 
-    fn intrinsics<'a>(&'a self) -> &'a RefCell<FnvHashMap<&'static str, ValueRef>> {
+    fn intrinsics<'a>(&'a self) -> &'a RefCell<FxHashMap<&'static str, ValueRef>> {
         &self.local().intrinsics
     }
 
@@ -958,7 +958,7 @@ pub fn symbol_map(&self) -> &SymbolMap<'tcx> {
         &*self.local().symbol_map
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
+    pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
         &self.shared.translation_items
     }
 
index 4bb34850e0870c0cce998ceb9a57acb54ac81dbf..e81461b662172ecf4eaadac326b09fc59bc0b0b7 100644 (file)
@@ -36,7 +36,7 @@
 use type_::Type;
 use rustc::ty::{self, AdtKind, Ty, layout};
 use session::config;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 use util::common::path2cstr;
 
 use libc::{c_uint, c_longlong};
@@ -84,20 +84,20 @@ pub struct TypeMap<'tcx> {
     // The UniqueTypeIds created so far
     unique_id_interner: Interner,
     // A map from UniqueTypeId to debuginfo metadata for that type. This is a 1:1 mapping.
-    unique_id_to_metadata: FnvHashMap<UniqueTypeId, DIType>,
+    unique_id_to_metadata: FxHashMap<UniqueTypeId, DIType>,
     // A map from types to debuginfo metadata. This is a N:1 mapping.
-    type_to_metadata: FnvHashMap<Ty<'tcx>, DIType>,
+    type_to_metadata: FxHashMap<Ty<'tcx>, DIType>,
     // A map from types to UniqueTypeId. This is a N:1 mapping.
-    type_to_unique_id: FnvHashMap<Ty<'tcx>, UniqueTypeId>
+    type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>
 }
 
 impl<'tcx> TypeMap<'tcx> {
     pub fn new() -> TypeMap<'tcx> {
         TypeMap {
             unique_id_interner: Interner::new(),
-            type_to_metadata: FnvHashMap(),
-            unique_id_to_metadata: FnvHashMap(),
-            type_to_unique_id: FnvHashMap(),
+            type_to_metadata: FxHashMap(),
+            unique_id_to_metadata: FxHashMap(),
+            type_to_unique_id: FxHashMap(),
         }
     }
 
index 3bc5f4f3dbc4ba9e4dedc0ba6009d334ef677dc8..62fb40cc389c24393ed41146e515824e188e4813 100644 (file)
@@ -34,7 +34,7 @@
 use rustc::ty::{self, Ty};
 use rustc::mir;
 use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
-use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet};
+use util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
 
 use libc::c_uint;
 use std::cell::{Cell, RefCell};
@@ -68,15 +68,15 @@ pub struct CrateDebugContext<'tcx> {
     llcontext: ContextRef,
     builder: DIBuilderRef,
     current_debug_location: Cell<InternalDebugLocation>,
-    created_files: RefCell<FnvHashMap<String, DIFile>>,
-    created_enum_disr_types: RefCell<FnvHashMap<(DefId, layout::Integer), DIType>>,
+    created_files: RefCell<FxHashMap<String, DIFile>>,
+    created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>,
 
     type_map: RefCell<TypeMap<'tcx>>,
     namespace_map: RefCell<DefIdMap<DIScope>>,
 
     // This collection is used to assert that composite types (structs, enums,
     // ...) have their members only set once:
-    composite_types_completed: RefCell<FnvHashSet<DIType>>,
+    composite_types_completed: RefCell<FxHashSet<DIType>>,
 }
 
 impl<'tcx> CrateDebugContext<'tcx> {
@@ -89,11 +89,11 @@ pub fn new(llmod: ModuleRef) -> CrateDebugContext<'tcx> {
             llcontext: llcontext,
             builder: builder,
             current_debug_location: Cell::new(InternalDebugLocation::UnknownLocation),
-            created_files: RefCell::new(FnvHashMap()),
-            created_enum_disr_types: RefCell::new(FnvHashMap()),
+            created_files: RefCell::new(FxHashMap()),
+            created_enum_disr_types: RefCell::new(FxHashMap()),
             type_map: RefCell::new(TypeMap::new()),
             namespace_map: RefCell::new(DefIdMap()),
-            composite_types_completed: RefCell::new(FnvHashSet()),
+            composite_types_completed: RefCell::new(FxHashSet()),
         };
     }
 }
index 8bf27b4babfc2a054a2f0cbc7336548f4750efeb..b22bcf9825a2aa4239b23fbe49a9b11a4e0d9e40 100644 (file)
@@ -29,7 +29,7 @@
 use glue;
 use type_::Type;
 
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
 use syntax::parse::token;
 
 use super::{MirContext, LocalRef};
@@ -144,7 +144,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
                     adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true)
                 );
 
-                let mut bb_hist = FnvHashMap();
+                let mut bb_hist = FxHashMap();
                 for target in targets {
                     *bb_hist.entry(target).or_insert(0) += 1;
                 }
index 625b43c7d179255d44aefd33028a58ac1d26be7b..c9c12fb6d4534deb2f2dbea251e38ca64fe09faa 100644 (file)
 use syntax::ast::NodeId;
 use syntax::parse::token::{self, InternedString};
 use trans_item::TransItem;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};
 
 pub enum PartitioningStrategy {
     /// Generate one codegen unit per source-level module.
@@ -151,12 +151,12 @@ pub struct CodegenUnit<'tcx> {
     /// as well as the crate name and disambiguator.
     name: InternedString,
 
-    items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
+    items: FxHashMap<TransItem<'tcx>, llvm::Linkage>,
 }
 
 impl<'tcx> CodegenUnit<'tcx> {
     pub fn new(name: InternedString,
-               items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>)
+               items: FxHashMap<TransItem<'tcx>, llvm::Linkage>)
                -> Self {
         CodegenUnit {
             name: name,
@@ -165,7 +165,7 @@ pub fn new(name: InternedString,
     }
 
     pub fn empty(name: InternedString) -> Self {
-        Self::new(name, FnvHashMap())
+        Self::new(name, FxHashMap())
     }
 
     pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
@@ -176,7 +176,7 @@ pub fn name(&self) -> &str {
         &self.name
     }
 
-    pub fn items(&self) -> &FnvHashMap<TransItem<'tcx>, llvm::Linkage> {
+    pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, llvm::Linkage> {
         &self.items
     }
 
@@ -297,7 +297,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
 
 struct PreInliningPartitioning<'tcx> {
     codegen_units: Vec<CodegenUnit<'tcx>>,
-    roots: FnvHashSet<TransItem<'tcx>>,
+    roots: FxHashSet<TransItem<'tcx>>,
 }
 
 struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
@@ -308,8 +308,8 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
     where I: Iterator<Item = TransItem<'tcx>>
 {
     let tcx = scx.tcx();
-    let mut roots = FnvHashSet();
-    let mut codegen_units = FnvHashMap();
+    let mut roots = FxHashSet();
+    let mut codegen_units = FxHashMap();
 
     for trans_item in trans_items {
         let is_root = !trans_item.is_instantiated_only_on_demand(tcx);
@@ -419,7 +419,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
 
     for codegen_unit in &initial_partitioning.codegen_units[..] {
         // Collect all items that need to be available in this codegen unit
-        let mut reachable = FnvHashSet();
+        let mut reachable = FxHashSet();
         for root in codegen_unit.items.keys() {
             follow_inlining(*root, inlining_map, &mut reachable);
         }
@@ -465,7 +465,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
 
     fn follow_inlining<'tcx>(trans_item: TransItem<'tcx>,
                              inlining_map: &InliningMap<'tcx>,
-                             visited: &mut FnvHashSet<TransItem<'tcx>>) {
+                             visited: &mut FxHashSet<TransItem<'tcx>>) {
         if !visited.insert(trans_item) {
             return;
         }
index 3faaa085dce14f3721642bd610065cab672f05db..c3e0ac1fee51578d33b238f3ebb4d3e4aba06265 100644 (file)
@@ -14,7 +14,7 @@
 use std::borrow::Cow;
 use syntax::codemap::Span;
 use trans_item::TransItem;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 // In the SymbolMap we collect the symbol names of all translation items of
 // the current crate. This map exists as a performance optimization. Symbol
@@ -22,7 +22,7 @@
 // Thus they could also always be recomputed if needed.
 
 pub struct SymbolMap<'tcx> {
-    index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
+    index: FxHashMap<TransItem<'tcx>, (usize, usize)>,
     arena: String,
 }
 
@@ -78,7 +78,7 @@ pub fn build<'a, I>(scx: &SharedCrateContext<'a, 'tcx>,
         }
 
         let mut symbol_map = SymbolMap {
-            index: FnvHashMap(),
+            index: FxHashMap(),
             arena: String::with_capacity(1024),
         };
 
index 03a71827b473b8675850fc6db006326ec53cb1d4..2a6f79d3ed57ae4fb5d3ceeb3a9ba5b314fcb58b 100644 (file)
@@ -15,7 +15,7 @@
 use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128};
 
 use context::CrateContext;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use syntax::ast;
 use rustc::ty::layout;
@@ -325,13 +325,13 @@ pub fn from_primitive(ccx: &CrateContext, p: layout::Primitive) -> Type {
 /* Memory-managed object interface to type handles. */
 
 pub struct TypeNames {
-    named_types: RefCell<FnvHashMap<String, TypeRef>>,
+    named_types: RefCell<FxHashMap<String, TypeRef>>,
 }
 
 impl TypeNames {
     pub fn new() -> TypeNames {
         TypeNames {
-            named_types: RefCell::new(FnvHashMap())
+            named_types: RefCell::new(FxHashMap())
         }
     }
 
index c93f1c6c8e6102523bdb7a9fea545239b91112cd..57936f8a4b3be67922429629260cecd50134fc77 100644 (file)
@@ -66,7 +66,7 @@
              ElisionFailureInfo, ElidedLifetime};
 use rscope::{AnonTypeScope, MaybeWithAnonTypes};
 use util::common::{ErrorReported, FN_OUTPUT_NAME};
-use util::nodemap::{NodeMap, FnvHashSet};
+use util::nodemap::{NodeMap, FxHashSet};
 
 use std::cell::RefCell;
 use syntax::{abi, ast};
@@ -569,7 +569,7 @@ fn find_implied_output_region<F>(&self,
         let mut possible_implied_output_region = None;
 
         for input_type in input_tys.iter() {
-            let mut regions = FnvHashSet();
+            let mut regions = FxHashSet();
             let have_bound_regions = tcx.collect_regions(input_type, &mut regions);
 
             debug!("find_implied_output_regions: collected {:?} from {:?} \
@@ -1142,7 +1142,7 @@ fn trait_path_to_object_type(&self,
             return tcx.types.err;
         }
 
-        let mut associated_types = FnvHashSet::default();
+        let mut associated_types = FxHashSet::default();
         for tr in traits::supertraits(tcx, principal) {
             if let Some(trait_id) = tcx.map.as_local_node_id(tr.def_id()) {
                 use collect::trait_associated_type_names;
index c842514227ca090b50b19ed6abc854a7189e3035..15b29573ac4e8cc080259316ebe7709bf0a1dc73 100644 (file)
@@ -14,7 +14,7 @@
 use rustc::infer::{self, InferOk, TypeOrigin};
 use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference};
 use check::{FnCtxt, Expectation};
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
 
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::cmp;
@@ -633,10 +633,10 @@ fn check_struct_pat_fields(&self,
         let field_map = variant.fields
             .iter()
             .map(|field| (field.name, field))
-            .collect::<FnvHashMap<_, _>>();
+            .collect::<FxHashMap<_, _>>();
 
         // Keep track of which fields have already appeared in the pattern.
-        let mut used_fields = FnvHashMap();
+        let mut used_fields = FxHashMap();
 
         // Typecheck each field.
         for &Spanned { node: ref field, span } in fields {
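
Because FxHashMap is only an alias over the standard HashMap, iterator adapters such as collect::<FxHashMap<_, _>>() keep working unchanged, as in the field_map above. A small standalone sketch (rustc-hash assumed, field names invented):

use rustc_hash::FxHashMap;

fn main() {
    let fields = ["name", "age", "email"];
    // Build a field name -> position map, like `field_map` in the hunk above.
    let field_map: FxHashMap<&str, usize> = fields
        .iter()
        .enumerate()
        .map(|(idx, &name)| (name, idx))
        .collect();
    assert_eq!(field_map["age"], 1);
}
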
index e72bcb3079c5c4adffd660e610c9c4bcf41f29b3..d28eb85ebb49d925780d711766bbef66240bf177 100644 (file)
@@ -18,7 +18,7 @@
 use rustc::ty::subst::{Subst, Substs};
 use rustc::ty::{self, AdtKind, Ty, TyCtxt};
 use rustc::traits::{self, Reveal};
-use util::nodemap::FnvHashSet;
+use util::nodemap::FxHashSet;
 
 use syntax::ast;
 use syntax_pos::{self, Span};
@@ -289,7 +289,7 @@ pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>(
             rcx: rcx,
             span: span,
             parent_scope: parent_scope,
-            breadcrumbs: FnvHashSet()
+            breadcrumbs: FxHashSet()
         },
         TypeContext::Root,
         typ,
@@ -347,7 +347,7 @@ enum TypeContext {
 struct DropckContext<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> {
     rcx: &'a mut RegionCtxt<'b, 'gcx, 'tcx>,
     /// types that have already been traversed
-    breadcrumbs: FnvHashSet<Ty<'tcx>>,
+    breadcrumbs: FxHashSet<Ty<'tcx>>,
     /// span for error reporting
     span: Span,
     /// the scope reachable dtorck types must outlive
index 7d2547ec17f3a5b91f8e70c42aacf401a378a425..95d2b2211f5b418bb2bc3c0adf86a0c6a50d482d 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::ty::subst::Substs;
 use rustc::ty::FnSig;
 use rustc::ty::{self, Ty};
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 use {CrateCtxt, require_same_types};
 
 use syntax::abi::Abi;
@@ -372,7 +372,7 @@ pub fn check_platform_intrinsic_type(ccx: &CrateCtxt,
                         return
                     }
 
-                    let mut structural_to_nomimal = FnvHashMap();
+                    let mut structural_to_nomimal = FxHashMap();
 
                     let sig = tcx.no_late_bound_regions(i_ty.ty.fn_sig()).unwrap();
                     if intr.inputs.len() != sig.inputs.len() {
@@ -412,7 +412,7 @@ fn match_intrinsic_type_to_type<'tcx, 'a>(
         ccx: &CrateCtxt<'a, 'tcx>,
         position: &str,
         span: Span,
-        structural_to_nominal: &mut FnvHashMap<&'a intrinsics::Type, ty::Ty<'tcx>>,
+        structural_to_nominal: &mut FxHashMap<&'a intrinsics::Type, ty::Ty<'tcx>>,
         expected: &'a intrinsics::Type, t: ty::Ty<'tcx>)
 {
     use intrinsics::Type::*;
index 43837de2f345d2ec1b22a23dd7198fa441ad9560..54b1b6c6807dbe887e172525542253cf773ed833 100644 (file)
@@ -20,7 +20,7 @@
 use rustc::traits;
 use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable};
 use rustc::infer::{InferOk, TypeOrigin};
-use rustc::util::nodemap::FnvHashSet;
+use rustc::util::nodemap::FxHashSet;
 use syntax::ast;
 use syntax_pos::{Span, DUMMY_SP};
 use rustc::hir;
@@ -40,7 +40,7 @@ struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
     opt_simplified_steps: Option<Vec<ty::fast_reject::SimplifiedType>>,
     inherent_candidates: Vec<Candidate<'tcx>>,
     extension_candidates: Vec<Candidate<'tcx>>,
-    impl_dups: FnvHashSet<DefId>,
+    impl_dups: FxHashSet<DefId>,
     import_id: Option<ast::NodeId>,
 
     /// Collects near misses when the candidate functions are missing a `self` keyword and is only
@@ -263,7 +263,7 @@ fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
             item_name: item_name,
             inherent_candidates: Vec::new(),
             extension_candidates: Vec::new(),
-            impl_dups: FnvHashSet(),
+            impl_dups: FxHashSet(),
             import_id: None,
             steps: Rc::new(steps),
             opt_simplified_steps: opt_simplified_steps,
@@ -568,7 +568,7 @@ fn elaborate_bounds<F>(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand
     fn assemble_extension_candidates_for_traits_in_scope(&mut self,
                                                          expr_id: ast::NodeId)
                                                          -> Result<(), MethodError<'tcx>> {
-        let mut duplicates = FnvHashSet();
+        let mut duplicates = FxHashSet();
         let opt_applicable_traits = self.tcx.trait_map.get(&expr_id);
         if let Some(applicable_traits) = opt_applicable_traits {
             for trait_candidate in applicable_traits {
@@ -585,7 +585,7 @@ fn assemble_extension_candidates_for_traits_in_scope(&mut self,
     }
 
     fn assemble_extension_candidates_for_all_traits(&mut self) -> Result<(), MethodError<'tcx>> {
-        let mut duplicates = FnvHashSet();
+        let mut duplicates = FxHashSet();
         for trait_info in suggest::all_traits(self.ccx) {
             if duplicates.insert(trait_info.def_id) {
                 self.assemble_extension_candidates_for_trait(trait_info.def_id)?;
index 32bf839a4ed4e12b190ffc0238caa2cf03435c68..98d3957db70599376575876a60da162e422e8cf9 100644 (file)
@@ -20,7 +20,7 @@
 use hir::def_id::{CRATE_DEF_INDEX, DefId};
 use middle::lang_items::FnOnceTraitLangItem;
 use rustc::traits::{Obligation, SelectionContext};
-use util::nodemap::FnvHashSet;
+use util::nodemap::FxHashSet;
 
 use syntax::ast;
 use errors::DiagnosticBuilder;
@@ -470,10 +470,10 @@ fn visit_item(&mut self, i: &'v hir::Item) {
         });
 
         // Cross-crate:
-        let mut external_mods = FnvHashSet();
+        let mut external_mods = FxHashSet();
         fn handle_external_def(ccx: &CrateCtxt,
                                traits: &mut AllTraitsVec,
-                               external_mods: &mut FnvHashSet<DefId>,
+                               external_mods: &mut FxHashSet<DefId>,
                                def: Def) {
             let def_id = def.def_id();
             match def {
index d8314bd6c2aedc29cdc7b050ea18ad8bc4a65309..10523755277e89838ec1e2ac0c49b328e843f184 100644 (file)
 use TypeAndSubsts;
 use lint;
 use util::common::{block_query, ErrorReported, indenter, loop_query};
-use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet, NodeMap};
+use util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap};
 
 use std::cell::{Cell, Ref, RefCell};
 use std::mem::replace;
@@ -1975,13 +1975,13 @@ fn new_select_all_obligations_and_apply_defaults(&self) {
             // We must collect the defaults *before* we do any unification. Because we have
             // directly attached defaults to the type variables any unification that occurs
             // will erase defaults causing conflicting defaults to be completely ignored.
-            let default_map: FnvHashMap<_, _> =
+            let default_map: FxHashMap<_, _> =
                 unsolved_variables
                     .iter()
                     .filter_map(|t| self.default(t).map(|d| (t, d)))
                     .collect();
 
-            let mut unbound_tyvars = FnvHashSet();
+            let mut unbound_tyvars = FxHashSet();
 
             debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
 
@@ -2129,8 +2129,8 @@ fn new_select_all_obligations_and_apply_defaults(&self) {
     // table then apply defaults until we find a conflict. That default must be the one
     // that caused conflict earlier.
     fn find_conflicting_default(&self,
-                                unbound_vars: &FnvHashSet<Ty<'tcx>>,
-                                default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
+                                unbound_vars: &FxHashSet<Ty<'tcx>>,
+                                default_map: &FxHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
                                 conflict: Ty<'tcx>)
                                 -> Option<type_variable::Default<'tcx>> {
         use rustc::ty::error::UnconstrainedNumeric::Neither;
@@ -3123,12 +3123,12 @@ fn check_expr_struct_fields(&self,
             _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
         };
 
-        let mut remaining_fields = FnvHashMap();
+        let mut remaining_fields = FxHashMap();
         for field in &variant.fields {
             remaining_fields.insert(field.name, field);
         }
 
-        let mut seen_fields = FnvHashMap();
+        let mut seen_fields = FxHashMap();
 
         let mut error_happened = false;
 
index be1f2e35679d71e7ab191c5130a8fff33b87678a..741f327ac99e1a7f8b69d48ee1ca614af1119511 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::infer::TypeOrigin;
 use rustc::traits;
 use rustc::ty::{self, Ty, TyCtxt};
-use rustc::util::nodemap::{FnvHashSet, FnvHashMap};
+use rustc::util::nodemap::{FxHashSet, FxHashMap};
 
 use syntax::ast;
 use syntax_pos::Span;
@@ -529,7 +529,7 @@ fn check_variances_for_type_defn(&self,
         assert_eq!(ty_predicates.parent, None);
         let variances = self.tcx().item_variances(item_def_id);
 
-        let mut constrained_parameters: FnvHashSet<_> =
+        let mut constrained_parameters: FxHashSet<_> =
             variances.iter().enumerate()
                      .filter(|&(_, &variance)| variance != ty::Bivariant)
                      .map(|(index, _)| Parameter(index as u32))
@@ -580,10 +580,10 @@ fn report_bivariance(&self,
 
 fn reject_shadowing_type_parameters(tcx: TyCtxt, span: Span, generics: &ty::Generics) {
     let parent = tcx.lookup_generics(generics.parent.unwrap());
-    let impl_params: FnvHashMap<_, _> = parent.types
-                                        .iter()
-                                        .map(|tp| (tp.name, tp.def_id))
-                                        .collect();
+    let impl_params: FxHashMap<_, _> = parent.types
+                                       .iter()
+                                       .map(|tp| (tp.name, tp.def_id))
+                                       .collect();
 
     for method_param in &generics.types {
         if impl_params.contains_key(&method_param.name) {
index 0e0f5cb1a7e156517fbb97a3954f70e606ec3879..5c51877ae743ee09b9d6678003726e662c8e1606 100644 (file)
@@ -72,7 +72,7 @@
 use rscope::*;
 use rustc::dep_graph::DepNode;
 use util::common::{ErrorReported, MemoizationMap};
-use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet};
+use util::nodemap::{NodeMap, FxHashMap, FxHashSet};
 use {CrateCtxt, write_ty_to_tcx};
 
 use rustc_const_math::ConstInt;
@@ -786,8 +786,8 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) {
 
             // Convert all the associated consts.
             // Also, check if there are any duplicate associated items
-            let mut seen_type_items = FnvHashMap();
-            let mut seen_value_items = FnvHashMap();
+            let mut seen_type_items = FxHashMap();
+            let mut seen_value_items = FxHashMap();
 
             for impl_item in impl_items {
                 let seen_items = match impl_item.node {
@@ -1038,7 +1038,7 @@ fn convert_struct_variant<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                     disr_val: ty::Disr,
                                     def: &hir::VariantData)
                                     -> ty::VariantDefData<'tcx, 'tcx> {
-    let mut seen_fields: FnvHashMap<ast::Name, Span> = FnvHashMap();
+    let mut seen_fields: FxHashMap<ast::Name, Span> = FxHashMap();
     let node_id = ccx.tcx.map.as_local_node_id(did).unwrap();
     let fields = def.fields().iter().map(|f| {
         let fid = ccx.tcx.map.local_def_id(f.id);
@@ -1952,9 +1952,9 @@ fn compute_object_lifetime_default<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
 {
     let inline_bounds = from_bounds(ccx, param_bounds);
     let where_bounds = from_predicates(ccx, param_id, &where_clause.predicates);
-    let all_bounds: FnvHashSet<_> = inline_bounds.into_iter()
-                                                 .chain(where_bounds)
-                                                 .collect();
+    let all_bounds: FxHashSet<_> = inline_bounds.into_iter()
+                                                .chain(where_bounds)
+                                                .collect();
     return if all_bounds.len() > 1 {
         ty::ObjectLifetimeDefault::Ambiguous
     } else if all_bounds.len() == 0 {
@@ -2171,7 +2171,7 @@ fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
     // The trait reference is an input, so find all type parameters
     // reachable from there, to start (if this is an inherent impl,
     // then just examine the self type).
-    let mut input_parameters: FnvHashSet<_> =
+    let mut input_parameters: FxHashSet<_> =
         ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect();
     if let Some(ref trait_ref) = impl_trait_ref {
         input_parameters.extend(ctp::parameters_for(trait_ref, false));
@@ -2200,7 +2200,7 @@ fn enforce_impl_lifetimes_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
     let impl_predicates = ccx.tcx.lookup_predicates(impl_def_id);
     let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id);
 
-    let mut input_parameters: FnvHashSet<_> =
+    let mut input_parameters: FxHashSet<_> =
         ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect();
     if let Some(ref trait_ref) = impl_trait_ref {
         input_parameters.extend(ctp::parameters_for(trait_ref, false));
@@ -2208,7 +2208,7 @@ fn enforce_impl_lifetimes_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
     ctp::identify_constrained_type_params(
         &impl_predicates.predicates.as_slice(), impl_trait_ref, &mut input_parameters);
 
-    let lifetimes_in_associated_types: FnvHashSet<_> = impl_items.iter()
+    let lifetimes_in_associated_types: FxHashSet<_> = impl_items.iter()
         .map(|item| ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(item.id)))
         .filter_map(|item| match item {
             ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty,
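
What this file-by-file renaming does not show is the difference in the hashing itself: FNV-1a folds one byte into the state per step, while an Fx-style hasher (the scheme used in Firefox) mixes a whole machine word per step with a rotate, xor, and multiply. A rough, self-contained comparison of the two mixing loops, written against the widely published constants rather than the in-tree sources, so details may differ:

    use std::convert::TryInto;
    use std::hash::Hasher;

    /// FNV-1a: xor one byte into the state, then multiply by the FNV prime.
    struct Fnv1a(u64);

    impl Default for Fnv1a {
        fn default() -> Self {
            Fnv1a(0xcbf2_9ce4_8422_2325) // published 64-bit offset basis
        }
    }

    impl Hasher for Fnv1a {
        fn write(&mut self, bytes: &[u8]) {
            for &b in bytes {
                self.0 = (self.0 ^ b as u64).wrapping_mul(0x0000_0100_0000_01b3);
            }
        }
        fn finish(&self) -> u64 { self.0 }
    }

    /// Fx-style: mix a whole word per step with rotate, xor, multiply,
    /// so long keys need far fewer rounds than byte-at-a-time FNV.
    #[derive(Default)]
    struct FxStyle(u64);

    impl FxStyle {
        fn mix(&mut self, word: u64) {
            self.0 = (self.0.rotate_left(5) ^ word).wrapping_mul(0x51_7c_c1_b7_27_22_0a_95);
        }
    }

    impl Hasher for FxStyle {
        fn write(&mut self, bytes: &[u8]) {
            let mut chunks = bytes.chunks_exact(8);
            for chunk in chunks.by_ref() {
                self.mix(u64::from_le_bytes(chunk.try_into().unwrap()));
            }
            for &b in chunks.remainder() {
                self.mix(b as u64);
            }
        }
        fn finish(&self) -> u64 { self.0 }
    }

    fn main() {
        let (mut a, mut b) = (Fnv1a::default(), FxStyle::default());
        a.write(b"librustc_typeck");
        b.write(b"librustc_typeck");
        println!("fnv-1a: {:#x}, fx-style: {:#x}", a.finish(), b.finish());
    }

For the `DefId`/`NodeId`-sized keys that dominate these maps, the word-at-a-time scheme digests a key in a single round instead of four or eight.
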
index 39f9e4316b9c7c150668e9c72f44699359e6ce79..7918537a6c08f7ec4173a51fe68814c8ff3aeb42 100644 (file)
@@ -10,7 +10,7 @@
 
 use rustc::ty::{self, Ty};
 use rustc::ty::fold::{TypeFoldable, TypeVisitor};
-use rustc::util::nodemap::FnvHashSet;
+use rustc::util::nodemap::FxHashSet;
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct Parameter(pub u32);
@@ -76,7 +76,7 @@ fn visit_region(&mut self, r: &'tcx ty::Region) -> bool {
 
 pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>],
                                               impl_trait_ref: Option<ty::TraitRef<'tcx>>,
-                                              input_parameters: &mut FnvHashSet<Parameter>)
+                                              input_parameters: &mut FxHashSet<Parameter>)
 {
     let mut predicates = predicates.to_owned();
     setup_constraining_predicates(&mut predicates, impl_trait_ref, input_parameters);
@@ -125,7 +125,7 @@ pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>]
 /// think of any.
 pub fn setup_constraining_predicates<'tcx>(predicates: &mut [ty::Predicate<'tcx>],
                                            impl_trait_ref: Option<ty::TraitRef<'tcx>>,
-                                           input_parameters: &mut FnvHashSet<Parameter>)
+                                           input_parameters: &mut FxHashSet<Parameter>)
 {
     // The canonical way of doing the needed topological sort
     // would be a DFS, but getting the graph and its ownership
index 31497b6bd335256621cb1a418c10c2025fa24891..1885b4276cc4168021dcec56770e5ce19d19271b 100644 (file)
@@ -19,7 +19,7 @@
 use rustc::hir::def_id::DefId;
 use rustc::hir::print as pprust;
 use rustc::ty::{self, TyCtxt};
-use rustc::util::nodemap::FnvHashSet;
+use rustc::util::nodemap::FxHashSet;
 
 use rustc_const_eval::lookup_const_by_id;
 
@@ -460,7 +460,7 @@ pub fn build_impl<'a, 'tcx>(cx: &DocContext,
                 .into_iter()
                 .map(|meth| meth.name.to_string())
                 .collect()
-    }).unwrap_or(FnvHashSet());
+    }).unwrap_or(FxHashSet());
 
     ret.push(clean::Item {
         inner: clean::ImplItem(clean::Impl {
@@ -496,7 +496,7 @@ fn fill_in<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
         // If we're reexporting a reexport it may actually reexport something in
         // two namespaces, so the target may be listed twice. Make sure we only
         // visit each node at most once.
-        let mut visited = FnvHashSet();
+        let mut visited = FxHashSet();
         for item in tcx.sess.cstore.item_children(did) {
             let def_id = item.def.def_id();
             if tcx.sess.cstore.visibility(def_id) == ty::Visibility::Public {
index 265b66b01ea52b2908a9d4870545f9b70d32dfc1..df13e384d9615a4b926319a59284fb26da3e950e 100644 (file)
@@ -38,7 +38,7 @@
 use rustc::ty::subst::Substs;
 use rustc::ty::{self, AdtKind};
 use rustc::middle::stability;
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 
 use rustc::hir;
 
@@ -116,7 +116,7 @@ pub struct Crate {
     pub access_levels: Arc<AccessLevels<DefId>>,
     // These are later on moved into `CACHE_KEY`, leaving the map empty.
     // Only here so that they can be filtered through the rustdoc passes.
-    pub external_traits: FnvHashMap<DefId, Trait>,
+    pub external_traits: FxHashMap<DefId, Trait>,
 }
 
 struct CrateNum(def_id::CrateNum);
@@ -993,7 +993,7 @@ fn clean(&self, cx: &DocContext) -> Generics {
         // Note that associated types also have a sized bound by default, but we
         // don't actually know the set of associated types right here so that's
         // handled in cleaning associated types
-        let mut sized_params = FnvHashSet();
+        let mut sized_params = FxHashSet();
         where_predicates.retain(|pred| {
             match *pred {
                 WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
@@ -1693,8 +1693,8 @@ fn clean(&self, cx: &DocContext) -> Type {
                 });
                 if let Some((tcx, &hir::ItemTy(ref ty, ref generics))) = tcx_and_alias {
                     let provided_params = &path.segments.last().unwrap().parameters;
-                    let mut ty_substs = FnvHashMap();
-                    let mut lt_substs = FnvHashMap();
+                    let mut ty_substs = FxHashMap();
+                    let mut lt_substs = FxHashMap();
                     for (i, ty_param) in generics.ty_params.iter().enumerate() {
                         let ty_param_def = tcx.expect_def(ty_param.id);
                         if let Some(ty) = provided_params.types().get(i).cloned()
@@ -2368,7 +2368,7 @@ fn clean(&self, _: &DocContext) -> ImplPolarity {
 pub struct Impl {
     pub unsafety: hir::Unsafety,
     pub generics: Generics,
-    pub provided_trait_methods: FnvHashSet<String>,
+    pub provided_trait_methods: FxHashSet<String>,
     pub trait_: Option<Type>,
     pub for_: Type,
     pub items: Vec<Item>,
@@ -2394,7 +2394,7 @@ fn clean(&self, cx: &DocContext) -> Vec<Item> {
                    .map(|meth| meth.name.to_string())
                    .collect()
             })
-        }).unwrap_or(FnvHashSet());
+        }).unwrap_or(FxHashSet());
 
         ret.push(Item {
             name: None,
index f03b6a5ab3f1f200490dd370653e783c562c092c..810bea4c5b0983d06981154ae60d49f9d646f0f0 100644 (file)
@@ -19,7 +19,7 @@
 use rustc::ty::{self, TyCtxt};
 use rustc::hir::map as hir_map;
 use rustc::lint;
-use rustc::util::nodemap::FnvHashMap;
+use rustc::util::nodemap::FxHashMap;
 use rustc_trans::back::link;
 use rustc_resolve as resolve;
 use rustc_metadata::cstore::CStore;
@@ -48,7 +48,7 @@ pub enum MaybeTyped<'a, 'tcx: 'a> {
     NotTyped(&'a session::Session)
 }
 
-pub type ExternalPaths = FnvHashMap<DefId, (Vec<String>, clean::TypeKind)>;
+pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
 
 pub struct DocContext<'a, 'tcx: 'a> {
     pub map: &'a hir_map::Map<'tcx>,
@@ -65,15 +65,15 @@ pub struct DocContext<'a, 'tcx: 'a> {
     /// Later on moved into `html::render::CACHE_KEY`
     pub renderinfo: RefCell<RenderInfo>,
     /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY`
-    pub external_traits: RefCell<FnvHashMap<DefId, clean::Trait>>,
+    pub external_traits: RefCell<FxHashMap<DefId, clean::Trait>>,
 
     // The current set of type and lifetime substitutions,
     // for expanding type aliases at the HIR level:
 
     /// Table type parameter definition -> substituted type
-    pub ty_substs: RefCell<FnvHashMap<Def, clean::Type>>,
+    pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
     /// Table node id of lifetime parameter definition -> substituted lifetime
-    pub lt_substs: RefCell<FnvHashMap<ast::NodeId, clean::Lifetime>>,
+    pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
 }
 
 impl<'b, 'tcx> DocContext<'b, 'tcx> {
@@ -99,8 +99,8 @@ pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
     /// Call the closure with the given parameters set as
     /// the substitutions for a type alias' RHS.
     pub fn enter_alias<F, R>(&self,
-                             ty_substs: FnvHashMap<Def, clean::Type>,
-                             lt_substs: FnvHashMap<ast::NodeId, clean::Lifetime>,
+                             ty_substs: FxHashMap<Def, clean::Type>,
+                             lt_substs: FxHashMap<ast::NodeId, clean::Lifetime>,
                              f: F) -> R
     where F: FnOnce() -> R {
         let (old_tys, old_lts) =
index a848a011f88dbfe6be21cb82c7d1bbe5ca333400..2078ad3ffbe26a089ae6c03b15dab20d1bcd0270 100644 (file)
@@ -59,7 +59,7 @@
 use rustc::middle::privacy::AccessLevels;
 use rustc::middle::stability;
 use rustc::hir;
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 use rustc_data_structures::flock;
 
 use clean::{self, Attributes, GetDefId, SelfTy, Mutability};
@@ -111,9 +111,9 @@ pub struct SharedContext {
     /// `true`.
     pub include_sources: bool,
     /// The local file sources we've emitted and their respective url-paths.
-    pub local_sources: FnvHashMap<PathBuf, String>,
+    pub local_sources: FxHashMap<PathBuf, String>,
     /// All the passes that were run on this crate.
-    pub passes: FnvHashSet<String>,
+    pub passes: FxHashSet<String>,
     /// The base-URL of the issue tracker for when an item has been tagged with
     /// an issue number.
     pub issue_tracker_base_url: Option<String>,
@@ -208,7 +208,7 @@ pub struct Cache {
     /// Mapping of typaram ids to the name of the type parameter. This is used
     /// when pretty-printing a type (so pretty printing doesn't have to
     /// painfully maintain a context like this)
-    pub typarams: FnvHashMap<DefId, String>,
+    pub typarams: FxHashMap<DefId, String>,
 
     /// Maps a type id to all known implementations for that type. This is only
     /// recognized for intra-crate `ResolvedPath` types, and is used to print
@@ -216,35 +216,35 @@ pub struct Cache {
     ///
     /// The values of the map are a list of implementations and documentation
     /// found on that implementation.
-    pub impls: FnvHashMap<DefId, Vec<Impl>>,
+    pub impls: FxHashMap<DefId, Vec<Impl>>,
 
     /// Maintains a mapping of local crate node ids to the fully qualified name
     /// and "short type description" of that node. This is used when generating
     /// URLs when a type is being linked to. External paths are not located in
     /// this map because the `External` type itself has all the information
     /// necessary.
-    pub paths: FnvHashMap<DefId, (Vec<String>, ItemType)>,
+    pub paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
 
     /// Similar to `paths`, but only holds external paths. This is only used for
     /// generating explicit hyperlinks to other crates.
-    pub external_paths: FnvHashMap<DefId, (Vec<String>, ItemType)>,
+    pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
 
     /// This map contains information about all known traits of this crate.
     /// Implementations of a crate should inherit the documentation of the
     /// parent trait if no extra documentation is specified, and default methods
     /// should show up in documentation about trait implementations.
-    pub traits: FnvHashMap<DefId, clean::Trait>,
+    pub traits: FxHashMap<DefId, clean::Trait>,
 
     /// When rendering traits, it's often useful to be able to list all
     /// implementors of the trait, and this mapping is exactly, that: a mapping
     /// of trait ids to the list of known implementors of the trait
-    pub implementors: FnvHashMap<DefId, Vec<Implementor>>,
+    pub implementors: FxHashMap<DefId, Vec<Implementor>>,
 
     /// Cache of where external crate documentation can be found.
-    pub extern_locations: FnvHashMap<CrateNum, (String, ExternalLocation)>,
+    pub extern_locations: FxHashMap<CrateNum, (String, ExternalLocation)>,
 
     /// Cache of where documentation for primitives can be found.
-    pub primitive_locations: FnvHashMap<clean::PrimitiveType, CrateNum>,
+    pub primitive_locations: FxHashMap<clean::PrimitiveType, CrateNum>,
 
     // Note that external items for which `doc(hidden)` applies to are shown as
     // non-reachable while local items aren't. This is because we're reusing
@@ -257,7 +257,7 @@ pub struct Cache {
     parent_stack: Vec<DefId>,
     parent_is_trait_impl: bool,
     search_index: Vec<IndexItem>,
-    seen_modules: FnvHashSet<DefId>,
+    seen_modules: FxHashSet<DefId>,
     seen_mod: bool,
     stripped_mod: bool,
     deref_trait_did: Option<DefId>,
@@ -275,9 +275,9 @@ pub struct Cache {
 /// Later on moved into `CACHE_KEY`.
 #[derive(Default)]
 pub struct RenderInfo {
-    pub inlined: FnvHashSet<DefId>,
+    pub inlined: FxHashSet<DefId>,
     pub external_paths: ::core::ExternalPaths,
-    pub external_typarams: FnvHashMap<DefId, String>,
+    pub external_typarams: FxHashMap<DefId, String>,
     pub deref_trait_did: Option<DefId>,
     pub deref_mut_trait_did: Option<DefId>,
 }
@@ -376,10 +376,10 @@ fn to_json(&self) -> Json {
 thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
 thread_local!(pub static CURRENT_LOCATION_KEY: RefCell<Vec<String>> =
                     RefCell::new(Vec::new()));
-thread_local!(static USED_ID_MAP: RefCell<FnvHashMap<String, usize>> =
+thread_local!(static USED_ID_MAP: RefCell<FxHashMap<String, usize>> =
                     RefCell::new(init_ids()));
 
-fn init_ids() -> FnvHashMap<String, usize> {
+fn init_ids() -> FxHashMap<String, usize> {
     [
      "main",
      "search",
@@ -406,7 +406,7 @@ pub fn reset_ids(embedded: bool) {
         *s.borrow_mut() = if embedded {
             init_ids()
         } else {
-            FnvHashMap()
+            FxHashMap()
         };
     });
 }
@@ -431,7 +431,7 @@ pub fn derive_id(candidate: String) -> String {
 pub fn run(mut krate: clean::Crate,
            external_html: &ExternalHtml,
            dst: PathBuf,
-           passes: FnvHashSet<String>,
+           passes: FxHashSet<String>,
            css_file_extension: Option<PathBuf>,
            renderinfo: RenderInfo) -> Result<(), Error> {
     let src_root = match krate.src.parent() {
@@ -442,7 +442,7 @@ pub fn run(mut krate: clean::Crate,
         src_root: src_root,
         passes: passes,
         include_sources: true,
-        local_sources: FnvHashMap(),
+        local_sources: FxHashMap(),
         issue_tracker_base_url: None,
         layout: layout::Layout {
             logo: "".to_string(),
@@ -510,22 +510,22 @@ pub fn run(mut krate: clean::Crate,
         .collect();
 
     let mut cache = Cache {
-        impls: FnvHashMap(),
+        impls: FxHashMap(),
         external_paths: external_paths,
-        paths: FnvHashMap(),
-        implementors: FnvHashMap(),
+        paths: FxHashMap(),
+        implementors: FxHashMap(),
         stack: Vec::new(),
         parent_stack: Vec::new(),
         search_index: Vec::new(),
         parent_is_trait_impl: false,
-        extern_locations: FnvHashMap(),
-        primitive_locations: FnvHashMap(),
-        seen_modules: FnvHashSet(),
+        extern_locations: FxHashMap(),
+        primitive_locations: FxHashMap(),
+        seen_modules: FxHashSet(),
         seen_mod: false,
         stripped_mod: false,
         access_levels: krate.access_levels.clone(),
         orphan_impl_items: Vec::new(),
-        traits: mem::replace(&mut krate.external_traits, FnvHashMap()),
+        traits: mem::replace(&mut krate.external_traits, FxHashMap()),
         deref_trait_did: deref_trait_did,
         deref_mut_trait_did: deref_mut_trait_did,
         typarams: external_typarams,
@@ -572,7 +572,7 @@ pub fn run(mut krate: clean::Crate,
 
 /// Build the search index from the collected metadata
 fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
-    let mut nodeid_to_pathid = FnvHashMap();
+    let mut nodeid_to_pathid = FxHashMap();
     let mut crate_items = Vec::with_capacity(cache.search_index.len());
     let mut crate_paths = Vec::<Json>::new();
 
@@ -2618,7 +2618,7 @@ fn render_union(w: &mut fmt::Formatter, it: &clean::Item,
 #[derive(Copy, Clone)]
 enum AssocItemLink<'a> {
     Anchor(Option<&'a str>),
-    GotoSource(DefId, &'a FnvHashSet<String>),
+    GotoSource(DefId, &'a FxHashSet<String>),
 }
 
 impl<'a> AssocItemLink<'a> {
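
Since both hasher families plug into the standard `HashMap` through `BuildHasherDefault`, the relative cost on small integer keys can be sanity-checked with the published `fnv` and `rustc-hash` crates. This is only an indicative microbenchmark sketch (build with `--release`), not a substitute for measuring the compiler itself:

    use std::time::Instant;

    use fnv::FnvHashMap;       // published FNV hasher crate
    use rustc_hash::FxHashMap; // published Fx hasher crate

    const N: u32 = 1_000_000;

    fn main() {
        let start = Instant::now();
        let mut fnv_map: FnvHashMap<u32, u32> = FnvHashMap::default();
        for i in 0..N {
            fnv_map.insert(i, i.wrapping_mul(3));
        }
        println!("fnv: {:?}", start.elapsed());

        let start = Instant::now();
        let mut fx_map: FxHashMap<u32, u32> = FxHashMap::default();
        for i in 0..N {
            fx_map.insert(i, i.wrapping_mul(3));
        }
        println!("fx:  {:?}", start.elapsed());
    }
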
index 4d1af1622724a37bdae20bb8d7e071b6e892f0b4..6e47c037ad3dbb2f1a900c78052a4ee94c710080 100644 (file)
@@ -22,7 +22,7 @@
 use rustc::hir::def::Def;
 use rustc::hir::def_id::LOCAL_CRATE;
 use rustc::middle::privacy::AccessLevel;
-use rustc::util::nodemap::FnvHashSet;
+use rustc::util::nodemap::FxHashSet;
 
 use rustc::hir;
 
@@ -42,14 +42,14 @@ pub struct RustdocVisitor<'a, 'tcx: 'a> {
     pub module: Module,
     pub attrs: hir::HirVec<ast::Attribute>,
     pub cx: &'a core::DocContext<'a, 'tcx>,
-    view_item_stack: FnvHashSet<ast::NodeId>,
+    view_item_stack: FxHashSet<ast::NodeId>,
     inlining_from_glob: bool,
 }
 
 impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
     pub fn new(cx: &'a core::DocContext<'a, 'tcx>) -> RustdocVisitor<'a, 'tcx> {
         // If the root is reexported, terminate all recursion.
-        let mut stack = FnvHashSet();
+        let mut stack = FxHashSet();
         stack.insert(ast::CRATE_NODE_ID);
         RustdocVisitor {
             module: Module::new(None),