git.lizzy.rs Git - rust.git/commitdiff
rustc/ty: whitespace fixes
author: ljedrz <ljedrz@gmail.com>
Mon, 1 Oct 2018 13:26:53 +0000 (15:26 +0200)
committer: ljedrz <ljedrz@gmail.com>
Fri, 5 Oct 2018 06:58:05 +0000 (08:58 +0200)
src/librustc/ty/codec.rs
src/librustc/ty/context.rs
src/librustc/ty/query/config.rs
src/librustc/ty/query/on_disk_cache.rs
src/librustc/ty/query/plumbing.rs

index ae87d30ec942711254f433149c21612809efe235..8738f574148239e37966edc1ef3ef1a90f2f532e 100644 (file)
@@ -178,19 +178,19 @@ pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D)
     Ok(ty::GenericPredicates {
         parent: Decodable::decode(decoder)?,
         predicates: (0..decoder.read_usize()?).map(|_| {
-                // Handle shorthands first, if we have an usize > 0x80.
-                let predicate = if decoder.positioned_at_shorthand() {
-                    let pos = decoder.read_usize()?;
-                    assert!(pos >= SHORTHAND_OFFSET);
-                    let shorthand = pos - SHORTHAND_OFFSET;
-
-                    decoder.with_position(shorthand, ty::Predicate::decode)
-                } else {
-                    ty::Predicate::decode(decoder)
-                }?;
-                Ok((predicate, Decodable::decode(decoder)?))
-            })
-            .collect::<Result<Vec<_>, _>>()?,
+            // Handle shorthands first, if we have an usize > 0x80.
+            let predicate = if decoder.positioned_at_shorthand() {
+                let pos = decoder.read_usize()?;
+                assert!(pos >= SHORTHAND_OFFSET);
+                let shorthand = pos - SHORTHAND_OFFSET;
+
+                decoder.with_position(shorthand, ty::Predicate::decode)
+            } else {
+                ty::Predicate::decode(decoder)
+            }?;
+            Ok((predicate, Decodable::decode(decoder)?))
+        })
+        .collect::<Result<Vec<_>, _>>()?,
     })
 }
 
@@ -267,7 +267,7 @@ pub fn decode_const<'a, 'tcx, D>(decoder: &mut D)
 
 #[inline]
 pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D)
-                                 -> Result<&'tcx Allocation, D::Error>
+    -> Result<&'tcx Allocation, D::Error>
     where D: TyDecoder<'a, 'tcx>,
           'tcx: 'a,
 {
index 05b68b34989bf476803aa094e711cb34ee3da24c..daada8891d7a73ac80f37a652678497aed4e90e8 100644 (file)
@@ -190,8 +190,8 @@ fn intern_ty(
             // types/regions in the global interner
             if local as *const _ as usize == global as *const _ as usize {
                 bug!("Attempted to intern `{:?}` which contains \
-                    inference types/regions in the global type context",
-                    &ty_struct);
+                      inference types/regions in the global type context",
+                     &ty_struct);
             }
 
             // Don't be &mut TyS.
@@ -272,9 +272,9 @@ fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
 
                     bug!("node {} with HirId::owner {:?} cannot be placed in \
                           TypeckTables with local_id_root {:?}",
-                          tcx.hir.node_to_string(node_id),
-                          DefId::local(hir_id.owner),
-                          local_id_root)
+                         tcx.hir.node_to_string(node_id),
+                         DefId::local(hir_id.owner),
+                         local_id_root)
                 });
             }
         } else {
@@ -686,7 +686,7 @@ pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
     }
 
     pub fn pat_adjustments_mut(&mut self)
-                           -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
+                               -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
         LocalTableInContextMut {
             local_id_root: self.local_id_root,
             data: &mut self.pat_adjustments,
@@ -1199,8 +1199,8 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             let hir_id = hir.node_to_hir_id(k);
             let map = trait_map.entry(hir_id.owner).or_default();
             Lrc::get_mut(map).unwrap()
-                            .insert(hir_id.local_id,
-                                    Lrc::new(StableVec::new(v)));
+                             .insert(hir_id.local_id,
+                                     Lrc::new(StableVec::new(v)));
         }
 
         let gcx = &GlobalCtxt {
@@ -2188,7 +2188,6 @@ pub fn go(tcx: TyCtxt<'_, '_, '_>) {
                 };
                 $(let mut $variant = total;)*
 
-
                 for &Interned(t) in tcx.interners.type_.borrow().iter() {
                     let variant = match t.sty {
                         ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
@@ -2207,7 +2206,7 @@ pub fn go(tcx: TyCtxt<'_, '_, '_>) {
                 }
                 println!("Ty interner             total           ty region  both");
                 $(println!("    {:18}: {uses:6} {usespc:4.1}%, \
-{ty:4.1}% {region:5.1}% {both:4.1}%",
+                            {ty:4.1}% {region:5.1}% {both:4.1}%",
                            stringify!($variant),
                            uses = $variant.total,
                            usespc = $variant.total as f64 * 100.0 / total.total as f64,
@@ -2216,7 +2215,7 @@ pub fn go(tcx: TyCtxt<'_, '_, '_>) {
                            both = $variant.both_infer as f64 * 100.0  / total.total as f64);
                   )*
                 println!("                  total {uses:6}        \
-{ty:4.1}% {region:5.1}% {both:4.1}%",
+                          {ty:4.1}% {region:5.1}% {both:4.1}%",
                          uses = total.total,
                          ty = total.ty_infer as f64 * 100.0  / total.total as f64,
                          region = total.region_infer as f64 * 100.0  / total.total as f64,
@@ -2653,7 +2652,7 @@ pub fn mk_projection(self,
         }
 
     pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
-                                          -> Ty<'tcx> {
+                      -> Ty<'tcx> {
         self.mk_ty(Closure(closure_id, closure_substs))
     }
 
@@ -2686,8 +2685,8 @@ pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
     }
 
     pub fn mk_ty_param(self,
-                    index: u32,
-                    name: InternedString) -> Ty<'tcx> {
+                       index: u32,
+                       name: InternedString) -> Ty<'tcx> {
         self.mk_ty(Param(ParamTy { idx: index, name: name }))
     }
 
index 66d7541633cde29cd2a10af21ae72c0bccc4e129..0e11d6e0eec02c26ea16370dca38e89c78fccfbd 100644 (file)
@@ -386,8 +386,8 @@ fn cache_on_disk(_: Self::Key) -> bool {
 
     #[inline]
     fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          id: SerializedDepNodeIndex)
-                          -> Option<Self::Value> {
+                              id: SerializedDepNodeIndex)
+                              -> Option<Self::Value> {
         tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
     }
 }
index bb877864632237effd2ddd6921e7d8c02a99313c..7d3ce7307dc8593fd66672f5140fe364ee60c4ba 100644 (file)
@@ -448,9 +448,9 @@ fn compute_cnum_map(tcx: TyCtxt<'_, '_, '_>,
             }).collect::<FxHashMap<_,_>>();
 
             let map_size = prev_cnums.iter()
-                                    .map(|&(cnum, ..)| cnum)
-                                    .max()
-                                    .unwrap_or(0) + 1;
+                                     .map(|&(cnum, ..)| cnum)
+                                     .max()
+                                     .unwrap_or(0) + 1;
             let mut map = IndexVec::new();
             map.resize(map_size as usize, None);
 
@@ -465,7 +465,6 @@ fn compute_cnum_map(tcx: TyCtxt<'_, '_, '_>,
     }
 }
 
-
 //- DECODING -------------------------------------------------------------------
 
 /// A decoder that can read the incr. comp. cache. It is similar to the one
@@ -494,7 +493,7 @@ fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
         file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
             let stable_id = file_index_to_stable_id[&index];
             source_map.source_file_by_stable_id(stable_id)
-                   .expect("Failed to lookup SourceFile in new context.")
+                .expect("Failed to lookup SourceFile in new context.")
         }).clone()
     }
 }
@@ -761,7 +760,7 @@ fn specialized_decode(&mut self) -> Result<mir::ClearCrossCrate<T>, Self::Error>
 
 struct CacheEncoder<'enc, 'a, 'tcx, E>
     where E: 'enc + ty_codec::TyEncoder,
-          'tcx: 'a,
+             'tcx: 'a,
 {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     encoder: &'enc mut E,
@@ -839,9 +838,7 @@ fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
         let (file_lo, line_lo, col_lo) = match self.source_map
                                                    .byte_pos_to_line_and_col(span_data.lo) {
             Some(pos) => pos,
-            None => {
-                return TAG_INVALID_SPAN.encode(self);
-            }
+            None => return TAG_INVALID_SPAN.encode(self)
         };
 
         if !file_lo.contains(span_data.hi) {
index fbd3a8f69bc45f36b34df95903e19b3723748ae4..5f6fdbddf8fed7d63ba7dccd3382b79ca50a08a7 100644 (file)
@@ -449,14 +449,14 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
             let prev_dep_node_index =
                 self.dep_graph.prev_dep_node_index_of(dep_node);
             let result = Q::try_load_from_disk(self.global_tcx(),
-                                                    prev_dep_node_index);
+                                               prev_dep_node_index);
 
             // We always expect to find a cached result for things that
             // can be forced from DepNode.
             debug_assert!(!dep_node.kind.can_reconstruct_query_key() ||
-                            result.is_some(),
-                            "Missing on-disk cache entry for {:?}",
-                            dep_node);
+                          result.is_some(),
+                          "Missing on-disk cache entry for {:?}",
+                          dep_node);
             result
         } else {
             // Some things are never cached on disk.
@@ -491,7 +491,7 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
             assert!(Some(self.dep_graph.fingerprint_of(dep_node_index)) ==
                     self.dep_graph.prev_fingerprint_of(dep_node),
                     "Fingerprint for green query instance not loaded \
-                        from cache: {:?}", dep_node);
+                     from cache: {:?}", dep_node);
 
             debug!("BEGIN verify_ich({:?})", dep_node);
             let mut hcx = self.create_stable_hashing_context();
@@ -530,8 +530,8 @@ fn force_query_with_job<Q: QueryDescription<'gcx>>(
         //    (see for example #48923)
         assert!(!self.dep_graph.dep_node_exists(&dep_node),
                 "Forcing query with already existing DepNode.\n\
-                    - query-key: {:?}\n\
-                    - dep-node: {:?}",
+                 - query-key: {:?}\n\
+                 - dep-node: {:?}",
                 key, dep_node);
 
         profq_msg!(self, ProfileQueriesMsg::ProviderBegin);