Auto merge of #105221 - alex:fat-archive-cleanup, r=bjorn3
compiler/rustc_codegen_ssa/src/mir/place.rs
index 908555385891d85417e4710ad4b860b790e43676..fbe30154a7c8d26b23cb4af4ac5fa419e2a558ad 100644
@@ -309,14 +309,14 @@ pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                 // In the algorithm above, we can change
                 // cast(relative_tag) + niche_variants.start()
                 // into
-                // cast(tag) + (niche_variants.start() - niche_start)
+                // cast(tag + (niche_variants.start() - niche_start))
                 // if either the casted type is no larger than the original
                 // type, or if the niche values are contiguous (in either the
                 // signed or unsigned sense).
-                let can_incr_after_cast = cast_smaller || niches_ule || niches_sle;
+                let can_incr = cast_smaller || niches_ule || niches_sle;
 
                 let data_for_boundary_niche = || -> Option<(IntPredicate, u128)> {
-                    if !can_incr_after_cast {
+                    if !can_incr {
                         None
                     } else if niche_start == low_unsigned {
                         Some((IntPredicate::IntULE, niche_end))
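
(Illustration, not part of the patch: a minimal sketch of the rewrite described in the comment above, for the "cast is no larger than the original type" case. The values for the tag, niche_start and niche_variants.start() are made up, and variants_start is a stand-in local name; the point is only that a truncating cast commutes with wrapping addition, so both formulations give the same discriminant.)

    fn main() {
        // Hypothetical layout: 16-bit tag cast down to an 8-bit discriminant,
        // niche_start = 500, niche_variants.start() = 3.
        let tag: u16 = 501;          // stored tag, i.e. relative_tag = 1
        let niche_start: u16 = 500;
        let variants_start: u16 = 3; // stands in for niche_variants.start()

        // Original formulation: cast(relative_tag) + niche_variants.start()
        let relative_tag = tag.wrapping_sub(niche_start);
        let a = (relative_tag as u8).wrapping_add(variants_start as u8);

        // Rewritten formulation: cast(tag + (niche_variants.start() - niche_start))
        let b = tag.wrapping_add(variants_start.wrapping_sub(niche_start)) as u8;

        assert_eq!(a, b); // both are 4: truncation commutes with wrapping add
    }
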
@@ -353,24 +353,33 @@ pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                     // The algorithm is now this:
                     // is_niche = tag <= niche_end
                     // discr = if is_niche {
-                    //     cast(tag) + (niche_variants.start() - niche_start)
+                    //     cast(tag + (niche_variants.start() - niche_start))
                     // } else {
                     //     untagged_variant
                     // }
                     // (the first line may instead be tag >= niche_start,
                     // and may be a signed or unsigned comparison)
+                    // The arithmetic must be done before the cast, so we can
+                    // have the correct wrapping behavior. See issue #104519 for
+                    // the consequences of getting this wrong.
                     let is_niche =
                         bx.icmp(predicate, tag, bx.cx().const_uint_big(tag_llty, constant));
+                    let delta = (niche_variants.start().as_u32() as u128).wrapping_sub(niche_start);
+                    let incr_tag = if delta == 0 {
+                        tag
+                    } else {
+                        bx.add(tag, bx.cx().const_uint_big(tag_llty, delta))
+                    };
+
                     let cast_tag = if cast_smaller {
-                        bx.intcast(tag, cast_to, false)
+                        bx.intcast(incr_tag, cast_to, false)
                     } else if niches_ule {
-                        bx.zext(tag, cast_to)
+                        bx.zext(incr_tag, cast_to)
                     } else {
-                        bx.sext(tag, cast_to)
+                        bx.sext(incr_tag, cast_to)
                     };
 
-                    let delta = (niche_variants.start().as_u32() as u128).wrapping_sub(niche_start);
-                    (is_niche, cast_tag, delta)
+                    (is_niche, cast_tag, 0)
                 } else {
                     // The special cases don't apply, so we'll have to go with
                     // the general algorithm.
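
(Illustration, not part of the patch: the comment added in this hunk says the addition has to happen before the cast so that it wraps in the tag's own width; see issue #104519. Below is a minimal sketch with made-up values, using a widening u8-to-u32 cast and the stand-in name variants_start, where the two orders of operations disagree. It is not a reproduction of the original issue, only of the wrapping behaviour the comment refers to.)

    fn main() {
        // Hypothetical layout: niche_start = 200, niche_variants.start() = 1,
        // chosen so that the delta is only correct modulo 2^8.
        let niche_start: u8 = 200;
        let variants_start: u8 = 1;  // stands in for niche_variants.start()
        let tag: u8 = 201;           // second niche value; discriminant should be 2

        let delta = variants_start.wrapping_sub(niche_start); // 57, i.e. -199 mod 2^8

        // Patched order: add in the tag's width, then widen.
        let discr_new = tag.wrapping_add(delta) as u32;          // (201 + 57) mod 256 == 2

        // Old order: widen first, then add; the wrap modulo 2^8 is lost.
        let discr_old = (tag as u32).wrapping_add(delta as u32); // 258, wrong

        assert_eq!(discr_new, 2);
        assert_ne!(discr_old, discr_new);
    }
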