1 //! Masks that take up full SIMD vector registers.
3 /// The error type returned when converting an integer to a mask fails.
4 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
// The private `()` field means this error can only be constructed inside this
// crate (see the `TryFrom` impl in `define_mask!` below, which builds it).
5 pub struct TryFromMaskError(());
// Human-readable message for the failed vector->mask conversion.
7 impl core::fmt::Display for TryFromMaskError {
8 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
// NOTE(review): interior lines are elided in this excerpt — presumably this
// string literal is handed to `f.write_str(...)`; confirm against full source.
11 "mask vector must have all bits set or unset in each lane"
// Generates a "full-width" mask type: a wrapper around a signed-integer SIMD
// vector (`$type`) in which each lane is intended to be all-ones (true) or
// all-zeros (false) — the `TryFrom` impl below enforces exactly that 0 / -1
// invariant.
//
// NOTE(review): this excerpt is heavily elided — many interior lines (function
// bodies, `where` keywords, closing braces) are missing. Comments below
// describe only what the visible lines establish.
16 macro_rules! define_mask {
17 { $(#[$attr:meta])* struct $name:ident<const $lanes:ident: usize>($type:ty); } => {
19 #[derive(Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
21 pub struct $name<const $lanes: usize>($type)
23 $type: crate::LanesAtMost64;
// Copy/Clone are implemented manually (rather than derived) so the bounds
// land on the impls, gated on the `LanesAtMost64` constraint of the inner type.
25 impl<const LANES: usize> Copy for $name<LANES>
27 $type: crate::LanesAtMost64,
30 impl<const LANES: usize> Clone for $name<LANES>
32 $type: crate::LanesAtMost64,
35 fn clone(&self) -> Self {
// Inherent lane-level API: splat / test / set.
40 impl<const $lanes: usize> $name<$lanes>
42 $type: crate::LanesAtMost64,
44 /// Construct a mask by setting all lanes to the given value.
45 pub fn splat(value: bool) -> Self {
55 /// Tests the value of the specified lane.
58 /// Panics if `lane` is greater than or equal to the number of lanes in the vector.
60 pub fn test(&self, lane: usize) -> bool {
61 assert!(lane < LANES, "lane index out of range")
65 /// Sets the value of the specified lane.
68 /// Panics if `lane` is greater than or equal to the number of lanes in the vector.
70 pub fn set(&mut self, lane: usize, value: bool) {
71 assert!(lane < LANES, "lane index out of range");
// Writes the lane's sentinel value; body elided — presumably -1 for true and
// 0 for false, matching the `TryFrom` check below. TODO confirm.
72 self.0[lane] = if value {
// `bool -> mask` is just a splat of the scalar into every lane.
80 impl<const $lanes: usize> core::convert::From<bool> for $name<$lanes>
82 $type: crate::LanesAtMost64,
84 fn from(value: bool) -> Self {
// Fallible conversion from a raw vector: accepted only when every lane is
// exactly 0 or -1 (all bits clear or all bits set); anything else yields
// `TryFromMaskError`.
89 impl<const $lanes: usize> core::convert::TryFrom<$type> for $name<$lanes>
91 $type: crate::LanesAtMost64,
93 type Error = TryFromMaskError;
94 fn try_from(value: $type) -> Result<Self, Self::Error> {
95 if value.as_slice().iter().all(|x| *x == 0 || *x == -1) {
98 Err(TryFromMaskError(()))
// The reverse direction (mask -> raw vector) is infallible: a valid mask is
// always a valid vector.
103 impl<const $lanes: usize> core::convert::From<$name<$lanes>> for $type
105 $type: crate::LanesAtMost64,
107 fn from(value: $name<$lanes>) -> Self {
// Widen a one-bit-per-lane `BitMask` into this full-width mask, lane by lane.
112 impl<const $lanes: usize> core::convert::From<crate::BitMask<$lanes>> for $name<$lanes>
114 $type: crate::LanesAtMost64,
115 crate::BitMask<$lanes>: crate::LanesAtMost64,
117 fn from(value: crate::BitMask<$lanes>) -> Self {
118 // TODO use an intrinsic to do this efficiently (with LLVM's sext instruction)
119 let mut mask = Self::splat(false);
120 for lane in 0..LANES {
121 mask.set(lane, value.test(lane));
// Narrow this full-width mask back into a one-bit-per-lane `BitMask`,
// lane by lane (same scalar loop as above, in the other direction).
127 impl<const $lanes: usize> core::convert::From<$name<$lanes>> for crate::BitMask<$lanes>
129 $type: crate::LanesAtMost64,
130 crate::BitMask<$lanes>: crate::LanesAtMost64,
132 fn from(value: $name<$lanes>) -> Self {
133 // TODO use an intrinsic to do this efficiently (with LLVM's trunc instruction)
134 let mut mask = Self::splat(false);
135 for lane in 0..LANES {
136 mask.set(lane, value.test(lane));
// Debug renders the mask as a sequence of booleans, one entry per lane.
142 impl<const $lanes: usize> core::fmt::Debug for $name<$lanes>
144 $type: crate::LanesAtMost64,
146 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
148 .entries((0..LANES).map(|lane| self.test(lane)))
// The radix formatters (Binary/Octal/LowerHex/UpperHex) all delegate to the
// inner vector's corresponding impl, exposing the raw lane bit patterns.
153 impl<const $lanes: usize> core::fmt::Binary for $name<$lanes>
155 $type: crate::LanesAtMost64,
157 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
158 core::fmt::Binary::fmt(&self.0, f)
162 impl<const $lanes: usize> core::fmt::Octal for $name<$lanes>
164 $type: crate::LanesAtMost64,
166 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
167 core::fmt::Octal::fmt(&self.0, f)
171 impl<const $lanes: usize> core::fmt::LowerHex for $name<$lanes>
173 $type: crate::LanesAtMost64,
175 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
176 core::fmt::LowerHex::fmt(&self.0, f)
180 impl<const $lanes: usize> core::fmt::UpperHex for $name<$lanes>
182 $type: crate::LanesAtMost64,
184 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
185 core::fmt::UpperHex::fmt(&self.0, f)
// Bitwise operators. Each of &, |, ^ comes in three flavors: mask op mask,
// mask op bool, and bool op mask — the scalar forms splat the bool into a
// full mask first and then reuse the mask-op-mask impl.
189 impl<const LANES: usize> core::ops::BitAnd for $name<LANES>
191 $type: crate::LanesAtMost64,
195 fn bitand(self, rhs: Self) -> Self {
200 impl<const LANES: usize> core::ops::BitAnd<bool> for $name<LANES>
202 $type: crate::LanesAtMost64,
206 fn bitand(self, rhs: bool) -> Self {
207 self & Self::splat(rhs)
211 impl<const LANES: usize> core::ops::BitAnd<$name<LANES>> for bool
213 $type: crate::LanesAtMost64,
215 type Output = $name<LANES>;
217 fn bitand(self, rhs: $name<LANES>) -> $name<LANES> {
218 $name::<LANES>::splat(self) & rhs
222 impl<const LANES: usize> core::ops::BitOr for $name<LANES>
224 $type: crate::LanesAtMost64,
228 fn bitor(self, rhs: Self) -> Self {
233 impl<const LANES: usize> core::ops::BitOr<bool> for $name<LANES>
235 $type: crate::LanesAtMost64,
239 fn bitor(self, rhs: bool) -> Self {
240 self | Self::splat(rhs)
244 impl<const LANES: usize> core::ops::BitOr<$name<LANES>> for bool
246 $type: crate::LanesAtMost64,
248 type Output = $name<LANES>;
250 fn bitor(self, rhs: $name<LANES>) -> $name<LANES> {
251 $name::<LANES>::splat(self) | rhs
255 impl<const LANES: usize> core::ops::BitXor for $name<LANES>
257 $type: crate::LanesAtMost64,
261 fn bitxor(self, rhs: Self) -> Self::Output {
266 impl<const LANES: usize> core::ops::BitXor<bool> for $name<LANES>
268 $type: crate::LanesAtMost64,
272 fn bitxor(self, rhs: bool) -> Self::Output {
273 self ^ Self::splat(rhs)
277 impl<const LANES: usize> core::ops::BitXor<$name<LANES>> for bool
279 $type: crate::LanesAtMost64,
281 type Output = $name<LANES>;
283 fn bitxor(self, rhs: $name<LANES>) -> Self::Output {
284 $name::<LANES>::splat(self) ^ rhs
// Logical negation of every lane.
288 impl<const LANES: usize> core::ops::Not for $name<LANES>
290 $type: crate::LanesAtMost64,
292 type Output = $name<LANES>;
294 fn not(self) -> Self::Output {
// In-place (compound-assignment) variants of the operators above; the bool
// forms again splat the scalar and reuse the mask-assign impls.
299 impl<const LANES: usize> core::ops::BitAndAssign for $name<LANES>
301 $type: crate::LanesAtMost64,
304 fn bitand_assign(&mut self, rhs: Self) {
309 impl<const LANES: usize> core::ops::BitAndAssign<bool> for $name<LANES>
311 $type: crate::LanesAtMost64,
314 fn bitand_assign(&mut self, rhs: bool) {
315 *self &= Self::splat(rhs);
319 impl<const LANES: usize> core::ops::BitOrAssign for $name<LANES>
321 $type: crate::LanesAtMost64,
324 fn bitor_assign(&mut self, rhs: Self) {
329 impl<const LANES: usize> core::ops::BitOrAssign<bool> for $name<LANES>
331 $type: crate::LanesAtMost64,
334 fn bitor_assign(&mut self, rhs: bool) {
335 *self |= Self::splat(rhs);
339 impl<const LANES: usize> core::ops::BitXorAssign for $name<LANES>
341 $type: crate::LanesAtMost64,
344 fn bitxor_assign(&mut self, rhs: Self) {
349 impl<const LANES: usize> core::ops::BitXorAssign<bool> for $name<LANES>
351 $type: crate::LanesAtMost64,
354 fn bitxor_assign(&mut self, rhs: bool) {
355 *self ^= Self::splat(rhs);
// Concrete mask types, presumably each wrapped in a `define_mask! { ... }`
// invocation whose call syntax and attributes are elided from this excerpt —
// the struct lines match the macro's input pattern exactly. One mask type per
// signed-integer element width, each wrapping the same-width SIMD vector.
362 /// A mask equivalent to [SimdI8](crate::SimdI8), where all bits in the lane must be either set
364 struct SimdMask8<const LANES: usize>(crate::SimdI8<LANES>);
368 /// A mask equivalent to [SimdI16](crate::SimdI16), where all bits in the lane must be either set
370 struct SimdMask16<const LANES: usize>(crate::SimdI16<LANES>);
374 /// A mask equivalent to [SimdI32](crate::SimdI32), where all bits in the lane must be either set
376 struct SimdMask32<const LANES: usize>(crate::SimdI32<LANES>);
380 /// A mask equivalent to [SimdI64](crate::SimdI64), where all bits in the lane must be either set
382 struct SimdMask64<const LANES: usize>(crate::SimdI64<LANES>);
386 /// A mask equivalent to [SimdI128](crate::SimdI128), where all bits in the lane must be either set
388 struct SimdMask128<const LANES: usize>(crate::SimdI128<LANES>);
392 /// A mask equivalent to [SimdIsize](crate::SimdIsize), where all bits in the lane must be either set
394 struct SimdMaskSize<const LANES: usize>(crate::SimdIsize<LANES>);