Diffstat (limited to 'library/stdarch/crates/core_arch/src/mips/msa.rs')
-rw-r--r--  library/stdarch/crates/core_arch/src/mips/msa.rs  295
1 file changed, 146 insertions(+), 149 deletions(-)
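The change is mechanical: the per-width assertion macros (static_assert_imm5!, static_assert_imm_s10!, and so on), formerly pulled in via `#[macro_use] mod macros;`, are replaced by the parametrized `static_assert_uimm_bits!(IMM, BITS)` and `static_assert_simm_bits!(IMM, BITS)` helpers, which take the bit width as a second argument. As a reference point, here is a minimal, self-contained Rust sketch of the range checks those asserts encode (not the actual stdarch macro definitions, which live in crates/core_arch/src/macros.rs and fail at compile time rather than at run time):

// Unsigned immediate: must satisfy 0 <= imm < 2^bits.
fn uimm_fits(imm: i32, bits: u32) -> bool {
    imm >= 0 && (imm as i64) < (1i64 << bits)
}

// Signed immediate: must satisfy -(2^(bits-1)) <= imm < 2^(bits-1).
fn simm_fits(imm: i32, bits: u32) -> bool {
    let half = 1i64 << (bits - 1);
    (imm as i64) >= -half && (imm as i64) < half
}

fn main() {
    // __msa_addvi_b takes a 5-bit unsigned immediate (0..=31).
    assert!(uimm_fits(0b10111, 5));
    assert!(!uimm_fits(32, 5));
    // __msa_ceqi_b takes a 5-bit signed immediate (-16..=15).
    assert!(simm_fits(-16, 5));
    assert!(!simm_fits(16, 5));
}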
diff --git a/library/stdarch/crates/core_arch/src/mips/msa.rs b/library/stdarch/crates/core_arch/src/mips/msa.rs
index cded73a54..3e93db85e 100644
--- a/library/stdarch/crates/core_arch/src/mips/msa.rs
+++ b/library/stdarch/crates/core_arch/src/mips/msa.rs
@@ -10,9 +10,6 @@ use stdarch_test::assert_instr;
use crate::mem;
-#[macro_use]
-mod macros;
-
types! {
/// MIPS-specific 128-bit wide vector of 16 packed `i8`.
pub struct v16i8(
@@ -1413,7 +1410,7 @@ pub unsafe fn __msa_addv_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(addvi.b, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_addvi_b<const IMM5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_addvi_b(a, IMM5)
}
@@ -1428,7 +1425,7 @@ pub unsafe fn __msa_addvi_b<const IMM5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(addvi.h, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_addvi_h<const IMM5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_addvi_h(a, IMM5)
}
@@ -1443,7 +1440,7 @@ pub unsafe fn __msa_addvi_h<const IMM5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(addvi.w, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_addvi_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_addvi_w(a, IMM5)
}
@@ -1458,7 +1455,7 @@ pub unsafe fn __msa_addvi_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(addvi.d, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_addvi_d<const IMM5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_addvi_d(a, IMM5)
}
@@ -1487,7 +1484,7 @@ pub unsafe fn __msa_and_v(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(andi.b, imm8 = 0b10010111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_andi_b<const IMM8: i32>(a: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_andi_b(a, IMM8)
}
@@ -1910,7 +1907,7 @@ pub unsafe fn __msa_bclr_d(a: v2u64, b: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(bclri.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bclri_b<const IMM3: i32>(a: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_bclri_b(a, IMM3)
}
@@ -1925,7 +1922,7 @@ pub unsafe fn __msa_bclri_b<const IMM3: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bclri.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bclri_h<const IMM4: i32>(a: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_bclri_h(a, IMM4)
}
@@ -1940,7 +1937,7 @@ pub unsafe fn __msa_bclri_h<const IMM4: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(bclri.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bclri_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_bclri_w(a, IMM5)
}
@@ -1955,7 +1952,7 @@ pub unsafe fn __msa_bclri_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(bclri.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bclri_d<const IMM6: i32>(a: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_bclri_d(a, IMM6)
}
@@ -2026,7 +2023,7 @@ pub unsafe fn __msa_binsl_d(a: v2u64, b: v2u64, c: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(binsli.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsli_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_binsli_b(a, mem::transmute(b), IMM3)
}
@@ -2041,7 +2038,7 @@ pub unsafe fn __msa_binsli_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(binsli.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsli_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_binsli_h(a, mem::transmute(b), IMM4)
}
@@ -2056,7 +2053,7 @@ pub unsafe fn __msa_binsli_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(binsli.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsli_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_binsli_w(a, mem::transmute(b), IMM5)
}
@@ -2071,7 +2068,7 @@ pub unsafe fn __msa_binsli_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(binsli.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsli_d<const IMM6: i32>(a: v2u64, b: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_binsli_d(a, mem::transmute(b), IMM6)
}
@@ -2142,7 +2139,7 @@ pub unsafe fn __msa_binsr_d(a: v2u64, b: v2u64, c: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(binsri.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsri_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_binsri_b(a, mem::transmute(b), IMM3)
}
@@ -2157,7 +2154,7 @@ pub unsafe fn __msa_binsri_b<const IMM3: i32>(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(binsri.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsri_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_binsri_h(a, mem::transmute(b), IMM4)
}
@@ -2172,7 +2169,7 @@ pub unsafe fn __msa_binsri_h<const IMM4: i32>(a: v8u16, b: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(binsri.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsri_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_binsri_w(a, mem::transmute(b), IMM5)
}
@@ -2187,7 +2184,7 @@ pub unsafe fn __msa_binsri_w<const IMM5: i32>(a: v4u32, b: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(binsri.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_binsri_d<const IMM6: i32>(a: v2u64, b: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_binsri_d(a, mem::transmute(b), IMM6)
}
@@ -2216,7 +2213,7 @@ pub unsafe fn __msa_bmnz_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bmnzi.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_bmnzi_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_bmnzi_b(a, mem::transmute(b), IMM8)
}
@@ -2245,7 +2242,7 @@ pub unsafe fn __msa_bmz_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bmzi.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_bmzi_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_bmzi_b(a, mem::transmute(b), IMM8)
}
@@ -2316,7 +2313,7 @@ pub unsafe fn __msa_bneg_d(a: v2u64, b: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(bnegi.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bnegi_b<const IMM3: i32>(a: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_bnegi_b(a, IMM3)
}
@@ -2331,7 +2328,7 @@ pub unsafe fn __msa_bnegi_b<const IMM3: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bnegi.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bnegi_h<const IMM4: i32>(a: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_bnegi_h(a, IMM4)
}
@@ -2346,7 +2343,7 @@ pub unsafe fn __msa_bnegi_h<const IMM4: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(bnegi.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bnegi_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_bnegi_w(a, IMM5)
}
@@ -2361,7 +2358,7 @@ pub unsafe fn __msa_bnegi_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(bnegi.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bnegi_d<const IMM6: i32>(a: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_bnegi_d(a, IMM6)
}
@@ -2446,7 +2443,7 @@ pub unsafe fn __msa_bsel_v(a: v16u8, b: v16u8, c: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bseli.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_bseli_b<const IMM8: i32>(a: v16u8, b: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_bseli_b(a, mem::transmute(b), IMM8)
}
@@ -2517,7 +2514,7 @@ pub unsafe fn __msa_bset_d(a: v2u64, b: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(bseti.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bseti_b<const IMM3: i32>(a: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_bseti_b(a, IMM3)
}
@@ -2532,7 +2529,7 @@ pub unsafe fn __msa_bseti_b<const IMM3: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(bseti.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bseti_h<const IMM4: i32>(a: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_bseti_h(a, IMM4)
}
@@ -2547,7 +2544,7 @@ pub unsafe fn __msa_bseti_h<const IMM4: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(bseti.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bseti_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_bseti_w(a, IMM5)
}
@@ -2562,7 +2559,7 @@ pub unsafe fn __msa_bseti_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(bseti.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_bseti_d<const IMM6: i32>(a: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_bseti_d(a, IMM6)
}
@@ -2685,7 +2682,7 @@ pub unsafe fn __msa_ceq_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(ceqi.b, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ceqi_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_ceqi_b(a, IMM_S5)
}
@@ -2700,7 +2697,7 @@ pub unsafe fn __msa_ceqi_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(ceqi.h, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ceqi_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_ceqi_h(a, IMM_S5)
}
@@ -2715,7 +2712,7 @@ pub unsafe fn __msa_ceqi_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(ceqi.w, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ceqi_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_ceqi_w(a, IMM_S5)
}
@@ -2730,7 +2727,7 @@ pub unsafe fn __msa_ceqi_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(ceqi.d, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ceqi_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_ceqi_d(a, IMM_S5)
}
@@ -2744,7 +2741,7 @@ pub unsafe fn __msa_ceqi_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(cfcmsa, imm5 = 0b11111))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_cfcmsa<const IMM5: i32>() -> i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_cfcmsa(IMM5)
}
@@ -2872,7 +2869,7 @@ pub unsafe fn __msa_cle_u_d(a: v2u64, b: v2u64) -> v2i64 {
#[cfg_attr(test, assert_instr(clei_s.b, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clei_s_b(a, IMM_S5)
}
@@ -2888,7 +2885,7 @@ pub unsafe fn __msa_clei_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(clei_s.h, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clei_s_h(a, IMM_S5)
}
@@ -2904,7 +2901,7 @@ pub unsafe fn __msa_clei_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(clei_s.w, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clei_s_w(a, IMM_S5)
}
@@ -2920,7 +2917,7 @@ pub unsafe fn __msa_clei_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(clei_s.d, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clei_s_d(a, IMM_S5)
}
@@ -2936,7 +2933,7 @@ pub unsafe fn __msa_clei_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(clei_u.b, imm5 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_u_b<const IMM5: i32>(a: v16u8) -> v16i8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clei_u_b(a, IMM5)
}
@@ -2952,7 +2949,7 @@ pub unsafe fn __msa_clei_u_b<const IMM5: i32>(a: v16u8) -> v16i8 {
#[cfg_attr(test, assert_instr(clei_u.h, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_u_h<const IMM5: i32>(a: v8u16) -> v8i16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clei_u_h(a, IMM5)
}
@@ -2968,7 +2965,7 @@ pub unsafe fn __msa_clei_u_h<const IMM5: i32>(a: v8u16) -> v8i16 {
#[cfg_attr(test, assert_instr(clei_u.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_u_w<const IMM5: i32>(a: v4u32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clei_u_w(a, IMM5)
}
@@ -2984,7 +2981,7 @@ pub unsafe fn __msa_clei_u_w<const IMM5: i32>(a: v4u32) -> v4i32 {
#[cfg_attr(test, assert_instr(clei_u.d, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clei_u_d<const IMM5: i32>(a: v2u64) -> v2i64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clei_u_d(a, IMM5)
}
@@ -3112,7 +3109,7 @@ pub unsafe fn __msa_clt_u_d(a: v2u64, b: v2u64) -> v2i64 {
#[cfg_attr(test, assert_instr(clti_s.b, imm_s5 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clti_s_b(a, IMM_S5)
}
@@ -3128,7 +3125,7 @@ pub unsafe fn __msa_clti_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(clti_s.h, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clti_s_h(a, IMM_S5)
}
@@ -3144,7 +3141,7 @@ pub unsafe fn __msa_clti_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(clti_s.w, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clti_s_w(a, IMM_S5)
}
@@ -3160,7 +3157,7 @@ pub unsafe fn __msa_clti_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(clti_s.d, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_clti_s_d(a, IMM_S5)
}
@@ -3176,7 +3173,7 @@ pub unsafe fn __msa_clti_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(clti_u.b, imm5 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_u_b<const IMM5: i32>(a: v16u8) -> v16i8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clti_u_b(a, IMM5)
}
@@ -3192,7 +3189,7 @@ pub unsafe fn __msa_clti_u_b<const IMM5: i32>(a: v16u8) -> v16i8 {
#[cfg_attr(test, assert_instr(clti_u.h, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_u_h<const IMM5: i32>(a: v8u16) -> v8i16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clti_u_h(a, IMM5)
}
@@ -3208,7 +3205,7 @@ pub unsafe fn __msa_clti_u_h<const IMM5: i32>(a: v8u16) -> v8i16 {
#[cfg_attr(test, assert_instr(clti_u.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_u_w<const IMM5: i32>(a: v4u32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clti_u_w(a, IMM5)
}
@@ -3224,7 +3221,7 @@ pub unsafe fn __msa_clti_u_w<const IMM5: i32>(a: v4u32) -> v4i32 {
#[cfg_attr(test, assert_instr(clti_u.d, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_clti_u_d<const IMM5: i32>(a: v2u64) -> v2i64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_clti_u_d(a, IMM5)
}
@@ -3238,7 +3235,7 @@ pub unsafe fn __msa_clti_u_d<const IMM5: i32>(a: v2u64) -> v2i64 {
#[cfg_attr(test, assert_instr(copy_s.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_s_b<const IMM4: i32>(a: v16i8) -> i32 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_copy_s_b(a, IMM4)
}
@@ -3252,7 +3249,7 @@ pub unsafe fn __msa_copy_s_b<const IMM4: i32>(a: v16i8) -> i32 {
#[cfg_attr(test, assert_instr(copy_s.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_s_h<const IMM3: i32>(a: v8i16) -> i32 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_copy_s_h(a, IMM3)
}
@@ -3266,7 +3263,7 @@ pub unsafe fn __msa_copy_s_h<const IMM3: i32>(a: v8i16) -> i32 {
#[cfg_attr(test, assert_instr(copy_s.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_s_w<const IMM2: i32>(a: v4i32) -> i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_copy_s_w(a, IMM2)
}
@@ -3280,7 +3277,7 @@ pub unsafe fn __msa_copy_s_w<const IMM2: i32>(a: v4i32) -> i32 {
#[cfg_attr(test, assert_instr(copy_s.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_s_d<const IMM1: i32>(a: v2i64) -> i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_copy_s_d(a, IMM1)
}
@@ -3294,7 +3291,7 @@ pub unsafe fn __msa_copy_s_d<const IMM1: i32>(a: v2i64) -> i64 {
#[cfg_attr(test, assert_instr(copy_u.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_u_b<const IMM4: i32>(a: v16i8) -> u32 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_copy_u_b(a, IMM4)
}
@@ -3308,7 +3305,7 @@ pub unsafe fn __msa_copy_u_b<const IMM4: i32>(a: v16i8) -> u32 {
#[cfg_attr(test, assert_instr(copy_u.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_u_h<const IMM3: i32>(a: v8i16) -> u32 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_copy_u_h(a, IMM3)
}
@@ -3322,7 +3319,7 @@ pub unsafe fn __msa_copy_u_h<const IMM3: i32>(a: v8i16) -> u32 {
#[cfg_attr(test, assert_instr(copy_u.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_u_w<const IMM2: i32>(a: v4i32) -> u32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_copy_u_w(a, IMM2)
}
@@ -3336,7 +3333,7 @@ pub unsafe fn __msa_copy_u_w<const IMM2: i32>(a: v4i32) -> u32 {
#[cfg_attr(test, assert_instr(copy_u.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_copy_u_d<const IMM1: i32>(a: v2i64) -> u64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_copy_u_d(a, IMM1)
}
@@ -3352,7 +3349,7 @@ pub unsafe fn __msa_copy_u_d<const IMM1: i32>(a: v2i64) -> u64 {
#[cfg_attr(test, assert_instr(ctcmsa, imm1 = 0b1))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_ctcmsa<const IMM5: i32>(a: i32) -> () {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_ctcmsa(IMM5, a)
}
@@ -5568,7 +5565,7 @@ pub unsafe fn __msa_ilvr_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(insert.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insert_b<const IMM4: i32>(a: v16i8, c: i32) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_insert_b(a, IMM4, c)
}
@@ -5583,7 +5580,7 @@ pub unsafe fn __msa_insert_b<const IMM4: i32>(a: v16i8, c: i32) -> v16i8 {
#[cfg_attr(test, assert_instr(insert.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insert_h<const IMM3: i32>(a: v8i16, c: i32) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_insert_h(a, IMM3, c)
}
@@ -5598,7 +5595,7 @@ pub unsafe fn __msa_insert_h<const IMM3: i32>(a: v8i16, c: i32) -> v8i16 {
#[cfg_attr(test, assert_instr(insert.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insert_w<const IMM2: i32>(a: v4i32, c: i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_insert_w(a, IMM2, c)
}
@@ -5613,7 +5610,7 @@ pub unsafe fn __msa_insert_w<const IMM2: i32>(a: v4i32, c: i32) -> v4i32 {
#[cfg_attr(test, assert_instr(insert.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insert_d<const IMM1: i32>(a: v2i64, c: i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_insert_d(a, IMM1, c)
}
@@ -5628,7 +5625,7 @@ pub unsafe fn __msa_insert_d<const IMM1: i32>(a: v2i64, c: i64) -> v2i64 {
#[cfg_attr(test, assert_instr(insve.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insve_b<const IMM4: i32>(a: v16i8, c: v16i8) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_insve_b(a, IMM4, c)
}
@@ -5643,7 +5640,7 @@ pub unsafe fn __msa_insve_b<const IMM4: i32>(a: v16i8, c: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(insve.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insve_h<const IMM3: i32>(a: v8i16, c: v8i16) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_insve_h(a, IMM3, c)
}
@@ -5658,7 +5655,7 @@ pub unsafe fn __msa_insve_h<const IMM3: i32>(a: v8i16, c: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(insve.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insve_w<const IMM2: i32>(a: v4i32, c: v4i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_insve_w(a, IMM2, c)
}
@@ -5673,7 +5670,7 @@ pub unsafe fn __msa_insve_w<const IMM2: i32>(a: v4i32, c: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(insve.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_insve_d<const IMM1: i32>(a: v2i64, c: v2i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_insve_d(a, IMM1, c)
}
@@ -5688,7 +5685,7 @@ pub unsafe fn __msa_insve_d<const IMM1: i32>(a: v2i64, c: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(ld.b, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ld_b<const IMM_S10: i32>(mem_addr: *mut u8) -> v16i8 {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_ld_b(mem_addr, IMM_S10)
}
@@ -5703,8 +5700,8 @@ pub unsafe fn __msa_ld_b<const IMM_S10: i32>(mem_addr: *mut u8) -> v16i8 {
#[cfg_attr(test, assert_instr(ld.h, imm_s11 = 0b11111111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ld_h<const IMM_S11: i32>(mem_addr: *mut u8) -> v8i16 {
- static_assert_imm_s11!(IMM_S11);
- static_assert!(IMM_S11: i32 where IMM_S11 % 2 == 0);
+ static_assert_simm_bits!(IMM_S11, 11);
+ static_assert!(IMM_S11 % 2 == 0);
msa_ld_h(mem_addr, IMM_S11)
}
@@ -5719,8 +5716,8 @@ pub unsafe fn __msa_ld_h<const IMM_S11: i32>(mem_addr: *mut u8) -> v8i16 {
#[cfg_attr(test, assert_instr(ld.w, imm_s12 = 0b111111111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ld_w<const IMM_S12: i32>(mem_addr: *mut u8) -> v4i32 {
- static_assert_imm_s12!(IMM_S12);
- static_assert!(IMM_S12: i32 where IMM_S12 % 4 == 0);
+ static_assert_simm_bits!(IMM_S12, 12);
+ static_assert!(IMM_S12 % 4 == 0);
msa_ld_w(mem_addr, IMM_S12)
}
@@ -5735,8 +5732,8 @@ pub unsafe fn __msa_ld_w<const IMM_S12: i32>(mem_addr: *mut u8) -> v4i32 {
#[cfg_attr(test, assert_instr(ld.d, imm_s13 = 0b1111111111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ld_d<const IMM_S13: i32>(mem_addr: *mut u8) -> v2i64 {
- static_assert_imm_s13!(IMM_S13);
- static_assert!(IMM_S13: i32 where IMM_S13 % 8 == 0);
+ static_assert_simm_bits!(IMM_S13, 13);
+ static_assert!(IMM_S13 % 8 == 0);
msa_ld_d(mem_addr, IMM_S13)
}
@@ -5751,7 +5748,7 @@ pub unsafe fn __msa_ld_d<const IMM_S13: i32>(mem_addr: *mut u8) -> v2i64 {
#[cfg_attr(test, assert_instr(ldi.b, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_ldi_b<const IMM_S10: i32>() -> v16i8 {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_ldi_b(IMM_S10)
}
@@ -5766,7 +5763,7 @@ pub unsafe fn __msa_ldi_b<const IMM_S10: i32>() -> v16i8 {
#[cfg_attr(test, assert_instr(ldi.h, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_ldi_h<const IMM_S10: i32>() -> v8i16 {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_ldi_h(IMM_S10)
}
@@ -5781,7 +5778,7 @@ pub unsafe fn __msa_ldi_h<const IMM_S10: i32>() -> v8i16 {
#[cfg_attr(test, assert_instr(ldi.w, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_ldi_w<const IMM_S10: i32>() -> v4i32 {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_ldi_w(IMM_S10)
}
@@ -5796,7 +5793,7 @@ pub unsafe fn __msa_ldi_w<const IMM_S10: i32>() -> v4i32 {
#[cfg_attr(test, assert_instr(ldi.d, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(0)]
pub unsafe fn __msa_ldi_d<const IMM_S10: i32>() -> v2i64 {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_ldi_d(IMM_S10)
}
@@ -6087,7 +6084,7 @@ pub unsafe fn __msa_max_u_d(a: v2u64, b: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(maxi_s.b, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_maxi_s_b(a, IMM_S5)
}
@@ -6102,7 +6099,7 @@ pub unsafe fn __msa_maxi_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(maxi_s.h, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_maxi_s_h(a, IMM_S5)
}
@@ -6117,7 +6114,7 @@ pub unsafe fn __msa_maxi_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(maxi_s.w, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_maxi_s_w(a, IMM_S5)
}
@@ -6132,7 +6129,7 @@ pub unsafe fn __msa_maxi_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(maxi_s.d, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_maxi_s_d(a, IMM_S5)
}
@@ -6147,7 +6144,7 @@ pub unsafe fn __msa_maxi_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(maxi_u.b, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_u_b<const IMM5: i32>(a: v16u8) -> v16u8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_maxi_u_b(a, IMM5)
}
@@ -6162,7 +6159,7 @@ pub unsafe fn __msa_maxi_u_b<const IMM5: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(maxi_u.h, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_u_h<const IMM5: i32>(a: v8u16) -> v8u16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_maxi_u_h(a, IMM5)
}
@@ -6177,7 +6174,7 @@ pub unsafe fn __msa_maxi_u_h<const IMM5: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(maxi_u.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_maxi_u_w(a, IMM5)
}
@@ -6192,7 +6189,7 @@ pub unsafe fn __msa_maxi_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(maxi_u.d, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_maxi_u_d<const IMM5: i32>(a: v2u64) -> v2u64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_maxi_u_d(a, IMM5)
}
@@ -6315,7 +6312,7 @@ pub unsafe fn __msa_min_s_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(mini_s.b, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_mini_s_b(a, IMM_S5)
}
@@ -6330,7 +6327,7 @@ pub unsafe fn __msa_mini_s_b<const IMM_S5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(mini_s.h, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_mini_s_h(a, IMM_S5)
}
@@ -6345,7 +6342,7 @@ pub unsafe fn __msa_mini_s_h<const IMM_S5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(mini_s.w, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_mini_s_w(a, IMM_S5)
}
@@ -6360,7 +6357,7 @@ pub unsafe fn __msa_mini_s_w<const IMM_S5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(mini_s.d, imm_s5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_s_d<const IMM_S5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm_s5!(IMM_S5);
+ static_assert_simm_bits!(IMM_S5, 5);
msa_mini_s_d(a, IMM_S5)
}
@@ -6427,7 +6424,7 @@ pub unsafe fn __msa_min_u_d(a: v2u64, b: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(mini_u.b, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_u_b<const IMM5: i32>(a: v16u8) -> v16u8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_mini_u_b(a, IMM5)
}
@@ -6442,7 +6439,7 @@ pub unsafe fn __msa_mini_u_b<const IMM5: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(mini_u.h, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_u_h<const IMM5: i32>(a: v8u16) -> v8u16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_mini_u_h(a, IMM5)
}
@@ -6457,7 +6454,7 @@ pub unsafe fn __msa_mini_u_h<const IMM5: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(mini_u.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_mini_u_w(a, IMM5)
}
@@ -6472,7 +6469,7 @@ pub unsafe fn __msa_mini_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(mini_u.d, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_mini_u_d<const IMM5: i32>(a: v2u64) -> v2u64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_mini_u_d(a, IMM5)
}
@@ -6958,7 +6955,7 @@ pub unsafe fn __msa_nor_v(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(nori.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_nori_b<const IMM8: i32>(a: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_nori_b(a, IMM8)
}
@@ -6988,7 +6985,7 @@ pub unsafe fn __msa_or_v(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(ori.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_ori_b<const IMM8: i32>(a: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_ori_b(a, IMM8)
}
@@ -7155,7 +7152,7 @@ pub unsafe fn __msa_pcnt_d(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(sat_s.b, imm4 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_s_b<const IMM3: i32>(a: v16i8) -> v16i8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_sat_s_b(a, IMM3)
}
@@ -7170,7 +7167,7 @@ pub unsafe fn __msa_sat_s_b<const IMM3: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(sat_s.h, imm3 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_s_h<const IMM4: i32>(a: v8i16) -> v8i16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_sat_s_h(a, IMM4)
}
@@ -7185,7 +7182,7 @@ pub unsafe fn __msa_sat_s_h<const IMM4: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(sat_s.w, imm2 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_s_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_sat_s_w(a, IMM5)
}
@@ -7200,7 +7197,7 @@ pub unsafe fn __msa_sat_s_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(sat_s.d, imm1 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_s_d<const IMM6: i32>(a: v2i64) -> v2i64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_sat_s_d(a, IMM6)
}
@@ -7215,7 +7212,7 @@ pub unsafe fn __msa_sat_s_d<const IMM6: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(sat_u.b, imm4 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_u_b<const IMM3: i32>(a: v16u8) -> v16u8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_sat_u_b(a, IMM3)
}
@@ -7230,7 +7227,7 @@ pub unsafe fn __msa_sat_u_b<const IMM3: i32>(a: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(sat_u.h, imm3 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_u_h<const IMM4: i32>(a: v8u16) -> v8u16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_sat_u_h(a, IMM4)
}
@@ -7245,7 +7242,7 @@ pub unsafe fn __msa_sat_u_h<const IMM4: i32>(a: v8u16) -> v8u16 {
#[cfg_attr(test, assert_instr(sat_u.w, imm2 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_sat_u_w(a, IMM5)
}
@@ -7260,7 +7257,7 @@ pub unsafe fn __msa_sat_u_w<const IMM5: i32>(a: v4u32) -> v4u32 {
#[cfg_attr(test, assert_instr(sat_u.d, imm1 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_sat_u_d<const IMM6: i32>(a: v2u64) -> v2u64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_sat_u_d(a, IMM6)
}
@@ -7276,7 +7273,7 @@ pub unsafe fn __msa_sat_u_d<const IMM6: i32>(a: v2u64) -> v2u64 {
#[cfg_attr(test, assert_instr(shf.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_shf_b<const IMM8: i32>(a: v16i8) -> v16i8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_shf_b(a, IMM8)
}
@@ -7292,7 +7289,7 @@ pub unsafe fn __msa_shf_b<const IMM8: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(shf.h, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_shf_h<const IMM8: i32>(a: v8i16) -> v8i16 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_shf_h(a, IMM8)
}
@@ -7308,7 +7305,7 @@ pub unsafe fn __msa_shf_h<const IMM8: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(shf.w, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_shf_w<const IMM8: i32>(a: v4i32) -> v4i32 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_shf_w(a, IMM8)
}
@@ -7408,7 +7405,7 @@ pub unsafe fn __msa_sld_d(a: v2i64, b: v2i64, c: i32) -> v2i64 {
#[cfg_attr(test, assert_instr(sldi.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_sldi_b<const IMM4: i32>(a: v16i8, b: v16i8) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_sldi_b(a, mem::transmute(b), IMM4)
}
@@ -7428,7 +7425,7 @@ pub unsafe fn __msa_sldi_b<const IMM4: i32>(a: v16i8, b: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(sldi.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_sldi_h<const IMM3: i32>(a: v8i16, b: v8i16) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_sldi_h(a, mem::transmute(b), IMM3)
}
@@ -7448,7 +7445,7 @@ pub unsafe fn __msa_sldi_h<const IMM3: i32>(a: v8i16, b: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(sldi.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_sldi_w<const IMM2: i32>(a: v4i32, b: v4i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_sldi_w(a, mem::transmute(b), IMM2)
}
@@ -7468,7 +7465,7 @@ pub unsafe fn __msa_sldi_w<const IMM2: i32>(a: v4i32, b: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(sldi.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_sldi_d<const IMM1: i32>(a: v2i64, b: v2i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_sldi_d(a, mem::transmute(b), IMM1)
}
@@ -7539,7 +7536,7 @@ pub unsafe fn __msa_sll_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(slli.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_slli_b<const IMM4: i32>(a: v16i8) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_slli_b(a, IMM4)
}
@@ -7554,7 +7551,7 @@ pub unsafe fn __msa_slli_b<const IMM4: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(slli.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_slli_h<const IMM3: i32>(a: v8i16) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_slli_h(a, IMM3)
}
@@ -7569,7 +7566,7 @@ pub unsafe fn __msa_slli_h<const IMM3: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(slli.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_slli_w<const IMM2: i32>(a: v4i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_slli_w(a, IMM2)
}
@@ -7584,7 +7581,7 @@ pub unsafe fn __msa_slli_w<const IMM2: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(slli.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_slli_d<const IMM1: i32>(a: v2i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_slli_d(a, IMM1)
}
@@ -7654,7 +7651,7 @@ pub unsafe fn __msa_splat_d(a: v2i64, b: i32) -> v2i64 {
#[cfg_attr(test, assert_instr(splati.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_splati_b<const IMM4: i32>(a: v16i8) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_splati_b(a, IMM4)
}
@@ -7668,7 +7665,7 @@ pub unsafe fn __msa_splati_b<const IMM4: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(splati.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_splati_h<const IMM3: i32>(a: v8i16) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_splati_h(a, IMM3)
}
@@ -7682,7 +7679,7 @@ pub unsafe fn __msa_splati_h<const IMM3: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(splati.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_splati_w<const IMM2: i32>(a: v4i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_splati_w(a, IMM2)
}
@@ -7696,7 +7693,7 @@ pub unsafe fn __msa_splati_w<const IMM2: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(splati.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_splati_d<const IMM1: i32>(a: v2i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_splati_d(a, IMM1)
}
@@ -7767,7 +7764,7 @@ pub unsafe fn __msa_sra_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(srai.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srai_b<const IMM3: i32>(a: v16i8) -> v16i8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_srai_b(a, IMM3)
}
@@ -7782,7 +7779,7 @@ pub unsafe fn __msa_srai_b<const IMM3: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(srai.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srai_h<const IMM4: i32>(a: v8i16) -> v8i16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_srai_h(a, IMM4)
}
@@ -7797,7 +7794,7 @@ pub unsafe fn __msa_srai_h<const IMM4: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(srai.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srai_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_srai_w(a, IMM5)
}
@@ -7812,7 +7809,7 @@ pub unsafe fn __msa_srai_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(srai.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srai_d<const IMM6: i32>(a: v2i64) -> v2i64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_srai_d(a, IMM6)
}
@@ -7888,7 +7885,7 @@ pub unsafe fn __msa_srar_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(srari.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srari_b<const IMM3: i32>(a: v16i8) -> v16i8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_srari_b(a, IMM3)
}
@@ -7904,7 +7901,7 @@ pub unsafe fn __msa_srari_b<const IMM3: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(srari.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srari_h<const IMM4: i32>(a: v8i16) -> v8i16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_srari_h(a, IMM4)
}
@@ -7920,7 +7917,7 @@ pub unsafe fn __msa_srari_h<const IMM4: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(srari.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srari_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_srari_w(a, IMM5)
}
@@ -7936,7 +7933,7 @@ pub unsafe fn __msa_srari_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(srari.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srari_d<const IMM6: i32>(a: v2i64) -> v2i64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_srari_d(a, IMM6)
}
@@ -8007,7 +8004,7 @@ pub unsafe fn __msa_srl_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(srli.b, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srli_b<const IMM4: i32>(a: v16i8) -> v16i8 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_srli_b(a, IMM4)
}
@@ -8022,7 +8019,7 @@ pub unsafe fn __msa_srli_b<const IMM4: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(srli.h, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srli_h<const IMM3: i32>(a: v8i16) -> v8i16 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_srli_h(a, IMM3)
}
@@ -8037,7 +8034,7 @@ pub unsafe fn __msa_srli_h<const IMM3: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(srli.w, imm2 = 0b11))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srli_w<const IMM2: i32>(a: v4i32) -> v4i32 {
- static_assert_imm2!(IMM2);
+ static_assert_uimm_bits!(IMM2, 2);
msa_srli_w(a, IMM2)
}
@@ -8052,7 +8049,7 @@ pub unsafe fn __msa_srli_w<const IMM2: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(srli.d, imm1 = 0b1))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srli_d<const IMM1: i32>(a: v2i64) -> v2i64 {
- static_assert_imm1!(IMM1);
+ static_assert_uimm_bits!(IMM1, 1);
msa_srli_d(a, IMM1)
}
@@ -8128,7 +8125,7 @@ pub unsafe fn __msa_srlr_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(srlri.b, imm3 = 0b111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srlri_b<const IMM3: i32>(a: v16i8) -> v16i8 {
- static_assert_imm3!(IMM3);
+ static_assert_uimm_bits!(IMM3, 3);
msa_srlri_b(a, IMM3)
}
@@ -8144,7 +8141,7 @@ pub unsafe fn __msa_srlri_b<const IMM3: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(srlri.h, imm4 = 0b1111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srlri_h<const IMM4: i32>(a: v8i16) -> v8i16 {
- static_assert_imm4!(IMM4);
+ static_assert_uimm_bits!(IMM4, 4);
msa_srlri_h(a, IMM4)
}
@@ -8160,7 +8157,7 @@ pub unsafe fn __msa_srlri_h<const IMM4: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(srlri.w, imm5 = 0b11111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srlri_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_srlri_w(a, IMM5)
}
@@ -8176,7 +8173,7 @@ pub unsafe fn __msa_srlri_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(srlri.d, imm6 = 0b111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_srlri_d<const IMM6: i32>(a: v2i64) -> v2i64 {
- static_assert_imm6!(IMM6);
+ static_assert_uimm_bits!(IMM6, 6);
msa_srlri_d(a, IMM6)
}
@@ -8191,7 +8188,7 @@ pub unsafe fn __msa_srlri_d<const IMM6: i32>(a: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(st.b, imm_s10 = 0b1111111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_st_b<const IMM_S10: i32>(a: v16i8, mem_addr: *mut u8) -> () {
- static_assert_imm_s10!(IMM_S10);
+ static_assert_simm_bits!(IMM_S10, 10);
msa_st_b(a, mem_addr, IMM_S10)
}
@@ -8206,8 +8203,8 @@ pub unsafe fn __msa_st_b<const IMM_S10: i32>(a: v16i8, mem_addr: *mut u8) -> ()
#[cfg_attr(test, assert_instr(st.h, imm_s11 = 0b11111111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_st_h<const IMM_S11: i32>(a: v8i16, mem_addr: *mut u8) -> () {
- static_assert_imm_s11!(IMM_S11);
- static_assert!(IMM_S11: i32 where IMM_S11 % 2 == 0);
+ static_assert_simm_bits!(IMM_S11, 11);
+ static_assert!(IMM_S11 % 2 == 0);
msa_st_h(a, mem_addr, IMM_S11)
}
@@ -8222,8 +8219,8 @@ pub unsafe fn __msa_st_h<const IMM_S11: i32>(a: v8i16, mem_addr: *mut u8) -> ()
#[cfg_attr(test, assert_instr(st.w, imm_s12 = 0b111111111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_st_w<const IMM_S12: i32>(a: v4i32, mem_addr: *mut u8) -> () {
- static_assert_imm_s12!(IMM_S12);
- static_assert!(IMM_S12: i32 where IMM_S12 % 4 == 0);
+ static_assert_simm_bits!(IMM_S12, 12);
+ static_assert!(IMM_S12 % 4 == 0);
msa_st_w(a, mem_addr, IMM_S12)
}
@@ -8238,8 +8235,8 @@ pub unsafe fn __msa_st_w<const IMM_S12: i32>(a: v4i32, mem_addr: *mut u8) -> ()
#[cfg_attr(test, assert_instr(st.d, imm_s13 = 0b1111111111111))]
#[rustc_legacy_const_generics(2)]
pub unsafe fn __msa_st_d<const IMM_S13: i32>(a: v2i64, mem_addr: *mut u8) -> () {
- static_assert_imm_s13!(IMM_S13);
- static_assert!(IMM_S13: i32 where IMM_S13 % 8 == 0);
+ static_assert_simm_bits!(IMM_S13, 13);
+ static_assert!(IMM_S13 % 8 == 0);
msa_st_d(a, mem_addr, IMM_S13)
}
@@ -8530,7 +8527,7 @@ pub unsafe fn __msa_subv_d(a: v2i64, b: v2i64) -> v2i64 {
#[cfg_attr(test, assert_instr(subvi.b, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_subvi_b<const IMM5: i32>(a: v16i8) -> v16i8 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_subvi_b(a, IMM5)
}
@@ -8545,7 +8542,7 @@ pub unsafe fn __msa_subvi_b<const IMM5: i32>(a: v16i8) -> v16i8 {
#[cfg_attr(test, assert_instr(subvi.h, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_subvi_h<const IMM5: i32>(a: v8i16) -> v8i16 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_subvi_h(a, IMM5)
}
@@ -8560,7 +8557,7 @@ pub unsafe fn __msa_subvi_h<const IMM5: i32>(a: v8i16) -> v8i16 {
#[cfg_attr(test, assert_instr(subvi.w, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_subvi_w<const IMM5: i32>(a: v4i32) -> v4i32 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_subvi_w(a, IMM5)
}
@@ -8575,7 +8572,7 @@ pub unsafe fn __msa_subvi_w<const IMM5: i32>(a: v4i32) -> v4i32 {
#[cfg_attr(test, assert_instr(subvi.d, imm5 = 0b10111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_subvi_d<const IMM5: i32>(a: v2i64) -> v2i64 {
- static_assert_imm5!(IMM5);
+ static_assert_uimm_bits!(IMM5, 5);
msa_subvi_d(a, IMM5)
}
@@ -8673,7 +8670,7 @@ pub unsafe fn __msa_xor_v(a: v16u8, b: v16u8) -> v16u8 {
#[cfg_attr(test, assert_instr(xori.b, imm8 = 0b11111111))]
#[rustc_legacy_const_generics(1)]
pub unsafe fn __msa_xori_b<const IMM8: i32>(a: v16u8) -> v16u8 {
- static_assert_imm8!(IMM8);
+ static_assert_uimm_bits!(IMM8, 8);
msa_xori_b(a, IMM8)
}
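Usage note: because of `#[rustc_legacy_const_generics(1)]`, callers pass the immediate as an ordinary trailing argument, mirroring the C intrinsic, and the new assert rejects out-of-range values at compile time. A hedged sketch, assuming a nightly toolchain targeting MIPS with the `msa` target feature enabled and the intrinsics in scope from core::arch::mips:

use core::arch::mips::{__msa_addvi_b, v16i8};

// Adds the 5-bit unsigned immediate 7 to every i8 lane. The legacy const
// generics attribute lets the immediate be written positionally;
// static_assert_uimm_bits!(IMM5, 5) would reject a value such as 32 at
// compile time.
unsafe fn add_seven(v: v16i8) -> v16i8 {
    __msa_addvi_b(v, 7)
}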