From 2a6cf40dd457555cd728e63d110d9b2979f7527b Mon Sep 17 00:00:00 2001 From: Kiva Date: Tue, 16 Jan 2024 08:55:36 +0800 Subject: [PATCH] [LLVM][Clang][XTHeadVector] Add indexed load/store intrinsics (#54) * [Clang][XTHeadVector] Rename multiclass to avoid confusion * [Clang][XTHeadVector] Add `vlxb`, `vlxh`, `vlxw` and corresponding unsigned version * [Clang][XTHeadVector] Add `vloxei` intrinsic * [Clang][XTHeadVector] Add `vsoxei` intrinsic * [Clang][XTHeadVector] Rename * [Clang][XTHeadVector] Add `vsxb`, `vsxh`, `vsxw` * [Clang][XTHeadVector] Fix typo * [Clang][XTHeadVector] Fix multiclass * [Clang][XTHeadVector] Add todo * [Clang][XTHeadVector] Use `RVVOutOp1Builtin` * [Clang][XTHeadVector] Add simple handcrafted tests * [Clang][XTHeadVector] Add generate tests * [Clang][XTHeadVector] Add generate tests * [Clang][XTHeadVector] Add wrapper macros * [Clang][XTHeadVector] Add more tests * [NFC][XTHeadVector] Update README --- README.md | 1 + .../clang/Basic/riscv_vector_xtheadv.td | 162 ++- .../Basic/riscv_vector_xtheadv_wrappers.td | 198 ++++ .../indexed/thead/vloxei16.c | 336 ++++++ .../indexed/thead/vloxei32.c | 346 +++++++ .../indexed/thead/vloxei64.c | 296 ++++++ .../indexed/thead/vloxei8.c | 266 +++++ .../indexed/thead/vlx.c | 966 ++++++++++++++++++ .../indexed/thead/vsoxei16.c | 336 ++++++ .../indexed/thead/vsoxei32.c | 347 +++++++ .../indexed/thead/vsoxei64.c | 296 ++++++ .../indexed/thead/vsoxei8.c | 266 +++++ .../indexed/thead/vsx.c | 966 ++++++++++++++++++ .../indexed/wrappers/vlx.c | 966 ++++++++++++++++++ .../indexed/wrappers/vsx.c | 966 ++++++++++++++++++ 15 files changed, 6698 insertions(+), 16 deletions(-) create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c create mode 100644 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c diff --git a/README.md b/README.md index 57e2f0aef54184..ad990691c54dcc 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,7 @@ Any feature not listed below but present in the specification should be consider - (WIP) `7. Vector Load/Store` - (Done) `7.1. Vector Unit-Stride Operations` - (Done) `7.2. Vector Strided Load/Store Operations` + - (Done) `7.3. 
Vector Indexed Load/Store Operations` - (Done) `7.4 Unit-stride Fault-Only-First Loads Operations` ## Q & A diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index b67fb94b9af5b4..9f0226f4580326 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -19,6 +19,11 @@ class RVVOutBuiltin let IntrinsicTypes = [-1]; } +class RVVOutOp1Builtin + : RVVBuiltin { + let IntrinsicTypes = [-1, 1]; +} + multiclass RVVBuiltinSet> suffixes_prototypes, list intrinsic_types> { @@ -53,6 +58,11 @@ multiclass RVVIntBinBuiltinSet : RVVSignedBinBuiltinSet, RVVUnsignedBinBuiltinSet; +defvar TypeList = ["c", "s", "i", "l", "x", "f", "d"]; +defvar EEWList = [["8", "(Log2EEW:3)"], + ["16", "(Log2EEW:4)"], + ["32", "(Log2EEW:5)"], + ["64", "(Log2EEW:6)"]]; //===----------------------------------------------------------------------===// // 6. Configuration-Setting and Utility @@ -193,7 +203,7 @@ let SupportOverloading = false, } // 7.1 Unit-stride load: vlb/h/w/bu/hu/wu - multiclass RVVVLXBuiltin types> { + multiclass RVVVLBHWBuiltin types> { foreach type = types in { // `vPCe` is type `const T * -> {VL} -> VectorType` // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` @@ -227,7 +237,7 @@ let SupportOverloading = false, } // 7.2 Strided load: vlsb/h/w/bu/hu/wu - multiclass RVVVLSXBuiltin types> { + multiclass RVVVLSBHWBuiltin types> { foreach type = types in { // `vPCez` is type `const T * -> SizeT -> {VL} -> VectorType` // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` @@ -243,6 +253,24 @@ let SupportOverloading = false, } } + + // 7.3 Indexed Load Operations: vlxb/h/w/bu/hu/wu + multiclass RVVVLXBHWBuiltin types> { + foreach type = types in { + // `vPCeUv` is type `const T * -> unsigned VectorType -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by 
`RVVIntrinsic::computeBuiltinTypes` + let Name = NAME # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in + def : RVVOutOp1Builtin<"v", "vPCeUv", type>; + // `UvPCUeUv` is type `const unsigned T * -> unsigned VectorType -> {VL} -> unsigned VectorType` + let Name = NAME # "u_v", + IRName = ir # "u", + MaskedIRName = ir # "u_mask" in + def : RVVOutOp1Builtin<"Uv", "UvPCUeUv", type>; + } + } + // 7.4. Unit-stride Fault-Only-First Loads Operations multiclass RVVVLEFFBuiltin types> { let Name = NAME # "_v", @@ -292,6 +320,36 @@ let SupportOverloading = false, } } +// 7.3 Indexed Load Operations: vlxei +multiclass RVVVLXEEWBuiltin types> { + let UnMaskedPolicyScheme = HasPassthruOperand in { + foreach type = types in { + foreach eew_list = EEWList in { + defvar eew = eew_list[0]; + defvar eew_type = eew_list[1]; + let Name = NAME # eew # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + // Compare the following two signatures of vloxei: + // vint8m1_t vloxei8_v_i8m1 (const int8_t *base, vuint8m1_t bindex, size_t vl); + // vint8m1_t vloxei16_v_i8m1 (const int8_t *base, vuint16m2_t bindex, size_t vl); + // The type of `bindex` should not be computed from `type` (aka, i8m1, i8m2, etc.), + // which is not the same as what we do in other intrinsics. 
+ + // `vPCeUv` is type `const T * -> unsigned VectorType -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def: RVVOutOp1Builtin<"v", "vPCe" # eew_type # "Uv", type>; + if !not(IsFloat.val) then { + // `UvPCUeUv` is type `const unsigned T * -> unsigned VectorType -> {VL} -> unsigned VectorType` + def: RVVOutOp1Builtin<"Uv", "UvPCUe" # eew_type # "Uv", type>; + } + } + } + } + } +} + +// 7.1 Unit-strided Store Operations let HasMaskedOffOperand = false, MaskedPolicyScheme = NonePolicy, ManualCodegen = [{ @@ -326,7 +384,7 @@ let HasMaskedOffOperand = false, } // 7.1 Unit-stride store: vsb/h/w/bu/hu/wu - multiclass RVVVSXBuiltin types> { + multiclass RVVVSBHWBuiltin types> { let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in { @@ -341,6 +399,7 @@ let HasMaskedOffOperand = false, } } +// 7.2 Strided Store Operations let HasMaskedOffOperand = false, MaskedPolicyScheme = NonePolicy, ManualCodegen = [{ @@ -375,7 +434,7 @@ let HasMaskedOffOperand = false, } // 7.2 Strided store: vssb/h/w/bu/hu/wu - multiclass RVVVSSXBuiltin types> { + multiclass RVVVSSBHWBuiltin types> { let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in { @@ -390,40 +449,111 @@ let HasMaskedOffOperand = false, } } +// 7.3 Indexed Store Operations +let HasMaskedOffOperand = false, + MaskedPolicyScheme = NonePolicy, + ManualCodegen = [{ + if (IsMasked) { + // Builtin: (mask, ptr, index, value, vl). Intrinsic: (value, ptr, index, mask, vl) + std::swap(Ops[0], Ops[3]); + } else { + // Builtin: (ptr, index, value, vl). 
Intrinsic: (value, ptr, index, vl) + std::rotate(Ops.begin(), Ops.begin() + 2, Ops.begin() + 3); + } + Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); + if (IsMasked) + IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType(), Ops[4]->getType()}; + else + IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType(), Ops[3]->getType()}; + }] in { + multiclass RVVVSXEEWBuiltin types> { + // 7.3 Indexed store: vsxei + foreach type = types in { + foreach eew_list = EEWList in { + defvar eew = eew_list[0]; + defvar eew_type = eew_list[1]; + let Name = NAME # eew # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + // `0PeUvv` is type `T * -> unsigned VectorType -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def : RVVBuiltin<"v", "0Pe" # eew_type # "Uvv", type>; + if !not(IsFloat.val) then { + // `0PUeUvUv` is type `unsigned T * -> unsigned VectorType -> unsigned VectorType -> {VL} -> void` + def : RVVBuiltin<"Uv", "0PUe" # eew_type # "UvUv", type>; + } + } + } + } + } + + // 7.3 Indexed store: vsxb/h/w/bu/hu/wu + multiclass RVVVSXBHWBuiltin types> { + let Name = NAME # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + foreach type = types in { + // `0PeUvv` is type `T * -> unsigned VectorType -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def : RVVBuiltin<"v", "0PeUvv", type>; + // `0PUeUvUv` is type `unsigned T * -> unsigned VectorType -> unsigned VectorType -> {VL} -> void` + def : RVVBuiltin<"Uv", "0PUeUvUv", type>; + } + } + } +} + // 7.1. 
Vector Unit-Stride Operations -defm th_vlb : RVVVLXBuiltin<"th_vlb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vlh : RVVVLXBuiltin<"th_vlh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vlw : RVVVLXBuiltin<"th_vlw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlb : RVVVLBHWBuiltin<"th_vlb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlh : RVVVLBHWBuiltin<"th_vlh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlw : RVVVLBHWBuiltin<"th_vlw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 defm th_vle8 : RVVVLEBuiltin<"th_vle", ["c"]>; // i8 defm th_vle16: RVVVLEBuiltin<"th_vle", ["s","x"]>; // i16, f16 defm th_vle32: RVVVLEBuiltin<"th_vle", ["i","f"]>; // i32, f32 defm th_vle64: RVVVLEBuiltin<"th_vle", ["l","d"]>; // i64, f64 -defm th_vsb : RVVVSXBuiltin<"th_vsb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vsh : RVVVSXBuiltin<"th_vsh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vsw : RVVVSXBuiltin<"th_vsw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsb : RVVVSBHWBuiltin<"th_vsb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsh : RVVVSBHWBuiltin<"th_vsh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsw : RVVVSBHWBuiltin<"th_vsw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 defm th_vse8 : RVVVSEBuiltin<"th_vse", ["c"]>; // i8 defm th_vse16: RVVVSEBuiltin<"th_vse", ["s","x"]>; // i16, f16 defm th_vse32: RVVVSEBuiltin<"th_vse", ["i","f"]>; // i32, f32 defm th_vse64: RVVVSEBuiltin<"th_vse", ["l","d"]>; // i64, f64 // 7.2. 
Vector Strided Load/Store Operations -defm th_vlsb : RVVVLSXBuiltin<"th_vlsb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vlsh : RVVVLSXBuiltin<"th_vlsh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vlsw : RVVVLSXBuiltin<"th_vlsw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlsb : RVVVLSBHWBuiltin<"th_vlsb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlsh : RVVVLSBHWBuiltin<"th_vlsh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlsw : RVVVLSBHWBuiltin<"th_vlsw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 defm th_vlse8 : RVVVLSEBuiltin<"th_vlse", ["c"]>; // i8 defm th_vlse16: RVVVLSEBuiltin<"th_vlse", ["s","x"]>; // i16, f16 defm th_vlse32: RVVVLSEBuiltin<"th_vlse", ["i","f"]>; // i32, f32 defm th_vlse64: RVVVLSEBuiltin<"th_vlse", ["l","d"]>; // i64, f64 -defm th_vssb : RVVVSSXBuiltin<"th_vssb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vssh : RVVVSSXBuiltin<"th_vssh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 -defm th_vssw : RVVVSSXBuiltin<"th_vssw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vssb : RVVVSSBHWBuiltin<"th_vssb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vssh : RVVVSSBHWBuiltin<"th_vssh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vssw : RVVVSSBHWBuiltin<"th_vssw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 defm th_vsse8 : RVVVSSEBuiltin<"th_vsse", ["c"]>; // i8 defm th_vsse16: RVVVSSEBuiltin<"th_vsse", ["s","x"]>; // i16, f16 defm th_vsse32: RVVVSSEBuiltin<"th_vsse", ["i","f"]>; // i32, f32 defm th_vsse64: RVVVSSEBuiltin<"th_vsse", ["l","d"]>; // i64, f64 +// 7.3 Vector Indexed Load/Store Operations +defm th_vlxb : RVVVLXBHWBuiltin<"th_vlxb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlxh : RVVVLXBHWBuiltin<"th_vlxh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlxw : RVVVLXBHWBuiltin<"th_vlxw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vloxei : RVVVLXEEWBuiltin<"th_vlxe", TypeList>; // all types + +defm th_vsxb : 
RVVVSXBHWBuiltin<"th_vsxb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsxh : RVVVSXBHWBuiltin<"th_vsxh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsxw : RVVVSXBHWBuiltin<"th_vsxw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsoxei : RVVVSXEEWBuiltin<"th_vsxe", TypeList>; // all types + +// TODO: LLVM intrinsic th_vsuxb, th_vsuxh, th_vsuxw, th_vsuxei for the following: +//defm th_vsuxb : RVVVSXBHWBuiltin<"th_vsuxb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +//defm th_vsuxh : RVVVSXBHWBuiltin<"th_vsuxh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +//defm th_vsuxw : RVVVSXBHWBuiltin<"th_vsuxw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +//defm th_vsuxei : RVVVSXEEWBuiltin<"th_vsuxe", TypeList>; // all types + // 7.4. Unit-stride Fault-Only-First Loads Operations defm th_vle8ff : RVVVLEFFBuiltin<"th_vleff", ["c"]>; // i8 defm th_vle16ff: RVVVLEFFBuiltin<"th_vleff", ["s","x"]>; // i16, f16 diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td index cad66aea356199..c26268342e6abb 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td @@ -645,3 +645,201 @@ let HeaderCode = #define __riscv_vle64ff_v_f64m8(src_ptr, new_vl_ptr, vl) __riscv_th_vle64ff_v_f64m8(src_ptr, new_vl_ptr, vl) }] in def th_unit_stride_ff_wrapper_macros: RVVHeader. + +let HeaderCode = +[{ +// Vector Indexed Load/Store Operations +#define __riscv_vlxb_v_i8m1(src_ptr, indexed, vl) __riscv_th_vlxb_v_i8m1(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i8m2(src_ptr, indexed, vl) __riscv_th_vlxb_v_i8m2(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i8m4(src_ptr, indexed, vl) __riscv_th_vlxb_v_i8m4(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i8m8(src_ptr, indexed, vl) __riscv_th_vlxb_v_i8m8(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i16m1(src_ptr, indexed, vl) __riscv_th_vlxb_v_i16m1(src_ptr, indexed, vl) 
+#define __riscv_vlxb_v_i16m2(src_ptr, indexed, vl) __riscv_th_vlxb_v_i16m2(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i16m4(src_ptr, indexed, vl) __riscv_th_vlxb_v_i16m4(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i16m8(src_ptr, indexed, vl) __riscv_th_vlxb_v_i16m8(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i32m1(src_ptr, indexed, vl) __riscv_th_vlxb_v_i32m1(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i32m2(src_ptr, indexed, vl) __riscv_th_vlxb_v_i32m2(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i32m4(src_ptr, indexed, vl) __riscv_th_vlxb_v_i32m4(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i32m8(src_ptr, indexed, vl) __riscv_th_vlxb_v_i32m8(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i64m1(src_ptr, indexed, vl) __riscv_th_vlxb_v_i64m1(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i64m2(src_ptr, indexed, vl) __riscv_th_vlxb_v_i64m2(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i64m4(src_ptr, indexed, vl) __riscv_th_vlxb_v_i64m4(src_ptr, indexed, vl) +#define __riscv_vlxb_v_i64m8(src_ptr, indexed, vl) __riscv_th_vlxb_v_i64m8(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i8m1(src_ptr, indexed, vl) __riscv_th_vlxh_v_i8m1(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i8m2(src_ptr, indexed, vl) __riscv_th_vlxh_v_i8m2(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i8m4(src_ptr, indexed, vl) __riscv_th_vlxh_v_i8m4(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i8m8(src_ptr, indexed, vl) __riscv_th_vlxh_v_i8m8(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i16m1(src_ptr, indexed, vl) __riscv_th_vlxh_v_i16m1(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i16m2(src_ptr, indexed, vl) __riscv_th_vlxh_v_i16m2(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i16m4(src_ptr, indexed, vl) __riscv_th_vlxh_v_i16m4(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i16m8(src_ptr, indexed, vl) __riscv_th_vlxh_v_i16m8(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i32m1(src_ptr, indexed, vl) __riscv_th_vlxh_v_i32m1(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i32m2(src_ptr, indexed, 
vl) __riscv_th_vlxh_v_i32m2(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i32m4(src_ptr, indexed, vl) __riscv_th_vlxh_v_i32m4(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i32m8(src_ptr, indexed, vl) __riscv_th_vlxh_v_i32m8(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i64m1(src_ptr, indexed, vl) __riscv_th_vlxh_v_i64m1(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i64m2(src_ptr, indexed, vl) __riscv_th_vlxh_v_i64m2(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i64m4(src_ptr, indexed, vl) __riscv_th_vlxh_v_i64m4(src_ptr, indexed, vl) +#define __riscv_vlxh_v_i64m8(src_ptr, indexed, vl) __riscv_th_vlxh_v_i64m8(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i8m1(src_ptr, indexed, vl) __riscv_th_vlxw_v_i8m1(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i8m2(src_ptr, indexed, vl) __riscv_th_vlxw_v_i8m2(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i8m4(src_ptr, indexed, vl) __riscv_th_vlxw_v_i8m4(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i8m8(src_ptr, indexed, vl) __riscv_th_vlxw_v_i8m8(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i16m1(src_ptr, indexed, vl) __riscv_th_vlxw_v_i16m1(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i16m2(src_ptr, indexed, vl) __riscv_th_vlxw_v_i16m2(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i16m4(src_ptr, indexed, vl) __riscv_th_vlxw_v_i16m4(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i16m8(src_ptr, indexed, vl) __riscv_th_vlxw_v_i16m8(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i32m1(src_ptr, indexed, vl) __riscv_th_vlxw_v_i32m1(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i32m2(src_ptr, indexed, vl) __riscv_th_vlxw_v_i32m2(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i32m4(src_ptr, indexed, vl) __riscv_th_vlxw_v_i32m4(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i32m8(src_ptr, indexed, vl) __riscv_th_vlxw_v_i32m8(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i64m1(src_ptr, indexed, vl) __riscv_th_vlxw_v_i64m1(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i64m2(src_ptr, indexed, vl) __riscv_th_vlxw_v_i64m2(src_ptr, indexed, 
vl) +#define __riscv_vlxw_v_i64m4(src_ptr, indexed, vl) __riscv_th_vlxw_v_i64m4(src_ptr, indexed, vl) +#define __riscv_vlxw_v_i64m8(src_ptr, indexed, vl) __riscv_th_vlxw_v_i64m8(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u8m1(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u8m1(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u8m2(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u8m2(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u8m4(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u8m4(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u8m8(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u8m8(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u16m1(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u16m1(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u16m2(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u16m2(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u16m4(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u16m4(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u16m8(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u16m8(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u32m1(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u32m1(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u32m2(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u32m2(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u32m4(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u32m4(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u32m8(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u32m8(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u64m1(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u64m1(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u64m2(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u64m2(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u64m4(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u64m4(src_ptr, indexed, vl) +#define __riscv_vlxbu_v_u64m8(src_ptr, indexed, vl) __riscv_th_vlxbu_v_u64m8(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u8m1(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u8m1(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u8m2(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u8m2(src_ptr, indexed, vl) +#define 
__riscv_vlxhu_v_u8m4(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u8m4(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u8m8(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u8m8(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u16m1(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u16m1(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u16m2(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u16m2(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u16m4(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u16m4(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u16m8(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u16m8(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u32m1(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u32m1(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u32m2(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u32m2(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u32m4(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u32m4(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u32m8(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u32m8(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u64m1(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u64m1(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u64m2(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u64m2(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u64m4(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u64m4(src_ptr, indexed, vl) +#define __riscv_vlxhu_v_u64m8(src_ptr, indexed, vl) __riscv_th_vlxhu_v_u64m8(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u8m1(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u8m1(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u8m2(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u8m2(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u8m4(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u8m4(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u8m8(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u8m8(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u16m1(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u16m1(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u16m2(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u16m2(src_ptr, indexed, vl) +#define 
__riscv_vlxwu_v_u16m4(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u16m4(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u16m8(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u16m8(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u32m1(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u32m1(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u32m2(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u32m2(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u32m4(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u32m4(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u32m8(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u32m8(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u64m1(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u64m1(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u64m2(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u64m2(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u64m4(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u64m4(src_ptr, indexed, vl) +#define __riscv_vlxwu_v_u64m8(src_ptr, indexed, vl) __riscv_th_vlxwu_v_u64m8(src_ptr, indexed, vl) +#define __riscv_vsxb_v_i8m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i32m1(dst_ptr, indexed, value, vl) +#define 
__riscv_vsxb_v_i32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i32m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i64m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i64m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_i64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_i64m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i8m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i32m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i32m4(dst_ptr, indexed, value, vl) 
+#define __riscv_vsxh_v_i32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i64m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i64m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_i64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_i64m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i8m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i32m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i32m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i64m1(dst_ptr, indexed, 
value, vl) +#define __riscv_vsxw_v_i64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i64m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_i64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_i64m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u8m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u32m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u32m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u64m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxb_v_u64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u64m4(dst_ptr, 
indexed, value, vl) +#define __riscv_vsxb_v_u64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxb_v_u64m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u8m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u32m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u32m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u64m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u64m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxh_v_u64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxh_v_u64m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u8m1(dst_ptr, indexed, value, vl) 
__riscv_th_vsxw_v_u8m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u8m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u8m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u8m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u8m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u8m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u8m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u16m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u16m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u16m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u16m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u16m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u16m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u16m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u16m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u32m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u32m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u32m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u32m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u32m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u32m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u32m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u32m8(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u64m1(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u64m1(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u64m2(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u64m2(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u64m4(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u64m4(dst_ptr, indexed, value, vl) +#define __riscv_vsxw_v_u64m8(dst_ptr, indexed, value, vl) __riscv_th_vsxw_v_u64m8(dst_ptr, indexed, value, vl) +}] in +def th_indexed_wrapper_macros: RVVHeader; diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c new file mode 100644 index 
00000000000000..39c884184b1d94 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei16.c @@ -0,0 +1,336 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f16.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m1_t test_th_vloxei16_v_f16m1(const _Float16 *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vloxei16_v_f16m2(const _Float16 *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m4_t test_th_vloxei16_v_f16m4(const _Float16 *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vloxei16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32f16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m8_t test_th_vloxei16_v_f16m8(const _Float16 *base, vuint16m8_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m2_t test_th_vloxei16_v_f32m2(const float *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vloxei16_v_f32m4(const float *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m8_t test_th_vloxei16_v_f32m8(const float *base, vuint16m4_t bindex, size_t vl) { + return 
__riscv_th_vloxei16_v_f32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m4_t test_th_vloxei16_v_f64m4(const double *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vloxei16_v_f64m8(const double *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_f64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vloxei16_v_i8m1(const int8_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t 
test_th_vloxei16_v_i8m2(const int8_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vloxei16_v_i8m4(const int8_t *base, vuint16m8_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i8m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vloxei16_v_i16m1(const int16_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vloxei16_v_i16m2(const int16_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], 
[[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vloxei16_v_i16m4(const int16_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vloxei16_v_i16m8(const int16_t *base, vuint16m8_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vloxei16_v_i32m2(const int32_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vloxei16_v_i32m4(const int32_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vloxei16_v_i32m8(const int32_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vloxei16_v_i64m4(const int64_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vloxei16_v_i64m8(const int64_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_i64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vloxei16_v_u8m1(const uint8_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vloxei16_v_u8m2(const uint8_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vloxei16_v_u8m4(const uint8_t *base, vuint16m8_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u8m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vloxei16_v_u16m1(const uint16_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vloxei16_v_u16m2(const uint16_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vloxei16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vloxei16_v_u16m4(const uint16_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vloxei16_v_u16m8(const uint16_t *base, vuint16m8_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vloxei16_v_u32m2(const uint32_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vloxei16_v_u32m4(const uint32_t *base, vuint16m2_t bindex, size_t vl) { + return 
__riscv_th_vloxei16_v_u32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vloxei16_v_u32m8(const uint32_t *base, vuint16m4_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vloxei16_v_u64m4(const uint64_t *base, vuint16m1_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei16_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i16.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vloxei16_v_u64m8(const uint64_t *base, vuint16m2_t bindex, size_t vl) { + return __riscv_th_vloxei16_v_u64m8(base, bindex, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c new file mode 100644 index 00000000000000..4abce254c22589 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei32.c @@ -0,0 +1,346 @@ +// RUN: %clang_cc1 -triple 
riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f16.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m1_t test_th_vloxei32_v_f16m1(const _Float16 *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vloxei32_v_f16m2(const _Float16 *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m4_t test_th_vloxei32_v_f16m4(const _Float16 *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxe.nxv2f32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m1_t test_th_vloxei32_v_f32m1(const float *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m2_t test_th_vloxei32_v_f32m2(const float *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vloxei32_v_f32m4(const float *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m8_t test_th_vloxei32_v_f32m8(const float *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m2_t test_th_vloxei32_v_f64m2(const double *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m4_t test_th_vloxei32_v_f64m4(const double *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vloxei32_v_f64m8(const double *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_f64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vloxei32_v_i8m1(const int8_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i8m2 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vloxei32_v_i8m2(const int8_t *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vloxei32_v_i16m1(const int16_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vloxei32_v_i16m2(const int16_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vloxei32_v_i16m4(const int16_t *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i16m4(base, bindex, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vloxei32_v_i32m1(const int32_t *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vloxei32_v_i32m2(const int32_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vloxei32_v_i32m4(const int32_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vloxei32_v_i32m8(const int32_t *base, 
vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vloxei32_v_i64m2(const int64_t *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vloxei32_v_i64m4(const int64_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vloxei32_v_i64m8(const int64_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_i64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
ret [[TMP0]] +// +vuint8m1_t test_th_vloxei32_v_u8m1(const uint8_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vloxei32_v_u8m2(const uint8_t *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vloxei32_v_u16m1(const uint16_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vloxei32_v_u16m2(const uint16_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxe.nxv16i16.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vloxei32_v_u16m4(const uint16_t *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vloxei32_v_u32m1(const uint32_t *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vloxei32_v_u32m2(const uint32_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vloxei32_v_u32m4(const uint32_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vloxei32_v_u32m8(const uint32_t *base, vuint32m8_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vloxei32_v_u64m2(const uint64_t *base, vuint32m1_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vloxei32_v_u64m4(const uint64_t *base, vuint32m2_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei32_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i32.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vloxei32_v_u64m8(const uint64_t *base, vuint32m4_t bindex, size_t vl) { + return __riscv_th_vloxei32_v_u64m8(base, bindex, vl); +} diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c new file mode 100644 index 00000000000000..16a2171eda0da5 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei64.c @@ -0,0 +1,296 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f16.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m1_t test_th_vloxei64_v_f16m1(const _Float16 *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vloxei64_v_f16m2(const _Float16 *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m1_t 
test_th_vloxei64_v_f32m1(const float *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m2_t test_th_vloxei64_v_f32m2(const float *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vloxei64_v_f32m4(const float *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1f64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m1_t test_th_vloxei64_v_f64m1(const double *base, vuint64m1_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f64m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2f64.nxv2i64.i64( poison, ptr 
[[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m2_t test_th_vloxei64_v_f64m2(const double *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4f64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m4_t test_th_vloxei64_v_f64m4(const double *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vloxei64_v_f64m8(const double *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_f64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vloxei64_v_i8m1(const int8_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vloxei64_v_i16m1(const int16_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vloxei64_v_i16m2(const int16_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vloxei64_v_i32m1(const int32_t *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vloxei64_v_i32m2(const int32_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vloxei64_v_i32m4(const int32_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vloxei64_v_i64m1(const int64_t *base, vuint64m1_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i64m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vloxei64_v_i64m2(const int64_t *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vloxei64_v_i64m4(const int64_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_i64m8 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vloxei64_v_i64m8(const int64_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_i64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vloxei64_v_u8m1(const uint8_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i16.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vloxei64_v_u16m1(const uint16_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u16m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vloxei64_v_u16m2(const uint16_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u16m2(base, 
bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i32.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vloxei64_v_u32m1(const uint32_t *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u32m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i32.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vloxei64_v_u32m2(const uint32_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u32m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vloxei64_v_u32m4(const uint32_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vloxei64_v_u64m1(const 
uint64_t *base, vuint64m1_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u64m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vloxei64_v_u64m2(const uint64_t *base, vuint64m2_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u64m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vloxei64_v_u64m4(const uint64_t *base, vuint64m4_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u64m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vloxei64_v_u64m8(const uint64_t *base, vuint64m8_t bindex, size_t vl) { + return __riscv_th_vloxei64_v_u64m8(base, bindex, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c new file mode 100644 index 00000000000000..68e3239f2fcef0 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vloxei8.c @@ -0,0 +1,266 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f16.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vloxei8_v_f16m2(const _Float16 *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m4_t test_th_vloxei8_v_f16m4(const _Float16 *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32f16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m8_t test_th_vloxei8_v_f16m8(const _Float16 *base, vuint8m4_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vloxei8_v_f32m4(const float *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16f32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m8_t test_th_vloxei8_v_f32m8(const float *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8f64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vloxei8_v_f64m8(const double *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_f64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vloxei8_v_i8m1(const int8_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vloxei8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vloxei8_v_i8m2(const int8_t *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vloxei8_v_i8m4(const int8_t *base, vuint8m4_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i8m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vloxei8_v_i8m8(const int8_t *base, vuint8m8_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i8m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vloxei8_v_i16m2(const int16_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i16m2(base, bindex, vl); 
+} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vloxei8_v_i16m4(const int16_t *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vloxei8_v_i16m8(const int16_t *base, vuint8m4_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vloxei8_v_i32m4(const int32_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vloxei8_v_i32m8(const int32_t *base, vuint8m2_t bindex, 
size_t vl) { + return __riscv_th_vloxei8_v_i32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vloxei8_v_i64m8(const int64_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_i64m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vloxei8_v_u8m1(const uint8_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u8m1(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vloxei8_v_u8m2(const uint8_t *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u8m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t 
test_th_vloxei8_v_u8m4(const uint8_t *base, vuint8m4_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u8m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vloxei8_v_u8m8(const uint8_t *base, vuint8m8_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u8m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i16.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vloxei8_v_u16m2(const uint16_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u16m2(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i16.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vloxei8_v_u16m4(const uint16_t *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u16m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv32i16.nxv32i8.i64( poison, ptr [[BASE]], [[BINDEX]], 
i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vloxei8_v_u16m8(const uint16_t *base, vuint8m4_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u16m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i32.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vloxei8_v_u32m4(const uint32_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u32m4(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv16i32.nxv16i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vloxei8_v_u32m8(const uint32_t *base, vuint8m2_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u32m8(base, bindex, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vloxei8_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxe.nxv8i64.nxv8i8.i64( poison, ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vloxei8_v_u64m8(const uint64_t *base, vuint8m1_t bindex, size_t vl) { + return __riscv_th_vloxei8_v_u64m8(base, bindex, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c new file mode 100644 index 00000000000000..2ade79a34d2cc9 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vlx.c @@ -0,0 +1,966 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxb_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxb_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxb_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxb_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxb_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxb_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxb_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxb_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxb_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxb_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxb_v_i16m2(const int16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxb_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxb_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxb_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxb_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxb_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxb_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxb_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxb_v_i32m2(const int32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxb_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxb_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_th_vlxb_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxb_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxb_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxb_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxb_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxb_v_i64m2(const int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxb_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxb_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_th_vlxb_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxb_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxb_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxh_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxh_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxh_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxh_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxh_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxh_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxh_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxh_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxh_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxh_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxh_v_i16m2(const int16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxh_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxh_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxh_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxh_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxh_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxh_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxh_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxh_v_i32m2(const int32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxh_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxh_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_th_vlxh_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxh_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxh_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxh_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxh_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxh_v_i64m2(const int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxh_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxh_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_th_vlxh_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxh_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxh_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxw_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxw_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxw_v_i8m2(const int8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxw_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxw_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxw_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxw_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxw_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxw_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxw_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxw_v_i16m2(const int16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxw_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxw_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxw_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m8 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxw_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxw_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxw_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxw_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxw_v_i32m2(const int32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxw_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxw_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_th_vlxw_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m8 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxw_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxw_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxw_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxw_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxw_v_i64m2(const int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxw_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxw_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_th_vlxw_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vlxw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxw_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxw_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxbu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxbu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxbu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxbu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlxbu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxbu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_th_vlxbu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxbu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxbu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxbu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxbu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlxbu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxbu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlxbu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxbu_v_u16m4(base, index, vl); 
+} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxbu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxbu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxbu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxbu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlxbu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxbu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxbu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return 
__riscv_th_vlxbu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxbu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxbu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxbu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxbu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxbu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxbu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxbu_v_u64m4(const uint64_t *base, 
vuint64m4_t index, size_t vl) { + return __riscv_th_vlxbu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxbu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxbu_v_u64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxhu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxhu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxhu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxhu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t 
test_th_vlxhu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxhu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxhu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxhu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxhu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxhu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlxhu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxhu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
ret [[TMP0]] +// +vuint16m4_t test_th_vlxhu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxhu_v_u16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxhu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxhu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxhu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxhu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlxhu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxhu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], 
[[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxhu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_th_vlxhu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxhu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxhu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxhu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxhu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxhu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxhu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxhu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxhu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_th_vlxhu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxhu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxhu_v_u64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxwu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_th_vlxwu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxwu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_th_vlxwu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlxwu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_th_vlxwu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxwu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_th_vlxwu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxwu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_th_vlxwu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlxwu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_th_vlxwu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlxwu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_th_vlxwu_v_u16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxwu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_th_vlxwu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxwu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_th_vlxwu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlxwu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_th_vlxwu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxwu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_th_vlxwu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxwu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_th_vlxwu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxwu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_th_vlxwu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxwu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_th_vlxwu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m4 +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxwu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_th_vlxwu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxwu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_th_vlxwu_v_u64m8(base, index, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c new file mode 100644 index 00000000000000..9ee382660f5983 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei16.c @@ -0,0 +1,336 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f16m1(_Float16 *base, vuint16m1_t bindex, vfloat16m1_t value, size_t vl) { + return 
__riscv_th_vsoxei16_v_f16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f16m2(_Float16 *base, vuint16m2_t bindex, vfloat16m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f16m4(_Float16 *base, vuint16m4_t bindex, vfloat16m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32f16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f16m8(_Float16 *base, vuint16m8_t bindex, vfloat16m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i16.i64( [[VALUE]], 
ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i8m4(base, bindex, 
value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u8m2(base, bindex, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u8m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex, vuint16m1_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex, vuint16m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// 
+void test_th_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex, vuint16m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex, vuint16m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei16_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return __riscv_th_vsoxei16_v_u64m8(base, bindex, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c new file mode 100644 index 00000000000000..f702c9eda2d3e1 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei32.c @@ -0,0 +1,347 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + 
+#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f16.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f16m1(_Float16 *base, vuint32m2_t bindex, vfloat16m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f16m2(_Float16 *base, vuint32m4_t bindex, vfloat16m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f16m4(_Float16 *base, vuint32m8_t bindex, vfloat16m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i8m1(base, bindex, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsoxei32_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u8m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex, vuint16m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u16m2(uint16_t *base, 
vuint32m4_t bindex, vuint16m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex, vuint16m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call 
void @llvm.riscv.th.vsxe.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei32_v_u64m8 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return __riscv_th_vsoxei32_v_u64m8(base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c new file mode 100644 index 00000000000000..edf2b4a89428fb --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei64.c @@ -0,0 +1,296 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f16.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f16m1(_Float16 *base, vuint64m4_t bindex, vfloat16m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vsoxei64_v_f16m2(_Float16 *base, vuint64m8_t bindex, vfloat16m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv1f64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f64m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2f64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4f64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value, size_t vl) { + return __riscv_th_vsoxei64_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsoxei64_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i32m1(int32_t *base, 
vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return __riscv_th_vsoxei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u16m1 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i16.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex, vuint16m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u16m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex, vuint16m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i32.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv4i32.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, 
size_t vl) { + return __riscv_th_vsoxei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return __riscv_th_vsoxei64_v_u64m8(base, bindex, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c new file mode 100644 index 00000000000000..b3491202589c88 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsoxei8.c @@ -0,0 +1,266 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f16.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f16m2(_Float16 *base, vuint8m1_t bindex, vfloat16m2_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsoxei8_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f16m4(_Float16 *base, vuint8m2_t bindex, vfloat16m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32f16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f16m8(_Float16 *base, vuint8m4_t bindex, vfloat16m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16f32.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f32m8(float *base, 
vuint8m2_t bindex, vfloat32m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8f64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxe.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i8m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i8m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return 
__riscv_th_vsoxei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u8m1(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u8m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u8m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) 
+// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u8m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i16.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u16m2(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i16.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u16m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv32i16.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u16m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i32.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv16i32.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsoxei8_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxe.nxv8i64.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[BINDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsoxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return __riscv_th_vsoxei8_v_u64m8(base, bindex, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c new file mode 100644 index 00000000000000..5fc05703e36528 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/thead/vsx.c @@ -0,0 +1,966 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + 
+// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_th_vsxb_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_th_vsxb_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_th_vsxb_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, 
size_t vl) { + return __riscv_th_vsxb_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_th_vsxb_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_th_vsxb_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return __riscv_th_vsxb_v_i16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_th_vsxb_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_th_vsxb_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return __riscv_th_vsxb_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, size_t vl) { + return __riscv_th_vsxb_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_th_vsxb_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_th_vsxb_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_th_vsxb_v_i64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_th_vsxb_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_th_vsxb_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_th_vsxh_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_th_vsxh_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_th_vsxh_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsxh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t vl) { + return __riscv_th_vsxh_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_th_vsxh_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_th_vsxh_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return 
__riscv_th_vsxh_v_i16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_th_vsxh_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_th_vsxh_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return __riscv_th_vsxh_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void 
+// +void test_th_vsxh_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, size_t vl) { + return __riscv_th_vsxh_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_th_vsxh_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_th_vsxh_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_th_vsxh_v_i64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_th_vsxh_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_th_vsxh_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_th_vsxw_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_th_vsxw_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_th_vsxw_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t vl) { + return __riscv_th_vsxw_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_th_vsxw_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_th_vsxw_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m4 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return __riscv_th_vsxw_v_i16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_th_vsxw_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_th_vsxw_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return 
__riscv_th_vsxw_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, size_t vl) { + return __riscv_th_vsxw_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_th_vsxw_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_th_vsxw_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void 
+// +void test_th_vsxw_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_th_vsxw_v_i64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_th_vsxw_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_th_vsxw_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_th_vsxb_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_th_vsxb_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_th_vsxb_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_th_vsxb_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + return __riscv_th_vsxb_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_th_vsxb_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_th_vsxb_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return __riscv_th_vsxb_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_th_vsxb_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsxb_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_th_vsxb_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_th_vsxb_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_th_vsxb_v_u32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t 
vl) { + return __riscv_th_vsxb_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_th_vsxb_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_th_vsxb_v_u64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_th_vsxb_v_u64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_th_vsxh_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_th_vsxh_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_th_vsxh_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_th_vsxh_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + return __riscv_th_vsxh_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_th_vsxh_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_th_vsxh_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return __riscv_th_vsxh_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_th_vsxh_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_th_vsxh_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_th_vsxh_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_th_vsxh_v_u32m8(base, index, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t vl) { + return __riscv_th_vsxh_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_th_vsxh_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_th_vsxh_v_u64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m8(uint64_t *base, 
vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_th_vsxh_v_u64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_th_vsxw_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_th_vsxw_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_th_vsxw_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], 
i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_th_vsxw_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + return __riscv_th_vsxw_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_th_vsxw_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_th_vsxw_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return __riscv_th_vsxw_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_th_vsxw_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_th_vsxw_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_th_vsxw_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_th_vsxw_v_u32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t vl) { + return __riscv_th_vsxw_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_th_vsxw_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_th_vsxw_v_u64m4(base, index, value, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_th_vsxw_v_u64m8(base, index, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c new file mode 100644 index 00000000000000..ba277cd78eea7e --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vlx.c @@ -0,0 +1,966 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxb_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxb_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxb_v_i8m2(const int8_t 
*base, vuint8m2_t index, size_t vl) { + return __riscv_vlxb_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxb_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxb_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxb_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxb_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxb_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxb_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxb_v_i16m2(const int16_t *base, 
vuint16m2_t index, size_t vl) { + return __riscv_vlxb_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxb_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxb_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxb_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxb_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxb_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxb_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxb_v_i32m2(const int32_t 
*base, vuint32m2_t index, size_t vl) { + return __riscv_vlxb_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxb_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxb_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxb_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxb_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxb_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxb_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxb_v_i64m2(const 
int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxb_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxb_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxb_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxb.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxb_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxb_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxh_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxh_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxh_v_i8m2(const int8_t 
*base, vuint8m2_t index, size_t vl) { + return __riscv_vlxh_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxh_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxh_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxh_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxh_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxh_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxh_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxh_v_i16m2(const int16_t *base, 
vuint16m2_t index, size_t vl) { + return __riscv_vlxh_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxh_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxh_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxh_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxh_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxh_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxh_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxh_v_i32m2(const int32_t 
*base, vuint32m2_t index, size_t vl) { + return __riscv_vlxh_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxh_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxh_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxh_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxh_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxh_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxh_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxh_v_i64m2(const 
int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxh_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxh_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxh_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxh.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxh_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxh_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlxw_v_i8m1(const int8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxw_v_i8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlxw_v_i8m2(const int8_t 
*base, vuint8m2_t index, size_t vl) { + return __riscv_vlxw_v_i8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlxw_v_i8m4(const int8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxw_v_i8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlxw_v_i8m8(const int8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxw_v_i8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlxw_v_i16m1(const int16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxw_v_i16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlxw_v_i16m2(const int16_t *base, 
vuint16m2_t index, size_t vl) { + return __riscv_vlxw_v_i16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlxw_v_i16m4(const int16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxw_v_i16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlxw_v_i16m8(const int16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxw_v_i16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlxw_v_i32m1(const int32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxw_v_i32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlxw_v_i32m2(const int32_t 
*base, vuint32m2_t index, size_t vl) { + return __riscv_vlxw_v_i32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlxw_v_i32m4(const int32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxw_v_i32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlxw_v_i32m8(const int32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxw_v_i32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlxw_v_i64m1(const int64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxw_v_i64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlxw_v_i64m2(const 
int64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxw_v_i64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlxw_v_i64m4(const int64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxw_v_i64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxw.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlxw_v_i64m8(const int64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxw_v_i64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxbu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxbu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxbu_v_u8m2(const 
uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_vlxbu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlxbu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxbu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxbu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxbu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxbu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxbu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t 
test_th_vlxbu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_vlxbu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlxbu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxbu_v_u16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxbu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxbu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxbu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxbu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
ret [[TMP0]] +// +vuint32m2_t test_th_vlxbu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_vlxbu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxbu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxbu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxbu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxbu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxbu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxbu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxbu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxbu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxbu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxbu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxbu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxbu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxbu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxbu_v_u64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxhu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxhu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], 
[[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxhu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_vlxhu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlxhu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxhu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxhu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxhu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxhu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxhu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i16.nxv8i16.i64( poison, ptr 
[[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlxhu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_vlxhu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlxhu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxhu_v_u16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxhu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxhu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxhu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxhu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlxhu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlxhu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_vlxhu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxhu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxhu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxhu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxhu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxhu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxhu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxhu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxhu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxhu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxhu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxhu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxhu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxhu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxhu_v_u64m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i8.nxv8i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlxwu_v_u8m1(const uint8_t *base, vuint8m1_t index, size_t vl) { + return __riscv_vlxwu_v_u8m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i8.nxv16i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlxwu_v_u8m2(const uint8_t *base, vuint8m2_t index, size_t vl) { + return __riscv_vlxwu_v_u8m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i8.nxv32i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlxwu_v_u8m4(const uint8_t *base, vuint8m4_t index, size_t vl) { + return __riscv_vlxwu_v_u8m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv64i8.nxv64i8.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlxwu_v_u8m8(const uint8_t *base, vuint8m8_t index, size_t vl) { + return __riscv_vlxwu_v_u8m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i16.nxv4i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlxwu_v_u16m1(const uint16_t *base, vuint16m1_t index, size_t vl) { + return __riscv_vlxwu_v_u16m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i16.nxv8i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlxwu_v_u16m2(const uint16_t *base, vuint16m2_t index, size_t vl) { + return __riscv_vlxwu_v_u16m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i16.nxv16i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlxwu_v_u16m4(const uint16_t *base, vuint16m4_t index, size_t vl) { + return __riscv_vlxwu_v_u16m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv32i16.nxv32i16.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlxwu_v_u16m8(const uint16_t *base, vuint16m8_t index, size_t vl) { + return __riscv_vlxwu_v_u16m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i32.nxv2i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlxwu_v_u32m1(const uint32_t *base, vuint32m1_t index, size_t vl) { + return __riscv_vlxwu_v_u32m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i32.nxv4i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlxwu_v_u32m2(const uint32_t *base, vuint32m2_t index, size_t vl) { + return __riscv_vlxwu_v_u32m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i32.nxv8i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlxwu_v_u32m4(const uint32_t *base, vuint32m4_t index, size_t vl) { + return __riscv_vlxwu_v_u32m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv16i32.nxv16i32.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlxwu_v_u32m8(const uint32_t *base, vuint32m8_t index, size_t vl) { + return __riscv_vlxwu_v_u32m8(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv1i64.nxv1i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlxwu_v_u64m1(const uint64_t *base, vuint64m1_t index, size_t vl) { + return __riscv_vlxwu_v_u64m1(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m2 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv2i64.nxv2i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlxwu_v_u64m2(const uint64_t *base, vuint64m2_t index, size_t vl) { + return __riscv_vlxwu_v_u64m2(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv4i64.nxv4i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlxwu_v_u64m4(const uint64_t *base, vuint64m4_t index, size_t vl) { + return __riscv_vlxwu_v_u64m4(base, index, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlxwu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlxwu.nxv8i64.nxv8i64.i64( poison, ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlxwu_v_u64m8(const uint64_t *base, vuint64m8_t index, size_t vl) { + return __riscv_vlxwu_v_u64m8(base, index, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c new file mode 100644 index 00000000000000..a397eb01cb6497 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/indexed/wrappers/vsx.c @@ -0,0 +1,966 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + 
+// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_vsxb_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_vsxb_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_vsxb_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t 
vl) { + return __riscv_vsxb_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_vsxb_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_vsxb_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return __riscv_vsxb_v_i16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret 
void +// +void test_th_vsxb_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_vsxb_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_vsxb_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return __riscv_vsxb_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, size_t vl) { + return __riscv_vsxb_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_vsxb_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_vsxb_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_vsxb_v_i64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_vsxb_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_vsxb_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_vsxh_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_vsxh_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_vsxh_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i8m8 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t vl) { + return __riscv_vsxh_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_vsxh_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_vsxh_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return __riscv_vsxh_v_i16m4(base, index, value, vl); +} + 
+// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_vsxh_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_vsxh_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return __riscv_vsxh_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m4(int32_t *base, vuint32m4_t index, 
vint32m4_t value, size_t vl) { + return __riscv_vsxh_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_vsxh_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_vsxh_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_vsxh_v_i64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_vsxh_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_vsxh_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m1(int8_t *base, vuint8m1_t index, vint8m1_t value, size_t vl) { + return __riscv_vsxw_v_i8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m2(int8_t *base, vuint8m2_t index, vint8m2_t value, size_t vl) { + return __riscv_vsxw_v_i8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m4(int8_t *base, vuint8m4_t index, vint8m4_t value, size_t vl) { + return __riscv_vsxw_v_i8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i8m8(int8_t *base, vuint8m8_t index, vint8m8_t value, size_t vl) { + return __riscv_vsxw_v_i8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m1(int16_t *base, vuint16m1_t index, vint16m1_t value, size_t vl) { + return __riscv_vsxw_v_i16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m2(int16_t *base, vuint16m2_t index, vint16m2_t value, size_t vl) { + return __riscv_vsxw_v_i16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m4(int16_t *base, vuint16m4_t index, vint16m4_t value, size_t vl) { + return __riscv_vsxw_v_i16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i16m8(int16_t *base, vuint16m8_t index, vint16m8_t value, size_t vl) { + return __riscv_vsxw_v_i16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m1(int32_t *base, vuint32m1_t index, vint32m1_t value, size_t vl) { + return __riscv_vsxw_v_i32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m2(int32_t *base, vuint32m2_t index, vint32m2_t value, size_t vl) { + return __riscv_vsxw_v_i32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m4 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m4(int32_t *base, vuint32m4_t index, vint32m4_t value, size_t vl) { + return __riscv_vsxw_v_i32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i32m8(int32_t *base, vuint32m8_t index, vint32m8_t value, size_t vl) { + return __riscv_vsxw_v_i32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m1(int64_t *base, vuint64m1_t index, vint64m1_t value, size_t vl) { + return __riscv_vsxw_v_i64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m2(int64_t *base, vuint64m2_t index, vint64m2_t value, size_t vl) { + return __riscv_vsxw_v_i64m2(base, 
index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m4(int64_t *base, vuint64m4_t index, vint64m4_t value, size_t vl) { + return __riscv_vsxw_v_i64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_i64m8(int64_t *base, vuint64m8_t index, vint64m8_t value, size_t vl) { + return __riscv_vsxw_v_i64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_vsxb_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m2(uint8_t *base, 
vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_vsxb_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_vsxb_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_vsxb_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + return __riscv_vsxb_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_vsxb_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_vsxb_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return __riscv_vsxb_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_vsxb_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_vsxb_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_vsxb_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_vsxb_v_u32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t vl) { + return __riscv_vsxb_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_vsxb_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_vsxb_v_u64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxb_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxb.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxb_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_vsxb_v_u64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_vsxh_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local void @test_th_vsxh_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_vsxh_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_vsxh_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_vsxh_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + 
return __riscv_vsxh_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_vsxh_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_vsxh_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return __riscv_vsxh_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void 
+// +void test_th_vsxh_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_vsxh_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_vsxh_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_vsxh_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_vsxh_v_u32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxh.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t vl) { + return __riscv_vsxh_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_vsxh_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_vsxh_v_u64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxh_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxh.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxh_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_vsxh_v_u64m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i8.nxv8i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m1(uint8_t *base, vuint8m1_t index, vuint8m1_t value, size_t vl) { + return __riscv_vsxw_v_u8m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i8.nxv16i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m2(uint8_t *base, vuint8m2_t index, vuint8m2_t value, size_t vl) { + return __riscv_vsxw_v_u8m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i8.nxv32i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m4(uint8_t *base, vuint8m4_t index, vuint8m4_t value, size_t vl) { + return __riscv_vsxw_v_u8m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv64i8.nxv64i8.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u8m8(uint8_t *base, vuint8m8_t index, vuint8m8_t value, size_t vl) { + return __riscv_vsxw_v_u8m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m1 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i16.nxv4i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m1(uint16_t *base, vuint16m1_t index, vuint16m1_t value, size_t vl) { + return __riscv_vsxw_v_u16m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i16.nxv8i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m2(uint16_t *base, vuint16m2_t index, vuint16m2_t value, size_t vl) { + return __riscv_vsxw_v_u16m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i16.nxv16i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m4(uint16_t *base, vuint16m4_t index, vuint16m4_t value, size_t vl) { + return __riscv_vsxw_v_u16m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv32i16.nxv32i16.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u16m8(uint16_t *base, vuint16m8_t index, vuint16m8_t value, size_t vl) { + return 
__riscv_vsxw_v_u16m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i32.nxv2i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m1(uint32_t *base, vuint32m1_t index, vuint32m1_t value, size_t vl) { + return __riscv_vsxw_v_u32m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i32.nxv4i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m2(uint32_t *base, vuint32m2_t index, vuint32m2_t value, size_t vl) { + return __riscv_vsxw_v_u32m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv8i32.nxv8i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u32m4(uint32_t *base, vuint32m4_t index, vuint32m4_t value, size_t vl) { + return __riscv_vsxw_v_u32m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv16i32.nxv16i32.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// 
+void test_th_vsxw_v_u32m8(uint32_t *base, vuint32m8_t index, vuint32m8_t value, size_t vl) { + return __riscv_vsxw_v_u32m8(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv1i64.nxv1i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m1(uint64_t *base, vuint64m1_t index, vuint64m1_t value, size_t vl) { + return __riscv_vsxw_v_u64m1(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv2i64.nxv2i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m2(uint64_t *base, vuint64m2_t index, vuint64m2_t value, size_t vl) { + return __riscv_vsxw_v_u64m2(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsxw.nxv4i64.nxv4i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m4(uint64_t *base, vuint64m4_t index, vuint64m4_t value, size_t vl) { + return __riscv_vsxw_v_u64m4(base, index, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsxw_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[INDEX:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vsxw.nxv8i64.nxv8i64.i64( [[VALUE]], ptr [[BASE]], [[INDEX]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsxw_v_u64m8(uint64_t *base, vuint64m8_t index, vuint64m8_t value, size_t vl) { + return __riscv_vsxw_v_u64m8(base, index, value, vl); +}