From b1dcc16065791f84651fb194a515f900a0e68c27 Mon Sep 17 00:00:00 2001 From: Kiva Date: Mon, 8 Jan 2024 20:06:49 +0800 Subject: [PATCH] [Clang][XTHeadVector] Add vector strided load/store intrinsics (#49) * [Clang][XTHeadVector] Define Vector Strided Load/Store Intrinsics * [NFC][XTHeadVector] Make things clear * [NFC][XTHeadVector] Reorganize clang tests * [Clang][XTHeadVector] Initialize strided tests * [Clang][XTHeadVector] Add strided tests * [Clang][XTHeadVector] Unify all tests * [Clang][XTHeadVector] Unify all tests * [Clang][XTHeadVector] Extract wrappers to a standalone file * [Clang][XTHeadVector] Add wrapper tests --- .../clang/Basic/riscv_vector_xtheadv.td | 415 ++++-------- .../Basic/riscv_vector_xtheadv_wrappers.td | 596 ++++++++++++++++++ .../strided/thead/vlsb.c | 166 +++++ .../strided/thead/vlsbu.c | 166 +++++ .../strided/thead/vlse16.c | 126 ++++ .../strided/thead/vlse32.c | 126 ++++ .../strided/thead/vlse64.c | 126 ++++ .../strided/thead/vlse8.c | 86 +++ .../strided/thead/vlsh.c | 167 +++++ .../strided/thead/vlshu.c | 166 +++++ .../strided/thead/vlsw.c | 166 +++++ .../strided/thead/vlswu.c | 166 +++++ .../strided/thead/vssb.c | 327 ++++++++++ .../strided/thead/vsse16.c | 126 ++++ .../strided/thead/vsse32.c | 126 ++++ .../strided/thead/vsse64.c | 126 ++++ .../strided/thead/vsse8.c | 86 +++ .../strided/thead/vssh.c | 326 ++++++++++ .../strided/thead/vssw.c | 326 ++++++++++ .../strided/wrappers/vlsb.c | 166 +++++ .../strided/wrappers/vlsbu.c | 166 +++++ .../strided/wrappers/vlse16.c | 126 ++++ .../strided/wrappers/vlse32.c | 126 ++++ .../strided/wrappers/vlse64.c | 126 ++++ .../strided/wrappers/vlse8.c | 86 +++ .../strided/wrappers/vlsh.c | 167 +++++ .../strided/wrappers/vlshu.c | 166 +++++ .../strided/wrappers/vlsw.c | 166 +++++ .../strided/wrappers/vlswu.c | 166 +++++ .../strided/wrappers/vssb.c | 327 ++++++++++ .../strided/wrappers/vsse16.c | 126 ++++ .../strided/wrappers/vsse32.c | 126 ++++ .../strided/wrappers/vsse64.c | 126 ++++ 
.../strided/wrappers/vsse8.c | 86 +++ .../strided/wrappers/vssh.c | 326 ++++++++++ .../strided/wrappers/vssw.c | 326 ++++++++++ .../{ => unit-stride/thead}/vlb.c | 0 .../{ => unit-stride/thead}/vlbu.c | 0 .../{ => unit-stride/thead}/vle16.c | 22 +- .../{ => unit-stride/thead}/vle32.c | 22 +- .../{ => unit-stride/thead}/vle64.c | 22 +- .../{ => unit-stride/thead}/vle8.c | 14 +- .../{ => unit-stride/thead}/vlh.c | 0 .../{ => unit-stride/thead}/vlhu.c | 0 .../{ => unit-stride/thead}/vlw.c | 0 .../{ => unit-stride/thead}/vlwu.c | 0 .../{ => unit-stride/thead}/vsb.c | 0 .../{ => unit-stride/thead}/vse16.c | 22 +- .../{ => unit-stride/thead}/vse32.c | 22 +- .../{ => unit-stride/thead}/vse64.c | 22 +- .../{ => unit-stride/thead}/vse8.c | 14 +- .../{ => unit-stride/thead}/vsh.c | 0 .../{ => unit-stride/thead}/vsw.c | 0 .../{ => unit-stride}/wrappers/vlb.c | 0 .../{ => unit-stride}/wrappers/vlbu.c | 0 .../{ => unit-stride}/wrappers/vle16.c | 22 +- .../{ => unit-stride}/wrappers/vle32.c | 22 +- .../{ => unit-stride}/wrappers/vle64.c | 22 +- .../{ => unit-stride}/wrappers/vle8.c | 14 +- .../{ => unit-stride}/wrappers/vlh.c | 0 .../{ => unit-stride}/wrappers/vlhu.c | 0 .../{ => unit-stride}/wrappers/vlw.c | 0 .../{ => unit-stride}/wrappers/vlwu.c | 0 .../{ => unit-stride}/wrappers/vsb.c | 0 .../{ => unit-stride}/wrappers/vse16.c | 22 +- .../{ => unit-stride}/wrappers/vse32.c | 22 +- .../{ => unit-stride}/wrappers/vse64.c | 22 +- .../{ => unit-stride}/wrappers/vse8.c | 14 +- .../{ => unit-stride}/wrappers/vsh.c | 0 .../{ => unit-stride}/wrappers/vsw.c | 0 70 files changed, 6682 insertions(+), 457 deletions(-) create mode 100644 clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c create 
mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c create mode 100644 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlb.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlbu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vle16.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vle32.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vle64.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vle8.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlh.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlhu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlw.c (100%) rename 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vlwu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vsb.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vse16.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vse32.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vse64.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vse8.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vsh.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride/thead}/vsw.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlb.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlbu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vle16.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vle32.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vle64.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vle8.c (95%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlh.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlhu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlw.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vlwu.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vsb.c (100%) rename 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vse16.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vse32.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vse64.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vse8.c (89%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vsh.c (100%) rename clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/{ => unit-stride}/wrappers/vsw.c (100%) diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index 935a0e264dd8f7..64eed61541eaca 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -173,326 +173,75 @@ let HasBuiltinAlias = false, // 7. Vector Loads and Stores //===----------------------------------------------------------------------===// -let HeaderCode = -[{ -// Vector Unit-stride loads -#define __riscv_vlb_v_i8m1(base, vl) __riscv_th_vlb_v_i8m1(base, vl) -#define __riscv_vlb_v_i8m2(base, vl) __riscv_th_vlb_v_i8m2(base, vl) -#define __riscv_vlb_v_i8m4(base, vl) __riscv_th_vlb_v_i8m4(base, vl) -#define __riscv_vlb_v_i8m8(base, vl) __riscv_th_vlb_v_i8m8(base, vl) -#define __riscv_vlb_v_i16m1(base, vl) __riscv_th_vlb_v_i16m1(base, vl) -#define __riscv_vlb_v_i16m2(base, vl) __riscv_th_vlb_v_i16m2(base, vl) -#define __riscv_vlb_v_i16m4(base, vl) __riscv_th_vlb_v_i16m4(base, vl) -#define __riscv_vlb_v_i16m8(base, vl) __riscv_th_vlb_v_i16m8(base, vl) -#define __riscv_vlb_v_i32m1(base, vl) __riscv_th_vlb_v_i32m1(base, vl) -#define __riscv_vlb_v_i32m2(base, vl) __riscv_th_vlb_v_i32m2(base, vl) -#define __riscv_vlb_v_i32m4(base, vl) __riscv_th_vlb_v_i32m4(base, vl) -#define __riscv_vlb_v_i32m8(base, vl) __riscv_th_vlb_v_i32m8(base, vl) -#define __riscv_vlb_v_i64m1(base, vl) 
__riscv_th_vlb_v_i64m1(base, vl) -#define __riscv_vlb_v_i64m2(base, vl) __riscv_th_vlb_v_i64m2(base, vl) -#define __riscv_vlb_v_i64m4(base, vl) __riscv_th_vlb_v_i64m4(base, vl) -#define __riscv_vlb_v_i64m8(base, vl) __riscv_th_vlb_v_i64m8(base, vl) -#define __riscv_vlh_v_i8m1(base, vl) __riscv_th_vlh_v_i8m1(base, vl) -#define __riscv_vlh_v_i8m2(base, vl) __riscv_th_vlh_v_i8m2(base, vl) -#define __riscv_vlh_v_i8m4(base, vl) __riscv_th_vlh_v_i8m4(base, vl) -#define __riscv_vlh_v_i8m8(base, vl) __riscv_th_vlh_v_i8m8(base, vl) -#define __riscv_vlh_v_i16m1(base, vl) __riscv_th_vlh_v_i16m1(base, vl) -#define __riscv_vlh_v_i16m2(base, vl) __riscv_th_vlh_v_i16m2(base, vl) -#define __riscv_vlh_v_i16m4(base, vl) __riscv_th_vlh_v_i16m4(base, vl) -#define __riscv_vlh_v_i16m8(base, vl) __riscv_th_vlh_v_i16m8(base, vl) -#define __riscv_vlh_v_i32m1(base, vl) __riscv_th_vlh_v_i32m1(base, vl) -#define __riscv_vlh_v_i32m2(base, vl) __riscv_th_vlh_v_i32m2(base, vl) -#define __riscv_vlh_v_i32m4(base, vl) __riscv_th_vlh_v_i32m4(base, vl) -#define __riscv_vlh_v_i32m8(base, vl) __riscv_th_vlh_v_i32m8(base, vl) -#define __riscv_vlh_v_i64m1(base, vl) __riscv_th_vlh_v_i64m1(base, vl) -#define __riscv_vlh_v_i64m2(base, vl) __riscv_th_vlh_v_i64m2(base, vl) -#define __riscv_vlh_v_i64m4(base, vl) __riscv_th_vlh_v_i64m4(base, vl) -#define __riscv_vlh_v_i64m8(base, vl) __riscv_th_vlh_v_i64m8(base, vl) -#define __riscv_vlw_v_i8m1(base, vl) __riscv_th_vlw_v_i8m1(base, vl) -#define __riscv_vlw_v_i8m2(base, vl) __riscv_th_vlw_v_i8m2(base, vl) -#define __riscv_vlw_v_i8m4(base, vl) __riscv_th_vlw_v_i8m4(base, vl) -#define __riscv_vlw_v_i8m8(base, vl) __riscv_th_vlw_v_i8m8(base, vl) -#define __riscv_vlw_v_i16m1(base, vl) __riscv_th_vlw_v_i16m1(base, vl) -#define __riscv_vlw_v_i16m2(base, vl) __riscv_th_vlw_v_i16m2(base, vl) -#define __riscv_vlw_v_i16m4(base, vl) __riscv_th_vlw_v_i16m4(base, vl) -#define __riscv_vlw_v_i16m8(base, vl) __riscv_th_vlw_v_i16m8(base, vl) -#define __riscv_vlw_v_i32m1(base, vl) 
__riscv_th_vlw_v_i32m1(base, vl) -#define __riscv_vlw_v_i32m2(base, vl) __riscv_th_vlw_v_i32m2(base, vl) -#define __riscv_vlw_v_i32m4(base, vl) __riscv_th_vlw_v_i32m4(base, vl) -#define __riscv_vlw_v_i32m8(base, vl) __riscv_th_vlw_v_i32m8(base, vl) -#define __riscv_vlw_v_i64m1(base, vl) __riscv_th_vlw_v_i64m1(base, vl) -#define __riscv_vlw_v_i64m2(base, vl) __riscv_th_vlw_v_i64m2(base, vl) -#define __riscv_vlw_v_i64m4(base, vl) __riscv_th_vlw_v_i64m4(base, vl) -#define __riscv_vlw_v_i64m8(base, vl) __riscv_th_vlw_v_i64m8(base, vl) -#define __riscv_vlbu_v_u8m1(base, vl) __riscv_th_vlbu_v_u8m1(base, vl) -#define __riscv_vlbu_v_u8m2(base, vl) __riscv_th_vlbu_v_u8m2(base, vl) -#define __riscv_vlbu_v_u8m4(base, vl) __riscv_th_vlbu_v_u8m4(base, vl) -#define __riscv_vlbu_v_u8m8(base, vl) __riscv_th_vlbu_v_u8m8(base, vl) -#define __riscv_vlbu_v_u16m1(base, vl) __riscv_th_vlbu_v_u16m1(base, vl) -#define __riscv_vlbu_v_u16m2(base, vl) __riscv_th_vlbu_v_u16m2(base, vl) -#define __riscv_vlbu_v_u16m4(base, vl) __riscv_th_vlbu_v_u16m4(base, vl) -#define __riscv_vlbu_v_u16m8(base, vl) __riscv_th_vlbu_v_u16m8(base, vl) -#define __riscv_vlbu_v_u32m1(base, vl) __riscv_th_vlbu_v_u32m1(base, vl) -#define __riscv_vlbu_v_u32m2(base, vl) __riscv_th_vlbu_v_u32m2(base, vl) -#define __riscv_vlbu_v_u32m4(base, vl) __riscv_th_vlbu_v_u32m4(base, vl) -#define __riscv_vlbu_v_u32m8(base, vl) __riscv_th_vlbu_v_u32m8(base, vl) -#define __riscv_vlbu_v_u64m1(base, vl) __riscv_th_vlbu_v_u64m1(base, vl) -#define __riscv_vlbu_v_u64m2(base, vl) __riscv_th_vlbu_v_u64m2(base, vl) -#define __riscv_vlbu_v_u64m4(base, vl) __riscv_th_vlbu_v_u64m4(base, vl) -#define __riscv_vlbu_v_u64m8(base, vl) __riscv_th_vlbu_v_u64m8(base, vl) -#define __riscv_vlhu_v_u8m1(base, vl) __riscv_th_vlhu_v_u8m1(base, vl) -#define __riscv_vlhu_v_u8m2(base, vl) __riscv_th_vlhu_v_u8m2(base, vl) -#define __riscv_vlhu_v_u8m4(base, vl) __riscv_th_vlhu_v_u8m4(base, vl) -#define __riscv_vlhu_v_u8m8(base, vl) __riscv_th_vlhu_v_u8m8(base, 
vl) -#define __riscv_vlhu_v_u16m1(base, vl) __riscv_th_vlhu_v_u16m1(base, vl) -#define __riscv_vlhu_v_u16m2(base, vl) __riscv_th_vlhu_v_u16m2(base, vl) -#define __riscv_vlhu_v_u16m4(base, vl) __riscv_th_vlhu_v_u16m4(base, vl) -#define __riscv_vlhu_v_u16m8(base, vl) __riscv_th_vlhu_v_u16m8(base, vl) -#define __riscv_vlhu_v_u32m1(base, vl) __riscv_th_vlhu_v_u32m1(base, vl) -#define __riscv_vlhu_v_u32m2(base, vl) __riscv_th_vlhu_v_u32m2(base, vl) -#define __riscv_vlhu_v_u32m4(base, vl) __riscv_th_vlhu_v_u32m4(base, vl) -#define __riscv_vlhu_v_u32m8(base, vl) __riscv_th_vlhu_v_u32m8(base, vl) -#define __riscv_vlhu_v_u64m1(base, vl) __riscv_th_vlhu_v_u64m1(base, vl) -#define __riscv_vlhu_v_u64m2(base, vl) __riscv_th_vlhu_v_u64m2(base, vl) -#define __riscv_vlhu_v_u64m4(base, vl) __riscv_th_vlhu_v_u64m4(base, vl) -#define __riscv_vlhu_v_u64m8(base, vl) __riscv_th_vlhu_v_u64m8(base, vl) -#define __riscv_vlwu_v_u8m1(base, vl) __riscv_th_vlwu_v_u8m1(base, vl) -#define __riscv_vlwu_v_u8m2(base, vl) __riscv_th_vlwu_v_u8m2(base, vl) -#define __riscv_vlwu_v_u8m4(base, vl) __riscv_th_vlwu_v_u8m4(base, vl) -#define __riscv_vlwu_v_u8m8(base, vl) __riscv_th_vlwu_v_u8m8(base, vl) -#define __riscv_vlwu_v_u16m1(base, vl) __riscv_th_vlwu_v_u16m1(base, vl) -#define __riscv_vlwu_v_u16m2(base, vl) __riscv_th_vlwu_v_u16m2(base, vl) -#define __riscv_vlwu_v_u16m4(base, vl) __riscv_th_vlwu_v_u16m4(base, vl) -#define __riscv_vlwu_v_u16m8(base, vl) __riscv_th_vlwu_v_u16m8(base, vl) -#define __riscv_vlwu_v_u32m1(base, vl) __riscv_th_vlwu_v_u32m1(base, vl) -#define __riscv_vlwu_v_u32m2(base, vl) __riscv_th_vlwu_v_u32m2(base, vl) -#define __riscv_vlwu_v_u32m4(base, vl) __riscv_th_vlwu_v_u32m4(base, vl) -#define __riscv_vlwu_v_u32m8(base, vl) __riscv_th_vlwu_v_u32m8(base, vl) -#define __riscv_vlwu_v_u64m1(base, vl) __riscv_th_vlwu_v_u64m1(base, vl) -#define __riscv_vlwu_v_u64m2(base, vl) __riscv_th_vlwu_v_u64m2(base, vl) -#define __riscv_vlwu_v_u64m4(base, vl) __riscv_th_vlwu_v_u64m4(base, vl) 
-#define __riscv_vlwu_v_u64m8(base, vl) __riscv_th_vlwu_v_u64m8(base, vl) -#define __riscv_vle8_v_i8m1(base, vl) __riscv_th_vle8_v_i8m1(base, vl) -#define __riscv_vle8_v_i8m2(base, vl) __riscv_th_vle8_v_i8m2(base, vl) -#define __riscv_vle8_v_i8m4(base, vl) __riscv_th_vle8_v_i8m4(base, vl) -#define __riscv_vle8_v_i8m8(base, vl) __riscv_th_vle8_v_i8m8(base, vl) -#define __riscv_vle16_v_i16m1(base, vl) __riscv_th_vle16_v_i16m1(base, vl) -#define __riscv_vle16_v_i16m2(base, vl) __riscv_th_vle16_v_i16m2(base, vl) -#define __riscv_vle16_v_i16m4(base, vl) __riscv_th_vle16_v_i16m4(base, vl) -#define __riscv_vle16_v_i16m8(base, vl) __riscv_th_vle16_v_i16m8(base, vl) -#define __riscv_vle32_v_i32m1(base, vl) __riscv_th_vle32_v_i32m1(base, vl) -#define __riscv_vle32_v_i32m2(base, vl) __riscv_th_vle32_v_i32m2(base, vl) -#define __riscv_vle32_v_i32m4(base, vl) __riscv_th_vle32_v_i32m4(base, vl) -#define __riscv_vle32_v_i32m8(base, vl) __riscv_th_vle32_v_i32m8(base, vl) -#define __riscv_vle64_v_i64m1(base, vl) __riscv_th_vle64_v_i64m1(base, vl) -#define __riscv_vle64_v_i64m2(base, vl) __riscv_th_vle64_v_i64m2(base, vl) -#define __riscv_vle64_v_i64m4(base, vl) __riscv_th_vle64_v_i64m4(base, vl) -#define __riscv_vle64_v_i64m8(base, vl) __riscv_th_vle64_v_i64m8(base, vl) -#define __riscv_vle8_v_u8m1(base, vl) __riscv_th_vle8_v_u8m1(base, vl) -#define __riscv_vle8_v_u8m2(base, vl) __riscv_th_vle8_v_u8m2(base, vl) -#define __riscv_vle8_v_u8m4(base, vl) __riscv_th_vle8_v_u8m4(base, vl) -#define __riscv_vle8_v_u8m8(base, vl) __riscv_th_vle8_v_u8m8(base, vl) -#define __riscv_vle16_v_u16m1(base, vl) __riscv_th_vle16_v_u16m1(base, vl) -#define __riscv_vle16_v_u16m2(base, vl) __riscv_th_vle16_v_u16m2(base, vl) -#define __riscv_vle16_v_u16m4(base, vl) __riscv_th_vle16_v_u16m4(base, vl) -#define __riscv_vle16_v_u16m8(base, vl) __riscv_th_vle16_v_u16m8(base, vl) -#define __riscv_vle32_v_u32m1(base, vl) __riscv_th_vle32_v_u32m1(base, vl) -#define __riscv_vle32_v_u32m2(base, vl) 
__riscv_th_vle32_v_u32m2(base, vl) -#define __riscv_vle32_v_u32m4(base, vl) __riscv_th_vle32_v_u32m4(base, vl) -#define __riscv_vle32_v_u32m8(base, vl) __riscv_th_vle32_v_u32m8(base, vl) -#define __riscv_vle64_v_u64m1(base, vl) __riscv_th_vle64_v_u64m1(base, vl) -#define __riscv_vle64_v_u64m2(base, vl) __riscv_th_vle64_v_u64m2(base, vl) -#define __riscv_vle64_v_u64m4(base, vl) __riscv_th_vle64_v_u64m4(base, vl) -#define __riscv_vle64_v_u64m8(base, vl) __riscv_th_vle64_v_u64m8(base, vl) -#define __riscv_vle16_v_f16m1(base, vl) __riscv_th_vle16_v_f16m1(base, vl) -#define __riscv_vle16_v_f16m2(base, vl) __riscv_th_vle16_v_f16m2(base, vl) -#define __riscv_vle16_v_f16m4(base, vl) __riscv_th_vle16_v_f16m4(base, vl) -#define __riscv_vle16_v_f16m8(base, vl) __riscv_th_vle16_v_f16m8(base, vl) -#define __riscv_vle32_v_f32m1(base, vl) __riscv_th_vle32_v_f32m1(base, vl) -#define __riscv_vle32_v_f32m2(base, vl) __riscv_th_vle32_v_f32m2(base, vl) -#define __riscv_vle32_v_f32m4(base, vl) __riscv_th_vle32_v_f32m4(base, vl) -#define __riscv_vle32_v_f32m8(base, vl) __riscv_th_vle32_v_f32m8(base, vl) -#define __riscv_vle64_v_f64m1(base, vl) __riscv_th_vle64_v_f64m1(base, vl) -#define __riscv_vle64_v_f64m2(base, vl) __riscv_th_vle64_v_f64m2(base, vl) -#define __riscv_vle64_v_f64m4(base, vl) __riscv_th_vle64_v_f64m4(base, vl) -#define __riscv_vle64_v_f64m8(base, vl) __riscv_th_vle64_v_f64m8(base, vl) - -// Vector Unit-stride stores -#define __riscv_vsb_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i16m2(dst_ptr, 
vector_value, vl) __riscv_th_vsb_v_i16m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m8(dst_ptr, vector_value, vl) -#define 
__riscv_vsh_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m4(dst_ptr, vector_value, vl) 
-#define __riscv_vsw_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m2(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m8(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m1(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m2(dst_ptr, 
vector_value, vl) -#define __riscv_vsb_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m4(dst_ptr, vector_value, vl) -#define __riscv_vsb_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m8(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m1(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m2(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m4(dst_ptr, vector_value, vl) -#define __riscv_vsh_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u8m1(dst_ptr, vector_value, vl) 
__riscv_th_vsw_v_u8m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m8(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m1(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m2(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m4(dst_ptr, vector_value, vl) -#define __riscv_vsw_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m8(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m1(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m2(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m4(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_i8m8(dst_ptr, 
vector_value, vl) __riscv_th_vse8_v_i8m8(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m1(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m2(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m4(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m8(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m1(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m2(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m4(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m8(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m1(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m2(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m4(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m8(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m1(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m2(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m4(dst_ptr, vector_value, vl) -#define __riscv_vse8_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m8(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m1(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_u16m2(dst_ptr, vector_value, vl) 
__riscv_th_vse16_v_u16m2(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m4(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m8(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m1(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m2(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m4(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m8(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m1(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m2(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m4(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m8(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_f16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m1(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_f16m2(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m2(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_f16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m4(dst_ptr, vector_value, vl) -#define __riscv_vse16_v_f16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m8(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_f32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m1(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_f32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m2(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_f32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m4(dst_ptr, vector_value, vl) -#define __riscv_vse32_v_f32m8(dst_ptr, vector_value, vl) 
__riscv_th_vse32_v_f32m8(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_f64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m1(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_f64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m2(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_f64m4(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m4(dst_ptr, vector_value, vl) -#define __riscv_vse64_v_f64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m8(dst_ptr, vector_value, vl) - -}] in -def th_unit_stride_wrapper_macros: RVVHeader; - let SupportOverloading = false, UnMaskedPolicyScheme = HasPassthruOperand in { + // 7.1 Unit-stride load: vle8/16/32/64 multiclass RVVVLEBuiltin types> { let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in { foreach type = types in { - // `vPCe` is type `const T * -> VectorType` + // `vPCe` is type `const T * -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` def : RVVOutBuiltin<"v", "vPCe", type>; if !not(IsFloat.val) then { - // `UvPCUe` is type `const unsigned T * -> unsigned VectorType` + // `UvPCUe` is type `const unsigned T * -> {VL} -> unsigned VectorType` def : RVVOutBuiltin<"Uv", "UvPCUe", type>; } } } } + // 7.1 Unit-stride load: vlb/h/w/bu/hu/wu multiclass RVVVLXBuiltin types> { foreach type = types in { - // `vPCe` is type `const T * -> VectorType` + // `vPCe` is type `const T * -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in def : RVVOutBuiltin<"v", "vPCe", type>; - // `UvPCUe` is type `const unsigned T * -> unsigned VectorType` + // `UvPCUe` is type `const unsigned T * -> {VL} -> unsigned VectorType` let Name = NAME # "u_v", IRName = ir # "u", MaskedIRName = ir # "u_mask" in def : RVVOutBuiltin<"Uv", "UvPCUe", type>; } } + + // 7.2 Strided load: vlse8/16/32/64 + multiclass RVVVLSEBuiltin types> { + let Name = NAME # 
"_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + foreach type = types in { + // `vPCet` is type `const T * -> PtrDiffT -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def : RVVOutBuiltin<"v", "vPCet", type>; + if !not(IsFloat.val) then { + // `UvPCUet` is type `const unsigned T * -> PtrDiffT -> {VL} -> unsigned VectorType` + def : RVVOutBuiltin<"Uv", "UvPCUet", type>; + } + } + } + } + + // 7.2 Strided load: vlsb/h/w/bu/hu/wu + multiclass RVVVLSXBuiltin types> { + foreach type = types in { + // `vPCez` is type `const T * -> SizeT -> {VL} -> VectorType` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + let Name = NAME # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in + def : RVVOutBuiltin<"v", "vPCez", type>; + // `UvPCUez` is type `const unsigned T * -> SizeT -> {VL} -> unsigned VectorType` + let Name = NAME # "u_v", + IRName = ir # "u", + MaskedIRName = ir # "u_mask" in + def : RVVOutBuiltin<"Uv", "UvPCUez", type>; + } + } } let HasMaskedOffOperand = false, @@ -511,35 +260,88 @@ let HasMaskedOffOperand = false, else IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType()}; }] in { + // 7.1 Unit-stride store: vse8/16/32/64 multiclass RVVVSEBuiltin types> { let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in { foreach type = types in { - // `0Pev` is type `T * -> VectorType -> void` + // `0Pev` is type `T * -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` def : RVVBuiltin<"v", "0Pev", type>; if !not(IsFloat.val) then { - // `0PUeUv` is type `unsigned T * -> unsigned VectorType -> void` + // `0PUeUv` is type `unsigned T * -> unsigned VectorType -> {VL} -> void` def : RVVBuiltin<"Uv", "0PUeUv", type>; } } } } + // 7.1 Unit-stride store: vsb/h/w/bu/hu/wu multiclass RVVVSXBuiltin types> { let Name = NAME # "_v", IRName = ir, MaskedIRName = ir # "_mask" in { foreach type = types in 
{ - // `0Pev` is type `T * -> VectorType -> void` + // `0Pev` is type `T * -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` def : RVVBuiltin<"v", "0Pev", type>; - // `0PUeUv` is type `unsigned T * -> unsigned VectorType -> void` + // `0PUeUv` is type `unsigned T * -> unsigned VectorType -> {VL} -> void` def : RVVBuiltin<"Uv", "0PUeUv", type>; } } } } +let HasMaskedOffOperand = false, + MaskedPolicyScheme = NonePolicy, + ManualCodegen = [{ + if (IsMasked) { + // Builtin: (mask, ptr, stride, value, vl). Intrinsic: (value, ptr, stride, mask, vl) + std::swap(Ops[0], Ops[3]); + } else { + // Builtin: (ptr, stride, value, vl). Intrinsic: (value, ptr, stride, vl) + std::rotate(Ops.begin(), Ops.begin() + 2, Ops.begin() + 3); + } + Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); + if (IsMasked) + IntrinsicTypes = {Ops[0]->getType(), Ops[4]->getType()}; + else + IntrinsicTypes = {Ops[0]->getType(), Ops[3]->getType()}; + }] in { + // 7.2 Strided store: vsse8/16/32/64 + multiclass RVVVSSEBuiltin<string ir, list<string> types> { + let Name = NAME # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + foreach type = types in { + // `0Petv` is type `T * -> PtrDiffT -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def : RVVBuiltin<"v", "0Petv", type>; + if !not(IsFloat<type>.val) then { + // `0PUetUv` is type `unsigned T * -> PtrDiffT -> unsigned VectorType -> {VL} -> void` + def : RVVBuiltin<"Uv", "0PUetUv", type>; + } + } + } + } + + // 7.2 Strided store: vssb/h/w/bu/hu/wu + multiclass RVVVSSXBuiltin<string ir, list<string> types> { + let Name = NAME # "_v", + IRName = ir, + MaskedIRName = ir # "_mask" in { + foreach type = types in { + // `0Pezv` is type `T * -> SizeT -> VectorType -> {VL} -> void` + // Note: the last operand {VL} is inserted by `RVVIntrinsic::computeBuiltinTypes` + def : RVVBuiltin<"v", "0Pezv", type>; + // `0PUezUv` is type `unsigned T * -> SizeT
-> unsigned VectorType -> {VL} -> void` + def : RVVBuiltin<"Uv", "0PUezUv", type>; + } + } + } +} + // 7.1. Vector Unit-Stride Operations defm th_vlb : RVVVLXBuiltin<"th_vlb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 defm th_vlh : RVVVLXBuiltin<"th_vlh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 @@ -557,6 +359,23 @@ defm th_vse16: RVVVSEBuiltin<"th_vse", ["s","x"]>; // i16, f16 defm th_vse32: RVVVSEBuiltin<"th_vse", ["i","f"]>; // i32, f32 defm th_vse64: RVVVSEBuiltin<"th_vse", ["l","d"]>; // i64, f64 +// 7.2. Vector Strided Load/Store Operations +defm th_vlsb : RVVVLSXBuiltin<"th_vlsb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlsh : RVVVLSXBuiltin<"th_vlsh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlsw : RVVVLSXBuiltin<"th_vlsw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vlse8 : RVVVLSEBuiltin<"th_vlse", ["c"]>; // i8 +defm th_vlse16: RVVVLSEBuiltin<"th_vlse", ["s","x"]>; // i16, f16 +defm th_vlse32: RVVVLSEBuiltin<"th_vlse", ["i","f"]>; // i32, f32 +defm th_vlse64: RVVVLSEBuiltin<"th_vlse", ["l","d"]>; // i64, f64 + +defm th_vssb : RVVVSSXBuiltin<"th_vssb", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vssh : RVVVSSXBuiltin<"th_vssh", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vssw : RVVVSSXBuiltin<"th_vssw", ["c", "s", "i", "l"]>; // i8, i16, i32, i64 +defm th_vsse8 : RVVVSSEBuiltin<"th_vsse", ["c"]>; // i8 +defm th_vsse16: RVVVSSEBuiltin<"th_vsse", ["s","x"]>; // i16, f16 +defm th_vsse32: RVVVSSEBuiltin<"th_vsse", ["i","f"]>; // i32, f32 +defm th_vsse64: RVVVSSEBuiltin<"th_vsse", ["l","d"]>; // i64, f64 + //===----------------------------------------------------------------------===// // 12. 
Vector Integer Arithmetic Operations //===----------------------------------------------------------------------===// @@ -564,3 +383,5 @@ defm th_vse64: RVVVSEBuiltin<"th_vse", ["l","d"]>; // i64, f64 let UnMaskedPolicyScheme = HasPassthruOperand in { defm th_vadd : RVVIntBinBuiltinSet; } + +include "riscv_vector_xtheadv_wrappers.td" diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td new file mode 100644 index 00000000000000..fa6d1a93b0c2a6 --- /dev/null +++ b/clang/include/clang/Basic/riscv_vector_xtheadv_wrappers.td @@ -0,0 +1,596 @@ +//==--- riscv_vector_xtheadv_wrappers.td - RISC-V V-ext wrapper macros ---===// +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. +// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// +//===----------------------------------------------------------------------===// +// +// This file defines the wrapper macros for the RISC-V XTHeadVector builtins. See: +// +// https://github.com/riscv-non-isa/rvv-intrinsic-doc/tree/v0.7.1 +// +//===----------------------------------------------------------------------===// + +//===----------------------------------------------------------------------===// +// 7.
Vector Loads and Stores +//===----------------------------------------------------------------------===// + +let HeaderCode = +[{ +// Vector Unit-stride loads +#define __riscv_vlb_v_i8m1(base, vl) __riscv_th_vlb_v_i8m1(base, vl) +#define __riscv_vlb_v_i8m2(base, vl) __riscv_th_vlb_v_i8m2(base, vl) +#define __riscv_vlb_v_i8m4(base, vl) __riscv_th_vlb_v_i8m4(base, vl) +#define __riscv_vlb_v_i8m8(base, vl) __riscv_th_vlb_v_i8m8(base, vl) +#define __riscv_vlb_v_i16m1(base, vl) __riscv_th_vlb_v_i16m1(base, vl) +#define __riscv_vlb_v_i16m2(base, vl) __riscv_th_vlb_v_i16m2(base, vl) +#define __riscv_vlb_v_i16m4(base, vl) __riscv_th_vlb_v_i16m4(base, vl) +#define __riscv_vlb_v_i16m8(base, vl) __riscv_th_vlb_v_i16m8(base, vl) +#define __riscv_vlb_v_i32m1(base, vl) __riscv_th_vlb_v_i32m1(base, vl) +#define __riscv_vlb_v_i32m2(base, vl) __riscv_th_vlb_v_i32m2(base, vl) +#define __riscv_vlb_v_i32m4(base, vl) __riscv_th_vlb_v_i32m4(base, vl) +#define __riscv_vlb_v_i32m8(base, vl) __riscv_th_vlb_v_i32m8(base, vl) +#define __riscv_vlb_v_i64m1(base, vl) __riscv_th_vlb_v_i64m1(base, vl) +#define __riscv_vlb_v_i64m2(base, vl) __riscv_th_vlb_v_i64m2(base, vl) +#define __riscv_vlb_v_i64m4(base, vl) __riscv_th_vlb_v_i64m4(base, vl) +#define __riscv_vlb_v_i64m8(base, vl) __riscv_th_vlb_v_i64m8(base, vl) +#define __riscv_vlh_v_i8m1(base, vl) __riscv_th_vlh_v_i8m1(base, vl) +#define __riscv_vlh_v_i8m2(base, vl) __riscv_th_vlh_v_i8m2(base, vl) +#define __riscv_vlh_v_i8m4(base, vl) __riscv_th_vlh_v_i8m4(base, vl) +#define __riscv_vlh_v_i8m8(base, vl) __riscv_th_vlh_v_i8m8(base, vl) +#define __riscv_vlh_v_i16m1(base, vl) __riscv_th_vlh_v_i16m1(base, vl) +#define __riscv_vlh_v_i16m2(base, vl) __riscv_th_vlh_v_i16m2(base, vl) +#define __riscv_vlh_v_i16m4(base, vl) __riscv_th_vlh_v_i16m4(base, vl) +#define __riscv_vlh_v_i16m8(base, vl) __riscv_th_vlh_v_i16m8(base, vl) +#define __riscv_vlh_v_i32m1(base, vl) __riscv_th_vlh_v_i32m1(base, vl) +#define __riscv_vlh_v_i32m2(base, vl) 
__riscv_th_vlh_v_i32m2(base, vl) +#define __riscv_vlh_v_i32m4(base, vl) __riscv_th_vlh_v_i32m4(base, vl) +#define __riscv_vlh_v_i32m8(base, vl) __riscv_th_vlh_v_i32m8(base, vl) +#define __riscv_vlh_v_i64m1(base, vl) __riscv_th_vlh_v_i64m1(base, vl) +#define __riscv_vlh_v_i64m2(base, vl) __riscv_th_vlh_v_i64m2(base, vl) +#define __riscv_vlh_v_i64m4(base, vl) __riscv_th_vlh_v_i64m4(base, vl) +#define __riscv_vlh_v_i64m8(base, vl) __riscv_th_vlh_v_i64m8(base, vl) +#define __riscv_vlw_v_i8m1(base, vl) __riscv_th_vlw_v_i8m1(base, vl) +#define __riscv_vlw_v_i8m2(base, vl) __riscv_th_vlw_v_i8m2(base, vl) +#define __riscv_vlw_v_i8m4(base, vl) __riscv_th_vlw_v_i8m4(base, vl) +#define __riscv_vlw_v_i8m8(base, vl) __riscv_th_vlw_v_i8m8(base, vl) +#define __riscv_vlw_v_i16m1(base, vl) __riscv_th_vlw_v_i16m1(base, vl) +#define __riscv_vlw_v_i16m2(base, vl) __riscv_th_vlw_v_i16m2(base, vl) +#define __riscv_vlw_v_i16m4(base, vl) __riscv_th_vlw_v_i16m4(base, vl) +#define __riscv_vlw_v_i16m8(base, vl) __riscv_th_vlw_v_i16m8(base, vl) +#define __riscv_vlw_v_i32m1(base, vl) __riscv_th_vlw_v_i32m1(base, vl) +#define __riscv_vlw_v_i32m2(base, vl) __riscv_th_vlw_v_i32m2(base, vl) +#define __riscv_vlw_v_i32m4(base, vl) __riscv_th_vlw_v_i32m4(base, vl) +#define __riscv_vlw_v_i32m8(base, vl) __riscv_th_vlw_v_i32m8(base, vl) +#define __riscv_vlw_v_i64m1(base, vl) __riscv_th_vlw_v_i64m1(base, vl) +#define __riscv_vlw_v_i64m2(base, vl) __riscv_th_vlw_v_i64m2(base, vl) +#define __riscv_vlw_v_i64m4(base, vl) __riscv_th_vlw_v_i64m4(base, vl) +#define __riscv_vlw_v_i64m8(base, vl) __riscv_th_vlw_v_i64m8(base, vl) +#define __riscv_vlbu_v_u8m1(base, vl) __riscv_th_vlbu_v_u8m1(base, vl) +#define __riscv_vlbu_v_u8m2(base, vl) __riscv_th_vlbu_v_u8m2(base, vl) +#define __riscv_vlbu_v_u8m4(base, vl) __riscv_th_vlbu_v_u8m4(base, vl) +#define __riscv_vlbu_v_u8m8(base, vl) __riscv_th_vlbu_v_u8m8(base, vl) +#define __riscv_vlbu_v_u16m1(base, vl) __riscv_th_vlbu_v_u16m1(base, vl) +#define 
__riscv_vlbu_v_u16m2(base, vl) __riscv_th_vlbu_v_u16m2(base, vl) +#define __riscv_vlbu_v_u16m4(base, vl) __riscv_th_vlbu_v_u16m4(base, vl) +#define __riscv_vlbu_v_u16m8(base, vl) __riscv_th_vlbu_v_u16m8(base, vl) +#define __riscv_vlbu_v_u32m1(base, vl) __riscv_th_vlbu_v_u32m1(base, vl) +#define __riscv_vlbu_v_u32m2(base, vl) __riscv_th_vlbu_v_u32m2(base, vl) +#define __riscv_vlbu_v_u32m4(base, vl) __riscv_th_vlbu_v_u32m4(base, vl) +#define __riscv_vlbu_v_u32m8(base, vl) __riscv_th_vlbu_v_u32m8(base, vl) +#define __riscv_vlbu_v_u64m1(base, vl) __riscv_th_vlbu_v_u64m1(base, vl) +#define __riscv_vlbu_v_u64m2(base, vl) __riscv_th_vlbu_v_u64m2(base, vl) +#define __riscv_vlbu_v_u64m4(base, vl) __riscv_th_vlbu_v_u64m4(base, vl) +#define __riscv_vlbu_v_u64m8(base, vl) __riscv_th_vlbu_v_u64m8(base, vl) +#define __riscv_vlhu_v_u8m1(base, vl) __riscv_th_vlhu_v_u8m1(base, vl) +#define __riscv_vlhu_v_u8m2(base, vl) __riscv_th_vlhu_v_u8m2(base, vl) +#define __riscv_vlhu_v_u8m4(base, vl) __riscv_th_vlhu_v_u8m4(base, vl) +#define __riscv_vlhu_v_u8m8(base, vl) __riscv_th_vlhu_v_u8m8(base, vl) +#define __riscv_vlhu_v_u16m1(base, vl) __riscv_th_vlhu_v_u16m1(base, vl) +#define __riscv_vlhu_v_u16m2(base, vl) __riscv_th_vlhu_v_u16m2(base, vl) +#define __riscv_vlhu_v_u16m4(base, vl) __riscv_th_vlhu_v_u16m4(base, vl) +#define __riscv_vlhu_v_u16m8(base, vl) __riscv_th_vlhu_v_u16m8(base, vl) +#define __riscv_vlhu_v_u32m1(base, vl) __riscv_th_vlhu_v_u32m1(base, vl) +#define __riscv_vlhu_v_u32m2(base, vl) __riscv_th_vlhu_v_u32m2(base, vl) +#define __riscv_vlhu_v_u32m4(base, vl) __riscv_th_vlhu_v_u32m4(base, vl) +#define __riscv_vlhu_v_u32m8(base, vl) __riscv_th_vlhu_v_u32m8(base, vl) +#define __riscv_vlhu_v_u64m1(base, vl) __riscv_th_vlhu_v_u64m1(base, vl) +#define __riscv_vlhu_v_u64m2(base, vl) __riscv_th_vlhu_v_u64m2(base, vl) +#define __riscv_vlhu_v_u64m4(base, vl) __riscv_th_vlhu_v_u64m4(base, vl) +#define __riscv_vlhu_v_u64m8(base, vl) __riscv_th_vlhu_v_u64m8(base, vl) +#define 
__riscv_vlwu_v_u8m1(base, vl) __riscv_th_vlwu_v_u8m1(base, vl) +#define __riscv_vlwu_v_u8m2(base, vl) __riscv_th_vlwu_v_u8m2(base, vl) +#define __riscv_vlwu_v_u8m4(base, vl) __riscv_th_vlwu_v_u8m4(base, vl) +#define __riscv_vlwu_v_u8m8(base, vl) __riscv_th_vlwu_v_u8m8(base, vl) +#define __riscv_vlwu_v_u16m1(base, vl) __riscv_th_vlwu_v_u16m1(base, vl) +#define __riscv_vlwu_v_u16m2(base, vl) __riscv_th_vlwu_v_u16m2(base, vl) +#define __riscv_vlwu_v_u16m4(base, vl) __riscv_th_vlwu_v_u16m4(base, vl) +#define __riscv_vlwu_v_u16m8(base, vl) __riscv_th_vlwu_v_u16m8(base, vl) +#define __riscv_vlwu_v_u32m1(base, vl) __riscv_th_vlwu_v_u32m1(base, vl) +#define __riscv_vlwu_v_u32m2(base, vl) __riscv_th_vlwu_v_u32m2(base, vl) +#define __riscv_vlwu_v_u32m4(base, vl) __riscv_th_vlwu_v_u32m4(base, vl) +#define __riscv_vlwu_v_u32m8(base, vl) __riscv_th_vlwu_v_u32m8(base, vl) +#define __riscv_vlwu_v_u64m1(base, vl) __riscv_th_vlwu_v_u64m1(base, vl) +#define __riscv_vlwu_v_u64m2(base, vl) __riscv_th_vlwu_v_u64m2(base, vl) +#define __riscv_vlwu_v_u64m4(base, vl) __riscv_th_vlwu_v_u64m4(base, vl) +#define __riscv_vlwu_v_u64m8(base, vl) __riscv_th_vlwu_v_u64m8(base, vl) +#define __riscv_vle8_v_i8m1(base, vl) __riscv_th_vle8_v_i8m1(base, vl) +#define __riscv_vle8_v_i8m2(base, vl) __riscv_th_vle8_v_i8m2(base, vl) +#define __riscv_vle8_v_i8m4(base, vl) __riscv_th_vle8_v_i8m4(base, vl) +#define __riscv_vle8_v_i8m8(base, vl) __riscv_th_vle8_v_i8m8(base, vl) +#define __riscv_vle16_v_i16m1(base, vl) __riscv_th_vle16_v_i16m1(base, vl) +#define __riscv_vle16_v_i16m2(base, vl) __riscv_th_vle16_v_i16m2(base, vl) +#define __riscv_vle16_v_i16m4(base, vl) __riscv_th_vle16_v_i16m4(base, vl) +#define __riscv_vle16_v_i16m8(base, vl) __riscv_th_vle16_v_i16m8(base, vl) +#define __riscv_vle32_v_i32m1(base, vl) __riscv_th_vle32_v_i32m1(base, vl) +#define __riscv_vle32_v_i32m2(base, vl) __riscv_th_vle32_v_i32m2(base, vl) +#define __riscv_vle32_v_i32m4(base, vl) __riscv_th_vle32_v_i32m4(base, vl) +#define 
__riscv_vle32_v_i32m8(base, vl) __riscv_th_vle32_v_i32m8(base, vl) +#define __riscv_vle64_v_i64m1(base, vl) __riscv_th_vle64_v_i64m1(base, vl) +#define __riscv_vle64_v_i64m2(base, vl) __riscv_th_vle64_v_i64m2(base, vl) +#define __riscv_vle64_v_i64m4(base, vl) __riscv_th_vle64_v_i64m4(base, vl) +#define __riscv_vle64_v_i64m8(base, vl) __riscv_th_vle64_v_i64m8(base, vl) +#define __riscv_vle8_v_u8m1(base, vl) __riscv_th_vle8_v_u8m1(base, vl) +#define __riscv_vle8_v_u8m2(base, vl) __riscv_th_vle8_v_u8m2(base, vl) +#define __riscv_vle8_v_u8m4(base, vl) __riscv_th_vle8_v_u8m4(base, vl) +#define __riscv_vle8_v_u8m8(base, vl) __riscv_th_vle8_v_u8m8(base, vl) +#define __riscv_vle16_v_u16m1(base, vl) __riscv_th_vle16_v_u16m1(base, vl) +#define __riscv_vle16_v_u16m2(base, vl) __riscv_th_vle16_v_u16m2(base, vl) +#define __riscv_vle16_v_u16m4(base, vl) __riscv_th_vle16_v_u16m4(base, vl) +#define __riscv_vle16_v_u16m8(base, vl) __riscv_th_vle16_v_u16m8(base, vl) +#define __riscv_vle32_v_u32m1(base, vl) __riscv_th_vle32_v_u32m1(base, vl) +#define __riscv_vle32_v_u32m2(base, vl) __riscv_th_vle32_v_u32m2(base, vl) +#define __riscv_vle32_v_u32m4(base, vl) __riscv_th_vle32_v_u32m4(base, vl) +#define __riscv_vle32_v_u32m8(base, vl) __riscv_th_vle32_v_u32m8(base, vl) +#define __riscv_vle64_v_u64m1(base, vl) __riscv_th_vle64_v_u64m1(base, vl) +#define __riscv_vle64_v_u64m2(base, vl) __riscv_th_vle64_v_u64m2(base, vl) +#define __riscv_vle64_v_u64m4(base, vl) __riscv_th_vle64_v_u64m4(base, vl) +#define __riscv_vle64_v_u64m8(base, vl) __riscv_th_vle64_v_u64m8(base, vl) +#define __riscv_vle16_v_f16m1(base, vl) __riscv_th_vle16_v_f16m1(base, vl) +#define __riscv_vle16_v_f16m2(base, vl) __riscv_th_vle16_v_f16m2(base, vl) +#define __riscv_vle16_v_f16m4(base, vl) __riscv_th_vle16_v_f16m4(base, vl) +#define __riscv_vle16_v_f16m8(base, vl) __riscv_th_vle16_v_f16m8(base, vl) +#define __riscv_vle32_v_f32m1(base, vl) __riscv_th_vle32_v_f32m1(base, vl) +#define __riscv_vle32_v_f32m2(base, vl) 
__riscv_th_vle32_v_f32m2(base, vl) +#define __riscv_vle32_v_f32m4(base, vl) __riscv_th_vle32_v_f32m4(base, vl) +#define __riscv_vle32_v_f32m8(base, vl) __riscv_th_vle32_v_f32m8(base, vl) +#define __riscv_vle64_v_f64m1(base, vl) __riscv_th_vle64_v_f64m1(base, vl) +#define __riscv_vle64_v_f64m2(base, vl) __riscv_th_vle64_v_f64m2(base, vl) +#define __riscv_vle64_v_f64m4(base, vl) __riscv_th_vle64_v_f64m4(base, vl) +#define __riscv_vle64_v_f64m8(base, vl) __riscv_th_vle64_v_f64m8(base, vl) + +// Vector Unit-stride stores +#define __riscv_vsb_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i8m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i16m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i32m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m2(dst_ptr, vector_value, vl) 
+#define __riscv_vsb_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_i64m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i8m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i16m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i32m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_i64m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m1(dst_ptr, vector_value, 
vl) +#define __riscv_vsw_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i8m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i16m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i32m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_i64m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u8m8(dst_ptr, vector_value, 
vl) +#define __riscv_vsb_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u16m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u32m8(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m1(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m2(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m4(dst_ptr, vector_value, vl) +#define __riscv_vsb_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsb_v_u64m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u8m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m4(dst_ptr, 
vector_value, vl) +#define __riscv_vsh_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u16m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u32m8(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m1(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m2(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m4(dst_ptr, vector_value, vl) +#define __riscv_vsh_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsh_v_u64m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u8m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u16m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u32m2(dst_ptr, vector_value, vl) 
__riscv_th_vsw_v_u32m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u32m8(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m1(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m2(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u64m4(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m4(dst_ptr, vector_value, vl) +#define __riscv_vsw_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vsw_v_u64m8(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_i8m1(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m1(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_i8m2(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m2(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_i8m4(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m4(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_i8m8(dst_ptr, vector_value, vl) __riscv_th_vse8_v_i8m8(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_i16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m1(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_i16m2(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m2(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_i16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m4(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_i16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_i16m8(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_i32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m1(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_i32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m2(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_i32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m4(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_i32m8(dst_ptr, vector_value, vl) __riscv_th_vse32_v_i32m8(dst_ptr, vector_value, vl) +#define 
__riscv_vse64_v_i64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m1(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_i64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m2(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_i64m4(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m4(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_i64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_i64m8(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_u8m1(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m1(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_u8m2(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m2(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_u8m4(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m4(dst_ptr, vector_value, vl) +#define __riscv_vse8_v_u8m8(dst_ptr, vector_value, vl) __riscv_th_vse8_v_u8m8(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_u16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m1(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_u16m2(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m2(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_u16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m4(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_u16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_u16m8(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_u32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m1(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_u32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m2(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_u32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m4(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_u32m8(dst_ptr, vector_value, vl) __riscv_th_vse32_v_u32m8(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_u64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m1(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_u64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m2(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_u64m4(dst_ptr, 
vector_value, vl) __riscv_th_vse64_v_u64m4(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_u64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_u64m8(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_f16m1(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m1(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_f16m2(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m2(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_f16m4(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m4(dst_ptr, vector_value, vl) +#define __riscv_vse16_v_f16m8(dst_ptr, vector_value, vl) __riscv_th_vse16_v_f16m8(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_f32m1(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m1(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_f32m2(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m2(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_f32m4(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m4(dst_ptr, vector_value, vl) +#define __riscv_vse32_v_f32m8(dst_ptr, vector_value, vl) __riscv_th_vse32_v_f32m8(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_f64m1(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m1(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_f64m2(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m2(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_f64m4(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m4(dst_ptr, vector_value, vl) +#define __riscv_vse64_v_f64m8(dst_ptr, vector_value, vl) __riscv_th_vse64_v_f64m8(dst_ptr, vector_value, vl) + +}] in +def th_unit_stride_wrapper_macros: RVVHeader; + +let HeaderCode = +[{ +// Vector Strided loads +#define __riscv_vlsb_v_i8m1(base, stride, vl) __riscv_th_vlsb_v_i8m1(base, stride, vl) +#define __riscv_vlsb_v_i8m2(base, stride, vl) __riscv_th_vlsb_v_i8m2(base, stride, vl) +#define __riscv_vlsb_v_i8m4(base, stride, vl) __riscv_th_vlsb_v_i8m4(base, stride, vl) +#define __riscv_vlsb_v_i8m8(base, stride, vl) __riscv_th_vlsb_v_i8m8(base, stride, vl) +#define __riscv_vlsb_v_i16m1(base, 
stride, vl) __riscv_th_vlsb_v_i16m1(base, stride, vl) +#define __riscv_vlsb_v_i16m2(base, stride, vl) __riscv_th_vlsb_v_i16m2(base, stride, vl) +#define __riscv_vlsb_v_i16m4(base, stride, vl) __riscv_th_vlsb_v_i16m4(base, stride, vl) +#define __riscv_vlsb_v_i16m8(base, stride, vl) __riscv_th_vlsb_v_i16m8(base, stride, vl) +#define __riscv_vlsb_v_i32m1(base, stride, vl) __riscv_th_vlsb_v_i32m1(base, stride, vl) +#define __riscv_vlsb_v_i32m2(base, stride, vl) __riscv_th_vlsb_v_i32m2(base, stride, vl) +#define __riscv_vlsb_v_i32m4(base, stride, vl) __riscv_th_vlsb_v_i32m4(base, stride, vl) +#define __riscv_vlsb_v_i32m8(base, stride, vl) __riscv_th_vlsb_v_i32m8(base, stride, vl) +#define __riscv_vlsb_v_i64m1(base, stride, vl) __riscv_th_vlsb_v_i64m1(base, stride, vl) +#define __riscv_vlsb_v_i64m2(base, stride, vl) __riscv_th_vlsb_v_i64m2(base, stride, vl) +#define __riscv_vlsb_v_i64m4(base, stride, vl) __riscv_th_vlsb_v_i64m4(base, stride, vl) +#define __riscv_vlsb_v_i64m8(base, stride, vl) __riscv_th_vlsb_v_i64m8(base, stride, vl) +#define __riscv_vlsh_v_i8m1(base, stride, vl) __riscv_th_vlsh_v_i8m1(base, stride, vl) +#define __riscv_vlsh_v_i8m2(base, stride, vl) __riscv_th_vlsh_v_i8m2(base, stride, vl) +#define __riscv_vlsh_v_i8m4(base, stride, vl) __riscv_th_vlsh_v_i8m4(base, stride, vl) +#define __riscv_vlsh_v_i8m8(base, stride, vl) __riscv_th_vlsh_v_i8m8(base, stride, vl) +#define __riscv_vlsh_v_i16m1(base, stride, vl) __riscv_th_vlsh_v_i16m1(base, stride, vl) +#define __riscv_vlsh_v_i16m2(base, stride, vl) __riscv_th_vlsh_v_i16m2(base, stride, vl) +#define __riscv_vlsh_v_i16m4(base, stride, vl) __riscv_th_vlsh_v_i16m4(base, stride, vl) +#define __riscv_vlsh_v_i16m8(base, stride, vl) __riscv_th_vlsh_v_i16m8(base, stride, vl) +#define __riscv_vlsh_v_i32m1(base, stride, vl) __riscv_th_vlsh_v_i32m1(base, stride, vl) +#define __riscv_vlsh_v_i32m2(base, stride, vl) __riscv_th_vlsh_v_i32m2(base, stride, vl) +#define __riscv_vlsh_v_i32m4(base, stride, vl) 
__riscv_th_vlsh_v_i32m4(base, stride, vl) +#define __riscv_vlsh_v_i32m8(base, stride, vl) __riscv_th_vlsh_v_i32m8(base, stride, vl) +#define __riscv_vlsh_v_i64m1(base, stride, vl) __riscv_th_vlsh_v_i64m1(base, stride, vl) +#define __riscv_vlsh_v_i64m2(base, stride, vl) __riscv_th_vlsh_v_i64m2(base, stride, vl) +#define __riscv_vlsh_v_i64m4(base, stride, vl) __riscv_th_vlsh_v_i64m4(base, stride, vl) +#define __riscv_vlsh_v_i64m8(base, stride, vl) __riscv_th_vlsh_v_i64m8(base, stride, vl) +#define __riscv_vlsw_v_i8m1(base, stride, vl) __riscv_th_vlsw_v_i8m1(base, stride, vl) +#define __riscv_vlsw_v_i8m2(base, stride, vl) __riscv_th_vlsw_v_i8m2(base, stride, vl) +#define __riscv_vlsw_v_i8m4(base, stride, vl) __riscv_th_vlsw_v_i8m4(base, stride, vl) +#define __riscv_vlsw_v_i8m8(base, stride, vl) __riscv_th_vlsw_v_i8m8(base, stride, vl) +#define __riscv_vlsw_v_i16m1(base, stride, vl) __riscv_th_vlsw_v_i16m1(base, stride, vl) +#define __riscv_vlsw_v_i16m2(base, stride, vl) __riscv_th_vlsw_v_i16m2(base, stride, vl) +#define __riscv_vlsw_v_i16m4(base, stride, vl) __riscv_th_vlsw_v_i16m4(base, stride, vl) +#define __riscv_vlsw_v_i16m8(base, stride, vl) __riscv_th_vlsw_v_i16m8(base, stride, vl) +#define __riscv_vlsw_v_i32m1(base, stride, vl) __riscv_th_vlsw_v_i32m1(base, stride, vl) +#define __riscv_vlsw_v_i32m2(base, stride, vl) __riscv_th_vlsw_v_i32m2(base, stride, vl) +#define __riscv_vlsw_v_i32m4(base, stride, vl) __riscv_th_vlsw_v_i32m4(base, stride, vl) +#define __riscv_vlsw_v_i32m8(base, stride, vl) __riscv_th_vlsw_v_i32m8(base, stride, vl) +#define __riscv_vlsw_v_i64m1(base, stride, vl) __riscv_th_vlsw_v_i64m1(base, stride, vl) +#define __riscv_vlsw_v_i64m2(base, stride, vl) __riscv_th_vlsw_v_i64m2(base, stride, vl) +#define __riscv_vlsw_v_i64m4(base, stride, vl) __riscv_th_vlsw_v_i64m4(base, stride, vl) +#define __riscv_vlsw_v_i64m8(base, stride, vl) __riscv_th_vlsw_v_i64m8(base, stride, vl) +#define __riscv_vlsbu_v_u8m1(base, stride, vl) 
__riscv_th_vlsbu_v_u8m1(base, stride, vl) +#define __riscv_vlsbu_v_u8m2(base, stride, vl) __riscv_th_vlsbu_v_u8m2(base, stride, vl) +#define __riscv_vlsbu_v_u8m4(base, stride, vl) __riscv_th_vlsbu_v_u8m4(base, stride, vl) +#define __riscv_vlsbu_v_u8m8(base, stride, vl) __riscv_th_vlsbu_v_u8m8(base, stride, vl) +#define __riscv_vlsbu_v_u16m1(base, stride, vl) __riscv_th_vlsbu_v_u16m1(base, stride, vl) +#define __riscv_vlsbu_v_u16m2(base, stride, vl) __riscv_th_vlsbu_v_u16m2(base, stride, vl) +#define __riscv_vlsbu_v_u16m4(base, stride, vl) __riscv_th_vlsbu_v_u16m4(base, stride, vl) +#define __riscv_vlsbu_v_u16m8(base, stride, vl) __riscv_th_vlsbu_v_u16m8(base, stride, vl) +#define __riscv_vlsbu_v_u32m1(base, stride, vl) __riscv_th_vlsbu_v_u32m1(base, stride, vl) +#define __riscv_vlsbu_v_u32m2(base, stride, vl) __riscv_th_vlsbu_v_u32m2(base, stride, vl) +#define __riscv_vlsbu_v_u32m4(base, stride, vl) __riscv_th_vlsbu_v_u32m4(base, stride, vl) +#define __riscv_vlsbu_v_u32m8(base, stride, vl) __riscv_th_vlsbu_v_u32m8(base, stride, vl) +#define __riscv_vlsbu_v_u64m1(base, stride, vl) __riscv_th_vlsbu_v_u64m1(base, stride, vl) +#define __riscv_vlsbu_v_u64m2(base, stride, vl) __riscv_th_vlsbu_v_u64m2(base, stride, vl) +#define __riscv_vlsbu_v_u64m4(base, stride, vl) __riscv_th_vlsbu_v_u64m4(base, stride, vl) +#define __riscv_vlsbu_v_u64m8(base, stride, vl) __riscv_th_vlsbu_v_u64m8(base, stride, vl) +#define __riscv_vlshu_v_u8m1(base, stride, vl) __riscv_th_vlshu_v_u8m1(base, stride, vl) +#define __riscv_vlshu_v_u8m2(base, stride, vl) __riscv_th_vlshu_v_u8m2(base, stride, vl) +#define __riscv_vlshu_v_u8m4(base, stride, vl) __riscv_th_vlshu_v_u8m4(base, stride, vl) +#define __riscv_vlshu_v_u8m8(base, stride, vl) __riscv_th_vlshu_v_u8m8(base, stride, vl) +#define __riscv_vlshu_v_u16m1(base, stride, vl) __riscv_th_vlshu_v_u16m1(base, stride, vl) +#define __riscv_vlshu_v_u16m2(base, stride, vl) __riscv_th_vlshu_v_u16m2(base, stride, vl) +#define __riscv_vlshu_v_u16m4(base, 
stride, vl) __riscv_th_vlshu_v_u16m4(base, stride, vl) +#define __riscv_vlshu_v_u16m8(base, stride, vl) __riscv_th_vlshu_v_u16m8(base, stride, vl) +#define __riscv_vlshu_v_u32m1(base, stride, vl) __riscv_th_vlshu_v_u32m1(base, stride, vl) +#define __riscv_vlshu_v_u32m2(base, stride, vl) __riscv_th_vlshu_v_u32m2(base, stride, vl) +#define __riscv_vlshu_v_u32m4(base, stride, vl) __riscv_th_vlshu_v_u32m4(base, stride, vl) +#define __riscv_vlshu_v_u32m8(base, stride, vl) __riscv_th_vlshu_v_u32m8(base, stride, vl) +#define __riscv_vlshu_v_u64m1(base, stride, vl) __riscv_th_vlshu_v_u64m1(base, stride, vl) +#define __riscv_vlshu_v_u64m2(base, stride, vl) __riscv_th_vlshu_v_u64m2(base, stride, vl) +#define __riscv_vlshu_v_u64m4(base, stride, vl) __riscv_th_vlshu_v_u64m4(base, stride, vl) +#define __riscv_vlshu_v_u64m8(base, stride, vl) __riscv_th_vlshu_v_u64m8(base, stride, vl) +#define __riscv_vlswu_v_u8m1(base, stride, vl) __riscv_th_vlswu_v_u8m1(base, stride, vl) +#define __riscv_vlswu_v_u8m2(base, stride, vl) __riscv_th_vlswu_v_u8m2(base, stride, vl) +#define __riscv_vlswu_v_u8m4(base, stride, vl) __riscv_th_vlswu_v_u8m4(base, stride, vl) +#define __riscv_vlswu_v_u8m8(base, stride, vl) __riscv_th_vlswu_v_u8m8(base, stride, vl) +#define __riscv_vlswu_v_u16m1(base, stride, vl) __riscv_th_vlswu_v_u16m1(base, stride, vl) +#define __riscv_vlswu_v_u16m2(base, stride, vl) __riscv_th_vlswu_v_u16m2(base, stride, vl) +#define __riscv_vlswu_v_u16m4(base, stride, vl) __riscv_th_vlswu_v_u16m4(base, stride, vl) +#define __riscv_vlswu_v_u16m8(base, stride, vl) __riscv_th_vlswu_v_u16m8(base, stride, vl) +#define __riscv_vlswu_v_u32m1(base, stride, vl) __riscv_th_vlswu_v_u32m1(base, stride, vl) +#define __riscv_vlswu_v_u32m2(base, stride, vl) __riscv_th_vlswu_v_u32m2(base, stride, vl) +#define __riscv_vlswu_v_u32m4(base, stride, vl) __riscv_th_vlswu_v_u32m4(base, stride, vl) +#define __riscv_vlswu_v_u32m8(base, stride, vl) __riscv_th_vlswu_v_u32m8(base, stride, vl) +#define 
__riscv_vlswu_v_u64m1(base, stride, vl) __riscv_th_vlswu_v_u64m1(base, stride, vl) +#define __riscv_vlswu_v_u64m2(base, stride, vl) __riscv_th_vlswu_v_u64m2(base, stride, vl) +#define __riscv_vlswu_v_u64m4(base, stride, vl) __riscv_th_vlswu_v_u64m4(base, stride, vl) +#define __riscv_vlswu_v_u64m8(base, stride, vl) __riscv_th_vlswu_v_u64m8(base, stride, vl) +#define __riscv_vlse8_v_i8m1(base, stride, vl) __riscv_th_vlse8_v_i8m1(base, stride, vl) +#define __riscv_vlse8_v_i8m2(base, stride, vl) __riscv_th_vlse8_v_i8m2(base, stride, vl) +#define __riscv_vlse8_v_i8m4(base, stride, vl) __riscv_th_vlse8_v_i8m4(base, stride, vl) +#define __riscv_vlse8_v_i8m8(base, stride, vl) __riscv_th_vlse8_v_i8m8(base, stride, vl) +#define __riscv_vlse16_v_i16m1(base, stride, vl) __riscv_th_vlse16_v_i16m1(base, stride, vl) +#define __riscv_vlse16_v_i16m2(base, stride, vl) __riscv_th_vlse16_v_i16m2(base, stride, vl) +#define __riscv_vlse16_v_i16m4(base, stride, vl) __riscv_th_vlse16_v_i16m4(base, stride, vl) +#define __riscv_vlse16_v_i16m8(base, stride, vl) __riscv_th_vlse16_v_i16m8(base, stride, vl) +#define __riscv_vlse32_v_i32m1(base, stride, vl) __riscv_th_vlse32_v_i32m1(base, stride, vl) +#define __riscv_vlse32_v_i32m2(base, stride, vl) __riscv_th_vlse32_v_i32m2(base, stride, vl) +#define __riscv_vlse32_v_i32m4(base, stride, vl) __riscv_th_vlse32_v_i32m4(base, stride, vl) +#define __riscv_vlse32_v_i32m8(base, stride, vl) __riscv_th_vlse32_v_i32m8(base, stride, vl) +#define __riscv_vlse64_v_i64m1(base, stride, vl) __riscv_th_vlse64_v_i64m1(base, stride, vl) +#define __riscv_vlse64_v_i64m2(base, stride, vl) __riscv_th_vlse64_v_i64m2(base, stride, vl) +#define __riscv_vlse64_v_i64m4(base, stride, vl) __riscv_th_vlse64_v_i64m4(base, stride, vl) +#define __riscv_vlse64_v_i64m8(base, stride, vl) __riscv_th_vlse64_v_i64m8(base, stride, vl) +#define __riscv_vlse8_v_u8m1(base, stride, vl) __riscv_th_vlse8_v_u8m1(base, stride, vl) +#define __riscv_vlse8_v_u8m2(base, stride, vl) 
__riscv_th_vlse8_v_u8m2(base, stride, vl) +#define __riscv_vlse8_v_u8m4(base, stride, vl) __riscv_th_vlse8_v_u8m4(base, stride, vl) +#define __riscv_vlse8_v_u8m8(base, stride, vl) __riscv_th_vlse8_v_u8m8(base, stride, vl) +#define __riscv_vlse16_v_u16m1(base, stride, vl) __riscv_th_vlse16_v_u16m1(base, stride, vl) +#define __riscv_vlse16_v_u16m2(base, stride, vl) __riscv_th_vlse16_v_u16m2(base, stride, vl) +#define __riscv_vlse16_v_u16m4(base, stride, vl) __riscv_th_vlse16_v_u16m4(base, stride, vl) +#define __riscv_vlse16_v_u16m8(base, stride, vl) __riscv_th_vlse16_v_u16m8(base, stride, vl) +#define __riscv_vlse32_v_u32m1(base, stride, vl) __riscv_th_vlse32_v_u32m1(base, stride, vl) +#define __riscv_vlse32_v_u32m2(base, stride, vl) __riscv_th_vlse32_v_u32m2(base, stride, vl) +#define __riscv_vlse32_v_u32m4(base, stride, vl) __riscv_th_vlse32_v_u32m4(base, stride, vl) +#define __riscv_vlse32_v_u32m8(base, stride, vl) __riscv_th_vlse32_v_u32m8(base, stride, vl) +#define __riscv_vlse64_v_u64m1(base, stride, vl) __riscv_th_vlse64_v_u64m1(base, stride, vl) +#define __riscv_vlse64_v_u64m2(base, stride, vl) __riscv_th_vlse64_v_u64m2(base, stride, vl) +#define __riscv_vlse64_v_u64m4(base, stride, vl) __riscv_th_vlse64_v_u64m4(base, stride, vl) +#define __riscv_vlse64_v_u64m8(base, stride, vl) __riscv_th_vlse64_v_u64m8(base, stride, vl) +#define __riscv_vlse16_v_f16m1(base, stride, vl) __riscv_th_vlse16_v_f16m1(base, stride, vl) +#define __riscv_vlse16_v_f16m2(base, stride, vl) __riscv_th_vlse16_v_f16m2(base, stride, vl) +#define __riscv_vlse16_v_f16m4(base, stride, vl) __riscv_th_vlse16_v_f16m4(base, stride, vl) +#define __riscv_vlse16_v_f16m8(base, stride, vl) __riscv_th_vlse16_v_f16m8(base, stride, vl) +#define __riscv_vlse32_v_f32m1(base, stride, vl) __riscv_th_vlse32_v_f32m1(base, stride, vl) +#define __riscv_vlse32_v_f32m2(base, stride, vl) __riscv_th_vlse32_v_f32m2(base, stride, vl) +#define __riscv_vlse32_v_f32m4(base, stride, vl) __riscv_th_vlse32_v_f32m4(base, 
stride, vl) +#define __riscv_vlse32_v_f32m8(base, stride, vl) __riscv_th_vlse32_v_f32m8(base, stride, vl) +#define __riscv_vlse64_v_f64m1(base, stride, vl) __riscv_th_vlse64_v_f64m1(base, stride, vl) +#define __riscv_vlse64_v_f64m2(base, stride, vl) __riscv_th_vlse64_v_f64m2(base, stride, vl) +#define __riscv_vlse64_v_f64m4(base, stride, vl) __riscv_th_vlse64_v_f64m4(base, stride, vl) +#define __riscv_vlse64_v_f64m8(base, stride, vl) __riscv_th_vlse64_v_f64m8(base, stride, vl) + +// Vector Strided stores +#define __riscv_vssb_v_i8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssb_v_i64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_i64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_i64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssh_v_i64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_i64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_i64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssw_v_i64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_i64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_i64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssb_v_u64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssb_v_u64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssb_v_u64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssh_v_u64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssh_v_u64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssh_v_u64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u32m8(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vssw_v_u64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vssw_v_u64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vssw_v_u64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_i8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_i8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_i8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_i8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_i8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_i8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_i8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_i8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_i16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_i16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_i16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_i16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_i16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_i16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_i16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_i16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_i32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_i32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_i32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_i32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_i32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_i32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_i32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_i32m8(dst_ptr, stride, 
vector_value, vl) +#define __riscv_vsse64_v_i64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_i64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_i64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_i64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_i64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_i64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_i64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_i64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_u8m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_u8m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_u8m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_u8m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_u8m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_u8m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse8_v_u8m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse8_v_u8m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_u16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_u16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_u16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_u16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_u16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_u16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_u16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_u16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_u32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_u32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_u32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_u32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_u32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_u32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_u32m8(dst_ptr, stride, vector_value, vl) 
__riscv_th_vsse32_v_u32m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_u64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_u64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_u64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_u64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_u64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_u64m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_u64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_u64m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_f16m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_f16m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_f16m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_f16m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_f16m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_f16m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse16_v_f16m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse16_v_f16m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_f32m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_f32m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_f32m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_f32m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_f32m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_f32m4(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse32_v_f32m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse32_v_f32m8(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_f64m1(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_f64m1(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_f64m2(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_f64m2(dst_ptr, stride, vector_value, vl) +#define __riscv_vsse64_v_f64m4(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_f64m4(dst_ptr, stride, vector_value, vl) +#define 
__riscv_vsse64_v_f64m8(dst_ptr, stride, vector_value, vl) __riscv_th_vsse64_v_f64m8(dst_ptr, stride, vector_value, vl) + +}] in +def th_strided_wrapper_macros: RVVHeader; + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c new file mode 100644 index 00000000000000..465c29a7fb5039 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsb.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsb_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsb_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlsb.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsb_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsb_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsb_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsb_v_i16m2(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsb_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsb_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsb_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlsb_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsb_v_i32m4(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsb_v_i32m8(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsb_v_i64m1(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsb_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlsb_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlsb_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsb_v_i64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c new file mode 100644 index 00000000000000..918912ce4878d5 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsbu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlsbu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_th_vlsbu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlsbu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlsbu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlsbu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlsbu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return 
__riscv_th_vlsbu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlsbu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlsbu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlsbu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t 
test_th_vlsbu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlsbu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlsbu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlsbu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv1i64.i64( poison, ptr [[BASE]], i64 
[[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlsbu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlsbu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlsbu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlsbu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsbu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c new file mode 100644 index 00000000000000..6a9788d845c0e6 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse16.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m1_t test_th_vlse16_v_f16m1(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_f16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vlse16_v_f16m2(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_f16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m4_t test_th_vlse16_v_f16m4(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_f16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m8_t test_th_vlse16_v_f16m8(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_f16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlse16_v_i16m1(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlse16_v_i16m2(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlse16_v_i16m4(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_th_vlse16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlse16_v_i16m8(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlse16_v_u16m1(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlse16_v_u16m2(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlse16_v_u16m4(const uint16_t *base, ptrdiff_t stride, size_t vl) { + 
return __riscv_th_vlse16_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlse16_v_u16m8(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse16_v_u16m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c new file mode 100644 index 00000000000000..0200b2071f4df3 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse32.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m1_t test_th_vlse32_v_f32m1(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_f32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) 
+// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m2_t test_th_vlse32_v_f32m2(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_f32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vlse32_v_f32m4(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_f32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m8_t test_th_vlse32_v_f32m8(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_f32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlse32_v_i32m1(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlse32_v_i32m2(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlse32_v_i32m4(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlse32_v_i32m8(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlse32_v_u32m1(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlse32_v_u32m2(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlse32_v_u32m4(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlse32_v_u32m8(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse32_v_u32m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c new file mode 100644 index 00000000000000..ecaf41f95c777d --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse64.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define 
dso_local @test_th_vlse64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m1_t test_th_vlse64_v_f64m1(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_f64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m2_t test_th_vlse64_v_f64m2(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_f64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m4_t test_th_vlse64_v_f64m4(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_f64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vlse64_v_f64m8(const double *base, ptrdiff_t stride, size_t vl) { + 
return __riscv_th_vlse64_v_f64m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlse64_v_i64m1(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlse64_v_i64m2(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlse64_v_i64m4(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t 
test_th_vlse64_v_i64m8(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_i64m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlse64_v_u64m1(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlse64_v_u64m2(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlse64_v_u64m4(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlse64_v_u64m8(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse64_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c new file mode 100644 index 00000000000000..bc302566ccaa03 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlse8.c @@ -0,0 +1,86 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlse8_v_i8m1(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlse8_v_i8m2(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlse8_v_i8m4(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlse8_v_i8m8(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlse8_v_u8m1(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlse8_v_u8m2(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlse8_v_u8m4(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlse8_v_u8m8(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_th_vlse8_v_u8m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c new file mode 100644 index 00000000000000..dd62c13ef10e01 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsh.c @@ -0,0 +1,167 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsh_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vlsh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsh_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsh_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsh_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsh_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i16m1(base, stride, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsh_v_i16m2(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsh_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsh_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsh_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return 
__riscv_th_vlsh_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlsh_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsh_v_i32m4(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsh_v_i32m8(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsh_v_i64m1(const int64_t 
*base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsh_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlsh_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlsh_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsh_v_i64m8(base, stride, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c new file mode 100644 index 00000000000000..5ead421d53157f --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlshu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple 
riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlshu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlshu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlshu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlshu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlshu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlshu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlshu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlshu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlshu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlshu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlshu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlshu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlshu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlshu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlshu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlshu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vlshu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlshu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlshu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c new file mode 100644 index 00000000000000..e1848d522e47a8 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlsw.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsw_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsw_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return 
__riscv_th_vlsw_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsw_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsw_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsw_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsw_v_i16m2(const int16_t *base, size_t 
stride, size_t vl) { + return __riscv_th_vlsw_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsw_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsw_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsw_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t 
test_th_vlsw_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsw_v_i32m4(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsw_v_i32m8(const int32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsw_v_i64m1(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsw_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlsw_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlsw_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlsw_v_i64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c new file mode 100644 index 00000000000000..3a3ca7bb270a7d --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vlswu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vlswu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlswu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlswu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlswu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlswu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlswu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlswu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlswu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlswu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlswu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlswu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlswu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlswu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vlswu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlswu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlswu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlswu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_th_vlswu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlswu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return 
__riscv_th_vlswu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c new file mode 100644 index 00000000000000..8fb77fd9e55082 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssb.c @@ -0,0 +1,327 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_th_vssb_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_th_vssb_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_th_vssb_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_th_vssb_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_th_vssb_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_th_vssb_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_th_vssb_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_th_vssb_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_th_vssb_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_th_vssb_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m4 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return __riscv_th_vssb_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_th_vssb_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return __riscv_th_vssb_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t vl) { + 
return __riscv_th_vssb_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_th_vssb_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_th_vssb_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_th_vssb_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) 
+// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_th_vssb_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_th_vssb_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t vl) { + return __riscv_th_vssb_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_th_vssb_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_th_vssb_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_th_vssb_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_th_vssb_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_th_vssb_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m2 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return __riscv_th_vssb_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_th_vssb_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_th_vssb_v_u32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size_t 
vl) { + return __riscv_th_vssb_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_th_vssb_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_th_vssb_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_th_vssb_v_u64m8(base, stride, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c new file mode 100644 index 00000000000000..9fb3ef4c470e4a --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse16.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m1(_Float16 *base, ptrdiff_t stride, vfloat16m1_t value, size_t vl) { + return __riscv_th_vsse16_v_f16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m2(_Float16 *base, ptrdiff_t stride, vfloat16m2_t value, size_t vl) { + return __riscv_th_vsse16_v_f16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m4(_Float16 *base, ptrdiff_t stride, vfloat16m4_t value, size_t vl) { + return __riscv_th_vsse16_v_f16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vsse16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m8(_Float16 *base, ptrdiff_t stride, vfloat16m8_t value, size_t vl) { + return __riscv_th_vsse16_v_f16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m1(int16_t *base, ptrdiff_t stride, vint16m1_t value, size_t vl) { + return __riscv_th_vsse16_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m2(int16_t *base, ptrdiff_t stride, vint16m2_t value, size_t vl) { + return __riscv_th_vsse16_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vsse16_v_i16m4(int16_t *base, ptrdiff_t stride, vint16m4_t value, size_t vl) { + return __riscv_th_vsse16_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m8(int16_t *base, ptrdiff_t stride, vint16m8_t value, size_t vl) { + return __riscv_th_vsse16_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m1(uint16_t *base, ptrdiff_t stride, vuint16m1_t value, size_t vl) { + return __riscv_th_vsse16_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m2(uint16_t *base, ptrdiff_t stride, vuint16m2_t value, size_t vl) { + return __riscv_th_vsse16_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m4(uint16_t *base, ptrdiff_t stride, vuint16m4_t value, size_t vl) { + return __riscv_th_vsse16_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m8(uint16_t *base, ptrdiff_t stride, vuint16m8_t value, size_t vl) { + return __riscv_th_vsse16_v_u16m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c new file mode 100644 index 00000000000000..f9459863e8c1ed --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse32.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m1(float *base, ptrdiff_t stride, vfloat32m1_t value, size_t vl) { + return __riscv_th_vsse32_v_f32m1(base, stride, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m2(float *base, ptrdiff_t stride, vfloat32m2_t value, size_t vl) { + return __riscv_th_vsse32_v_f32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m4(float *base, ptrdiff_t stride, vfloat32m4_t value, size_t vl) { + return __riscv_th_vsse32_v_f32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m8(float *base, ptrdiff_t stride, vfloat32m8_t value, size_t vl) { + return __riscv_th_vsse32_v_f32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void 
+// +void test_th_vsse32_v_i32m1(int32_t *base, ptrdiff_t stride, vint32m1_t value, size_t vl) { + return __riscv_th_vsse32_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m2(int32_t *base, ptrdiff_t stride, vint32m2_t value, size_t vl) { + return __riscv_th_vsse32_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m4(int32_t *base, ptrdiff_t stride, vint32m4_t value, size_t vl) { + return __riscv_th_vsse32_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m8(int32_t *base, ptrdiff_t stride, vint32m8_t value, size_t vl) { + return __riscv_th_vsse32_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m1(uint32_t *base, ptrdiff_t stride, vuint32m1_t value, size_t vl) { + return __riscv_th_vsse32_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m2(uint32_t *base, ptrdiff_t stride, vuint32m2_t value, size_t vl) { + return __riscv_th_vsse32_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m4(uint32_t *base, ptrdiff_t stride, vuint32m4_t value, size_t vl) { + return __riscv_th_vsse32_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m8(uint32_t *base, ptrdiff_t stride, vuint32m8_t value, size_t vl) { + return __riscv_th_vsse32_v_u32m8(base, stride, value, vl); +} diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c new file mode 100644 index 00000000000000..a8bd79c3273276 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse64.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m1(double *base, ptrdiff_t stride, vfloat64m1_t value, size_t vl) { + return __riscv_th_vsse64_v_f64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m2(double *base, ptrdiff_t stride, vfloat64m2_t value, size_t vl) { + return __riscv_th_vsse64_v_f64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m4(double *base, ptrdiff_t stride, vfloat64m4_t value, size_t vl) { + return __riscv_th_vsse64_v_f64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m8(double *base, ptrdiff_t stride, vfloat64m8_t value, size_t vl) { + return __riscv_th_vsse64_v_f64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m1(int64_t *base, ptrdiff_t stride, vint64m1_t value, size_t vl) { + return __riscv_th_vsse64_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m2(int64_t *base, ptrdiff_t stride, vint64m2_t value, size_t vl) { + return __riscv_th_vsse64_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m4(int64_t *base, ptrdiff_t stride, vint64m4_t value, size_t vl) { + return __riscv_th_vsse64_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m8(int64_t *base, ptrdiff_t stride, vint64m8_t value, size_t vl) { + return __riscv_th_vsse64_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m1(uint64_t *base, ptrdiff_t stride, vuint64m1_t value, size_t vl) { + return __riscv_th_vsse64_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m2(uint64_t *base, ptrdiff_t stride, vuint64m2_t value, size_t vl) { + return __riscv_th_vsse64_v_u64m2(base, stride, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m4(uint64_t *base, ptrdiff_t stride, vuint64m4_t value, size_t vl) { + return __riscv_th_vsse64_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m8(uint64_t *base, ptrdiff_t stride, vuint64m8_t value, size_t vl) { + return __riscv_th_vsse64_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c new file mode 100644 index 00000000000000..5c15c20c0007e8 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vsse8.c @@ -0,0 +1,86 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m1(int8_t *base, ptrdiff_t stride, vint8m1_t value, size_t vl) { + return __riscv_th_vsse8_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m2(int8_t *base, ptrdiff_t stride, vint8m2_t value, size_t vl) { + return __riscv_th_vsse8_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m4(int8_t *base, ptrdiff_t stride, vint8m4_t value, size_t vl) { + return __riscv_th_vsse8_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m8(int8_t *base, ptrdiff_t stride, vint8m8_t value, size_t vl) { + return __riscv_th_vsse8_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m1(uint8_t *base, ptrdiff_t stride, vuint8m1_t value, size_t vl) { + return __riscv_th_vsse8_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m2(uint8_t *base, ptrdiff_t stride, vuint8m2_t value, size_t vl) { + return __riscv_th_vsse8_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m4(uint8_t *base, ptrdiff_t stride, vuint8m4_t value, size_t vl) { + return __riscv_th_vsse8_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m8(uint8_t *base, ptrdiff_t stride, vuint8m8_t value, size_t vl) { + return __riscv_th_vsse8_v_u8m8(base, stride, value, vl); +} diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c new file mode 100644 index 00000000000000..f077c768e61393 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssh.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_th_vssh_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_th_vssh_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vssh_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_th_vssh_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_th_vssh_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_th_vssh_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_th_vssh_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_th_vssh_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_th_vssh_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_th_vssh_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_th_vssh_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return __riscv_th_vssh_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_th_vssh_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return __riscv_th_vssh_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t vl) { + return __riscv_th_vssh_v_i64m2(base, stride, value, vl); 
+} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_th_vssh_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_th_vssh_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_th_vssh_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vssh_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_th_vssh_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_th_vssh_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t vl) { + return __riscv_th_vssh_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_th_vssh_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_th_vssh_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_th_vssh_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_th_vssh_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_th_vssh_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return __riscv_th_vssh_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_th_vssh_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_th_vssh_v_u32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size_t vl) { + return __riscv_th_vssh_v_u64m1(base, 
stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_th_vssh_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_th_vssh_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_th_vssh_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c new file mode 100644 index 00000000000000..a0e883b97e3d50 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/thead/vssw.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple 
riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_th_vssw_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_th_vssw_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_th_vssw_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_th_vssw_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_th_vssw_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_th_vssw_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_th_vssw_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_th_vssw_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_th_vssw_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_th_vssw_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return 
__riscv_th_vssw_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_th_vssw_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return __riscv_th_vssw_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t vl) { + return __riscv_th_vssw_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) 
+// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_th_vssw_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_th_vssw_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_th_vssw_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_th_vssw_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_th_vssw_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t vl) { + return __riscv_th_vssw_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_th_vssw_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_th_vssw_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m4 +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_th_vssw_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_th_vssw_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_th_vssw_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return 
__riscv_th_vssw_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_th_vssw_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_th_vssw_v_u32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size_t vl) { + return __riscv_th_vssw_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_th_vssw_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_th_vssw_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_th_vssw_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c new file mode 100644 index 00000000000000..0ab9e8974e0ed6 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsb.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsb_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsb_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsb_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsb_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsb_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsb_v_i16m2(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsb_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsb_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsb_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlsb_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsb_v_i32m4(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsb_v_i32m8(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m1 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsb_v_i64m1(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsb_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlsb_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsb.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlsb_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsb_v_i64m8(base, stride, vl); +} diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c new file mode 100644 index 00000000000000..0d2e2630677226 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsbu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlsbu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlsbu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlsbu_v_u8m4(const uint8_t 
*base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlsbu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlsbu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlsbu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint16m4_t test_th_vlsbu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlsbu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlsbu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlsbu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i32.i64( poison, ptr [[BASE]], i64 
[[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlsbu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlsbu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlsbu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlsbu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlsbu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlsbu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsbu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsbu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlsbu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlsbu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c new file mode 100644 index 00000000000000..a5c80b8886c0cb --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse16.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m1_t test_th_vlse16_v_f16m1(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_f16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m2_t test_th_vlse16_v_f16m2(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_f16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m4_t test_th_vlse16_v_f16m4(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_f16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32f16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat16m8_t test_th_vlse16_v_f16m8(const _Float16 *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_f16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlse16_v_i16m1(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_th_vlse16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlse16_v_i16m2(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlse16_v_i16m4(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlse16_v_i16m8(const int16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlse16_v_u16m1(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return 
__riscv_vlse16_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlse16_v_u16m2(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlse16_v_u16m4(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlse16_v_u16m8(const uint16_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse16_v_u16m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c new file mode 100644 index 00000000000000..0c69d4be25b316 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse32.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 
-target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m1_t test_th_vlse32_v_f32m1(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_f32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m2_t test_th_vlse32_v_f32m2(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_f32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m4_t test_th_vlse32_v_f32m4(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_f32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vlse.nxv16f32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat32m8_t test_th_vlse32_v_f32m8(const float *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_f32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlse32_v_i32m1(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlse32_v_i32m2(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlse32_v_i32m4(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlse32_v_i32m8(const int32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlse32_v_u32m1(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlse32_v_u32m2(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlse32_v_u32m4(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 
noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlse32_v_u32m8(const uint32_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse32_v_u32m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c new file mode 100644 index 00000000000000..47a29309acd880 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse64.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m1_t test_th_vlse64_v_f64m1(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_f64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m2_t test_th_vlse64_v_f64m2(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_f64m2(base, 
stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m4_t test_th_vlse64_v_f64m4(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_f64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8f64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vfloat64m8_t test_th_vlse64_v_f64m8(const double *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_f64m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlse64_v_i64m1(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlse64_v_i64m2(const int64_t *base, 
ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlse64_v_i64m4(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlse64_v_i64m8(const int64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_i64m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlse64_v_u64m1(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint64m2_t test_th_vlse64_v_u64m2(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlse64_v_u64m4(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlse64_v_u64m8(const uint64_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse64_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c new file mode 100644 index 00000000000000..711d12df685ed1 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlse8.c @@ -0,0 +1,86 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlse8_v_i8m1(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlse8_v_i8m2(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlse8_v_i8m4(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlse8_v_i8m8(const int8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlse8_v_u8m1(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlse8_v_u8m2(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlse8_v_u8m4(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlse8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlse.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlse8_v_u8m8(const uint8_t *base, ptrdiff_t stride, size_t vl) { + return __riscv_vlse8_v_u8m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c new file mode 100644 index 00000000000000..f8aba0aa063f12 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsh.c @@ -0,0 +1,167 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsh_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsh_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsh_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i8m4(base, stride, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsh_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsh_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsh_v_i16m2(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsh_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return 
__riscv_vlsh_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsh_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsh_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlsh_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsh_v_i32m4(const int32_t *base, size_t 
stride, size_t vl) { + return __riscv_vlsh_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsh_v_i32m8(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsh_v_i64m1(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsh_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t 
test_th_vlsh_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsh.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_th_vlsh_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsh_v_i64m8(base, stride, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c new file mode 100644 index 00000000000000..5cdeff3d2b16d1 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlshu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlshu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i8.i64( 
poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlshu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlshu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlshu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlshu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vlshu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlshu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlshu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlshu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlshu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlshu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlshu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlshu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlshu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlshu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlshu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlshu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlshu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlshu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlshu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c new file mode 100644 index 00000000000000..0f0f854d4b75cc --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlsw.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// 
CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_th_vlsw_v_i8m1(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_th_vlsw_v_i8m2(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_th_vlsw_v_i8m4(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_th_vlsw_v_i8m8(const int8_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i8m8(base, stride, vl); 
+} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_th_vlsw_v_i16m1(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_th_vlsw_v_i16m2(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_th_vlsw_v_i16m4(const int16_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_th_vlsw_v_i16m8(const int16_t *base, size_t stride, size_t vl) { + return 
__riscv_vlsw_v_i16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_th_vlsw_v_i32m1(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_th_vlsw_v_i32m2(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_th_vlsw_v_i32m4(const int32_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_th_vlsw_v_i32m8(const int32_t *base, size_t 
stride, size_t vl) { + return __riscv_vlsw_v_i32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_th_vlsw_v_i64m1(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_th_vlsw_v_i64m2(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_th_vlsw_v_i64m4(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlsw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlsw.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t 
test_th_vlsw_v_i64m8(const int64_t *base, size_t stride, size_t vl) { + return __riscv_vlsw_v_i64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c new file mode 100644 index 00000000000000..61e52fb2876300 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vlswu.c @@ -0,0 +1,166 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_th_vlswu_v_u8m1(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u8m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_th_vlswu_v_u8m2(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u8m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i8.i64( 
poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_th_vlswu_v_u8m4(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u8m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv64i8.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_th_vlswu_v_u8m8(const uint8_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u8m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_th_vlswu_v_u16m1(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u16m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_th_vlswu_v_u16m2(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u16m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_th_vlswu_v_u16m4(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u16m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv32i16.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_th_vlswu_v_u16m8(const uint16_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u16m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_th_vlswu_v_u32m1(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u32m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_th_vlswu_v_u32m2(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u32m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_th_vlswu_v_u32m4(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u32m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv16i32.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_th_vlswu_v_u32m8(const uint32_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u32m8(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv1i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_th_vlswu_v_u64m1(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u64m1(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv2i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_th_vlswu_v_u64m2(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u64m2(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv4i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_th_vlswu_v_u64m4(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u64m4(base, stride, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_th_vlswu_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vlswu.nxv8i64.i64( poison, ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_th_vlswu_v_u64m8(const uint64_t *base, size_t stride, size_t vl) { + return __riscv_vlswu_v_u64m8(base, stride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c new file mode 100644 index 00000000000000..29f8d9a725c4dd --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssb.c @@ -0,0 +1,327 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_vssb_v_i8m1(base, 
stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_vssb_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_vssb_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_vssb_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vssb_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_vssb_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_vssb_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_vssb_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_vssb_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void 
@llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_vssb_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_vssb_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return __riscv_vssb_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_vssb_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return __riscv_vssb_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t vl) { + return __riscv_vssb_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_vssb_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_vssb_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local void @test_th_vssb_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_vssb_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_vssb_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_vssb_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u8m8(uint8_t *base, size_t stride, 
vuint8m8_t value, size_t vl) { + return __riscv_vssb_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_vssb_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_vssb_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_vssb_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv32i16.i64( [[VALUE]], ptr 
[[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_vssb_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_vssb_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return __riscv_vssb_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_vssb_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_vssb_v_u32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size_t vl) { + return __riscv_vssb_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_vssb_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssb_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_vssb_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vssb_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssb.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssb_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_vssb_v_u64m8(base, stride, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c new file mode 100644 index 00000000000000..a778e8c6adddf7 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse16.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m1(_Float16 *base, ptrdiff_t stride, vfloat16m1_t value, size_t vl) { + return __riscv_vsse16_v_f16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_th_vsse16_v_f16m2(_Float16 *base, ptrdiff_t stride, vfloat16m2_t value, size_t vl) { + return __riscv_vsse16_v_f16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m4(_Float16 *base, ptrdiff_t stride, vfloat16m4_t value, size_t vl) { + return __riscv_vsse16_v_f16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_f16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_f16m8(_Float16 *base, ptrdiff_t stride, vfloat16m8_t value, size_t vl) { + return __riscv_vsse16_v_f16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m1(int16_t *base, ptrdiff_t stride, vint16m1_t value, size_t vl) { + return __riscv_vsse16_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m2(int16_t *base, ptrdiff_t stride, vint16m2_t value, size_t vl) { + return __riscv_vsse16_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m4(int16_t *base, ptrdiff_t stride, vint16m4_t value, size_t vl) { + return __riscv_vsse16_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_i16m8(int16_t *base, ptrdiff_t stride, vint16m8_t value, size_t vl) { + return __riscv_vsse16_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m1(uint16_t *base, ptrdiff_t stride, vuint16m1_t value, size_t vl) { + return __riscv_vsse16_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m2 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m2(uint16_t *base, ptrdiff_t stride, vuint16m2_t value, size_t vl) { + return __riscv_vsse16_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m4(uint16_t *base, ptrdiff_t stride, vuint16m4_t value, size_t vl) { + return __riscv_vsse16_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse16_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse16_v_u16m8(uint16_t *base, ptrdiff_t stride, vuint16m8_t value, size_t vl) { + return __riscv_vsse16_v_u16m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c new file mode 100644 index 00000000000000..949cfd8ede2060 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse32.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: 
-disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m1(float *base, ptrdiff_t stride, vfloat32m1_t value, size_t vl) { + return __riscv_vsse32_v_f32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m2(float *base, ptrdiff_t stride, vfloat32m2_t value, size_t vl) { + return __riscv_vsse32_v_f32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m4(float *base, ptrdiff_t stride, vfloat32m4_t value, size_t vl) { + return __riscv_vsse32_v_f32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_f32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_f32m8(float *base, ptrdiff_t stride, vfloat32m8_t value, size_t vl) { + return __riscv_vsse32_v_f32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m1(int32_t *base, ptrdiff_t stride, vint32m1_t value, size_t vl) { + return __riscv_vsse32_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m2(int32_t *base, ptrdiff_t stride, vint32m2_t value, size_t vl) { + return __riscv_vsse32_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m4(int32_t *base, ptrdiff_t stride, vint32m4_t value, size_t vl) { + return __riscv_vsse32_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_i32m8 +// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_i32m8(int32_t *base, ptrdiff_t stride, vint32m8_t value, size_t vl) { + return __riscv_vsse32_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m1(uint32_t *base, ptrdiff_t stride, vuint32m1_t value, size_t vl) { + return __riscv_vsse32_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m2(uint32_t *base, ptrdiff_t stride, vuint32m2_t value, size_t vl) { + return __riscv_vsse32_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m4(uint32_t *base, ptrdiff_t stride, 
vuint32m4_t value, size_t vl) { + return __riscv_vsse32_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse32_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse32_v_u32m8(uint32_t *base, ptrdiff_t stride, vuint32m8_t value, size_t vl) { + return __riscv_vsse32_v_u32m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c new file mode 100644 index 00000000000000..f76b6951952574 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse64.c @@ -0,0 +1,126 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m1(double *base, ptrdiff_t stride, vfloat64m1_t value, size_t vl) { + return __riscv_vsse64_v_f64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
call void @llvm.riscv.th.vsse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m2(double *base, ptrdiff_t stride, vfloat64m2_t value, size_t vl) { + return __riscv_vsse64_v_f64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m4(double *base, ptrdiff_t stride, vfloat64m4_t value, size_t vl) { + return __riscv_vsse64_v_f64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_f64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_f64m8(double *base, ptrdiff_t stride, vfloat64m8_t value, size_t vl) { + return __riscv_vsse64_v_f64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m1(int64_t *base, ptrdiff_t stride, vint64m1_t value, size_t vl) { + return __riscv_vsse64_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m2 +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m2(int64_t *base, ptrdiff_t stride, vint64m2_t value, size_t vl) { + return __riscv_vsse64_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m4(int64_t *base, ptrdiff_t stride, vint64m4_t value, size_t vl) { + return __riscv_vsse64_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_i64m8(int64_t *base, ptrdiff_t stride, vint64m8_t value, size_t vl) { + return __riscv_vsse64_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m1(uint64_t *base, ptrdiff_t stride, vuint64m1_t value, size_t vl) { + 
return __riscv_vsse64_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m2(uint64_t *base, ptrdiff_t stride, vuint64m2_t value, size_t vl) { + return __riscv_vsse64_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m4(uint64_t *base, ptrdiff_t stride, vuint64m4_t value, size_t vl) { + return __riscv_vsse64_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse64_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse64_v_u64m8(uint64_t *base, ptrdiff_t stride, vuint64m8_t value, size_t vl) { + return __riscv_vsse64_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c new file mode 100644 index 00000000000000..106770712fc5a4 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vsse8.c @@ -0,0 +1,86 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m1(int8_t *base, ptrdiff_t stride, vint8m1_t value, size_t vl) { + return __riscv_vsse8_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m2(int8_t *base, ptrdiff_t stride, vint8m2_t value, size_t vl) { + return __riscv_vsse8_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m4(int8_t *base, ptrdiff_t stride, vint8m4_t value, size_t vl) { + return __riscv_vsse8_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_i8m8 +// CHECK-RV64-SAME: (ptr 
noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_i8m8(int8_t *base, ptrdiff_t stride, vint8m8_t value, size_t vl) { + return __riscv_vsse8_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m1(uint8_t *base, ptrdiff_t stride, vuint8m1_t value, size_t vl) { + return __riscv_vsse8_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m2(uint8_t *base, ptrdiff_t stride, vuint8m2_t value, size_t vl) { + return __riscv_vsse8_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m4(uint8_t *base, ptrdiff_t stride, vuint8m4_t value, size_t vl) { + return 
__riscv_vsse8_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vsse8_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vsse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vsse8_v_u8m8(uint8_t *base, ptrdiff_t stride, vuint8m8_t value, size_t vl) { + return __riscv_vsse8_v_u8m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c new file mode 100644 index 00000000000000..c6948023c4c4be --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssh.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_vssh_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 
[[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_vssh_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_vssh_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_vssh_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_vssh_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_vssh_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_vssh_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_vssh_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_vssh_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m2 +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_vssh_v_i32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return __riscv_vssh_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_vssh_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return 
__riscv_vssh_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m2(int64_t *base, size_t stride, vint64m2_t value, size_t vl) { + return __riscv_vssh_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_vssh_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_vssh_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_vssh_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_vssh_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_vssh_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t vl) { + return __riscv_vssh_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_vssh_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_vssh_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_vssh_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_vssh_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_vssh_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return __riscv_vssh_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_vssh_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_vssh_v_u32m8(base, 
stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t value, size_t vl) { + return __riscv_vssh_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_vssh_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssh_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_vssh_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssh_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssh.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// 
+void test_th_vssh_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_vssh_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c new file mode 100644 index 00000000000000..46d0c0bc20ae13 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/strided/wrappers/vssw.c @@ -0,0 +1,326 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m1(int8_t *base, size_t stride, vint8m1_t value, size_t vl) { + return __riscv_vssw_v_i8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m2(int8_t *base, size_t stride, vint8m2_t value, size_t vl) { + return __riscv_vssw_v_i8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m4(int8_t *base, size_t stride, vint8m4_t value, size_t vl) { + return __riscv_vssw_v_i8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i8m8(int8_t *base, size_t stride, vint8m8_t value, size_t vl) { + return __riscv_vssw_v_i8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m1(int16_t *base, size_t stride, vint16m1_t value, size_t vl) { + return __riscv_vssw_v_i16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m2(int16_t *base, size_t stride, vint16m2_t value, size_t vl) { + return __riscv_vssw_v_i16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m4(int16_t *base, size_t stride, vint16m4_t value, size_t vl) { + return __riscv_vssw_v_i16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i16m8(int16_t *base, size_t stride, vint16m8_t value, size_t vl) { + return __riscv_vssw_v_i16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m1(int32_t *base, size_t stride, vint32m1_t value, size_t vl) { + return __riscv_vssw_v_i32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m2(int32_t *base, size_t stride, vint32m2_t value, size_t vl) { + return __riscv_vssw_v_i32m2(base, stride, value, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m4(int32_t *base, size_t stride, vint32m4_t value, size_t vl) { + return __riscv_vssw_v_i32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i32m8(int32_t *base, size_t stride, vint32m8_t value, size_t vl) { + return __riscv_vssw_v_i32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m1(int64_t *base, size_t stride, vint64m1_t value, size_t vl) { + return __riscv_vssw_v_i64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m2(int64_t 
*base, size_t stride, vint64m2_t value, size_t vl) { + return __riscv_vssw_v_i64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m4(int64_t *base, size_t stride, vint64m4_t value, size_t vl) { + return __riscv_vssw_v_i64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_i64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_i64m8(int64_t *base, size_t stride, vint64m8_t value, size_t vl) { + return __riscv_vssw_v_i64m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m1(uint8_t *base, size_t stride, vuint8m1_t value, size_t vl) { + return __riscv_vssw_v_u8m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i8.i64( [[VALUE]], 
ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m2(uint8_t *base, size_t stride, vuint8m2_t value, size_t vl) { + return __riscv_vssw_v_u8m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m4(uint8_t *base, size_t stride, vuint8m4_t value, size_t vl) { + return __riscv_vssw_v_u8m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u8m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u8m8(uint8_t *base, size_t stride, vuint8m8_t value, size_t vl) { + return __riscv_vssw_v_u8m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m1(uint16_t *base, size_t stride, vuint16m1_t value, size_t vl) { + return __riscv_vssw_v_u16m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m2(uint16_t *base, size_t stride, vuint16m2_t value, size_t vl) { + return __riscv_vssw_v_u16m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m4(uint16_t *base, size_t stride, vuint16m4_t value, size_t vl) { + return __riscv_vssw_v_u16m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u16m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u16m8(uint16_t *base, size_t stride, vuint16m8_t value, size_t vl) { + return __riscv_vssw_v_u16m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m1(uint32_t *base, size_t stride, vuint32m1_t value, size_t vl) { + return __riscv_vssw_v_u32m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void 
@test_th_vssw_v_u32m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m2(uint32_t *base, size_t stride, vuint32m2_t value, size_t vl) { + return __riscv_vssw_v_u32m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m4(uint32_t *base, size_t stride, vuint32m4_t value, size_t vl) { + return __riscv_vssw_v_u32m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u32m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u32m8(uint32_t *base, size_t stride, vuint32m8_t value, size_t vl) { + return __riscv_vssw_v_u32m8(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m1 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m1(uint64_t *base, size_t stride, vuint64m1_t 
value, size_t vl) { + return __riscv_vssw_v_u64m1(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m2 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m2(uint64_t *base, size_t stride, vuint64m2_t value, size_t vl) { + return __riscv_vssw_v_u64m2(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m4 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m4(uint64_t *base, size_t stride, vuint64m4_t value, size_t vl) { + return __riscv_vssw_v_u64m4(base, stride, value, vl); +} + +// CHECK-RV64-LABEL: define dso_local void @test_th_vssw_v_u64m8 +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[STRIDE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: call void @llvm.riscv.th.vssw.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[STRIDE]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret void +// +void test_th_vssw_v_u64m8(uint64_t *base, size_t stride, vuint64m8_t value, size_t vl) { + return __riscv_vssw_v_u64m8(base, stride, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlb.c rename to 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlb.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlbu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlbu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle16.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c index 7c1bc0c88ef8a7..b5835f1adde755 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle16.c @@ -16,7 +16,7 @@ vfloat16m1_t test_th_vle16_v_f16m1(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat16m2_t test_th_vle16_v_f16m2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat16m4_t 
test_th_vle16_v_f16m4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat16m8_t test_th_vle16_v_f16m8(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint16m1_t test_th_vle16_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint16m2_t test_th_vle16_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint16m4_t test_th_vle16_v_i16m4(const 
int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint16m8_t test_th_vle16_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint16m1_t test_th_vle16_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint16m2_t test_th_vle16_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint16m4_t test_th_vle16_v_u16m4(const uint16_t *base, size_t 
vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle32.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c index d94a9ee587ba4d..12d6dca8ae8f8d 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle32.c @@ -16,7 +16,7 @@ vfloat32m1_t test_th_vle32_v_f32m1(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat32m2_t test_th_vle32_v_f32m2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat32m4_t 
test_th_vle32_v_f32m4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat32m8_t test_th_vle32_v_f32m8(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint32m1_t test_th_vle32_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint32m2_t test_th_vle32_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint32m4_t test_th_vle32_v_i32m4(const int32_t 
*base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint32m8_t test_th_vle32_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint32m1_t test_th_vle32_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint32m2_t test_th_vle32_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint32m4_t test_th_vle32_v_u32m4(const uint32_t *base, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle64.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c index 8759f260a8570e..a80788fef14108 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle64.c @@ -16,7 +16,7 @@ vfloat64m1_t test_th_vle64_v_f64m1(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat64m2_t test_th_vle64_v_f64m2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat64m4_t test_th_vle64_v_f64m4(const 
double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat64m8_t test_th_vle64_v_f64m8(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint64m1_t test_th_vle64_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint64m2_t test_th_vle64_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint64m4_t test_th_vle64_v_i64m4(const int64_t *base, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint64m8_t test_th_vle64_v_i64m8(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint64m1_t test_th_vle64_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint64m2_t test_th_vle64_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint64m4_t test_th_vle64_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define 
dso_local @test_th_vle64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle8.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c index 8b8ab812283a6e..669c2755b2cd4e 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vle8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vle8.c @@ -16,7 +16,7 @@ vint8m1_t test_th_vle8_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vint8m2_t test_th_vle8_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vint8m4_t test_th_vle8_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: 
define dso_local @test_th_vle8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vint8m8_t test_th_vle8_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vuint8m1_t test_th_vle8_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vuint8m2_t test_th_vle8_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vuint8m4_t test_th_vle8_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m8 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlh.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlh.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlhu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlhu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlhu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlw.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlw.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlwu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vlwu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vlwu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsb.c rename to 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsb.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse16.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c index b3a1f966e0376d..43aa311d5b8bad 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse16.c @@ -16,7 +16,7 @@ void test_th_vse16_v_f16m1(_Float16 *base, vfloat16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse16_v_f16m2(_Float16 *base, vfloat16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse16_v_f16m4(_Float16 *base, vfloat16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse16_v_f16m8(_Float16 *base, vfloat16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse16_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse16_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse16_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse16_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse16_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void test_th_vse16_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse16_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // 
CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse32.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c index 23292f9c737e2e..823c0fa3c53ac2 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse32.c @@ -16,7 +16,7 @@ void test_th_vse32_v_f32m1(float *base, vfloat32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse32_v_f32m2(float *base, vfloat32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse32_v_f32m4(float *base, vfloat32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse32_v_f32m8(float *base, vfloat32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse32_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse32_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // 
CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse32_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse32_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse32_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void test_th_vse32_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse32_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse64.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c index 06dc7d45eebcc4..89941d6c3ed5d0 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse64.c @@ -16,7 +16,7 @@ void test_th_vse64_v_f64m1(double *base, vfloat64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse64_v_f64m2(double *base, vfloat64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m4 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse64_v_f64m4(double *base, vfloat64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse64_v_f64m8(double *base, vfloat64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse64_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse64_v_i64m2(int64_t *base, vint64m2_t 
value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse64_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse64_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse64_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // 
CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void test_th_vse64_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse64_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse8.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c index 8137971e0d8fb3..3011d80a5dbbee 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vse8.c @@ -16,7 +16,7 @@ void test_th_vse8_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse8_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse8_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse8_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse8_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], 
[[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse8_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse8_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsh.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsh.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vsw.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/thead/vsw.c diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlb.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlb.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlbu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlbu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlbu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle16.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c index 76eac2e825d02d..106a41f4d8dd12 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle16.c @@ -16,7 +16,7 @@ vfloat16m1_t test_th_vle16_v_f16m1(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat16m2_t test_th_vle16_v_f16m2(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat16m4_t test_th_vle16_v_f16m4(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32f16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat16m8_t test_th_vle16_v_f16m8(const _Float16 *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint16m1_t test_th_vle16_v_i16m1(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint16m2_t test_th_vle16_v_i16m2(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint16m4_t test_th_vle16_v_i16m4(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint16m8_t test_th_vle16_v_i16m8(const int16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint16m1_t test_th_vle16_v_u16m1(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint16m2_t test_th_vle16_v_u16m2(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
+// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint16m4_t test_th_vle16_v_u16m4(const uint16_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i16.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle32.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c index fd7ff33dcbd227..33cc39f67c44b3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle32.c @@ -16,7 +16,7 @@ vfloat32m1_t test_th_vle32_v_f32m1(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat32m2_t test_th_vle32_v_f32m2(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m4 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat32m4_t test_th_vle32_v_f32m4(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16f32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat32m8_t test_th_vle32_v_f32m8(const float *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint32m1_t test_th_vle32_v_i32m1(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint32m2_t test_th_vle32_v_i32m2(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint32m4_t test_th_vle32_v_i32m4(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint32m8_t test_th_vle32_v_i32m8(const int32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint32m1_t test_th_vle32_v_u32m1(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint32m2_t test_th_vle32_v_u32m2(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint32m4_t test_th_vle32_v_u32m4(const uint32_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i32.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle64.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c index 4964b037a53cc2..8d6271362b4c09 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle64.c @@ -16,7 +16,7 @@ vfloat64m1_t test_th_vle64_v_f64m1(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vfloat64m2_t test_th_vle64_v_f64m2(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_th_vle64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vfloat64m4_t test_th_vle64_v_f64m4(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8f64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vfloat64m8_t test_th_vle64_v_f64m8(const double *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vint64m1_t test_th_vle64_v_i64m1(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vint64m2_t test_th_vle64_v_i64m2(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m4 -// 
CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vint64m4_t test_th_vle64_v_i64m4(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -86,7 +86,7 @@ vint64m8_t test_th_vle64_v_i64m8(const int64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv1i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -96,7 +96,7 @@ vuint64m1_t test_th_vle64_v_u64m1(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv2i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -106,7 +106,7 @@ vuint64m2_t test_th_vle64_v_u64m2(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv4i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -116,7 +116,7 @@ vuint64m4_t test_th_vle64_v_u64m4(const uint64_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i64.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c similarity index 95% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle8.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c index 7d82425872bfc6..b3d4fdd635dabd 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vle8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vle8.c @@ -16,7 +16,7 @@ vint8m1_t test_th_vle8_v_i8m1(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -26,7 +26,7 @@ vint8m2_t test_th_vle8_v_i8m2(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
@test_th_vle8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -36,7 +36,7 @@ vint8m4_t test_th_vle8_v_i8m4(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -46,7 +46,7 @@ vint8m8_t test_th_vle8_v_i8m8(const int8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv8i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -56,7 +56,7 @@ vuint8m1_t test_th_vle8_v_u8m1(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv16i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -66,7 +66,7 @@ vuint8m2_t test_th_vle8_v_u8m2(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv32i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] @@ -76,7 +76,7 @@ vuint8m4_t test_th_vle8_v_u8m4(const uint8_t *base, size_t vl) { } // CHECK-RV64-LABEL: define dso_local @test_th_vle8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vle.nxv64i8.i64( poison, ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlh.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlh.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlhu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlhu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlhu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlw.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlw.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlwu.c 
b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlwu.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vlwu.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsb.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsb.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsb.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse16.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse16.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c index 653d88cb48efd5..db53818cba24c3 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse16.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse16.c @@ -16,7 +16,7 @@ void test_th_vse16_v_f16m1(_Float16 *base, vfloat16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse16_v_f16m2(_Float16 *base, vfloat16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse16_v_f16m4(_Float16 *base, vfloat16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_f16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32f16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse16_v_f16m8(_Float16 *base, vfloat16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse16_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse16_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local 
void @test_th_vse16_v_i16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse16_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_i16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse16_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse16_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void 
test_th_vse16_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse16_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse16_v_u16m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i16.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse32.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse32.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c index 840117eac0046c..f579072948b781 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse32.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse32.c @@ -16,7 +16,7 @@ void test_th_vse32_v_f32m1(float *base, vfloat32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse32_v_f32m2(float *base, vfloat32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse32_v_f32m4(float *base, vfloat32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_f32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16f32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse32_v_f32m8(float *base, vfloat32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse32_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: 
(ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse32_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse32_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_i32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse32_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse32_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m2 -// CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void test_th_vse32_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse32_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse32_v_u32m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i32.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse64.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c similarity index 89% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse64.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c index 14f52a22de0061..6905618b2932ec 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse64.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse64.c @@ -16,7 +16,7 @@ void 
test_th_vse64_v_f64m1(double *base, vfloat64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse64_v_f64m2(double *base, vfloat64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse64_v_f64m4(double *base, vfloat64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_f64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8f64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse64_v_f64m8(double *base, vfloat64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( 
[[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse64_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse64_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse64_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_i64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -86,7 +86,7 @@ void test_th_vse64_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // 
CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv1i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -96,7 +96,7 @@ void test_th_vse64_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv2i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -106,7 +106,7 @@ void test_th_vse64_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv4i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -116,7 +116,7 @@ void test_th_vse64_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse64_v_u64m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i64.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse8.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c similarity index 89% rename from 
clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse8.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c index 5d3f62cb19431a..788320f08293b6 100644 --- a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vse8.c +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vse8.c @@ -16,7 +16,7 @@ void test_th_vse8_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -26,7 +26,7 @@ void test_th_vse8_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -36,7 +36,7 @@ void test_th_vse8_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_i8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -46,7 +46,7 @@ void test_th_vse8_v_i8m8(int8_t *base, vint8m8_t value, 
size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m1 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv8i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -56,7 +56,7 @@ void test_th_vse8_v_u8m1(uint8_t *base, vuint8m1_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m2 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv16i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -66,7 +66,7 @@ void test_th_vse8_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m4 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv32i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void @@ -76,7 +76,7 @@ void test_th_vse8_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { } // CHECK-RV64-LABEL: define dso_local void @test_th_vse8_v_u8m8 -// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], [[VALUE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: call void @llvm.riscv.th.vse.nxv64i8.i64( [[VALUE]], ptr [[BASE]], i64 [[VL]]) // CHECK-RV64-NEXT: ret void diff 
--git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsh.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsh.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsh.c diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsw.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c similarity index 100% rename from clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vsw.c rename to clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/unit-stride/wrappers/vsw.c