[Clang][XTHeadVector] Add wrapper macros that forward intrinsic calls from 1.0 to xtheadvector (llvm#46)

* [Clang][XTHeadVector] Add intrinsic wrapper macros

  in order to be compatible with RVV 1.0 (a sketch of the macro shape follows below)

* [Clang][XTHeadVector] Test intrinsic wrapper macros
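The wrappers themselves are thin preprocessor forwarders: a call written against an RVV 1.0-style intrinsic name expands to the corresponding xtheadvector builtin. A minimal sketch of that shape is shown below; the th-prefixed builtin name is assumed for illustration and is not copied from the patch.

// Hypothetical sketch of one wrapper macro (builtin name assumed):
// the 1.0-style name forwards unchanged to the xtheadvector intrinsic.
#define __riscv_vlb_v_i8m1(base, vl) __riscv_th_vlb_v_i8m1((base), (vl))

// A caller using the 1.0-style name, e.g.
//   vint8m1_t v = __riscv_vlb_v_i8m1(src, vl);
// therefore lowers to @llvm.riscv.th.vlb.nxv8i8.i64, which is exactly
// what the tests below verify with FileCheck.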
imkiva authored and RevySR committed Apr 3, 2024
1 parent dfb955e commit 08c566b
Showing 18 changed files with 3,193 additions and 0 deletions.
289 changes: 289 additions & 0 deletions clang/include/clang/Basic/riscv_vector_xtheadv.td

Large diffs are not rendered by default.

166 changes: 166 additions & 0 deletions clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlb.c
@@ -0,0 +1,166 @@
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
// RUN: FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_th_vlb_v_i8m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.th.vlb.nxv8i8.i64(<vscale x 8 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_th_vlb_v_i8m1(const int8_t *base, size_t vl) {
return __riscv_vlb_v_i8m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_th_vlb_v_i8m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.th.vlb.nxv16i8.i64(<vscale x 16 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
//
vint8m2_t test_th_vlb_v_i8m2(const int8_t *base, size_t vl) {
return __riscv_vlb_v_i8m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_th_vlb_v_i8m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.th.vlb.nxv32i8.i64(<vscale x 32 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
//
vint8m4_t test_th_vlb_v_i8m4(const int8_t *base, size_t vl) {
return __riscv_vlb_v_i8m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_th_vlb_v_i8m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.th.vlb.nxv64i8.i64(<vscale x 64 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
//
vint8m8_t test_th_vlb_v_i8m8(const int8_t *base, size_t vl) {
return __riscv_vlb_v_i8m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_th_vlb_v_i16m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.th.vlb.nxv4i16.i64(<vscale x 4 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_th_vlb_v_i16m1(const int16_t *base, size_t vl) {
return __riscv_vlb_v_i16m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_th_vlb_v_i16m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.th.vlb.nxv8i16.i64(<vscale x 8 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
//
vint16m2_t test_th_vlb_v_i16m2(const int16_t *base, size_t vl) {
return __riscv_vlb_v_i16m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_th_vlb_v_i16m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.th.vlb.nxv16i16.i64(<vscale x 16 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
//
vint16m4_t test_th_vlb_v_i16m4(const int16_t *base, size_t vl) {
return __riscv_vlb_v_i16m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_th_vlb_v_i16m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.th.vlb.nxv32i16.i64(<vscale x 32 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
//
vint16m8_t test_th_vlb_v_i16m8(const int16_t *base, size_t vl) {
return __riscv_vlb_v_i16m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_th_vlb_v_i32m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.th.vlb.nxv2i32.i64(<vscale x 2 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_th_vlb_v_i32m1(const int32_t *base, size_t vl) {
return __riscv_vlb_v_i32m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_th_vlb_v_i32m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.th.vlb.nxv4i32.i64(<vscale x 4 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
//
vint32m2_t test_th_vlb_v_i32m2(const int32_t *base, size_t vl) {
return __riscv_vlb_v_i32m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_th_vlb_v_i32m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.th.vlb.nxv8i32.i64(<vscale x 8 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
//
vint32m4_t test_th_vlb_v_i32m4(const int32_t *base, size_t vl) {
return __riscv_vlb_v_i32m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_th_vlb_v_i32m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.th.vlb.nxv16i32.i64(<vscale x 16 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
//
vint32m8_t test_th_vlb_v_i32m8(const int32_t *base, size_t vl) {
return __riscv_vlb_v_i32m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_th_vlb_v_i64m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.th.vlb.nxv1i64.i64(<vscale x 1 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_th_vlb_v_i64m1(const int64_t *base, size_t vl) {
return __riscv_vlb_v_i64m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_th_vlb_v_i64m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.th.vlb.nxv2i64.i64(<vscale x 2 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_th_vlb_v_i64m2(const int64_t *base, size_t vl) {
return __riscv_vlb_v_i64m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_th_vlb_v_i64m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.th.vlb.nxv4i64.i64(<vscale x 4 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_th_vlb_v_i64m4(const int64_t *base, size_t vl) {
return __riscv_vlb_v_i64m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_th_vlb_v_i64m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.th.vlb.nxv8i64.i64(<vscale x 8 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_th_vlb_v_i64m8(const int64_t *base, size_t vl) {
return __riscv_vlb_v_i64m8(base, vl);
}
166 changes: 166 additions & 0 deletions clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/wrappers/vlbu.c
@@ -0,0 +1,166 @@
// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \
// RUN: -disable-O0-optnone -emit-llvm %s -o - | \
// RUN: opt -S -passes=mem2reg | \
// RUN: FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_th_vlbu_v_u8m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.th.vlbu.nxv8i8.i64(<vscale x 8 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_th_vlbu_v_u8m1(const uint8_t *base, size_t vl) {
return __riscv_vlbu_v_u8m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_th_vlbu_v_u8m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.th.vlbu.nxv16i8.i64(<vscale x 16 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_th_vlbu_v_u8m2(const uint8_t *base, size_t vl) {
return __riscv_vlbu_v_u8m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_th_vlbu_v_u8m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.th.vlbu.nxv32i8.i64(<vscale x 32 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_th_vlbu_v_u8m4(const uint8_t *base, size_t vl) {
return __riscv_vlbu_v_u8m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_th_vlbu_v_u8m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.th.vlbu.nxv64i8.i64(<vscale x 64 x i8> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_th_vlbu_v_u8m8(const uint8_t *base, size_t vl) {
return __riscv_vlbu_v_u8m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_th_vlbu_v_u16m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.th.vlbu.nxv4i16.i64(<vscale x 4 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_th_vlbu_v_u16m1(const uint16_t *base, size_t vl) {
return __riscv_vlbu_v_u16m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_th_vlbu_v_u16m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.th.vlbu.nxv8i16.i64(<vscale x 8 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_th_vlbu_v_u16m2(const uint16_t *base, size_t vl) {
return __riscv_vlbu_v_u16m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_th_vlbu_v_u16m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.th.vlbu.nxv16i16.i64(<vscale x 16 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_th_vlbu_v_u16m4(const uint16_t *base, size_t vl) {
return __riscv_vlbu_v_u16m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_th_vlbu_v_u16m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.th.vlbu.nxv32i16.i64(<vscale x 32 x i16> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_th_vlbu_v_u16m8(const uint16_t *base, size_t vl) {
return __riscv_vlbu_v_u16m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_th_vlbu_v_u32m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.th.vlbu.nxv2i32.i64(<vscale x 2 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_th_vlbu_v_u32m1(const uint32_t *base, size_t vl) {
return __riscv_vlbu_v_u32m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_th_vlbu_v_u32m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.th.vlbu.nxv4i32.i64(<vscale x 4 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_th_vlbu_v_u32m2(const uint32_t *base, size_t vl) {
return __riscv_vlbu_v_u32m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_th_vlbu_v_u32m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.th.vlbu.nxv8i32.i64(<vscale x 8 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_th_vlbu_v_u32m4(const uint32_t *base, size_t vl) {
return __riscv_vlbu_v_u32m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_th_vlbu_v_u32m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.th.vlbu.nxv16i32.i64(<vscale x 16 x i32> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_th_vlbu_v_u32m8(const uint32_t *base, size_t vl) {
return __riscv_vlbu_v_u32m8(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_th_vlbu_v_u64m1
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.th.vlbu.nxv1i64.i64(<vscale x 1 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_th_vlbu_v_u64m1(const uint64_t *base, size_t vl) {
return __riscv_vlbu_v_u64m1(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_th_vlbu_v_u64m2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.th.vlbu.nxv2i64.i64(<vscale x 2 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_th_vlbu_v_u64m2(const uint64_t *base, size_t vl) {
return __riscv_vlbu_v_u64m2(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_th_vlbu_v_u64m4
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.th.vlbu.nxv4i64.i64(<vscale x 4 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_th_vlbu_v_u64m4(const uint64_t *base, size_t vl) {
return __riscv_vlbu_v_u64m4(base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_th_vlbu_v_u64m8
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.th.vlbu.nxv8i64.i64(<vscale x 8 x i64> poison, ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_th_vlbu_v_u64m8(const uint64_t *base, size_t vl) {
return __riscv_vlbu_v_u64m8(base, vl);
}
