There is currently no OFP8 scalar type in either the Clang or the LLVM type system, so the OFP8 RVV vector types are lowered to i8 LLVM vector types for now. Only a distinct Clang type is introduced, because the intrinsic definitions require it: if the Clang types were also plain uint8 vector types, the E4M3 and E5M2 types could not be distinguished, and an additional type suffix would have to be appended to every intrinsic name. Intrinsic spec update PR: https://github.com/riscv-non-isa/riscv-rvv-intrinsic-doc/pull/432 — vreinterpret intrinsic PR: https://github.com/llvm/llvm-project/pull/191626. DO NOT MERGE: the intrinsic spec must be merged first before the zvfofp8min change can land.
1109 lines · 71 KiB · C
// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 5
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvfbfmin \
// RUN:   -target-feature +experimental-zvfofp8min -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8(vbfloat16mf4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e4m3mf8(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4(vbfloat16mf2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e4m3mf4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e4m3mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_w_bf16m1_f8e4m3mf2(vbfloat16m1_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e4m3mf2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e4m3m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_w_bf16m2_f8e4m3m1(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e4m3m1(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e4m3m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_w_bf16m4_f8e4m3m2(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e4m3m2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e4m3m4(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_f_f_w_bf16m8_f8e4m3m4(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e4m3m4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e4m3mf2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e4m3m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_w_bf16m2_f8e4m3m1_m(vbool8_t vm, vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e4m3m1_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e4m3m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_w_bf16m4_f8e4m3m2_m(vbool4_t vm, vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e4m3m2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e4m3m4_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_f_f_w_bf16m8_f8e4m3m4_m(vbool2_t vm, vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e4m3m4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm(vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm(vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm(vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm(vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm(vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e4m3mf8_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e4m3mf8_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e4m3mf4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e4m3mf4_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e4m3mf2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e4m3mf2_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e4m3m1_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e4m3m1_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e4m3m2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e4m3m2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e4m3m4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e4m3m4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8(vbfloat16mf4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e5m2mf8(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4(vbfloat16mf2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e5m2mf4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e5m2mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_f_f_w_bf16m1_f8e5m2mf2(vbfloat16m1_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e5m2mf2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e5m2m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_f_f_w_bf16m2_f8e5m2m1(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e5m2m1(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e5m2m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_f_f_w_bf16m4_f8e5m2m2(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e5m2m2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e5m2m4(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_f_f_w_bf16m8_f8e5m2m4(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e5m2m4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4(vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e5m2mf2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e5m2m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_f_f_w_bf16m2_f8e5m2m1_m(vbool8_t vm, vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e5m2m1_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e5m2m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_f_f_w_bf16m4_f8e5m2m2_m(vbool4_t vm, vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e5m2m2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e5m2m4_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_f_f_w_bf16m8_f8e5m2m4_m(vbool2_t vm, vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e5m2m4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_m(vm, vs2, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm(vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm(vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm(vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm(vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm(vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm(vbfloat16m2_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm(vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm(vbfloat16m4_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm(vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.alt.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm(vbfloat16m8_t vs2, size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm(vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm(vs2, __RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf4_f8e5m2mf8_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv1i8.nxv1bf16.i64(<vscale x 1 x i8> poison, <vscale x 1 x bfloat> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm_m(vbool64_t vm,
|
|
vbfloat16mf4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf4_f8e5m2mf8_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16mf2_f8e5m2mf4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv2i8.nxv2bf16.i64(<vscale x 2 x i8> poison, <vscale x 2 x bfloat> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm_m(vbool32_t vm,
|
|
vbfloat16mf2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16mf2_f8e5m2mf4_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m1_f8e5m2mf2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv4i8.nxv4bf16.i64(<vscale x 4 x i8> poison, <vscale x 4 x bfloat> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm_m(vbool16_t vm,
|
|
vbfloat16m1_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m1_f8e5m2mf2_rm_m(vm, vs2,
|
|
__RISCV_FRM_RNE, vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m2_f8e5m2m1_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv8i8.nxv8bf16.i64(<vscale x 8 x i8> poison, <vscale x 8 x bfloat> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m1_t test_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm_m(vbool8_t vm,
|
|
vbfloat16m2_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m2_f8e5m2m1_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m4_f8e5m2m2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv16i8.nxv16bf16.i64(<vscale x 16 x i8> poison, <vscale x 16 x bfloat> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m2_t test_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm_m(vbool4_t vm,
|
|
vbfloat16m4_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m4_f8e5m2m2_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.f.f.w.alt.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_f_f_w_bf16m8_f8e5m2m4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 32 x i1> [[VM:%.*]], <vscale x 32 x bfloat> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vfncvt.sat.f.f.w.alt.mask.nxv32i8.nxv32bf16.i64(<vscale x 32 x i8> poison, <vscale x 32 x bfloat> [[VS2]], <vscale x 32 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2m4_t test_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm_m(vbool2_t vm,
|
|
vbfloat16m8_t vs2,
|
|
size_t vl) {
|
|
return __riscv_vfncvt_sat_f_f_w_bf16m8_f8e5m2m4_rm_m(vm, vs2, __RISCV_FRM_RNE,
|
|
vl);
|
|
}
|