Currently there is no OFP8 scalar type in either the clang or LLVM type system, so the vector OFP8 RVV types are lowered to i8 LLVM vector types for now. Only a clang-level type is added because of how the intrinsics are defined: if the clang types were also plain uint8 vector types, the E4M3 and E5M2 types could not be distinguished, and an additional type suffix would have to be appended to every intrinsic name. Intrinsic spec update PR: https://github.com/riscv-non-isa/riscv-rvv-intrinsic-doc/pull/432. vreinterpret intrinsic PR: https://github.com/llvm/llvm-project/pull/191626. DO NOT MERGE: the intrinsic spec must be merged first before the zvfofp8min change can land.
849 lines
53 KiB
C
// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 5
|
|
// REQUIRES: riscv-registered-target
|
|
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvfbfmin \
|
|
// RUN: -target-feature +experimental-zvfofp8min -disable-O0-optnone \
|
|
// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
|
|
// RUN: FileCheck --check-prefix=CHECK-RV64 %s
|
|
|
|
#include <riscv_vector.h>
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e4m3mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_q_f8e4m3mf8(vfloat32mf2_t vs2, size_t vl) {
  // No frm argument: lowered with frm=7 (dynamic) and a poison passthru operand.
  return __riscv_vfncvt_f_f_q_f8e4m3mf8(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_q_f8e4m3mf8(vfloat32mf2_t vs2, size_t vl) {
  // Saturating narrowing convert; same lowering but via the .sat intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf8(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e4m3mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_q_f8e4m3mf4(vfloat32m1_t vs2, size_t vl) {
  // No frm argument: lowered with frm=7 (dynamic) and a poison passthru operand.
  return __riscv_vfncvt_f_f_q_f8e4m3mf4(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_q_f8e4m3mf4(vfloat32m1_t vs2, size_t vl) {
  // Saturating narrowing convert; same lowering but via the .sat intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf4(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e4m3mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_q_f8e4m3mf2(vfloat32m2_t vs2, size_t vl) {
  // No frm argument: lowered with frm=7 (dynamic) and a poison passthru operand.
  return __riscv_vfncvt_f_f_q_f8e4m3mf2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_q_f8e4m3mf2(vfloat32m2_t vs2, size_t vl) {
  // Saturating narrowing convert; same lowering but via the .sat intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e4m3m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_q_f8e4m3m1(vfloat32m4_t vs2, size_t vl) {
  // No frm argument: lowered with frm=7 (dynamic) and a poison passthru operand.
  return __riscv_vfncvt_f_f_q_f8e4m3m1(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_q_f8e4m3m1(vfloat32m4_t vs2, size_t vl) {
  // Saturating narrowing convert; same lowering but via the .sat intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m1(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e4m3m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_q_f8e4m3m2(vfloat32m8_t vs2, size_t vl) {
  // No frm argument: lowered with frm=7 (dynamic) and a poison passthru operand.
  return __riscv_vfncvt_f_f_q_f8e4m3m2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_q_f8e4m3m2(vfloat32m8_t vs2, size_t vl) {
  // Saturating narrowing convert; same lowering but via the .sat intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e4m3mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_q_f8e4m3mf8_m(vbool64_t vm, vfloat32mf2_t vs2,
                                               size_t vl) {
  // Masked form: mask-agnostic policy (trailing i64 3 in the lowered call).
  return __riscv_vfncvt_f_f_q_f8e4m3mf8_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_q_f8e4m3mf8_m(vbool64_t vm, vfloat32mf2_t vs2,
                                                   size_t vl) {
  // Masked saturating form: mask-agnostic policy (trailing i64 3).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf8_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e4m3mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_q_f8e4m3mf4_m(vbool32_t vm, vfloat32m1_t vs2,
                                               size_t vl) {
  // Masked form: mask-agnostic policy (trailing i64 3 in the lowered call).
  return __riscv_vfncvt_f_f_q_f8e4m3mf4_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_q_f8e4m3mf4_m(vbool32_t vm, vfloat32m1_t vs2,
                                                   size_t vl) {
  // Masked saturating form: mask-agnostic policy (trailing i64 3).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf4_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e4m3mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_q_f8e4m3mf2_m(vbool16_t vm, vfloat32m2_t vs2,
                                               size_t vl) {
  // Masked form: mask-agnostic policy (trailing i64 3 in the lowered call).
  return __riscv_vfncvt_f_f_q_f8e4m3mf2_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_q_f8e4m3mf2_m(vbool16_t vm, vfloat32m2_t vs2,
                                                   size_t vl) {
  // Masked saturating form: mask-agnostic policy (trailing i64 3).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf2_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e4m3m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_q_f8e4m3m1_m(vbool8_t vm, vfloat32m4_t vs2,
                                             size_t vl) {
  // Masked form: mask-agnostic policy (trailing i64 3 in the lowered call).
  return __riscv_vfncvt_f_f_q_f8e4m3m1_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_q_f8e4m3m1_m(vbool8_t vm, vfloat32m4_t vs2,
                                                 size_t vl) {
  // Masked saturating form: mask-agnostic policy (trailing i64 3).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m1_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e4m3m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_q_f8e4m3m2_m(vbool4_t vm, vfloat32m8_t vs2,
                                             size_t vl) {
  // Masked form: mask-agnostic policy (trailing i64 3 in the lowered call).
  return __riscv_vfncvt_f_f_q_f8e4m3m2_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_q_f8e4m3m2_m(vbool4_t vm, vfloat32m8_t vs2,
                                                 size_t vl) {
  // Masked saturating form: mask-agnostic policy (trailing i64 3).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m2_m(vm, vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e4m3mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_q_f8e4m3mf8_rm(vfloat32mf2_t vs2, size_t vl) {
  // Explicit __RISCV_FRM_RNE becomes the i64 0 frm operand in the lowered call.
  return __riscv_vfncvt_f_f_q_f8e4m3mf8_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_q_f8e4m3mf8_rm(vfloat32mf2_t vs2, size_t vl) {
  // Saturating variant with explicit RNE rounding (frm operand 0).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf8_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e4m3mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_q_f8e4m3mf4_rm(vfloat32m1_t vs2, size_t vl) {
  // Explicit __RISCV_FRM_RNE becomes the i64 0 frm operand in the lowered call.
  return __riscv_vfncvt_f_f_q_f8e4m3mf4_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_q_f8e4m3mf4_rm(vfloat32m1_t vs2, size_t vl) {
  // Saturating variant with explicit RNE rounding (frm operand 0).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf4_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e4m3mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_q_f8e4m3mf2_rm(vfloat32m2_t vs2, size_t vl) {
  // Explicit __RISCV_FRM_RNE becomes the i64 0 frm operand in the lowered call.
  return __riscv_vfncvt_f_f_q_f8e4m3mf2_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_q_f8e4m3mf2_rm(vfloat32m2_t vs2, size_t vl) {
  // Saturating variant with explicit RNE rounding (frm operand 0).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf2_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e4m3m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_q_f8e4m3m1_rm(vfloat32m4_t vs2, size_t vl) {
  // Explicit __RISCV_FRM_RNE becomes the i64 0 frm operand in the lowered call.
  return __riscv_vfncvt_f_f_q_f8e4m3m1_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_q_f8e4m3m1_rm(vfloat32m4_t vs2, size_t vl) {
  // Saturating variant with explicit RNE rounding (frm operand 0).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m1_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e4m3m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_q_f8e4m3m2_rm(vfloat32m8_t vs2, size_t vl) {
  // Explicit __RISCV_FRM_RNE becomes the i64 0 frm operand in the lowered call.
  return __riscv_vfncvt_f_f_q_f8e4m3m2_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_q_f8e4m3m2_rm(vfloat32m8_t vs2, size_t vl) {
  // Saturating variant with explicit RNE rounding (frm operand 0).
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m2_rm(vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e4m3mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_f_f_q_f8e4m3mf8_rm_m(vbool64_t vm, vfloat32mf2_t vs2,
                                                  size_t vl) {
  // Masked + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_f_f_q_f8e4m3mf8_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf8_t test_vfncvt_sat_f_f_q_f8e4m3mf8_rm_m(vbool64_t vm,
                                                      vfloat32mf2_t vs2, size_t vl) {
  // Masked saturating + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf8_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e4m3mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_f_f_q_f8e4m3mf4_rm_m(vbool32_t vm, vfloat32m1_t vs2,
                                                  size_t vl) {
  // Masked + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_f_f_q_f8e4m3mf4_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf4_t test_vfncvt_sat_f_f_q_f8e4m3mf4_rm_m(vbool32_t vm, vfloat32m1_t vs2,
                                                      size_t vl) {
  // Masked saturating + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf4_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e4m3mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_f_f_q_f8e4m3mf2_rm_m(vbool16_t vm, vfloat32m2_t vs2,
                                                  size_t vl) {
  // Masked + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_f_f_q_f8e4m3mf2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e4m3mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3mf2_t test_vfncvt_sat_f_f_q_f8e4m3mf2_rm_m(vbool16_t vm, vfloat32m2_t vs2,
                                                      size_t vl) {
  // Masked saturating + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3mf2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e4m3m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_f_f_q_f8e4m3m1_rm_m(vbool8_t vm, vfloat32m4_t vs2,
                                                size_t vl) {
  // Masked + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_f_f_q_f8e4m3m1_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m1_t test_vfncvt_sat_f_f_q_f8e4m3m1_rm_m(vbool8_t vm, vfloat32m4_t vs2,
                                                    size_t vl) {
  // Masked saturating + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m1_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e4m3m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_f_f_q_f8e4m3m2_rm_m(vbool4_t vm, vfloat32m8_t vs2,
                                                size_t vl) {
  // Masked + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_f_f_q_f8e4m3m2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e4m3m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e4m3m2_t test_vfncvt_sat_f_f_q_f8e4m3m2_rm_m(vbool4_t vm, vfloat32m8_t vs2,
                                                    size_t vl) {
  // Masked saturating + explicit RNE rounding: frm operand 0, policy operand 3.
  return __riscv_vfncvt_sat_f_f_q_f8e4m3m2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e5m2mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_f_f_q_f8e5m2mf8(vfloat32mf2_t vs2, size_t vl) {
  // E5M2 destination selects the '.alt' flavor of the intrinsic (see CHECK).
  return __riscv_vfncvt_f_f_q_f8e5m2mf8(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf8(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_q_f8e5m2mf8(vfloat32mf2_t vs2, size_t vl) {
  // Saturating E5M2 variant: lowered to the '.sat...alt' intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf8(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e5m2mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_f_f_q_f8e5m2mf4(vfloat32m1_t vs2, size_t vl) {
  // E5M2 destination selects the '.alt' flavor of the intrinsic (see CHECK).
  return __riscv_vfncvt_f_f_q_f8e5m2mf4(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf4(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_q_f8e5m2mf4(vfloat32m1_t vs2, size_t vl) {
  // Saturating E5M2 variant: lowered to the '.sat...alt' intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf4(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e5m2mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_f_f_q_f8e5m2mf2(vfloat32m2_t vs2, size_t vl) {
  // E5M2 destination selects the '.alt' flavor of the intrinsic (see CHECK).
  return __riscv_vfncvt_f_f_q_f8e5m2mf2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf2(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_q_f8e5m2mf2(vfloat32m2_t vs2, size_t vl) {
  // Saturating E5M2 variant: lowered to the '.sat...alt' intrinsic.
  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf2(vs2, vl);
}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e5m2m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Unmasked f32 (LMUL=4) -> OFP8 E5M2 (LMUL=1) narrowing convert; no rm
// argument, so the IR carries frm operand 7 (dynamic rounding mode).
vfloat8e5m2m1_t test_vfncvt_f_f_q_f8e5m2m1(vfloat32m4_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m1(vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m1(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Saturating form: lowers to the vfncvt.sat.f.f.q.alt intrinsic (see CHECK).
vfloat8e5m2m1_t test_vfncvt_sat_f_f_q_f8e5m2m1(vfloat32m4_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m1(vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e5m2m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Unmasked f32 (LMUL=8) -> OFP8 E5M2 (LMUL=2) narrowing convert; no rm
// argument, so the IR carries frm operand 7 (dynamic rounding mode).
vfloat8e5m2m2_t test_vfncvt_f_f_q_f8e5m2m2(vfloat32m8_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m2(vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m2(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 7, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Saturating form: lowers to the vfncvt.sat.f.f.q.alt intrinsic (see CHECK).
vfloat8e5m2m2_t test_vfncvt_sat_f_f_q_f8e5m2m2(vfloat32m8_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m2(vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e5m2mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Masked variant: vm selects active lanes; the IR uses the .mask intrinsic
// with policy operand 3 (tail-agnostic, mask-agnostic — see CHECK).
vfloat8e5m2mf8_t test_vfncvt_f_f_q_f8e5m2mf8_m(vbool64_t vm, vfloat32mf2_t vs2,


                                               size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf8_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf8_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating variant: .sat.…alt.mask intrinsic, policy operand 3.
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_q_f8e5m2mf8_m(vbool64_t vm, vfloat32mf2_t vs2,


                                                   size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf8_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e5m2mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Masked variant: vm selects active lanes; .mask intrinsic, policy operand 3.
vfloat8e5m2mf4_t test_vfncvt_f_f_q_f8e5m2mf4_m(vbool32_t vm, vfloat32m1_t vs2,


                                               size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf4_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf4_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating variant: .sat.…alt.mask intrinsic, policy operand 3.
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_q_f8e5m2mf4_m(vbool32_t vm, vfloat32m1_t vs2,


                                                   size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf4_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e5m2mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Masked variant: vm selects active lanes; .mask intrinsic, policy operand 3.
vfloat8e5m2mf2_t test_vfncvt_f_f_q_f8e5m2mf2_m(vbool16_t vm, vfloat32m2_t vs2,


                                               size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf2_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf2_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating variant: .sat.…alt.mask intrinsic, policy operand 3.
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_q_f8e5m2mf2_m(vbool16_t vm, vfloat32m2_t vs2,


                                                   size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf2_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e5m2m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Masked variant: vm selects active lanes; .mask intrinsic, policy operand 3.
vfloat8e5m2m1_t test_vfncvt_f_f_q_f8e5m2m1_m(vbool8_t vm, vfloat32m4_t vs2,


                                             size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m1_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m1_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating variant: .sat.…alt.mask intrinsic, policy operand 3.
vfloat8e5m2m1_t test_vfncvt_sat_f_f_q_f8e5m2m1_m(vbool8_t vm, vfloat32m4_t vs2,


                                                 size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m1_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e5m2m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Masked variant: vm selects active lanes; .mask intrinsic, policy operand 3.
vfloat8e5m2m2_t test_vfncvt_f_f_q_f8e5m2m2_m(vbool4_t vm, vfloat32m8_t vs2,


                                             size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m2_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m2_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 7, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating variant: .sat.…alt.mask intrinsic, policy operand 3.
vfloat8e5m2m2_t test_vfncvt_sat_f_f_q_f8e5m2m2_m(vbool4_t vm, vfloat32m8_t vs2,


                                                 size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m2_m(vm, vs2, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e5m2mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Static rounding mode: __RISCV_FRM_RNE lowers to frm operand 0 in the IR.
vfloat8e5m2mf8_t test_vfncvt_f_f_q_f8e5m2mf8_rm(vfloat32mf2_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf8_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf8_rm(
|
|
// CHECK-RV64-SAME: <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Saturating + static rounding mode (__RISCV_FRM_RNE -> frm operand 0).
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_q_f8e5m2mf8_rm(vfloat32mf2_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf8_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e5m2mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Static rounding mode: __RISCV_FRM_RNE lowers to frm operand 0 in the IR.
vfloat8e5m2mf4_t test_vfncvt_f_f_q_f8e5m2mf4_rm(vfloat32m1_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf4_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf4_rm(
|
|
// CHECK-RV64-SAME: <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Saturating + static rounding mode (__RISCV_FRM_RNE -> frm operand 0).
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_q_f8e5m2mf4_rm(vfloat32m1_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf4_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e5m2mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Static rounding mode: __RISCV_FRM_RNE lowers to frm operand 0 in the IR.
vfloat8e5m2mf2_t test_vfncvt_f_f_q_f8e5m2mf2_rm(vfloat32m2_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf2_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Saturating + static rounding mode (__RISCV_FRM_RNE -> frm operand 0).
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_q_f8e5m2mf2_rm(vfloat32m2_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf2_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e5m2m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Static rounding mode: __RISCV_FRM_RNE lowers to frm operand 0 in the IR.
vfloat8e5m2m1_t test_vfncvt_f_f_q_f8e5m2m1_rm(vfloat32m4_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m1_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m1_rm(
|
|
// CHECK-RV64-SAME: <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Saturating + static rounding mode (__RISCV_FRM_RNE -> frm operand 0).
vfloat8e5m2m1_t test_vfncvt_sat_f_f_q_f8e5m2m1_rm(vfloat32m4_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m1_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e5m2m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.alt.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Static rounding mode: __RISCV_FRM_RNE lowers to frm operand 0 in the IR.
vfloat8e5m2m2_t test_vfncvt_f_f_q_f8e5m2m2_rm(vfloat32m8_t vs2, size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m2_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m2_rm(
|
|
// CHECK-RV64-SAME: <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], i64 0, i64 [[VL]])
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Saturating + static rounding mode (__RISCV_FRM_RNE -> frm operand 0).
vfloat8e5m2m2_t test_vfncvt_sat_f_f_q_f8e5m2m2_rm(vfloat32m8_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m2_rm(vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_f_f_q_f8e5m2mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Masked + static rounding mode (__RISCV_FRM_RNE -> frm operand 0, policy 3).
vfloat8e5m2mf8_t test_vfncvt_f_f_q_f8e5m2mf8_rm_m(vbool64_t vm, vfloat32mf2_t vs2,


                                                  size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf8_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf8_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 1 x i1> [[VM:%.*]], <vscale x 1 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv1i8.nxv1f32.i64(<vscale x 1 x i8> poison, <vscale x 1 x float> [[VS2]], <vscale x 1 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating + static rounding mode (frm operand 0, policy 3).
vfloat8e5m2mf8_t test_vfncvt_sat_f_f_q_f8e5m2mf8_rm_m(vbool64_t vm,


                                                      vfloat32mf2_t vs2, size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf8_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_f_f_q_f8e5m2mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Masked + static rounding mode (__RISCV_FRM_RNE -> frm operand 0, policy 3).
vfloat8e5m2mf4_t test_vfncvt_f_f_q_f8e5m2mf4_rm_m(vbool32_t vm, vfloat32m1_t vs2,


                                                  size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf4_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf4_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 2 x i1> [[VM:%.*]], <vscale x 2 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv2i8.nxv2f32.i64(<vscale x 2 x i8> poison, <vscale x 2 x float> [[VS2]], <vscale x 2 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating + static rounding mode (frm operand 0, policy 3).
vfloat8e5m2mf4_t test_vfncvt_sat_f_f_q_f8e5m2mf4_rm_m(vbool32_t vm, vfloat32m1_t vs2,


                                                      size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf4_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_f_f_q_f8e5m2mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Masked + static rounding mode (__RISCV_FRM_RNE -> frm operand 0, policy 3).
vfloat8e5m2mf2_t test_vfncvt_f_f_q_f8e5m2mf2_rm_m(vbool16_t vm, vfloat32m2_t vs2,


                                                  size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2mf2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vfncvt_sat_f_f_q_f8e5m2mf2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 4 x i1> [[VM:%.*]], <vscale x 4 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv4i8.nxv4f32.i64(<vscale x 4 x i8> poison, <vscale x 4 x float> [[VS2]], <vscale x 4 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating + static rounding mode (frm operand 0, policy 3).
vfloat8e5m2mf2_t test_vfncvt_sat_f_f_q_f8e5m2mf2_rm_m(vbool16_t vm, vfloat32m2_t vs2,


                                                      size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2mf2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_f_f_q_f8e5m2m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Masked + static rounding mode (__RISCV_FRM_RNE -> frm operand 0, policy 3).
vfloat8e5m2m1_t test_vfncvt_f_f_q_f8e5m2m1_rm_m(vbool8_t vm, vfloat32m4_t vs2,


                                                size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m1_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m1_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 8 x i1> [[VM:%.*]], <vscale x 8 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv8i8.nxv8f32.i64(<vscale x 8 x i8> poison, <vscale x 8 x float> [[VS2]], <vscale x 8 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating + static rounding mode (frm operand 0, policy 3).
vfloat8e5m2m1_t test_vfncvt_sat_f_f_q_f8e5m2m1_rm_m(vbool8_t vm, vfloat32m4_t vs2,


                                                    size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m1_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_f_f_q_f8e5m2m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.f.f.q.alt.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Masked + static rounding mode (__RISCV_FRM_RNE -> frm operand 0, policy 3).
vfloat8e5m2m2_t test_vfncvt_f_f_q_f8e5m2m2_rm_m(vbool4_t vm, vfloat32m8_t vs2,


                                                size_t vl) {


  return __riscv_vfncvt_f_f_q_f8e5m2m2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|
|
|
|
// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vfncvt_sat_f_f_q_f8e5m2m2_rm_m(
|
|
// CHECK-RV64-SAME: <vscale x 16 x i1> [[VM:%.*]], <vscale x 16 x float> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
|
|
// CHECK-RV64-NEXT: [[ENTRY:.*:]]
|
|
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vfncvt.sat.f.f.q.alt.mask.nxv16i8.nxv16f32.i64(<vscale x 16 x i8> poison, <vscale x 16 x float> [[VS2]], <vscale x 16 x i1> [[VM]], i64 0, i64 [[VL]], i64 3)
|
|
// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
|
|
//
|
|
// Masked saturating + static rounding mode (frm operand 0, policy 3).
vfloat8e5m2m2_t test_vfncvt_sat_f_f_q_f8e5m2m2_rm_m(vbool4_t vm, vfloat32m8_t vs2,


                                                    size_t vl) {


  return __riscv_vfncvt_sat_f_f_q_f8e5m2m2_rm_m(vm, vs2, __RISCV_FRM_RNE, vl);


}
|