; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v,+f,+d -target-abi=ilp32d \
; RUN:     -verify-machineinstrs < %s | FileCheck %s --check-prefix=RV32
; RUN: llc -mtriple=riscv64 -mattr=+v,+f,+d -target-abi=lp64d \
; RUN:     -verify-machineinstrs < %s | FileCheck %s --check-prefix=RV64
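; Tests lowering of the llvm.llrint.* intrinsics on fixed-length vectors.
; On RV64 each element is converted in-register with the scalar fcvt.l.s /
; fcvt.l.d instruction; on RV32 the i64 result does not fit in a single GPR,
; so each element is extracted and passed to the llrintf / llrint libcall.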

define <1 x i64> @llrint_v1i64_v1f32(<1 x float> %x) {
; RV32-LABEL: llrint_v1i64_v1f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -16
; RV32-NEXT:    .cfi_def_cfa_offset 16
; RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 0(sp)
; RV32-NEXT:    sw a1, 4(sp)
; RV32-NEXT:    mv a0, sp
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vlse64.v v8, (a0), zero
; RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 16
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v1i64_v1f32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-NEXT:    vmv.s.x v8, a0
; RV64-NEXT:    ret
  %a = call <1 x i64> @llvm.llrint.v1i64.v1f32(<1 x float> %x)
  ret <1 x i64> %a
}
declare <1 x i64> @llvm.llrint.v1i64.v1f32(<1 x float>)

define <2 x i64> @llrint_v2i64_v2f32(<2 x float> %x) {
; RV32-LABEL: llrint_v2i64_v2f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -32
; RV32-NEXT:    .cfi_def_cfa_offset 32
; RV32-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x20, 0x22, 0x11, 0x02, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 32 + 2 * vlenb
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    csrr a2, vlenb
; RV32-NEXT:    add a2, sp, a2
; RV32-NEXT:    addi a2, a2, 16
; RV32-NEXT:    vl1r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add sp, sp, a0
; RV32-NEXT:    .cfi_def_cfa sp, 32
; RV32-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 32
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v2i64_v2f32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v9
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-NEXT:    vmv.v.x v8, a0
; RV64-NEXT:    vslide1down.vx v8, v8, a1
; RV64-NEXT:    ret
  %a = call <2 x i64> @llvm.llrint.v2i64.v2f32(<2 x float> %x)
  ret <2 x i64> %a
}
declare <2 x i64> @llvm.llrint.v2i64.v2f32(<2 x float>)

define <3 x i64> @llrint_v3i64_v3f32(<3 x float> %x) {
; RV32-LABEL: llrint_v3i64_v3f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -32
; RV32-NEXT:    .cfi_def_cfa_offset 32
; RV32-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a1, a0, 1
; RV32-NEXT:    add a0, a1, a0
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x20, 0x22, 0x11, 0x03, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 32 + 3 * vlenb
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a1, a0, 1
; RV32-NEXT:    add a0, a1, a0
; RV32-NEXT:    add sp, sp, a0
; RV32-NEXT:    .cfi_def_cfa sp, 32
; RV32-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 32
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v3i64_v3f32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v9
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-NEXT:    vmv.v.x v10, a0
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    ret
  %a = call <3 x i64> @llvm.llrint.v3i64.v3f32(<3 x float> %x)
  ret <3 x i64> %a
}
declare <3 x i64> @llvm.llrint.v3i64.v3f32(<3 x float>)

define <4 x i64> @llrint_v4i64_v4f32(<4 x float> %x) {
; RV32-LABEL: llrint_v4i64_v4f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -32
; RV32-NEXT:    .cfi_def_cfa_offset 32
; RV32-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a1, a0, 1
; RV32-NEXT:    add a0, a1, a0
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x20, 0x22, 0x11, 0x03, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 32 + 3 * vlenb
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a1, a0, 1
; RV32-NEXT:    add a0, a1, a0
; RV32-NEXT:    add sp, sp, a0
; RV32-NEXT:    .cfi_def_cfa sp, 32
; RV32-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 32
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v4i64_v4f32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v9
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-NEXT:    vmv.v.x v10, a0
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    ret
  %a = call <4 x i64> @llvm.llrint.v4i64.v4f32(<4 x float> %x)
  ret <4 x i64> %a
}
declare <4 x i64> @llvm.llrint.v4i64.v4f32(<4 x float>)

define <8 x i64> @llrint_v8i64_v8f32(<8 x float> %x) {
; RV32-LABEL: llrint_v8i64_v8f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -208
; RV32-NEXT:    .cfi_def_cfa_offset 208
; RV32-NEXT:    sw ra, 204(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 200(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 208
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    andi sp, sp, -64
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 64(sp)
; RV32-NEXT:    sw a1, 68(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 7
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 120(sp)
; RV32-NEXT:    sw a1, 124(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 6
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 112(sp)
; RV32-NEXT:    sw a1, 116(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 5
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 104(sp)
; RV32-NEXT:    sw a1, 108(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 4
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 96(sp)
; RV32-NEXT:    sw a1, 100(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 88(sp)
; RV32-NEXT:    sw a1, 92(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 80(sp)
; RV32-NEXT:    sw a1, 84(sp)
; RV32-NEXT:    addi a0, sp, 192
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 72(sp)
; RV32-NEXT:    sw a1, 76(sp)
; RV32-NEXT:    addi a0, sp, 64
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vle32.v v8, (a0)
; RV32-NEXT:    addi sp, s0, -208
; RV32-NEXT:    .cfi_def_cfa sp, 208
; RV32-NEXT:    lw ra, 204(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 200(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 208
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v8i64_v8f32:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -128
; RV64-NEXT:    .cfi_def_cfa_offset 128
; RV64-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
; RV64-NEXT:    .cfi_offset ra, -8
; RV64-NEXT:    .cfi_offset s0, -16
; RV64-NEXT:    addi s0, sp, 128
; RV64-NEXT:    .cfi_def_cfa s0, 0
; RV64-NEXT:    andi sp, sp, -64
; RV64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vslidedown.vi v10, v8, 7
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 6
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 5
; RV64-NEXT:    fcvt.l.s a2, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 4
; RV64-NEXT:    fcvt.l.s a3, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 3
; RV64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-NEXT:    vslidedown.vi v8, v8, 1
; RV64-NEXT:    fcvt.l.s a4, fa5
; RV64-NEXT:    vfmv.f.s fa5, v9
; RV64-NEXT:    fcvt.l.s a5, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    fcvt.l.s a6, fa5
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    sd a4, 32(sp)
; RV64-NEXT:    sd a3, 40(sp)
; RV64-NEXT:    sd a2, 48(sp)
; RV64-NEXT:    sd a1, 56(sp)
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    sd a0, 0(sp)
; RV64-NEXT:    sd a1, 8(sp)
; RV64-NEXT:    sd a6, 16(sp)
; RV64-NEXT:    sd a5, 24(sp)
; RV64-NEXT:    mv a0, sp
; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-NEXT:    vle64.v v8, (a0)
; RV64-NEXT:    addi sp, s0, -128
; RV64-NEXT:    .cfi_def_cfa sp, 128
; RV64-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
; RV64-NEXT:    .cfi_restore ra
; RV64-NEXT:    .cfi_restore s0
; RV64-NEXT:    addi sp, sp, 128
; RV64-NEXT:    .cfi_def_cfa_offset 0
; RV64-NEXT:    ret
  %a = call <8 x i64> @llvm.llrint.v8i64.v8f32(<8 x float> %x)
  ret <8 x i64> %a
}
declare <8 x i64> @llvm.llrint.v8i64.v8f32(<8 x float>)

define <16 x i64> @llrint_v16i64_v16f32(<16 x float> %x) {
; RV32-LABEL: llrint_v16i64_v16f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -400
; RV32-NEXT:    .cfi_def_cfa_offset 400
; RV32-NEXT:    sw ra, 396(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 392(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 400
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 2
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    andi sp, sp, -128
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vs4r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    addi a0, sp, 64
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vse32.v v8, (a0)
; RV32-NEXT:    flw fa0, 124(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 248(sp)
; RV32-NEXT:    sw a1, 252(sp)
; RV32-NEXT:    flw fa0, 120(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 240(sp)
; RV32-NEXT:    sw a1, 244(sp)
; RV32-NEXT:    flw fa0, 116(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 232(sp)
; RV32-NEXT:    sw a1, 236(sp)
; RV32-NEXT:    flw fa0, 112(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 224(sp)
; RV32-NEXT:    sw a1, 228(sp)
; RV32-NEXT:    flw fa0, 108(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 216(sp)
; RV32-NEXT:    sw a1, 220(sp)
; RV32-NEXT:    flw fa0, 104(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 208(sp)
; RV32-NEXT:    sw a1, 212(sp)
; RV32-NEXT:    flw fa0, 100(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 200(sp)
; RV32-NEXT:    sw a1, 204(sp)
; RV32-NEXT:    flw fa0, 96(sp)
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 192(sp)
; RV32-NEXT:    sw a1, 196(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 128(sp)
; RV32-NEXT:    sw a1, 132(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 152(sp)
; RV32-NEXT:    sw a1, 156(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 144(sp)
; RV32-NEXT:    sw a1, 148(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 136(sp)
; RV32-NEXT:    sw a1, 140(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 7
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 184(sp)
; RV32-NEXT:    sw a1, 188(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 6
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 176(sp)
; RV32-NEXT:    sw a1, 180(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 5
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 168(sp)
; RV32-NEXT:    sw a1, 172(sp)
; RV32-NEXT:    addi a0, sp, 384
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 4
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrintf
; RV32-NEXT:    sw a0, 160(sp)
; RV32-NEXT:    sw a1, 164(sp)
; RV32-NEXT:    li a0, 32
; RV32-NEXT:    addi a1, sp, 128
; RV32-NEXT:    vsetvli zero, a0, e32, m8, ta, ma
; RV32-NEXT:    vle32.v v8, (a1)
; RV32-NEXT:    addi sp, s0, -400
; RV32-NEXT:    .cfi_def_cfa sp, 400
; RV32-NEXT:    lw ra, 396(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 392(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 400
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v16i64_v16f32:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -384
; RV64-NEXT:    .cfi_def_cfa_offset 384
; RV64-NEXT:    sd ra, 376(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 368(sp) # 8-byte Folded Spill
; RV64-NEXT:    .cfi_offset ra, -8
; RV64-NEXT:    .cfi_offset s0, -16
; RV64-NEXT:    addi s0, sp, 384
; RV64-NEXT:    .cfi_def_cfa s0, 0
; RV64-NEXT:    andi sp, sp, -128
; RV64-NEXT:    addi a0, sp, 64
; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV64-NEXT:    vse32.v v8, (a0)
; RV64-NEXT:    flw fa5, 124(sp)
; RV64-NEXT:    vfmv.f.s fa4, v8
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-NEXT:    vslidedown.vi v11, v8, 2
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    sd a0, 248(sp)
; RV64-NEXT:    flw fa5, 120(sp)
; RV64-NEXT:    vslidedown.vi v12, v8, 1
; RV64-NEXT:    fcvt.l.s a0, fa4
; RV64-NEXT:    vfmv.f.s fa4, v10
; RV64-NEXT:    fcvt.l.s a1, fa5
; RV64-NEXT:    sd a1, 240(sp)
; RV64-NEXT:    flw fa5, 116(sp)
; RV64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v14, v8, 7
; RV64-NEXT:    fcvt.l.s a1, fa4
; RV64-NEXT:    vfmv.f.s fa4, v11
; RV64-NEXT:    fcvt.l.s a2, fa5
; RV64-NEXT:    sd a2, 232(sp)
; RV64-NEXT:    flw fa5, 112(sp)
; RV64-NEXT:    fcvt.l.s a2, fa4
; RV64-NEXT:    vfmv.f.s fa4, v12
; RV64-NEXT:    vslidedown.vi v10, v8, 6
; RV64-NEXT:    fcvt.l.s a3, fa5
; RV64-NEXT:    sd a3, 224(sp)
; RV64-NEXT:    flw fa5, 108(sp)
; RV64-NEXT:    fcvt.l.s a3, fa4
; RV64-NEXT:    vfmv.f.s fa4, v14
; RV64-NEXT:    vslidedown.vi v12, v8, 5
; RV64-NEXT:    fcvt.l.s a4, fa5
; RV64-NEXT:    sd a4, 216(sp)
; RV64-NEXT:    flw fa5, 104(sp)
; RV64-NEXT:    fcvt.l.s a4, fa4
; RV64-NEXT:    vfmv.f.s fa4, v10
; RV64-NEXT:    fcvt.l.s a5, fa4
; RV64-NEXT:    fcvt.l.s a6, fa5
; RV64-NEXT:    sd a6, 208(sp)
; RV64-NEXT:    flw fa5, 100(sp)
; RV64-NEXT:    vfmv.f.s fa4, v12
; RV64-NEXT:    fcvt.l.s a6, fa4
; RV64-NEXT:    vslidedown.vi v8, v8, 4
; RV64-NEXT:    fcvt.l.s a7, fa5
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    sd a7, 200(sp)
; RV64-NEXT:    fcvt.l.s a7, fa5
; RV64-NEXT:    flw fa5, 96(sp)
; RV64-NEXT:    sd a0, 128(sp)
; RV64-NEXT:    sd a3, 136(sp)
; RV64-NEXT:    sd a2, 144(sp)
; RV64-NEXT:    sd a1, 152(sp)
; RV64-NEXT:    sd a7, 160(sp)
; RV64-NEXT:    sd a6, 168(sp)
; RV64-NEXT:    sd a5, 176(sp)
; RV64-NEXT:    sd a4, 184(sp)
; RV64-NEXT:    fcvt.l.s a0, fa5
; RV64-NEXT:    sd a0, 192(sp)
; RV64-NEXT:    addi a0, sp, 128
; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; RV64-NEXT:    vle64.v v8, (a0)
; RV64-NEXT:    addi sp, s0, -384
; RV64-NEXT:    .cfi_def_cfa sp, 384
; RV64-NEXT:    ld ra, 376(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 368(sp) # 8-byte Folded Reload
; RV64-NEXT:    .cfi_restore ra
; RV64-NEXT:    .cfi_restore s0
; RV64-NEXT:    addi sp, sp, 384
; RV64-NEXT:    .cfi_def_cfa_offset 0
; RV64-NEXT:    ret
  %a = call <16 x i64> @llvm.llrint.v16i64.v16f32(<16 x float> %x)
  ret <16 x i64> %a
}
declare <16 x i64> @llvm.llrint.v16i64.v16f32(<16 x float>)

define <1 x i64> @llrint_v1i64_v1f64(<1 x double> %x) {
; RV32-LABEL: llrint_v1i64_v1f64:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -16
; RV32-NEXT:    .cfi_def_cfa_offset 16
; RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 0(sp)
; RV32-NEXT:    sw a1, 4(sp)
; RV32-NEXT:    mv a0, sp
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vlse64.v v8, (a0), zero
; RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 16
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v1i64_v1f64:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    vmv.s.x v8, a0
; RV64-NEXT:    ret
  %a = call <1 x i64> @llvm.llrint.v1i64.v1f64(<1 x double> %x)
  ret <1 x i64> %a
}
declare <1 x i64> @llvm.llrint.v1i64.v1f64(<1 x double>)

define <2 x i64> @llrint_v2i64_v2f64(<2 x double> %x) {
; RV32-LABEL: llrint_v2i64_v2f64:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -32
; RV32-NEXT:    .cfi_def_cfa_offset 32
; RV32-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x20, 0x22, 0x11, 0x02, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 32 + 2 * vlenb
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vs1r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vl1r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    csrr a2, vlenb
; RV32-NEXT:    add a2, sp, a2
; RV32-NEXT:    addi a2, a2, 16
; RV32-NEXT:    vl1r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add sp, sp, a0
; RV32-NEXT:    .cfi_def_cfa sp, 32
; RV32-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 32
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v2i64_v2f64:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v9
; RV64-NEXT:    fcvt.l.d a1, fa5
; RV64-NEXT:    vmv.v.x v8, a0
; RV64-NEXT:    vslide1down.vx v8, v8, a1
; RV64-NEXT:    ret
  %a = call <2 x i64> @llvm.llrint.v2i64.v2f64(<2 x double> %x)
  ret <2 x i64> %a
}
declare <2 x i64> @llvm.llrint.v2i64.v2f64(<2 x double>)

7865e1d81acSRamkumar Ramachandradefine <4 x i64> @llrint_v4i64_v4f64(<4 x double> %x) {
7875e1d81acSRamkumar Ramachandra; RV32-LABEL: llrint_v4i64_v4f64:
7885e1d81acSRamkumar Ramachandra; RV32:       # %bb.0:
7895e1d81acSRamkumar Ramachandra; RV32-NEXT:    addi sp, sp, -32
7905e1d81acSRamkumar Ramachandra; RV32-NEXT:    .cfi_def_cfa_offset 32
7915e1d81acSRamkumar Ramachandra; RV32-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
7925e1d81acSRamkumar Ramachandra; RV32-NEXT:    .cfi_offset ra, -4
7935e1d81acSRamkumar Ramachandra; RV32-NEXT:    csrr a0, vlenb
7945e1d81acSRamkumar Ramachandra; RV32-NEXT:    slli a0, a0, 2
7955e1d81acSRamkumar Ramachandra; RV32-NEXT:    sub sp, sp, a0
7965e1d81acSRamkumar Ramachandra; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x20, 0x22, 0x11, 0x04, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 32 + 4 * vlenb
7975e1d81acSRamkumar Ramachandra; RV32-NEXT:    csrr a0, vlenb
7985e1d81acSRamkumar Ramachandra; RV32-NEXT:    slli a0, a0, 1
7995e1d81acSRamkumar Ramachandra; RV32-NEXT:    add a0, sp, a0
8005e1d81acSRamkumar Ramachandra; RV32-NEXT:    addi a0, a0, 16
8015e1d81acSRamkumar Ramachandra; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    addi a0, sp, 16
; RV32-NEXT:    vs2r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 1
; RV32-NEXT:    add a0, sp, a0
; RV32-NEXT:    addi a0, a0, 16
; RV32-NEXT:    vl2r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    addi a2, sp, 16
; RV32-NEXT:    vl2r.v v8, (a2) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 2
; RV32-NEXT:    add sp, sp, a0
; RV32-NEXT:    .cfi_def_cfa sp, 32
; RV32-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    addi sp, sp, 32
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v4i64_v4f64:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 1
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v12, v8, 2
; RV64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v10
; RV64-NEXT:    fcvt.l.d a1, fa5
; RV64-NEXT:    vfmv.f.s fa5, v12
; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-NEXT:    vmv.v.x v10, a0
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-NEXT:    ret
  %a = call <4 x i64> @llvm.llrint.v4i64.v4f64(<4 x double> %x)
  ret <4 x i64> %a
}
declare <4 x i64> @llvm.llrint.v4i64.v4f64(<4 x double>)
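
; Note: in the RV32 checks above there is no scalar instruction that can
; produce the full 64-bit llrint result, so each element is extracted with
; vslidedown.vi/vfmv.f.s and rounded through a libcall to llrint; the i64
; result comes back split across a0/a1 and is appended with vslide1down.vx,
; with vs2r.v/vl2r.v spills keeping the partially built vector alive across
; each call. RV64 has fcvt.l.d, so it converts every element inline with no
; calls.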

define <8 x i64> @llrint_v8i64_v8f64(<8 x double> %x) {
; RV32-LABEL: llrint_v8i64_v8f64:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -272
; RV32-NEXT:    .cfi_def_cfa_offset 272
; RV32-NEXT:    sw ra, 268(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 264(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 272
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 2
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    andi sp, sp, -64
; RV32-NEXT:    addi a0, sp, 256
; RV32-NEXT:    vs4r.v v8, (a0) # Unknown-size Folded Spill
; RV32-NEXT:    addi a0, sp, 64
; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV32-NEXT:    vse64.v v8, (a0)
; RV32-NEXT:    fld fa0, 120(sp)
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 184(sp)
; RV32-NEXT:    sw a1, 188(sp)
; RV32-NEXT:    fld fa0, 112(sp)
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 176(sp)
; RV32-NEXT:    sw a1, 180(sp)
; RV32-NEXT:    fld fa0, 104(sp)
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 168(sp)
; RV32-NEXT:    sw a1, 172(sp)
; RV32-NEXT:    fld fa0, 96(sp)
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 160(sp)
; RV32-NEXT:    sw a1, 164(sp)
; RV32-NEXT:    addi a0, sp, 256
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 128(sp)
; RV32-NEXT:    sw a1, 132(sp)
; RV32-NEXT:    addi a0, sp, 256
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 1
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 136(sp)
; RV32-NEXT:    sw a1, 140(sp)
; RV32-NEXT:    addi a0, sp, 256
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 152(sp)
; RV32-NEXT:    sw a1, 156(sp)
; RV32-NEXT:    addi a0, sp, 256
; RV32-NEXT:    vl4r.v v8, (a0) # Unknown-size Folded Reload
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 2
; RV32-NEXT:    vfmv.f.s fa0, v8
; RV32-NEXT:    call llrint
; RV32-NEXT:    sw a0, 144(sp)
; RV32-NEXT:    sw a1, 148(sp)
; RV32-NEXT:    addi a0, sp, 128
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vle32.v v8, (a0)
; RV32-NEXT:    addi sp, s0, -272
; RV32-NEXT:    .cfi_def_cfa sp, 272
; RV32-NEXT:    lw ra, 268(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 264(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 272
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: llrint_v8i64_v8f64:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -192
; RV64-NEXT:    .cfi_def_cfa_offset 192
; RV64-NEXT:    sd ra, 184(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 176(sp) # 8-byte Folded Spill
; RV64-NEXT:    .cfi_offset ra, -8
; RV64-NEXT:    .cfi_offset s0, -16
; RV64-NEXT:    addi s0, sp, 192
; RV64-NEXT:    .cfi_def_cfa s0, 0
; RV64-NEXT:    andi sp, sp, -64
; RV64-NEXT:    mv a0, sp
; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-NEXT:    vse64.v v8, (a0)
; RV64-NEXT:    fld fa5, 56(sp)
; RV64-NEXT:    vfmv.f.s fa4, v8
; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 1
; RV64-NEXT:    fcvt.l.d a0, fa4
; RV64-NEXT:    fcvt.l.d a1, fa5
; RV64-NEXT:    sd a1, 120(sp)
; RV64-NEXT:    fld fa5, 48(sp)
; RV64-NEXT:    vfmv.f.s fa4, v10
; RV64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-NEXT:    fcvt.l.d a1, fa4
; RV64-NEXT:    fcvt.l.d a2, fa5
; RV64-NEXT:    sd a2, 112(sp)
; RV64-NEXT:    fld fa5, 40(sp)
; RV64-NEXT:    vfmv.f.s fa4, v10
; RV64-NEXT:    fcvt.l.d a2, fa4
; RV64-NEXT:    vslidedown.vi v8, v8, 2
; RV64-NEXT:    fcvt.l.d a3, fa5
; RV64-NEXT:    vfmv.f.s fa5, v8
; RV64-NEXT:    sd a3, 104(sp)
; RV64-NEXT:    fcvt.l.d a3, fa5
; RV64-NEXT:    fld fa5, 32(sp)
; RV64-NEXT:    sd a0, 64(sp)
; RV64-NEXT:    sd a1, 72(sp)
; RV64-NEXT:    sd a3, 80(sp)
; RV64-NEXT:    sd a2, 88(sp)
; RV64-NEXT:    fcvt.l.d a0, fa5
; RV64-NEXT:    sd a0, 96(sp)
; RV64-NEXT:    addi a0, sp, 64
; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-NEXT:    vle64.v v8, (a0)
; RV64-NEXT:    addi sp, s0, -192
; RV64-NEXT:    .cfi_def_cfa sp, 192
; RV64-NEXT:    ld ra, 184(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 176(sp) # 8-byte Folded Reload
; RV64-NEXT:    .cfi_restore ra
; RV64-NEXT:    .cfi_restore s0
; RV64-NEXT:    addi sp, sp, 192
; RV64-NEXT:    .cfi_def_cfa_offset 0
; RV64-NEXT:    ret
  %a = call <8 x i64> @llvm.llrint.v8i64.v8f64(<8 x double> %x)
  ret <8 x i64> %a
}
declare <8 x i64> @llvm.llrint.v8i64.v8f64(<8 x double>)
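
; Note: for this m4-sized case both targets stage the source vector through
; a 64-byte-aligned stack buffer (andi sp, sp, -64 followed by vse64.v);
; the upper elements are read back with fld while the lower ones are still
; extracted with vslidedown.vi/vfmv.f.s. RV32 again needs a llrint libcall
; per element, RV64 uses fcvt.l.d; either way the i64 results are written
; to a scratch area and reloaded as a single vector with vle32.v/vle64.v.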