; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+f,+d \
; RUN:     -target-abi=ilp32d -verify-machineinstrs | FileCheck %s --check-prefix=RV32
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv64 -mattr=+v,+f,+d \
; RUN:     -target-abi=lp64d -verify-machineinstrs | FileCheck %s --check-prefix=RV64-i32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+f,+d \
; RUN:     -target-abi=lp64d -verify-machineinstrs | FileCheck %s --check-prefix=RV64-i64
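; iXLen below is not a real IR type: each RUN line substitutes it with i32 or
; i64 via sed, so a single test body covers XLEN-sized lrint results on both
; RV32 and RV64, plus i32 results on RV64 (the RV64-i32 prefix).
;
; llvm.lrint rounds to integer using the current rounding mode. The checks
; below expect a scalarized lowering: each lane is read out with
; vslidedown.vi/vfmv.f.s, converted with scalar fcvt.w.s or fcvt.l.s (which
; default to dynamic rounding when no rounding mode is written, matching
; lrint), and the results are reassembled with vmv.v.x/vslide1down.vx or, for
; the widest cases, through a stack buffer.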
define <1 x iXLen> @lrint_v1f32(<1 x float> %x) {
; RV32-LABEL: lrint_v1f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vmv.s.x v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v1f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vmv.s.x v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v1f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-i64-NEXT:    vmv.s.x v8, a0
; RV64-i64-NEXT:    ret
  %a = call <1 x iXLen> @llvm.lrint.v1iXLen.v1f32(<1 x float> %x)
  ret <1 x iXLen> %a
}
declare <1 x iXLen> @llvm.lrint.v1iXLen.v1f32(<1 x float>)

define <2 x iXLen> @lrint_v2f32(<2 x float> %x) {
; RV32-LABEL: lrint_v2f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v9
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v2f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v9
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vmv.v.x v8, a0
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a1
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v2f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v9
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-i64-NEXT:    vmv.v.x v8, a0
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a1
; RV64-i64-NEXT:    ret
  %a = call <2 x iXLen> @llvm.lrint.v2iXLen.v2f32(<2 x float> %x)
  ret <2 x iXLen> %a
}
declare <2 x iXLen> @llvm.lrint.v2iXLen.v2f32(<2 x float>)

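; <3 x iXLen> is widened to the next power of two during legalization, so
; lrint_v3f32 is expected to match the same sequences as lrint_v4f32 below.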
define <3 x iXLen> @lrint_v3f32(<3 x float> %x) {
; RV32-LABEL: lrint_v3f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslidedown.vi v10, v8, 2
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v9
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v10
; RV32-NEXT:    vmv.v.x v9, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslide1down.vx v8, v9, a1
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v3f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 2
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v9
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v10
; RV64-i32-NEXT:    vmv.v.x v9, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslide1down.vx v8, v9, a1
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v3f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v9
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-i64-NEXT:    vmv.v.x v10, a0
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-i64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    ret
  %a = call <3 x iXLen> @llvm.lrint.v3iXLen.v3f32(<3 x float> %x)
  ret <3 x iXLen> %a
}
declare <3 x iXLen> @llvm.lrint.v3iXLen.v3f32(<3 x float>)

define <4 x iXLen> @lrint_v4f32(<4 x float> %x) {
; RV32-LABEL: lrint_v4f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslidedown.vi v10, v8, 2
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v9
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v10
; RV32-NEXT:    vmv.v.x v9, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslide1down.vx v8, v9, a1
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v4f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 2
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v9
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v10
; RV64-i32-NEXT:    vmv.v.x v9, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslide1down.vx v8, v9, a1
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v4f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v9
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-i64-NEXT:    vmv.v.x v10, a0
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-i64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    ret
  %a = call <4 x iXLen> @llvm.lrint.v4iXLen.v4f32(<4 x float> %x)
  ret <4 x iXLen> %a
}
declare <4 x iXLen> @llvm.lrint.v4iXLen.v4f32(<4 x float>)

define <8 x iXLen> @lrint_v8f32(<8 x float> %x) {
; RV32-LABEL: lrint_v8f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslidedown.vi v11, v8, 2
; RV32-NEXT:    vslidedown.vi v12, v8, 3
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v10
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v11
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vmv.v.x v10, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 4
; RV32-NEXT:    vslide1down.vx v10, v10, a1
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 5
; RV32-NEXT:    vslide1down.vx v10, v10, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 6
; RV32-NEXT:    vslidedown.vi v8, v8, 7
; RV32-NEXT:    vslide1down.vx v10, v10, a1
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v12
; RV32-NEXT:    vslide1down.vx v10, v10, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vslide1down.vx v8, v10, a1
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v8f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslidedown.vi v11, v8, 2
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 3
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v10
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v11
; RV64-i32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV64-i32-NEXT:    vmv.v.x v10, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v12
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 4
; RV64-i32-NEXT:    vslide1down.vx v10, v10, a1
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v12
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 5
; RV64-i32-NEXT:    vslide1down.vx v10, v10, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v12
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 6
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 7
; RV64-i32-NEXT:    vslide1down.vx v10, v10, a1
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v12
; RV64-i32-NEXT:    vslide1down.vx v10, v10, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vslide1down.vx v8, v10, a1
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v8f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    addi sp, sp, -128
; RV64-i64-NEXT:    .cfi_def_cfa_offset 128
; RV64-i64-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    .cfi_offset ra, -8
; RV64-i64-NEXT:    .cfi_offset s0, -16
; RV64-i64-NEXT:    addi s0, sp, 128
; RV64-i64-NEXT:    .cfi_def_cfa s0, 0
; RV64-i64-NEXT:    andi sp, sp, -64
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 7
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 6
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 5
; RV64-i64-NEXT:    fcvt.l.s a2, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 4
; RV64-i64-NEXT:    fcvt.l.s a3, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v9, v8, 3
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 1
; RV64-i64-NEXT:    fcvt.l.s a4, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v9
; RV64-i64-NEXT:    fcvt.l.s a5, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    fcvt.l.s a6, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    sd a4, 32(sp)
; RV64-i64-NEXT:    sd a3, 40(sp)
; RV64-i64-NEXT:    sd a2, 48(sp)
; RV64-i64-NEXT:    sd a1, 56(sp)
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    sd a0, 0(sp)
; RV64-i64-NEXT:    sd a1, 8(sp)
; RV64-i64-NEXT:    sd a6, 16(sp)
; RV64-i64-NEXT:    sd a5, 24(sp)
; RV64-i64-NEXT:    mv a0, sp
; RV64-i64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-i64-NEXT:    vle64.v v8, (a0)
; RV64-i64-NEXT:    addi sp, s0, -128
; RV64-i64-NEXT:    .cfi_def_cfa sp, 128
; RV64-i64-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    .cfi_restore ra
; RV64-i64-NEXT:    .cfi_restore s0
; RV64-i64-NEXT:    addi sp, sp, 128
; RV64-i64-NEXT:    .cfi_def_cfa_offset 0
; RV64-i64-NEXT:    ret
  %a = call <8 x iXLen> @llvm.lrint.v8iXLen.v8f32(<8 x float> %x)
  ret <8 x iXLen> %a
}
declare <8 x iXLen> @llvm.lrint.v8iXLen.v8f32(<8 x float>)

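; For the widest cases the scalarized lowering goes through memory, as in the
; RV64-i64 block of lrint_v8f32 above: the source (or the integer results) is
; spilled to an aligned stack buffer, lanes are read either with
; vslidedown/vfmv.f.s or with scalar loads from the spill slot, each lane is
; converted with scalar fcvt, and the result vector is reloaded with
; vle32.v/vle64.v.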
define <16 x iXLen> @lrint_v16f32(<16 x float> %x) {
; RV32-LABEL: lrint_v16f32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -192
; RV32-NEXT:    .cfi_def_cfa_offset 192
; RV32-NEXT:    sw ra, 188(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 184(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 192
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    andi sp, sp, -64
; RV32-NEXT:    mv a0, sp
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vse32.v v8, (a0)
; RV32-NEXT:    flw fa5, 60(sp)
; RV32-NEXT:    vfmv.f.s fa4, v8
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 3
; RV32-NEXT:    vslidedown.vi v11, v8, 2
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    sw a0, 124(sp)
; RV32-NEXT:    flw fa5, 56(sp)
; RV32-NEXT:    fcvt.w.s a0, fa4
; RV32-NEXT:    vfmv.f.s fa4, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 1
; RV32-NEXT:    fcvt.w.s a1, fa5
; RV32-NEXT:    sw a1, 120(sp)
; RV32-NEXT:    flw fa5, 52(sp)
; RV32-NEXT:    fcvt.w.s a1, fa4
; RV32-NEXT:    vfmv.f.s fa4, v11
; RV32-NEXT:    fcvt.w.s a2, fa4
; RV32-NEXT:    fcvt.w.s a3, fa5
; RV32-NEXT:    sw a3, 116(sp)
; RV32-NEXT:    flw fa5, 48(sp)
; RV32-NEXT:    vfmv.f.s fa4, v10
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 7
; RV32-NEXT:    fcvt.w.s a3, fa4
; RV32-NEXT:    fcvt.w.s a4, fa5
; RV32-NEXT:    sw a4, 112(sp)
; RV32-NEXT:    flw fa5, 44(sp)
; RV32-NEXT:    vfmv.f.s fa4, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 6
; RV32-NEXT:    fcvt.w.s a4, fa4
; RV32-NEXT:    fcvt.w.s a5, fa5
; RV32-NEXT:    sw a5, 108(sp)
; RV32-NEXT:    flw fa5, 40(sp)
; RV32-NEXT:    vfmv.f.s fa4, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 5
; RV32-NEXT:    fcvt.w.s a5, fa4
; RV32-NEXT:    fcvt.w.s a6, fa5
; RV32-NEXT:    sw a6, 104(sp)
; RV32-NEXT:    flw fa5, 36(sp)
; RV32-NEXT:    vfmv.f.s fa4, v10
; RV32-NEXT:    fcvt.w.s a6, fa4
; RV32-NEXT:    vslidedown.vi v8, v8, 4
; RV32-NEXT:    fcvt.w.s a7, fa5
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    sw a7, 100(sp)
; RV32-NEXT:    fcvt.w.s a7, fa5
; RV32-NEXT:    flw fa5, 32(sp)
; RV32-NEXT:    sw a0, 64(sp)
; RV32-NEXT:    sw a3, 68(sp)
; RV32-NEXT:    sw a2, 72(sp)
; RV32-NEXT:    sw a1, 76(sp)
; RV32-NEXT:    sw a7, 80(sp)
; RV32-NEXT:    sw a6, 84(sp)
; RV32-NEXT:    sw a5, 88(sp)
; RV32-NEXT:    sw a4, 92(sp)
; RV32-NEXT:    fcvt.w.s a0, fa5
; RV32-NEXT:    sw a0, 96(sp)
; RV32-NEXT:    addi a0, sp, 64
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vle32.v v8, (a0)
; RV32-NEXT:    addi sp, s0, -192
; RV32-NEXT:    .cfi_def_cfa sp, 192
; RV32-NEXT:    lw ra, 188(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 184(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 192
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v16f32:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    addi sp, sp, -192
; RV64-i32-NEXT:    .cfi_def_cfa_offset 192
; RV64-i32-NEXT:    sd ra, 184(sp) # 8-byte Folded Spill
; RV64-i32-NEXT:    sd s0, 176(sp) # 8-byte Folded Spill
; RV64-i32-NEXT:    .cfi_offset ra, -8
; RV64-i32-NEXT:    .cfi_offset s0, -16
; RV64-i32-NEXT:    addi s0, sp, 192
; RV64-i32-NEXT:    .cfi_def_cfa s0, 0
; RV64-i32-NEXT:    andi sp, sp, -64
; RV64-i32-NEXT:    mv a0, sp
; RV64-i32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV64-i32-NEXT:    vse32.v v8, (a0)
; RV64-i32-NEXT:    flw fa5, 60(sp)
; RV64-i32-NEXT:    vfmv.f.s fa4, v8
; RV64-i32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 3
; RV64-i32-NEXT:    vslidedown.vi v11, v8, 2
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    sw a0, 124(sp)
; RV64-i32-NEXT:    flw fa5, 56(sp)
; RV64-i32-NEXT:    fcvt.l.s a0, fa4
; RV64-i32-NEXT:    vfmv.f.s fa4, v10
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 1
; RV64-i32-NEXT:    fcvt.l.s a1, fa5
; RV64-i32-NEXT:    sw a1, 120(sp)
; RV64-i32-NEXT:    flw fa5, 52(sp)
; RV64-i32-NEXT:    fcvt.l.s a1, fa4
; RV64-i32-NEXT:    vfmv.f.s fa4, v11
; RV64-i32-NEXT:    fcvt.l.s a2, fa4
; RV64-i32-NEXT:    fcvt.l.s a3, fa5
; RV64-i32-NEXT:    sw a3, 116(sp)
; RV64-i32-NEXT:    flw fa5, 48(sp)
; RV64-i32-NEXT:    vfmv.f.s fa4, v10
; RV64-i32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 7
; RV64-i32-NEXT:    fcvt.l.s a3, fa4
; RV64-i32-NEXT:    fcvt.l.s a4, fa5
; RV64-i32-NEXT:    sw a4, 112(sp)
; RV64-i32-NEXT:    flw fa5, 44(sp)
; RV64-i32-NEXT:    vfmv.f.s fa4, v10
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 6
; RV64-i32-NEXT:    fcvt.l.s a4, fa4
; RV64-i32-NEXT:    fcvt.l.s a5, fa5
; RV64-i32-NEXT:    sw a5, 108(sp)
; RV64-i32-NEXT:    flw fa5, 40(sp)
; RV64-i32-NEXT:    vfmv.f.s fa4, v10
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 5
; RV64-i32-NEXT:    fcvt.l.s a5, fa4
; RV64-i32-NEXT:    fcvt.l.s a6, fa5
; RV64-i32-NEXT:    sw a6, 104(sp)
; RV64-i32-NEXT:    flw fa5, 36(sp)
; RV64-i32-NEXT:    vfmv.f.s fa4, v10
; RV64-i32-NEXT:    fcvt.l.s a6, fa4
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 4
; RV64-i32-NEXT:    fcvt.l.s a7, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    sw a7, 100(sp)
; RV64-i32-NEXT:    fcvt.l.s a7, fa5
; RV64-i32-NEXT:    flw fa5, 32(sp)
; RV64-i32-NEXT:    sw a0, 64(sp)
; RV64-i32-NEXT:    sw a3, 68(sp)
; RV64-i32-NEXT:    sw a2, 72(sp)
; RV64-i32-NEXT:    sw a1, 76(sp)
; RV64-i32-NEXT:    sw a7, 80(sp)
; RV64-i32-NEXT:    sw a6, 84(sp)
; RV64-i32-NEXT:    sw a5, 88(sp)
; RV64-i32-NEXT:    sw a4, 92(sp)
; RV64-i32-NEXT:    fcvt.l.s a0, fa5
; RV64-i32-NEXT:    sw a0, 96(sp)
; RV64-i32-NEXT:    addi a0, sp, 64
; RV64-i32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV64-i32-NEXT:    vle32.v v8, (a0)
; RV64-i32-NEXT:    addi sp, s0, -192
; RV64-i32-NEXT:    .cfi_def_cfa sp, 192
; RV64-i32-NEXT:    ld ra, 184(sp) # 8-byte Folded Reload
; RV64-i32-NEXT:    ld s0, 176(sp) # 8-byte Folded Reload
; RV64-i32-NEXT:    .cfi_restore ra
; RV64-i32-NEXT:    .cfi_restore s0
; RV64-i32-NEXT:    addi sp, sp, 192
; RV64-i32-NEXT:    .cfi_def_cfa_offset 0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v16f32:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    addi sp, sp, -384
; RV64-i64-NEXT:    .cfi_def_cfa_offset 384
; RV64-i64-NEXT:    sd ra, 376(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    sd s0, 368(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    .cfi_offset ra, -8
; RV64-i64-NEXT:    .cfi_offset s0, -16
; RV64-i64-NEXT:    addi s0, sp, 384
; RV64-i64-NEXT:    .cfi_def_cfa s0, 0
; RV64-i64-NEXT:    andi sp, sp, -128
; RV64-i64-NEXT:    addi a0, sp, 64
; RV64-i64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV64-i64-NEXT:    vse32.v v8, (a0)
; RV64-i64-NEXT:    flw fa5, 124(sp)
; RV64-i64-NEXT:    vfmv.f.s fa4, v8
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-i64-NEXT:    vslidedown.vi v11, v8, 2
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    sd a0, 248(sp)
; RV64-i64-NEXT:    flw fa5, 120(sp)
; RV64-i64-NEXT:    vslidedown.vi v12, v8, 1
; RV64-i64-NEXT:    fcvt.l.s a0, fa4
; RV64-i64-NEXT:    vfmv.f.s fa4, v10
; RV64-i64-NEXT:    fcvt.l.s a1, fa5
; RV64-i64-NEXT:    sd a1, 240(sp)
; RV64-i64-NEXT:    flw fa5, 116(sp)
; RV64-i64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v14, v8, 7
; RV64-i64-NEXT:    fcvt.l.s a1, fa4
; RV64-i64-NEXT:    vfmv.f.s fa4, v11
; RV64-i64-NEXT:    fcvt.l.s a2, fa5
; RV64-i64-NEXT:    sd a2, 232(sp)
; RV64-i64-NEXT:    flw fa5, 112(sp)
; RV64-i64-NEXT:    fcvt.l.s a2, fa4
; RV64-i64-NEXT:    vfmv.f.s fa4, v12
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 6
; RV64-i64-NEXT:    fcvt.l.s a3, fa5
; RV64-i64-NEXT:    sd a3, 224(sp)
; RV64-i64-NEXT:    flw fa5, 108(sp)
; RV64-i64-NEXT:    fcvt.l.s a3, fa4
; RV64-i64-NEXT:    vfmv.f.s fa4, v14
; RV64-i64-NEXT:    vslidedown.vi v12, v8, 5
; RV64-i64-NEXT:    fcvt.l.s a4, fa5
; RV64-i64-NEXT:    sd a4, 216(sp)
; RV64-i64-NEXT:    flw fa5, 104(sp)
; RV64-i64-NEXT:    fcvt.l.s a4, fa4
; RV64-i64-NEXT:    vfmv.f.s fa4, v10
; RV64-i64-NEXT:    fcvt.l.s a5, fa4
; RV64-i64-NEXT:    fcvt.l.s a6, fa5
; RV64-i64-NEXT:    sd a6, 208(sp)
; RV64-i64-NEXT:    flw fa5, 100(sp)
; RV64-i64-NEXT:    vfmv.f.s fa4, v12
; RV64-i64-NEXT:    fcvt.l.s a6, fa4
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 4
; RV64-i64-NEXT:    fcvt.l.s a7, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    sd a7, 200(sp)
; RV64-i64-NEXT:    fcvt.l.s a7, fa5
; RV64-i64-NEXT:    flw fa5, 96(sp)
; RV64-i64-NEXT:    sd a0, 128(sp)
; RV64-i64-NEXT:    sd a3, 136(sp)
; RV64-i64-NEXT:    sd a2, 144(sp)
; RV64-i64-NEXT:    sd a1, 152(sp)
; RV64-i64-NEXT:    sd a7, 160(sp)
; RV64-i64-NEXT:    sd a6, 168(sp)
; RV64-i64-NEXT:    sd a5, 176(sp)
; RV64-i64-NEXT:    sd a4, 184(sp)
; RV64-i64-NEXT:    fcvt.l.s a0, fa5
; RV64-i64-NEXT:    sd a0, 192(sp)
; RV64-i64-NEXT:    addi a0, sp, 128
; RV64-i64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; RV64-i64-NEXT:    vle64.v v8, (a0)
; RV64-i64-NEXT:    addi sp, s0, -384
; RV64-i64-NEXT:    .cfi_def_cfa sp, 384
; RV64-i64-NEXT:    ld ra, 376(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    ld s0, 368(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    .cfi_restore ra
; RV64-i64-NEXT:    .cfi_restore s0
; RV64-i64-NEXT:    addi sp, sp, 384
; RV64-i64-NEXT:    .cfi_def_cfa_offset 0
; RV64-i64-NEXT:    ret
  %a = call <16 x iXLen> @llvm.lrint.v16iXLen.v16f32(<16 x float> %x)
  ret <16 x iXLen> %a
}
declare <16 x iXLen> @llvm.lrint.v16iXLen.v16f32(<16 x float>)

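; The f64 cases below mirror the f32 ones, using fcvt.w.d/fcvt.l.d. Note that
; on RV32 an lrint of double still produces an XLEN-wide (i32) element, so
; the result vector has narrower elements than the source.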
define <1 x iXLen> @lrint_v1f64(<1 x double> %x) {
; RV32-LABEL: lrint_v1f64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vmv.s.x v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v1f64:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vmv.s.x v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v1f64:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    vmv.s.x v8, a0
; RV64-i64-NEXT:    ret
  %a = call <1 x iXLen> @llvm.lrint.v1iXLen.v1f64(<1 x double> %x)
  ret <1 x iXLen> %a
}
declare <1 x iXLen> @llvm.lrint.v1iXLen.v1f64(<1 x double>)

define <2 x iXLen> @lrint_v2f64(<2 x double> %x) {
; RV32-LABEL: lrint_v2f64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v9
; RV32-NEXT:    fcvt.w.d a1, fa5
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v2f64:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v9
; RV64-i32-NEXT:    fcvt.l.d a1, fa5
; RV64-i32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV64-i32-NEXT:    vmv.v.x v8, a0
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a1
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v2f64:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v9, v8, 1
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v9
; RV64-i64-NEXT:    fcvt.l.d a1, fa5
; RV64-i64-NEXT:    vmv.v.x v8, a0
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a1
; RV64-i64-NEXT:    ret
  %a = call <2 x iXLen> @llvm.lrint.v2iXLen.v2f64(<2 x double> %x)
  ret <2 x iXLen> %a
}
declare <2 x iXLen> @llvm.lrint.v2iXLen.v2f64(<2 x double>)

define <4 x iXLen> @lrint_v4f64(<4 x double> %x) {
; RV32-LABEL: lrint_v4f64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v12, v8, 2
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v10
; RV32-NEXT:    fcvt.w.d a1, fa5
; RV32-NEXT:    vfmv.f.s fa5, v12
; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV32-NEXT:    vmv.v.x v9, a0
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV32-NEXT:    vslide1down.vx v8, v9, a1
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v4f64:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v10, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 2
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v10
; RV64-i32-NEXT:    fcvt.l.d a1, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v12
; RV64-i32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; RV64-i32-NEXT:    vmv.v.x v9, a0
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vsetvli zero, zero, e32, m1, ta, ma
; RV64-i32-NEXT:    vslide1down.vx v8, v9, a1
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v4f64:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 1
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v12, v8, 2
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v10
; RV64-i64-NEXT:    fcvt.l.d a1, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v12
; RV64-i64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; RV64-i64-NEXT:    vmv.v.x v10, a0
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    vslide1down.vx v8, v10, a1
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i64-NEXT:    ret
  %a = call <4 x iXLen> @llvm.lrint.v4iXLen.v4f64(<4 x double> %x)
  ret <4 x iXLen> %a
}
declare <4 x iXLen> @llvm.lrint.v4iXLen.v4f64(<4 x double>)

define <8 x iXLen> @lrint_v8f64(<8 x double> %x) {
; RV32-LABEL: lrint_v8f64:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -128
; RV32-NEXT:    .cfi_def_cfa_offset 128
; RV32-NEXT:    sw ra, 124(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 120(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 128
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    andi sp, sp, -64
; RV32-NEXT:    mv a0, sp
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v12, v8, 1
; RV32-NEXT:    vfmv.f.s fa5, v8
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v14, v8, 2
; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV32-NEXT:    vse64.v v8, (a0)
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vfmv.f.s fa4, v12
; RV32-NEXT:    fcvt.w.d a0, fa5
; RV32-NEXT:    vfmv.f.s fa5, v14
; RV32-NEXT:    vfmv.f.s fa3, v8
; RV32-NEXT:    fcvt.w.d a1, fa4
; RV32-NEXT:    fcvt.w.d a2, fa5
; RV32-NEXT:    fcvt.w.d a3, fa3
; RV32-NEXT:    fld fa5, 32(sp)
; RV32-NEXT:    fld fa4, 40(sp)
; RV32-NEXT:    fld fa3, 48(sp)
; RV32-NEXT:    fld fa2, 56(sp)
; RV32-NEXT:    fcvt.w.d a4, fa5
; RV32-NEXT:    fcvt.w.d a5, fa4
; RV32-NEXT:    fcvt.w.d a6, fa3
; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV32-NEXT:    vmv.v.x v8, a0
; RV32-NEXT:    vslide1down.vx v8, v8, a1
; RV32-NEXT:    vslide1down.vx v8, v8, a2
; RV32-NEXT:    vslide1down.vx v8, v8, a3
; RV32-NEXT:    vslide1down.vx v8, v8, a4
; RV32-NEXT:    vslide1down.vx v8, v8, a5
; RV32-NEXT:    vslide1down.vx v8, v8, a6
; RV32-NEXT:    fcvt.w.d a0, fa2
; RV32-NEXT:    vslide1down.vx v8, v8, a0
; RV32-NEXT:    addi sp, s0, -128
; RV32-NEXT:    .cfi_def_cfa sp, 128
; RV32-NEXT:    lw ra, 124(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 120(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 128
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-i32-LABEL: lrint_v8f64:
; RV64-i32:       # %bb.0:
; RV64-i32-NEXT:    addi sp, sp, -128
; RV64-i32-NEXT:    .cfi_def_cfa_offset 128
; RV64-i32-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
; RV64-i32-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
; RV64-i32-NEXT:    .cfi_offset ra, -8
; RV64-i32-NEXT:    .cfi_offset s0, -16
; RV64-i32-NEXT:    addi s0, sp, 128
; RV64-i32-NEXT:    .cfi_def_cfa s0, 0
; RV64-i32-NEXT:    andi sp, sp, -64
; RV64-i32-NEXT:    mv a0, sp
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v12, v8, 1
; RV64-i32-NEXT:    vfmv.f.s fa5, v8
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v14, v8, 2
; RV64-i32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-i32-NEXT:    vse64.v v8, (a0)
; RV64-i32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-i32-NEXT:    vslidedown.vi v8, v8, 3
; RV64-i32-NEXT:    vfmv.f.s fa4, v12
; RV64-i32-NEXT:    fcvt.l.d a0, fa5
; RV64-i32-NEXT:    vfmv.f.s fa5, v14
; RV64-i32-NEXT:    vfmv.f.s fa3, v8
; RV64-i32-NEXT:    fcvt.l.d a1, fa4
; RV64-i32-NEXT:    fcvt.l.d a2, fa5
; RV64-i32-NEXT:    fcvt.l.d a3, fa3
; RV64-i32-NEXT:    fld fa5, 32(sp)
; RV64-i32-NEXT:    fld fa4, 40(sp)
; RV64-i32-NEXT:    fld fa3, 48(sp)
; RV64-i32-NEXT:    fld fa2, 56(sp)
; RV64-i32-NEXT:    fcvt.l.d a4, fa5
; RV64-i32-NEXT:    fcvt.l.d a5, fa4
; RV64-i32-NEXT:    fcvt.l.d a6, fa3
; RV64-i32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; RV64-i32-NEXT:    vmv.v.x v8, a0
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a1
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a2
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a3
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a4
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a5
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a6
; RV64-i32-NEXT:    fcvt.l.d a0, fa2
; RV64-i32-NEXT:    vslide1down.vx v8, v8, a0
; RV64-i32-NEXT:    addi sp, s0, -128
; RV64-i32-NEXT:    .cfi_def_cfa sp, 128
; RV64-i32-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
; RV64-i32-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
; RV64-i32-NEXT:    .cfi_restore ra
; RV64-i32-NEXT:    .cfi_restore s0
; RV64-i32-NEXT:    addi sp, sp, 128
; RV64-i32-NEXT:    .cfi_def_cfa_offset 0
; RV64-i32-NEXT:    ret
;
; RV64-i64-LABEL: lrint_v8f64:
; RV64-i64:       # %bb.0:
; RV64-i64-NEXT:    addi sp, sp, -192
; RV64-i64-NEXT:    .cfi_def_cfa_offset 192
; RV64-i64-NEXT:    sd ra, 184(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    sd s0, 176(sp) # 8-byte Folded Spill
; RV64-i64-NEXT:    .cfi_offset ra, -8
; RV64-i64-NEXT:    .cfi_offset s0, -16
; RV64-i64-NEXT:    addi s0, sp, 192
; RV64-i64-NEXT:    .cfi_def_cfa s0, 0
; RV64-i64-NEXT:    andi sp, sp, -64
; RV64-i64-NEXT:    mv a0, sp
; RV64-i64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-i64-NEXT:    vse64.v v8, (a0)
; RV64-i64-NEXT:    fld fa5, 56(sp)
; RV64-i64-NEXT:    vfmv.f.s fa4, v8
; RV64-i64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 1
; RV64-i64-NEXT:    fcvt.l.d a0, fa4
; RV64-i64-NEXT:    fcvt.l.d a1, fa5
; RV64-i64-NEXT:    sd a1, 120(sp)
; RV64-i64-NEXT:    fld fa5, 48(sp)
; RV64-i64-NEXT:    vfmv.f.s fa4, v10
; RV64-i64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-i64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-i64-NEXT:    fcvt.l.d a1, fa4
; RV64-i64-NEXT:    fcvt.l.d a2, fa5
; RV64-i64-NEXT:    sd a2, 112(sp)
; RV64-i64-NEXT:    fld fa5, 40(sp)
; RV64-i64-NEXT:    vfmv.f.s fa4, v10
; RV64-i64-NEXT:    fcvt.l.d a2, fa4
; RV64-i64-NEXT:    vslidedown.vi v8, v8, 2
; RV64-i64-NEXT:    fcvt.l.d a3, fa5
; RV64-i64-NEXT:    vfmv.f.s fa5, v8
; RV64-i64-NEXT:    sd a3, 104(sp)
; RV64-i64-NEXT:    fcvt.l.d a3, fa5
; RV64-i64-NEXT:    fld fa5, 32(sp)
; RV64-i64-NEXT:    sd a0, 64(sp)
; RV64-i64-NEXT:    sd a1, 72(sp)
; RV64-i64-NEXT:    sd a3, 80(sp)
; RV64-i64-NEXT:    sd a2, 88(sp)
; RV64-i64-NEXT:    fcvt.l.d a0, fa5
; RV64-i64-NEXT:    sd a0, 96(sp)
; RV64-i64-NEXT:    addi a0, sp, 64
; RV64-i64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-i64-NEXT:    vle64.v v8, (a0)
; RV64-i64-NEXT:    addi sp, s0, -192
; RV64-i64-NEXT:    .cfi_def_cfa sp, 192
; RV64-i64-NEXT:    ld ra, 184(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    ld s0, 176(sp) # 8-byte Folded Reload
; RV64-i64-NEXT:    .cfi_restore ra
; RV64-i64-NEXT:    .cfi_restore s0
; RV64-i64-NEXT:    addi sp, sp, 192
; RV64-i64-NEXT:    .cfi_def_cfa_offset 0
; RV64-i64-NEXT:    ret
  %a = call <8 x iXLen> @llvm.lrint.v8iXLen.v8f64(<8 x double> %x)
  ret <8 x iXLen> %a
}
declare <8 x iXLen> @llvm.lrint.v8iXLen.v8f64(<8 x double>)