; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc -mtriple=riscv32 -mattr=+d \
; RUN:   -verify-machineinstrs -target-abi=ilp32d < %s \
; RUN:   | FileCheck -check-prefixes=CHECK,RV32IFD %s
; RUN: llc -mtriple=riscv64 -mattr=+d \
; RUN:   -verify-machineinstrs -target-abi=lp64d < %s \
; RUN:   | FileCheck -check-prefixes=CHECK,RV64IFD %s

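; exp10 has no native RISC-V instruction, so the checks below expect each
; vector to be scalarized into per-element libcalls: exp10f / exp10 for float
; and double elements, with half elements additionally routed through the
; __extendhfsf2 / __truncsfhf2 conversion helpers.
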
declare <1 x half> @llvm.exp10.v1f16(<1 x half>)
declare <2 x half> @llvm.exp10.v2f16(<2 x half>)
declare <3 x half> @llvm.exp10.v3f16(<3 x half>)
declare <4 x half> @llvm.exp10.v4f16(<4 x half>)
declare <1 x float> @llvm.exp10.v1f32(<1 x float>)
declare <2 x float> @llvm.exp10.v2f32(<2 x float>)
declare <3 x float> @llvm.exp10.v3f32(<3 x float>)
declare <4 x float> @llvm.exp10.v4f32(<4 x float>)
declare <1 x double> @llvm.exp10.v1f64(<1 x double>)
declare <2 x double> @llvm.exp10.v2f64(<2 x double>)
declare <3 x double> @llvm.exp10.v3f64(<3 x double>)
declare <4 x double> @llvm.exp10.v4f64(<4 x double>)

define <1 x half> @exp10_v1f16(<1 x half> %x) {
; RV32IFD-LABEL: exp10_v1f16:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 16
; RV32IFD-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    fmv.w.x fa0, a0
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w a0, fa0
; RV32IFD-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v1f16:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -16
; RV64IFD-NEXT:    .cfi_def_cfa_offset 16
; RV64IFD-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    fmv.w.x fa0, a0
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    addi sp, sp, 16
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <1 x half> @llvm.exp10.v1f16(<1 x half> %x)
  ret <1 x half> %r
}

define <2 x half> @exp10_v2f16(<2 x half> %x) {
; RV32IFD-LABEL: exp10_v2f16:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 16
; RV32IFD-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 8(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 0(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    fmv.w.x fs0, a1
; RV32IFD-NEXT:    fmv.w.x fa0, a0
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w s0, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w a1, fa0
; RV32IFD-NEXT:    mv a0, s0
; RV32IFD-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 8(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 0(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v2f16:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 32
; RV64IFD-NEXT:    sd ra, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s1, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset s1, -24
; RV64IFD-NEXT:    mv s0, a1
; RV64IFD-NEXT:    fmv.w.x fa0, a0
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w s1, fa0
; RV64IFD-NEXT:    fmv.w.x fa0, s0
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w a1, fa0
; RV64IFD-NEXT:    mv a0, s1
; RV64IFD-NEXT:    ld ra, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s1, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore s1
; RV64IFD-NEXT:    addi sp, sp, 32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <2 x half> @llvm.exp10.v2f16(<2 x half> %x)
  ret <2 x half> %r
}

define <3 x half> @exp10_v3f16(<3 x half> %x) {
; RV32IFD-LABEL: exp10_v3f16:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 48
; RV32IFD-NEXT:    sw ra, 44(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 40(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s1, 36(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 24(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset s1, -12
; RV32IFD-NEXT:    .cfi_offset fs0, -24
; RV32IFD-NEXT:    .cfi_offset fs1, -32
; RV32IFD-NEXT:    .cfi_offset fs2, -40
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    lhu a0, 8(a1)
; RV32IFD-NEXT:    lhu a2, 0(a1)
; RV32IFD-NEXT:    lhu a1, 4(a1)
; RV32IFD-NEXT:    fmv.w.x fs0, a0
; RV32IFD-NEXT:    fmv.w.x fs1, a2
; RV32IFD-NEXT:    fmv.w.x fa0, a1
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.s fs2, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs1
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.x.w a0, fs2
; RV32IFD-NEXT:    slli s1, a0, 16
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w a0, fa0
; RV32IFD-NEXT:    slli a0, a0, 16
; RV32IFD-NEXT:    srli a0, a0, 16
; RV32IFD-NEXT:    or s1, a0, s1
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w a0, fa0
; RV32IFD-NEXT:    sw s1, 0(s0)
; RV32IFD-NEXT:    sh a0, 4(s0)
; RV32IFD-NEXT:    lw ra, 44(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 40(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s1, 36(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 24(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore s1
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    addi sp, sp, 48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v3f16:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 48
; RV64IFD-NEXT:    sd ra, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s1, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s2, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset s1, -24
; RV64IFD-NEXT:    .cfi_offset s2, -32
; RV64IFD-NEXT:    .cfi_offset fs0, -40
; RV64IFD-NEXT:    lhu s1, 0(a1)
; RV64IFD-NEXT:    lhu a2, 8(a1)
; RV64IFD-NEXT:    lhu s2, 16(a1)
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    fmv.w.x fa0, a2
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.s fs0, fa0
; RV64IFD-NEXT:    fmv.w.x fa0, s1
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.x.w a0, fs0
; RV64IFD-NEXT:    slli s1, a0, 16
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    slli a0, a0, 48
; RV64IFD-NEXT:    srli a0, a0, 48
; RV64IFD-NEXT:    or s1, a0, s1
; RV64IFD-NEXT:    fmv.w.x fa0, s2
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    sw s1, 0(s0)
; RV64IFD-NEXT:    sh a0, 4(s0)
; RV64IFD-NEXT:    ld ra, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s1, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s2, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore s1
; RV64IFD-NEXT:    .cfi_restore s2
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    addi sp, sp, 48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <3 x half> @llvm.exp10.v3f16(<3 x half> %x)
  ret <3 x half> %r
}

define <4 x half> @exp10_v4f16(<4 x half> %x) {
; RV32IFD-LABEL: exp10_v4f16:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -64
; RV32IFD-NEXT:    .cfi_def_cfa_offset 64
; RV32IFD-NEXT:    sw ra, 60(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 56(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s1, 52(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s2, 48(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s3, 44(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 32(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 24(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs3, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset s1, -12
; RV32IFD-NEXT:    .cfi_offset s2, -16
; RV32IFD-NEXT:    .cfi_offset s3, -20
; RV32IFD-NEXT:    .cfi_offset fs0, -32
; RV32IFD-NEXT:    .cfi_offset fs1, -40
; RV32IFD-NEXT:    .cfi_offset fs2, -48
; RV32IFD-NEXT:    .cfi_offset fs3, -56
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    lhu a0, 12(a1)
; RV32IFD-NEXT:    lhu a2, 0(a1)
; RV32IFD-NEXT:    lhu a3, 4(a1)
; RV32IFD-NEXT:    lhu a1, 8(a1)
; RV32IFD-NEXT:    fmv.w.x fs0, a0
; RV32IFD-NEXT:    fmv.w.x fs1, a2
; RV32IFD-NEXT:    fmv.w.x fs2, a3
; RV32IFD-NEXT:    fmv.w.x fa0, a1
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.s fs3, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs2
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.s fs2, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs1
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.s fs1, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call __extendhfsf2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.x.w s1, fs1
; RV32IFD-NEXT:    fmv.x.w s2, fs2
; RV32IFD-NEXT:    fmv.x.w s3, fs3
; RV32IFD-NEXT:    call __truncsfhf2
; RV32IFD-NEXT:    fmv.x.w a0, fa0
; RV32IFD-NEXT:    sh s1, 0(s0)
; RV32IFD-NEXT:    sh s2, 2(s0)
; RV32IFD-NEXT:    sh s3, 4(s0)
; RV32IFD-NEXT:    sh a0, 6(s0)
; RV32IFD-NEXT:    lw ra, 60(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 56(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s1, 52(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s2, 48(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s3, 44(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 32(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 24(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs3, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore s1
; RV32IFD-NEXT:    .cfi_restore s2
; RV32IFD-NEXT:    .cfi_restore s3
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    .cfi_restore fs3
; RV32IFD-NEXT:    addi sp, sp, 64
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v4f16:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -64
; RV64IFD-NEXT:    .cfi_def_cfa_offset 64
; RV64IFD-NEXT:    sd ra, 56(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 48(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s1, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s2, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s3, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs2, 0(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset s1, -24
; RV64IFD-NEXT:    .cfi_offset s2, -32
; RV64IFD-NEXT:    .cfi_offset s3, -40
; RV64IFD-NEXT:    .cfi_offset fs0, -48
; RV64IFD-NEXT:    .cfi_offset fs1, -56
; RV64IFD-NEXT:    .cfi_offset fs2, -64
; RV64IFD-NEXT:    lhu s1, 0(a1)
; RV64IFD-NEXT:    lhu s2, 8(a1)
; RV64IFD-NEXT:    lhu a2, 16(a1)
; RV64IFD-NEXT:    lhu s3, 24(a1)
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    fmv.w.x fa0, a2
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.s fs0, fa0
; RV64IFD-NEXT:    fmv.w.x fa0, s2
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.s fs1, fa0
; RV64IFD-NEXT:    fmv.w.x fa0, s1
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.s fs2, fa0
; RV64IFD-NEXT:    fmv.w.x fa0, s3
; RV64IFD-NEXT:    call __extendhfsf2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.x.w s1, fs2
; RV64IFD-NEXT:    fmv.x.w s2, fs1
; RV64IFD-NEXT:    fmv.x.w s3, fs0
; RV64IFD-NEXT:    call __truncsfhf2
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    sh s1, 0(s0)
; RV64IFD-NEXT:    sh s2, 2(s0)
; RV64IFD-NEXT:    sh s3, 4(s0)
; RV64IFD-NEXT:    sh a0, 6(s0)
; RV64IFD-NEXT:    ld ra, 56(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 48(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s1, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s2, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s3, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs2, 0(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore s1
; RV64IFD-NEXT:    .cfi_restore s2
; RV64IFD-NEXT:    .cfi_restore s3
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    .cfi_restore fs2
; RV64IFD-NEXT:    addi sp, sp, 64
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <4 x half> @llvm.exp10.v4f16(<4 x half> %x)
  ret <4 x half> %r
}

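; For the float and double element types below, no conversion helpers are
; needed: each element is passed directly to the exp10f / exp10 libcalls.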
define <1 x float> @exp10_v1f32(<1 x float> %x) {
; RV32IFD-LABEL: exp10_v1f32:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 16
; RV32IFD-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    addi sp, sp, 16
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v1f32:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -16
; RV64IFD-NEXT:    .cfi_def_cfa_offset 16
; RV64IFD-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    addi sp, sp, 16
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <1 x float> @llvm.exp10.v1f32(<1 x float> %x)
  ret <1 x float> %r
}

define <2 x float> @exp10_v2f32(<2 x float> %x) {
; RV32IFD-LABEL: exp10_v2f32:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 32
; RV32IFD-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    fmv.s fs0, fa1
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs1, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fa1, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs1
; RV32IFD-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    addi sp, sp, 32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v2f32:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 32
; RV64IFD-NEXT:    sd ra, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset fs0, -16
; RV64IFD-NEXT:    .cfi_offset fs1, -24
; RV64IFD-NEXT:    fmv.s fs0, fa1
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.s fs1, fa0
; RV64IFD-NEXT:    fmv.s fa0, fs0
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.s fa1, fa0
; RV64IFD-NEXT:    fmv.s fa0, fs1
; RV64IFD-NEXT:    ld ra, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    addi sp, sp, 32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <2 x float> @llvm.exp10.v2f32(<2 x float> %x)
  ret <2 x float> %r
}

define <3 x float> @exp10_v3f32(<3 x float> %x) {
; RV32IFD-LABEL: exp10_v3f32:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 32
; RV32IFD-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 24(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 0(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    .cfi_offset fs2, -32
; RV32IFD-NEXT:    fmv.s fs0, fa2
; RV32IFD-NEXT:    fmv.s fs1, fa1
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs2, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs1
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs1, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fsw fs2, 0(s0)
; RV32IFD-NEXT:    fsw fs1, 4(s0)
; RV32IFD-NEXT:    fsw fa0, 8(s0)
; RV32IFD-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 24(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 0(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    addi sp, sp, 32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v3f32:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 48
; RV64IFD-NEXT:    sd ra, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s1, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset s1, -24
; RV64IFD-NEXT:    .cfi_offset fs0, -32
; RV64IFD-NEXT:    .cfi_offset fs1, -40
; RV64IFD-NEXT:    fmv.s fs0, fa2
; RV64IFD-NEXT:    fmv.s fs1, fa0
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    fmv.s fa0, fa1
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    slli s1, a0, 32
; RV64IFD-NEXT:    fmv.s fa0, fs1
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.x.w a0, fa0
; RV64IFD-NEXT:    slli a0, a0, 32
; RV64IFD-NEXT:    srli a0, a0, 32
; RV64IFD-NEXT:    or s1, a0, s1
; RV64IFD-NEXT:    fmv.s fa0, fs0
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    sd s1, 0(s0)
; RV64IFD-NEXT:    fsw fa0, 8(s0)
; RV64IFD-NEXT:    ld ra, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s1, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore s1
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    addi sp, sp, 48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <3 x float> @llvm.exp10.v3f32(<3 x float> %x)
  ret <3 x float> %r
}

define <4 x float> @exp10_v4f32(<4 x float> %x) {
; RV32IFD-LABEL: exp10_v4f32:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 48
; RV32IFD-NEXT:    sw ra, 44(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 40(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 32(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 24(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs3, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    .cfi_offset fs2, -32
; RV32IFD-NEXT:    .cfi_offset fs3, -40
; RV32IFD-NEXT:    fmv.s fs0, fa3
; RV32IFD-NEXT:    fmv.s fs1, fa2
; RV32IFD-NEXT:    fmv.s fs2, fa1
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs3, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs2
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs2, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs1
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fmv.s fs1, fa0
; RV32IFD-NEXT:    fmv.s fa0, fs0
; RV32IFD-NEXT:    call exp10f
; RV32IFD-NEXT:    fsw fs3, 0(s0)
; RV32IFD-NEXT:    fsw fs2, 4(s0)
; RV32IFD-NEXT:    fsw fs1, 8(s0)
; RV32IFD-NEXT:    fsw fa0, 12(s0)
; RV32IFD-NEXT:    lw ra, 44(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 40(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 32(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 24(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs3, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    .cfi_restore fs3
; RV32IFD-NEXT:    addi sp, sp, 48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v4f32:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 48
; RV64IFD-NEXT:    sd ra, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs2, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs3, 0(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset fs0, -24
; RV64IFD-NEXT:    .cfi_offset fs1, -32
; RV64IFD-NEXT:    .cfi_offset fs2, -40
; RV64IFD-NEXT:    .cfi_offset fs3, -48
; RV64IFD-NEXT:    fmv.s fs0, fa3
; RV64IFD-NEXT:    fmv.s fs1, fa2
; RV64IFD-NEXT:    fmv.s fs2, fa1
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.s fs3, fa0
; RV64IFD-NEXT:    fmv.s fa0, fs2
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.s fs2, fa0
; RV64IFD-NEXT:    fmv.s fa0, fs1
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fmv.s fs1, fa0
; RV64IFD-NEXT:    fmv.s fa0, fs0
; RV64IFD-NEXT:    call exp10f
; RV64IFD-NEXT:    fsw fs3, 0(s0)
; RV64IFD-NEXT:    fsw fs2, 4(s0)
; RV64IFD-NEXT:    fsw fs1, 8(s0)
; RV64IFD-NEXT:    fsw fa0, 12(s0)
; RV64IFD-NEXT:    ld ra, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs2, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs3, 0(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    .cfi_restore fs2
; RV64IFD-NEXT:    .cfi_restore fs3
; RV64IFD-NEXT:    addi sp, sp, 48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <4 x float> @llvm.exp10.v4f32(<4 x float> %x)
  ret <4 x float> %r
}

; FIXME: Broken
; define <1 x double> @exp10_v1f64(<1 x double> %x) {
;   %r = call <1 x double> @llvm.exp10.v1f64(<1 x double> %x)
;   ret <1 x double> %r
; }

define <2 x double> @exp10_v2f64(<2 x double> %x) {
; RV32IFD-LABEL: exp10_v2f64:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 32
; RV32IFD-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    fmv.d fs0, fa1
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs1, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs0
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fa1, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs1
; RV32IFD-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    addi sp, sp, 32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v2f64:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 32
; RV64IFD-NEXT:    sd ra, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset fs0, -16
; RV64IFD-NEXT:    .cfi_offset fs1, -24
; RV64IFD-NEXT:    fmv.d fs0, fa1
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs1, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs0
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fa1, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs1
; RV64IFD-NEXT:    ld ra, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    addi sp, sp, 32
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <2 x double> @llvm.exp10.v2f64(<2 x double> %x)
  ret <2 x double> %r
}

define <3 x double> @exp10_v3f64(<3 x double> %x) {
; RV32IFD-LABEL: exp10_v3f64:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 32
; RV32IFD-NEXT:    sw ra, 28(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 24(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 0(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    .cfi_offset fs2, -32
; RV32IFD-NEXT:    fmv.d fs0, fa2
; RV32IFD-NEXT:    fmv.d fs1, fa1
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs2, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs1
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs1, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs0
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fsd fs2, 0(s0)
; RV32IFD-NEXT:    fsd fs1, 8(s0)
; RV32IFD-NEXT:    fsd fa0, 16(s0)
; RV32IFD-NEXT:    lw ra, 28(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 24(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 0(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    addi sp, sp, 32
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v3f64:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 48
; RV64IFD-NEXT:    sd ra, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs2, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset fs0, -24
; RV64IFD-NEXT:    .cfi_offset fs1, -32
; RV64IFD-NEXT:    .cfi_offset fs2, -40
; RV64IFD-NEXT:    fmv.d fs0, fa2
; RV64IFD-NEXT:    fmv.d fs1, fa1
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs2, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs1
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs1, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs0
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fsd fs2, 0(s0)
; RV64IFD-NEXT:    fsd fs1, 8(s0)
; RV64IFD-NEXT:    fsd fa0, 16(s0)
; RV64IFD-NEXT:    ld ra, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs2, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    .cfi_restore fs2
; RV64IFD-NEXT:    addi sp, sp, 48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <3 x double> @llvm.exp10.v3f64(<3 x double> %x)
  ret <3 x double> %r
}

define <4 x double> @exp10_v4f64(<4 x double> %x) {
; RV32IFD-LABEL: exp10_v4f64:
; RV32IFD:       # %bb.0:
; RV32IFD-NEXT:    addi sp, sp, -48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 48
; RV32IFD-NEXT:    sw ra, 44(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    sw s0, 40(sp) # 4-byte Folded Spill
; RV32IFD-NEXT:    fsd fs0, 32(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs1, 24(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs2, 16(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    fsd fs3, 8(sp) # 8-byte Folded Spill
; RV32IFD-NEXT:    .cfi_offset ra, -4
; RV32IFD-NEXT:    .cfi_offset s0, -8
; RV32IFD-NEXT:    .cfi_offset fs0, -16
; RV32IFD-NEXT:    .cfi_offset fs1, -24
; RV32IFD-NEXT:    .cfi_offset fs2, -32
; RV32IFD-NEXT:    .cfi_offset fs3, -40
; RV32IFD-NEXT:    fmv.d fs0, fa3
; RV32IFD-NEXT:    fmv.d fs1, fa2
; RV32IFD-NEXT:    fmv.d fs2, fa1
; RV32IFD-NEXT:    mv s0, a0
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs3, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs2
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs2, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs1
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fmv.d fs1, fa0
; RV32IFD-NEXT:    fmv.d fa0, fs0
; RV32IFD-NEXT:    call exp10
; RV32IFD-NEXT:    fsd fs3, 0(s0)
; RV32IFD-NEXT:    fsd fs2, 8(s0)
; RV32IFD-NEXT:    fsd fs1, 16(s0)
; RV32IFD-NEXT:    fsd fa0, 24(s0)
; RV32IFD-NEXT:    lw ra, 44(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    lw s0, 40(sp) # 4-byte Folded Reload
; RV32IFD-NEXT:    fld fs0, 32(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs1, 24(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs2, 16(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    fld fs3, 8(sp) # 8-byte Folded Reload
; RV32IFD-NEXT:    .cfi_restore ra
; RV32IFD-NEXT:    .cfi_restore s0
; RV32IFD-NEXT:    .cfi_restore fs0
; RV32IFD-NEXT:    .cfi_restore fs1
; RV32IFD-NEXT:    .cfi_restore fs2
; RV32IFD-NEXT:    .cfi_restore fs3
; RV32IFD-NEXT:    addi sp, sp, 48
; RV32IFD-NEXT:    .cfi_def_cfa_offset 0
; RV32IFD-NEXT:    ret
;
; RV64IFD-LABEL: exp10_v4f64:
; RV64IFD:       # %bb.0:
; RV64IFD-NEXT:    addi sp, sp, -48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 48
; RV64IFD-NEXT:    sd ra, 40(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    sd s0, 32(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs0, 24(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs1, 16(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs2, 8(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    fsd fs3, 0(sp) # 8-byte Folded Spill
; RV64IFD-NEXT:    .cfi_offset ra, -8
; RV64IFD-NEXT:    .cfi_offset s0, -16
; RV64IFD-NEXT:    .cfi_offset fs0, -24
; RV64IFD-NEXT:    .cfi_offset fs1, -32
; RV64IFD-NEXT:    .cfi_offset fs2, -40
; RV64IFD-NEXT:    .cfi_offset fs3, -48
; RV64IFD-NEXT:    fmv.d fs0, fa3
; RV64IFD-NEXT:    fmv.d fs1, fa2
; RV64IFD-NEXT:    fmv.d fs2, fa1
; RV64IFD-NEXT:    mv s0, a0
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs3, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs2
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs2, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs1
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fmv.d fs1, fa0
; RV64IFD-NEXT:    fmv.d fa0, fs0
; RV64IFD-NEXT:    call exp10
; RV64IFD-NEXT:    fsd fs3, 0(s0)
; RV64IFD-NEXT:    fsd fs2, 8(s0)
; RV64IFD-NEXT:    fsd fs1, 16(s0)
; RV64IFD-NEXT:    fsd fa0, 24(s0)
; RV64IFD-NEXT:    ld ra, 40(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    ld s0, 32(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs0, 24(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs1, 16(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs2, 8(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    fld fs3, 0(sp) # 8-byte Folded Reload
; RV64IFD-NEXT:    .cfi_restore ra
; RV64IFD-NEXT:    .cfi_restore s0
; RV64IFD-NEXT:    .cfi_restore fs0
; RV64IFD-NEXT:    .cfi_restore fs1
; RV64IFD-NEXT:    .cfi_restore fs2
; RV64IFD-NEXT:    .cfi_restore fs3
; RV64IFD-NEXT:    addi sp, sp, 48
; RV64IFD-NEXT:    .cfi_def_cfa_offset 0
; RV64IFD-NEXT:    ret
  %r = call <4 x double> @llvm.exp10.v4f64(<4 x double> %x)
  ret <4 x double> %r
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; CHECK: {{.*}}