; xref: /llvm-project/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-int-explodevector.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v,+m -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV32
; RUN: llc -mtriple=riscv64 -mattr=+v,+m -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV64
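; These "explode" tests extract every element of a fixed-length vector and
; combine the results in scalar code: an xor of element 0 and element 1
; followed by a chain of adds. As the autogenerated checks below show, the
; leading xor is lowered to a vredxor.vs over a two-element prefix, while the
; remaining elements are extracted with vslidedown.vi/vmv.x.s (or spilled to
; the stack and reloaded for the larger types) and accumulated with scalar
; add instructions.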

define i8 @explode_2xi8(<2 x i8> %v) {
; CHECK-LABEL: explode_2xi8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %e0 = extractelement <2 x i8> %v, i32 0
  %e1 = extractelement <2 x i8> %v, i32 1
  %add0 = xor i8 %e0, %e1
  ret i8 %add0
}

define i8 @explode_4xi8(<4 x i8> %v) {
; CHECK-LABEL: explode_4xi8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf4, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 3
; CHECK-NEXT:    vmv.x.s a1, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a2, v8
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a0, a2, a0
; CHECK-NEXT:    ret
  %e0 = extractelement <4 x i8> %v, i32 0
  %e1 = extractelement <4 x i8> %v, i32 1
  %e2 = extractelement <4 x i8> %v, i32 2
  %e3 = extractelement <4 x i8> %v, i32 3
  %add0 = xor i8 %e0, %e1
  %add1 = add i8 %add0, %e2
  %add2 = add i8 %add1, %e3
  ret i8 %add2
}


define i8 @explode_8xi8(<8 x i8> %v) {
; CHECK-LABEL: explode_8xi8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 3
; CHECK-NEXT:    vmv.x.s a1, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 4
; CHECK-NEXT:    vmv.x.s a2, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 5
; CHECK-NEXT:    vmv.x.s a3, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 6
; CHECK-NEXT:    vmv.x.s a4, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 7
; CHECK-NEXT:    vmv.x.s a5, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a6, v8
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a2, a2, a3
; CHECK-NEXT:    add a0, a6, a0
; CHECK-NEXT:    add a2, a2, a4
; CHECK-NEXT:    add a0, a0, a2
; CHECK-NEXT:    add a0, a0, a5
; CHECK-NEXT:    ret
  %e0 = extractelement <8 x i8> %v, i32 0
  %e1 = extractelement <8 x i8> %v, i32 1
  %e2 = extractelement <8 x i8> %v, i32 2
  %e3 = extractelement <8 x i8> %v, i32 3
  %e4 = extractelement <8 x i8> %v, i32 4
  %e5 = extractelement <8 x i8> %v, i32 5
  %e6 = extractelement <8 x i8> %v, i32 6
  %e7 = extractelement <8 x i8> %v, i32 7
  %add0 = xor i8 %e0, %e1
  %add1 = add i8 %add0, %e2
  %add2 = add i8 %add1, %e3
  %add3 = add i8 %add2, %e4
  %add4 = add i8 %add3, %e5
  %add5 = add i8 %add4, %e6
  %add6 = add i8 %add5, %e7
  ret i8 %add6
}

define i8 @explode_16xi8(<16 x i8> %v) {
; CHECK-LABEL: explode_16xi8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 3
; CHECK-NEXT:    vmv.x.s a1, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 4
; CHECK-NEXT:    vmv.x.s a2, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 5
; CHECK-NEXT:    vmv.x.s a3, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 6
; CHECK-NEXT:    vmv.x.s a4, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 7
; CHECK-NEXT:    vmv.x.s a5, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 8
; CHECK-NEXT:    vmv.x.s a6, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 9
; CHECK-NEXT:    vmv.x.s a7, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 10
; CHECK-NEXT:    vmv.x.s t0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 11
; CHECK-NEXT:    vmv.x.s t1, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 12
; CHECK-NEXT:    vmv.x.s t2, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 13
; CHECK-NEXT:    vmv.x.s t3, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 14
; CHECK-NEXT:    vmv.x.s t4, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 15
; CHECK-NEXT:    vmv.x.s t5, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s t6, v8
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a2, a2, a3
; CHECK-NEXT:    add a5, a5, a6
; CHECK-NEXT:    add t1, t1, t2
; CHECK-NEXT:    add a0, t6, a0
; CHECK-NEXT:    add a2, a2, a4
; CHECK-NEXT:    add a5, a5, a7
; CHECK-NEXT:    add t1, t1, t3
; CHECK-NEXT:    add a0, a0, a2
; CHECK-NEXT:    add a5, a5, t0
; CHECK-NEXT:    add t1, t1, t4
; CHECK-NEXT:    add a0, a0, a5
; CHECK-NEXT:    add t1, t1, t5
; CHECK-NEXT:    add a0, a0, t1
; CHECK-NEXT:    ret
  %e0 = extractelement <16 x i8> %v, i32 0
  %e1 = extractelement <16 x i8> %v, i32 1
  %e2 = extractelement <16 x i8> %v, i32 2
  %e3 = extractelement <16 x i8> %v, i32 3
  %e4 = extractelement <16 x i8> %v, i32 4
  %e5 = extractelement <16 x i8> %v, i32 5
  %e6 = extractelement <16 x i8> %v, i32 6
  %e7 = extractelement <16 x i8> %v, i32 7
  %e8 = extractelement <16 x i8> %v, i32 8
  %e9 = extractelement <16 x i8> %v, i32 9
  %e10 = extractelement <16 x i8> %v, i32 10
  %e11 = extractelement <16 x i8> %v, i32 11
  %e12 = extractelement <16 x i8> %v, i32 12
  %e13 = extractelement <16 x i8> %v, i32 13
  %e14 = extractelement <16 x i8> %v, i32 14
  %e15 = extractelement <16 x i8> %v, i32 15
  %add0 = xor i8 %e0, %e1
  %add1 = add i8 %add0, %e2
  %add2 = add i8 %add1, %e3
  %add3 = add i8 %add2, %e4
  %add4 = add i8 %add3, %e5
  %add5 = add i8 %add4, %e6
  %add6 = add i8 %add5, %e7
  %add7 = add i8 %add6, %e8
  %add8 = add i8 %add7, %e9
  %add9 = add i8 %add8, %e10
  %add10 = add i8 %add9, %e11
  %add11 = add i8 %add10, %e12
  %add12 = add i8 %add11, %e13
  %add13 = add i8 %add12, %e14
  %add14 = add i8 %add13, %e15
  ret i8 %add14
}

define i16 @explode_2xi16(<2 x i16> %v) {
; CHECK-LABEL: explode_2xi16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %e0 = extractelement <2 x i16> %v, i32 0
  %e1 = extractelement <2 x i16> %v, i32 1
  %add0 = xor i16 %e0, %e1
  ret i16 %add0
}

define i16 @explode_4xi16(<4 x i16> %v) {
; CHECK-LABEL: explode_4xi16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 3
; CHECK-NEXT:    vmv.x.s a1, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a2, v8
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a0, a2, a0
; CHECK-NEXT:    ret
  %e0 = extractelement <4 x i16> %v, i32 0
  %e1 = extractelement <4 x i16> %v, i32 1
  %e2 = extractelement <4 x i16> %v, i32 2
  %e3 = extractelement <4 x i16> %v, i32 3
  %add0 = xor i16 %e0, %e1
  %add1 = add i16 %add0, %e2
  %add2 = add i16 %add1, %e3
  ret i16 %add2
}


define i16 @explode_8xi16(<8 x i16> %v) {
; CHECK-LABEL: explode_8xi16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 3
; CHECK-NEXT:    vmv.x.s a1, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 4
; CHECK-NEXT:    vmv.x.s a2, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 5
; CHECK-NEXT:    vmv.x.s a3, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 6
; CHECK-NEXT:    vmv.x.s a4, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 7
; CHECK-NEXT:    vmv.x.s a5, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a6, v8
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a2, a2, a3
; CHECK-NEXT:    add a0, a6, a0
; CHECK-NEXT:    add a2, a2, a4
; CHECK-NEXT:    add a0, a0, a2
; CHECK-NEXT:    add a0, a0, a5
; CHECK-NEXT:    ret
  %e0 = extractelement <8 x i16> %v, i32 0
  %e1 = extractelement <8 x i16> %v, i32 1
  %e2 = extractelement <8 x i16> %v, i32 2
  %e3 = extractelement <8 x i16> %v, i32 3
  %e4 = extractelement <8 x i16> %v, i32 4
  %e5 = extractelement <8 x i16> %v, i32 5
  %e6 = extractelement <8 x i16> %v, i32 6
  %e7 = extractelement <8 x i16> %v, i32 7
  %add0 = xor i16 %e0, %e1
  %add1 = add i16 %add0, %e2
  %add2 = add i16 %add1, %e3
  %add3 = add i16 %add2, %e4
  %add4 = add i16 %add3, %e5
  %add5 = add i16 %add4, %e6
  %add6 = add i16 %add5, %e7
  ret i16 %add6
}

define i16 @explode_16xi16(<16 x i16> %v) {
; CHECK-LABEL: explode_16xi16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v10, v8, 8
; CHECK-NEXT:    vmv.x.s a0, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 9
; CHECK-NEXT:    vmv.x.s a1, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 10
; CHECK-NEXT:    vmv.x.s a2, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 11
; CHECK-NEXT:    vmv.x.s a3, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 12
; CHECK-NEXT:    vmv.x.s a4, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 13
; CHECK-NEXT:    vmv.x.s a5, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 14
; CHECK-NEXT:    vmv.x.s a6, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 15
; CHECK-NEXT:    vmv.x.s a7, v10
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v9, v8, 2
; CHECK-NEXT:    vslidedown.vi v10, v8, 3
; CHECK-NEXT:    vmv.x.s t0, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 4
; CHECK-NEXT:    vmv.x.s t1, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 5
; CHECK-NEXT:    vmv.x.s t2, v9
; CHECK-NEXT:    vslidedown.vi v9, v8, 6
; CHECK-NEXT:    vmv.x.s t3, v10
; CHECK-NEXT:    vslidedown.vi v10, v8, 7
; CHECK-NEXT:    vmv.x.s t4, v9
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vmv.x.s t5, v10
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s t6, v8
; CHECK-NEXT:    add t0, t0, t1
; CHECK-NEXT:    add t2, t2, t3
; CHECK-NEXT:    add a0, t5, a0
; CHECK-NEXT:    add a3, a3, a4
; CHECK-NEXT:    add t0, t6, t0
; CHECK-NEXT:    add t2, t2, t4
; CHECK-NEXT:    add a0, a0, a1
; CHECK-NEXT:    add a3, a3, a5
; CHECK-NEXT:    add t0, t0, t2
; CHECK-NEXT:    add a0, a0, a2
; CHECK-NEXT:    add a3, a3, a6
; CHECK-NEXT:    add a0, t0, a0
; CHECK-NEXT:    add a3, a3, a7
; CHECK-NEXT:    add a0, a0, a3
; CHECK-NEXT:    ret
  %e0 = extractelement <16 x i16> %v, i32 0
  %e1 = extractelement <16 x i16> %v, i32 1
  %e2 = extractelement <16 x i16> %v, i32 2
  %e3 = extractelement <16 x i16> %v, i32 3
  %e4 = extractelement <16 x i16> %v, i32 4
  %e5 = extractelement <16 x i16> %v, i32 5
  %e6 = extractelement <16 x i16> %v, i32 6
  %e7 = extractelement <16 x i16> %v, i32 7
  %e8 = extractelement <16 x i16> %v, i32 8
  %e9 = extractelement <16 x i16> %v, i32 9
  %e10 = extractelement <16 x i16> %v, i32 10
  %e11 = extractelement <16 x i16> %v, i32 11
  %e12 = extractelement <16 x i16> %v, i32 12
  %e13 = extractelement <16 x i16> %v, i32 13
  %e14 = extractelement <16 x i16> %v, i32 14
  %e15 = extractelement <16 x i16> %v, i32 15
  %add0 = xor i16 %e0, %e1
  %add1 = add i16 %add0, %e2
  %add2 = add i16 %add1, %e3
  %add3 = add i16 %add2, %e4
  %add4 = add i16 %add3, %e5
  %add5 = add i16 %add4, %e6
  %add6 = add i16 %add5, %e7
  %add7 = add i16 %add6, %e8
  %add8 = add i16 %add7, %e9
  %add9 = add i16 %add8, %e10
  %add10 = add i16 %add9, %e11
  %add11 = add i16 %add10, %e12
  %add12 = add i16 %add11, %e13
  %add13 = add i16 %add12, %e14
  %add14 = add i16 %add13, %e15
  ret i16 %add14
}

define i32 @explode_2xi32(<2 x i32> %v) {
; CHECK-LABEL: explode_2xi32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vmv.s.x v9, zero
; CHECK-NEXT:    vredxor.vs v8, v8, v9
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %e0 = extractelement <2 x i32> %v, i32 0
  %e1 = extractelement <2 x i32> %v, i32 1
  %add0 = xor i32 %e0, %e1
  ret i32 %add0
}

define i32 @explode_4xi32(<4 x i32> %v) {
; RV32-LABEL: explode_4xi32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 2
; RV32-NEXT:    vmv.x.s a0, v9
; RV32-NEXT:    vslidedown.vi v9, v8, 3
; RV32-NEXT:    vmv.x.s a1, v9
; RV32-NEXT:    vmv.s.x v9, zero
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vredxor.vs v8, v8, v9
; RV32-NEXT:    vmv.x.s a2, v8
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a0, a2, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_4xi32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 2
; RV64-NEXT:    vmv.x.s a0, v9
; RV64-NEXT:    vslidedown.vi v9, v8, 3
; RV64-NEXT:    vmv.x.s a1, v9
; RV64-NEXT:    vmv.s.x v9, zero
; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV64-NEXT:    vredxor.vs v8, v8, v9
; RV64-NEXT:    vmv.x.s a2, v8
; RV64-NEXT:    add a0, a0, a1
; RV64-NEXT:    addw a0, a2, a0
; RV64-NEXT:    ret
  %e0 = extractelement <4 x i32> %v, i32 0
  %e1 = extractelement <4 x i32> %v, i32 1
  %e2 = extractelement <4 x i32> %v, i32 2
  %e3 = extractelement <4 x i32> %v, i32 3
  %add0 = xor i32 %e0, %e1
  %add1 = add i32 %add0, %e2
  %add2 = add i32 %add1, %e3
  ret i32 %add2
}


define i32 @explode_8xi32(<8 x i32> %v) {
; RV32-LABEL: explode_8xi32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 4
; RV32-NEXT:    vmv.x.s a0, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 5
; RV32-NEXT:    vmv.x.s a1, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 6
; RV32-NEXT:    vmv.x.s a2, v10
; RV32-NEXT:    vslidedown.vi v10, v8, 7
; RV32-NEXT:    vmv.x.s a3, v10
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v9, v8, 2
; RV32-NEXT:    vslidedown.vi v10, v8, 3
; RV32-NEXT:    vmv.x.s a4, v9
; RV32-NEXT:    vmv.s.x v9, zero
; RV32-NEXT:    vmv.x.s a5, v10
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vredxor.vs v8, v8, v9
; RV32-NEXT:    vmv.x.s a6, v8
; RV32-NEXT:    add a4, a4, a5
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a4, a6, a4
; RV32-NEXT:    add a0, a0, a2
; RV32-NEXT:    add a0, a4, a0
; RV32-NEXT:    add a0, a0, a3
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_8xi32:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 4
; RV64-NEXT:    vmv.x.s a0, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 5
; RV64-NEXT:    vmv.x.s a1, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 6
; RV64-NEXT:    vmv.x.s a2, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 7
; RV64-NEXT:    vmv.x.s a3, v10
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v9, v8, 2
; RV64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-NEXT:    vmv.x.s a4, v9
; RV64-NEXT:    vmv.s.x v9, zero
; RV64-NEXT:    vmv.x.s a5, v10
; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV64-NEXT:    vredxor.vs v8, v8, v9
; RV64-NEXT:    vmv.x.s a6, v8
; RV64-NEXT:    add a4, a4, a5
; RV64-NEXT:    add a0, a0, a1
; RV64-NEXT:    add a4, a6, a4
; RV64-NEXT:    add a0, a0, a2
; RV64-NEXT:    add a0, a4, a0
; RV64-NEXT:    addw a0, a0, a3
; RV64-NEXT:    ret
  %e0 = extractelement <8 x i32> %v, i32 0
  %e1 = extractelement <8 x i32> %v, i32 1
  %e2 = extractelement <8 x i32> %v, i32 2
  %e3 = extractelement <8 x i32> %v, i32 3
  %e4 = extractelement <8 x i32> %v, i32 4
  %e5 = extractelement <8 x i32> %v, i32 5
  %e6 = extractelement <8 x i32> %v, i32 6
  %e7 = extractelement <8 x i32> %v, i32 7
  %add0 = xor i32 %e0, %e1
  %add1 = add i32 %add0, %e2
  %add2 = add i32 %add1, %e3
  %add3 = add i32 %add2, %e4
  %add4 = add i32 %add3, %e5
  %add5 = add i32 %add4, %e6
  %add6 = add i32 %add5, %e7
  ret i32 %add6
}

define i32 @explode_16xi32(<16 x i32> %v) {
; RV32-LABEL: explode_16xi32:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -128
; RV32-NEXT:    .cfi_def_cfa_offset 128
; RV32-NEXT:    sw ra, 124(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s0, 120(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset ra, -4
; RV32-NEXT:    .cfi_offset s0, -8
; RV32-NEXT:    addi s0, sp, 128
; RV32-NEXT:    .cfi_def_cfa s0, 0
; RV32-NEXT:    andi sp, sp, -64
; RV32-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v12, v8, 4
; RV32-NEXT:    vmv.x.s a0, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 5
; RV32-NEXT:    vmv.x.s a1, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 6
; RV32-NEXT:    vmv.x.s a2, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 7
; RV32-NEXT:    vmv.x.s a3, v12
; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV32-NEXT:    vslidedown.vi v12, v8, 2
; RV32-NEXT:    vslidedown.vi v13, v8, 3
; RV32-NEXT:    mv a4, sp
; RV32-NEXT:    vmv.x.s a5, v12
; RV32-NEXT:    vmv.s.x v12, zero
; RV32-NEXT:    vmv.x.s a6, v13
; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV32-NEXT:    vse32.v v8, (a4)
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vredxor.vs v8, v8, v12
; RV32-NEXT:    lw a4, 32(sp)
; RV32-NEXT:    lw a7, 36(sp)
; RV32-NEXT:    lw t0, 40(sp)
; RV32-NEXT:    lw t1, 44(sp)
; RV32-NEXT:    lw t2, 48(sp)
; RV32-NEXT:    lw t3, 52(sp)
; RV32-NEXT:    lw t4, 56(sp)
; RV32-NEXT:    lw t5, 60(sp)
; RV32-NEXT:    vmv.x.s t6, v8
; RV32-NEXT:    add a5, a5, a6
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a5, t6, a5
; RV32-NEXT:    add a0, a0, a2
; RV32-NEXT:    add a0, a5, a0
; RV32-NEXT:    add a3, a3, a4
; RV32-NEXT:    add a7, a7, t0
; RV32-NEXT:    add t2, t2, t3
; RV32-NEXT:    add a0, a0, a3
; RV32-NEXT:    add a7, a7, t1
; RV32-NEXT:    add t2, t2, t4
; RV32-NEXT:    add a0, a0, a7
; RV32-NEXT:    add t2, t2, t5
; RV32-NEXT:    add a0, a0, t2
; RV32-NEXT:    addi sp, s0, -128
; RV32-NEXT:    .cfi_def_cfa sp, 128
; RV32-NEXT:    lw ra, 124(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s0, 120(sp) # 4-byte Folded Reload
; RV32-NEXT:    .cfi_restore ra
; RV32-NEXT:    .cfi_restore s0
; RV32-NEXT:    addi sp, sp, 128
; RV32-NEXT:    .cfi_def_cfa_offset 0
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_16xi32:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -128
; RV64-NEXT:    .cfi_def_cfa_offset 128
; RV64-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
; RV64-NEXT:    .cfi_offset ra, -8
; RV64-NEXT:    .cfi_offset s0, -16
; RV64-NEXT:    addi s0, sp, 128
; RV64-NEXT:    .cfi_def_cfa s0, 0
; RV64-NEXT:    andi sp, sp, -64
; RV64-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v12, v8, 4
; RV64-NEXT:    vmv.x.s a0, v12
; RV64-NEXT:    vslidedown.vi v12, v8, 5
; RV64-NEXT:    vmv.x.s a1, v12
; RV64-NEXT:    vslidedown.vi v12, v8, 6
; RV64-NEXT:    vmv.x.s a2, v12
; RV64-NEXT:    vslidedown.vi v12, v8, 7
; RV64-NEXT:    vmv.x.s a3, v12
; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64-NEXT:    vslidedown.vi v12, v8, 2
; RV64-NEXT:    vslidedown.vi v13, v8, 3
; RV64-NEXT:    mv a4, sp
; RV64-NEXT:    vmv.x.s a5, v12
; RV64-NEXT:    vmv.s.x v12, zero
; RV64-NEXT:    vmv.x.s a6, v13
; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; RV64-NEXT:    vse32.v v8, (a4)
; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV64-NEXT:    vredxor.vs v8, v8, v12
; RV64-NEXT:    lw a4, 32(sp)
; RV64-NEXT:    lw a7, 36(sp)
; RV64-NEXT:    lw t0, 40(sp)
; RV64-NEXT:    lw t1, 44(sp)
; RV64-NEXT:    lw t2, 48(sp)
; RV64-NEXT:    lw t3, 52(sp)
; RV64-NEXT:    lw t4, 56(sp)
; RV64-NEXT:    lw t5, 60(sp)
; RV64-NEXT:    vmv.x.s t6, v8
; RV64-NEXT:    add a5, a5, a6
; RV64-NEXT:    add a0, a0, a1
; RV64-NEXT:    add a5, t6, a5
; RV64-NEXT:    add a0, a0, a2
; RV64-NEXT:    add a0, a5, a0
; RV64-NEXT:    add a3, a3, a4
; RV64-NEXT:    add a7, a7, t0
; RV64-NEXT:    add t2, t2, t3
; RV64-NEXT:    add a0, a0, a3
; RV64-NEXT:    add a7, a7, t1
; RV64-NEXT:    add t2, t2, t4
; RV64-NEXT:    add a0, a0, a7
; RV64-NEXT:    add t2, t2, t5
; RV64-NEXT:    addw a0, a0, t2
; RV64-NEXT:    addi sp, s0, -128
; RV64-NEXT:    .cfi_def_cfa sp, 128
; RV64-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
; RV64-NEXT:    .cfi_restore ra
; RV64-NEXT:    .cfi_restore s0
; RV64-NEXT:    addi sp, sp, 128
; RV64-NEXT:    .cfi_def_cfa_offset 0
; RV64-NEXT:    ret
  %e0 = extractelement <16 x i32> %v, i32 0
  %e1 = extractelement <16 x i32> %v, i32 1
  %e2 = extractelement <16 x i32> %v, i32 2
  %e3 = extractelement <16 x i32> %v, i32 3
  %e4 = extractelement <16 x i32> %v, i32 4
  %e5 = extractelement <16 x i32> %v, i32 5
  %e6 = extractelement <16 x i32> %v, i32 6
  %e7 = extractelement <16 x i32> %v, i32 7
  %e8 = extractelement <16 x i32> %v, i32 8
  %e9 = extractelement <16 x i32> %v, i32 9
  %e10 = extractelement <16 x i32> %v, i32 10
  %e11 = extractelement <16 x i32> %v, i32 11
  %e12 = extractelement <16 x i32> %v, i32 12
  %e13 = extractelement <16 x i32> %v, i32 13
  %e14 = extractelement <16 x i32> %v, i32 14
  %e15 = extractelement <16 x i32> %v, i32 15
  %add0 = xor i32 %e0, %e1
  %add1 = add i32 %add0, %e2
  %add2 = add i32 %add1, %e3
  %add3 = add i32 %add2, %e4
  %add4 = add i32 %add3, %e5
  %add5 = add i32 %add4, %e6
  %add6 = add i32 %add5, %e7
  %add7 = add i32 %add6, %e8
  %add8 = add i32 %add7, %e9
  %add9 = add i32 %add8, %e10
  %add10 = add i32 %add9, %e11
  %add11 = add i32 %add10, %e12
  %add12 = add i32 %add11, %e13
  %add13 = add i32 %add12, %e14
  %add14 = add i32 %add13, %e15
  ret i32 %add14
}

define i64 @explode_2xi64(<2 x i64> %v) {
; RV32-LABEL: explode_2xi64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV32-NEXT:    vmv.s.x v9, zero
; RV32-NEXT:    li a1, 32
; RV32-NEXT:    vredxor.vs v8, v8, v9
; RV32-NEXT:    vmv.x.s a0, v8
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vsrl.vx v8, v8, a1
; RV32-NEXT:    vmv.x.s a1, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_2xi64:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-NEXT:    vmv.s.x v9, zero
; RV64-NEXT:    vredxor.vs v8, v8, v9
; RV64-NEXT:    vmv.x.s a0, v8
; RV64-NEXT:    ret
  %e0 = extractelement <2 x i64> %v, i32 0
  %e1 = extractelement <2 x i64> %v, i32 1
  %add0 = xor i64 %e0, %e1
  ret i64 %add0
}

define i64 @explode_4xi64(<4 x i64> %v) {
; RV32-LABEL: explode_4xi64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v10, v8, 2
; RV32-NEXT:    li a0, 32
; RV32-NEXT:    vmv.s.x v12, zero
; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV32-NEXT:    vredxor.vs v12, v8, v12
; RV32-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 3
; RV32-NEXT:    vmv.x.s a1, v10
; RV32-NEXT:    vsrl.vx v10, v10, a0
; RV32-NEXT:    vmv.x.s a2, v8
; RV32-NEXT:    vsrl.vx v8, v8, a0
; RV32-NEXT:    vmv.x.s a3, v10
; RV32-NEXT:    vmv.x.s a4, v8
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vsrl.vx v8, v12, a0
; RV32-NEXT:    vmv.x.s a0, v12
; RV32-NEXT:    vmv.x.s a5, v8
; RV32-NEXT:    add a1, a0, a1
; RV32-NEXT:    sltu a6, a1, a0
; RV32-NEXT:    add a3, a5, a3
; RV32-NEXT:    add a0, a1, a2
; RV32-NEXT:    add a3, a3, a6
; RV32-NEXT:    add a3, a3, a4
; RV32-NEXT:    sltu a1, a0, a1
; RV32-NEXT:    add a1, a3, a1
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_4xi64:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v10, v8, 2
; RV64-NEXT:    vmv.x.s a0, v10
; RV64-NEXT:    vslidedown.vi v10, v8, 3
; RV64-NEXT:    vmv.x.s a1, v10
; RV64-NEXT:    vmv.s.x v9, zero
; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-NEXT:    vredxor.vs v8, v8, v9
; RV64-NEXT:    vmv.x.s a2, v8
; RV64-NEXT:    add a0, a0, a1
; RV64-NEXT:    add a0, a2, a0
; RV64-NEXT:    ret
  %e0 = extractelement <4 x i64> %v, i32 0
  %e1 = extractelement <4 x i64> %v, i32 1
  %e2 = extractelement <4 x i64> %v, i32 2
  %e3 = extractelement <4 x i64> %v, i32 3
  %add0 = xor i64 %e0, %e1
  %add1 = add i64 %add0, %e2
  %add2 = add i64 %add1, %e3
  ret i64 %add2
}


define i64 @explode_8xi64(<8 x i64> %v) {
; RV32-LABEL: explode_8xi64:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 1, e64, m4, ta, ma
; RV32-NEXT:    vslidedown.vi v12, v8, 2
; RV32-NEXT:    li a0, 32
; RV32-NEXT:    vslidedown.vi v16, v8, 3
; RV32-NEXT:    vmv.x.s a2, v12
; RV32-NEXT:    vsrl.vx v12, v12, a0
; RV32-NEXT:    vmv.x.s a1, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 4
; RV32-NEXT:    vmv.x.s a4, v16
; RV32-NEXT:    vsrl.vx v16, v16, a0
; RV32-NEXT:    vmv.x.s a3, v16
; RV32-NEXT:    vslidedown.vi v16, v8, 5
; RV32-NEXT:    vmv.x.s a5, v12
; RV32-NEXT:    vsrl.vx v12, v12, a0
; RV32-NEXT:    vmv.x.s a6, v12
; RV32-NEXT:    vslidedown.vi v12, v8, 6
; RV32-NEXT:    vmv.x.s a7, v16
; RV32-NEXT:    vsrl.vx v16, v16, a0
; RV32-NEXT:    vmv.x.s t0, v16
; RV32-NEXT:    vmv.s.x v16, zero
; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV32-NEXT:    vredxor.vs v16, v8, v16
; RV32-NEXT:    vsetivli zero, 1, e64, m4, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 7
; RV32-NEXT:    vmv.x.s t1, v12
; RV32-NEXT:    vsrl.vx v12, v12, a0
; RV32-NEXT:    vmv.x.s t2, v8
; RV32-NEXT:    vsrl.vx v8, v8, a0
; RV32-NEXT:    vmv.x.s t3, v12
; RV32-NEXT:    vmv.x.s t4, v8
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vsrl.vx v8, v16, a0
; RV32-NEXT:    vmv.x.s a0, v16
; RV32-NEXT:    vmv.x.s t5, v8
; RV32-NEXT:    add a2, a0, a2
; RV32-NEXT:    sltu a0, a2, a0
; RV32-NEXT:    add a1, t5, a1
; RV32-NEXT:    add a4, a2, a4
; RV32-NEXT:    add a0, a1, a0
; RV32-NEXT:    sltu a1, a4, a2
; RV32-NEXT:    add a5, a4, a5
; RV32-NEXT:    add a0, a0, a3
; RV32-NEXT:    add a1, a1, a6
; RV32-NEXT:    sltu a2, a5, a4
; RV32-NEXT:    add a7, a5, a7
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a2, a2, t0
; RV32-NEXT:    sltu a1, a7, a5
; RV32-NEXT:    add t1, a7, t1
; RV32-NEXT:    add a2, a0, a2
; RV32-NEXT:    add a1, a1, t3
; RV32-NEXT:    sltu a3, t1, a7
; RV32-NEXT:    add a0, t1, t2
; RV32-NEXT:    add a1, a2, a1
; RV32-NEXT:    add a3, a3, t4
; RV32-NEXT:    add a1, a1, a3
; RV32-NEXT:    sltu a2, a0, t1
; RV32-NEXT:    add a1, a1, a2
; RV32-NEXT:    ret
;
; RV64-LABEL: explode_8xi64:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -128
; RV64-NEXT:    .cfi_def_cfa_offset 128
; RV64-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
; RV64-NEXT:    .cfi_offset ra, -8
; RV64-NEXT:    .cfi_offset s0, -16
; RV64-NEXT:    addi s0, sp, 128
; RV64-NEXT:    .cfi_def_cfa s0, 0
; RV64-NEXT:    andi sp, sp, -64
; RV64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; RV64-NEXT:    vslidedown.vi v12, v8, 2
; RV64-NEXT:    vmv.x.s a0, v12
; RV64-NEXT:    vslidedown.vi v12, v8, 3
; RV64-NEXT:    mv a1, sp
; RV64-NEXT:    vmv.x.s a2, v12
; RV64-NEXT:    vmv.s.x v12, zero
; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; RV64-NEXT:    vse64.v v8, (a1)
; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV64-NEXT:    vredxor.vs v8, v8, v12
; RV64-NEXT:    ld a1, 32(sp)
; RV64-NEXT:    ld a3, 40(sp)
; RV64-NEXT:    ld a4, 48(sp)
; RV64-NEXT:    ld a5, 56(sp)
; RV64-NEXT:    vmv.x.s a6, v8
; RV64-NEXT:    add a0, a0, a2
; RV64-NEXT:    add a0, a6, a0
; RV64-NEXT:    add a0, a0, a1
; RV64-NEXT:    add a3, a3, a4
; RV64-NEXT:    add a0, a0, a3
; RV64-NEXT:    add a0, a0, a5
; RV64-NEXT:    addi sp, s0, -128
; RV64-NEXT:    .cfi_def_cfa sp, 128
; RV64-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
; RV64-NEXT:    .cfi_restore ra
; RV64-NEXT:    .cfi_restore s0
; RV64-NEXT:    addi sp, sp, 128
; RV64-NEXT:    .cfi_def_cfa_offset 0
; RV64-NEXT:    ret
  %e0 = extractelement <8 x i64> %v, i32 0
  %e1 = extractelement <8 x i64> %v, i32 1
  %e2 = extractelement <8 x i64> %v, i32 2
  %e3 = extractelement <8 x i64> %v, i32 3
  %e4 = extractelement <8 x i64> %v, i32 4
  %e5 = extractelement <8 x i64> %v, i32 5
  %e6 = extractelement <8 x i64> %v, i32 6
  %e7 = extractelement <8 x i64> %v, i32 7
  %add0 = xor i64 %e0, %e1
  %add1 = add i64 %add0, %e2
  %add2 = add i64 %add1, %e3
  %add3 = add i64 %add2, %e4
  %add4 = add i64 %add3, %e5
  %add5 = add i64 %add4, %e6
  %add6 = add i64 %add5, %e7
  ret i64 %add6
}

define i64 @explode_16xi64(<16 x i64> %v) {
; RV32-LABEL: explode_16xi64:
; RV32:       # %bb.0:
; RV32-NEXT:    addi sp, sp, -64
; RV32-NEXT:    .cfi_def_cfa_offset 64
; RV32-NEXT:    sw s0, 60(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s1, 56(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s2, 52(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s3, 48(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s4, 44(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s5, 40(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s6, 36(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s7, 32(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s8, 28(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s9, 24(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s10, 20(sp) # 4-byte Folded Spill
; RV32-NEXT:    sw s11, 16(sp) # 4-byte Folded Spill
; RV32-NEXT:    .cfi_offset s0, -4
; RV32-NEXT:    .cfi_offset s1, -8
; RV32-NEXT:    .cfi_offset s2, -12
; RV32-NEXT:    .cfi_offset s3, -16
; RV32-NEXT:    .cfi_offset s4, -20
; RV32-NEXT:    .cfi_offset s5, -24
; RV32-NEXT:    .cfi_offset s6, -28
; RV32-NEXT:    .cfi_offset s7, -32
; RV32-NEXT:    .cfi_offset s8, -36
; RV32-NEXT:    .cfi_offset s9, -40
; RV32-NEXT:    .cfi_offset s10, -44
; RV32-NEXT:    .cfi_offset s11, -48
; RV32-NEXT:    csrr a0, vlenb
; RV32-NEXT:    slli a0, a0, 3
; RV32-NEXT:    sub sp, sp, a0
; RV32-NEXT:    .cfi_escape 0x0f, 0x0e, 0x72, 0x00, 0x11, 0xc0, 0x00, 0x22, 0x11, 0x08, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 64 + 8 * vlenb
; RV32-NEXT:    vsetivli zero, 1, e64, m8, ta, ma
; RV32-NEXT:    vslidedown.vi v24, v8, 2
; RV32-NEXT:    li a0, 32
; RV32-NEXT:    vslidedown.vi v0, v8, 3
; RV32-NEXT:    vslidedown.vi v16, v8, 4
; RV32-NEXT:    vmv.x.s a1, v24
; RV32-NEXT:    vsrl.vx v24, v24, a0
; RV32-NEXT:    vmv.x.s a2, v24
; RV32-NEXT:    vslidedown.vi v24, v8, 5
; RV32-NEXT:    vmv.x.s a3, v0
; RV32-NEXT:    vsrl.vx v0, v0, a0
; RV32-NEXT:    vmv.x.s a4, v0
; RV32-NEXT:    vslidedown.vi v0, v8, 6
; RV32-NEXT:    vmv.x.s a5, v16
; RV32-NEXT:    vsrl.vx v16, v16, a0
; RV32-NEXT:    vmv.x.s a6, v16
; RV32-NEXT:    vslidedown.vi v16, v8, 7
; RV32-NEXT:    vmv.x.s a7, v24
; RV32-NEXT:    vsrl.vx v24, v24, a0
; RV32-NEXT:    vmv.x.s t0, v24
; RV32-NEXT:    vslidedown.vi v24, v8, 8
; RV32-NEXT:    vmv.x.s t1, v0
; RV32-NEXT:    vsrl.vx v0, v0, a0
; RV32-NEXT:    vmv.x.s t2, v0
; RV32-NEXT:    vslidedown.vi v0, v8, 9
; RV32-NEXT:    vmv.x.s t3, v16
; RV32-NEXT:    vsrl.vx v16, v16, a0
; RV32-NEXT:    vmv.x.s t4, v16
; RV32-NEXT:    vslidedown.vi v16, v8, 10
; RV32-NEXT:    vmv.x.s t5, v24
; RV32-NEXT:    vsrl.vx v24, v24, a0
; RV32-NEXT:    vmv.x.s t6, v24
; RV32-NEXT:    vslidedown.vi v24, v8, 11
; RV32-NEXT:    vmv.x.s s0, v0
; RV32-NEXT:    vsrl.vx v0, v0, a0
; RV32-NEXT:    vmv.x.s s1, v0
; RV32-NEXT:    vslidedown.vi v0, v8, 12
; RV32-NEXT:    vmv.x.s s2, v16
; RV32-NEXT:    vsrl.vx v16, v16, a0
; RV32-NEXT:    vmv.x.s s3, v16
; RV32-NEXT:    vslidedown.vi v16, v8, 13
; RV32-NEXT:    addi s4, sp, 16
; RV32-NEXT:    vs8r.v v16, (s4) # Unknown-size Folded Spill
; RV32-NEXT:    vmv.x.s s4, v24
; RV32-NEXT:    vsrl.vx v24, v24, a0
; RV32-NEXT:    vmv.x.s s5, v24
; RV32-NEXT:    vslidedown.vi v24, v8, 14
; RV32-NEXT:    vmv.x.s s6, v0
; RV32-NEXT:    vsrl.vx v0, v0, a0
; RV32-NEXT:    vmv.x.s s7, v0
; RV32-NEXT:    vmv.s.x v7, zero
; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; RV32-NEXT:    vredxor.vs v16, v8, v7
; RV32-NEXT:    vsetivli zero, 1, e64, m8, ta, ma
; RV32-NEXT:    vslidedown.vi v8, v8, 15
; RV32-NEXT:    addi s8, sp, 16
; RV32-NEXT:    vl8r.v v0, (s8) # Unknown-size Folded Reload
; RV32-NEXT:    vmv.x.s s8, v0
; RV32-NEXT:    vsrl.vx v0, v0, a0
; RV32-NEXT:    vmv.x.s s9, v0
; RV32-NEXT:    vsrl.vx v0, v24, a0
; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; RV32-NEXT:    vsrl.vx v17, v16, a0
; RV32-NEXT:    vmv.x.s s10, v16
; RV32-NEXT:    vmv.x.s s11, v17
; RV32-NEXT:    vsetivli zero, 1, e64, m8, ta, ma
; RV32-NEXT:    vsrl.vx v16, v8, a0
; RV32-NEXT:    add a2, s11, a2
; RV32-NEXT:    add a1, s10, a1
; RV32-NEXT:    sltu a0, a1, s10
; RV32-NEXT:    add a0, a2, a0
; RV32-NEXT:    add a0, a0, a4
; RV32-NEXT:    add a3, a1, a3
; RV32-NEXT:    sltu a1, a3, a1
; RV32-NEXT:    add a1, a1, a6
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a5, a3, a5
; RV32-NEXT:    sltu a1, a5, a3
; RV32-NEXT:    add a1, a1, t0
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add a7, a5, a7
; RV32-NEXT:    sltu a1, a7, a5
; RV32-NEXT:    add a1, a1, t2
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add t1, a7, t1
; RV32-NEXT:    sltu a1, t1, a7
; RV32-NEXT:    add a1, a1, t4
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add t3, t1, t3
; RV32-NEXT:    sltu a1, t3, t1
; RV32-NEXT:    add a1, a1, t6
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add t5, t3, t5
; RV32-NEXT:    sltu a1, t5, t3
; RV32-NEXT:    add a1, a1, s1
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add s0, t5, s0
; RV32-NEXT:    sltu a1, s0, t5
; RV32-NEXT:    add a1, a1, s3
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add s2, s0, s2
; RV32-NEXT:    sltu a1, s2, s0
; RV32-NEXT:    add a1, a1, s5
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add s4, s2, s4
; RV32-NEXT:    sltu a1, s4, s2
; RV32-NEXT:    add a1, a1, s7
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    add s6, s4, s6
; RV32-NEXT:    sltu a1, s6, s4
; RV32-NEXT:    add a1, a1, s9
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    vmv.x.s a1, v0
; RV32-NEXT:    add s8, s6, s8
; RV32-NEXT:    sltu a2, s8, s6
; RV32-NEXT:    add a1, a2, a1
; RV32-NEXT:    vmv.x.s a2, v24
; RV32-NEXT:    add a0, a0, a1
; RV32-NEXT:    vmv.x.s a1, v16
; RV32-NEXT:    add a2, s8, a2
; RV32-NEXT:    sltu a3, a2, s8
; RV32-NEXT:    add a1, a3, a1
; RV32-NEXT:    add a1, a0, a1
; RV32-NEXT:    vmv.x.s a0, v8
; RV32-NEXT:    add a0, a2, a0
; RV32-NEXT:    sltu a2, a0, a2
; RV32-NEXT:    add a1, a1, a2
; RV32-NEXT:    csrr a2, vlenb
; RV32-NEXT:    slli a2, a2, 3
; RV32-NEXT:    add sp, sp, a2
; RV32-NEXT:    .cfi_def_cfa sp, 64
; RV32-NEXT:    lw s0, 60(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s1, 56(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s2, 52(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s3, 48(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s4, 44(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s5, 40(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s6, 36(sp) # 4-byte Folded Reload
; RV32-NEXT:    lw s7, 32(sp) # 4-byte Folded Reload
1014*9122c523SPengcheng Wang; RV32-NEXT:    lw s8, 28(sp) # 4-byte Folded Reload
1015*9122c523SPengcheng Wang; RV32-NEXT:    lw s9, 24(sp) # 4-byte Folded Reload
1016*9122c523SPengcheng Wang; RV32-NEXT:    lw s10, 20(sp) # 4-byte Folded Reload
1017*9122c523SPengcheng Wang; RV32-NEXT:    lw s11, 16(sp) # 4-byte Folded Reload
101897982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s0
101997982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s1
102097982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s2
102197982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s3
102297982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s4
102397982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s5
102497982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s6
102597982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s7
102697982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s8
102797982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s9
102897982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s10
102997982a8cSdlav-sc; RV32-NEXT:    .cfi_restore s11
1030*9122c523SPengcheng Wang; RV32-NEXT:    addi sp, sp, 64
103197982a8cSdlav-sc; RV32-NEXT:    .cfi_def_cfa_offset 0
10327c4f4559SPhilip Reames; RV32-NEXT:    ret
10337c4f4559SPhilip Reames;
10347c4f4559SPhilip Reames; RV64-LABEL: explode_16xi64:
10357c4f4559SPhilip Reames; RV64:       # %bb.0:
1036299d710eSPhilip Reames; RV64-NEXT:    addi sp, sp, -256
1037299d710eSPhilip Reames; RV64-NEXT:    .cfi_def_cfa_offset 256
1038299d710eSPhilip Reames; RV64-NEXT:    sd ra, 248(sp) # 8-byte Folded Spill
1039299d710eSPhilip Reames; RV64-NEXT:    sd s0, 240(sp) # 8-byte Folded Spill
1040299d710eSPhilip Reames; RV64-NEXT:    .cfi_offset ra, -8
1041299d710eSPhilip Reames; RV64-NEXT:    .cfi_offset s0, -16
1042299d710eSPhilip Reames; RV64-NEXT:    addi s0, sp, 256
1043299d710eSPhilip Reames; RV64-NEXT:    .cfi_def_cfa s0, 0
1044299d710eSPhilip Reames; RV64-NEXT:    andi sp, sp, -128
10457c4f4559SPhilip Reames; RV64-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
10467c4f4559SPhilip Reames; RV64-NEXT:    vslidedown.vi v16, v8, 2
104745a334d3SPhilip Reames; RV64-NEXT:    vmv.x.s a0, v16
10487c4f4559SPhilip Reames; RV64-NEXT:    vslidedown.vi v16, v8, 3
1049*9122c523SPengcheng Wang; RV64-NEXT:    mv a1, sp
1050*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s a2, v16
1051*9122c523SPengcheng Wang; RV64-NEXT:    vmv.s.x v16, zero
1052299d710eSPhilip Reames; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
1053*9122c523SPengcheng Wang; RV64-NEXT:    vse64.v v8, (a1)
1054*9122c523SPengcheng Wang; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
1055*9122c523SPengcheng Wang; RV64-NEXT:    vredxor.vs v8, v8, v16
1056*9122c523SPengcheng Wang; RV64-NEXT:    ld a1, 32(sp)
105745a334d3SPhilip Reames; RV64-NEXT:    ld a3, 40(sp)
105845a334d3SPhilip Reames; RV64-NEXT:    ld a4, 48(sp)
105945a334d3SPhilip Reames; RV64-NEXT:    ld a5, 56(sp)
106045a334d3SPhilip Reames; RV64-NEXT:    ld a6, 64(sp)
106145a334d3SPhilip Reames; RV64-NEXT:    ld a7, 72(sp)
106245a334d3SPhilip Reames; RV64-NEXT:    ld t0, 80(sp)
106345a334d3SPhilip Reames; RV64-NEXT:    ld t1, 88(sp)
106445a334d3SPhilip Reames; RV64-NEXT:    ld t2, 96(sp)
106545a334d3SPhilip Reames; RV64-NEXT:    ld t3, 104(sp)
106645a334d3SPhilip Reames; RV64-NEXT:    ld t4, 112(sp)
106745a334d3SPhilip Reames; RV64-NEXT:    ld t5, 120(sp)
106845a334d3SPhilip Reames; RV64-NEXT:    vmv.x.s t6, v8
1069f0505c3dSPhilip Reames; RV64-NEXT:    add a0, a0, a2
1070*9122c523SPengcheng Wang; RV64-NEXT:    add a0, t6, a0
1071*9122c523SPengcheng Wang; RV64-NEXT:    add a0, a0, a1
107245a334d3SPhilip Reames; RV64-NEXT:    add a3, a3, a4
1073f0505c3dSPhilip Reames; RV64-NEXT:    add a5, a5, a6
107445a334d3SPhilip Reames; RV64-NEXT:    add t0, t0, t1
1075*9122c523SPengcheng Wang; RV64-NEXT:    add a0, a0, a3
1076*9122c523SPengcheng Wang; RV64-NEXT:    add a5, a5, a7
107745a334d3SPhilip Reames; RV64-NEXT:    add t0, t0, t2
1078*9122c523SPengcheng Wang; RV64-NEXT:    add a0, a0, a5
107945a334d3SPhilip Reames; RV64-NEXT:    add t0, t0, t3
108045a334d3SPhilip Reames; RV64-NEXT:    add a0, a0, t0
108145a334d3SPhilip Reames; RV64-NEXT:    add t4, t4, t5
108245a334d3SPhilip Reames; RV64-NEXT:    add a0, a0, t4
1083299d710eSPhilip Reames; RV64-NEXT:    addi sp, s0, -256
108497982a8cSdlav-sc; RV64-NEXT:    .cfi_def_cfa sp, 256
1085299d710eSPhilip Reames; RV64-NEXT:    ld ra, 248(sp) # 8-byte Folded Reload
1086299d710eSPhilip Reames; RV64-NEXT:    ld s0, 240(sp) # 8-byte Folded Reload
108797982a8cSdlav-sc; RV64-NEXT:    .cfi_restore ra
108897982a8cSdlav-sc; RV64-NEXT:    .cfi_restore s0
1089299d710eSPhilip Reames; RV64-NEXT:    addi sp, sp, 256
109097982a8cSdlav-sc; RV64-NEXT:    .cfi_def_cfa_offset 0
10917c4f4559SPhilip Reames; RV64-NEXT:    ret
10927c4f4559SPhilip Reames  %e0 = extractelement <16 x i64> %v, i32 0
10937c4f4559SPhilip Reames  %e1 = extractelement <16 x i64> %v, i32 1
10947c4f4559SPhilip Reames  %e2 = extractelement <16 x i64> %v, i32 2
10957c4f4559SPhilip Reames  %e3 = extractelement <16 x i64> %v, i32 3
10967c4f4559SPhilip Reames  %e4 = extractelement <16 x i64> %v, i32 4
10977c4f4559SPhilip Reames  %e5 = extractelement <16 x i64> %v, i32 5
10987c4f4559SPhilip Reames  %e6 = extractelement <16 x i64> %v, i32 6
10997c4f4559SPhilip Reames  %e7 = extractelement <16 x i64> %v, i32 7
11007c4f4559SPhilip Reames  %e8 = extractelement <16 x i64> %v, i32 8
11017c4f4559SPhilip Reames  %e9 = extractelement <16 x i64> %v, i32 9
11027c4f4559SPhilip Reames  %e10 = extractelement <16 x i64> %v, i32 10
11037c4f4559SPhilip Reames  %e11 = extractelement <16 x i64> %v, i32 11
11047c4f4559SPhilip Reames  %e12 = extractelement <16 x i64> %v, i32 12
11057c4f4559SPhilip Reames  %e13 = extractelement <16 x i64> %v, i32 13
11067c4f4559SPhilip Reames  %e14 = extractelement <16 x i64> %v, i32 14
11077c4f4559SPhilip Reames  %e15 = extractelement <16 x i64> %v, i32 15
1108f0505c3dSPhilip Reames  %add0 = xor i64 %e0, %e1
1109f0505c3dSPhilip Reames  %add1 = add i64 %add0, %e2
11107c4f4559SPhilip Reames  %add2 = add i64 %add1, %e3
11117c4f4559SPhilip Reames  %add3 = add i64 %add2, %e4
11127c4f4559SPhilip Reames  %add4 = add i64 %add3, %e5
11137c4f4559SPhilip Reames  %add5 = add i64 %add4, %e6
11147c4f4559SPhilip Reames  %add6 = add i64 %add5, %e7
11157c4f4559SPhilip Reames  %add7 = add i64 %add6, %e8
11167c4f4559SPhilip Reames  %add8 = add i64 %add7, %e9
11177c4f4559SPhilip Reames  %add9 = add i64 %add8, %e10
11187c4f4559SPhilip Reames  %add10 = add i64 %add9, %e11
11197c4f4559SPhilip Reames  %add11 = add i64 %add10, %e12
11207c4f4559SPhilip Reames  %add12 = add i64 %add11, %e13
11217c4f4559SPhilip Reames  %add13 = add i64 %add12, %e14
11227c4f4559SPhilip Reames  %add14 = add i64 %add13, %e15
11237c4f4559SPhilip Reames  ret i64 %add14
11247c4f4559SPhilip Reames}
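; A note on the explode_16xi64 checks above: the RV32 lowering has to split
; every i64 lane into two 32-bit halves (vmv.x.s for the low word, vsrl.vx
; followed by vmv.x.s for the high word) and rebuild the sum pairwise with
; sltu-based carries, which is why it ends up using all twelve callee-saved
; GPRs plus an 8-register vector spill slot sized from vlenb. The RV64
; lowering instead stores the whole vector to a 128-byte-aligned stack slot
; with vse64.v and reloads lanes 4 through 15 with scalar ld.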
1125cf17a24aSPhilip Reames
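; vscale_range(2, 2) pins the runtime vector length to a single value
; (vscale = 2, i.e. VLEN = 128 for RVV), so a <16 x i32> operand is known to
; occupy exactly four m1 registers (v8-v11). Every lane can then be reached
; with short m1 vslidedown.vi/vmv.x.s sequences, with no stack traffic and no
; larger-LMUL slides.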
1126cf17a24aSPhilip Reamesdefine i32 @explode_16xi32_exact_vlen(<16 x i32> %v) vscale_range(2, 2) {
1127cf17a24aSPhilip Reames; RV32-LABEL: explode_16xi32_exact_vlen:
1128cf17a24aSPhilip Reames; RV32:       # %bb.0:
1129cf17a24aSPhilip Reames; RV32-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
1130cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v12, v8, 2
1131cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s a0, v12
1132cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v12, v8, 3
1133*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s a1, v9
1134*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s a2, v12
1135cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v12, v9, 1
1136cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s a3, v12
1137cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v12, v9, 2
1138cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s a4, v12
1139cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v9, 3
1140*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s a5, v10
1141*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s a6, v9
1142cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v10, 1
1143cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s a7, v9
1144cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v10, 2
1145cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s t0, v9
1146cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v10, 3
1147*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s t1, v11
1148*9122c523SPengcheng Wang; RV32-NEXT:    vmv.x.s t2, v9
1149cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v11, 1
1150cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s t3, v9
1151cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v11, 2
1152cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s t4, v9
1153cf17a24aSPhilip Reames; RV32-NEXT:    vslidedown.vi v9, v11, 3
1154cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s t5, v9
1155cf17a24aSPhilip Reames; RV32-NEXT:    vmv.s.x v9, zero
1156cf17a24aSPhilip Reames; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
1157cf17a24aSPhilip Reames; RV32-NEXT:    vredxor.vs v8, v8, v9
1158cf17a24aSPhilip Reames; RV32-NEXT:    vmv.x.s t6, v8
1159cf17a24aSPhilip Reames; RV32-NEXT:    add a0, a0, a2
1160*9122c523SPengcheng Wang; RV32-NEXT:    add a1, a1, a3
1161*9122c523SPengcheng Wang; RV32-NEXT:    add a5, a6, a5
1162*9122c523SPengcheng Wang; RV32-NEXT:    add t1, t2, t1
1163*9122c523SPengcheng Wang; RV32-NEXT:    add a0, t6, a0
1164*9122c523SPengcheng Wang; RV32-NEXT:    add a1, a1, a4
1165cf17a24aSPhilip Reames; RV32-NEXT:    add a5, a5, a7
1166cf17a24aSPhilip Reames; RV32-NEXT:    add t1, t1, t3
1167*9122c523SPengcheng Wang; RV32-NEXT:    add a0, a0, a1
1168*9122c523SPengcheng Wang; RV32-NEXT:    add a5, a5, t0
1169cf17a24aSPhilip Reames; RV32-NEXT:    add t1, t1, t4
1170*9122c523SPengcheng Wang; RV32-NEXT:    add a0, a0, a5
1171cf17a24aSPhilip Reames; RV32-NEXT:    add t1, t1, t5
1172cf17a24aSPhilip Reames; RV32-NEXT:    add a0, a0, t1
1173cf17a24aSPhilip Reames; RV32-NEXT:    ret
1174cf17a24aSPhilip Reames;
1175cf17a24aSPhilip Reames; RV64-LABEL: explode_16xi32_exact_vlen:
1176cf17a24aSPhilip Reames; RV64:       # %bb.0:
1177cf17a24aSPhilip Reames; RV64-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
1178cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v12, v8, 2
1179cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s a0, v12
1180cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v12, v8, 3
1181*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s a1, v9
1182*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s a2, v12
1183cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v12, v9, 1
1184cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s a3, v12
1185cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v12, v9, 2
1186cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s a4, v12
1187cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v9, 3
1188*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s a5, v10
1189*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s a6, v9
1190cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v10, 1
1191cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s a7, v9
1192cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v10, 2
1193cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s t0, v9
1194cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v10, 3
1195*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s t1, v11
1196*9122c523SPengcheng Wang; RV64-NEXT:    vmv.x.s t2, v9
1197cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v11, 1
1198cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s t3, v9
1199cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v11, 2
1200cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s t4, v9
1201cf17a24aSPhilip Reames; RV64-NEXT:    vslidedown.vi v9, v11, 3
1202cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s t5, v9
1203cf17a24aSPhilip Reames; RV64-NEXT:    vmv.s.x v9, zero
1204cf17a24aSPhilip Reames; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
1205cf17a24aSPhilip Reames; RV64-NEXT:    vredxor.vs v8, v8, v9
1206cf17a24aSPhilip Reames; RV64-NEXT:    vmv.x.s t6, v8
1207cf17a24aSPhilip Reames; RV64-NEXT:    add a0, a0, a2
1208*9122c523SPengcheng Wang; RV64-NEXT:    add a1, a1, a3
1209*9122c523SPengcheng Wang; RV64-NEXT:    add a5, a6, a5
1210*9122c523SPengcheng Wang; RV64-NEXT:    add t1, t2, t1
1211*9122c523SPengcheng Wang; RV64-NEXT:    add a0, t6, a0
1212*9122c523SPengcheng Wang; RV64-NEXT:    add a1, a1, a4
1213cf17a24aSPhilip Reames; RV64-NEXT:    add a5, a5, a7
1214cf17a24aSPhilip Reames; RV64-NEXT:    add t1, t1, t3
1215*9122c523SPengcheng Wang; RV64-NEXT:    add a0, a0, a1
1216*9122c523SPengcheng Wang; RV64-NEXT:    add a5, a5, t0
1217cf17a24aSPhilip Reames; RV64-NEXT:    add t1, t1, t4
1218*9122c523SPengcheng Wang; RV64-NEXT:    add a0, a0, a5
1219cf17a24aSPhilip Reames; RV64-NEXT:    add t1, t1, t5
1220cf17a24aSPhilip Reames; RV64-NEXT:    addw a0, a0, t1
1221cf17a24aSPhilip Reames; RV64-NEXT:    ret
1222cf17a24aSPhilip Reames  %e0 = extractelement <16 x i32> %v, i32 0
1223cf17a24aSPhilip Reames  %e1 = extractelement <16 x i32> %v, i32 1
1224cf17a24aSPhilip Reames  %e2 = extractelement <16 x i32> %v, i32 2
1225cf17a24aSPhilip Reames  %e3 = extractelement <16 x i32> %v, i32 3
1226cf17a24aSPhilip Reames  %e4 = extractelement <16 x i32> %v, i32 4
1227cf17a24aSPhilip Reames  %e5 = extractelement <16 x i32> %v, i32 5
1228cf17a24aSPhilip Reames  %e6 = extractelement <16 x i32> %v, i32 6
1229cf17a24aSPhilip Reames  %e7 = extractelement <16 x i32> %v, i32 7
1230cf17a24aSPhilip Reames  %e8 = extractelement <16 x i32> %v, i32 8
1231cf17a24aSPhilip Reames  %e9 = extractelement <16 x i32> %v, i32 9
1232cf17a24aSPhilip Reames  %e10 = extractelement <16 x i32> %v, i32 10
1233cf17a24aSPhilip Reames  %e11 = extractelement <16 x i32> %v, i32 11
1234cf17a24aSPhilip Reames  %e12 = extractelement <16 x i32> %v, i32 12
1235cf17a24aSPhilip Reames  %e13 = extractelement <16 x i32> %v, i32 13
1236cf17a24aSPhilip Reames  %e14 = extractelement <16 x i32> %v, i32 14
1237cf17a24aSPhilip Reames  %e15 = extractelement <16 x i32> %v, i32 15
1238cf17a24aSPhilip Reames  %add0 = xor i32 %e0, %e1
1239cf17a24aSPhilip Reames  %add1 = add i32 %add0, %e2
1240cf17a24aSPhilip Reames  %add2 = add i32 %add1, %e3
1241cf17a24aSPhilip Reames  %add3 = add i32 %add2, %e4
1242cf17a24aSPhilip Reames  %add4 = add i32 %add3, %e5
1243cf17a24aSPhilip Reames  %add5 = add i32 %add4, %e6
1244cf17a24aSPhilip Reames  %add6 = add i32 %add5, %e7
1245cf17a24aSPhilip Reames  %add7 = add i32 %add6, %e8
1246cf17a24aSPhilip Reames  %add8 = add i32 %add7, %e9
1247cf17a24aSPhilip Reames  %add9 = add i32 %add8, %e10
1248cf17a24aSPhilip Reames  %add10 = add i32 %add9, %e11
1249cf17a24aSPhilip Reames  %add11 = add i32 %add10, %e12
1250cf17a24aSPhilip Reames  %add12 = add i32 %add11, %e13
1251cf17a24aSPhilip Reames  %add13 = add i32 %add12, %e14
1252cf17a24aSPhilip Reames  %add14 = add i32 %add13, %e15
1253cf17a24aSPhilip Reames  ret i32 %add14
1254cf17a24aSPhilip Reames}
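; On RV64 the final accumulation above uses addw, so the i32 result is
; already sign-extended to 64 bits as the calling convention expects for i32
; return values.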
1255