; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvl512b < %s | FileCheck %s

; A single source shuffle with an offset not representable in an i8 index
; vector, and a type which can't be promoted to an i16 element type while
; remaining a valid type.  Note that splitting the vector is legal here.
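; For example, the mask below uses the indices 500 and 258, neither of which
; fits in an i8 (maximum value 255).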
define <512 x i8> @single_source(<512 x i8> %a) {
; CHECK-LABEL: single_source:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi sp, sp, -1536
; CHECK-NEXT:    .cfi_def_cfa_offset 1536
; CHECK-NEXT:    sd ra, 1528(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 1520(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    addi s0, sp, 1536
; CHECK-NEXT:    .cfi_def_cfa s0, 0
; CHECK-NEXT:    andi sp, sp, -512
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv8r.v v16, v8
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    addi a1, sp, 512
; CHECK-NEXT:    vmv.x.s a2, v16
; CHECK-NEXT:    vslidedown.vi v24, v16, 5
; CHECK-NEXT:    li a3, 432
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vse8.v v8, (a1)
; CHECK-NEXT:    vmv.v.x v8, a2
; CHECK-NEXT:    lbu a0, 770(sp)
; CHECK-NEXT:    li a1, 431
; CHECK-NEXT:    vslide1down.vx v8, v8, a0
; CHECK-NEXT:    lbu a0, 1012(sp)
; CHECK-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v24, a1
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v24, v16, 4
; CHECK-NEXT:    li a1, 466
; CHECK-NEXT:    vmv.s.x v16, a0
; CHECK-NEXT:    li a0, 465
; CHECK-NEXT:    li a2, 501
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v24, a0
; CHECK-NEXT:    li a0, 500
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v16, a0
; CHECK-NEXT:    addi sp, s0, -1536
; CHECK-NEXT:    .cfi_def_cfa sp, 1536
; CHECK-NEXT:    ld ra, 1528(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 1520(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    .cfi_restore s0
; CHECK-NEXT:    addi sp, sp, 1536
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
55  %res = shufflevector <512 x i8> %a, <512 x i8> poison, <512 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 4, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 
0, i32 0, i32 0, i32 500, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 258>
  ret <512 x i8> %res
}

; Like the above, but the actual values of the indices are all representable in an i8.
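; Here the largest mask entries are 254 and 44, both of which fit in an i8.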
define <512 x i8> @range_restriction(<512 x i8> %a) {
; CHECK-LABEL: range_restriction:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    li a1, 254
; CHECK-NEXT:    li a2, 432
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 0
; CHECK-NEXT:    vslide1down.vx v24, v16, a1
; CHECK-NEXT:    li a1, 431
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 5
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a1
; CHECK-NEXT:    li a1, 466
; CHECK-NEXT:    li a2, 465
; CHECK-NEXT:    vsetvli zero, a0, e8, m1, ta, ma
; CHECK-NEXT:    vmv.v.i v16, 4
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a2
; CHECK-NEXT:    li a1, 44
; CHECK-NEXT:    li a2, 501
; CHECK-NEXT:    vmv.s.x v16, a1
; CHECK-NEXT:    li a1, 500
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v24, v16, a1
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vrgather.vv v16, v8, v24
; CHECK-NEXT:    vmv.v.v v8, v16
; CHECK-NEXT:    ret
90  %res = shufflevector <512 x i8> %a, <512 x i8> poison, <512 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 4, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 
0, i32 0, i32 0, i32 44, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 254>
  ret <512 x i8> %res
}

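; A two source shuffle: mask entries of 512 and above (e.g. 512, 548, 574, 674)
; select elements from %b.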
define <512 x i8> @two_source(<512 x i8> %a, <512 x i8> %b) {
; CHECK-LABEL: two_source:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi sp, sp, -1536
; CHECK-NEXT:    .cfi_def_cfa_offset 1536
; CHECK-NEXT:    sd ra, 1528(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 1520(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    addi s0, sp, 1536
; CHECK-NEXT:    .cfi_def_cfa s0, 0
; CHECK-NEXT:    csrr a0, vlenb
; CHECK-NEXT:    slli a0, a0, 3
; CHECK-NEXT:    sub sp, sp, a0
; CHECK-NEXT:    andi sp, sp, -512
; CHECK-NEXT:    addi a0, sp, 1520
; CHECK-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv8r.v v24, v8
; CHECK-NEXT:    li a0, 512
; CHECK-NEXT:    addi a1, sp, 512
; CHECK-NEXT:    vslidedown.vi v0, v24, 5
; CHECK-NEXT:    vmv.x.s a2, v24
; CHECK-NEXT:    li a3, 432
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vmv.v.x v8, a2
; CHECK-NEXT:    li a2, 431
; CHECK-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v0, a2
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v0, v24, 4
; CHECK-NEXT:    li a2, 466
; CHECK-NEXT:    li a3, 465
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vse8.v v24, (a1)
; CHECK-NEXT:    lbu a1, 985(sp)
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v0, a3
; CHECK-NEXT:    li a2, 478
; CHECK-NEXT:    lbu a3, 1012(sp)
; CHECK-NEXT:    vmv.s.x v24, a1
; CHECK-NEXT:    li a1, 477
; CHECK-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v24, a1
; CHECK-NEXT:    li a1, 501
; CHECK-NEXT:    lui a2, %hi(.LCPI2_1)
; CHECK-NEXT:    addi a2, a2, %lo(.LCPI2_1)
; CHECK-NEXT:    vsetivli zero, 8, e64, m1, ta, ma
; CHECK-NEXT:    vle64.v v0, (a2)
; CHECK-NEXT:    li a2, 500
; CHECK-NEXT:    vmv.s.x v24, a3
; CHECK-NEXT:    lui a3, %hi(.LCPI2_0)
; CHECK-NEXT:    addi a3, a3, %lo(.LCPI2_0)
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
; CHECK-NEXT:    vle8.v v16, (a3)
; CHECK-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
; CHECK-NEXT:    vslideup.vx v8, v24, a2
; CHECK-NEXT:    addi a1, sp, 1520
; CHECK-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
; CHECK-NEXT:    vsetvli zero, a0, e8, m8, ta, mu
; CHECK-NEXT:    vrgather.vv v8, v24, v16, v0.t
; CHECK-NEXT:    addi sp, s0, -1536
; CHECK-NEXT:    .cfi_def_cfa sp, 1536
; CHECK-NEXT:    ld ra, 1528(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 1520(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    .cfi_restore s0
; CHECK-NEXT:    addi sp, sp, 1536
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
165  %res = shufflevector <512 x i8> %a, <512 x i8> %b, <512 x i32> <i32 0, i32 512, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 512, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 512, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 548, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 5, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 4, i32 574, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 512, i32 473, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 674, i32 0, i32 0, i32 0, i32 0, i32 0, i32 
0, i32 0, i32 0, i32 0, i32 0, i32 500, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 555>
  ret <512 x i8> %res
}