1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --extra_scrub
2; RUN: llc -mtriple=riscv64 -mattr=+m -verify-machineinstrs < %s \
3; RUN:   | FileCheck %s -check-prefixes=CHECK,RV64I
4; RUN: llc -mtriple=riscv64 -mattr=+m,+xtheadba -verify-machineinstrs < %s \
5; RUN:   | FileCheck %s -check-prefixes=CHECK,RV64XTHEADBA
6
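; XTheadBa adds th.addsl rd, rs1, rs2, imm, a shift-and-add that computes
; rd = rs1 + (rs2 << imm) for an immediate shift amount of 1, 2, or 3 in the
; tests below. The RV64I run line is the baseline without the extension; the
; RV64XTHEADBA prefix checks that address generation, constant multiplies,
; and constant adds are rewritten in terms of th.addsl.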
7define signext i16 @th_addsl_1(i64 %0, ptr %1) {
8; RV64I-LABEL: th_addsl_1:
9; RV64I:       # %bb.0:
10; RV64I-NEXT:    slli a0, a0, 1
11; RV64I-NEXT:    add a0, a1, a0
12; RV64I-NEXT:    lh a0, 0(a0)
13; RV64I-NEXT:    ret
14;
15; RV64XTHEADBA-LABEL: th_addsl_1:
16; RV64XTHEADBA:       # %bb.0:
17; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
18; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
19; RV64XTHEADBA-NEXT:    ret
20  %3 = getelementptr inbounds i16, ptr %1, i64 %0
21  %4 = load i16, ptr %3
22  ret i16 %4
23}
24
25define signext i32 @th_addsl_2(i64 %0, ptr %1) {
26; RV64I-LABEL: th_addsl_2:
27; RV64I:       # %bb.0:
28; RV64I-NEXT:    slli a0, a0, 2
29; RV64I-NEXT:    add a0, a1, a0
30; RV64I-NEXT:    lw a0, 0(a0)
31; RV64I-NEXT:    ret
32;
33; RV64XTHEADBA-LABEL: th_addsl_2:
34; RV64XTHEADBA:       # %bb.0:
35; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
36; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
37; RV64XTHEADBA-NEXT:    ret
38  %3 = getelementptr inbounds i32, ptr %1, i64 %0
39  %4 = load i32, ptr %3
40  ret i32 %4
41}
42
43define i64 @th_addsl_3(i64 %0, ptr %1) {
44; RV64I-LABEL: th_addsl_3:
45; RV64I:       # %bb.0:
46; RV64I-NEXT:    slli a0, a0, 3
47; RV64I-NEXT:    add a0, a1, a0
48; RV64I-NEXT:    ld a0, 0(a0)
49; RV64I-NEXT:    ret
50;
51; RV64XTHEADBA-LABEL: th_addsl_3:
52; RV64XTHEADBA:       # %bb.0:
53; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
54; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
55; RV64XTHEADBA-NEXT:    ret
56  %3 = getelementptr inbounds i64, ptr %1, i64 %0
57  %4 = load i64, ptr %3
58  ret i64 %4
59}
60
61; Type legalization inserts a sext_inreg after the first add. That add will be
62; selected as th.addsl which does not sign extend. SimplifyDemandedBits is unable
63; to remove the sext_inreg because it has multiple uses. The ashr will use the
64; sext_inreg to become sraiw. This leaves the sext_inreg only used by the shl.
65; If the shl is selected as sllw, we don't need the sext_inreg.
66define i64 @th_addsl_2_extra_sext(i32 %x, i32 %y, i32 %z) {
67; RV64I-LABEL: th_addsl_2_extra_sext:
68; RV64I:       # %bb.0:
69; RV64I-NEXT:    slli a0, a0, 2
70; RV64I-NEXT:    add a0, a0, a1
71; RV64I-NEXT:    sllw a1, a2, a0
72; RV64I-NEXT:    sraiw a0, a0, 2
73; RV64I-NEXT:    mul a0, a1, a0
74; RV64I-NEXT:    ret
75;
76; RV64XTHEADBA-LABEL: th_addsl_2_extra_sext:
77; RV64XTHEADBA:       # %bb.0:
78; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
79; RV64XTHEADBA-NEXT:    sllw a1, a2, a0
80; RV64XTHEADBA-NEXT:    sraiw a0, a0, 2
81; RV64XTHEADBA-NEXT:    mul a0, a1, a0
82; RV64XTHEADBA-NEXT:    ret
83  %a = shl i32 %x, 2
84  %b = add i32 %a, %y
85  %c = shl i32 %z, %b
86  %d = ashr i32 %b, 2
87  %e = sext i32 %c to i64
88  %f = sext i32 %d to i64
89  %g = mul i64 %e, %f
90  ret i64 %g
91}
92
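; The addmulN tests multiply by a small constant and then add another value.
; With XTheadBa the multiply is decomposed into th.addsl: for addmul6,
; 6*a + b is b + (3*a << 1) with 3*a = a + (a << 1), so two th.addsl replace
; the mul/add (or shift/sub/add) sequence. disjointormul6 checks that an
; 'or disjoint' is treated the same way as the add.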
93define i64 @addmul6(i64 %a, i64 %b) {
94; RV64I-LABEL: addmul6:
95; RV64I:       # %bb.0:
96; RV64I-NEXT:    slli a2, a0, 1
97; RV64I-NEXT:    slli a0, a0, 3
98; RV64I-NEXT:    sub a0, a0, a2
99; RV64I-NEXT:    add a0, a0, a1
100; RV64I-NEXT:    ret
101;
102; RV64XTHEADBA-LABEL: addmul6:
103; RV64XTHEADBA:       # %bb.0:
104; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
105; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
106; RV64XTHEADBA-NEXT:    ret
107  %c = mul i64 %a, 6
108  %d = add i64 %c, %b
109  ret i64 %d
110}
111
112define i64 @disjointormul6(i64 %a, i64 %b) {
113; RV64I-LABEL: disjointormul6:
114; RV64I:       # %bb.0:
115; RV64I-NEXT:    slli a2, a0, 1
116; RV64I-NEXT:    slli a0, a0, 3
117; RV64I-NEXT:    sub a0, a0, a2
118; RV64I-NEXT:    or a0, a0, a1
119; RV64I-NEXT:    ret
120;
121; RV64XTHEADBA-LABEL: disjointormul6:
122; RV64XTHEADBA:       # %bb.0:
123; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
124; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
125; RV64XTHEADBA-NEXT:    ret
126  %c = mul i64 %a, 6
127  %d = or disjoint i64 %c, %b
128  ret i64 %d
129}
130
131define i64 @addmul10(i64 %a, i64 %b) {
132; RV64I-LABEL: addmul10:
133; RV64I:       # %bb.0:
134; RV64I-NEXT:    li a2, 10
135; RV64I-NEXT:    mul a0, a0, a2
136; RV64I-NEXT:    add a0, a0, a1
137; RV64I-NEXT:    ret
138;
139; RV64XTHEADBA-LABEL: addmul10:
140; RV64XTHEADBA:       # %bb.0:
141; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
142; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
143; RV64XTHEADBA-NEXT:    ret
144  %c = mul i64 %a, 10
145  %d = add i64 %c, %b
146  ret i64 %d
147}
148
149define i64 @addmul12(i64 %a, i64 %b) {
150; RV64I-LABEL: addmul12:
151; RV64I:       # %bb.0:
152; RV64I-NEXT:    slli a2, a0, 2
153; RV64I-NEXT:    slli a0, a0, 4
154; RV64I-NEXT:    sub a0, a0, a2
155; RV64I-NEXT:    add a0, a0, a1
156; RV64I-NEXT:    ret
157;
158; RV64XTHEADBA-LABEL: addmul12:
159; RV64XTHEADBA:       # %bb.0:
160; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
161; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
162; RV64XTHEADBA-NEXT:    ret
163  %c = mul i64 %a, 12
164  %d = add i64 %c, %b
165  ret i64 %d
166}
167
168define i64 @addmul18(i64 %a, i64 %b) {
169; RV64I-LABEL: addmul18:
170; RV64I:       # %bb.0:
171; RV64I-NEXT:    li a2, 18
172; RV64I-NEXT:    mul a0, a0, a2
173; RV64I-NEXT:    add a0, a0, a1
174; RV64I-NEXT:    ret
175;
176; RV64XTHEADBA-LABEL: addmul18:
177; RV64XTHEADBA:       # %bb.0:
178; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
179; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
180; RV64XTHEADBA-NEXT:    ret
181  %c = mul i64 %a, 18
182  %d = add i64 %c, %b
183  ret i64 %d
184}
185
186define i64 @addmul20(i64 %a, i64 %b) {
187; RV64I-LABEL: addmul20:
188; RV64I:       # %bb.0:
189; RV64I-NEXT:    li a2, 20
190; RV64I-NEXT:    mul a0, a0, a2
191; RV64I-NEXT:    add a0, a0, a1
192; RV64I-NEXT:    ret
193;
194; RV64XTHEADBA-LABEL: addmul20:
195; RV64XTHEADBA:       # %bb.0:
196; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
197; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
198; RV64XTHEADBA-NEXT:    ret
199  %c = mul i64 %a, 20
200  %d = add i64 %c, %b
201  ret i64 %d
202}
203
204define i64 @addmul22(i64 %a, i64 %b) {
205; CHECK-LABEL: addmul22:
206; CHECK:       # %bb.0:
207; CHECK-NEXT:    li a2, 22
208; CHECK-NEXT:    mul a0, a0, a2
209; CHECK-NEXT:    add a0, a0, a1
210; CHECK-NEXT:    ret
211  %c = mul i64 %a, 22
212  %d = add i64 %c, %b
213  ret i64 %d
214}
215
216define i64 @addmul24(i64 %a, i64 %b) {
217; RV64I-LABEL: addmul24:
218; RV64I:       # %bb.0:
219; RV64I-NEXT:    slli a2, a0, 3
220; RV64I-NEXT:    slli a0, a0, 5
221; RV64I-NEXT:    sub a0, a0, a2
222; RV64I-NEXT:    add a0, a0, a1
223; RV64I-NEXT:    ret
224;
225; RV64XTHEADBA-LABEL: addmul24:
226; RV64XTHEADBA:       # %bb.0:
227; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
228; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
229; RV64XTHEADBA-NEXT:    ret
230  %c = mul i64 %a, 24
231  %d = add i64 %c, %b
232  ret i64 %d
233}
234
235define i64 @addmul36(i64 %a, i64 %b) {
236; RV64I-LABEL: addmul36:
237; RV64I:       # %bb.0:
238; RV64I-NEXT:    li a2, 36
239; RV64I-NEXT:    mul a0, a0, a2
240; RV64I-NEXT:    add a0, a0, a1
241; RV64I-NEXT:    ret
242;
243; RV64XTHEADBA-LABEL: addmul36:
244; RV64XTHEADBA:       # %bb.0:
245; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
246; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
247; RV64XTHEADBA-NEXT:    ret
248  %c = mul i64 %a, 36
249  %d = add i64 %c, %b
250  ret i64 %d
251}
252
253define i64 @addmul40(i64 %a, i64 %b) {
254; RV64I-LABEL: addmul40:
255; RV64I:       # %bb.0:
256; RV64I-NEXT:    li a2, 40
257; RV64I-NEXT:    mul a0, a0, a2
258; RV64I-NEXT:    add a0, a0, a1
259; RV64I-NEXT:    ret
260;
261; RV64XTHEADBA-LABEL: addmul40:
262; RV64XTHEADBA:       # %bb.0:
263; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
264; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
265; RV64XTHEADBA-NEXT:    ret
266  %c = mul i64 %a, 40
267  %d = add i64 %c, %b
268  ret i64 %d
269}
270
271define i64 @addmul72(i64 %a, i64 %b) {
272; RV64I-LABEL: addmul72:
273; RV64I:       # %bb.0:
274; RV64I-NEXT:    li a2, 72
275; RV64I-NEXT:    mul a0, a0, a2
276; RV64I-NEXT:    add a0, a0, a1
277; RV64I-NEXT:    ret
278;
279; RV64XTHEADBA-LABEL: addmul72:
280; RV64XTHEADBA:       # %bb.0:
281; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
282; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
283; RV64XTHEADBA-NEXT:    ret
284  %c = mul i64 %a, 72
285  %d = add i64 %c, %b
286  ret i64 %d
287}
288
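; mul50 through addmul200 factor the constant into (2^i + 1) terms that map
; onto th.addsl, with any remaining power of two folded into a final slli:
; 50 = 5 * 5 * 2, 180 = 5 * 9 * 4, 200 = 5 * 5 * 8.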
289define i64 @mul50(i64 %a) {
290; RV64I-LABEL: mul50:
291; RV64I:       # %bb.0:
292; RV64I-NEXT:    li a1, 50
293; RV64I-NEXT:    mul a0, a0, a1
294; RV64I-NEXT:    ret
295;
296; RV64XTHEADBA-LABEL: mul50:
297; RV64XTHEADBA:       # %bb.0:
298; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
299; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
300; RV64XTHEADBA-NEXT:    slli a0, a0, 1
301; RV64XTHEADBA-NEXT:    ret
302  %c = mul i64 %a, 50
303  ret i64 %c
304}
305
306define i64 @addmul50(i64 %a, i64 %b) {
307; RV64I-LABEL: addmul50:
308; RV64I:       # %bb.0:
309; RV64I-NEXT:    li a2, 50
310; RV64I-NEXT:    mul a0, a0, a2
311; RV64I-NEXT:    add a0, a0, a1
312; RV64I-NEXT:    ret
313;
314; RV64XTHEADBA-LABEL: addmul50:
315; RV64XTHEADBA:       # %bb.0:
316; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
317; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
318; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
319; RV64XTHEADBA-NEXT:    ret
320  %c = mul i64 %a, 50
321  %d = add i64 %c, %b
322  ret i64 %d
323}
324
325define i64 @mul100(i64 %a) {
326; RV64I-LABEL: mul100:
327; RV64I:       # %bb.0:
328; RV64I-NEXT:    li a1, 100
329; RV64I-NEXT:    mul a0, a0, a1
330; RV64I-NEXT:    ret
331;
332; RV64XTHEADBA-LABEL: mul100:
333; RV64XTHEADBA:       # %bb.0:
334; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
335; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
336; RV64XTHEADBA-NEXT:    slli a0, a0, 2
337; RV64XTHEADBA-NEXT:    ret
338  %c = mul i64 %a, 100
339  ret i64 %c
340}
341
342define i64 @addmul100(i64 %a, i64 %b) {
343; RV64I-LABEL: addmul100:
344; RV64I:       # %bb.0:
345; RV64I-NEXT:    li a2, 100
346; RV64I-NEXT:    mul a0, a0, a2
347; RV64I-NEXT:    add a0, a0, a1
348; RV64I-NEXT:    ret
349;
350; RV64XTHEADBA-LABEL: addmul100:
351; RV64XTHEADBA:       # %bb.0:
352; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
353; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
354; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
355; RV64XTHEADBA-NEXT:    ret
356  %c = mul i64 %a, 100
357  %d = add i64 %c, %b
358  ret i64 %d
359}
360
361define i64 @mul162(i64 %a) {
362; RV64I-LABEL: mul162:
363; RV64I:       # %bb.0:
364; RV64I-NEXT:    li a1, 162
365; RV64I-NEXT:    mul a0, a0, a1
366; RV64I-NEXT:    ret
367;
368; RV64XTHEADBA-LABEL: mul162:
369; RV64XTHEADBA:       # %bb.0:
370; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
371; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
372; RV64XTHEADBA-NEXT:    slli a0, a0, 1
373; RV64XTHEADBA-NEXT:    ret
374  %c = mul i64 %a, 162
375  ret i64 %c
376}
377
378define i64 @addmul162(i64 %a, i64 %b) {
379; RV64I-LABEL: addmul162:
380; RV64I:       # %bb.0:
381; RV64I-NEXT:    li a2, 162
382; RV64I-NEXT:    mul a0, a0, a2
383; RV64I-NEXT:    add a0, a0, a1
384; RV64I-NEXT:    ret
385;
386; RV64XTHEADBA-LABEL: addmul162:
387; RV64XTHEADBA:       # %bb.0:
388; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
389; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
390; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
391; RV64XTHEADBA-NEXT:    ret
392  %c = mul i64 %a, 162
393  %d = add i64 %c, %b
394  ret i64 %d
395}
396
397define i64 @mul180(i64 %a) {
398; RV64I-LABEL: mul180:
399; RV64I:       # %bb.0:
400; RV64I-NEXT:    li a1, 180
401; RV64I-NEXT:    mul a0, a0, a1
402; RV64I-NEXT:    ret
403;
404; RV64XTHEADBA-LABEL: mul180:
405; RV64XTHEADBA:       # %bb.0:
406; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
407; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
408; RV64XTHEADBA-NEXT:    slli a0, a0, 2
409; RV64XTHEADBA-NEXT:    ret
410  %c = mul i64 %a, 180
411  ret i64 %c
412}
413
414define i64 @addmul180(i64 %a, i64 %b) {
415; RV64I-LABEL: addmul180:
416; RV64I:       # %bb.0:
417; RV64I-NEXT:    li a2, 180
418; RV64I-NEXT:    mul a0, a0, a2
419; RV64I-NEXT:    add a0, a0, a1
420; RV64I-NEXT:    ret
421;
422; RV64XTHEADBA-LABEL: addmul180:
423; RV64XTHEADBA:       # %bb.0:
424; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
425; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
426; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
427; RV64XTHEADBA-NEXT:    ret
428  %c = mul i64 %a, 180
429  %d = add i64 %c, %b
430  ret i64 %d
431}
432
433define i64 @add255mul180(i64 %a) {
434; RV64I-LABEL: add255mul180:
435; RV64I:       # %bb.0:
436; RV64I-NEXT:    li a1, 180
437; RV64I-NEXT:    mul a0, a0, a1
438; RV64I-NEXT:    addi a0, a0, 255
439; RV64I-NEXT:    ret
440;
441; RV64XTHEADBA-LABEL: add255mul180:
442; RV64XTHEADBA:       # %bb.0:
443; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
444; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
445; RV64XTHEADBA-NEXT:    slli a0, a0, 2
446; RV64XTHEADBA-NEXT:    addi a0, a0, 255
447; RV64XTHEADBA-NEXT:    ret
448  %c = mul i64 %a, 180
449  %d = add i64 %c, 255
450  ret i64 %d
451}
452
453define i64 @mul200(i64 %a) {
454; RV64I-LABEL: mul200:
455; RV64I:       # %bb.0:
456; RV64I-NEXT:    li a1, 200
457; RV64I-NEXT:    mul a0, a0, a1
458; RV64I-NEXT:    ret
459;
460; RV64XTHEADBA-LABEL: mul200:
461; RV64XTHEADBA:       # %bb.0:
462; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
463; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
464; RV64XTHEADBA-NEXT:    slli a0, a0, 3
465; RV64XTHEADBA-NEXT:    ret
466  %c = mul i64 %a, 200
467  ret i64 %c
468}
469
470define i64 @addmul200(i64 %a, i64 %b) {
471; RV64I-LABEL: addmul200:
472; RV64I:       # %bb.0:
473; RV64I-NEXT:    li a2, 200
474; RV64I-NEXT:    mul a0, a0, a2
475; RV64I-NEXT:    add a0, a0, a1
476; RV64I-NEXT:    ret
477;
478; RV64XTHEADBA-LABEL: addmul200:
479; RV64XTHEADBA:       # %bb.0:
480; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
481; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
482; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
483; RV64XTHEADBA-NEXT:    ret
484  %c = mul i64 %a, 200
485  %d = add i64 %c, %b
486  ret i64 %d
487}
488
489define i64 @addmul4096(i64 %a, i64 %b) {
490; CHECK-LABEL: addmul4096:
491; CHECK:       # %bb.0:
492; CHECK-NEXT:    slli a0, a0, 12
493; CHECK-NEXT:    add a0, a0, a1
494; CHECK-NEXT:    ret
495  %c = mul i64 %a, 4096
496  %d = add i64 %c, %b
497  ret i64 %d
498}
499
500define i64 @addmul4230(i64 %a, i64 %b) {
501; CHECK-LABEL: addmul4230:
502; CHECK:       # %bb.0:
503; CHECK-NEXT:    lui a2, 1
504; CHECK-NEXT:    addiw a2, a2, 134
505; CHECK-NEXT:    mul a0, a0, a2
506; CHECK-NEXT:    add a0, a0, a1
507; CHECK-NEXT:    ret
508  %c = mul i64 %a, 4230
509  %d = add i64 %c, %b
510  ret i64 %d
511}
512
513define i64 @mul96(i64 %a) {
514; RV64I-LABEL: mul96:
515; RV64I:       # %bb.0:
516; RV64I-NEXT:    slli a1, a0, 5
517; RV64I-NEXT:    slli a0, a0, 7
518; RV64I-NEXT:    sub a0, a0, a1
519; RV64I-NEXT:    ret
520;
521; RV64XTHEADBA-LABEL: mul96:
522; RV64XTHEADBA:       # %bb.0:
523; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
524; RV64XTHEADBA-NEXT:    slli a0, a0, 5
525; RV64XTHEADBA-NEXT:    ret
526  %c = mul i64 %a, 96
527  ret i64 %c
528}
529
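; Constants just below or above a power of two are handled as 2^k -/+ (2^i + 1):
; 119*a = (a << 7) - 9*a with 9*a from th.addsl, and 131*a = (a << 7) + 3*a.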
530define i64 @mul119(i64 %a) {
531; RV64I-LABEL: mul119:
532; RV64I:       # %bb.0:
533; RV64I-NEXT:    li a1, 119
534; RV64I-NEXT:    mul a0, a0, a1
535; RV64I-NEXT:    ret
536;
537; RV64XTHEADBA-LABEL: mul119:
538; RV64XTHEADBA:       # %bb.0:
539; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 3
540; RV64XTHEADBA-NEXT:    slli a0, a0, 7
541; RV64XTHEADBA-NEXT:    sub a0, a0, a1
542; RV64XTHEADBA-NEXT:    ret
543  %c = mul i64 %a, 119
544  ret i64 %c
545}
546
547define i64 @mul123(i64 %a) {
548; RV64I-LABEL: mul123:
549; RV64I:       # %bb.0:
550; RV64I-NEXT:    li a1, 123
551; RV64I-NEXT:    mul a0, a0, a1
552; RV64I-NEXT:    ret
553;
554; RV64XTHEADBA-LABEL: mul123:
555; RV64XTHEADBA:       # %bb.0:
556; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 2
557; RV64XTHEADBA-NEXT:    slli a0, a0, 7
558; RV64XTHEADBA-NEXT:    sub a0, a0, a1
559; RV64XTHEADBA-NEXT:    ret
560  %c = mul i64 %a, 123
561  ret i64 %c
562}
563
564define i64 @mul125(i64 %a) {
565; RV64I-LABEL: mul125:
566; RV64I:       # %bb.0:
567; RV64I-NEXT:    li a1, 125
568; RV64I-NEXT:    mul a0, a0, a1
569; RV64I-NEXT:    ret
570;
571; RV64XTHEADBA-LABEL: mul125:
572; RV64XTHEADBA:       # %bb.0:
573; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 1
574; RV64XTHEADBA-NEXT:    slli a0, a0, 7
575; RV64XTHEADBA-NEXT:    sub a0, a0, a1
576; RV64XTHEADBA-NEXT:    ret
577  %c = mul i64 %a, 125
578  ret i64 %c
579}
580
581define i64 @mul131(i64 %a) {
582; RV64I-LABEL: mul131:
583; RV64I:       # %bb.0:
584; RV64I-NEXT:    li a1, 131
585; RV64I-NEXT:    mul a0, a0, a1
586; RV64I-NEXT:    ret
587;
588; RV64XTHEADBA-LABEL: mul131:
589; RV64XTHEADBA:       # %bb.0:
590; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 1
591; RV64XTHEADBA-NEXT:    slli a0, a0, 7
592; RV64XTHEADBA-NEXT:    add a0, a0, a1
593; RV64XTHEADBA-NEXT:    ret
594  %c = mul i64 %a, 131
595  ret i64 %c
596}
597
598define i64 @mul133(i64 %a) {
599; RV64I-LABEL: mul133:
600; RV64I:       # %bb.0:
601; RV64I-NEXT:    li a1, 133
602; RV64I-NEXT:    mul a0, a0, a1
603; RV64I-NEXT:    ret
604;
605; RV64XTHEADBA-LABEL: mul133:
606; RV64XTHEADBA:       # %bb.0:
607; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 2
608; RV64XTHEADBA-NEXT:    slli a0, a0, 7
609; RV64XTHEADBA-NEXT:    add a0, a0, a1
610; RV64XTHEADBA-NEXT:    ret
611  %c = mul i64 %a, 133
612  ret i64 %c
613}
614
615define i64 @mul137(i64 %a) {
616; RV64I-LABEL: mul137:
617; RV64I:       # %bb.0:
618; RV64I-NEXT:    li a1, 137
619; RV64I-NEXT:    mul a0, a0, a1
620; RV64I-NEXT:    ret
621;
622; RV64XTHEADBA-LABEL: mul137:
623; RV64XTHEADBA:       # %bb.0:
624; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 3
625; RV64XTHEADBA-NEXT:    slli a0, a0, 7
626; RV64XTHEADBA-NEXT:    add a0, a0, a1
627; RV64XTHEADBA-NEXT:    ret
628  %c = mul i64 %a, 137
629  ret i64 %c
630}
631
632define i64 @mul160(i64 %a) {
633; RV64I-LABEL: mul160:
634; RV64I:       # %bb.0:
635; RV64I-NEXT:    li a1, 160
636; RV64I-NEXT:    mul a0, a0, a1
637; RV64I-NEXT:    ret
638;
639; RV64XTHEADBA-LABEL: mul160:
640; RV64XTHEADBA:       # %bb.0:
641; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
642; RV64XTHEADBA-NEXT:    slli a0, a0, 5
643; RV64XTHEADBA-NEXT:    ret
644  %c = mul i64 %a, 160
645  ret i64 %c
646}
647
648define i64 @mul288(i64 %a) {
649; RV64I-LABEL: mul288:
650; RV64I:       # %bb.0:
651; RV64I-NEXT:    li a1, 288
652; RV64I-NEXT:    mul a0, a0, a1
653; RV64I-NEXT:    ret
654;
655; RV64XTHEADBA-LABEL: mul288:
656; RV64XTHEADBA:       # %bb.0:
657; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
658; RV64XTHEADBA-NEXT:    slli a0, a0, 5
659; RV64XTHEADBA-NEXT:    ret
660  %c = mul i64 %a, 288
661  ret i64 %c
662}
663
664define i64 @sh1add_imm(i64 %0) {
665; CHECK-LABEL: sh1add_imm:
666; CHECK:       # %bb.0:
667; CHECK-NEXT:    slli a0, a0, 1
668; CHECK-NEXT:    addi a0, a0, 5
669; CHECK-NEXT:    ret
670  %a = shl i64 %0, 1
671  %b = add i64 %a, 5
672  ret i64 %b
673}
674
675define i64 @sh2add_imm(i64 %0) {
676; CHECK-LABEL: sh2add_imm:
677; CHECK:       # %bb.0:
678; CHECK-NEXT:    slli a0, a0, 2
679; CHECK-NEXT:    addi a0, a0, -6
680; CHECK-NEXT:    ret
681  %a = shl i64 %0, 2
682  %b = add i64 %a, -6
683  ret i64 %b
684}
685
686define i64 @sh3add_imm(i64 %0) {
687; CHECK-LABEL: sh3add_imm:
688; CHECK:       # %bb.0:
689; CHECK-NEXT:    slli a0, a0, 3
690; CHECK-NEXT:    addi a0, a0, 7
691; CHECK-NEXT:    ret
692  %a = shl i64 %0, 3
693  %b = add i64 %a, 7
694  ret i64 %b
695}
696
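; A constant of the form 2^k + 2^i with i in 1..3 needs only an slli for the
; large shift and a th.addsl that folds in the small one:
; 258*a = (a << 8) + (a << 1).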
697define i64 @mul258(i64 %a) {
698; RV64I-LABEL: mul258:
699; RV64I:       # %bb.0:
700; RV64I-NEXT:    li a1, 258
701; RV64I-NEXT:    mul a0, a0, a1
702; RV64I-NEXT:    ret
703;
704; RV64XTHEADBA-LABEL: mul258:
705; RV64XTHEADBA:       # %bb.0:
706; RV64XTHEADBA-NEXT:    slli a1, a0, 8
707; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
708; RV64XTHEADBA-NEXT:    ret
709  %c = mul i64 %a, 258
710  ret i64 %c
711}
712
713define i64 @mul260(i64 %a) {
714; RV64I-LABEL: mul260:
715; RV64I:       # %bb.0:
716; RV64I-NEXT:    li a1, 260
717; RV64I-NEXT:    mul a0, a0, a1
718; RV64I-NEXT:    ret
719;
720; RV64XTHEADBA-LABEL: mul260:
721; RV64XTHEADBA:       # %bb.0:
722; RV64XTHEADBA-NEXT:    slli a1, a0, 8
723; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
724; RV64XTHEADBA-NEXT:    ret
725  %c = mul i64 %a, 260
726  ret i64 %c
727}
728
729define i64 @mul264(i64 %a) {
730; RV64I-LABEL: mul264:
731; RV64I:       # %bb.0:
732; RV64I-NEXT:    li a1, 264
733; RV64I-NEXT:    mul a0, a0, a1
734; RV64I-NEXT:    ret
735;
736; RV64XTHEADBA-LABEL: mul264:
737; RV64XTHEADBA:       # %bb.0:
738; RV64XTHEADBA-NEXT:    slli a1, a0, 8
739; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
740; RV64XTHEADBA-NEXT:    ret
741  %c = mul i64 %a, 264
742  ret i64 %c
743}
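
; Constants that factor as (2^i + 1) * (2^j + 1), or as 2^j * (2^i + 1) + 1,
; need just two th.addsl and no standalone shift: 11*a = a + (5*a << 1),
; 13*a = a + (3*a << 2), 25*a = 5*(5*a).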
744define i64 @mul11(i64 %a) {
745; RV64I-LABEL: mul11:
746; RV64I:       # %bb.0:
747; RV64I-NEXT:    li a1, 11
748; RV64I-NEXT:    mul a0, a0, a1
749; RV64I-NEXT:    ret
750;
751; RV64XTHEADBA-LABEL: mul11:
752; RV64XTHEADBA:       # %bb.0:
753; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 2
754; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 1
755; RV64XTHEADBA-NEXT:    ret
756  %c = mul i64 %a, 11
757  ret i64 %c
758}
759
760define i64 @mul19(i64 %a) {
761; RV64I-LABEL: mul19:
762; RV64I:       # %bb.0:
763; RV64I-NEXT:    li a1, 19
764; RV64I-NEXT:    mul a0, a0, a1
765; RV64I-NEXT:    ret
766;
767; RV64XTHEADBA-LABEL: mul19:
768; RV64XTHEADBA:       # %bb.0:
769; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 3
770; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 1
771; RV64XTHEADBA-NEXT:    ret
772  %c = mul i64 %a, 19
773  ret i64 %c
774}
775
776define i64 @mul13(i64 %a) {
777; RV64I-LABEL: mul13:
778; RV64I:       # %bb.0:
779; RV64I-NEXT:    li a1, 13
780; RV64I-NEXT:    mul a0, a0, a1
781; RV64I-NEXT:    ret
782;
783; RV64XTHEADBA-LABEL: mul13:
784; RV64XTHEADBA:       # %bb.0:
785; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 1
786; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
787; RV64XTHEADBA-NEXT:    ret
788  %c = mul i64 %a, 13
789  ret i64 %c
790}
791
792define i64 @mul21(i64 %a) {
793; RV64I-LABEL: mul21:
794; RV64I:       # %bb.0:
795; RV64I-NEXT:    li a1, 21
796; RV64I-NEXT:    mul a0, a0, a1
797; RV64I-NEXT:    ret
798;
799; RV64XTHEADBA-LABEL: mul21:
800; RV64XTHEADBA:       # %bb.0:
801; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 2
802; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
803; RV64XTHEADBA-NEXT:    ret
804  %c = mul i64 %a, 21
805  ret i64 %c
806}
807
808define i64 @mul37(i64 %a) {
809; RV64I-LABEL: mul37:
810; RV64I:       # %bb.0:
811; RV64I-NEXT:    li a1, 37
812; RV64I-NEXT:    mul a0, a0, a1
813; RV64I-NEXT:    ret
814;
815; RV64XTHEADBA-LABEL: mul37:
816; RV64XTHEADBA:       # %bb.0:
817; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 3
818; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
819; RV64XTHEADBA-NEXT:    ret
820  %c = mul i64 %a, 37
821  ret i64 %c
822}
823
824define i64 @mul25(i64 %a) {
825; RV64I-LABEL: mul25:
826; RV64I:       # %bb.0:
827; RV64I-NEXT:    li a1, 25
828; RV64I-NEXT:    mul a0, a0, a1
829; RV64I-NEXT:    ret
830;
831; RV64XTHEADBA-LABEL: mul25:
832; RV64XTHEADBA:       # %bb.0:
833; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
834; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
835; RV64XTHEADBA-NEXT:    ret
836  %c = mul i64 %a, 25
837  ret i64 %c
838}
839
840define i64 @mul41(i64 %a) {
841; RV64I-LABEL: mul41:
842; RV64I:       # %bb.0:
843; RV64I-NEXT:    li a1, 41
844; RV64I-NEXT:    mul a0, a0, a1
845; RV64I-NEXT:    ret
846;
847; RV64XTHEADBA-LABEL: mul41:
848; RV64XTHEADBA:       # %bb.0:
849; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 2
850; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
851; RV64XTHEADBA-NEXT:    ret
852  %c = mul i64 %a, 41
853  ret i64 %c
854}
855
856define i64 @mul73(i64 %a) {
857; RV64I-LABEL: mul73:
858; RV64I:       # %bb.0:
859; RV64I-NEXT:    li a1, 73
860; RV64I-NEXT:    mul a0, a0, a1
861; RV64I-NEXT:    ret
862;
863; RV64XTHEADBA-LABEL: mul73:
864; RV64XTHEADBA:       # %bb.0:
865; RV64XTHEADBA-NEXT:    th.addsl a1, a0, a0, 3
866; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
867; RV64XTHEADBA-NEXT:    ret
868  %c = mul i64 %a, 73
869  ret i64 %c
870}
871
872define i64 @mul27(i64 %a) {
873; RV64I-LABEL: mul27:
874; RV64I:       # %bb.0:
875; RV64I-NEXT:    li a1, 27
876; RV64I-NEXT:    mul a0, a0, a1
877; RV64I-NEXT:    ret
878;
879; RV64XTHEADBA-LABEL: mul27:
880; RV64XTHEADBA:       # %bb.0:
881; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
882; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
883; RV64XTHEADBA-NEXT:    ret
884  %c = mul i64 %a, 27
885  ret i64 %c
886}
887
888define i64 @mul45(i64 %a) {
889; RV64I-LABEL: mul45:
890; RV64I:       # %bb.0:
891; RV64I-NEXT:    li a1, 45
892; RV64I-NEXT:    mul a0, a0, a1
893; RV64I-NEXT:    ret
894;
895; RV64XTHEADBA-LABEL: mul45:
896; RV64XTHEADBA:       # %bb.0:
897; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
898; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
899; RV64XTHEADBA-NEXT:    ret
900  %c = mul i64 %a, 45
901  ret i64 %c
902}
903
904define i64 @mul81(i64 %a) {
905; RV64I-LABEL: mul81:
906; RV64I:       # %bb.0:
907; RV64I-NEXT:    li a1, 81
908; RV64I-NEXT:    mul a0, a0, a1
909; RV64I-NEXT:    ret
910;
911; RV64XTHEADBA-LABEL: mul81:
912; RV64XTHEADBA:       # %bb.0:
913; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
914; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
915; RV64XTHEADBA-NEXT:    ret
916  %c = mul i64 %a, 81
917  ret i64 %c
918}
919
920define i64 @mul4098(i64 %a) {
921; RV64I-LABEL: mul4098:
922; RV64I:       # %bb.0:
923; RV64I-NEXT:    slli a1, a0, 1
924; RV64I-NEXT:    slli a0, a0, 12
925; RV64I-NEXT:    add a0, a0, a1
926; RV64I-NEXT:    ret
927;
928; RV64XTHEADBA-LABEL: mul4098:
929; RV64XTHEADBA:       # %bb.0:
930; RV64XTHEADBA-NEXT:    slli a1, a0, 12
931; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 1
932; RV64XTHEADBA-NEXT:    ret
933  %c = mul i64 %a, 4098
934  ret i64 %c
935}
936
937define i64 @mul4100(i64 %a) {
938; RV64I-LABEL: mul4100:
939; RV64I:       # %bb.0:
940; RV64I-NEXT:    slli a1, a0, 2
941; RV64I-NEXT:    slli a0, a0, 12
942; RV64I-NEXT:    add a0, a0, a1
943; RV64I-NEXT:    ret
944;
945; RV64XTHEADBA-LABEL: mul4100:
946; RV64XTHEADBA:       # %bb.0:
947; RV64XTHEADBA-NEXT:    slli a1, a0, 12
948; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 2
949; RV64XTHEADBA-NEXT:    ret
950  %c = mul i64 %a, 4100
951  ret i64 %c
952}
953
954define i64 @mul4104(i64 %a) {
955; RV64I-LABEL: mul4104:
956; RV64I:       # %bb.0:
957; RV64I-NEXT:    slli a1, a0, 3
958; RV64I-NEXT:    slli a0, a0, 12
959; RV64I-NEXT:    add a0, a0, a1
960; RV64I-NEXT:    ret
961;
962; RV64XTHEADBA-LABEL: mul4104:
963; RV64XTHEADBA:       # %bb.0:
964; RV64XTHEADBA-NEXT:    slli a1, a0, 12
965; RV64XTHEADBA-NEXT:    th.addsl a0, a1, a0, 3
966; RV64XTHEADBA-NEXT:    ret
967  %c = mul i64 %a, 4104
968  ret i64 %c
969}
970
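; The mulw tests are the i32 versions: the 3/5/9 factor still uses th.addsl
; and the final power-of-two shift becomes the sign-extending slliw.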
971define signext i32 @mulw192(i32 signext %a) {
972; RV64I-LABEL: mulw192:
973; RV64I:       # %bb.0:
974; RV64I-NEXT:    slli a1, a0, 6
975; RV64I-NEXT:    slli a0, a0, 8
976; RV64I-NEXT:    subw a0, a0, a1
977; RV64I-NEXT:    ret
978;
979; RV64XTHEADBA-LABEL: mulw192:
980; RV64XTHEADBA:       # %bb.0:
981; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
982; RV64XTHEADBA-NEXT:    slliw a0, a0, 6
983; RV64XTHEADBA-NEXT:    ret
984  %c = mul i32 %a, 192
985  ret i32 %c
986}
987
988define signext i32 @mulw320(i32 signext %a) {
989; RV64I-LABEL: mulw320:
990; RV64I:       # %bb.0:
991; RV64I-NEXT:    li a1, 320
992; RV64I-NEXT:    mulw a0, a0, a1
993; RV64I-NEXT:    ret
994;
995; RV64XTHEADBA-LABEL: mulw320:
996; RV64XTHEADBA:       # %bb.0:
997; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
998; RV64XTHEADBA-NEXT:    slliw a0, a0, 6
999; RV64XTHEADBA-NEXT:    ret
1000  %c = mul i32 %a, 320
1001  ret i32 %c
1002}
1003
1004define signext i32 @mulw576(i32 signext %a) {
1005; RV64I-LABEL: mulw576:
1006; RV64I:       # %bb.0:
1007; RV64I-NEXT:    li a1, 576
1008; RV64I-NEXT:    mulw a0, a0, a1
1009; RV64I-NEXT:    ret
1010;
1011; RV64XTHEADBA-LABEL: mulw576:
1012; RV64XTHEADBA:       # %bb.0:
1013; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 3
1014; RV64XTHEADBA-NEXT:    slliw a0, a0, 6
1015; RV64XTHEADBA-NEXT:    ret
1016  %c = mul i32 %a, 576
1017  ret i32 %c
1018}
1019
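; Adding a constant that is a simm12 scaled by 2, 4, or 8 materializes the
; small constant with li and folds the scale into th.addsl:
; 4104 = 1026 << 2 and 8208 = 1026 << 3. add4104_2 covers the 'or disjoint'
; form of the same addition.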
1020define i64 @add4104(i64 %a) {
1021; RV64I-LABEL: add4104:
1022; RV64I:       # %bb.0:
1023; RV64I-NEXT:    lui a1, 1
1024; RV64I-NEXT:    addiw a1, a1, 8
1025; RV64I-NEXT:    add a0, a0, a1
1026; RV64I-NEXT:    ret
1027;
1028; RV64XTHEADBA-LABEL: add4104:
1029; RV64XTHEADBA:       # %bb.0:
1030; RV64XTHEADBA-NEXT:    li a1, 1026
1031; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1032; RV64XTHEADBA-NEXT:    ret
1033  %c = add i64 %a, 4104
1034  ret i64 %c
1035}
1036
1037define i64 @add4104_2(i64 %a) {
1038; RV64I-LABEL: add4104_2:
1039; RV64I:       # %bb.0:
1040; RV64I-NEXT:    lui a1, 1
1041; RV64I-NEXT:    addiw a1, a1, 8
1042; RV64I-NEXT:    or a0, a0, a1
1043; RV64I-NEXT:    ret
1044;
1045; RV64XTHEADBA-LABEL: add4104_2:
1046; RV64XTHEADBA:       # %bb.0:
1047; RV64XTHEADBA-NEXT:    li a1, 1026
1048; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1049; RV64XTHEADBA-NEXT:    ret
1050  %c = or disjoint i64 %a, 4104
1051  ret i64 %c
1052}
1053
1054define i64 @add8208(i64 %a) {
1055; RV64I-LABEL: add8208:
1056; RV64I:       # %bb.0:
1057; RV64I-NEXT:    lui a1, 2
1058; RV64I-NEXT:    addiw a1, a1, 16
1059; RV64I-NEXT:    add a0, a0, a1
1060; RV64I-NEXT:    ret
1061;
1062; RV64XTHEADBA-LABEL: add8208:
1063; RV64XTHEADBA:       # %bb.0:
1064; RV64XTHEADBA-NEXT:    li a1, 1026
1065; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1066; RV64XTHEADBA-NEXT:    ret
1067  %c = add i64 %a, 8208
1068  ret i64 %c
1069}
1070
; Make sure we prefer LUI for the 8192 constant instead of using th.addsl.
1072define signext i32 @add8192_i32(i32 signext %a) {
1073; CHECK-LABEL: add8192_i32:
1074; CHECK:       # %bb.0:
1075; CHECK-NEXT:    lui a1, 2
1076; CHECK-NEXT:    addw a0, a0, a1
1077; CHECK-NEXT:    ret
1078  %c = add i32 %a, 8192
1079  ret i32 %c
1080}
1081
; Make sure we prefer LUI for the 8192 constant instead of using th.addsl.
1083define i64 @add8192(i64 %a) {
1084; CHECK-LABEL: add8192:
1085; CHECK:       # %bb.0:
1086; CHECK-NEXT:    lui a1, 2
1087; CHECK-NEXT:    add a0, a0, a1
1088; CHECK-NEXT:    ret
1089  %c = add i64 %a, 8192
1090  ret i64 %c
1091}
1092
1093define signext i32 @addshl32_5_6(i32 signext %a, i32 signext %b) {
1094; CHECK-LABEL: addshl32_5_6:
1095; CHECK:       # %bb.0:
1096; CHECK-NEXT:    slli a0, a0, 5
1097; CHECK-NEXT:    slli a1, a1, 6
1098; CHECK-NEXT:    addw a0, a0, a1
1099; CHECK-NEXT:    ret
1100  %c = shl i32 %a, 5
1101  %d = shl i32 %b, 6
1102  %e = add i32 %c, %d
1103  ret i32 %e
1104}
1105
1106define i64 @addshl64_5_6(i64 %a, i64 %b) {
1107; CHECK-LABEL: addshl64_5_6:
1108; CHECK:       # %bb.0:
1109; CHECK-NEXT:    slli a0, a0, 5
1110; CHECK-NEXT:    slli a1, a1, 6
1111; CHECK-NEXT:    add a0, a0, a1
1112; CHECK-NEXT:    ret
1113  %c = shl i64 %a, 5
1114  %d = shl i64 %b, 6
1115  %e = add i64 %c, %d
1116  ret i64 %e
1117}
1118
1119define signext i32 @addshl32_5_7(i32 signext %a, i32 signext %b) {
1120; CHECK-LABEL: addshl32_5_7:
1121; CHECK:       # %bb.0:
1122; CHECK-NEXT:    slli a0, a0, 5
1123; CHECK-NEXT:    slli a1, a1, 7
1124; CHECK-NEXT:    addw a0, a0, a1
1125; CHECK-NEXT:    ret
1126  %c = shl i32 %a, 5
1127  %d = shl i32 %b, 7
1128  %e = add i32 %c, %d
1129  ret i32 %e
1130}
1131
1132define i64 @addshl64_5_7(i64 %a, i64 %b) {
1133; CHECK-LABEL: addshl64_5_7:
1134; CHECK:       # %bb.0:
1135; CHECK-NEXT:    slli a0, a0, 5
1136; CHECK-NEXT:    slli a1, a1, 7
1137; CHECK-NEXT:    add a0, a0, a1
1138; CHECK-NEXT:    ret
1139  %c = shl i64 %a, 5
1140  %d = shl i64 %b, 7
1141  %e = add i64 %c, %d
1142  ret i64 %e
1143}
1144
1145define signext i32 @addshl32_5_8(i32 signext %a, i32 signext %b) {
1146; CHECK-LABEL: addshl32_5_8:
1147; CHECK:       # %bb.0:
1148; CHECK-NEXT:    slli a0, a0, 5
1149; CHECK-NEXT:    slli a1, a1, 8
1150; CHECK-NEXT:    addw a0, a0, a1
1151; CHECK-NEXT:    ret
1152  %c = shl i32 %a, 5
1153  %d = shl i32 %b, 8
1154  %e = add i32 %c, %d
1155  ret i32 %e
1156}
1157
1158define i64 @addshl64_5_8(i64 %a, i64 %b) {
1159; CHECK-LABEL: addshl64_5_8:
1160; CHECK:       # %bb.0:
1161; CHECK-NEXT:    slli a0, a0, 5
1162; CHECK-NEXT:    slli a1, a1, 8
1163; CHECK-NEXT:    add a0, a0, a1
1164; CHECK-NEXT:    ret
1165  %c = shl i64 %a, 5
1166  %d = shl i64 %b, 8
1167  %e = add i64 %c, %d
1168  ret i64 %e
1169}
1170
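; When two shifted values are added, only a shift amount of 1-3 can fold into
; th.addsl, so (y << 6) + (z << 3) keeps an slli for the shift by 6 and folds
; the shift-by-3 operand into th.addsl, regardless of how the adds are
; associated with %x.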
1171define i64 @sh6_sh3_add1(i64 noundef %x, i64 noundef %y, i64 noundef %z) {
1172; RV64I-LABEL: sh6_sh3_add1:
1173; RV64I:       # %bb.0: # %entry
1174; RV64I-NEXT:    slli a2, a2, 3
1175; RV64I-NEXT:    slli a1, a1, 6
1176; RV64I-NEXT:    add a1, a1, a2
1177; RV64I-NEXT:    add a0, a1, a0
1178; RV64I-NEXT:    ret
1179;
1180; RV64XTHEADBA-LABEL: sh6_sh3_add1:
1181; RV64XTHEADBA:       # %bb.0: # %entry
1182; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1183; RV64XTHEADBA-NEXT:    th.addsl a1, a1, a2, 3
1184; RV64XTHEADBA-NEXT:    add a0, a1, a0
1185; RV64XTHEADBA-NEXT:    ret
1186entry:
1187  %shl = shl i64 %z, 3
1188  %shl1 = shl i64 %y, 6
1189  %add = add nsw i64 %shl1, %shl
1190  %add2 = add nsw i64 %add, %x
1191  ret i64 %add2
1192}
1193
1194define i64 @sh6_sh3_add2(i64 noundef %x, i64 noundef %y, i64 noundef %z) {
1195; RV64I-LABEL: sh6_sh3_add2:
1196; RV64I:       # %bb.0: # %entry
1197; RV64I-NEXT:    slli a2, a2, 3
1198; RV64I-NEXT:    slli a1, a1, 6
1199; RV64I-NEXT:    add a0, a1, a0
1200; RV64I-NEXT:    add a0, a0, a2
1201; RV64I-NEXT:    ret
1202;
1203; RV64XTHEADBA-LABEL: sh6_sh3_add2:
1204; RV64XTHEADBA:       # %bb.0: # %entry
1205; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1206; RV64XTHEADBA-NEXT:    add a0, a1, a0
1207; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1208; RV64XTHEADBA-NEXT:    ret
1209entry:
1210  %shl = shl i64 %z, 3
1211  %shl1 = shl i64 %y, 6
1212  %add = add nsw i64 %shl1, %x
1213  %add2 = add nsw i64 %add, %shl
1214  ret i64 %add2
1215}
1216
1217define i64 @sh6_sh3_add3(i64 noundef %x, i64 noundef %y, i64 noundef %z) {
1218; RV64I-LABEL: sh6_sh3_add3:
1219; RV64I:       # %bb.0: # %entry
1220; RV64I-NEXT:    slli a2, a2, 3
1221; RV64I-NEXT:    slli a1, a1, 6
1222; RV64I-NEXT:    add a1, a1, a2
1223; RV64I-NEXT:    add a0, a0, a1
1224; RV64I-NEXT:    ret
1225;
1226; RV64XTHEADBA-LABEL: sh6_sh3_add3:
1227; RV64XTHEADBA:       # %bb.0: # %entry
1228; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1229; RV64XTHEADBA-NEXT:    th.addsl a1, a1, a2, 3
1230; RV64XTHEADBA-NEXT:    add a0, a0, a1
1231; RV64XTHEADBA-NEXT:    ret
1232entry:
1233  %shl = shl i64 %z, 3
1234  %shl1 = shl i64 %y, 6
1235  %add = add nsw i64 %shl1, %shl
1236  %add2 = add nsw i64 %x, %add
1237  ret i64 %add2
1238}
1239
1240define i64 @sh6_sh3_add4(i64 noundef %x, i64 noundef %y, i64 noundef %z) {
1241; RV64I-LABEL: sh6_sh3_add4:
1242; RV64I:       # %bb.0: # %entry
1243; RV64I-NEXT:    slli a2, a2, 3
1244; RV64I-NEXT:    slli a1, a1, 6
1245; RV64I-NEXT:    add a0, a0, a2
1246; RV64I-NEXT:    add a0, a0, a1
1247; RV64I-NEXT:    ret
1248;
1249; RV64XTHEADBA-LABEL: sh6_sh3_add4:
1250; RV64XTHEADBA:       # %bb.0: # %entry
1251; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1252; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1253; RV64XTHEADBA-NEXT:    add a0, a0, a1
1254; RV64XTHEADBA-NEXT:    ret
1255entry:
1256  %shl = shl i64 %z, 3
1257  %shl1 = shl i64 %y, 6
1258  %add = add nsw i64 %x, %shl
1259  %add2 = add nsw i64 %add, %shl1
1260  ret i64 %add2
1261}
1262
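; The srliw_*/srli_* tests feed a right-shifted index into a scaled gep. When
; the right-shift amount differs from the scale, the scale folds into
; th.addsl after the srliw/srli; when the two amounts are equal the plain
; shift/add sequence is kept for both configurations.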
1263define signext i16 @srliw_1_sh1add(ptr %0, i32 signext %1) {
1264; CHECK-LABEL: srliw_1_sh1add:
1265; CHECK:       # %bb.0:
1266; CHECK-NEXT:    srliw a1, a1, 1
1267; CHECK-NEXT:    slli a1, a1, 1
1268; CHECK-NEXT:    add a0, a0, a1
1269; CHECK-NEXT:    lh a0, 0(a0)
1270; CHECK-NEXT:    ret
1271  %3 = lshr i32 %1, 1
1272  %4 = zext i32 %3 to i64
1273  %5 = getelementptr inbounds i16, ptr %0, i64 %4
1274  %6 = load i16, ptr %5, align 2
1275  ret i16 %6
1276}
1277
1278define signext i32 @srliw_2_sh2add(ptr %0, i32 signext %1) {
1279; CHECK-LABEL: srliw_2_sh2add:
1280; CHECK:       # %bb.0:
1281; CHECK-NEXT:    srliw a1, a1, 2
1282; CHECK-NEXT:    slli a1, a1, 2
1283; CHECK-NEXT:    add a0, a0, a1
1284; CHECK-NEXT:    lw a0, 0(a0)
1285; CHECK-NEXT:    ret
1286  %3 = lshr i32 %1, 2
1287  %4 = zext i32 %3 to i64
1288  %5 = getelementptr inbounds i32, ptr %0, i64 %4
1289  %6 = load i32, ptr %5, align 4
1290  ret i32 %6
1291}
1292
1293define i64 @srliw_3_sh3add(ptr %0, i32 signext %1) {
1294; CHECK-LABEL: srliw_3_sh3add:
1295; CHECK:       # %bb.0:
1296; CHECK-NEXT:    srliw a1, a1, 3
1297; CHECK-NEXT:    slli a1, a1, 3
1298; CHECK-NEXT:    add a0, a0, a1
1299; CHECK-NEXT:    ld a0, 0(a0)
1300; CHECK-NEXT:    ret
1301  %3 = lshr i32 %1, 3
1302  %4 = zext i32 %3 to i64
1303  %5 = getelementptr inbounds i64, ptr %0, i64 %4
1304  %6 = load i64, ptr %5, align 8
1305  ret i64 %6
1306}
1307
1308define signext i32 @srliw_1_sh2add(ptr %0, i32 signext %1) {
1309; RV64I-LABEL: srliw_1_sh2add:
1310; RV64I:       # %bb.0:
1311; RV64I-NEXT:    srliw a1, a1, 1
1312; RV64I-NEXT:    slli a1, a1, 2
1313; RV64I-NEXT:    add a0, a0, a1
1314; RV64I-NEXT:    lw a0, 0(a0)
1315; RV64I-NEXT:    ret
1316;
1317; RV64XTHEADBA-LABEL: srliw_1_sh2add:
1318; RV64XTHEADBA:       # %bb.0:
1319; RV64XTHEADBA-NEXT:    srliw a1, a1, 1
1320; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1321; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1322; RV64XTHEADBA-NEXT:    ret
1323  %3 = lshr i32 %1, 1
1324  %4 = zext i32 %3 to i64
1325  %5 = getelementptr inbounds i32, ptr %0, i64 %4
1326  %6 = load i32, ptr %5, align 4
1327  ret i32 %6
1328}
1329
1330define i64 @srliw_1_sh3add(ptr %0, i32 signext %1) {
1331; RV64I-LABEL: srliw_1_sh3add:
1332; RV64I:       # %bb.0:
1333; RV64I-NEXT:    srliw a1, a1, 1
1334; RV64I-NEXT:    slli a1, a1, 3
1335; RV64I-NEXT:    add a0, a0, a1
1336; RV64I-NEXT:    ld a0, 0(a0)
1337; RV64I-NEXT:    ret
1338;
1339; RV64XTHEADBA-LABEL: srliw_1_sh3add:
1340; RV64XTHEADBA:       # %bb.0:
1341; RV64XTHEADBA-NEXT:    srliw a1, a1, 1
1342; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1343; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1344; RV64XTHEADBA-NEXT:    ret
1345  %3 = lshr i32 %1, 1
1346  %4 = zext i32 %3 to i64
1347  %5 = getelementptr inbounds i64, ptr %0, i64 %4
1348  %6 = load i64, ptr %5, align 8
1349  ret i64 %6
1350}
1351
1352define i64 @srliw_2_sh3add(ptr %0, i32 signext %1) {
1353; RV64I-LABEL: srliw_2_sh3add:
1354; RV64I:       # %bb.0:
1355; RV64I-NEXT:    srliw a1, a1, 2
1356; RV64I-NEXT:    slli a1, a1, 3
1357; RV64I-NEXT:    add a0, a0, a1
1358; RV64I-NEXT:    ld a0, 0(a0)
1359; RV64I-NEXT:    ret
1360;
1361; RV64XTHEADBA-LABEL: srliw_2_sh3add:
1362; RV64XTHEADBA:       # %bb.0:
1363; RV64XTHEADBA-NEXT:    srliw a1, a1, 2
1364; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1365; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1366; RV64XTHEADBA-NEXT:    ret
1367  %3 = lshr i32 %1, 2
1368  %4 = zext i32 %3 to i64
1369  %5 = getelementptr inbounds i64, ptr %0, i64 %4
1370  %6 = load i64, ptr %5, align 8
1371  ret i64 %6
1372}
1373
1374define signext i16 @srliw_2_sh1add(ptr %0, i32 signext %1) {
1375; RV64I-LABEL: srliw_2_sh1add:
1376; RV64I:       # %bb.0:
1377; RV64I-NEXT:    srliw a1, a1, 2
1378; RV64I-NEXT:    slli a1, a1, 1
1379; RV64I-NEXT:    add a0, a0, a1
1380; RV64I-NEXT:    lh a0, 0(a0)
1381; RV64I-NEXT:    ret
1382;
1383; RV64XTHEADBA-LABEL: srliw_2_sh1add:
1384; RV64XTHEADBA:       # %bb.0:
1385; RV64XTHEADBA-NEXT:    srliw a1, a1, 2
1386; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 1
1387; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1388; RV64XTHEADBA-NEXT:    ret
1389  %3 = lshr i32 %1, 2
1390  %4 = zext i32 %3 to i64
1391  %5 = getelementptr inbounds i16, ptr %0, i64 %4
1392  %6 = load i16, ptr %5, align 2
1393  ret i16 %6
1394}
1395
1396
1397define signext i32 @srliw_3_sh2add(ptr %0, i32 signext %1) {
1398; RV64I-LABEL: srliw_3_sh2add:
1399; RV64I:       # %bb.0:
1400; RV64I-NEXT:    srliw a1, a1, 3
1401; RV64I-NEXT:    slli a1, a1, 2
1402; RV64I-NEXT:    add a0, a0, a1
1403; RV64I-NEXT:    lw a0, 0(a0)
1404; RV64I-NEXT:    ret
1405;
1406; RV64XTHEADBA-LABEL: srliw_3_sh2add:
1407; RV64XTHEADBA:       # %bb.0:
1408; RV64XTHEADBA-NEXT:    srliw a1, a1, 3
1409; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1410; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1411; RV64XTHEADBA-NEXT:    ret
1412  %3 = lshr i32 %1, 3
1413  %4 = zext i32 %3 to i64
1414  %5 = getelementptr inbounds i32, ptr %0, i64 %4
1415  %6 = load i32, ptr %5, align 4
1416  ret i32 %6
1417}
1418
1419define i64 @srliw_4_sh3add(ptr %0, i32 signext %1) {
1420; RV64I-LABEL: srliw_4_sh3add:
1421; RV64I:       # %bb.0:
1422; RV64I-NEXT:    srliw a1, a1, 4
1423; RV64I-NEXT:    slli a1, a1, 3
1424; RV64I-NEXT:    add a0, a0, a1
1425; RV64I-NEXT:    ld a0, 0(a0)
1426; RV64I-NEXT:    ret
1427;
1428; RV64XTHEADBA-LABEL: srliw_4_sh3add:
1429; RV64XTHEADBA:       # %bb.0:
1430; RV64XTHEADBA-NEXT:    srliw a1, a1, 4
1431; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1432; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1433; RV64XTHEADBA-NEXT:    ret
1434  %3 = lshr i32 %1, 4
1435  %4 = zext i32 %3 to i64
1436  %5 = getelementptr inbounds i64, ptr %0, i64 %4
1437  %6 = load i64, ptr %5, align 8
1438  ret i64 %6
1439}
1440
1441define signext i32 @srli_1_sh2add(ptr %0, i64 %1) {
1442; RV64I-LABEL: srli_1_sh2add:
1443; RV64I:       # %bb.0:
1444; RV64I-NEXT:    slli a1, a1, 1
1445; RV64I-NEXT:    andi a1, a1, -4
1446; RV64I-NEXT:    add a0, a0, a1
1447; RV64I-NEXT:    lw a0, 0(a0)
1448; RV64I-NEXT:    ret
1449;
1450; RV64XTHEADBA-LABEL: srli_1_sh2add:
1451; RV64XTHEADBA:       # %bb.0:
1452; RV64XTHEADBA-NEXT:    srli a1, a1, 1
1453; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1454; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1455; RV64XTHEADBA-NEXT:    ret
1456  %3 = lshr i64 %1, 1
1457  %4 = getelementptr inbounds i32, ptr %0, i64 %3
1458  %5 = load i32, ptr %4, align 4
1459  ret i32 %5
1460}
1461
1462define i64 @srli_2_sh3add(ptr %0, i64 %1) {
1463; RV64I-LABEL: srli_2_sh3add:
1464; RV64I:       # %bb.0:
1465; RV64I-NEXT:    slli a1, a1, 1
1466; RV64I-NEXT:    andi a1, a1, -8
1467; RV64I-NEXT:    add a0, a0, a1
1468; RV64I-NEXT:    ld a0, 0(a0)
1469; RV64I-NEXT:    ret
1470;
1471; RV64XTHEADBA-LABEL: srli_2_sh3add:
1472; RV64XTHEADBA:       # %bb.0:
1473; RV64XTHEADBA-NEXT:    srli a1, a1, 2
1474; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1475; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1476; RV64XTHEADBA-NEXT:    ret
1477  %3 = lshr i64 %1, 2
1478  %4 = getelementptr inbounds i64, ptr %0, i64 %3
1479  %5 = load i64, ptr %4, align 8
1480  ret i64 %5
1481}
1482
1483define signext i16 @srli_2_sh1add(ptr %0, i64 %1) {
1484; RV64I-LABEL: srli_2_sh1add:
1485; RV64I:       # %bb.0:
1486; RV64I-NEXT:    srli a1, a1, 1
1487; RV64I-NEXT:    andi a1, a1, -2
1488; RV64I-NEXT:    add a0, a0, a1
1489; RV64I-NEXT:    lh a0, 0(a0)
1490; RV64I-NEXT:    ret
1491;
1492; RV64XTHEADBA-LABEL: srli_2_sh1add:
1493; RV64XTHEADBA:       # %bb.0:
1494; RV64XTHEADBA-NEXT:    srli a1, a1, 2
1495; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 1
1496; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1497; RV64XTHEADBA-NEXT:    ret
1498  %3 = lshr i64 %1, 2
1499  %4 = getelementptr inbounds i16, ptr %0, i64 %3
1500  %5 = load i16, ptr %4, align 2
1501  ret i16 %5
1502}
1503
1504define signext i32 @srli_3_sh2add(ptr %0, i64 %1) {
1505; RV64I-LABEL: srli_3_sh2add:
1506; RV64I:       # %bb.0:
1507; RV64I-NEXT:    srli a1, a1, 1
1508; RV64I-NEXT:    andi a1, a1, -4
1509; RV64I-NEXT:    add a0, a0, a1
1510; RV64I-NEXT:    lw a0, 0(a0)
1511; RV64I-NEXT:    ret
1512;
1513; RV64XTHEADBA-LABEL: srli_3_sh2add:
1514; RV64XTHEADBA:       # %bb.0:
1515; RV64XTHEADBA-NEXT:    srli a1, a1, 3
1516; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1517; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1518; RV64XTHEADBA-NEXT:    ret
1519  %3 = lshr i64 %1, 3
1520  %4 = getelementptr inbounds i32, ptr %0, i64 %3
1521  %5 = load i32, ptr %4, align 4
1522  ret i32 %5
1523}
1524
1525define i64 @srli_4_sh3add(ptr %0, i64 %1) {
1526; RV64I-LABEL: srli_4_sh3add:
1527; RV64I:       # %bb.0:
1528; RV64I-NEXT:    srli a1, a1, 1
1529; RV64I-NEXT:    andi a1, a1, -8
1530; RV64I-NEXT:    add a0, a0, a1
1531; RV64I-NEXT:    ld a0, 0(a0)
1532; RV64I-NEXT:    ret
1533;
1534; RV64XTHEADBA-LABEL: srli_4_sh3add:
1535; RV64XTHEADBA:       # %bb.0:
1536; RV64XTHEADBA-NEXT:    srli a1, a1, 4
1537; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1538; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1539; RV64XTHEADBA-NEXT:    ret
1540  %3 = lshr i64 %1, 4
1541  %4 = getelementptr inbounds i64, ptr %0, i64 %3
1542  %5 = load i64, ptr %4, align 8
1543  ret i64 %5
1544}
1545
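; The array_index tests index a [N x ty] array with two indices, so the
; address is p + idx1 * N * sizeof(ty) + idx2 * sizeof(ty). Scales of 2, 4,
; and 8 become th.addsl; outer scales of 16 or more stay as slli + add, and a
; scale of 1 is a plain add.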
1546define i8 @array_index_sh1_sh0(ptr %p, i64 %idx1, i64 %idx2) {
1547; RV64I-LABEL: array_index_sh1_sh0:
1548; RV64I:       # %bb.0:
1549; RV64I-NEXT:    slli a1, a1, 1
1550; RV64I-NEXT:    add a0, a0, a2
1551; RV64I-NEXT:    add a0, a0, a1
1552; RV64I-NEXT:    lbu a0, 0(a0)
1553; RV64I-NEXT:    ret
1554;
1555; RV64XTHEADBA-LABEL: array_index_sh1_sh0:
1556; RV64XTHEADBA:       # %bb.0:
1557; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 1
1558; RV64XTHEADBA-NEXT:    add a0, a0, a2
1559; RV64XTHEADBA-NEXT:    lbu a0, 0(a0)
1560; RV64XTHEADBA-NEXT:    ret
1561  %a = getelementptr inbounds [2 x i8], ptr %p, i64 %idx1, i64 %idx2
1562  %b = load i8, ptr %a, align 1
1563  ret i8 %b
1564}
1565
1566define i16 @array_index_sh1_sh1(ptr %p, i64 %idx1, i64 %idx2) {
1567; RV64I-LABEL: array_index_sh1_sh1:
1568; RV64I:       # %bb.0:
1569; RV64I-NEXT:    slli a1, a1, 2
1570; RV64I-NEXT:    add a0, a0, a1
1571; RV64I-NEXT:    slli a2, a2, 1
1572; RV64I-NEXT:    add a0, a0, a2
1573; RV64I-NEXT:    lh a0, 0(a0)
1574; RV64I-NEXT:    ret
1575;
1576; RV64XTHEADBA-LABEL: array_index_sh1_sh1:
1577; RV64XTHEADBA:       # %bb.0:
1578; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1579; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 1
1580; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1581; RV64XTHEADBA-NEXT:    ret
1582  %a = getelementptr inbounds [2 x i16], ptr %p, i64 %idx1, i64 %idx2
1583  %b = load i16, ptr %a, align 2
1584  ret i16 %b
1585}
1586
1587define i32 @array_index_sh1_sh2(ptr %p, i64 %idx1, i64 %idx2) {
1588; RV64I-LABEL: array_index_sh1_sh2:
1589; RV64I:       # %bb.0:
1590; RV64I-NEXT:    slli a1, a1, 3
1591; RV64I-NEXT:    add a0, a0, a1
1592; RV64I-NEXT:    slli a2, a2, 2
1593; RV64I-NEXT:    add a0, a0, a2
1594; RV64I-NEXT:    lw a0, 0(a0)
1595; RV64I-NEXT:    ret
1596;
1597; RV64XTHEADBA-LABEL: array_index_sh1_sh2:
1598; RV64XTHEADBA:       # %bb.0:
1599; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1600; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 2
1601; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1602; RV64XTHEADBA-NEXT:    ret
1603  %a = getelementptr inbounds [2 x i32], ptr %p, i64 %idx1, i64 %idx2
1604  %b = load i32, ptr %a, align 4
1605  ret i32 %b
1606}
1607
1608define i64 @array_index_sh1_sh3(ptr %p, i64 %idx1, i64 %idx2) {
1609; RV64I-LABEL: array_index_sh1_sh3:
1610; RV64I:       # %bb.0:
1611; RV64I-NEXT:    slli a1, a1, 4
1612; RV64I-NEXT:    add a0, a0, a1
1613; RV64I-NEXT:    slli a2, a2, 3
1614; RV64I-NEXT:    add a0, a0, a2
1615; RV64I-NEXT:    ld a0, 0(a0)
1616; RV64I-NEXT:    ret
1617;
1618; RV64XTHEADBA-LABEL: array_index_sh1_sh3:
1619; RV64XTHEADBA:       # %bb.0:
1620; RV64XTHEADBA-NEXT:    slli a1, a1, 4
1621; RV64XTHEADBA-NEXT:    add a0, a0, a1
1622; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1623; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1624; RV64XTHEADBA-NEXT:    ret
1625  %a = getelementptr inbounds [2 x i64], ptr %p, i64 %idx1, i64 %idx2
1626  %b = load i64, ptr %a, align 8
1627  ret i64 %b
1628}
1629
1630define i8 @array_index_sh2_sh0(ptr %p, i64 %idx1, i64 %idx2) {
1631; RV64I-LABEL: array_index_sh2_sh0:
1632; RV64I:       # %bb.0:
1633; RV64I-NEXT:    slli a1, a1, 2
1634; RV64I-NEXT:    add a0, a0, a2
1635; RV64I-NEXT:    add a0, a0, a1
1636; RV64I-NEXT:    lbu a0, 0(a0)
1637; RV64I-NEXT:    ret
1638;
1639; RV64XTHEADBA-LABEL: array_index_sh2_sh0:
1640; RV64XTHEADBA:       # %bb.0:
1641; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 2
1642; RV64XTHEADBA-NEXT:    add a0, a0, a2
1643; RV64XTHEADBA-NEXT:    lbu a0, 0(a0)
1644; RV64XTHEADBA-NEXT:    ret
1645  %a = getelementptr inbounds [4 x i8], ptr %p, i64 %idx1, i64 %idx2
1646  %b = load i8, ptr %a, align 1
1647  ret i8 %b
1648}
1649
1650define i16 @array_index_sh2_sh1(ptr %p, i64 %idx1, i64 %idx2) {
1651; RV64I-LABEL: array_index_sh2_sh1:
1652; RV64I:       # %bb.0:
1653; RV64I-NEXT:    slli a1, a1, 3
1654; RV64I-NEXT:    add a0, a0, a1
1655; RV64I-NEXT:    slli a2, a2, 1
1656; RV64I-NEXT:    add a0, a0, a2
1657; RV64I-NEXT:    lh a0, 0(a0)
1658; RV64I-NEXT:    ret
1659;
1660; RV64XTHEADBA-LABEL: array_index_sh2_sh1:
1661; RV64XTHEADBA:       # %bb.0:
1662; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1663; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 1
1664; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1665; RV64XTHEADBA-NEXT:    ret
1666  %a = getelementptr inbounds [4 x i16], ptr %p, i64 %idx1, i64 %idx2
1667  %b = load i16, ptr %a, align 2
1668  ret i16 %b
1669}
1670
1671define i32 @array_index_sh2_sh2(ptr %p, i64 %idx1, i64 %idx2) {
1672; RV64I-LABEL: array_index_sh2_sh2:
1673; RV64I:       # %bb.0:
1674; RV64I-NEXT:    slli a1, a1, 4
1675; RV64I-NEXT:    add a0, a0, a1
1676; RV64I-NEXT:    slli a2, a2, 2
1677; RV64I-NEXT:    add a0, a0, a2
1678; RV64I-NEXT:    lw a0, 0(a0)
1679; RV64I-NEXT:    ret
1680;
1681; RV64XTHEADBA-LABEL: array_index_sh2_sh2:
1682; RV64XTHEADBA:       # %bb.0:
1683; RV64XTHEADBA-NEXT:    slli a1, a1, 4
1684; RV64XTHEADBA-NEXT:    add a0, a0, a1
1685; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 2
1686; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1687; RV64XTHEADBA-NEXT:    ret
1688  %a = getelementptr inbounds [4 x i32], ptr %p, i64 %idx1, i64 %idx2
1689  %b = load i32, ptr %a, align 4
1690  ret i32 %b
1691}
1692
1693define i64 @array_index_sh2_sh3(ptr %p, i64 %idx1, i64 %idx2) {
1694; RV64I-LABEL: array_index_sh2_sh3:
1695; RV64I:       # %bb.0:
1696; RV64I-NEXT:    slli a1, a1, 5
1697; RV64I-NEXT:    add a0, a0, a1
1698; RV64I-NEXT:    slli a2, a2, 3
1699; RV64I-NEXT:    add a0, a0, a2
1700; RV64I-NEXT:    ld a0, 0(a0)
1701; RV64I-NEXT:    ret
1702;
1703; RV64XTHEADBA-LABEL: array_index_sh2_sh3:
1704; RV64XTHEADBA:       # %bb.0:
1705; RV64XTHEADBA-NEXT:    slli a1, a1, 5
1706; RV64XTHEADBA-NEXT:    add a0, a0, a1
1707; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1708; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1709; RV64XTHEADBA-NEXT:    ret
1710  %a = getelementptr inbounds [4 x i64], ptr %p, i64 %idx1, i64 %idx2
1711  %b = load i64, ptr %a, align 8
1712  ret i64 %b
1713}
1714
1715define i8 @array_index_sh3_sh0(ptr %p, i64 %idx1, i64 %idx2) {
1716; RV64I-LABEL: array_index_sh3_sh0:
1717; RV64I:       # %bb.0:
1718; RV64I-NEXT:    slli a1, a1, 3
1719; RV64I-NEXT:    add a0, a0, a2
1720; RV64I-NEXT:    add a0, a0, a1
1721; RV64I-NEXT:    lbu a0, 0(a0)
1722; RV64I-NEXT:    ret
1723;
1724; RV64XTHEADBA-LABEL: array_index_sh3_sh0:
1725; RV64XTHEADBA:       # %bb.0:
1726; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
1727; RV64XTHEADBA-NEXT:    add a0, a0, a2
1728; RV64XTHEADBA-NEXT:    lbu a0, 0(a0)
1729; RV64XTHEADBA-NEXT:    ret
1730  %a = getelementptr inbounds [8 x i8], ptr %p, i64 %idx1, i64 %idx2
1731  %b = load i8, ptr %a, align 1
1732  ret i8 %b
1733}
1734
1735define i16 @array_index_sh3_sh1(ptr %p, i64 %idx1, i64 %idx2) {
1736; RV64I-LABEL: array_index_sh3_sh1:
1737; RV64I:       # %bb.0:
1738; RV64I-NEXT:    slli a1, a1, 4
1739; RV64I-NEXT:    add a0, a0, a1
1740; RV64I-NEXT:    slli a2, a2, 1
1741; RV64I-NEXT:    add a0, a0, a2
1742; RV64I-NEXT:    lh a0, 0(a0)
1743; RV64I-NEXT:    ret
1744;
1745; RV64XTHEADBA-LABEL: array_index_sh3_sh1:
1746; RV64XTHEADBA:       # %bb.0:
1747; RV64XTHEADBA-NEXT:    slli a1, a1, 4
1748; RV64XTHEADBA-NEXT:    add a0, a0, a1
1749; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 1
1750; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1751; RV64XTHEADBA-NEXT:    ret
1752  %a = getelementptr inbounds [8 x i16], ptr %p, i64 %idx1, i64 %idx2
1753  %b = load i16, ptr %a, align 2
1754  ret i16 %b
1755}
1756
1757define i32 @array_index_sh3_sh2(ptr %p, i64 %idx1, i64 %idx2) {
1758; RV64I-LABEL: array_index_sh3_sh2:
1759; RV64I:       # %bb.0:
1760; RV64I-NEXT:    slli a1, a1, 5
1761; RV64I-NEXT:    add a0, a0, a1
1762; RV64I-NEXT:    slli a2, a2, 2
1763; RV64I-NEXT:    add a0, a0, a2
1764; RV64I-NEXT:    lw a0, 0(a0)
1765; RV64I-NEXT:    ret
1766;
1767; RV64XTHEADBA-LABEL: array_index_sh3_sh2:
1768; RV64XTHEADBA:       # %bb.0:
1769; RV64XTHEADBA-NEXT:    slli a1, a1, 5
1770; RV64XTHEADBA-NEXT:    add a0, a0, a1
1771; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 2
1772; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1773; RV64XTHEADBA-NEXT:    ret
1774  %a = getelementptr inbounds [8 x i32], ptr %p, i64 %idx1, i64 %idx2
1775  %b = load i32, ptr %a, align 4
1776  ret i32 %b
1777}
1778
1779define i64 @array_index_sh3_sh3(ptr %p, i64 %idx1, i64 %idx2) {
1780; RV64I-LABEL: array_index_sh3_sh3:
1781; RV64I:       # %bb.0:
1782; RV64I-NEXT:    slli a1, a1, 6
1783; RV64I-NEXT:    add a0, a0, a1
1784; RV64I-NEXT:    slli a2, a2, 3
1785; RV64I-NEXT:    add a0, a0, a2
1786; RV64I-NEXT:    ld a0, 0(a0)
1787; RV64I-NEXT:    ret
1788;
1789; RV64XTHEADBA-LABEL: array_index_sh3_sh3:
1790; RV64XTHEADBA:       # %bb.0:
1791; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1792; RV64XTHEADBA-NEXT:    add a0, a0, a1
1793; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1794; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1795; RV64XTHEADBA-NEXT:    ret
1796  %a = getelementptr inbounds [8 x i64], ptr %p, i64 %idx1, i64 %idx2
1797  %b = load i64, ptr %a, align 8
1798  ret i64 %b
1799}
1800
; Similar to above, but with an lshr on one of the indices. This requires
; special handling during isel to form a shift pair.
1803define i64 @array_index_lshr_sh3_sh3(ptr %p, i64 %idx1, i64 %idx2) {
1804; RV64I-LABEL: array_index_lshr_sh3_sh3:
1805; RV64I:       # %bb.0:
1806; RV64I-NEXT:    srli a1, a1, 58
1807; RV64I-NEXT:    slli a2, a2, 3
1808; RV64I-NEXT:    slli a1, a1, 6
1809; RV64I-NEXT:    add a0, a0, a2
1810; RV64I-NEXT:    add a0, a0, a1
1811; RV64I-NEXT:    ld a0, 0(a0)
1812; RV64I-NEXT:    ret
1813;
1814; RV64XTHEADBA-LABEL: array_index_lshr_sh3_sh3:
1815; RV64XTHEADBA:       # %bb.0:
1816; RV64XTHEADBA-NEXT:    srli a1, a1, 58
1817; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1818; RV64XTHEADBA-NEXT:    add a0, a0, a1
1819; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1820; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1821; RV64XTHEADBA-NEXT:    ret
1822  %shr = lshr i64 %idx1, 58
1823  %a = getelementptr inbounds [8 x i64], ptr %p, i64 %shr, i64 %idx2
1824  %b = load i64, ptr %a, align 8
1825  ret i64 %b
1826}
1827
1828define i8 @array_index_sh4_sh0(ptr %p, i64 %idx1, i64 %idx2) {
1829; CHECK-LABEL: array_index_sh4_sh0:
1830; CHECK:       # %bb.0:
1831; CHECK-NEXT:    slli a1, a1, 4
1832; CHECK-NEXT:    add a0, a0, a2
1833; CHECK-NEXT:    add a0, a0, a1
1834; CHECK-NEXT:    lbu a0, 0(a0)
1835; CHECK-NEXT:    ret
1836  %a = getelementptr inbounds [16 x i8], ptr %p, i64 %idx1, i64 %idx2
1837  %b = load i8, ptr %a, align 1
1838  ret i8 %b
1839}
1840
1841define i16 @array_index_sh4_sh1(ptr %p, i64 %idx1, i64 %idx2) {
1842; RV64I-LABEL: array_index_sh4_sh1:
1843; RV64I:       # %bb.0:
1844; RV64I-NEXT:    slli a1, a1, 5
1845; RV64I-NEXT:    add a0, a0, a1
1846; RV64I-NEXT:    slli a2, a2, 1
1847; RV64I-NEXT:    add a0, a0, a2
1848; RV64I-NEXT:    lh a0, 0(a0)
1849; RV64I-NEXT:    ret
1850;
1851; RV64XTHEADBA-LABEL: array_index_sh4_sh1:
1852; RV64XTHEADBA:       # %bb.0:
1853; RV64XTHEADBA-NEXT:    slli a1, a1, 5
1854; RV64XTHEADBA-NEXT:    add a0, a0, a1
1855; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 1
1856; RV64XTHEADBA-NEXT:    lh a0, 0(a0)
1857; RV64XTHEADBA-NEXT:    ret
1858  %a = getelementptr inbounds [16 x i16], ptr %p, i64 %idx1, i64 %idx2
1859  %b = load i16, ptr %a, align 2
1860  ret i16 %b
1861}
1862
1863define i32 @array_index_sh4_sh2(ptr %p, i64 %idx1, i64 %idx2) {
1864; RV64I-LABEL: array_index_sh4_sh2:
1865; RV64I:       # %bb.0:
1866; RV64I-NEXT:    slli a1, a1, 6
1867; RV64I-NEXT:    add a0, a0, a1
1868; RV64I-NEXT:    slli a2, a2, 2
1869; RV64I-NEXT:    add a0, a0, a2
1870; RV64I-NEXT:    lw a0, 0(a0)
1871; RV64I-NEXT:    ret
1872;
1873; RV64XTHEADBA-LABEL: array_index_sh4_sh2:
1874; RV64XTHEADBA:       # %bb.0:
1875; RV64XTHEADBA-NEXT:    slli a1, a1, 6
1876; RV64XTHEADBA-NEXT:    add a0, a0, a1
1877; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 2
1878; RV64XTHEADBA-NEXT:    lw a0, 0(a0)
1879; RV64XTHEADBA-NEXT:    ret
1880  %a = getelementptr inbounds [16 x i32], ptr %p, i64 %idx1, i64 %idx2
1881  %b = load i32, ptr %a, align 4
1882  ret i32 %b
1883}
1884
1885define i64 @array_index_sh4_sh3(ptr %p, i64 %idx1, i64 %idx2) {
1886; RV64I-LABEL: array_index_sh4_sh3:
1887; RV64I:       # %bb.0:
1888; RV64I-NEXT:    slli a1, a1, 7
1889; RV64I-NEXT:    add a0, a0, a1
1890; RV64I-NEXT:    slli a2, a2, 3
1891; RV64I-NEXT:    add a0, a0, a2
1892; RV64I-NEXT:    ld a0, 0(a0)
1893; RV64I-NEXT:    ret
1894;
1895; RV64XTHEADBA-LABEL: array_index_sh4_sh3:
1896; RV64XTHEADBA:       # %bb.0:
1897; RV64XTHEADBA-NEXT:    slli a1, a1, 7
1898; RV64XTHEADBA-NEXT:    add a0, a0, a1
1899; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a2, 3
1900; RV64XTHEADBA-NEXT:    ld a0, 0(a0)
1901; RV64XTHEADBA-NEXT:    ret
1902  %a = getelementptr inbounds [16 x i64], ptr %p, i64 %idx1, i64 %idx2
1903  %b = load i64, ptr %a, align 8
1904  ret i64 %b
1905}
1906
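; Negative constants reuse the same shift-add decompositions followed by a
; neg or a sub: -3*a is th.addsl then neg, -7*a is a - (a << 3), while -6
; stays as li + mul.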
1907define i64 @mul_neg1(i64 %a) {
1908; CHECK-LABEL: mul_neg1:
1909; CHECK:       # %bb.0:
1910; CHECK-NEXT:    neg a0, a0
1911; CHECK-NEXT:    ret
1912  %c = mul i64 %a, -1
1913  ret i64 %c
1914}
1915
1916define i64 @mul_neg2(i64 %a) {
1917; CHECK-LABEL: mul_neg2:
1918; CHECK:       # %bb.0:
1919; CHECK-NEXT:    slli a0, a0, 1
1920; CHECK-NEXT:    neg a0, a0
1921; CHECK-NEXT:    ret
1922  %c = mul i64 %a, -2
1923  ret i64 %c
1924}
1925
1926define i64 @mul_neg3(i64 %a) {
1927; RV64I-LABEL: mul_neg3:
1928; RV64I:       # %bb.0:
1929; RV64I-NEXT:    slli a1, a0, 1
1930; RV64I-NEXT:    neg a0, a0
1931; RV64I-NEXT:    sub a0, a0, a1
1932; RV64I-NEXT:    ret
1933;
1934; RV64XTHEADBA-LABEL: mul_neg3:
1935; RV64XTHEADBA:       # %bb.0:
1936; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 1
1937; RV64XTHEADBA-NEXT:    neg a0, a0
1938; RV64XTHEADBA-NEXT:    ret
1939  %c = mul i64 %a, -3
1940  ret i64 %c
1941}
1942
1943define i64 @mul_neg4(i64 %a) {
1944; CHECK-LABEL: mul_neg4:
1945; CHECK:       # %bb.0:
1946; CHECK-NEXT:    slli a0, a0, 2
1947; CHECK-NEXT:    neg a0, a0
1948; CHECK-NEXT:    ret
1949  %c = mul i64 %a, -4
1950  ret i64 %c
1951}
1952
1953define i64 @mul_neg5(i64 %a) {
1954; RV64I-LABEL: mul_neg5:
1955; RV64I:       # %bb.0:
1956; RV64I-NEXT:    slli a1, a0, 2
1957; RV64I-NEXT:    neg a0, a0
1958; RV64I-NEXT:    sub a0, a0, a1
1959; RV64I-NEXT:    ret
1960;
1961; RV64XTHEADBA-LABEL: mul_neg5:
1962; RV64XTHEADBA:       # %bb.0:
1963; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a0, 2
1964; RV64XTHEADBA-NEXT:    neg a0, a0
1965; RV64XTHEADBA-NEXT:    ret
1966  %c = mul i64 %a, -5
1967  ret i64 %c
1968}
1969
1970define i64 @mul_neg6(i64 %a) {
1971; CHECK-LABEL: mul_neg6:
1972; CHECK:       # %bb.0:
1973; CHECK-NEXT:    li a1, -6
1974; CHECK-NEXT:    mul a0, a0, a1
1975; CHECK-NEXT:    ret
1976  %c = mul i64 %a, -6
1977  ret i64 %c
1978}
1979
1980define i64 @mul_neg7(i64 %a) {
1981; CHECK-LABEL: mul_neg7:
1982; CHECK:       # %bb.0:
1983; CHECK-NEXT:    slli a1, a0, 3
1984; CHECK-NEXT:    sub a0, a0, a1
1985; CHECK-NEXT:    ret
1986  %c = mul i64 %a, -7
1987  ret i64 %c
1988}
1989
1990define i64 @mul_neg8(i64 %a) {
1991; CHECK-LABEL: mul_neg8:
1992; CHECK:       # %bb.0:
1993; CHECK-NEXT:    slli a0, a0, 3
1994; CHECK-NEXT:    neg a0, a0
1995; CHECK-NEXT:    ret
1996  %c = mul i64 %a, -8
1997  ret i64 %c
1998}
1999
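; srai_srli_sh3add and srai_srli_slli check that an index built from srai
; followed by srli can still feed the scaled address, folding into th.addsl
; when the scale is 8 but not for the out-of-range scale of 16. The two
; and-based tests below make sure the peephole added for these patterns does
; not break plain srai-and sequences.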
2000define ptr @srai_srli_sh3add(ptr %0, i64 %1) nounwind {
2001; RV64I-LABEL: srai_srli_sh3add:
2002; RV64I:       # %bb.0: # %entry
2003; RV64I-NEXT:    srai a1, a1, 32
2004; RV64I-NEXT:    srli a1, a1, 6
2005; RV64I-NEXT:    slli a1, a1, 3
2006; RV64I-NEXT:    add a0, a0, a1
2007; RV64I-NEXT:    ret
2008;
2009; RV64XTHEADBA-LABEL: srai_srli_sh3add:
2010; RV64XTHEADBA:       # %bb.0: # %entry
2011; RV64XTHEADBA-NEXT:    srai a1, a1, 32
2012; RV64XTHEADBA-NEXT:    srli a1, a1, 6
2013; RV64XTHEADBA-NEXT:    th.addsl a0, a0, a1, 3
2014; RV64XTHEADBA-NEXT:    ret
2015entry:
2016  %2 = ashr i64 %1, 32
2017  %3 = lshr i64 %2, 6
2018  %4 = getelementptr i64, ptr %0, i64 %3
2019  ret ptr %4
2020}
2021
2022define ptr @srai_srli_slli(ptr %0, i64 %1) nounwind {
2023; CHECK-LABEL: srai_srli_slli:
2024; CHECK:       # %bb.0: # %entry
2025; CHECK-NEXT:    srai a1, a1, 32
2026; CHECK-NEXT:    srli a1, a1, 6
2027; CHECK-NEXT:    slli a1, a1, 4
2028; CHECK-NEXT:    add a0, a0, a1
2029; CHECK-NEXT:    ret
2030entry:
2031  %2 = ashr i64 %1, 32
2032  %3 = lshr i64 %2, 6
2033  %4 = getelementptr i128, ptr %0, i64 %3
2034  ret ptr %4
2035}
2036
; Negative test to make sure the peephole added for srai_srli_slli and
; srai_srli_sh3add doesn't break this.
2039define i64 @srai_andi(i64 %x) nounwind {
2040; CHECK-LABEL: srai_andi:
2041; CHECK:       # %bb.0: # %entry
2042; CHECK-NEXT:    srai a0, a0, 8
2043; CHECK-NEXT:    andi a0, a0, -8
2044; CHECK-NEXT:    ret
2045entry:
2046  %y = ashr i64 %x, 8
2047  %z = and i64 %y, -8
2048  ret i64 %z
2049}
2050
; Negative test to make sure the peephole added for srai_srli_slli and
; srai_srli_sh3add doesn't break this.
2053define i64 @srai_lui_and(i64 %x) nounwind {
2054; CHECK-LABEL: srai_lui_and:
2055; CHECK:       # %bb.0: # %entry
2056; CHECK-NEXT:    srai a0, a0, 8
2057; CHECK-NEXT:    lui a1, 1048574
2058; CHECK-NEXT:    and a0, a0, a1
2059; CHECK-NEXT:    ret
2060entry:
2061  %y = ashr i64 %x, 8
2062  %z = and i64 %y, -8192
2063  ret i64 %z
2064}
2065