; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=+d < %s | FileCheck %s --check-prefix=LA32
; RUN: llc --mtriple=loongarch64 -mattr=+d < %s | FileCheck %s --check-prefix=LA64

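;; LoongArch only provides rotate-right instructions (rotr.w/rotr.d and their
;; immediate forms), so a rotate left by a variable amount is expected to
;; lower to a negate of the amount plus a rotate right.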
define signext i32 @rotl_32(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotl_32:
; LA32:       # %bb.0:
; LA32-NEXT:    sub.w $a1, $zero, $a1
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_32:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i32 32, %y
  %b = shl i32 %x, %y
  %c = lshr i32 %x, %z
  %d = or i32 %b, %c
  ret i32 %d
}

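;; A variable 32-bit rotate right maps directly onto rotr.w on both targets.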
define signext i32 @rotr_32(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotr_32:
; LA32:       # %bb.0:
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_32:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i32 32, %y
  %b = lshr i32 %x, %y
  %c = shl i32 %x, %z
  %d = or i32 %b, %c
  ret i32 %d
}

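;; LA64 handles an i64 rotate left with a single negate plus rotr.d. LA32 has
;; no 64-bit rotate, so the operation is expanded into a branchless sequence
;; of 32-bit shifts combined with maskeqz/masknez selects.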
define i64 @rotl_64(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotl_64:
; LA32:       # %bb.0:
; LA32-NEXT:    sll.w $a3, $a1, $a2
; LA32-NEXT:    xori $a4, $a2, 31
; LA32-NEXT:    srli.w $a5, $a0, 1
; LA32-NEXT:    srl.w $a4, $a5, $a4
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    sll.w $a6, $a0, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    sll.w $a5, $a0, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a5, $zero, $a2
; LA32-NEXT:    srl.w $a6, $a1, $a5
; LA32-NEXT:    ori $a7, $zero, 32
; LA32-NEXT:    sub.w $a7, $a7, $a2
; LA32-NEXT:    slti $t0, $a7, 0
; LA32-NEXT:    masknez $t1, $a6, $t0
; LA32-NEXT:    srl.w $a0, $a0, $a5
; LA32-NEXT:    ori $a5, $zero, 64
; LA32-NEXT:    sub.w $a2, $a5, $a2
; LA32-NEXT:    xori $a2, $a2, 31
; LA32-NEXT:    slli.w $a1, $a1, 1
; LA32-NEXT:    sll.w $a1, $a1, $a2
; LA32-NEXT:    or $a0, $a0, $a1
; LA32-NEXT:    maskeqz $a0, $a0, $t0
; LA32-NEXT:    or $a0, $a0, $t1
; LA32-NEXT:    srai.w $a1, $a7, 31
; LA32-NEXT:    and $a1, $a1, $a6
; LA32-NEXT:    or $a1, $a3, $a1
; LA32-NEXT:    or $a0, $a4, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_64:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i64 64, %y
  %b = shl i64 %x, %y
  %c = lshr i64 %x, %z
  %d = or i64 %b, %c
  ret i64 %d
}

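;; The i64 rotate right mirrors rotl_64: a single rotr.d on LA64, a
;; shift-and-select expansion on LA32.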
define i64 @rotr_64(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotr_64:
; LA32:       # %bb.0:
; LA32-NEXT:    srl.w $a3, $a0, $a2
; LA32-NEXT:    xori $a4, $a2, 31
; LA32-NEXT:    slli.w $a5, $a1, 1
; LA32-NEXT:    sll.w $a4, $a5, $a4
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    srl.w $a6, $a1, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    srl.w $a5, $a1, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a5, $zero, $a2
; LA32-NEXT:    sll.w $a6, $a0, $a5
; LA32-NEXT:    ori $a7, $zero, 32
; LA32-NEXT:    sub.w $a7, $a7, $a2
; LA32-NEXT:    slti $t0, $a7, 0
; LA32-NEXT:    masknez $t1, $a6, $t0
; LA32-NEXT:    sll.w $a1, $a1, $a5
; LA32-NEXT:    ori $a5, $zero, 64
; LA32-NEXT:    sub.w $a2, $a5, $a2
; LA32-NEXT:    xori $a2, $a2, 31
; LA32-NEXT:    srli.w $a0, $a0, 1
; LA32-NEXT:    srl.w $a0, $a0, $a2
; LA32-NEXT:    or $a0, $a1, $a0
; LA32-NEXT:    maskeqz $a0, $a0, $t0
; LA32-NEXT:    or $a1, $a0, $t1
; LA32-NEXT:    srai.w $a0, $a7, 31
; LA32-NEXT:    and $a0, $a0, $a6
; LA32-NEXT:    or $a0, $a3, $a0
; LA32-NEXT:    or $a1, $a4, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_64:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i64 64, %y
  %b = lshr i64 %x, %y
  %c = shl i64 %x, %z
  %d = or i64 %b, %c
  ret i64 %d
}

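;; Masking the negated amount with 31 is redundant for a 32-bit rotate (only
;; the low 5 bits of the amount matter), so this still selects rotr.w.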
define signext i32 @rotl_32_mask(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotl_32_mask:
; LA32:       # %bb.0:
; LA32-NEXT:    sub.w $a1, $zero, $a1
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_32_mask:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i32 0, %y
  %and = and i32 %z, 31
  %b = shl i32 %x, %y
  %c = lshr i32 %x, %and
  %d = or i32 %b, %c
  ret i32 %d
}

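;; Wider masks on the two amounts (and 63 / and 31) must not block the rotate
;; match either.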
define signext i32 @rotl_32_mask_and_63_and_31(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotl_32_mask_and_63_and_31:
; LA32:       # %bb.0:
; LA32-NEXT:    sub.w $a1, $zero, $a1
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_32_mask_and_63_and_31:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = and i32 %y, 63
  %b = shl i32 %x, %a
  %c = sub i32 0, %y
  %d = and i32 %c, 31
  %e = lshr i32 %x, %d
  %f = or i32 %b, %e
  ret i32 %f
}

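;; OR-ing the amounts with the bit width or more makes each individual shift
;; oversized (and therefore poison), so folding to a plain rotate is still
;; permitted; the masked rotr variants below check the symmetric patterns.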
define signext i32 @rotl_32_mask_or_64_or_32(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotl_32_mask_or_64_or_32:
; LA32:       # %bb.0:
; LA32-NEXT:    sub.w $a1, $zero, $a1
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_32_mask_or_64_or_32:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = or i32 %y, 64
  %b = shl i32 %x, %a
  %c = sub i32 0, %y
  %d = or i32 %c, 32
  %e = lshr i32 %x, %d
  %f = or i32 %b, %e
  ret i32 %f
}

define signext i32 @rotr_32_mask(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotr_32_mask:
; LA32:       # %bb.0:
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_32_mask:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i32 0, %y
  %and = and i32 %z, 31
  %b = lshr i32 %x, %y
  %c = shl i32 %x, %and
  %d = or i32 %b, %c
  ret i32 %d
}

define signext i32 @rotr_32_mask_and_63_and_31(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotr_32_mask_and_63_and_31:
; LA32:       # %bb.0:
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_32_mask_and_63_and_31:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = and i32 %y, 63
  %b = lshr i32 %x, %a
  %c = sub i32 0, %y
  %d = and i32 %c, 31
  %e = shl i32 %x, %d
  %f = or i32 %b, %e
  ret i32 %f
}

define signext i32 @rotr_32_mask_or_64_or_32(i32 signext %x, i32 signext %y) nounwind {
; LA32-LABEL: rotr_32_mask_or_64_or_32:
; LA32:       # %bb.0:
; LA32-NEXT:    rotr.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_32_mask_or_64_or_32:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = or i32 %y, 64
  %b = lshr i32 %x, %a
  %c = sub i32 0, %y
  %d = or i32 %c, 32
  %e = shl i32 %x, %d
  %f = or i32 %b, %e
  ret i32 %f
}

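;; For the i64 masked-amount rotates, LA64 still selects a single rotr.d,
;; while LA32 keeps the expanded shift-and-select sequence with the
;; andi-masked amount folded into it.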
define i64 @rotl_64_mask(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotl_64_mask:
; LA32:       # %bb.0:
; LA32-NEXT:    sll.w $a3, $a1, $a2
; LA32-NEXT:    xori $a4, $a2, 31
; LA32-NEXT:    srli.w $a5, $a0, 1
; LA32-NEXT:    srl.w $a4, $a5, $a4
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    sll.w $a6, $a0, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    sll.w $a5, $a0, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a2, $zero, $a2
; LA32-NEXT:    andi $a5, $a2, 63
; LA32-NEXT:    addi.w $a6, $a5, -32
; LA32-NEXT:    srl.w $a7, $a1, $a6
; LA32-NEXT:    slti $t0, $a6, 0
; LA32-NEXT:    masknez $a7, $a7, $t0
; LA32-NEXT:    srl.w $a0, $a0, $a2
; LA32-NEXT:    xori $a5, $a5, 31
; LA32-NEXT:    slli.w $t1, $a1, 1
; LA32-NEXT:    sll.w $a5, $t1, $a5
; LA32-NEXT:    or $a0, $a0, $a5
; LA32-NEXT:    maskeqz $a0, $a0, $t0
; LA32-NEXT:    or $a0, $a0, $a7
; LA32-NEXT:    srl.w $a1, $a1, $a2
; LA32-NEXT:    srai.w $a2, $a6, 31
; LA32-NEXT:    and $a1, $a2, $a1
; LA32-NEXT:    or $a1, $a3, $a1
; LA32-NEXT:    or $a0, $a4, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_64_mask:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i64 0, %y
  %and = and i64 %z, 63
  %b = shl i64 %x, %y
  %c = lshr i64 %x, %and
  %d = or i64 %b, %c
  ret i64 %d
}

define i64 @rotl_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotl_64_mask_and_127_and_63:
; LA32:       # %bb.0:
; LA32-NEXT:    sll.w $a3, $a1, $a2
; LA32-NEXT:    srli.w $a4, $a0, 1
; LA32-NEXT:    andi $a5, $a2, 127
; LA32-NEXT:    xori $a6, $a5, 31
; LA32-NEXT:    srl.w $a4, $a4, $a6
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a5, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    sll.w $a6, $a0, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    sll.w $a5, $a0, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a2, $zero, $a2
; LA32-NEXT:    andi $a5, $a2, 63
; LA32-NEXT:    addi.w $a6, $a5, -32
; LA32-NEXT:    srl.w $a7, $a1, $a6
; LA32-NEXT:    slti $t0, $a6, 0
; LA32-NEXT:    masknez $a7, $a7, $t0
; LA32-NEXT:    srl.w $a0, $a0, $a2
; LA32-NEXT:    xori $a5, $a5, 31
; LA32-NEXT:    slli.w $t1, $a1, 1
; LA32-NEXT:    sll.w $a5, $t1, $a5
; LA32-NEXT:    or $a0, $a0, $a5
; LA32-NEXT:    maskeqz $a0, $a0, $t0
; LA32-NEXT:    or $a0, $a0, $a7
; LA32-NEXT:    srl.w $a1, $a1, $a2
; LA32-NEXT:    srai.w $a2, $a6, 31
; LA32-NEXT:    and $a1, $a2, $a1
; LA32-NEXT:    or $a1, $a3, $a1
; LA32-NEXT:    or $a0, $a4, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_64_mask_and_127_and_63:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = and i64 %y, 127
  %b = shl i64 %x, %a
  %c = sub i64 0, %y
  %d = and i64 %c, 63
  %e = lshr i64 %x, %d
  %f = or i64 %b, %e
  ret i64 %f
}

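;; Here both shift amounts are forced to at least the bit width, so every
;; shift is poison: LA32 folds the whole computation to zero, while LA64
;; still emits a rotate.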
define i64 @rotl_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotl_64_mask_or_128_or_64:
; LA32:       # %bb.0:
; LA32-NEXT:    move $a0, $zero
; LA32-NEXT:    move $a1, $zero
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_64_mask_or_128_or_64:
; LA64:       # %bb.0:
; LA64-NEXT:    sub.d $a1, $zero, $a1
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = or i64 %y, 128
  %b = shl i64 %x, %a
  %c = sub i64 0, %y
  %d = or i64 %c, 64
  %e = lshr i64 %x, %d
  %f = or i64 %b, %e
  ret i64 %f
}

define i64 @rotr_64_mask(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotr_64_mask:
; LA32:       # %bb.0:
; LA32-NEXT:    srl.w $a3, $a0, $a2
; LA32-NEXT:    xori $a4, $a2, 31
; LA32-NEXT:    slli.w $a5, $a1, 1
; LA32-NEXT:    sll.w $a4, $a5, $a4
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    srl.w $a6, $a1, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    srl.w $a5, $a1, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a2, $zero, $a2
; LA32-NEXT:    andi $a5, $a2, 63
; LA32-NEXT:    addi.w $a6, $a5, -32
; LA32-NEXT:    sll.w $a7, $a0, $a6
; LA32-NEXT:    slti $t0, $a6, 0
; LA32-NEXT:    masknez $a7, $a7, $t0
; LA32-NEXT:    sll.w $a1, $a1, $a2
; LA32-NEXT:    xori $a5, $a5, 31
; LA32-NEXT:    srli.w $t1, $a0, 1
; LA32-NEXT:    srl.w $a5, $t1, $a5
; LA32-NEXT:    or $a1, $a1, $a5
; LA32-NEXT:    maskeqz $a1, $a1, $t0
; LA32-NEXT:    or $a1, $a1, $a7
; LA32-NEXT:    sll.w $a0, $a0, $a2
; LA32-NEXT:    srai.w $a2, $a6, 31
; LA32-NEXT:    and $a0, $a2, $a0
; LA32-NEXT:    or $a0, $a3, $a0
; LA32-NEXT:    or $a1, $a4, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_64_mask:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %z = sub i64 0, %y
  %and = and i64 %z, 63
  %b = lshr i64 %x, %y
  %c = shl i64 %x, %and
  %d = or i64 %b, %c
  ret i64 %d
}

define i64 @rotr_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotr_64_mask_and_127_and_63:
; LA32:       # %bb.0:
; LA32-NEXT:    srl.w $a3, $a0, $a2
; LA32-NEXT:    slli.w $a4, $a1, 1
; LA32-NEXT:    andi $a5, $a2, 127
; LA32-NEXT:    xori $a6, $a5, 31
; LA32-NEXT:    sll.w $a4, $a4, $a6
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a5, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    srl.w $a6, $a1, $a4
; LA32-NEXT:    masknez $a5, $a6, $a5
; LA32-NEXT:    or $a3, $a3, $a5
; LA32-NEXT:    srl.w $a5, $a1, $a2
; LA32-NEXT:    srai.w $a4, $a4, 31
; LA32-NEXT:    and $a4, $a4, $a5
; LA32-NEXT:    sub.w $a2, $zero, $a2
; LA32-NEXT:    andi $a5, $a2, 63
; LA32-NEXT:    addi.w $a6, $a5, -32
; LA32-NEXT:    sll.w $a7, $a0, $a6
; LA32-NEXT:    slti $t0, $a6, 0
; LA32-NEXT:    masknez $a7, $a7, $t0
; LA32-NEXT:    sll.w $a1, $a1, $a2
; LA32-NEXT:    xori $a5, $a5, 31
; LA32-NEXT:    srli.w $t1, $a0, 1
; LA32-NEXT:    srl.w $a5, $t1, $a5
; LA32-NEXT:    or $a1, $a1, $a5
; LA32-NEXT:    maskeqz $a1, $a1, $t0
; LA32-NEXT:    or $a1, $a1, $a7
; LA32-NEXT:    sll.w $a0, $a0, $a2
; LA32-NEXT:    srai.w $a2, $a6, 31
; LA32-NEXT:    and $a0, $a2, $a0
; LA32-NEXT:    or $a0, $a3, $a0
; LA32-NEXT:    or $a1, $a4, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_64_mask_and_127_and_63:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = and i64 %y, 127
  %b = lshr i64 %x, %a
  %c = sub i64 0, %y
  %d = and i64 %c, 63
  %e = shl i64 %x, %d
  %f = or i64 %b, %e
  ret i64 %f
}

define i64 @rotr_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotr_64_mask_or_128_or_64:
; LA32:       # %bb.0:
; LA32-NEXT:    move $a0, $zero
; LA32-NEXT:    move $a1, $zero
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_64_mask_or_128_or_64:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %a = or i64 %y, 128
  %b = lshr i64 %x, %a
  %c = sub i64 0, %y
  %d = or i64 %c, 64
  %e = shl i64 %x, %d
  %f = or i64 %b, %e
  ret i64 %f
}

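;; Rotate an i64, then truncate: LA64 rotates with rotr.d and sign-extends
;; the low 32 bits with addi.w to satisfy the signext return.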
define signext i32 @rotr_64_trunc_32(i64 %x, i64 %y) nounwind {
; LA32-LABEL: rotr_64_trunc_32:
; LA32:       # %bb.0:
; LA32-NEXT:    srl.w $a3, $a0, $a2
; LA32-NEXT:    xori $a4, $a2, 31
; LA32-NEXT:    slli.w $a5, $a1, 1
; LA32-NEXT:    sll.w $a4, $a5, $a4
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    maskeqz $a3, $a3, $a5
; LA32-NEXT:    srl.w $a1, $a1, $a4
; LA32-NEXT:    masknez $a1, $a1, $a5
; LA32-NEXT:    or $a1, $a3, $a1
; LA32-NEXT:    sub.w $a3, $zero, $a2
; LA32-NEXT:    sll.w $a0, $a0, $a3
; LA32-NEXT:    ori $a3, $zero, 32
; LA32-NEXT:    sub.w $a2, $a3, $a2
; LA32-NEXT:    srai.w $a2, $a2, 31
; LA32-NEXT:    and $a0, $a2, $a0
; LA32-NEXT:    or $a0, $a1, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_64_trunc_32:
; LA64:       # %bb.0:
; LA64-NEXT:    rotr.d $a0, $a0, $a1
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %z = sub i64 64, %y
  %b = lshr i64 %x, %y
  %c = shl i64 %x, %z
  %d = or i64 %b, %c
  %e = trunc i64 %d to i32
  ret i32 %e
}

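;; Constant rotate amounts should select the immediate forms rotri.w/rotri.d.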
define signext i32 @rotri_i32(i32 signext %a) nounwind {
; LA32-LABEL: rotri_i32:
; LA32:       # %bb.0:
; LA32-NEXT:    rotri.w $a0, $a0, 16
; LA32-NEXT:    ret
;
; LA64-LABEL: rotri_i32:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.w $a0, $a0, 16
; LA64-NEXT:    ret
  %shl = shl i32 %a, 16
  %shr = lshr i32 %a, 16
  %or = or i32 %shl, %shr
  ret i32 %or
}

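;; An i64 rotate by exactly 32 is just a register swap on LA32; no shifts or
;; rotates are needed.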
define i64 @rotri_i64(i64 %a) nounwind {
; LA32-LABEL: rotri_i64:
; LA32:       # %bb.0:
; LA32-NEXT:    move $a2, $a0
; LA32-NEXT:    move $a0, $a1
; LA32-NEXT:    move $a1, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: rotri_i64:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.d $a0, $a0, 32
; LA64-NEXT:    ret
  %shl = shl i64 %a, 32
  %shr = lshr i64 %a, 32
  %or = or i64 %shl, %shr
  ret i64 %or
}

declare i32 @llvm.fshl.i32(i32, i32, i32)
declare i64 @llvm.fshl.i64(i64, i64, i64)
declare i32 @llvm.fshr.i32(i32, i32, i32)
declare i64 @llvm.fshr.i64(i64, i64, i64)

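;; Funnel shifts with identical operands are rotates: a constant-amount fshl
;; becomes rotri with the complementary amount (32 - 12 = 20 below), while
;; the i64 cases on LA32 expand into shift/or pairs for each half.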
define signext i32 @rotl_i32_fshl(i32 signext %a) nounwind {
; LA32-LABEL: rotl_i32_fshl:
; LA32:       # %bb.0:
; LA32-NEXT:    rotri.w $a0, $a0, 20
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_i32_fshl:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.w $a0, $a0, 20
; LA64-NEXT:    ret
  %or = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 12)
  ret i32 %or
}

define i64 @rotl_i64_fshl(i64 %a) nounwind {
; LA32-LABEL: rotl_i64_fshl:
; LA32:       # %bb.0:
; LA32-NEXT:    srli.w $a2, $a1, 20
; LA32-NEXT:    slli.w $a3, $a0, 12
; LA32-NEXT:    or $a2, $a3, $a2
; LA32-NEXT:    srli.w $a0, $a0, 20
; LA32-NEXT:    slli.w $a1, $a1, 12
; LA32-NEXT:    or $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: rotl_i64_fshl:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.d $a0, $a0, 52
; LA64-NEXT:    ret
  %or = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 12)
  ret i64 %or
}

define signext i32 @rotr_i32_fshr(i32 signext %a) nounwind {
; LA32-LABEL: rotr_i32_fshr:
; LA32:       # %bb.0:
; LA32-NEXT:    rotri.w $a0, $a0, 12
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_i32_fshr:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.w $a0, $a0, 12
; LA64-NEXT:    ret
  %or = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 12)
  ret i32 %or
}

define i64 @rotr_i64_fshr(i64 %a) nounwind {
; LA32-LABEL: rotr_i64_fshr:
; LA32:       # %bb.0:
; LA32-NEXT:    srli.w $a2, $a0, 12
; LA32-NEXT:    slli.w $a3, $a1, 20
; LA32-NEXT:    or $a2, $a3, $a2
; LA32-NEXT:    srli.w $a1, $a1, 12
; LA32-NEXT:    slli.w $a0, $a0, 20
; LA32-NEXT:    or $a1, $a0, $a1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: rotr_i64_fshr:
; LA64:       # %bb.0:
; LA64-NEXT:    rotri.d $a0, $a0, 12
; LA64-NEXT:    ret
  %or = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 12)
  ret i64 %or
}