; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=+d < %s | FileCheck %s --check-prefix=LA32
; RUN: llc --mtriple=loongarch64 -mattr=+d < %s | FileCheck %s --check-prefix=LA64

;; Exercise the 'add' LLVM IR: https://llvm.org/docs/LangRef.html#add-instruction

define i1 @add_i1(i1 %x, i1 %y) {
; LA32-LABEL: add_i1:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i1:
; LA64:       # %bb.0:
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i1 %x, %y
  ret i1 %add
}

define i8 @add_i8(i8 %x, i8 %y) {
; LA32-LABEL: add_i8:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i8:
; LA64:       # %bb.0:
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i8 %x, %y
  ret i8 %add
}

define i16 @add_i16(i16 %x, i16 %y) {
; LA32-LABEL: add_i16:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i16:
; LA64:       # %bb.0:
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i16 %x, %y
  ret i16 %add
}

define i32 @add_i32(i32 %x, i32 %y) {
; LA32-LABEL: add_i32:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32:
; LA64:       # %bb.0:
; LA64-NEXT:    add.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i32 %x, %y
  ret i32 %add
}

;; Match the pattern:
;; def : PatGprGpr_32<add, ADD_W>;
define signext i32 @add_i32_sext(i32 %x, i32 %y) {
; LA32-LABEL: add_i32_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    add.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i32 %x, %y
  ret i32 %add
}

define i64 @add_i64(i64 %x, i64 %y) {
; LA32-LABEL: add_i64:
; LA32:       # %bb.0:
; LA32-NEXT:    add.w $a1, $a1, $a3
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64:
; LA64:       # %bb.0:
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i64 %x, %y
  ret i64 %add
}

define i1 @add_i1_3(i1 %x) {
; LA32-LABEL: add_i1_3:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i1_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 1
; LA64-NEXT:    ret
  %add = add i1 %x, 3
  ret i1 %add
}

define i8 @add_i8_3(i8 %x) {
; LA32-LABEL: add_i8_3:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i8_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 3
; LA64-NEXT:    ret
  %add = add i8 %x, 3
  ret i8 %add
}

define i16 @add_i16_3(i16 %x) {
; LA32-LABEL: add_i16_3:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i16_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 3
; LA64-NEXT:    ret
  %add = add i16 %x, 3
  ret i16 %add
}

define i32 @add_i32_3(i32 %x) {
; LA32-LABEL: add_i32_3:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.w $a0, $a0, 3
; LA64-NEXT:    ret
  %add = add i32 %x, 3
  ret i32 %add
}

;; Match the pattern:
;; def : PatGprImm_32<add, ADDI_W, simm12>;
define signext i32 @add_i32_3_sext(i32 %x) {
; LA32-LABEL: add_i32_3_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_3_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.w $a0, $a0, 3
; LA64-NEXT:    ret
  %add = add i32 %x, 3
  ret i32 %add
}

define i64 @add_i64_3(i64 %x) {
; LA32-LABEL: add_i64_3:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, 3
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 3
; LA64-NEXT:    ret
  %add = add i64 %x, 3
  ret i64 %add
}

;; Check that `addu16i.d` is emitted for these cases.
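;; addu16i.d adds a sign-extended 16-bit immediate shifted left by 16 bits, so an
;; immediate of the form simm16 << 16 (e.g. 0x12340000 = 4660 << 16) can be added
;; with a single LA64 instruction.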

define i32 @add_i32_0x12340000(i32 %x) {
; LA32-LABEL: add_i32_0x12340000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 74560
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x12340000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 4660
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, 305397760
  ret i32 %add
}

define signext i32 @add_i32_0x12340000_sext(i32 %x) {
; LA32-LABEL: add_i32_0x12340000_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 74560
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x12340000_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 4660
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, 305397760
  ret i32 %add
}

define i64 @add_i64_0x12340000(i64 %x) {
; LA32-LABEL: add_i64_0x12340000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, 74560
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0x12340000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 4660
; LA64-NEXT:    ret
  %add = add i64 %x, 305397760
  ret i64 %add
}

define i32 @add_i32_0x7fff0000(i32 %x) {
; LA32-LABEL: add_i32_0x7fff0000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524272
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7fff0000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, 2147418112
  ret i32 %add
}

define signext i32 @add_i32_0x7fff0000_sext(i32 %x) {
; LA32-LABEL: add_i32_0x7fff0000_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524272
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7fff0000_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, 2147418112
  ret i32 %add
}

define i64 @add_i64_0x7fff0000(i64 %x) {
; LA32-LABEL: add_i64_0x7fff0000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, 524272
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0x7fff0000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    ret
  %add = add i64 %x, 2147418112
  ret i64 %add
}

define i32 @add_i32_minus_0x80000000(i32 %x) {
; LA32-LABEL: add_i32_minus_0x80000000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -524288
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x80000000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -32768
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, -2147483648
  ret i32 %add
}

define signext i32 @add_i32_minus_0x80000000_sext(i32 %x) {
; LA32-LABEL: add_i32_minus_0x80000000_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -524288
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x80000000_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -32768
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, -2147483648
  ret i32 %add
}

define i64 @add_i64_minus_0x80000000(i64 %x) {
; LA32-LABEL: add_i64_minus_0x80000000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -524288
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_0x80000000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -32768
; LA64-NEXT:    ret
  %add = add i64 %x, -2147483648
  ret i64 %add
}

define i32 @add_i32_minus_0x10000(i32 %x) {
; LA32-LABEL: add_i32_minus_0x10000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -16
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x10000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -1
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, -65536
  ret i32 %add
}

define signext i32 @add_i32_minus_0x10000_sext(i32 %x) {
; LA32-LABEL: add_i32_minus_0x10000_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -16
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x10000_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -1
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    ret
  %add = add i32 %x, -65536
  ret i32 %add
}

define i64 @add_i64_minus_0x10000(i64 %x) {
; LA32-LABEL: add_i64_minus_0x10000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -16
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_0x10000:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -1
; LA64-NEXT:    ret
  %add = add i64 %x, -65536
  ret i64 %add
}

;; Check that `addu16i.d + addi` is emitted for these cases.
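;; Here the immediate splits into (simm16 << 16) + simm12,
;; e.g. 0x7fff07ff = (32767 << 16) + 2047.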

define i32 @add_i32_0x7fff07ff(i32 %x) {
; LA32-LABEL: add_i32_0x7fff07ff:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524272
; LA32-NEXT:    ori $a1, $a1, 2047
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7fff07ff:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, 2047
; LA64-NEXT:    ret
  %add = add i32 %x, 2147420159
  ret i32 %add
}

define signext i32 @add_i32_0x7fff07ff_sext(i32 %x) {
; LA32-LABEL: add_i32_0x7fff07ff_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524272
; LA32-NEXT:    ori $a1, $a1, 2047
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7fff07ff_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, 2047
; LA64-NEXT:    ret
  %add = add i32 %x, 2147420159
  ret i32 %add
}

define i64 @add_i64_0x7fff07ff(i64 %x) {
; LA32-LABEL: add_i64_0x7fff07ff:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, 524272
; LA32-NEXT:    ori $a2, $a2, 2047
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0x7fff07ff:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    ret
  %add = add i64 %x, 2147420159
  ret i64 %add
}

define i32 @add_i32_0x7ffef800(i32 %x) {
; LA32-LABEL: add_i32_0x7ffef800:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524271
; LA32-NEXT:    ori $a1, $a1, 2048
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7ffef800:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i32 %x, 2147416064
  ret i32 %add
}

define signext i32 @add_i32_0x7ffef800_sext(i32 %x) {
; LA32-LABEL: add_i32_0x7ffef800_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524271
; LA32-NEXT:    ori $a1, $a1, 2048
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_0x7ffef800_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.w $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i32 %x, 2147416064
  ret i32 %add
}

define i64 @add_i64_0x7ffef800(i64 %x) {
; LA32-LABEL: add_i64_0x7ffef800:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, 524271
; LA32-NEXT:    ori $a2, $a2, 2048
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0x7ffef800:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, 32767
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i64 %x, 2147416064
  ret i64 %add
}

define i64 @add_i64_minus_0x80000800(i64 %x) {
; LA32-LABEL: add_i64_minus_0x80000800:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, 524287
; LA32-NEXT:    ori $a2, $a2, 2048
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_0x80000800:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -32768
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i64 %x, -2147485696
  ret i64 %add
}

define i32 @add_i32_minus_0x23450679(i32 %x) {
; LA32-LABEL: add_i32_minus_0x23450679:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -144465
; LA32-NEXT:    ori $a1, $a1, 2439
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x23450679:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9029
; LA64-NEXT:    addi.w $a0, $a0, -1657
; LA64-NEXT:    ret
  %add = add i32 %x, -591726201
  ret i32 %add
}

define signext i32 @add_i32_minus_0x23450679_sext(i32 %x) {
; LA32-LABEL: add_i32_minus_0x23450679_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -144465
; LA32-NEXT:    ori $a1, $a1, 2439
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x23450679_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9029
; LA64-NEXT:    addi.w $a0, $a0, -1657
; LA64-NEXT:    ret
  %add = add i32 %x, -591726201
  ret i32 %add
}

define i64 @add_i64_minus_0x23450679(i64 %x) {
; LA32-LABEL: add_i64_minus_0x23450679:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -144465
; LA32-NEXT:    ori $a2, $a2, 2439
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_0x23450679:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9029
; LA64-NEXT:    addi.d $a0, $a0, -1657
; LA64-NEXT:    ret
  %add = add i64 %x, -591726201
  ret i64 %add
}

define i32 @add_i32_minus_0x2345fedd(i32 %x) {
; LA32-LABEL: add_i32_minus_0x2345fedd:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -144480
; LA32-NEXT:    ori $a1, $a1, 291
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x2345fedd:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9030
; LA64-NEXT:    addi.w $a0, $a0, 291
; LA64-NEXT:    ret
  %add = add i32 %x, -591789789
  ret i32 %add
}

define signext i32 @add_i32_minus_0x2345fedd_sext(i32 %x) {
; LA32-LABEL: add_i32_minus_0x2345fedd_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, -144480
; LA32-NEXT:    ori $a1, $a1, 291
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x2345fedd_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9030
; LA64-NEXT:    addi.w $a0, $a0, 291
; LA64-NEXT:    ret
  %add = add i32 %x, -591789789
  ret i32 %add
}

define i64 @add_i64_minus_0x2345fedd(i64 %x) {
; LA32-LABEL: add_i64_minus_0x2345fedd:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -144480
; LA32-NEXT:    ori $a2, $a2, 291
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_0x2345fedd:
; LA64:       # %bb.0:
; LA64-NEXT:    addu16i.d $a0, $a0, -9030
; LA64-NEXT:    addi.d $a0, $a0, 291
; LA64-NEXT:    ret
  %add = add i64 %x, -591789789
  ret i64 %add
}

;; Check that `addu16i.d` isn't used for the following cases.
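;; Since its si16 operand is sign-extended, addu16i.d can only add multiples of
;; 0x10000 in the range [-0x80000000, 0x7fff0000]; a 64-bit add of 0x80000000 or
;; 0xffff0000 falls outside that range.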

define i64 @add_i64_0x80000000(i64 %x) {
; LA32-LABEL: add_i64_0x80000000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -524288
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0x80000000:
; LA64:       # %bb.0:
; LA64-NEXT:    lu12i.w $a1, -524288
; LA64-NEXT:    lu32i.d $a1, 0
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i64 %x, 2147483648
  ret i64 %add
}

define i64 @add_i64_0xffff0000(i64 %x) {
; LA32-LABEL: add_i64_0xffff0000:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a2, -16
; LA32-NEXT:    add.w $a2, $a0, $a2
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_0xffff0000:
; LA64:       # %bb.0:
; LA64-NEXT:    lu12i.w $a1, -16
; LA64-NEXT:    lu32i.d $a1, 0
; LA64-NEXT:    add.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i64 %x, 4294901760
  ret i64 %add
}

;; -0x80000800 is equivalent to +0x7ffff800 in i32, so addu16i.d isn't matched
;; in this case.
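;; (0x7ffff800 has no (simm16 << 16) + simm12 decomposition: 0x7fff << 16 leaves
;; a remainder of 0xf800, and rounding up to 0x8000 << 16 overflows simm16.)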
define i32 @add_i32_minus_0x80000800(i32 %x) {
; LA32-LABEL: add_i32_minus_0x80000800:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524287
; LA32-NEXT:    ori $a1, $a1, 2048
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x80000800:
; LA64:       # %bb.0:
; LA64-NEXT:    lu12i.w $a1, 524287
; LA64-NEXT:    ori $a1, $a1, 2048
; LA64-NEXT:    add.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i32 %x, -2147485696
  ret i32 %add
}

define signext i32 @add_i32_minus_0x80000800_sext(i32 %x) {
; LA32-LABEL: add_i32_minus_0x80000800_sext:
; LA32:       # %bb.0:
; LA32-NEXT:    lu12i.w $a1, 524287
; LA32-NEXT:    ori $a1, $a1, 2048
; LA32-NEXT:    add.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_0x80000800_sext:
; LA64:       # %bb.0:
; LA64-NEXT:    lu12i.w $a1, 524287
; LA64-NEXT:    ori $a1, $a1, 2048
; LA64-NEXT:    add.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %add = add i32 %x, -2147485696
  ret i32 %add
}

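;; Check that immediates just outside the simm12 range, but still splittable into
;; two simm12 values, are added with two addi instructions.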
define signext i32 @add_i32_4080(i32 %x) {
; LA32-LABEL: add_i32_4080:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 2047
; LA32-NEXT:    addi.w $a0, $a0, 2033
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_4080:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.w $a0, $a0, 2033
; LA64-NEXT:    ret
  %add = add i32 %x, 4080
  ret i32 %add
}

define signext i32 @add_i32_minus_4080(i32 %x) {
; LA32-LABEL: add_i32_minus_4080:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, -2048
; LA32-NEXT:    addi.w $a0, $a0, -2032
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_4080:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.w $a0, $a0, -2032
; LA64-NEXT:    ret
  %add = add i32 %x, -4080
  ret i32 %add
}

define signext i32 @add_i32_2048(i32 %x) {
; LA32-LABEL: add_i32_2048:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 2047
; LA32-NEXT:    addi.w $a0, $a0, 1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_2048:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.w $a0, $a0, 1
; LA64-NEXT:    ret
  %add = add i32 %x, 2048
  ret i32 %add
}

define signext i32 @add_i32_4094(i32 %x) {
; LA32-LABEL: add_i32_4094:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, 2047
; LA32-NEXT:    addi.w $a0, $a0, 2047
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_4094:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.w $a0, $a0, 2047
; LA64-NEXT:    ret
  %add = add i32 %x, 4094
  ret i32 %add
}

define signext i32 @add_i32_minus_2049(i32 %x) {
; LA32-LABEL: add_i32_minus_2049:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, -2048
; LA32-NEXT:    addi.w $a0, $a0, -1
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_2049:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.w $a0, $a0, -1
; LA64-NEXT:    ret
  %add = add i32 %x, -2049
  ret i32 %add
}

define signext i32 @add_i32_minus_4096(i32 %x) {
; LA32-LABEL: add_i32_minus_4096:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a0, $a0, -2048
; LA32-NEXT:    addi.w $a0, $a0, -2048
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i32_minus_4096:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.w $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i32 %x, -4096
  ret i32 %add
}

define i64 @add_i64_4080(i64 %x) {
; LA32-LABEL: add_i64_4080:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, 2047
; LA32-NEXT:    addi.w $a2, $a2, 2033
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_4080:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.d $a0, $a0, 2033
; LA64-NEXT:    ret
  %add = add i64 %x, 4080
  ret i64 %add
}

define i64 @add_i64_minus_4080(i64 %x) {
; LA32-LABEL: add_i64_minus_4080:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, -2048
; LA32-NEXT:    addi.w $a2, $a2, -2032
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_4080:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.d $a0, $a0, -2032
; LA64-NEXT:    ret
  %add = add i64 %x, -4080
  ret i64 %add
}

define i64 @add_i64_2048(i64 %x) {
; LA32-LABEL: add_i64_2048:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, 2047
; LA32-NEXT:    addi.w $a2, $a2, 1
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_2048:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.d $a0, $a0, 1
; LA64-NEXT:    ret
  %add = add i64 %x, 2048
  ret i64 %add
}

define i64 @add_i64_4094(i64 %x) {
; LA32-LABEL: add_i64_4094:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, 2047
; LA32-NEXT:    addi.w $a2, $a2, 2047
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a1, $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_4094:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    addi.d $a0, $a0, 2047
; LA64-NEXT:    ret
  %add = add i64 %x, 4094
  ret i64 %add
}

define i64 @add_i64_minus_2049(i64 %x) {
; LA32-LABEL: add_i64_minus_2049:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, -2048
; LA32-NEXT:    addi.w $a2, $a2, -1
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_2049:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.d $a0, $a0, -1
; LA64-NEXT:    ret
  %add = add i64 %x, -2049
  ret i64 %add
}

define i64 @add_i64_minus_4096(i64 %x) {
; LA32-LABEL: add_i64_minus_4096:
; LA32:       # %bb.0:
; LA32-NEXT:    addi.w $a2, $a0, -2048
; LA32-NEXT:    addi.w $a2, $a2, -2048
; LA32-NEXT:    sltu $a0, $a2, $a0
; LA32-NEXT:    add.w $a0, $a1, $a0
; LA32-NEXT:    addi.w $a1, $a0, -1
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: add_i64_minus_4096:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    addi.d $a0, $a0, -2048
; LA64-NEXT:    ret
  %add = add i64 %x, -4096
  ret i64 %add
}