; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=powerpc64le-- -verify-machineinstrs | FileCheck %s

; There are at least 3 potential patterns corresponding to an unsigned saturated add: min, cmp with sum, cmp with not.
; Test each of those patterns with i8/i16/i32/i64.
; Test each of those with a constant operand and a variable operand.
; Test each of those with a 128-bit vector type.
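;
; Illustrative sketch only (not exercised by the RUN line above): for a generic
; unsigned constant C, with ~C denoting its bitwise complement (the tests below
; use C = 42, so ~C = -43), the three scalar shapes look roughly like:
;   using_min:        %s = select (icmp ult %x, ~C), %x, ~C ; %r = add %s, C
;   using_cmp_sum:    %a = add %x, C ; %r = select (icmp ugt %x, %a), -1, %a
;   using_cmp_notval: %a = add %x, C ; %r = select (icmp ugt %x, ~C), -1, %a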

define i8 @unsigned_sat_constant_i8_using_min(i8 %x) {
; CHECK-LABEL: unsigned_sat_constant_i8_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 3, 24
; CHECK-NEXT:    cmplwi 4, 213
; CHECK-NEXT:    li 4, -43
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    blr
  %c = icmp ult i8 %x, -43
  %s = select i1 %c, i8 %x, i8 -43
  %r = add i8 %s, 42
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_sum(i8 %x) {
; CHECK-LABEL: unsigned_sat_constant_i8_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 3, 3, 24
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    andi. 4, 3, 256
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iseleq 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_notval(i8 %x) {
; CHECK-LABEL: unsigned_sat_constant_i8_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 3, 24
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    cmplwi 4, 213
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iselgt 3, 4, 3
; CHECK-NEXT:    blr
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, -43
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_constant_i16_using_min(i16 %x) {
; CHECK-LABEL: unsigned_sat_constant_i16_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 3, 16
; CHECK-NEXT:    cmplwi 4, 65493
; CHECK-NEXT:    li 4, -43
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    blr
  %c = icmp ult i16 %x, -43
  %s = select i1 %c, i16 %x, i16 -43
  %r = add i16 %s, 42
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_sum(i16 %x) {
; CHECK-LABEL: unsigned_sat_constant_i16_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 3, 3, 16
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    andis. 4, 3, 1
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iseleq 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_notval(i16 %x) {
; CHECK-LABEL: unsigned_sat_constant_i16_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 3, 16
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    cmplwi 4, 65493
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iselgt 3, 4, 3
; CHECK-NEXT:    blr
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, -43
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_constant_i32_using_min(i32 %x) {
; CHECK-LABEL: unsigned_sat_constant_i32_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li 4, -43
; CHECK-NEXT:    cmplw 3, 4
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    blr
  %c = icmp ult i32 %x, -43
  %s = select i1 %c, i32 %x, i32 -43
  %r = add i32 %s, 42
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_sum(i32 %x) {
; CHECK-LABEL: unsigned_sat_constant_i32_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi 4, 3, 42
; CHECK-NEXT:    cmplw 4, 3
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_notval(i32 %x) {
; CHECK-LABEL: unsigned_sat_constant_i32_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li 5, -43
; CHECK-NEXT:    addi 4, 3, 42
; CHECK-NEXT:    cmplw 3, 5
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    iselgt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, -43
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_constant_i64_using_min(i64 %x) {
; CHECK-LABEL: unsigned_sat_constant_i64_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li 4, -43
; CHECK-NEXT:    cmpld 3, 4
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    addi 3, 3, 42
; CHECK-NEXT:    blr
  %c = icmp ult i64 %x, -43
  %s = select i1 %c, i64 %x, i64 -43
  %r = add i64 %s, 42
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_sum(i64 %x) {
; CHECK-LABEL: unsigned_sat_constant_i64_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi 4, 3, 42
; CHECK-NEXT:    cmpld 4, 3
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_notval(i64 %x) {
; CHECK-LABEL: unsigned_sat_constant_i64_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi 4, 3, 42
; CHECK-NEXT:    cmpld 4, 3
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, -43
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i8 @unsigned_sat_variable_i8_using_min(i8 %x, i8 %y) {
; CHECK-LABEL: unsigned_sat_variable_i8_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 6, 4
; CHECK-NEXT:    clrlwi 5, 3, 24
; CHECK-NEXT:    clrlwi 7, 6, 24
; CHECK-NEXT:    cmplw 5, 7
; CHECK-NEXT:    isellt 3, 3, 6
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i8 %y, -1
  %c = icmp ult i8 %x, %noty
  %s = select i1 %c, i8 %x, i8 %noty
  %r = add i8 %s, %y
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_sum(i8 %x, i8 %y) {
; CHECK-LABEL: unsigned_sat_variable_i8_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 4, 24
; CHECK-NEXT:    clrlwi 3, 3, 24
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    andi. 4, 3, 256
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iseleq 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_notval(i8 %x, i8 %y) {
; CHECK-LABEL: unsigned_sat_variable_i8_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 6, 4
; CHECK-NEXT:    clrlwi 5, 3, 24
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    clrlwi 6, 6, 24
; CHECK-NEXT:    cmplw 5, 6
; CHECK-NEXT:    iselgt 3, 4, 3
; CHECK-NEXT:    blr
  %noty = xor i8 %y, -1
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %noty
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_variable_i16_using_min(i16 %x, i16 %y) {
; CHECK-LABEL: unsigned_sat_variable_i16_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 6, 4
; CHECK-NEXT:    clrlwi 5, 3, 16
; CHECK-NEXT:    clrlwi 7, 6, 16
; CHECK-NEXT:    cmplw 5, 7
; CHECK-NEXT:    isellt 3, 3, 6
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i16 %y, -1
  %c = icmp ult i16 %x, %noty
  %s = select i1 %c, i16 %x, i16 %noty
  %r = add i16 %s, %y
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_sum(i16 %x, i16 %y) {
; CHECK-LABEL: unsigned_sat_variable_i16_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    clrlwi 4, 4, 16
; CHECK-NEXT:    clrlwi 3, 3, 16
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    andis. 4, 3, 1
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    iseleq 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_notval(i16 %x, i16 %y) {
; CHECK-LABEL: unsigned_sat_variable_i16_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 6, 4
; CHECK-NEXT:    clrlwi 5, 3, 16
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    li 4, -1
; CHECK-NEXT:    clrlwi 6, 6, 16
; CHECK-NEXT:    cmplw 5, 6
; CHECK-NEXT:    iselgt 3, 4, 3
; CHECK-NEXT:    blr
  %noty = xor i16 %y, -1
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %noty
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_variable_i32_using_min(i32 %x, i32 %y) {
; CHECK-LABEL: unsigned_sat_variable_i32_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 5, 4
; CHECK-NEXT:    cmplw 3, 5
; CHECK-NEXT:    isellt 3, 3, 5
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i32 %y, -1
  %c = icmp ult i32 %x, %noty
  %s = select i1 %c, i32 %x, i32 %noty
  %r = add i32 %s, %y
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_sum(i32 %x, i32 %y) {
; CHECK-LABEL: unsigned_sat_variable_i32_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    add 4, 3, 4
; CHECK-NEXT:    cmplw 4, 3
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_notval(i32 %x, i32 %y) {
; CHECK-LABEL: unsigned_sat_variable_i32_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 5, 4
; CHECK-NEXT:    add 4, 3, 4
; CHECK-NEXT:    cmplw 3, 5
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    iselgt 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i32 %y, -1
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %noty
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_variable_i64_using_min(i64 %x, i64 %y) {
; CHECK-LABEL: unsigned_sat_variable_i64_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 5, 4
; CHECK-NEXT:    cmpld 3, 5
; CHECK-NEXT:    isellt 3, 3, 5
; CHECK-NEXT:    add 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i64 %y, -1
  %c = icmp ult i64 %x, %noty
  %s = select i1 %c, i64 %x, i64 %noty
  %r = add i64 %s, %y
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_sum(i64 %x, i64 %y) {
; CHECK-LABEL: unsigned_sat_variable_i64_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    add 4, 3, 4
; CHECK-NEXT:    cmpld 4, 3
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    isellt 3, 3, 4
; CHECK-NEXT:    blr
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_notval(i64 %x, i64 %y) {
; CHECK-LABEL: unsigned_sat_variable_i64_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    not 5, 4
; CHECK-NEXT:    add 4, 3, 4
; CHECK-NEXT:    cmpld 3, 5
; CHECK-NEXT:    li 3, -1
; CHECK-NEXT:    iselgt 3, 3, 4
; CHECK-NEXT:    blr
  %noty = xor i64 %y, -1
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %noty
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_min(<16 x i8> %x) {
; CHECK-LABEL: unsigned_sat_constant_v16i8_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI24_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI24_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    addis 3, 2, .LCPI24_1@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI24_1@toc@l
; CHECK-NEXT:    vminub 2, 2, 3
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vaddubm 2, 2, 3
; CHECK-NEXT:    blr
  %c = icmp ult <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = add <16 x i8> %s, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_sum(<16 x i8> %x) {
; CHECK-LABEL: unsigned_sat_constant_v16i8_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI25_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI25_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vaddubs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_notval(<16 x i8> %x) {
; CHECK-LABEL: unsigned_sat_constant_v16i8_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI26_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI26_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vaddubs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_min(<8 x i16> %x) {
; CHECK-LABEL: unsigned_sat_constant_v8i16_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI27_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI27_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    addis 3, 2, .LCPI27_1@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI27_1@toc@l
; CHECK-NEXT:    vminuh 2, 2, 3
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduhm 2, 2, 3
; CHECK-NEXT:    blr
  %c = icmp ult <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = add <8 x i16> %s, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_sum(<8 x i16> %x) {
; CHECK-LABEL: unsigned_sat_constant_v8i16_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI28_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI28_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduhs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_notval(<8 x i16> %x) {
; CHECK-LABEL: unsigned_sat_constant_v8i16_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI29_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI29_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduhs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_min(<4 x i32> %x) {
; CHECK-LABEL: unsigned_sat_constant_v4i32_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI30_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI30_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    addis 3, 2, .LCPI30_1@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI30_1@toc@l
; CHECK-NEXT:    vminuw 2, 2, 3
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduwm 2, 2, 3
; CHECK-NEXT:    blr
  %c = icmp ult <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = add <4 x i32> %s, <i32 42, i32 42, i32 42, i32 42>
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_sum(<4 x i32> %x) {
; CHECK-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI31_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI31_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduws 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval(<4 x i32> %x) {
; CHECK-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI32_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI32_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vadduws 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; CHECK-LABEL: unsigned_sat_constant_v2i64_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI33_0@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI33_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    addis 3, 2, .LCPI33_1@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI33_1@toc@l
; CHECK-NEXT:    vminud 2, 2, 3
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vaddudm 2, 2, 3
; CHECK-NEXT:    blr
  %c = icmp ult <2 x i64> %x, <i64 -43, i64 -43>
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> <i64 -43, i64 -43>
  %r = add <2 x i64> %s, <i64 42, i64 42>
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_sum(<2 x i64> %x) {
; CHECK-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI34_0@toc@ha
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    addi 3, 3, .LCPI34_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    vaddudm 3, 2, 3
; CHECK-NEXT:    vcmpgtud 2, 2, 3
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_notval(<2 x i64> %x) {
; CHECK-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addis 3, 2, .LCPI35_0@toc@ha
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    addi 3, 3, .LCPI35_0@toc@l
; CHECK-NEXT:    lxvd2x 35, 0, 3
; CHECK-NEXT:    addis 3, 2, .LCPI35_1@toc@ha
; CHECK-NEXT:    addi 3, 3, .LCPI35_1@toc@l
; CHECK-NEXT:    lxvd2x 36, 0, 3
; CHECK-NEXT:    vaddudm 3, 2, 3
; CHECK-NEXT:    vcmpgtud 2, 2, 4
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, <i64 -43, i64 -43>
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_min(<16 x i8> %x, <16 x i8> %y) {
; CHECK-LABEL: unsigned_sat_variable_v16i8_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vminub 2, 2, 4
; CHECK-NEXT:    vaddubm 2, 2, 3
; CHECK-NEXT:    blr
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %c = icmp ult <16 x i8> %x, %noty
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> %noty
  %r = add <16 x i8> %s, %y
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_sum(<16 x i8> %x, <16 x i8> %y) {
; CHECK-LABEL: unsigned_sat_variable_v16i8_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vaddubs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_notval(<16 x i8> %x, <16 x i8> %y) {
; CHECK-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vaddubm 3, 2, 3
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    vcmpgtub 2, 2, 4
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %noty
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_min(<8 x i16> %x, <8 x i16> %y) {
; CHECK-LABEL: unsigned_sat_variable_v8i16_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vminuh 2, 2, 4
; CHECK-NEXT:    vadduhm 2, 2, 3
; CHECK-NEXT:    blr
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %c = icmp ult <8 x i16> %x, %noty
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> %noty
  %r = add <8 x i16> %s, %y
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_sum(<8 x i16> %x, <8 x i16> %y) {
; CHECK-LABEL: unsigned_sat_variable_v8i16_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vadduhs 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_notval(<8 x i16> %x, <8 x i16> %y) {
; CHECK-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vadduhm 3, 2, 3
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    vcmpgtuh 2, 2, 4
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %noty
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_min(<4 x i32> %x, <4 x i32> %y) {
; CHECK-LABEL: unsigned_sat_variable_v4i32_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vminuw 2, 2, 4
; CHECK-NEXT:    vadduwm 2, 2, 3
; CHECK-NEXT:    blr
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %c = icmp ult <4 x i32> %x, %noty
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> %noty
  %r = add <4 x i32> %s, %y
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_sum(<4 x i32> %x, <4 x i32> %y) {
; CHECK-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vadduws 2, 2, 3
; CHECK-NEXT:    blr
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4 x i32> %y) {
; CHECK-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vadduwm 3, 2, 3
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    vcmpgtuw 2, 2, 4
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %noty
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64> %y) {
; CHECK-LABEL: unsigned_sat_variable_v2i64_using_min:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vminud 2, 2, 4
; CHECK-NEXT:    vaddudm 2, 2, 3
; CHECK-NEXT:    blr
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %c = icmp ult <2 x i64> %x, %noty
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> %noty
  %r = add <2 x i64> %s, %y
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i64> %y) {
; CHECK-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vaddudm 3, 2, 3
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    vcmpgtud 2, 2, 3
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2 x i64> %y) {
; CHECK-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xxlnor 36, 35, 35
; CHECK-NEXT:    vaddudm 3, 2, 3
; CHECK-NEXT:    xxleqv 0, 0, 0
; CHECK-NEXT:    vcmpgtud 2, 2, 4
; CHECK-NEXT:    xxsel 34, 35, 0, 34
; CHECK-NEXT:    blr
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %noty
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

declare <4 x i128> @llvm.sadd.sat.v4i128(<4 x i128> %a, <4 x i128> %b);

define <4 x i128> @sadd(<4 x i128> %a, <4 x i128> %b) local_unnamed_addr {
; CHECK-LABEL: sadd:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vadduqm 0, 2, 6
; CHECK-NEXT:    vadduqm 10, 4, 8
; CHECK-NEXT:    mfocrf 12, 32
; CHECK-NEXT:    stw 12, 8(1)
; CHECK-NEXT:    xxswapd 0, 34
; CHECK-NEXT:    xxswapd 4, 36
; CHECK-NEXT:    vadduqm 1, 3, 7
; CHECK-NEXT:    vadduqm 11, 5, 9
; CHECK-NEXT:    mffprd 3, 0
; CHECK-NEXT:    mffprd 6, 4
; CHECK-NEXT:    lwz 12, 8(1)
; CHECK-NEXT:    xxswapd 2, 35
; CHECK-NEXT:    xxswapd 5, 37
; CHECK-NEXT:    mffprd 4, 2
; CHECK-NEXT:    xxswapd 1, 32
; CHECK-NEXT:    xxswapd 6, 42
; CHECK-NEXT:    mffprd 5, 1
; CHECK-NEXT:    cmpld 6, 5, 3
; CHECK-NEXT:    mffprd 7, 6
; CHECK-NEXT:    xxswapd 3, 33
; CHECK-NEXT:    xxswapd 7, 43
; CHECK-NEXT:    mffprd 3, 3
; CHECK-NEXT:    cmpld 5, 7, 6
; CHECK-NEXT:    mffprd 6, 5
; CHECK-NEXT:    mffprd 7, 7
; CHECK-NEXT:    mfvsrd 5, 36
; CHECK-NEXT:    cmpld 3, 4
; CHECK-NEXT:    mfvsrd 3, 34
; CHECK-NEXT:    cmpld 1, 7, 6
; CHECK-NEXT:    mfvsrd 7, 32
; CHECK-NEXT:    mfvsrd 4, 35
; CHECK-NEXT:    mfvsrd 6, 37
; CHECK-NEXT:    cmpld 7, 7, 3
; CHECK-NEXT:    cmpd 2, 7, 3
; CHECK-NEXT:    mfvsrd 3, 33
; CHECK-NEXT:    crandc 21, 8, 30
; CHECK-NEXT:    crand 22, 30, 24
; CHECK-NEXT:    cmpld 6, 3, 4
; CHECK-NEXT:    cmpd 7, 3, 4
; CHECK-NEXT:    mfvsrd 4, 42
; CHECK-NEXT:    sradi 3, 3, 63
; CHECK-NEXT:    mtocrf 32, 12
; CHECK-NEXT:    crnor 21, 22, 21
; CHECK-NEXT:    crandc 23, 28, 26
; CHECK-NEXT:    crand 24, 26, 0
; CHECK-NEXT:    cmpld 4, 5
; CHECK-NEXT:    cmpd 7, 4, 5
; CHECK-NEXT:    mfvsrd 5, 43
; CHECK-NEXT:    crnor 22, 24, 23
; CHECK-NEXT:    mtfprd 5, 3
; CHECK-NEXT:    sradi 4, 4, 63
; CHECK-NEXT:    mtfprd 6, 4
; CHECK-NEXT:    crandc 25, 28, 2
; CHECK-NEXT:    crand 20, 2, 20
; CHECK-NEXT:    cmpld 5, 6
; CHECK-NEXT:    cmpd 7, 5, 6
; CHECK-NEXT:    mfvsrd 6, 38
; CHECK-NEXT:    sradi 5, 5, 63
; CHECK-NEXT:    crnor 20, 20, 25
; CHECK-NEXT:    mtfprd 7, 5
; CHECK-NEXT:    sradi 6, 6, 63
; CHECK-NEXT:    crandc 26, 28, 2
; CHECK-NEXT:    crand 27, 2, 4
; CHECK-NEXT:    crnor 23, 27, 26
; CHECK-NEXT:    mtfprd 0, 6
; CHECK-NEXT:    mfvsrd 6, 39
; CHECK-NEXT:    sradi 6, 6, 63
; CHECK-NEXT:    mtfprd 1, 6
; CHECK-NEXT:    mfvsrd 6, 40
; CHECK-NEXT:    sradi 6, 6, 63
; CHECK-NEXT:    mtfprd 2, 6
; CHECK-NEXT:    mfvsrd 6, 41
; CHECK-NEXT:    sradi 6, 6, 63
; CHECK-NEXT:    mtfprd 3, 6
; CHECK-NEXT:    sradi 6, 7, 63
; CHECK-NEXT:    mtfprd 4, 6
; CHECK-NEXT:    li 6, -1
; CHECK-NEXT:    isel 3, 0, 6, 21
; CHECK-NEXT:    isel 4, 0, 6, 22
; CHECK-NEXT:    isel 5, 0, 6, 20
; CHECK-NEXT:    isel 6, 0, 6, 23
; CHECK-NEXT:    mtfprd 8, 3
; CHECK-NEXT:    addis 3, 2, .LCPI48_0@toc@ha
; CHECK-NEXT:    mtfprd 10, 4
; CHECK-NEXT:    mtfprd 11, 5
; CHECK-NEXT:    mtfprd 12, 6
; CHECK-NEXT:    addi 3, 3, .LCPI48_0@toc@l
; CHECK-NEXT:    lxvd2x 9, 0, 3
; CHECK-NEXT:    xxspltd 45, 6, 0
; CHECK-NEXT:    xxspltd 46, 7, 0
; CHECK-NEXT:    xxspltd 34, 0, 0
; CHECK-NEXT:    xxspltd 40, 5, 0
; CHECK-NEXT:    xxspltd 35, 1, 0
; CHECK-NEXT:    xxspltd 36, 2, 0
; CHECK-NEXT:    xxspltd 38, 3, 0
; CHECK-NEXT:    xxspltd 39, 4, 0
; CHECK-NEXT:    xxspltd 41, 8, 0
; CHECK-NEXT:    xxspltd 44, 10, 0
; CHECK-NEXT:    xxspltd 47, 11, 0
; CHECK-NEXT:    xxspltd 48, 12, 0
; CHECK-NEXT:    xxlxor 0, 34, 41
; CHECK-NEXT:    xxlxor 1, 35, 44
; CHECK-NEXT:    xxswapd 37, 9
; CHECK-NEXT:    xxlxor 2, 39, 37
; CHECK-NEXT:    xxlxor 3, 40, 37
; CHECK-NEXT:    xxsel 34, 32, 2, 0
; CHECK-NEXT:    xxsel 35, 33, 3, 1
; CHECK-NEXT:    xxlxor 0, 36, 47
; CHECK-NEXT:    xxlxor 1, 45, 37
; CHECK-NEXT:    xxsel 36, 42, 1, 0
; CHECK-NEXT:    xxlxor 0, 38, 48
; CHECK-NEXT:    xxlxor 1, 46, 37
; CHECK-NEXT:    xxsel 37, 43, 1, 0
; CHECK-NEXT:    blr
  %c = call <4 x i128> @llvm.sadd.sat.v4i128(<4 x i128> %a, <4 x i128> %b)
  ret <4 x i128> %c
}

define i64 @unsigned_sat_constant_i64_with_single_use(i64 %x) {
; CHECK-LABEL: unsigned_sat_constant_i64_with_single_use:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi 4, 3, -4
; CHECK-NEXT:    cmpld 4, 3
; CHECK-NEXT:    iselgt 3, 0, 4
; CHECK-NEXT:    blr
  %umin = call i64 @llvm.umin.i64(i64 %x, i64 4)
  %sub = sub i64 %x, %umin
  ret i64 %sub
}

define i64 @unsigned_sat_constant_i64_with_multiple_use(i64 %x, i64 %y) {
; CHECK-LABEL: unsigned_sat_constant_i64_with_multiple_use:
; CHECK:       # %bb.0:
; CHECK-NEXT:    cmpldi 3, 4
; CHECK-NEXT:    li 5, 4
; CHECK-NEXT:    isellt 5, 3, 5
; CHECK-NEXT:    sub 3, 3, 5
; CHECK-NEXT:    add 4, 4, 5
; CHECK-NEXT:    mulld 3, 3, 4
; CHECK-NEXT:    blr
  %umin = call i64 @llvm.umin.i64(i64 %x, i64 4)
  %sub = sub i64 %x, %umin
  %add = add i64 %y, %umin
  %res = mul i64 %sub, %add
  ret i64 %res
}

declare i64 @llvm.umin.i64(i64, i64)