; xref: /llvm-project/llvm/test/CodeGen/RISCV/select-binop-identity.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefixes=RV32,RV32I %s
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefixes=RV64I %s
; RUN: llc -mtriple=riscv64 -mcpu=sifive-u74 -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefix=SFB64 %s
; RUN: llc -mtriple=riscv64 -mattr=+xventanacondops -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefixes=VTCONDOPS64 %s
; RUN: llc -mtriple=riscv32 -mattr=+zicond -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefixes=RV32,ZICOND,ZICOND32 %s
; RUN: llc -mtriple=riscv64 -mattr=+zicond -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefixes=ZICOND,ZICOND64 %s

; InstCombine canonicalizes (c ? x | y : x) to (x | (c ? y : 0)) similar for
; other binary operations using their identity value as the constant.

; We can reverse this for and/or/xor. Allowing us to pull the binop into
; the basic block we create when we expand select.
; select(c, x, -1) feeding an i32 'and': -1 is the AND identity, so the
; select folds into a conditional mask of %y.
define signext i32 @and_select_all_ones_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
; RV32I-LABEL: and_select_all_ones_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, -1
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i32:
; SFB64:       # %bb.0:
; SFB64-NEXT:    beqz a0, .LBB0_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a2, a2, a1
; SFB64-NEXT:  .LBB0_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i32:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskcn a0, a2, a0
; VTCONDOPS64-NEXT:    and a1, a2, a1
; VTCONDOPS64-NEXT:    or a0, a1, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND-LABEL: and_select_all_ones_i32:
; ZICOND:       # %bb.0:
; ZICOND-NEXT:    czero.nez a0, a2, a0
; ZICOND-NEXT:    and a1, a2, a1
; ZICOND-NEXT:    or a0, a1, a0
; ZICOND-NEXT:    ret
  %a = select i1 %c, i32 %x, i32 -1
  %b = and i32 %a, %y
  ret i32 %b
}

; Same AND-identity fold, but the condition comes from icmp eq (%z == 4)
; rather than an i1 argument.
define signext i32 @and_select_all_ones_i32_cmp(i32 signext %x, i32 signext %y, i32 signext %z) {
; RV32I-LABEL: and_select_all_ones_i32_cmp:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a2, a2, -4
; RV32I-NEXT:    seqz a2, a2
; RV32I-NEXT:    addi a2, a2, -1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i32_cmp:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a2, a2, -4
; RV64I-NEXT:    seqz a2, a2
; RV64I-NEXT:    addi a2, a2, -1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i32_cmp:
; SFB64:       # %bb.0:
; SFB64-NEXT:    li a3, 4
; SFB64-NEXT:    bne a2, a3, .LBB1_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a1, a1, a0
; SFB64-NEXT:  .LBB1_2:
; SFB64-NEXT:    mv a0, a1
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i32_cmp:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    addi a2, a2, -4
; VTCONDOPS64-NEXT:    and a0, a1, a0
; VTCONDOPS64-NEXT:    vt.maskc a1, a1, a2
; VTCONDOPS64-NEXT:    or a0, a0, a1
; VTCONDOPS64-NEXT:    ret
;
; ZICOND-LABEL: and_select_all_ones_i32_cmp:
; ZICOND:       # %bb.0:
; ZICOND-NEXT:    addi a2, a2, -4
; ZICOND-NEXT:    and a0, a1, a0
; ZICOND-NEXT:    czero.eqz a1, a1, a2
; ZICOND-NEXT:    or a0, a0, a1
; ZICOND-NEXT:    ret
  %c = icmp eq i32 %z, 4
  %a = select i1 %c, i32 %x, i32 -1
  %b = and i32 %a, %y
  ret i32 %b
}

; AND-identity fold with a signed-less-than condition (%z < 4), exercising
; the slti-based lowering.
define signext i32 @and_select_all_ones_i32_cmp2(i32 signext %x, i32 signext %y, i32 signext %z) {
; RV32I-LABEL: and_select_all_ones_i32_cmp2:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slti a2, a2, 4
; RV32I-NEXT:    addi a2, a2, -1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i32_cmp2:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slti a2, a2, 4
; RV64I-NEXT:    addi a2, a2, -1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i32_cmp2:
; SFB64:       # %bb.0:
; SFB64-NEXT:    li a3, 4
; SFB64-NEXT:    bge a2, a3, .LBB2_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a1, a1, a0
; SFB64-NEXT:  .LBB2_2:
; SFB64-NEXT:    mv a0, a1
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i32_cmp2:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    slti a2, a2, 4
; VTCONDOPS64-NEXT:    and a0, a1, a0
; VTCONDOPS64-NEXT:    vt.maskcn a1, a1, a2
; VTCONDOPS64-NEXT:    or a0, a0, a1
; VTCONDOPS64-NEXT:    ret
;
; ZICOND-LABEL: and_select_all_ones_i32_cmp2:
; ZICOND:       # %bb.0:
; ZICOND-NEXT:    slti a2, a2, 4
; ZICOND-NEXT:    and a0, a1, a0
; ZICOND-NEXT:    czero.nez a1, a1, a2
; ZICOND-NEXT:    or a0, a0, a1
; ZICOND-NEXT:    ret
  %c = icmp slt i32 %z, 4
  %a = select i1 %c, i32 %x, i32 -1
  %b = and i32 %a, %y
  ret i32 %b
}

; i64 variant with the select arms swapped (select c, -1, x), so RV32 needs
; the mask applied to both 32-bit halves.
define i64 @and_select_all_ones_i64(i1 zeroext %c, i64 %x, i64 %y) {
; RV32-LABEL: and_select_all_ones_i64:
; RV32:       # %bb.0:
; RV32-NEXT:    neg a0, a0
; RV32-NEXT:    or a2, a0, a2
; RV32-NEXT:    or a0, a0, a1
; RV32-NEXT:    and a0, a3, a0
; RV32-NEXT:    and a1, a4, a2
; RV32-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a0, a0
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    and a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i64:
; SFB64:       # %bb.0:
; SFB64-NEXT:    bnez a0, .LBB3_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a2, a2, a1
; SFB64-NEXT:  .LBB3_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i64:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskc a0, a2, a0
; VTCONDOPS64-NEXT:    and a1, a2, a1
; VTCONDOPS64-NEXT:    or a0, a1, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND64-LABEL: and_select_all_ones_i64:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.eqz a0, a2, a0
; ZICOND64-NEXT:    and a1, a2, a1
; ZICOND64-NEXT:    or a0, a1, a0
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i64 -1, i64 %x
  %b = and i64 %y, %a
  ret i64 %b
}

; i64 AND-identity fold with an icmp eq condition; RV32 compares the i64
; via xori/or/seqz over the register pair.
define i64 @and_select_all_ones_i64_cmp(i64 %x, i64 %y, i64 %z) {
; RV32-LABEL: and_select_all_ones_i64_cmp:
; RV32:       # %bb.0:
; RV32-NEXT:    xori a4, a4, 4
; RV32-NEXT:    or a4, a4, a5
; RV32-NEXT:    seqz a4, a4
; RV32-NEXT:    addi a4, a4, -1
; RV32-NEXT:    or a1, a4, a1
; RV32-NEXT:    or a0, a4, a0
; RV32-NEXT:    and a0, a0, a2
; RV32-NEXT:    and a1, a1, a3
; RV32-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i64_cmp:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a2, a2, -4
; RV64I-NEXT:    seqz a2, a2
; RV64I-NEXT:    addi a2, a2, -1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i64_cmp:
; SFB64:       # %bb.0:
; SFB64-NEXT:    li a3, 4
; SFB64-NEXT:    bne a2, a3, .LBB4_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a1, a1, a0
; SFB64-NEXT:  .LBB4_2:
; SFB64-NEXT:    mv a0, a1
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i64_cmp:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    addi a2, a2, -4
; VTCONDOPS64-NEXT:    and a0, a1, a0
; VTCONDOPS64-NEXT:    vt.maskc a1, a1, a2
; VTCONDOPS64-NEXT:    or a0, a0, a1
; VTCONDOPS64-NEXT:    ret
;
; ZICOND64-LABEL: and_select_all_ones_i64_cmp:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    addi a2, a2, -4
; ZICOND64-NEXT:    and a0, a1, a0
; ZICOND64-NEXT:    czero.eqz a1, a1, a2
; ZICOND64-NEXT:    or a0, a0, a1
; ZICOND64-NEXT:    ret
  %c = icmp eq i64 %z, 4
  %a = select i1 %c, i64 %x, i64 -1
  %b = and i64 %a, %y
  ret i64 %b
}

; i64 AND-identity fold with a signed-less-than condition; RV32 needs a
; two-part comparison (high word sign vs. low word unsigned compare).
define i64 @and_select_all_ones_i64_cmp2(i64 %x, i64 %y, i64 %z) {
; RV32I-LABEL: and_select_all_ones_i64_cmp2:
; RV32I:       # %bb.0:
; RV32I-NEXT:    beqz a5, .LBB5_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    slti a4, a5, 0
; RV32I-NEXT:    j .LBB5_3
; RV32I-NEXT:  .LBB5_2:
; RV32I-NEXT:    sltiu a4, a4, 4
; RV32I-NEXT:  .LBB5_3:
; RV32I-NEXT:    addi a4, a4, -1
; RV32I-NEXT:    or a1, a4, a1
; RV32I-NEXT:    or a0, a4, a0
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    ret
;
; RV64I-LABEL: and_select_all_ones_i64_cmp2:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slti a2, a2, 4
; RV64I-NEXT:    addi a2, a2, -1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
;
; SFB64-LABEL: and_select_all_ones_i64_cmp2:
; SFB64:       # %bb.0:
; SFB64-NEXT:    li a3, 4
; SFB64-NEXT:    bge a2, a3, .LBB5_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    and a1, a1, a0
; SFB64-NEXT:  .LBB5_2:
; SFB64-NEXT:    mv a0, a1
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: and_select_all_ones_i64_cmp2:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    slti a2, a2, 4
; VTCONDOPS64-NEXT:    and a0, a1, a0
; VTCONDOPS64-NEXT:    vt.maskcn a1, a1, a2
; VTCONDOPS64-NEXT:    or a0, a0, a1
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: and_select_all_ones_i64_cmp2:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    slti a6, a5, 0
; ZICOND32-NEXT:    sltiu a4, a4, 4
; ZICOND32-NEXT:    czero.eqz a6, a6, a5
; ZICOND32-NEXT:    czero.nez a4, a4, a5
; ZICOND32-NEXT:    or a4, a4, a6
; ZICOND32-NEXT:    addi a4, a4, -1
; ZICOND32-NEXT:    or a1, a4, a1
; ZICOND32-NEXT:    or a0, a4, a0
; ZICOND32-NEXT:    and a0, a0, a2
; ZICOND32-NEXT:    and a1, a1, a3
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: and_select_all_ones_i64_cmp2:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    slti a2, a2, 4
; ZICOND64-NEXT:    and a0, a1, a0
; ZICOND64-NEXT:    czero.nez a1, a1, a2
; ZICOND64-NEXT:    or a0, a0, a1
; ZICOND64-NEXT:    ret
  %c = icmp slt i64 %z, 4
  %a = select i1 %c, i64 %x, i64 -1
  %b = and i64 %a, %y
  ret i64 %b
}

; select(c, x, 0) feeding an i32 'or': 0 is the OR identity, so the select
; becomes a conditional mask OR'd into %y.
define signext i32 @or_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
; RV32I-LABEL: or_select_all_zeros_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a0, a0
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: or_select_all_zeros_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a0, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: or_select_all_zeros_i32:
; SFB64:       # %bb.0:
; SFB64-NEXT:    beqz a0, .LBB6_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    or a2, a2, a1
; SFB64-NEXT:  .LBB6_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: or_select_all_zeros_i32:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskc a0, a1, a0
; VTCONDOPS64-NEXT:    or a0, a2, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND-LABEL: or_select_all_zeros_i32:
; ZICOND:       # %bb.0:
; ZICOND-NEXT:    czero.eqz a0, a1, a0
; ZICOND-NEXT:    or a0, a2, a0
; ZICOND-NEXT:    ret
  %a = select i1 %c, i32 %x, i32 0
  %b = or i32 %y, %a
  ret i32 %b
}

; i64 OR-identity fold with swapped select arms (select c, 0, x), so the
; mask sense is inverted (maskcn / czero.nez).
define i64 @or_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
; RV32I-LABEL: or_select_all_zeros_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a1, a2, a4
; RV32I-NEXT:    ret
;
; RV64I-LABEL: or_select_all_zeros_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, -1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    ret
;
; SFB64-LABEL: or_select_all_zeros_i64:
; SFB64:       # %bb.0:
; SFB64-NEXT:    bnez a0, .LBB7_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    or a2, a2, a1
; SFB64-NEXT:  .LBB7_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: or_select_all_zeros_i64:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskcn a0, a1, a0
; VTCONDOPS64-NEXT:    or a0, a0, a2
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: or_select_all_zeros_i64:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.nez a2, a2, a0
; ZICOND32-NEXT:    czero.nez a0, a1, a0
; ZICOND32-NEXT:    or a0, a0, a3
; ZICOND32-NEXT:    or a1, a2, a4
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: or_select_all_zeros_i64:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.nez a0, a1, a0
; ZICOND64-NEXT:    or a0, a0, a2
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i64 0, i64 %x
  %b = or i64 %a, %y
  ret i64 %b
}

; select(c, 0, x) feeding an i32 'xor': 0 is the XOR identity, so the select
; becomes a conditional mask XOR'd into %y.
define signext i32 @xor_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
; RV32I-LABEL: xor_select_all_zeros_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    xor a0, a2, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: xor_select_all_zeros_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, -1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    xor a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: xor_select_all_zeros_i32:
; SFB64:       # %bb.0:
; SFB64-NEXT:    bnez a0, .LBB8_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    xor a2, a2, a1
; SFB64-NEXT:  .LBB8_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: xor_select_all_zeros_i32:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskcn a0, a1, a0
; VTCONDOPS64-NEXT:    xor a0, a2, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND-LABEL: xor_select_all_zeros_i32:
; ZICOND:       # %bb.0:
; ZICOND-NEXT:    czero.nez a0, a1, a0
; ZICOND-NEXT:    xor a0, a2, a0
; ZICOND-NEXT:    ret
  %a = select i1 %c, i32 0, i32 %x
  %b = xor i32 %y, %a
  ret i32 %b
}

; i64 XOR-identity fold (select c, x, 0); RV32 applies the mask to both
; 32-bit halves before the xor.
define i64 @xor_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
; RV32I-LABEL: xor_select_all_zeros_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a0, a0
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    xor a0, a0, a3
; RV32I-NEXT:    xor a1, a2, a4
; RV32I-NEXT:    ret
;
; RV64I-LABEL: xor_select_all_zeros_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a0, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    xor a0, a0, a2
; RV64I-NEXT:    ret
;
; SFB64-LABEL: xor_select_all_zeros_i64:
; SFB64:       # %bb.0:
; SFB64-NEXT:    beqz a0, .LBB9_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    xor a2, a2, a1
; SFB64-NEXT:  .LBB9_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: xor_select_all_zeros_i64:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskc a0, a1, a0
; VTCONDOPS64-NEXT:    xor a0, a0, a2
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: xor_select_all_zeros_i64:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.eqz a2, a2, a0
; ZICOND32-NEXT:    czero.eqz a0, a1, a0
; ZICOND32-NEXT:    xor a0, a0, a3
; ZICOND32-NEXT:    xor a1, a2, a4
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: xor_select_all_zeros_i64:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.eqz a0, a1, a0
; ZICOND64-NEXT:    xor a0, a0, a2
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i64 %x, i64 0
  %b = xor i64 %a, %y
  ret i64 %b
}

; select(c, 0, x) feeding an i32 'add': 0 is the ADD identity; RV64 targets
; use addw for the 32-bit result.
define signext i32 @add_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
; RV32I-LABEL: add_select_all_zeros_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    add a0, a2, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_select_all_zeros_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, -1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    addw a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: add_select_all_zeros_i32:
; SFB64:       # %bb.0:
; SFB64-NEXT:    bnez a0, .LBB10_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    addw a2, a2, a1
; SFB64-NEXT:  .LBB10_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: add_select_all_zeros_i32:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskcn a0, a1, a0
; VTCONDOPS64-NEXT:    addw a0, a2, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: add_select_all_zeros_i32:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.nez a0, a1, a0
; ZICOND32-NEXT:    add a0, a2, a0
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: add_select_all_zeros_i32:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.nez a0, a1, a0
; ZICOND64-NEXT:    addw a0, a2, a0
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i32 0, i32 %x
  %b = add i32 %y, %a
  ret i32 %b
}

; i64 ADD-identity fold (select c, x, 0); RV32 performs the add with an
; explicit carry (sltu) across the register pair.
define i64 @add_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
; RV32I-LABEL: add_select_all_zeros_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a0, a0
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    and a1, a0, a1
; RV32I-NEXT:    add a0, a1, a3
; RV32I-NEXT:    sltu a1, a0, a1
; RV32I-NEXT:    add a2, a2, a4
; RV32I-NEXT:    add a1, a2, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_select_all_zeros_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a0, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    add a0, a0, a2
; RV64I-NEXT:    ret
;
; SFB64-LABEL: add_select_all_zeros_i64:
; SFB64:       # %bb.0:
; SFB64-NEXT:    beqz a0, .LBB11_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    add a2, a2, a1
; SFB64-NEXT:  .LBB11_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: add_select_all_zeros_i64:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskc a0, a1, a0
; VTCONDOPS64-NEXT:    add a0, a0, a2
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: add_select_all_zeros_i64:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.eqz a2, a2, a0
; ZICOND32-NEXT:    czero.eqz a1, a1, a0
; ZICOND32-NEXT:    add a0, a1, a3
; ZICOND32-NEXT:    sltu a1, a0, a1
; ZICOND32-NEXT:    add a2, a2, a4
; ZICOND32-NEXT:    add a1, a2, a1
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: add_select_all_zeros_i64:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.eqz a0, a1, a0
; ZICOND64-NEXT:    add a0, a0, a2
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i64 %x, i64 0
  %b = add i64 %a, %y
  ret i64 %b
}

; select(c, 0, x) as the subtrahend of an i32 'sub': 0 is the SUB identity
; on the right-hand side only, so %y - select(...) still folds.
define signext i32 @sub_select_all_zeros_i32(i1 zeroext %c, i32 signext %x, i32 signext %y) {
; RV32I-LABEL: sub_select_all_zeros_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    sub a0, a2, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sub_select_all_zeros_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, -1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    subw a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: sub_select_all_zeros_i32:
; SFB64:       # %bb.0:
; SFB64-NEXT:    bnez a0, .LBB12_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    subw a2, a2, a1
; SFB64-NEXT:  .LBB12_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: sub_select_all_zeros_i32:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskcn a0, a1, a0
; VTCONDOPS64-NEXT:    subw a0, a2, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: sub_select_all_zeros_i32:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.nez a0, a1, a0
; ZICOND32-NEXT:    sub a0, a2, a0
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: sub_select_all_zeros_i32:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.nez a0, a1, a0
; ZICOND64-NEXT:    subw a0, a2, a0
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i32 0, i32 %x
  %b = sub i32 %y, %a
  ret i32 %b
}

; i64 SUB-identity fold (subtrahend is select c, x, 0); RV32 performs the
; subtract with an explicit borrow (sltu) across the register pair.
define i64 @sub_select_all_zeros_i64(i1 zeroext %c, i64 %x, i64 %y) {
; RV32I-LABEL: sub_select_all_zeros_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    neg a0, a0
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    sltu a1, a3, a0
; RV32I-NEXT:    sub a4, a4, a2
; RV32I-NEXT:    sub a1, a4, a1
; RV32I-NEXT:    sub a0, a3, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sub_select_all_zeros_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    neg a0, a0
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    sub a0, a2, a0
; RV64I-NEXT:    ret
;
; SFB64-LABEL: sub_select_all_zeros_i64:
; SFB64:       # %bb.0:
; SFB64-NEXT:    beqz a0, .LBB13_2
; SFB64-NEXT:  # %bb.1:
; SFB64-NEXT:    sub a2, a2, a1
; SFB64-NEXT:  .LBB13_2:
; SFB64-NEXT:    mv a0, a2
; SFB64-NEXT:    ret
;
; VTCONDOPS64-LABEL: sub_select_all_zeros_i64:
; VTCONDOPS64:       # %bb.0:
; VTCONDOPS64-NEXT:    vt.maskc a0, a1, a0
; VTCONDOPS64-NEXT:    sub a0, a2, a0
; VTCONDOPS64-NEXT:    ret
;
; ZICOND32-LABEL: sub_select_all_zeros_i64:
; ZICOND32:       # %bb.0:
; ZICOND32-NEXT:    czero.eqz a2, a2, a0
; ZICOND32-NEXT:    czero.eqz a0, a1, a0
; ZICOND32-NEXT:    sltu a1, a3, a0
; ZICOND32-NEXT:    sub a4, a4, a2
; ZICOND32-NEXT:    sub a1, a4, a1
; ZICOND32-NEXT:    sub a0, a3, a0
; ZICOND32-NEXT:    ret
;
; ZICOND64-LABEL: sub_select_all_zeros_i64:
; ZICOND64:       # %bb.0:
; ZICOND64-NEXT:    czero.eqz a0, a1, a0
; ZICOND64-NEXT:    sub a0, a2, a0
; ZICOND64-NEXT:    ret
  %a = select i1 %c, i64 %x, i64 0
  %b = sub i64 %y, %a
  ret i64 %b
}
