xref: /llvm-project/llvm/test/CodeGen/RISCV/memcmp-optsize.ll (revision 13cf5c9c227a502f86f8c0e3c7d5fe147bc91b8b)
1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
2; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -O2  \
3; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32
4; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -O2  \
5; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64
6; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb -O2  \
7; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBB
8; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb -O2  \
9; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBB
10; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb -O2  \
11; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBKB
12; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb -O2  \
13; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBKB
14; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v -O2  \
15; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-V
16; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v -O2  \
17; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-V
18; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+unaligned-scalar-mem -O2 \
19; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32
20; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+unaligned-scalar-mem -O2 \
21; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64
22; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb,+unaligned-scalar-mem -O2 \
23; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBB
24; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb,+unaligned-scalar-mem -O2 \
25; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBB
26; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
27; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBKB
28; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
29; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBKB
30; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
31; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-V
32; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
33; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-V
34
35declare i32 @bcmp(ptr, ptr, iXLen) nounwind readonly
36declare i32 @memcmp(ptr, ptr, iXLen) nounwind readonly
37
38define i32 @bcmp_size_0(ptr %s1, ptr %s2) nounwind optsize {
39; CHECK-RV32-LABEL: bcmp_size_0:
40; CHECK-RV32:       # %bb.0: # %entry
41; CHECK-RV32-NEXT:    addi sp, sp, -16
42; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
43; CHECK-RV32-NEXT:    li a2, 0
44; CHECK-RV32-NEXT:    call bcmp
45; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
46; CHECK-RV32-NEXT:    addi sp, sp, 16
47; CHECK-RV32-NEXT:    ret
48;
49; CHECK-RV64-LABEL: bcmp_size_0:
50; CHECK-RV64:       # %bb.0: # %entry
51; CHECK-RV64-NEXT:    addi sp, sp, -16
52; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
53; CHECK-RV64-NEXT:    li a2, 0
54; CHECK-RV64-NEXT:    call bcmp
55; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
56; CHECK-RV64-NEXT:    addi sp, sp, 16
57; CHECK-RV64-NEXT:    ret
58entry:
59  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 0)
60  ret i32 %bcmp
61}
62
63define i32 @bcmp_size_1(ptr %s1, ptr %s2) nounwind optsize {
64; CHECK-ALIGNED-RV32-LABEL: bcmp_size_1:
65; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
66; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
67; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
68; CHECK-ALIGNED-RV32-NEXT:    li a2, 1
69; CHECK-ALIGNED-RV32-NEXT:    call bcmp
70; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
71; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
72; CHECK-ALIGNED-RV32-NEXT:    ret
73;
74; CHECK-ALIGNED-RV64-LABEL: bcmp_size_1:
75; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
76; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
77; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
78; CHECK-ALIGNED-RV64-NEXT:    li a2, 1
79; CHECK-ALIGNED-RV64-NEXT:    call bcmp
80; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
81; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
82; CHECK-ALIGNED-RV64-NEXT:    ret
83;
84; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_1:
85; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
86; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
87; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
88; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 1
89; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
90; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
91; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
92; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
93;
94; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_1:
95; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
96; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
97; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
98; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 1
99; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
100; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
101; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
102; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
103;
104; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_1:
105; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
106; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
107; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
108; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 1
109; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
110; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
111; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
112; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
113;
114; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_1:
115; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
116; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
117; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
118; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 1
119; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
120; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
121; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
122; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
123;
124; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_1:
125; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
126; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
127; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
128; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 1
129; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
130; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
131; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
132; CHECK-ALIGNED-RV32-V-NEXT:    ret
133;
134; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_1:
135; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
136; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
137; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
138; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 1
139; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
140; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
141; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
142; CHECK-ALIGNED-RV64-V-NEXT:    ret
143;
144; CHECK-UNALIGNED-LABEL: bcmp_size_1:
145; CHECK-UNALIGNED:       # %bb.0: # %entry
146; CHECK-UNALIGNED-NEXT:    lbu a0, 0(a0)
147; CHECK-UNALIGNED-NEXT:    lbu a1, 0(a1)
148; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
149; CHECK-UNALIGNED-NEXT:    snez a0, a0
150; CHECK-UNALIGNED-NEXT:    ret
151entry:
152  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 1)
153  ret i32 %bcmp
154}
155
156define i32 @bcmp_size_2(ptr %s1, ptr %s2) nounwind optsize {
157; CHECK-ALIGNED-RV32-LABEL: bcmp_size_2:
158; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
159; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
160; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
161; CHECK-ALIGNED-RV32-NEXT:    li a2, 2
162; CHECK-ALIGNED-RV32-NEXT:    call bcmp
163; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
164; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
165; CHECK-ALIGNED-RV32-NEXT:    ret
166;
167; CHECK-ALIGNED-RV64-LABEL: bcmp_size_2:
168; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
169; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
170; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
171; CHECK-ALIGNED-RV64-NEXT:    li a2, 2
172; CHECK-ALIGNED-RV64-NEXT:    call bcmp
173; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
174; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
175; CHECK-ALIGNED-RV64-NEXT:    ret
176;
177; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_2:
178; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
179; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
180; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
181; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 2
182; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
183; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
184; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
185; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
186;
187; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_2:
188; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
189; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
190; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
191; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 2
192; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
193; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
194; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
195; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
196;
197; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_2:
198; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
199; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
200; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
201; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 2
202; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
203; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
204; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
205; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
206;
207; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_2:
208; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
209; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
210; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
211; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 2
212; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
213; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
214; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
215; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
216;
217; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_2:
218; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
219; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
220; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
221; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 2
222; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
223; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
224; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
225; CHECK-ALIGNED-RV32-V-NEXT:    ret
226;
227; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_2:
228; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
229; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
230; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
231; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 2
232; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
233; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
234; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
235; CHECK-ALIGNED-RV64-V-NEXT:    ret
236;
237; CHECK-UNALIGNED-LABEL: bcmp_size_2:
238; CHECK-UNALIGNED:       # %bb.0: # %entry
239; CHECK-UNALIGNED-NEXT:    lhu a0, 0(a0)
240; CHECK-UNALIGNED-NEXT:    lhu a1, 0(a1)
241; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
242; CHECK-UNALIGNED-NEXT:    snez a0, a0
243; CHECK-UNALIGNED-NEXT:    ret
244entry:
245  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 2)
246  ret i32 %bcmp
247}
248
249define i32 @bcmp_size_3(ptr %s1, ptr %s2) nounwind optsize {
250; CHECK-ALIGNED-RV32-LABEL: bcmp_size_3:
251; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
252; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
253; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
254; CHECK-ALIGNED-RV32-NEXT:    li a2, 3
255; CHECK-ALIGNED-RV32-NEXT:    call bcmp
256; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
257; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
258; CHECK-ALIGNED-RV32-NEXT:    ret
259;
260; CHECK-ALIGNED-RV64-LABEL: bcmp_size_3:
261; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
262; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
263; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
264; CHECK-ALIGNED-RV64-NEXT:    li a2, 3
265; CHECK-ALIGNED-RV64-NEXT:    call bcmp
266; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
267; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
268; CHECK-ALIGNED-RV64-NEXT:    ret
269;
270; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_3:
271; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
272; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
273; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
274; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 3
275; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
276; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
277; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
278; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
279;
280; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_3:
281; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
282; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
283; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
284; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 3
285; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
286; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
287; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
288; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
289;
290; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_3:
291; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
292; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
293; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
294; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 3
295; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
296; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
297; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
298; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
299;
300; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_3:
301; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
302; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
303; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
304; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 3
305; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
306; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
307; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
308; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
309;
310; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_3:
311; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
312; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
313; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
314; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 3
315; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
316; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
317; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
318; CHECK-ALIGNED-RV32-V-NEXT:    ret
319;
320; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_3:
321; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
322; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
323; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
324; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 3
325; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
326; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
327; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
328; CHECK-ALIGNED-RV64-V-NEXT:    ret
329;
330; CHECK-UNALIGNED-LABEL: bcmp_size_3:
331; CHECK-UNALIGNED:       # %bb.0: # %entry
332; CHECK-UNALIGNED-NEXT:    lhu a2, 0(a0)
333; CHECK-UNALIGNED-NEXT:    lbu a0, 2(a0)
334; CHECK-UNALIGNED-NEXT:    lhu a3, 0(a1)
335; CHECK-UNALIGNED-NEXT:    lbu a1, 2(a1)
336; CHECK-UNALIGNED-NEXT:    xor a2, a2, a3
337; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
338; CHECK-UNALIGNED-NEXT:    or a0, a2, a0
339; CHECK-UNALIGNED-NEXT:    snez a0, a0
340; CHECK-UNALIGNED-NEXT:    ret
341entry:
342  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 3)
343  ret i32 %bcmp
344}
345
346define i32 @bcmp_size_4(ptr %s1, ptr %s2) nounwind optsize {
347; CHECK-ALIGNED-RV32-LABEL: bcmp_size_4:
348; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
349; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
350; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
351; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
352; CHECK-ALIGNED-RV32-NEXT:    call bcmp
353; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
354; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
355; CHECK-ALIGNED-RV32-NEXT:    ret
356;
357; CHECK-ALIGNED-RV64-LABEL: bcmp_size_4:
358; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
359; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
360; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
361; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
362; CHECK-ALIGNED-RV64-NEXT:    call bcmp
363; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
364; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
365; CHECK-ALIGNED-RV64-NEXT:    ret
366;
367; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_4:
368; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
369; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
370; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
371; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
372; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
373; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
374; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
375; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
376;
377; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_4:
378; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
379; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
380; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
381; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
382; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
383; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
384; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
385; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
386;
387; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_4:
388; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
389; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
390; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
391; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
392; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
393; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
394; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
395; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
396;
397; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_4:
398; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
399; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
400; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
401; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
402; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
403; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
404; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
405; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
406;
407; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_4:
408; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
409; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
410; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
411; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
412; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
413; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
414; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
415; CHECK-ALIGNED-RV32-V-NEXT:    ret
416;
417; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_4:
418; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
419; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
420; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
421; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
422; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
423; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
424; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
425; CHECK-ALIGNED-RV64-V-NEXT:    ret
426;
427; CHECK-UNALIGNED-LABEL: bcmp_size_4:
428; CHECK-UNALIGNED:       # %bb.0: # %entry
429; CHECK-UNALIGNED-NEXT:    lw a0, 0(a0)
430; CHECK-UNALIGNED-NEXT:    lw a1, 0(a1)
431; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
432; CHECK-UNALIGNED-NEXT:    snez a0, a0
433; CHECK-UNALIGNED-NEXT:    ret
434entry:
435  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
436  ret i32 %bcmp
437}
438
439define i32 @bcmp_size_5(ptr %s1, ptr %s2) nounwind optsize {
440; CHECK-ALIGNED-RV32-LABEL: bcmp_size_5:
441; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
442; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
443; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
444; CHECK-ALIGNED-RV32-NEXT:    li a2, 5
445; CHECK-ALIGNED-RV32-NEXT:    call bcmp
446; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
447; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
448; CHECK-ALIGNED-RV32-NEXT:    ret
449;
450; CHECK-ALIGNED-RV64-LABEL: bcmp_size_5:
451; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
452; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
453; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
454; CHECK-ALIGNED-RV64-NEXT:    li a2, 5
455; CHECK-ALIGNED-RV64-NEXT:    call bcmp
456; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
457; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
458; CHECK-ALIGNED-RV64-NEXT:    ret
459;
460; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_5:
461; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
462; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
463; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
464; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 5
465; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
466; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
467; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
468; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
469;
470; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_5:
471; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
472; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
473; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
474; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 5
475; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
476; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
477; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
478; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
479;
480; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_5:
481; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
482; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
483; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
484; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 5
485; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
486; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
487; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
488; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
489;
490; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_5:
491; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
492; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
493; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
494; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 5
495; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
496; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
497; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
498; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
499;
500; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_5:
501; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
502; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
503; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
504; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 5
505; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
506; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
507; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
508; CHECK-ALIGNED-RV32-V-NEXT:    ret
509;
510; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_5:
511; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
512; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
513; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
514; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 5
515; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
516; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
517; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
518; CHECK-ALIGNED-RV64-V-NEXT:    ret
519;
520; CHECK-UNALIGNED-LABEL: bcmp_size_5:
521; CHECK-UNALIGNED:       # %bb.0: # %entry
522; CHECK-UNALIGNED-NEXT:    lw a2, 0(a0)
523; CHECK-UNALIGNED-NEXT:    lbu a0, 4(a0)
524; CHECK-UNALIGNED-NEXT:    lw a3, 0(a1)
525; CHECK-UNALIGNED-NEXT:    lbu a1, 4(a1)
526; CHECK-UNALIGNED-NEXT:    xor a2, a2, a3
527; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
528; CHECK-UNALIGNED-NEXT:    or a0, a2, a0
529; CHECK-UNALIGNED-NEXT:    snez a0, a0
530; CHECK-UNALIGNED-NEXT:    ret
531entry:
532  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 5)
533  ret i32 %bcmp
534}
535
536define i32 @bcmp_size_6(ptr %s1, ptr %s2) nounwind optsize {
537; CHECK-ALIGNED-RV32-LABEL: bcmp_size_6:
538; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
539; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
540; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
541; CHECK-ALIGNED-RV32-NEXT:    li a2, 6
542; CHECK-ALIGNED-RV32-NEXT:    call bcmp
543; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
544; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
545; CHECK-ALIGNED-RV32-NEXT:    ret
546;
547; CHECK-ALIGNED-RV64-LABEL: bcmp_size_6:
548; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
549; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
550; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
551; CHECK-ALIGNED-RV64-NEXT:    li a2, 6
552; CHECK-ALIGNED-RV64-NEXT:    call bcmp
553; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
554; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
555; CHECK-ALIGNED-RV64-NEXT:    ret
556;
557; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_6:
558; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
559; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
560; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
561; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 6
562; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
563; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
564; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
565; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
566;
567; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_6:
568; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
569; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
570; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
571; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 6
572; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
573; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
574; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
575; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
576;
577; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_6:
578; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
579; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
580; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
581; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 6
582; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
583; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
584; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
585; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
586;
587; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_6:
588; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
589; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
590; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
591; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 6
592; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
593; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
594; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
595; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
596;
597; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_6:
598; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
599; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
600; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
601; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 6
602; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
603; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
604; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
605; CHECK-ALIGNED-RV32-V-NEXT:    ret
606;
607; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_6:
608; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
609; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
610; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
611; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 6
612; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
613; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
614; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
615; CHECK-ALIGNED-RV64-V-NEXT:    ret
616;
617; CHECK-UNALIGNED-LABEL: bcmp_size_6:
618; CHECK-UNALIGNED:       # %bb.0: # %entry
619; CHECK-UNALIGNED-NEXT:    lw a2, 0(a0)
620; CHECK-UNALIGNED-NEXT:    lhu a0, 4(a0)
621; CHECK-UNALIGNED-NEXT:    lw a3, 0(a1)
622; CHECK-UNALIGNED-NEXT:    lhu a1, 4(a1)
623; CHECK-UNALIGNED-NEXT:    xor a2, a2, a3
624; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
625; CHECK-UNALIGNED-NEXT:    or a0, a2, a0
626; CHECK-UNALIGNED-NEXT:    snez a0, a0
627; CHECK-UNALIGNED-NEXT:    ret
628entry:
629  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 6)
630  ret i32 %bcmp
631}
632
633define i32 @bcmp_size_7(ptr %s1, ptr %s2) nounwind optsize {
634; CHECK-ALIGNED-RV32-LABEL: bcmp_size_7:
635; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
636; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
637; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
638; CHECK-ALIGNED-RV32-NEXT:    li a2, 7
639; CHECK-ALIGNED-RV32-NEXT:    call bcmp
640; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
641; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
642; CHECK-ALIGNED-RV32-NEXT:    ret
643;
644; CHECK-ALIGNED-RV64-LABEL: bcmp_size_7:
645; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
646; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
647; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
648; CHECK-ALIGNED-RV64-NEXT:    li a2, 7
649; CHECK-ALIGNED-RV64-NEXT:    call bcmp
650; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
651; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
652; CHECK-ALIGNED-RV64-NEXT:    ret
653;
654; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_7:
655; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
656; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
657; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
658; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 7
659; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
660; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
661; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
662; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
663;
664; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_7:
665; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
666; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
667; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
668; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 7
669; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
670; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
671; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
672; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
673;
674; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_7:
675; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
676; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
677; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
678; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 7
679; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
680; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
681; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
682; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
683;
684; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_7:
685; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
686; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
687; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
688; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 7
689; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
690; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
691; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
692; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
693;
694; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_7:
695; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
696; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
697; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
698; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 7
699; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
700; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
701; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
702; CHECK-ALIGNED-RV32-V-NEXT:    ret
703;
704; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_7:
705; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
706; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
707; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
708; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 7
709; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
710; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
711; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
712; CHECK-ALIGNED-RV64-V-NEXT:    ret
713;
714; CHECK-UNALIGNED-LABEL: bcmp_size_7:
715; CHECK-UNALIGNED:       # %bb.0: # %entry
716; CHECK-UNALIGNED-NEXT:    lw a2, 0(a0)
717; CHECK-UNALIGNED-NEXT:    lw a0, 3(a0)
718; CHECK-UNALIGNED-NEXT:    lw a3, 0(a1)
719; CHECK-UNALIGNED-NEXT:    lw a1, 3(a1)
720; CHECK-UNALIGNED-NEXT:    xor a2, a2, a3
721; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
722; CHECK-UNALIGNED-NEXT:    or a0, a2, a0
723; CHECK-UNALIGNED-NEXT:    snez a0, a0
724; CHECK-UNALIGNED-NEXT:    ret
725entry:
726  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 7)
727  ret i32 %bcmp
728}
729
730define i32 @bcmp_size_8(ptr %s1, ptr %s2) nounwind optsize {
731; CHECK-ALIGNED-RV32-LABEL: bcmp_size_8:
732; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
733; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
734; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
735; CHECK-ALIGNED-RV32-NEXT:    li a2, 8
736; CHECK-ALIGNED-RV32-NEXT:    call bcmp
737; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
738; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
739; CHECK-ALIGNED-RV32-NEXT:    ret
740;
741; CHECK-ALIGNED-RV64-LABEL: bcmp_size_8:
742; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
743; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
744; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
745; CHECK-ALIGNED-RV64-NEXT:    li a2, 8
746; CHECK-ALIGNED-RV64-NEXT:    call bcmp
747; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
748; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
749; CHECK-ALIGNED-RV64-NEXT:    ret
750;
751; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
752; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
753; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
754; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
755; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 8
756; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
757; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
758; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
759; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
760;
761; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
762; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
763; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
764; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
765; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 8
766; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
767; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
768; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
769; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
770;
771; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
772; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
773; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
774; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
775; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 8
776; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
777; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
778; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
779; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
780;
781; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
782; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
783; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
784; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
785; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 8
786; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
787; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
788; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
789; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
790;
791; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_8:
792; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
793; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
794; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
795; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 8
796; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
797; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
798; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
799; CHECK-ALIGNED-RV32-V-NEXT:    ret
800;
801; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_8:
802; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
803; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
804; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
805; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 8
806; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
807; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
808; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
809; CHECK-ALIGNED-RV64-V-NEXT:    ret
810;
811; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_8:
812; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
813; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
814; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 4(a0)
815; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 0(a1)
816; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 4(a1)
817; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a3
818; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
819; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
820; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
821; CHECK-UNALIGNED-RV32-NEXT:    ret
822;
823; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_8:
824; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
825; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 0(a0)
826; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 0(a1)
827; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
828; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
829; CHECK-UNALIGNED-RV64-NEXT:    ret
830;
831; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
832; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
833; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
834; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 4(a0)
835; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
836; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 4(a1)
837; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a3
838; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
839; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
840; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
841; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
842;
843; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
844; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
845; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 0(a0)
846; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 0(a1)
847; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
848; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
849; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
850;
851; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
852; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
853; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
854; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 4(a0)
855; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
856; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 4(a1)
857; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a3
858; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
859; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
860; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
861; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
862;
863; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
864; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
865; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 0(a0)
866; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 0(a1)
867; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
868; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
869; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
870;
871; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_8:
872; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
873; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
874; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 4(a0)
875; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 0(a1)
876; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 4(a1)
877; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a3
878; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
879; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
880; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
881; CHECK-UNALIGNED-RV32-V-NEXT:    ret
882;
883; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_8:
884; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
885; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 0(a0)
886; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 0(a1)
887; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
888; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
889; CHECK-UNALIGNED-RV64-V-NEXT:    ret
890entry:
891  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 8)
892  ret i32 %bcmp
893}
894
895define i32 @bcmp_size_15(ptr %s1, ptr %s2) nounwind optsize {
896; CHECK-ALIGNED-RV32-LABEL: bcmp_size_15:
897; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
898; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
899; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
900; CHECK-ALIGNED-RV32-NEXT:    li a2, 15
901; CHECK-ALIGNED-RV32-NEXT:    call bcmp
902; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
903; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
904; CHECK-ALIGNED-RV32-NEXT:    ret
905;
906; CHECK-ALIGNED-RV64-LABEL: bcmp_size_15:
907; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
908; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
909; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
910; CHECK-ALIGNED-RV64-NEXT:    li a2, 15
911; CHECK-ALIGNED-RV64-NEXT:    call bcmp
912; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
913; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
914; CHECK-ALIGNED-RV64-NEXT:    ret
915;
916; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
917; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
918; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
919; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
920; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 15
921; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
922; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
923; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
924; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
925;
926; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
927; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
928; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
929; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
930; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 15
931; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
932; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
933; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
934; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
935;
936; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
937; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
938; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
939; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
940; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 15
941; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
942; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
943; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
944; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
945;
946; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
947; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
948; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
949; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
950; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 15
951; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
952; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
953; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
954; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
955;
956; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_15:
957; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
958; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
959; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
960; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 15
961; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
962; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
963; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
964; CHECK-ALIGNED-RV32-V-NEXT:    ret
965;
966; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_15:
967; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
968; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
969; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
970; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 15
971; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
972; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
973; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
974; CHECK-ALIGNED-RV64-V-NEXT:    ret
975;
976; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_15:
977; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
978; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
979; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 4(a0)
980; CHECK-UNALIGNED-RV32-NEXT:    lw a4, 8(a0)
981; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 11(a0)
982; CHECK-UNALIGNED-RV32-NEXT:    lw a5, 0(a1)
983; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 4(a1)
984; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 8(a1)
985; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 11(a1)
986; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a5
987; CHECK-UNALIGNED-RV32-NEXT:    xor a3, a3, a6
988; CHECK-UNALIGNED-RV32-NEXT:    xor a4, a4, a7
989; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
990; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a3
991; CHECK-UNALIGNED-RV32-NEXT:    or a0, a4, a0
992; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
993; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
994; CHECK-UNALIGNED-RV32-NEXT:    ret
995;
996; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_15:
997; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
998; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
999; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 7(a0)
1000; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 0(a1)
1001; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 7(a1)
1002; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a3
1003; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
1004; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
1005; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
1006; CHECK-UNALIGNED-RV64-NEXT:    ret
1007;
1008; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
1009; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
1010; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
1011; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a0)
1012; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a4, 8(a0)
1013; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 11(a0)
1014; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a5, 0(a1)
1015; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 4(a1)
1016; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 8(a1)
1017; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 11(a1)
1018; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a5
1019; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a3, a3, a6
1020; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a4, a4, a7
1021; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
1022; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a3
1023; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a4, a0
1024; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
1025; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
1026; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
1027;
1028; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
1029; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
1030; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
1031; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 7(a0)
1032; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
1033; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 7(a1)
1034; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a3
1035; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
1036; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
1037; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
1038; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
1039;
1040; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
1041; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1042; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
1043; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a0)
1044; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a4, 8(a0)
1045; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 11(a0)
1046; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a5, 0(a1)
1047; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 4(a1)
1048; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 8(a1)
1049; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 11(a1)
1050; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a5
1051; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a3, a3, a6
1052; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a4, a4, a7
1053; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
1054; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a3
1055; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a4, a0
1056; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
1057; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
1058; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
1059;
1060; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
1061; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1062; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
1063; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 7(a0)
1064; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
1065; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 7(a1)
1066; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a3
1067; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
1068; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
1069; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
1070; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
1071;
1072; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_15:
1073; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
1074; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
1075; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 4(a0)
1076; CHECK-UNALIGNED-RV32-V-NEXT:    lw a4, 8(a0)
1077; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 11(a0)
1078; CHECK-UNALIGNED-RV32-V-NEXT:    lw a5, 0(a1)
1079; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 4(a1)
1080; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 8(a1)
1081; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 11(a1)
1082; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a5
1083; CHECK-UNALIGNED-RV32-V-NEXT:    xor a3, a3, a6
1084; CHECK-UNALIGNED-RV32-V-NEXT:    xor a4, a4, a7
1085; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
1086; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a3
1087; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a4, a0
1088; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
1089; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
1090; CHECK-UNALIGNED-RV32-V-NEXT:    ret
1091;
1092; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_15:
1093; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
1094; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
1095; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 7(a0)
1096; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 0(a1)
1097; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 7(a1)
1098; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a3
1099; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
1100; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
1101; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
1102; CHECK-UNALIGNED-RV64-V-NEXT:    ret
1103entry:
1104  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 15)
1105  ret i32 %bcmp
1106}
1107
1108define i32 @bcmp_size_16(ptr %s1, ptr %s2) nounwind optsize {
1109; CHECK-ALIGNED-RV32-LABEL: bcmp_size_16:
1110; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
1111; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
1112; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1113; CHECK-ALIGNED-RV32-NEXT:    li a2, 16
1114; CHECK-ALIGNED-RV32-NEXT:    call bcmp
1115; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1116; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
1117; CHECK-ALIGNED-RV32-NEXT:    ret
1118;
1119; CHECK-ALIGNED-RV64-LABEL: bcmp_size_16:
1120; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1121; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1122; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1123; CHECK-ALIGNED-RV64-NEXT:    li a2, 16
1124; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1125; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1126; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1127; CHECK-ALIGNED-RV64-NEXT:    ret
1128;
1129; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
1130; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
1131; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
1132; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1133; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 16
1134; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
1135; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1136; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
1137; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
1138;
1139; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
1140; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1141; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1142; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1143; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 16
1144; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1145; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1146; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1147; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1148;
1149; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
1150; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1151; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
1152; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1153; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 16
1154; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
1155; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1156; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
1157; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
1158;
1159; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
1160; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1161; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1162; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1163; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 16
1164; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1165; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1166; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1167; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1168;
1169; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_16:
1170; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
1171; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
1172; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1173; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 16
1174; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
1175; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1176; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
1177; CHECK-ALIGNED-RV32-V-NEXT:    ret
1178;
1179; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_16:
1180; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1181; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1182; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1183; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 16
1184; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1185; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1186; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
1187; CHECK-ALIGNED-RV64-V-NEXT:    ret
1188;
1189; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_16:
1190; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
1191; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
1192; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 4(a0)
1193; CHECK-UNALIGNED-RV32-NEXT:    lw a4, 8(a0)
1194; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 12(a0)
1195; CHECK-UNALIGNED-RV32-NEXT:    lw a5, 0(a1)
1196; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 4(a1)
1197; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 8(a1)
1198; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 12(a1)
1199; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a5
1200; CHECK-UNALIGNED-RV32-NEXT:    xor a3, a3, a6
1201; CHECK-UNALIGNED-RV32-NEXT:    xor a4, a4, a7
1202; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
1203; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a3
1204; CHECK-UNALIGNED-RV32-NEXT:    or a0, a4, a0
1205; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
1206; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
1207; CHECK-UNALIGNED-RV32-NEXT:    ret
1208;
1209; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_16:
1210; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
1211; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
1212; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 8(a0)
1213; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 0(a1)
1214; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 8(a1)
1215; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a3
1216; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
1217; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
1218; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
1219; CHECK-UNALIGNED-RV64-NEXT:    ret
1220;
1221; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
1222; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
1223; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
1224; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a0)
1225; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a4, 8(a0)
1226; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 12(a0)
1227; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a5, 0(a1)
1228; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 4(a1)
1229; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 8(a1)
1230; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 12(a1)
1231; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a5
1232; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a3, a3, a6
1233; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a4, a4, a7
1234; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
1235; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a3
1236; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a4, a0
1237; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
1238; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
1239; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
1240;
1241; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
1242; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
1243; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
1244; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 8(a0)
1245; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
1246; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 8(a1)
1247; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a3
1248; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
1249; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
1250; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
1251; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
1252;
1253; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
1254; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1255; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
1256; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a0)
1257; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a4, 8(a0)
1258; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 12(a0)
1259; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a5, 0(a1)
1260; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 4(a1)
1261; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 8(a1)
1262; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 12(a1)
1263; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a5
1264; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a3, a3, a6
1265; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a4, a4, a7
1266; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
1267; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a3
1268; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a4, a0
1269; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
1270; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
1271; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
1272;
1273; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
1274; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1275; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
1276; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 8(a0)
1277; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
1278; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 8(a1)
1279; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a3
1280; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
1281; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
1282; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
1283; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
1284;
1285; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_16:
1286; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
1287; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
1288; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 4(a0)
1289; CHECK-UNALIGNED-RV32-V-NEXT:    lw a4, 8(a0)
1290; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 12(a0)
1291; CHECK-UNALIGNED-RV32-V-NEXT:    lw a5, 0(a1)
1292; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 4(a1)
1293; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 8(a1)
1294; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 12(a1)
1295; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a5
1296; CHECK-UNALIGNED-RV32-V-NEXT:    xor a3, a3, a6
1297; CHECK-UNALIGNED-RV32-V-NEXT:    xor a4, a4, a7
1298; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
1299; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a3
1300; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a4, a0
1301; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
1302; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
1303; CHECK-UNALIGNED-RV32-V-NEXT:    ret
1304;
1305; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_16:
1306; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
1307; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
1308; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 8(a0)
1309; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 0(a1)
1310; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 8(a1)
1311; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a3
1312; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
1313; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
1314; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
1315; CHECK-UNALIGNED-RV64-V-NEXT:    ret
1316entry:
1317  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 16)
1318  ret i32 %bcmp
1319}
1320
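; With unaligned scalar access, the RV64 configurations expand the 31-byte bcmp
; into four overlapping 8-byte loads per operand (offsets 0, 8, 16 and 23),
; combine them with xor/or and reduce with snez; RV32 and the aligned RV64
; configurations keep the call to bcmp.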
1321define i32 @bcmp_size_31(ptr %s1, ptr %s2) nounwind optsize {
1322; CHECK-RV32-LABEL: bcmp_size_31:
1323; CHECK-RV32:       # %bb.0: # %entry
1324; CHECK-RV32-NEXT:    addi sp, sp, -16
1325; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1326; CHECK-RV32-NEXT:    li a2, 31
1327; CHECK-RV32-NEXT:    call bcmp
1328; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1329; CHECK-RV32-NEXT:    addi sp, sp, 16
1330; CHECK-RV32-NEXT:    ret
1331;
1332; CHECK-ALIGNED-RV64-LABEL: bcmp_size_31:
1333; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1334; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1335; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1336; CHECK-ALIGNED-RV64-NEXT:    li a2, 31
1337; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1338; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1339; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1340; CHECK-ALIGNED-RV64-NEXT:    ret
1341;
1342; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
1343; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1344; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1345; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1346; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 31
1347; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1348; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1349; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1350; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1351;
1352; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
1353; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1354; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1355; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1356; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 31
1357; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1358; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1359; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1360; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1361;
1362; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_31:
1363; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1364; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1365; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1366; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 31
1367; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1368; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1369; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
1370; CHECK-ALIGNED-RV64-V-NEXT:    ret
1371;
1372; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_31:
1373; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
1374; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
1375; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 8(a0)
1376; CHECK-UNALIGNED-RV64-NEXT:    ld a4, 16(a0)
1377; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 23(a0)
1378; CHECK-UNALIGNED-RV64-NEXT:    ld a5, 0(a1)
1379; CHECK-UNALIGNED-RV64-NEXT:    ld a6, 8(a1)
1380; CHECK-UNALIGNED-RV64-NEXT:    ld a7, 16(a1)
1381; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 23(a1)
1382; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a5
1383; CHECK-UNALIGNED-RV64-NEXT:    xor a3, a3, a6
1384; CHECK-UNALIGNED-RV64-NEXT:    xor a4, a4, a7
1385; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
1386; CHECK-UNALIGNED-RV64-NEXT:    or a2, a2, a3
1387; CHECK-UNALIGNED-RV64-NEXT:    or a0, a4, a0
1388; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
1389; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
1390; CHECK-UNALIGNED-RV64-NEXT:    ret
1391;
1392; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
1393; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
1394; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
1395; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a0)
1396; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a4, 16(a0)
1397; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 23(a0)
1398; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a5, 0(a1)
1399; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a6, 8(a1)
1400; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a7, 16(a1)
1401; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 23(a1)
1402; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a5
1403; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a3, a3, a6
1404; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a4, a4, a7
1405; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
1406; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a2, a2, a3
1407; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a4, a0
1408; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
1409; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
1410; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
1411;
1412; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
1413; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1414; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
1415; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a0)
1416; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a4, 16(a0)
1417; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 23(a0)
1418; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a5, 0(a1)
1419; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a6, 8(a1)
1420; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a7, 16(a1)
1421; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 23(a1)
1422; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a5
1423; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a3, a3, a6
1424; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a4, a4, a7
1425; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
1426; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a2, a2, a3
1427; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a4, a0
1428; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
1429; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
1430; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
1431;
1432; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_31:
1433; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
1434; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
1435; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 8(a0)
1436; CHECK-UNALIGNED-RV64-V-NEXT:    ld a4, 16(a0)
1437; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 23(a0)
1438; CHECK-UNALIGNED-RV64-V-NEXT:    ld a5, 0(a1)
1439; CHECK-UNALIGNED-RV64-V-NEXT:    ld a6, 8(a1)
1440; CHECK-UNALIGNED-RV64-V-NEXT:    ld a7, 16(a1)
1441; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 23(a1)
1442; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a5
1443; CHECK-UNALIGNED-RV64-V-NEXT:    xor a3, a3, a6
1444; CHECK-UNALIGNED-RV64-V-NEXT:    xor a4, a4, a7
1445; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
1446; CHECK-UNALIGNED-RV64-V-NEXT:    or a2, a2, a3
1447; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a4, a0
1448; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
1449; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
1450; CHECK-UNALIGNED-RV64-V-NEXT:    ret
1451entry:
1452  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 31)
1453  ret i32 %bcmp
1454}
1455
1456define i32 @bcmp_size_32(ptr %s1, ptr %s2) nounwind optsize {
1457; CHECK-RV32-LABEL: bcmp_size_32:
1458; CHECK-RV32:       # %bb.0: # %entry
1459; CHECK-RV32-NEXT:    addi sp, sp, -16
1460; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1461; CHECK-RV32-NEXT:    li a2, 32
1462; CHECK-RV32-NEXT:    call bcmp
1463; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1464; CHECK-RV32-NEXT:    addi sp, sp, 16
1465; CHECK-RV32-NEXT:    ret
1466;
1467; CHECK-ALIGNED-RV64-LABEL: bcmp_size_32:
1468; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1469; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1470; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1471; CHECK-ALIGNED-RV64-NEXT:    li a2, 32
1472; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1473; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1474; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1475; CHECK-ALIGNED-RV64-NEXT:    ret
1476;
1477; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
1478; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1479; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1480; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1481; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 32
1482; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1483; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1484; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1485; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1486;
1487; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
1488; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1489; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1490; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1491; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 32
1492; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1493; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1494; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1495; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1496;
1497; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_32:
1498; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1499; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1500; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1501; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 32
1502; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1503; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1504; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
1505; CHECK-ALIGNED-RV64-V-NEXT:    ret
1506;
1507; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_32:
1508; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
1509; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
1510; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 8(a0)
1511; CHECK-UNALIGNED-RV64-NEXT:    ld a4, 16(a0)
1512; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 24(a0)
1513; CHECK-UNALIGNED-RV64-NEXT:    ld a5, 0(a1)
1514; CHECK-UNALIGNED-RV64-NEXT:    ld a6, 8(a1)
1515; CHECK-UNALIGNED-RV64-NEXT:    ld a7, 16(a1)
1516; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 24(a1)
1517; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a5
1518; CHECK-UNALIGNED-RV64-NEXT:    xor a3, a3, a6
1519; CHECK-UNALIGNED-RV64-NEXT:    xor a4, a4, a7
1520; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
1521; CHECK-UNALIGNED-RV64-NEXT:    or a2, a2, a3
1522; CHECK-UNALIGNED-RV64-NEXT:    or a0, a4, a0
1523; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
1524; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
1525; CHECK-UNALIGNED-RV64-NEXT:    ret
1526;
1527; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
1528; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
1529; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
1530; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a0)
1531; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a4, 16(a0)
1532; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 24(a0)
1533; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a5, 0(a1)
1534; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a6, 8(a1)
1535; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a7, 16(a1)
1536; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 24(a1)
1537; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a5
1538; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a3, a3, a6
1539; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a4, a4, a7
1540; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
1541; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a2, a2, a3
1542; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a4, a0
1543; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
1544; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
1545; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
1546;
1547; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
1548; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1549; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
1550; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a0)
1551; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a4, 16(a0)
1552; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 24(a0)
1553; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a5, 0(a1)
1554; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a6, 8(a1)
1555; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a7, 16(a1)
1556; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 24(a1)
1557; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a5
1558; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a3, a3, a6
1559; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a4, a4, a7
1560; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
1561; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a2, a2, a3
1562; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a4, a0
1563; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
1564; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
1565; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
1566;
1567; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_32:
1568; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
1569; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
1570; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 8(a0)
1571; CHECK-UNALIGNED-RV64-V-NEXT:    ld a4, 16(a0)
1572; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 24(a0)
1573; CHECK-UNALIGNED-RV64-V-NEXT:    ld a5, 0(a1)
1574; CHECK-UNALIGNED-RV64-V-NEXT:    ld a6, 8(a1)
1575; CHECK-UNALIGNED-RV64-V-NEXT:    ld a7, 16(a1)
1576; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 24(a1)
1577; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a5
1578; CHECK-UNALIGNED-RV64-V-NEXT:    xor a3, a3, a6
1579; CHECK-UNALIGNED-RV64-V-NEXT:    xor a4, a4, a7
1580; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
1581; CHECK-UNALIGNED-RV64-V-NEXT:    or a2, a2, a3
1582; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a4, a0
1583; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
1584; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
1585; CHECK-UNALIGNED-RV64-V-NEXT:    ret
1586entry:
1587  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 32)
1588  ret i32 %bcmp
1589}
1590
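; For the larger sizes tested here (63 to 128 bytes), no configuration expands
; the compare at optsize; all emit a plain call to bcmp.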
1591define i32 @bcmp_size_63(ptr %s1, ptr %s2) nounwind optsize {
1592; CHECK-RV32-LABEL: bcmp_size_63:
1593; CHECK-RV32:       # %bb.0: # %entry
1594; CHECK-RV32-NEXT:    addi sp, sp, -16
1595; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1596; CHECK-RV32-NEXT:    li a2, 63
1597; CHECK-RV32-NEXT:    call bcmp
1598; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1599; CHECK-RV32-NEXT:    addi sp, sp, 16
1600; CHECK-RV32-NEXT:    ret
1601;
1602; CHECK-RV64-LABEL: bcmp_size_63:
1603; CHECK-RV64:       # %bb.0: # %entry
1604; CHECK-RV64-NEXT:    addi sp, sp, -16
1605; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1606; CHECK-RV64-NEXT:    li a2, 63
1607; CHECK-RV64-NEXT:    call bcmp
1608; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1609; CHECK-RV64-NEXT:    addi sp, sp, 16
1610; CHECK-RV64-NEXT:    ret
1611entry:
1612  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 63)
1613  ret i32 %bcmp
1614}
1615
1616define i32 @bcmp_size_64(ptr %s1, ptr %s2) nounwind optsize {
1617; CHECK-RV32-LABEL: bcmp_size_64:
1618; CHECK-RV32:       # %bb.0: # %entry
1619; CHECK-RV32-NEXT:    addi sp, sp, -16
1620; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1621; CHECK-RV32-NEXT:    li a2, 64
1622; CHECK-RV32-NEXT:    call bcmp
1623; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1624; CHECK-RV32-NEXT:    addi sp, sp, 16
1625; CHECK-RV32-NEXT:    ret
1626;
1627; CHECK-RV64-LABEL: bcmp_size_64:
1628; CHECK-RV64:       # %bb.0: # %entry
1629; CHECK-RV64-NEXT:    addi sp, sp, -16
1630; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1631; CHECK-RV64-NEXT:    li a2, 64
1632; CHECK-RV64-NEXT:    call bcmp
1633; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1634; CHECK-RV64-NEXT:    addi sp, sp, 16
1635; CHECK-RV64-NEXT:    ret
1636entry:
1637  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 64)
1638  ret i32 %bcmp
1639}
1640
1641define i32 @bcmp_size_127(ptr %s1, ptr %s2) nounwind optsize {
1642; CHECK-RV32-LABEL: bcmp_size_127:
1643; CHECK-RV32:       # %bb.0: # %entry
1644; CHECK-RV32-NEXT:    addi sp, sp, -16
1645; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1646; CHECK-RV32-NEXT:    li a2, 127
1647; CHECK-RV32-NEXT:    call bcmp
1648; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1649; CHECK-RV32-NEXT:    addi sp, sp, 16
1650; CHECK-RV32-NEXT:    ret
1651;
1652; CHECK-RV64-LABEL: bcmp_size_127:
1653; CHECK-RV64:       # %bb.0: # %entry
1654; CHECK-RV64-NEXT:    addi sp, sp, -16
1655; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1656; CHECK-RV64-NEXT:    li a2, 127
1657; CHECK-RV64-NEXT:    call bcmp
1658; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1659; CHECK-RV64-NEXT:    addi sp, sp, 16
1660; CHECK-RV64-NEXT:    ret
1661entry:
1662  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 127)
1663  ret i32 %bcmp
1664}
1665
1666define i32 @bcmp_size_128(ptr %s1, ptr %s2) nounwind optsize {
1667; CHECK-RV32-LABEL: bcmp_size_128:
1668; CHECK-RV32:       # %bb.0: # %entry
1669; CHECK-RV32-NEXT:    addi sp, sp, -16
1670; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1671; CHECK-RV32-NEXT:    li a2, 128
1672; CHECK-RV32-NEXT:    call bcmp
1673; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1674; CHECK-RV32-NEXT:    addi sp, sp, 16
1675; CHECK-RV32-NEXT:    ret
1676;
1677; CHECK-RV64-LABEL: bcmp_size_128:
1678; CHECK-RV64:       # %bb.0: # %entry
1679; CHECK-RV64-NEXT:    addi sp, sp, -16
1680; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1681; CHECK-RV64-NEXT:    li a2, 128
1682; CHECK-RV64-NEXT:    call bcmp
1683; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1684; CHECK-RV64-NEXT:    addi sp, sp, 16
1685; CHECK-RV64-NEXT:    ret
1686entry:
1687  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 128)
1688  ret i32 %bcmp
1689}
1690
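; A length that is only known at runtime is always left as a call to bcmp.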
1691define i32 @bcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind optsize {
1692; CHECK-RV32-LABEL: bcmp_size_runtime:
1693; CHECK-RV32:       # %bb.0: # %entry
1694; CHECK-RV32-NEXT:    addi sp, sp, -16
1695; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1696; CHECK-RV32-NEXT:    call bcmp
1697; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1698; CHECK-RV32-NEXT:    addi sp, sp, 16
1699; CHECK-RV32-NEXT:    ret
1700;
1701; CHECK-RV64-LABEL: bcmp_size_runtime:
1702; CHECK-RV64:       # %bb.0: # %entry
1703; CHECK-RV64-NEXT:    addi sp, sp, -16
1704; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1705; CHECK-RV64-NEXT:    call bcmp
1706; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1707; CHECK-RV64-NEXT:    addi sp, sp, 16
1708; CHECK-RV64-NEXT:    ret
1709entry:
1710  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen %len)
1711  ret i32 %bcmp
1712}
1713
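; When only equality with zero is tested, the unaligned configurations shrink
; the 4-byte bcmp to one word load per operand plus xor/seqz; the aligned
; configurations call bcmp and seqz the result.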
1714define i1 @bcmp_eq_zero(ptr %s1, ptr %s2) nounwind optsize {
1715; CHECK-ALIGNED-RV32-LABEL: bcmp_eq_zero:
1716; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
1717; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
1718; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1719; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
1720; CHECK-ALIGNED-RV32-NEXT:    call bcmp
1721; CHECK-ALIGNED-RV32-NEXT:    seqz a0, a0
1722; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1723; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
1724; CHECK-ALIGNED-RV32-NEXT:    ret
1725;
1726; CHECK-ALIGNED-RV64-LABEL: bcmp_eq_zero:
1727; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1728; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1729; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1730; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
1731; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1732; CHECK-ALIGNED-RV64-NEXT:    seqz a0, a0
1733; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1734; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1735; CHECK-ALIGNED-RV64-NEXT:    ret
1736;
1737; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_eq_zero:
1738; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
1739; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
1740; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1741; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
1742; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
1743; CHECK-ALIGNED-RV32-ZBB-NEXT:    seqz a0, a0
1744; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1745; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
1746; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
1747;
1748; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_eq_zero:
1749; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1750; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1751; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1752; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
1753; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1754; CHECK-ALIGNED-RV64-ZBB-NEXT:    seqz a0, a0
1755; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1756; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1757; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1758;
1759; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_eq_zero:
1760; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1761; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
1762; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1763; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
1764; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
1765; CHECK-ALIGNED-RV32-ZBKB-NEXT:    seqz a0, a0
1766; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1767; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
1768; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
1769;
1770; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_eq_zero:
1771; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1772; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1773; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1774; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
1775; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1776; CHECK-ALIGNED-RV64-ZBKB-NEXT:    seqz a0, a0
1777; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1778; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1779; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1780;
1781; CHECK-ALIGNED-RV32-V-LABEL: bcmp_eq_zero:
1782; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
1783; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
1784; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1785; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
1786; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
1787; CHECK-ALIGNED-RV32-V-NEXT:    seqz a0, a0
1788; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1789; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
1790; CHECK-ALIGNED-RV32-V-NEXT:    ret
1791;
1792; CHECK-ALIGNED-RV64-V-LABEL: bcmp_eq_zero:
1793; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1794; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1795; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1796; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
1797; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1798; CHECK-ALIGNED-RV64-V-NEXT:    seqz a0, a0
1799; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1800; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
1801; CHECK-ALIGNED-RV64-V-NEXT:    ret
1802;
1803; CHECK-UNALIGNED-LABEL: bcmp_eq_zero:
1804; CHECK-UNALIGNED:       # %bb.0: # %entry
1805; CHECK-UNALIGNED-NEXT:    lw a0, 0(a0)
1806; CHECK-UNALIGNED-NEXT:    lw a1, 0(a1)
1807; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
1808; CHECK-UNALIGNED-NEXT:    seqz a0, a0
1809; CHECK-UNALIGNED-NEXT:    ret
1810entry:
1811  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
1812  %ret = icmp eq i32 %bcmp, 0
1813  ret i1 %ret
1814}
1815
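; bcmp only distinguishes zero from non-zero, so the unaligned configurations
; fold bcmp(...) < 0 to a constant 0; the aligned configurations call bcmp and
; test the sign bit (srli 31 on RV32, slti on RV64).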
1816define i1 @bcmp_lt_zero(ptr %s1, ptr %s2) nounwind optsize {
1817; CHECK-ALIGNED-RV32-LABEL: bcmp_lt_zero:
1818; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
1819; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
1820; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1821; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
1822; CHECK-ALIGNED-RV32-NEXT:    call bcmp
1823; CHECK-ALIGNED-RV32-NEXT:    srli a0, a0, 31
1824; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1825; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
1826; CHECK-ALIGNED-RV32-NEXT:    ret
1827;
1828; CHECK-ALIGNED-RV64-LABEL: bcmp_lt_zero:
1829; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1830; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1831; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1832; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
1833; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1834; CHECK-ALIGNED-RV64-NEXT:    slti a0, a0, 0
1835; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1836; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1837; CHECK-ALIGNED-RV64-NEXT:    ret
1838;
1839; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_lt_zero:
1840; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
1841; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
1842; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1843; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
1844; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
1845; CHECK-ALIGNED-RV32-ZBB-NEXT:    srli a0, a0, 31
1846; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1847; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
1848; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
1849;
1850; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_lt_zero:
1851; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1852; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1853; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1854; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
1855; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1856; CHECK-ALIGNED-RV64-ZBB-NEXT:    slti a0, a0, 0
1857; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1858; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1859; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1860;
1861; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_lt_zero:
1862; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1863; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
1864; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1865; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
1866; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
1867; CHECK-ALIGNED-RV32-ZBKB-NEXT:    srli a0, a0, 31
1868; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1869; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
1870; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
1871;
1872; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_lt_zero:
1873; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1874; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1875; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1876; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
1877; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1878; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slti a0, a0, 0
1879; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1880; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1881; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1882;
1883; CHECK-ALIGNED-RV32-V-LABEL: bcmp_lt_zero:
1884; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
1885; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
1886; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1887; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
1888; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
1889; CHECK-ALIGNED-RV32-V-NEXT:    srli a0, a0, 31
1890; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1891; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
1892; CHECK-ALIGNED-RV32-V-NEXT:    ret
1893;
1894; CHECK-ALIGNED-RV64-V-LABEL: bcmp_lt_zero:
1895; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1896; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1897; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1898; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
1899; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1900; CHECK-ALIGNED-RV64-V-NEXT:    slti a0, a0, 0
1901; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1902; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
1903; CHECK-ALIGNED-RV64-V-NEXT:    ret
1904;
1905; CHECK-UNALIGNED-LABEL: bcmp_lt_zero:
1906; CHECK-UNALIGNED:       # %bb.0: # %entry
1907; CHECK-UNALIGNED-NEXT:    li a0, 0
1908; CHECK-UNALIGNED-NEXT:    ret
1909entry:
1910  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
1911  %ret = icmp slt i32 %bcmp, 0
1912  ret i1 %ret
1913}
1914
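; The unaligned configurations turn bcmp(...) > 0 into a plain inequality test
; (lw/lw/xor/snez); the aligned configurations call bcmp and use sgtz.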
1915define i1 @bcmp_gt_zero(ptr %s1, ptr %s2) nounwind optsize {
1916; CHECK-ALIGNED-RV32-LABEL: bcmp_gt_zero:
1917; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
1918; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
1919; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1920; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
1921; CHECK-ALIGNED-RV32-NEXT:    call bcmp
1922; CHECK-ALIGNED-RV32-NEXT:    sgtz a0, a0
1923; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1924; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
1925; CHECK-ALIGNED-RV32-NEXT:    ret
1926;
1927; CHECK-ALIGNED-RV64-LABEL: bcmp_gt_zero:
1928; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
1929; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
1930; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1931; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
1932; CHECK-ALIGNED-RV64-NEXT:    call bcmp
1933; CHECK-ALIGNED-RV64-NEXT:    sgtz a0, a0
1934; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1935; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
1936; CHECK-ALIGNED-RV64-NEXT:    ret
1937;
1938; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_gt_zero:
1939; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
1940; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
1941; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1942; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
1943; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
1944; CHECK-ALIGNED-RV32-ZBB-NEXT:    sgtz a0, a0
1945; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1946; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
1947; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
1948;
1949; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_gt_zero:
1950; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
1951; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
1952; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1953; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
1954; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
1955; CHECK-ALIGNED-RV64-ZBB-NEXT:    sgtz a0, a0
1956; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1957; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
1958; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
1959;
1960; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_gt_zero:
1961; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
1962; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
1963; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1964; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
1965; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
1966; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sgtz a0, a0
1967; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1968; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
1969; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
1970;
1971; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_gt_zero:
1972; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
1973; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
1974; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1975; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
1976; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
1977; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sgtz a0, a0
1978; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
1979; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
1980; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
1981;
1982; CHECK-ALIGNED-RV32-V-LABEL: bcmp_gt_zero:
1983; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
1984; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
1985; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
1986; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
1987; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
1988; CHECK-ALIGNED-RV32-V-NEXT:    sgtz a0, a0
1989; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
1990; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
1991; CHECK-ALIGNED-RV32-V-NEXT:    ret
1992;
1993; CHECK-ALIGNED-RV64-V-LABEL: bcmp_gt_zero:
1994; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
1995; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
1996; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
1997; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
1998; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
1999; CHECK-ALIGNED-RV64-V-NEXT:    sgtz a0, a0
2000; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2001; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2002; CHECK-ALIGNED-RV64-V-NEXT:    ret
2003;
2004; CHECK-UNALIGNED-LABEL: bcmp_gt_zero:
2005; CHECK-UNALIGNED:       # %bb.0: # %entry
2006; CHECK-UNALIGNED-NEXT:    lw a0, 0(a0)
2007; CHECK-UNALIGNED-NEXT:    lw a1, 0(a1)
2008; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
2009; CHECK-UNALIGNED-NEXT:    snez a0, a0
2010; CHECK-UNALIGNED-NEXT:    ret
2011entry:
2012  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
2013  %ret = icmp sgt i32 %bcmp, 0
2014  ret i1 %ret
2015}
2016
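; A zero-length memcmp folds to the constant 0 in every configuration.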
2017define i32 @memcmp_size_0(ptr %s1, ptr %s2) nounwind optsize {
2018; CHECK-LABEL: memcmp_size_0:
2019; CHECK:       # %bb.0: # %entry
2020; CHECK-NEXT:    li a0, 0
2021; CHECK-NEXT:    ret
2022entry:
2023  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 0)
2024  ret i32 %memcmp
2025}
2026
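; A 1-byte memcmp is expanded to lbu/lbu/sub only in the Zbb/Zbkb configurations
; with unaligned access; all other configurations keep the call to memcmp.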
2027define i32 @memcmp_size_1(ptr %s1, ptr %s2) nounwind optsize {
2028; CHECK-ALIGNED-RV32-LABEL: memcmp_size_1:
2029; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2030; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2031; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2032; CHECK-ALIGNED-RV32-NEXT:    li a2, 1
2033; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2034; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2035; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2036; CHECK-ALIGNED-RV32-NEXT:    ret
2037;
2038; CHECK-ALIGNED-RV64-LABEL: memcmp_size_1:
2039; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2040; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2041; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2042; CHECK-ALIGNED-RV64-NEXT:    li a2, 1
2043; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2044; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2045; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2046; CHECK-ALIGNED-RV64-NEXT:    ret
2047;
2048; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
2049; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2050; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2051; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2052; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 1
2053; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2054; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2055; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2056; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2057;
2058; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
2059; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2060; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2061; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2062; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 1
2063; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2064; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2065; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2066; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2067;
2068; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
2069; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2070; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2071; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2072; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 1
2073; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2074; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2075; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2076; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2077;
2078; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
2079; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2080; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2081; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2082; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 1
2083; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2084; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2085; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2086; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2087;
2088; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_1:
2089; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2090; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2091; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2092; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 1
2093; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2094; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2095; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2096; CHECK-ALIGNED-RV32-V-NEXT:    ret
2097;
2098; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_1:
2099; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2100; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2101; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2102; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 1
2103; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2104; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2105; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2106; CHECK-ALIGNED-RV64-V-NEXT:    ret
2107;
2108; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_1:
2109; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2110; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2111; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2112; CHECK-UNALIGNED-RV32-NEXT:    li a2, 1
2113; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2114; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2115; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2116; CHECK-UNALIGNED-RV32-NEXT:    ret
2117;
2118; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_1:
2119; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
2120; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
2121; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2122; CHECK-UNALIGNED-RV64-NEXT:    li a2, 1
2123; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
2124; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2125; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
2126; CHECK-UNALIGNED-RV64-NEXT:    ret
2127;
2128; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
2129; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
2130; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a0, 0(a0)
2131; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a1, 0(a1)
2132; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sub a0, a0, a1
2133; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2134;
2135; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
2136; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
2137; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a0, 0(a0)
2138; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a1, 0(a1)
2139; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a1
2140; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
2141;
2142; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
2143; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2144; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a0, 0(a0)
2145; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a1, 0(a1)
2146; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sub a0, a0, a1
2147; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2148;
2149; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
2150; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2151; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a0, 0(a0)
2152; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a1, 0(a1)
2153; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a1
2154; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
2155;
2156; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_1:
2157; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
2158; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2159; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2160; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 1
2161; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
2162; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2163; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2164; CHECK-UNALIGNED-RV32-V-NEXT:    ret
2165;
2166; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_1:
2167; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
2168; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2169; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2170; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 1
2171; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
2172; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2173; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2174; CHECK-UNALIGNED-RV64-V-NEXT:    ret
2175entry:
2176  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 1)
2177  ret i32 %memcmp
2178}
2179
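; For 2 bytes, the Zbb/Zbkb unaligned configurations load both halfwords and use
; rev8 plus a shift to byte-swap them, so a single sub produces a result with
; the sign memcmp requires.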
2180define i32 @memcmp_size_2(ptr %s1, ptr %s2) nounwind optsize {
2181; CHECK-ALIGNED-RV32-LABEL: memcmp_size_2:
2182; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2183; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2184; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2185; CHECK-ALIGNED-RV32-NEXT:    li a2, 2
2186; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2187; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2188; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2189; CHECK-ALIGNED-RV32-NEXT:    ret
2190;
2191; CHECK-ALIGNED-RV64-LABEL: memcmp_size_2:
2192; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2193; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2194; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2195; CHECK-ALIGNED-RV64-NEXT:    li a2, 2
2196; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2197; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2198; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2199; CHECK-ALIGNED-RV64-NEXT:    ret
2200;
2201; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
2202; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2203; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2204; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2205; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 2
2206; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2207; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2208; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2209; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2210;
2211; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
2212; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2213; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2214; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2215; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 2
2216; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2217; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2218; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2219; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2220;
2221; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
2222; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2223; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2224; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2225; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 2
2226; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2227; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2228; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2229; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2230;
2231; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
2232; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2233; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2234; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2235; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 2
2236; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2237; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2238; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2239; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2240;
2241; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_2:
2242; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2243; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2244; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2245; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 2
2246; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2247; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2248; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2249; CHECK-ALIGNED-RV32-V-NEXT:    ret
2250;
2251; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_2:
2252; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2253; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2254; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2255; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 2
2256; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2257; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2258; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2259; CHECK-ALIGNED-RV64-V-NEXT:    ret
2260;
2261; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_2:
2262; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2263; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2264; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2265; CHECK-UNALIGNED-RV32-NEXT:    li a2, 2
2266; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2267; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2268; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2269; CHECK-UNALIGNED-RV32-NEXT:    ret
2270;
2271; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_2:
2272; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
2273; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
2274; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2275; CHECK-UNALIGNED-RV64-NEXT:    li a2, 2
2276; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
2277; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2278; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
2279; CHECK-UNALIGNED-RV64-NEXT:    ret
2280;
2281; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
2282; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
2283; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lh a0, 0(a0)
2284; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lh a1, 0(a1)
2285; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
2286; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
2287; CHECK-UNALIGNED-RV32-ZBB-NEXT:    srli a0, a0, 16
2288; CHECK-UNALIGNED-RV32-ZBB-NEXT:    srli a1, a1, 16
2289; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sub a0, a0, a1
2290; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2291;
2292; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
2293; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
2294; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lh a0, 0(a0)
2295; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lh a1, 0(a1)
2296; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
2297; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
2298; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 48
2299; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 48
2300; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a1
2301; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
2302;
2303; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
2304; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2305; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lh a0, 0(a0)
2306; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lh a1, 0(a1)
2307; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
2308; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
2309; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    srli a0, a0, 16
2310; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    srli a1, a1, 16
2311; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sub a0, a0, a1
2312; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2313;
2314; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
2315; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2316; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lh a0, 0(a0)
2317; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lh a1, 0(a1)
2318; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
2319; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
2320; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 48
2321; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 48
2322; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a1
2323; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
2324;
2325; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_2:
2326; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
2327; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2328; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2329; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 2
2330; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
2331; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2332; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2333; CHECK-UNALIGNED-RV32-V-NEXT:    ret
2334;
2335; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_2:
2336; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
2337; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2338; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2339; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 2
2340; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
2341; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2342; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2343; CHECK-UNALIGNED-RV64-V-NEXT:    ret
2344entry:
2345  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 2)
2346  ret i32 %memcmp
2347}
2348
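; For 3 bytes, the Zbb unaligned expansion combines a halfword and a byte load
; into a 24-bit value per operand, byte-swaps it with rev8, and derives -1/0/1
; from a pair of sltu comparisons.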
2349define i32 @memcmp_size_3(ptr %s1, ptr %s2) nounwind optsize {
2350; CHECK-ALIGNED-RV32-LABEL: memcmp_size_3:
2351; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2352; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2353; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2354; CHECK-ALIGNED-RV32-NEXT:    li a2, 3
2355; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2356; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2357; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2358; CHECK-ALIGNED-RV32-NEXT:    ret
2359;
2360; CHECK-ALIGNED-RV64-LABEL: memcmp_size_3:
2361; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2362; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2363; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2364; CHECK-ALIGNED-RV64-NEXT:    li a2, 3
2365; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2366; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2367; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2368; CHECK-ALIGNED-RV64-NEXT:    ret
2369;
2370; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
2371; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2372; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2373; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2374; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 3
2375; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2376; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2377; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2378; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2379;
2380; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
2381; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2382; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2383; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2384; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 3
2385; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2386; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2387; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2388; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2389;
2390; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
2391; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2392; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2393; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2394; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 3
2395; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2396; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2397; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2398; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2399;
2400; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
2401; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2402; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2403; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2404; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 3
2405; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2406; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2407; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2408; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2409;
2410; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_3:
2411; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2412; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2413; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2414; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 3
2415; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2416; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2417; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2418; CHECK-ALIGNED-RV32-V-NEXT:    ret
2419;
2420; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_3:
2421; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2422; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2423; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2424; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 3
2425; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2426; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2427; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2428; CHECK-ALIGNED-RV64-V-NEXT:    ret
2429;
2430; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_3:
2431; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2432; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2433; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2434; CHECK-UNALIGNED-RV32-NEXT:    li a2, 3
2435; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2436; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2437; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2438; CHECK-UNALIGNED-RV32-NEXT:    ret
2439;
2440; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_3:
2441; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
2442; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
2443; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2444; CHECK-UNALIGNED-RV64-NEXT:    li a2, 3
2445; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
2446; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2447; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
2448; CHECK-UNALIGNED-RV64-NEXT:    ret
2449;
2450; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
2451; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
2452; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a2, 2(a0)
2453; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lhu a0, 0(a0)
2454; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a3, 2(a1)
2455; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lhu a1, 0(a1)
2456; CHECK-UNALIGNED-RV32-ZBB-NEXT:    slli a2, a2, 16
2457; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a0, a2
2458; CHECK-UNALIGNED-RV32-ZBB-NEXT:    slli a3, a3, 16
2459; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a1, a1, a3
2460; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
2461; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
2462; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a2, a0, a1
2463; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a1, a0
2464; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sub a0, a0, a2
2465; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2466;
2467; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
2468; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
2469; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a2, 2(a0)
2470; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lhu a0, 0(a0)
2471; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a3, 2(a1)
2472; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lhu a1, 0(a1)
2473; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a2, a2, 16
2474; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a0, a2
2475; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a3, a3, 16
2476; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a1, a1, a3
2477; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
2478; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
2479; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
2480; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
2481; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a2, a0, a1
2482; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
2483; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a2
2484; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
2485;
2486; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
2487; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2488; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lhu a2, 0(a0)
2489; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a0, 2(a0)
2490; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lhu a3, 0(a1)
2491; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a1, 2(a1)
2492; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    pack a0, a2, a0
2493; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    pack a1, a3, a1
2494; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
2495; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
2496; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a2, a0, a1
2497; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a1, a0
2498; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sub a0, a0, a2
2499; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2500;
2501; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
2502; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2503; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a2, 2(a0)
2504; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lhu a0, 0(a0)
2505; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a3, 2(a1)
2506; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lhu a1, 0(a1)
2507; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    slli a2, a2, 16
2508; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a0, a2
2509; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    slli a3, a3, 16
2510; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a1, a1, a3
2511; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
2512; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
2513; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
2514; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
2515; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a2, a0, a1
2516; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
2517; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a2
2518; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
2519;
2520; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_3:
2521; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
2522; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2523; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2524; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 3
2525; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
2526; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2527; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2528; CHECK-UNALIGNED-RV32-V-NEXT:    ret
2529;
2530; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_3:
2531; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
2532; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2533; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2534; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 3
2535; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
2536; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2537; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2538; CHECK-UNALIGNED-RV64-V-NEXT:    ret
2539entry:
2540  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 3)
2541  ret i32 %memcmp
2542}
2543
2544define i32 @memcmp_size_4(ptr %s1, ptr %s2) nounwind optsize {
2545; CHECK-ALIGNED-RV32-LABEL: memcmp_size_4:
2546; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2547; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2548; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2549; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
2550; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2551; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2552; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2553; CHECK-ALIGNED-RV32-NEXT:    ret
2554;
2555; CHECK-ALIGNED-RV64-LABEL: memcmp_size_4:
2556; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2557; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2558; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2559; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
2560; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2561; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2562; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2563; CHECK-ALIGNED-RV64-NEXT:    ret
2564;
2565; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
2566; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2567; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2568; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2569; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
2570; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2571; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2572; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2573; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2574;
2575; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
2576; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2577; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2578; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2579; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
2580; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2581; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2582; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2583; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2584;
2585; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
2586; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2587; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2588; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2589; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
2590; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2591; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2592; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2593; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2594;
2595; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
2596; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2597; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2598; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2599; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
2600; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2601; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2602; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2603; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2604;
2605; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_4:
2606; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2607; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2608; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2609; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
2610; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2611; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2612; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2613; CHECK-ALIGNED-RV32-V-NEXT:    ret
2614;
2615; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_4:
2616; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2617; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2618; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2619; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
2620; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2621; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2622; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2623; CHECK-ALIGNED-RV64-V-NEXT:    ret
2624;
2625; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_4:
2626; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2627; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2628; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2629; CHECK-UNALIGNED-RV32-NEXT:    li a2, 4
2630; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2631; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2632; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2633; CHECK-UNALIGNED-RV32-NEXT:    ret
2634;
2635; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_4:
2636; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
2637; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
2638; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2639; CHECK-UNALIGNED-RV64-NEXT:    li a2, 4
2640; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
2641; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2642; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
2643; CHECK-UNALIGNED-RV64-NEXT:    ret
2644;
2645; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
2646; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
2647; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 0(a0)
2648; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 0(a1)
2649; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
2650; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
2651; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a2, a0, a1
2652; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a1, a0
2653; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sub a0, a0, a2
2654; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2655;
2656; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
2657; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
2658; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 0(a0)
2659; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 0(a1)
2660; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
2661; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
2662; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
2663; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
2664; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a2, a0, a1
2665; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
2666; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a2
2667; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
2668;
2669; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
2670; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2671; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 0(a0)
2672; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 0(a1)
2673; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
2674; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
2675; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a2, a0, a1
2676; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a1, a0
2677; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sub a0, a0, a2
2678; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2679;
2680; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
2681; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2682; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 0(a0)
2683; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 0(a1)
2684; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
2685; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
2686; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
2687; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
2688; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a2, a0, a1
2689; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
2690; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a2
2691; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
2692;
2693; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_4:
2694; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
2695; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2696; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2697; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 4
2698; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
2699; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2700; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2701; CHECK-UNALIGNED-RV32-V-NEXT:    ret
2702;
2703; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_4:
2704; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
2705; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2706; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2707; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 4
2708; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
2709; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2710; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2711; CHECK-UNALIGNED-RV64-V-NEXT:    ret
2712entry:
2713  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
2714  ret i32 %memcmp
2715}
2716
2717define i32 @memcmp_size_5(ptr %s1, ptr %s2) nounwind optsize {
2718; CHECK-ALIGNED-RV32-LABEL: memcmp_size_5:
2719; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2720; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2721; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2722; CHECK-ALIGNED-RV32-NEXT:    li a2, 5
2723; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2724; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2725; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2726; CHECK-ALIGNED-RV32-NEXT:    ret
2727;
2728; CHECK-ALIGNED-RV64-LABEL: memcmp_size_5:
2729; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2730; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2731; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2732; CHECK-ALIGNED-RV64-NEXT:    li a2, 5
2733; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2734; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2735; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2736; CHECK-ALIGNED-RV64-NEXT:    ret
2737;
2738; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
2739; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2740; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2741; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2742; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 5
2743; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2744; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2745; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2746; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2747;
2748; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
2749; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2750; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2751; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2752; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 5
2753; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2754; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2755; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2756; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2757;
2758; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
2759; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2760; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2761; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2762; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 5
2763; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2764; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2765; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2766; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2767;
2768; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
2769; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2770; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2771; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2772; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 5
2773; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2774; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2775; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2776; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2777;
2778; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_5:
2779; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2780; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2781; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2782; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 5
2783; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2784; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2785; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2786; CHECK-ALIGNED-RV32-V-NEXT:    ret
2787;
2788; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_5:
2789; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2790; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2791; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2792; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 5
2793; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2794; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2795; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2796; CHECK-ALIGNED-RV64-V-NEXT:    ret
2797;
2798; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_5:
2799; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2800; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2801; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2802; CHECK-UNALIGNED-RV32-NEXT:    li a2, 5
2803; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2804; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2805; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2806; CHECK-UNALIGNED-RV32-NEXT:    ret
2807;
2808; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_5:
2809; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
2810; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
2811; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2812; CHECK-UNALIGNED-RV64-NEXT:    li a2, 5
2813; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
2814; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2815; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
2816; CHECK-UNALIGNED-RV64-NEXT:    ret
2817;
2818; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
2819; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
2820; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
2821; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
2822; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
2823; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
2824; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB26_2
2825; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
2826; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a0, 4(a0)
2827; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lbu a1, 4(a1)
2828; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sub a0, a0, a1
2829; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2830; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB26_2: # %res_block
2831; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
2832; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
2833; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
2834; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
2835;
2836; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
2837; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
2838; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a2, 4(a0)
2839; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lwu a0, 0(a0)
2840; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lbu a3, 4(a1)
2841; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lwu a1, 0(a1)
2842; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a2, a2, 32
2843; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a0, a2
2844; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a3, a3, 32
2845; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a1, a1, a3
2846; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
2847; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
2848; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a2, a0, a1
2849; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
2850; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a2
2851; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
2852;
2853; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
2854; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2855; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
2856; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
2857; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
2858; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
2859; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB26_2
2860; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
2861; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a0, 4(a0)
2862; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lbu a1, 4(a1)
2863; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sub a0, a0, a1
2864; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2865; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB26_2: # %res_block
2866; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
2867; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
2868; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
2869; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
2870;
2871; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
2872; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2873; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lwu a2, 0(a0)
2874; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a0, 4(a0)
2875; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lwu a3, 0(a1)
2876; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lbu a1, 4(a1)
2877; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    pack a0, a2, a0
2878; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    pack a1, a3, a1
2879; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
2880; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
2881; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a2, a0, a1
2882; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
2883; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a2
2884; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
2885;
2886; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_5:
2887; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
2888; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2889; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2890; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 5
2891; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
2892; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2893; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2894; CHECK-UNALIGNED-RV32-V-NEXT:    ret
2895;
2896; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_5:
2897; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
2898; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2899; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2900; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 5
2901; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
2902; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2903; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2904; CHECK-UNALIGNED-RV64-V-NEXT:    ret
2905entry:
2906  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 5)
2907  ret i32 %memcmp
2908}
2909
2910define i32 @memcmp_size_6(ptr %s1, ptr %s2) nounwind optsize {
2911; CHECK-ALIGNED-RV32-LABEL: memcmp_size_6:
2912; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
2913; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
2914; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2915; CHECK-ALIGNED-RV32-NEXT:    li a2, 6
2916; CHECK-ALIGNED-RV32-NEXT:    call memcmp
2917; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2918; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
2919; CHECK-ALIGNED-RV32-NEXT:    ret
2920;
2921; CHECK-ALIGNED-RV64-LABEL: memcmp_size_6:
2922; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
2923; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
2924; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2925; CHECK-ALIGNED-RV64-NEXT:    li a2, 6
2926; CHECK-ALIGNED-RV64-NEXT:    call memcmp
2927; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2928; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
2929; CHECK-ALIGNED-RV64-NEXT:    ret
2930;
2931; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
2932; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
2933; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
2934; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2935; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 6
2936; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
2937; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2938; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
2939; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
2940;
2941; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
2942; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
2943; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
2944; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2945; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 6
2946; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
2947; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2948; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
2949; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
2950;
2951; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
2952; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
2953; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
2954; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2955; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 6
2956; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
2957; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2958; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
2959; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
2960;
2961; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
2962; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
2963; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
2964; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2965; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 6
2966; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
2967; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2968; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
2969; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
2970;
2971; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_6:
2972; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
2973; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
2974; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2975; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 6
2976; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
2977; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2978; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
2979; CHECK-ALIGNED-RV32-V-NEXT:    ret
2980;
2981; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_6:
2982; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
2983; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
2984; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
2985; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 6
2986; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
2987; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
2988; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
2989; CHECK-ALIGNED-RV64-V-NEXT:    ret
2990;
2991; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_6:
2992; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
2993; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
2994; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
2995; CHECK-UNALIGNED-RV32-NEXT:    li a2, 6
2996; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
2997; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
2998; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
2999; CHECK-UNALIGNED-RV32-NEXT:    ret
3000;
3001; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_6:
3002; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
3003; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
3004; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3005; CHECK-UNALIGNED-RV64-NEXT:    li a2, 6
3006; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
3007; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3008; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
3009; CHECK-UNALIGNED-RV64-NEXT:    ret
3010;
3011; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
3012; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
3013; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
3014; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
3015; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3016; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3017; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB27_3
3018; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
3019; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lh a0, 4(a0)
3020; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lh a1, 4(a1)
3021; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a0
3022; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a1
3023; CHECK-UNALIGNED-RV32-ZBB-NEXT:    srli a2, a2, 16
3024; CHECK-UNALIGNED-RV32-ZBB-NEXT:    srli a3, a3, 16
3025; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB27_3
3026; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.2:
3027; CHECK-UNALIGNED-RV32-ZBB-NEXT:    li a0, 0
3028; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3029; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB27_3: # %res_block
3030; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
3031; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
3032; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
3033; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3034;
3035; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
3036; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
3037; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lhu a2, 4(a0)
3038; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lwu a0, 0(a0)
3039; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lhu a3, 4(a1)
3040; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lwu a1, 0(a1)
3041; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a2, a2, 32
3042; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a0, a2
3043; CHECK-UNALIGNED-RV64-ZBB-NEXT:    slli a3, a3, 32
3044; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a1, a1, a3
3045; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
3046; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
3047; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a2, a0, a1
3048; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
3049; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a2
3050; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3051;
3052; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
3053; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3054; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
3055; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
3056; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3057; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3058; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB27_3
3059; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
3060; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lh a0, 4(a0)
3061; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lh a1, 4(a1)
3062; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a0
3063; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a1
3064; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    srli a2, a2, 16
3065; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    srli a3, a3, 16
3066; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB27_3
3067; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.2:
3068; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    li a0, 0
3069; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3070; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB27_3: # %res_block
3071; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
3072; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
3073; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
3074; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3075;
3076; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
3077; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3078; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lwu a2, 0(a0)
3079; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lhu a0, 4(a0)
3080; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lwu a3, 0(a1)
3081; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lhu a1, 4(a1)
3082; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    pack a0, a2, a0
3083; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    pack a1, a3, a1
3084; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
3085; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
3086; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a2, a0, a1
3087; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
3088; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a2
3089; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3090;
3091; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_6:
3092; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
3093; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3094; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3095; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 6
3096; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
3097; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3098; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3099; CHECK-UNALIGNED-RV32-V-NEXT:    ret
3100;
3101; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_6:
3102; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
3103; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3104; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3105; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 6
3106; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
3107; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3108; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3109; CHECK-UNALIGNED-RV64-V-NEXT:    ret
3110entry:
3111  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 6)
3112  ret i32 %memcmp
3113}
3114
3115define i32 @memcmp_size_7(ptr %s1, ptr %s2) nounwind optsize {
3116; CHECK-ALIGNED-RV32-LABEL: memcmp_size_7:
3117; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
3118; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
3119; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3120; CHECK-ALIGNED-RV32-NEXT:    li a2, 7
3121; CHECK-ALIGNED-RV32-NEXT:    call memcmp
3122; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3123; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
3124; CHECK-ALIGNED-RV32-NEXT:    ret
3125;
3126; CHECK-ALIGNED-RV64-LABEL: memcmp_size_7:
3127; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
3128; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
3129; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3130; CHECK-ALIGNED-RV64-NEXT:    li a2, 7
3131; CHECK-ALIGNED-RV64-NEXT:    call memcmp
3132; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3133; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
3134; CHECK-ALIGNED-RV64-NEXT:    ret
3135;
3136; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
3137; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
3138; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
3139; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3140; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 7
3141; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
3142; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3143; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
3144; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
3145;
3146; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
3147; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
3148; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
3149; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3150; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 7
3151; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
3152; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3153; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
3154; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
3155;
3156; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
3157; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3158; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
3159; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3160; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 7
3161; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
3162; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3163; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
3164; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
3165;
3166; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
3167; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3168; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
3169; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3170; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 7
3171; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
3172; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3173; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
3174; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
3175;
3176; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_7:
3177; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
3178; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3179; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3180; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 7
3181; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
3182; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3183; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3184; CHECK-ALIGNED-RV32-V-NEXT:    ret
3185;
3186; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_7:
3187; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
3188; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3189; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3190; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 7
3191; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
3192; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3193; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3194; CHECK-ALIGNED-RV64-V-NEXT:    ret
3195;
3196; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_7:
3197; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
3198; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
3199; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3200; CHECK-UNALIGNED-RV32-NEXT:    li a2, 7
3201; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
3202; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3203; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
3204; CHECK-UNALIGNED-RV32-NEXT:    ret
3205;
3206; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_7:
3207; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
3208; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
3209; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3210; CHECK-UNALIGNED-RV64-NEXT:    li a2, 7
3211; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
3212; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3213; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
3214; CHECK-UNALIGNED-RV64-NEXT:    ret
3215;
3216; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
3217; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
3218; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
3219; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
3220; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3221; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3222; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB28_3
3223; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
3224; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 3(a0)
3225; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 3(a1)
3226; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a0
3227; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a1
3228; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB28_3
3229; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.2:
3230; CHECK-UNALIGNED-RV32-ZBB-NEXT:    li a0, 0
3231; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3232; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB28_3: # %res_block
3233; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
3234; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
3235; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
3236; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3237;
3238; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
3239; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
3240; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a2, 0(a0)
3241; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a3, 0(a1)
3242; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
3243; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
3244; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a2, a2, 32
3245; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a3, a3, 32
3246; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB28_3
3247; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
3248; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 3(a0)
3249; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 3(a1)
3250; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
3251; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
3252; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a2, a2, 32
3253; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a3, a3, 32
3254; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB28_3
3255; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2:
3256; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
3257; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3258; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB28_3: # %res_block
3259; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
3260; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
3261; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
3262; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3263;
3264; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
3265; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3266; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
3267; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
3268; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3269; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3270; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB28_3
3271; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
3272; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 3(a0)
3273; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 3(a1)
3274; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a0
3275; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a1
3276; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB28_3
3277; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.2:
3278; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    li a0, 0
3279; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3280; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB28_3: # %res_block
3281; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
3282; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
3283; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
3284; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3285;
3286; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
3287; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3288; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a2, 0(a0)
3289; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a3, 0(a1)
3290; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
3291; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
3292; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a2, a2, 32
3293; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a3, a3, 32
3294; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB28_3
3295; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
3296; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 3(a0)
3297; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 3(a1)
3298; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
3299; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
3300; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a2, a2, 32
3301; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a3, a3, 32
3302; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB28_3
3303; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2:
3304; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
3305; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3306; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB28_3: # %res_block
3307; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
3308; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
3309; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
3310; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3311;
3312; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_7:
3313; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
3314; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3315; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3316; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 7
3317; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
3318; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3319; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3320; CHECK-UNALIGNED-RV32-V-NEXT:    ret
3321;
3322; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_7:
3323; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
3324; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3325; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3326; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 7
3327; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
3328; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3329; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3330; CHECK-UNALIGNED-RV64-V-NEXT:    ret
3331entry:
3332  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 7)
3333  ret i32 %memcmp
3334}
3335
3336define i32 @memcmp_size_8(ptr %s1, ptr %s2) nounwind optsize {
3337; CHECK-ALIGNED-RV32-LABEL: memcmp_size_8:
3338; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
3339; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
3340; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3341; CHECK-ALIGNED-RV32-NEXT:    li a2, 8
3342; CHECK-ALIGNED-RV32-NEXT:    call memcmp
3343; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3344; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
3345; CHECK-ALIGNED-RV32-NEXT:    ret
3346;
3347; CHECK-ALIGNED-RV64-LABEL: memcmp_size_8:
3348; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
3349; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
3350; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3351; CHECK-ALIGNED-RV64-NEXT:    li a2, 8
3352; CHECK-ALIGNED-RV64-NEXT:    call memcmp
3353; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3354; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
3355; CHECK-ALIGNED-RV64-NEXT:    ret
3356;
3357; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
3358; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
3359; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
3360; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3361; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 8
3362; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
3363; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3364; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
3365; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
3366;
3367; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
3368; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
3369; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
3370; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3371; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 8
3372; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
3373; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3374; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
3375; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
3376;
3377; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
3378; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3379; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
3380; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3381; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 8
3382; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
3383; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3384; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
3385; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
3386;
3387; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
3388; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3389; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
3390; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3391; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 8
3392; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
3393; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3394; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
3395; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
3396;
3397; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_8:
3398; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
3399; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3400; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3401; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 8
3402; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
3403; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3404; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3405; CHECK-ALIGNED-RV32-V-NEXT:    ret
3406;
3407; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_8:
3408; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
3409; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3410; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3411; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 8
3412; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
3413; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3414; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3415; CHECK-ALIGNED-RV64-V-NEXT:    ret
3416;
3417; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_8:
3418; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
3419; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
3420; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3421; CHECK-UNALIGNED-RV32-NEXT:    li a2, 8
3422; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
3423; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3424; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
3425; CHECK-UNALIGNED-RV32-NEXT:    ret
3426;
3427; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_8:
3428; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
3429; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
3430; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3431; CHECK-UNALIGNED-RV64-NEXT:    li a2, 8
3432; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
3433; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3434; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
3435; CHECK-UNALIGNED-RV64-NEXT:    ret
3436;
3437; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
3438; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
3439; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
3440; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
3441; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3442; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3443; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB29_3
3444; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
3445; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 4(a0)
3446; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 4(a1)
3447; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a0
3448; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a1
3449; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB29_3
3450; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.2:
3451; CHECK-UNALIGNED-RV32-ZBB-NEXT:    li a0, 0
3452; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3453; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB29_3: # %res_block
3454; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
3455; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
3456; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
3457; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3458;
3459; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
3460; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
3461; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 0(a0)
3462; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 0(a1)
3463; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
3464; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
3465; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a2, a0, a1
3466; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
3467; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sub a0, a0, a2
3468; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3469;
3470; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
3471; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3472; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
3473; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
3474; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3475; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3476; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB29_3
3477; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
3478; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 4(a0)
3479; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 4(a1)
3480; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a0
3481; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a1
3482; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB29_3
3483; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.2:
3484; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    li a0, 0
3485; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3486; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB29_3: # %res_block
3487; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
3488; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
3489; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
3490; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3491;
3492; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
3493; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3494; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 0(a0)
3495; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 0(a1)
3496; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
3497; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
3498; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a2, a0, a1
3499; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
3500; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sub a0, a0, a2
3501; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3502;
3503; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_8:
3504; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
3505; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3506; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3507; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 8
3508; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
3509; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3510; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3511; CHECK-UNALIGNED-RV32-V-NEXT:    ret
3512;
3513; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_8:
3514; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
3515; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3516; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3517; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 8
3518; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
3519; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3520; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3521; CHECK-UNALIGNED-RV64-V-NEXT:    ret
3522entry:
3523  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 8)
3524  ret i32 %memcmp
3525}
3526
3527define i32 @memcmp_size_15(ptr %s1, ptr %s2) nounwind optsize {
3528; CHECK-ALIGNED-RV32-LABEL: memcmp_size_15:
3529; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
3530; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
3531; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3532; CHECK-ALIGNED-RV32-NEXT:    li a2, 15
3533; CHECK-ALIGNED-RV32-NEXT:    call memcmp
3534; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3535; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
3536; CHECK-ALIGNED-RV32-NEXT:    ret
3537;
3538; CHECK-ALIGNED-RV64-LABEL: memcmp_size_15:
3539; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
3540; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
3541; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3542; CHECK-ALIGNED-RV64-NEXT:    li a2, 15
3543; CHECK-ALIGNED-RV64-NEXT:    call memcmp
3544; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3545; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
3546; CHECK-ALIGNED-RV64-NEXT:    ret
3547;
3548; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
3549; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
3550; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
3551; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3552; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 15
3553; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
3554; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3555; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
3556; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
3557;
3558; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
3559; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
3560; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
3561; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3562; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 15
3563; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
3564; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3565; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
3566; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
3567;
3568; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
3569; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3570; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
3571; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3572; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 15
3573; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
3574; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3575; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
3576; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
3577;
3578; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
3579; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3580; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
3581; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3582; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 15
3583; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
3584; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3585; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
3586; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
3587;
3588; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_15:
3589; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
3590; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3591; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3592; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 15
3593; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
3594; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3595; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3596; CHECK-ALIGNED-RV32-V-NEXT:    ret
3597;
3598; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_15:
3599; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
3600; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3601; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3602; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 15
3603; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
3604; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3605; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3606; CHECK-ALIGNED-RV64-V-NEXT:    ret
3607;
3608; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_15:
3609; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
3610; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
3611; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3612; CHECK-UNALIGNED-RV32-NEXT:    li a2, 15
3613; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
3614; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3615; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
3616; CHECK-UNALIGNED-RV32-NEXT:    ret
3617;
3618; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_15:
3619; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
3620; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
3621; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3622; CHECK-UNALIGNED-RV64-NEXT:    li a2, 15
3623; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
3624; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3625; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
3626; CHECK-UNALIGNED-RV64-NEXT:    ret
3627;
3628; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
3629; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
3630; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
3631; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
3632; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3633; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3634; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB30_5
3635; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
3636; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 4(a0)
3637; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a1)
3638; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3639; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3640; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB30_5
3641; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.2: # %loadbb2
3642; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 8(a0)
3643; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 8(a1)
3644; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3645; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3646; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB30_5
3647; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.3: # %loadbb3
3648; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 11(a0)
3649; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 11(a1)
3650; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a0
3651; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a1
3652; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB30_5
3653; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.4:
3654; CHECK-UNALIGNED-RV32-ZBB-NEXT:    li a0, 0
3655; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3656; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB30_5: # %res_block
3657; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
3658; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
3659; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
3660; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3661;
3662; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
3663; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
3664; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
3665; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
3666; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
3667; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
3668; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB30_3
3669; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
3670; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 7(a0)
3671; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 7(a1)
3672; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
3673; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
3674; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB30_3
3675; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2:
3676; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
3677; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3678; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB30_3: # %res_block
3679; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
3680; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
3681; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
3682; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3683;
3684; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
3685; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3686; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
3687; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
3688; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3689; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3690; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB30_5
3691; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
3692; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 4(a0)
3693; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a1)
3694; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3695; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3696; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB30_5
3697; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.2: # %loadbb2
3698; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 8(a0)
3699; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 8(a1)
3700; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3701; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3702; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB30_5
3703; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.3: # %loadbb3
3704; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 11(a0)
3705; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 11(a1)
3706; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a0
3707; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a1
3708; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB30_5
3709; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.4:
3710; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    li a0, 0
3711; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3712; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB30_5: # %res_block
3713; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
3714; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
3715; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
3716; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3717;
3718; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
3719; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3720; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
3721; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
3722; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
3723; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
3724; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB30_3
3725; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
3726; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 7(a0)
3727; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 7(a1)
3728; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
3729; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
3730; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB30_3
3731; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2:
3732; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
3733; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3734; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB30_3: # %res_block
3735; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
3736; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
3737; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
3738; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3739;
3740; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_15:
3741; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
3742; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3743; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3744; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 15
3745; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
3746; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3747; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3748; CHECK-UNALIGNED-RV32-V-NEXT:    ret
3749;
3750; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_15:
3751; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
3752; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3753; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3754; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 15
3755; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
3756; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3757; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3758; CHECK-UNALIGNED-RV64-V-NEXT:    ret
3759entry:
3760  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 15)
3761  ret i32 %memcmp
3762}
3763
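; Length 16: same expansion pattern as length 15 for the unaligned Zbb/Zbkb configurations, but with non-overlapping loads at offsets 0/4/8/12 (RV32) or 0/8 (RV64); all other configurations call memcmp.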
3764define i32 @memcmp_size_16(ptr %s1, ptr %s2) nounwind optsize {
3765; CHECK-ALIGNED-RV32-LABEL: memcmp_size_16:
3766; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
3767; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
3768; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3769; CHECK-ALIGNED-RV32-NEXT:    li a2, 16
3770; CHECK-ALIGNED-RV32-NEXT:    call memcmp
3771; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3772; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
3773; CHECK-ALIGNED-RV32-NEXT:    ret
3774;
3775; CHECK-ALIGNED-RV64-LABEL: memcmp_size_16:
3776; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
3777; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
3778; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3779; CHECK-ALIGNED-RV64-NEXT:    li a2, 16
3780; CHECK-ALIGNED-RV64-NEXT:    call memcmp
3781; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3782; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
3783; CHECK-ALIGNED-RV64-NEXT:    ret
3784;
3785; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
3786; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
3787; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
3788; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3789; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 16
3790; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
3791; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3792; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
3793; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
3794;
3795; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
3796; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
3797; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
3798; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3799; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 16
3800; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
3801; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3802; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
3803; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
3804;
3805; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
3806; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3807; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
3808; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3809; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 16
3810; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
3811; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3812; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
3813; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
3814;
3815; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
3816; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3817; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
3818; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3819; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 16
3820; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
3821; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3822; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
3823; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
3824;
3825; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_16:
3826; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
3827; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3828; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3829; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 16
3830; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
3831; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3832; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3833; CHECK-ALIGNED-RV32-V-NEXT:    ret
3834;
3835; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_16:
3836; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
3837; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3838; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3839; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 16
3840; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
3841; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3842; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3843; CHECK-ALIGNED-RV64-V-NEXT:    ret
3844;
3845; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_16:
3846; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
3847; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
3848; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3849; CHECK-UNALIGNED-RV32-NEXT:    li a2, 16
3850; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
3851; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3852; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
3853; CHECK-UNALIGNED-RV32-NEXT:    ret
3854;
3855; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_16:
3856; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
3857; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
3858; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3859; CHECK-UNALIGNED-RV64-NEXT:    li a2, 16
3860; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
3861; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3862; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
3863; CHECK-UNALIGNED-RV64-NEXT:    ret
3864;
3865; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
3866; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
3867; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
3868; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
3869; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3870; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3871; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB31_5
3872; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.1: # %loadbb1
3873; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 4(a0)
3874; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a1)
3875; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3876; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3877; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB31_5
3878; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.2: # %loadbb2
3879; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 8(a0)
3880; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 8(a1)
3881; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a2
3882; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a3
3883; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB31_5
3884; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.3: # %loadbb3
3885; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 12(a0)
3886; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 12(a1)
3887; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a2, a0
3888; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a3, a1
3889; CHECK-UNALIGNED-RV32-ZBB-NEXT:    bne a2, a3, .LBB31_5
3890; CHECK-UNALIGNED-RV32-ZBB-NEXT:  # %bb.4:
3891; CHECK-UNALIGNED-RV32-ZBB-NEXT:    li a0, 0
3892; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3893; CHECK-UNALIGNED-RV32-ZBB-NEXT:  .LBB31_5: # %res_block
3894; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a2, a3
3895; CHECK-UNALIGNED-RV32-ZBB-NEXT:    neg a0, a0
3896; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ori a0, a0, 1
3897; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
3898;
3899; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
3900; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
3901; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
3902; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
3903; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
3904; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
3905; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB31_3
3906; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
3907; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 8(a0)
3908; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 8(a1)
3909; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
3910; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
3911; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB31_3
3912; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2:
3913; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
3914; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3915; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB31_3: # %res_block
3916; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
3917; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
3918; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
3919; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
3920;
3921; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
3922; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
3923; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
3924; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
3925; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3926; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3927; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB31_5
3928; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.1: # %loadbb1
3929; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 4(a0)
3930; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a1)
3931; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3932; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3933; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB31_5
3934; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.2: # %loadbb2
3935; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 8(a0)
3936; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 8(a1)
3937; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a2
3938; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a3
3939; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB31_5
3940; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.3: # %loadbb3
3941; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 12(a0)
3942; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 12(a1)
3943; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a2, a0
3944; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a3, a1
3945; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    bne a2, a3, .LBB31_5
3946; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  # %bb.4:
3947; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    li a0, 0
3948; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3949; CHECK-UNALIGNED-RV32-ZBKB-NEXT:  .LBB31_5: # %res_block
3950; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a2, a3
3951; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    neg a0, a0
3952; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ori a0, a0, 1
3953; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
3954;
3955; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
3956; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
3957; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
3958; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
3959; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
3960; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
3961; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB31_3
3962; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
3963; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 8(a0)
3964; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 8(a1)
3965; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
3966; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
3967; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB31_3
3968; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2:
3969; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
3970; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3971; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB31_3: # %res_block
3972; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
3973; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
3974; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
3975; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
3976;
3977; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_16:
3978; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
3979; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
3980; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
3981; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 16
3982; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
3983; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
3984; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
3985; CHECK-UNALIGNED-RV32-V-NEXT:    ret
3986;
3987; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_16:
3988; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
3989; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
3990; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
3991; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 16
3992; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
3993; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
3994; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
3995; CHECK-UNALIGNED-RV64-V-NEXT:    ret
3996entry:
3997  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 16)
3998  ret i32 %memcmp
3999}
4000
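; Length 31: RV32 always calls memcmp; on RV64 only the unaligned Zbb/Zbkb configurations expand inline, with an overlapping final doubleword load at offset 23.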
4001define i32 @memcmp_size_31(ptr %s1, ptr %s2) nounwind optsize {
4002; CHECK-RV32-LABEL: memcmp_size_31:
4003; CHECK-RV32:       # %bb.0: # %entry
4004; CHECK-RV32-NEXT:    addi sp, sp, -16
4005; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4006; CHECK-RV32-NEXT:    li a2, 31
4007; CHECK-RV32-NEXT:    call memcmp
4008; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4009; CHECK-RV32-NEXT:    addi sp, sp, 16
4010; CHECK-RV32-NEXT:    ret
4011;
4012; CHECK-ALIGNED-RV64-LABEL: memcmp_size_31:
4013; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
4014; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
4015; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4016; CHECK-ALIGNED-RV64-NEXT:    li a2, 31
4017; CHECK-ALIGNED-RV64-NEXT:    call memcmp
4018; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4019; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
4020; CHECK-ALIGNED-RV64-NEXT:    ret
4021;
4022; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
4023; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
4024; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
4025; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4026; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 31
4027; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
4028; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4029; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
4030; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
4031;
4032; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
4033; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4034; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
4035; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4036; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 31
4037; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
4038; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4039; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
4040; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
4041;
4042; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_31:
4043; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
4044; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4045; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4046; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 31
4047; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
4048; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4049; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4050; CHECK-ALIGNED-RV64-V-NEXT:    ret
4051;
4052; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_31:
4053; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
4054; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
4055; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4056; CHECK-UNALIGNED-RV64-NEXT:    li a2, 31
4057; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
4058; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4059; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
4060; CHECK-UNALIGNED-RV64-NEXT:    ret
4061;
4062; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
4063; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
4064; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
4065; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
4066; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4067; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4068; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB32_5
4069; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
4070; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 8(a0)
4071; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a1)
4072; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4073; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4074; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB32_5
4075; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2: # %loadbb2
4076; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 16(a0)
4077; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 16(a1)
4078; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4079; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4080; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB32_5
4081; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.3: # %loadbb3
4082; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 23(a0)
4083; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 23(a1)
4084; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
4085; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
4086; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB32_5
4087; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.4:
4088; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
4089; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4090; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB32_5: # %res_block
4091; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
4092; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
4093; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
4094; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4095;
4096; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
4097; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4098; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
4099; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
4100; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4101; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4102; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB32_5
4103; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
4104; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 8(a0)
4105; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a1)
4106; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4107; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4108; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB32_5
4109; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2: # %loadbb2
4110; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 16(a0)
4111; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 16(a1)
4112; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4113; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4114; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB32_5
4115; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.3: # %loadbb3
4116; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 23(a0)
4117; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 23(a1)
4118; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
4119; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
4120; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB32_5
4121; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.4:
4122; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
4123; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4124; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB32_5: # %res_block
4125; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
4126; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
4127; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
4128; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4129;
4130; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_31:
4131; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
4132; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4133; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4134; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 31
4135; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
4136; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4137; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4138; CHECK-UNALIGNED-RV64-V-NEXT:    ret
4139entry:
4140  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 31)
4141  ret i32 %memcmp
4142}
4143
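; Length 32: as with length 31, only the unaligned RV64 Zbb/Zbkb configurations expand inline, here with four non-overlapping doubleword loads; RV32 and the remaining RV64 configurations call memcmp.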
4144define i32 @memcmp_size_32(ptr %s1, ptr %s2) nounwind optsize {
4145; CHECK-RV32-LABEL: memcmp_size_32:
4146; CHECK-RV32:       # %bb.0: # %entry
4147; CHECK-RV32-NEXT:    addi sp, sp, -16
4148; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4149; CHECK-RV32-NEXT:    li a2, 32
4150; CHECK-RV32-NEXT:    call memcmp
4151; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4152; CHECK-RV32-NEXT:    addi sp, sp, 16
4153; CHECK-RV32-NEXT:    ret
4154;
4155; CHECK-ALIGNED-RV64-LABEL: memcmp_size_32:
4156; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
4157; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
4158; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4159; CHECK-ALIGNED-RV64-NEXT:    li a2, 32
4160; CHECK-ALIGNED-RV64-NEXT:    call memcmp
4161; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4162; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
4163; CHECK-ALIGNED-RV64-NEXT:    ret
4164;
4165; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
4166; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
4167; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
4168; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4169; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 32
4170; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
4171; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4172; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
4173; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
4174;
4175; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
4176; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4177; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
4178; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4179; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 32
4180; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
4181; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4182; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
4183; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
4184;
4185; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_32:
4186; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
4187; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4188; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4189; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 32
4190; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
4191; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4192; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4193; CHECK-ALIGNED-RV64-V-NEXT:    ret
4194;
4195; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_32:
4196; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
4197; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
4198; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4199; CHECK-UNALIGNED-RV64-NEXT:    li a2, 32
4200; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
4201; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4202; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
4203; CHECK-UNALIGNED-RV64-NEXT:    ret
4204;
4205; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
4206; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
4207; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
4208; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
4209; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4210; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4211; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB33_5
4212; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
4213; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 8(a0)
4214; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a1)
4215; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4216; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4217; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB33_5
4218; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2: # %loadbb2
4219; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 16(a0)
4220; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 16(a1)
4221; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
4222; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
4223; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB33_5
4224; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.3: # %loadbb3
4225; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 24(a0)
4226; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 24(a1)
4227; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
4228; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
4229; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB33_5
4230; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.4:
4231; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
4232; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4233; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB33_5: # %res_block
4234; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
4235; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
4236; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
4237; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4238;
4239; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
4240; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4241; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
4242; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
4243; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4244; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4245; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB33_5
4246; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
4247; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 8(a0)
4248; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a1)
4249; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4250; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4251; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB33_5
4252; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2: # %loadbb2
4253; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 16(a0)
4254; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 16(a1)
4255; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
4256; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
4257; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB33_5
4258; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.3: # %loadbb3
4259; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 24(a0)
4260; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 24(a1)
4261; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
4262; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
4263; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB33_5
4264; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.4:
4265; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
4266; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4267; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB33_5: # %res_block
4268; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
4269; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
4270; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
4271; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4272;
4273; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_32:
4274; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
4275; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4276; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4277; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 32
4278; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
4279; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4280; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4281; CHECK-UNALIGNED-RV64-V-NEXT:    ret
4282entry:
4283  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 32)
4284  ret i32 %memcmp
4285}
4286
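; Lengths of 63 and above are not expanded inline at optsize; every configuration calls memcmp.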
4287define i32 @memcmp_size_63(ptr %s1, ptr %s2) nounwind optsize {
4288; CHECK-RV32-LABEL: memcmp_size_63:
4289; CHECK-RV32:       # %bb.0: # %entry
4290; CHECK-RV32-NEXT:    addi sp, sp, -16
4291; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4292; CHECK-RV32-NEXT:    li a2, 63
4293; CHECK-RV32-NEXT:    call memcmp
4294; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4295; CHECK-RV32-NEXT:    addi sp, sp, 16
4296; CHECK-RV32-NEXT:    ret
4297;
4298; CHECK-RV64-LABEL: memcmp_size_63:
4299; CHECK-RV64:       # %bb.0: # %entry
4300; CHECK-RV64-NEXT:    addi sp, sp, -16
4301; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4302; CHECK-RV64-NEXT:    li a2, 63
4303; CHECK-RV64-NEXT:    call memcmp
4304; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4305; CHECK-RV64-NEXT:    addi sp, sp, 16
4306; CHECK-RV64-NEXT:    ret
4307entry:
4308  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 63)
4309  ret i32 %memcmp
4310}
4311
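; Length 64: libcall in every configuration.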
4312define i32 @memcmp_size_64(ptr %s1, ptr %s2) nounwind optsize {
4313; CHECK-RV32-LABEL: memcmp_size_64:
4314; CHECK-RV32:       # %bb.0: # %entry
4315; CHECK-RV32-NEXT:    addi sp, sp, -16
4316; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4317; CHECK-RV32-NEXT:    li a2, 64
4318; CHECK-RV32-NEXT:    call memcmp
4319; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4320; CHECK-RV32-NEXT:    addi sp, sp, 16
4321; CHECK-RV32-NEXT:    ret
4322;
4323; CHECK-RV64-LABEL: memcmp_size_64:
4324; CHECK-RV64:       # %bb.0: # %entry
4325; CHECK-RV64-NEXT:    addi sp, sp, -16
4326; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4327; CHECK-RV64-NEXT:    li a2, 64
4328; CHECK-RV64-NEXT:    call memcmp
4329; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4330; CHECK-RV64-NEXT:    addi sp, sp, 16
4331; CHECK-RV64-NEXT:    ret
4332entry:
4333  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 64)
4334  ret i32 %memcmp
4335}
4336
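; Length 127: libcall in every configuration.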
4337define i32 @memcmp_size_127(ptr %s1, ptr %s2) nounwind optsize {
4338; CHECK-RV32-LABEL: memcmp_size_127:
4339; CHECK-RV32:       # %bb.0: # %entry
4340; CHECK-RV32-NEXT:    addi sp, sp, -16
4341; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4342; CHECK-RV32-NEXT:    li a2, 127
4343; CHECK-RV32-NEXT:    call memcmp
4344; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4345; CHECK-RV32-NEXT:    addi sp, sp, 16
4346; CHECK-RV32-NEXT:    ret
4347;
4348; CHECK-RV64-LABEL: memcmp_size_127:
4349; CHECK-RV64:       # %bb.0: # %entry
4350; CHECK-RV64-NEXT:    addi sp, sp, -16
4351; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4352; CHECK-RV64-NEXT:    li a2, 127
4353; CHECK-RV64-NEXT:    call memcmp
4354; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4355; CHECK-RV64-NEXT:    addi sp, sp, 16
4356; CHECK-RV64-NEXT:    ret
4357entry:
4358  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 127)
4359  ret i32 %memcmp
4360}
4361
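; Length 128: libcall in every configuration.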
4362define i32 @memcmp_size_128(ptr %s1, ptr %s2) nounwind optsize {
4363; CHECK-RV32-LABEL: memcmp_size_128:
4364; CHECK-RV32:       # %bb.0: # %entry
4365; CHECK-RV32-NEXT:    addi sp, sp, -16
4366; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4367; CHECK-RV32-NEXT:    li a2, 128
4368; CHECK-RV32-NEXT:    call memcmp
4369; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4370; CHECK-RV32-NEXT:    addi sp, sp, 16
4371; CHECK-RV32-NEXT:    ret
4372;
4373; CHECK-RV64-LABEL: memcmp_size_128:
4374; CHECK-RV64:       # %bb.0: # %entry
4375; CHECK-RV64-NEXT:    addi sp, sp, -16
4376; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4377; CHECK-RV64-NEXT:    li a2, 128
4378; CHECK-RV64-NEXT:    call memcmp
4379; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4380; CHECK-RV64-NEXT:    addi sp, sp, 16
4381; CHECK-RV64-NEXT:    ret
4382entry:
4383  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 128)
4384  ret i32 %memcmp
4385}
4386
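; A non-constant length can never be expanded inline, so every configuration forwards the arguments to memcmp.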
4387define i32 @memcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind optsize {
4388; CHECK-RV32-LABEL: memcmp_size_runtime:
4389; CHECK-RV32:       # %bb.0: # %entry
4390; CHECK-RV32-NEXT:    addi sp, sp, -16
4391; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4392; CHECK-RV32-NEXT:    call memcmp
4393; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4394; CHECK-RV32-NEXT:    addi sp, sp, 16
4395; CHECK-RV32-NEXT:    ret
4396;
4397; CHECK-RV64-LABEL: memcmp_size_runtime:
4398; CHECK-RV64:       # %bb.0: # %entry
4399; CHECK-RV64-NEXT:    addi sp, sp, -16
4400; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4401; CHECK-RV64-NEXT:    call memcmp
4402; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4403; CHECK-RV64-NEXT:    addi sp, sp, 16
4404; CHECK-RV64-NEXT:    ret
4405entry:
4406  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen %len)
4407  ret i32 %memcmp
4408}
4409
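; Comparing the result against zero only requires an equality test, so even the aligned configurations expand inline with byte loads (no rev8 needed); all unaligned configurations reduce to two word loads, an xor and a seqz.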
4410define i1 @memcmp_eq_zero(ptr %s1, ptr %s2) nounwind optsize {
4411; CHECK-ALIGNED-RV32-LABEL: memcmp_eq_zero:
4412; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
4413; CHECK-ALIGNED-RV32-NEXT:    lbu a2, 1(a1)
4414; CHECK-ALIGNED-RV32-NEXT:    lbu a3, 2(a1)
4415; CHECK-ALIGNED-RV32-NEXT:    lbu a4, 3(a1)
4416; CHECK-ALIGNED-RV32-NEXT:    lbu a1, 0(a1)
4417; CHECK-ALIGNED-RV32-NEXT:    slli a2, a2, 8
4418; CHECK-ALIGNED-RV32-NEXT:    slli a3, a3, 16
4419; CHECK-ALIGNED-RV32-NEXT:    slli a4, a4, 24
4420; CHECK-ALIGNED-RV32-NEXT:    or a1, a2, a1
4421; CHECK-ALIGNED-RV32-NEXT:    lbu a2, 0(a0)
4422; CHECK-ALIGNED-RV32-NEXT:    lbu a5, 1(a0)
4423; CHECK-ALIGNED-RV32-NEXT:    or a3, a4, a3
4424; CHECK-ALIGNED-RV32-NEXT:    lbu a4, 2(a0)
4425; CHECK-ALIGNED-RV32-NEXT:    lbu a0, 3(a0)
4426; CHECK-ALIGNED-RV32-NEXT:    slli a5, a5, 8
4427; CHECK-ALIGNED-RV32-NEXT:    or a2, a5, a2
4428; CHECK-ALIGNED-RV32-NEXT:    slli a4, a4, 16
4429; CHECK-ALIGNED-RV32-NEXT:    slli a0, a0, 24
4430; CHECK-ALIGNED-RV32-NEXT:    or a0, a0, a4
4431; CHECK-ALIGNED-RV32-NEXT:    or a1, a3, a1
4432; CHECK-ALIGNED-RV32-NEXT:    or a0, a0, a2
4433; CHECK-ALIGNED-RV32-NEXT:    xor a0, a0, a1
4434; CHECK-ALIGNED-RV32-NEXT:    seqz a0, a0
4435; CHECK-ALIGNED-RV32-NEXT:    ret
4436;
4437; CHECK-ALIGNED-RV64-LABEL: memcmp_eq_zero:
4438; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
4439; CHECK-ALIGNED-RV64-NEXT:    lbu a2, 1(a1)
4440; CHECK-ALIGNED-RV64-NEXT:    lbu a3, 2(a1)
4441; CHECK-ALIGNED-RV64-NEXT:    lb a4, 3(a1)
4442; CHECK-ALIGNED-RV64-NEXT:    lbu a1, 0(a1)
4443; CHECK-ALIGNED-RV64-NEXT:    slli a2, a2, 8
4444; CHECK-ALIGNED-RV64-NEXT:    slli a3, a3, 16
4445; CHECK-ALIGNED-RV64-NEXT:    slli a4, a4, 24
4446; CHECK-ALIGNED-RV64-NEXT:    or a1, a2, a1
4447; CHECK-ALIGNED-RV64-NEXT:    lbu a2, 0(a0)
4448; CHECK-ALIGNED-RV64-NEXT:    lbu a5, 1(a0)
4449; CHECK-ALIGNED-RV64-NEXT:    or a3, a4, a3
4450; CHECK-ALIGNED-RV64-NEXT:    lbu a4, 2(a0)
4451; CHECK-ALIGNED-RV64-NEXT:    lb a0, 3(a0)
4452; CHECK-ALIGNED-RV64-NEXT:    slli a5, a5, 8
4453; CHECK-ALIGNED-RV64-NEXT:    or a2, a5, a2
4454; CHECK-ALIGNED-RV64-NEXT:    slli a4, a4, 16
4455; CHECK-ALIGNED-RV64-NEXT:    slli a0, a0, 24
4456; CHECK-ALIGNED-RV64-NEXT:    or a0, a0, a4
4457; CHECK-ALIGNED-RV64-NEXT:    or a1, a3, a1
4458; CHECK-ALIGNED-RV64-NEXT:    or a0, a0, a2
4459; CHECK-ALIGNED-RV64-NEXT:    xor a0, a0, a1
4460; CHECK-ALIGNED-RV64-NEXT:    seqz a0, a0
4461; CHECK-ALIGNED-RV64-NEXT:    ret
4462;
4463; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_eq_zero:
4464; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
4465; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a2, 1(a1)
4466; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a3, 2(a1)
4467; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a4, 3(a1)
4468; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a1, 0(a1)
4469; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a2, a2, 8
4470; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a3, a3, 16
4471; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a4, a4, 24
4472; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a1, a2, a1
4473; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a2, 0(a0)
4474; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a5, 1(a0)
4475; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a3, a4, a3
4476; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a4, 2(a0)
4477; CHECK-ALIGNED-RV32-ZBB-NEXT:    lbu a0, 3(a0)
4478; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a5, a5, 8
4479; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a2, a5, a2
4480; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a4, a4, 16
4481; CHECK-ALIGNED-RV32-ZBB-NEXT:    slli a0, a0, 24
4482; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a0, a0, a4
4483; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a1, a3, a1
4484; CHECK-ALIGNED-RV32-ZBB-NEXT:    or a0, a0, a2
4485; CHECK-ALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
4486; CHECK-ALIGNED-RV32-ZBB-NEXT:    seqz a0, a0
4487; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
4488;
4489; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_eq_zero:
4490; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
4491; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a2, 1(a1)
4492; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a3, 2(a1)
4493; CHECK-ALIGNED-RV64-ZBB-NEXT:    lb a4, 3(a1)
4494; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a1, 0(a1)
4495; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a2, a2, 8
4496; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a3, a3, 16
4497; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a4, a4, 24
4498; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a1, a2, a1
4499; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a2, 0(a0)
4500; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a5, 1(a0)
4501; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a3, a4, a3
4502; CHECK-ALIGNED-RV64-ZBB-NEXT:    lbu a4, 2(a0)
4503; CHECK-ALIGNED-RV64-ZBB-NEXT:    lb a0, 3(a0)
4504; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a5, a5, 8
4505; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a2, a5, a2
4506; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a4, a4, 16
4507; CHECK-ALIGNED-RV64-ZBB-NEXT:    slli a0, a0, 24
4508; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a0, a0, a4
4509; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a1, a3, a1
4510; CHECK-ALIGNED-RV64-ZBB-NEXT:    or a0, a0, a2
4511; CHECK-ALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
4512; CHECK-ALIGNED-RV64-ZBB-NEXT:    seqz a0, a0
4513; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
4514;
4515; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_eq_zero:
4516; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
4517; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a2, 0(a1)
4518; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a3, 1(a1)
4519; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a4, 2(a1)
4520; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a1, 3(a1)
4521; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a5, 1(a0)
4522; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a6, 2(a0)
4523; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a7, 3(a0)
4524; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lbu a0, 0(a0)
4525; CHECK-ALIGNED-RV32-ZBKB-NEXT:    packh a1, a4, a1
4526; CHECK-ALIGNED-RV32-ZBKB-NEXT:    packh a2, a2, a3
4527; CHECK-ALIGNED-RV32-ZBKB-NEXT:    packh a3, a6, a7
4528; CHECK-ALIGNED-RV32-ZBKB-NEXT:    packh a0, a0, a5
4529; CHECK-ALIGNED-RV32-ZBKB-NEXT:    pack a1, a2, a1
4530; CHECK-ALIGNED-RV32-ZBKB-NEXT:    pack a0, a0, a3
4531; CHECK-ALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
4532; CHECK-ALIGNED-RV32-ZBKB-NEXT:    seqz a0, a0
4533; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
4534;
4535; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_eq_zero:
4536; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4537; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a2, 0(a1)
4538; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a3, 1(a1)
4539; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a4, 2(a1)
4540; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lb a1, 3(a1)
4541; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a5, 0(a0)
4542; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a6, 1(a0)
4543; CHECK-ALIGNED-RV64-ZBKB-NEXT:    packh a2, a2, a3
4544; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lbu a3, 2(a0)
4545; CHECK-ALIGNED-RV64-ZBKB-NEXT:    lb a0, 3(a0)
4546; CHECK-ALIGNED-RV64-ZBKB-NEXT:    packh a5, a5, a6
4547; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slli a4, a4, 16
4548; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slli a1, a1, 24
4549; CHECK-ALIGNED-RV64-ZBKB-NEXT:    or a1, a1, a4
4550; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slli a3, a3, 16
4551; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slli a0, a0, 24
4552; CHECK-ALIGNED-RV64-ZBKB-NEXT:    or a0, a0, a3
4553; CHECK-ALIGNED-RV64-ZBKB-NEXT:    or a1, a1, a2
4554; CHECK-ALIGNED-RV64-ZBKB-NEXT:    or a0, a0, a5
4555; CHECK-ALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
4556; CHECK-ALIGNED-RV64-ZBKB-NEXT:    seqz a0, a0
4557; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
4558;
4559; CHECK-ALIGNED-RV32-V-LABEL: memcmp_eq_zero:
4560; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
4561; CHECK-ALIGNED-RV32-V-NEXT:    lbu a2, 1(a1)
4562; CHECK-ALIGNED-RV32-V-NEXT:    lbu a3, 2(a1)
4563; CHECK-ALIGNED-RV32-V-NEXT:    lbu a4, 3(a1)
4564; CHECK-ALIGNED-RV32-V-NEXT:    lbu a1, 0(a1)
4565; CHECK-ALIGNED-RV32-V-NEXT:    slli a2, a2, 8
4566; CHECK-ALIGNED-RV32-V-NEXT:    slli a3, a3, 16
4567; CHECK-ALIGNED-RV32-V-NEXT:    slli a4, a4, 24
4568; CHECK-ALIGNED-RV32-V-NEXT:    or a1, a2, a1
4569; CHECK-ALIGNED-RV32-V-NEXT:    lbu a2, 0(a0)
4570; CHECK-ALIGNED-RV32-V-NEXT:    lbu a5, 1(a0)
4571; CHECK-ALIGNED-RV32-V-NEXT:    or a3, a4, a3
4572; CHECK-ALIGNED-RV32-V-NEXT:    lbu a4, 2(a0)
4573; CHECK-ALIGNED-RV32-V-NEXT:    lbu a0, 3(a0)
4574; CHECK-ALIGNED-RV32-V-NEXT:    slli a5, a5, 8
4575; CHECK-ALIGNED-RV32-V-NEXT:    or a2, a5, a2
4576; CHECK-ALIGNED-RV32-V-NEXT:    slli a4, a4, 16
4577; CHECK-ALIGNED-RV32-V-NEXT:    slli a0, a0, 24
4578; CHECK-ALIGNED-RV32-V-NEXT:    or a0, a0, a4
4579; CHECK-ALIGNED-RV32-V-NEXT:    or a1, a3, a1
4580; CHECK-ALIGNED-RV32-V-NEXT:    or a0, a0, a2
4581; CHECK-ALIGNED-RV32-V-NEXT:    xor a0, a0, a1
4582; CHECK-ALIGNED-RV32-V-NEXT:    seqz a0, a0
4583; CHECK-ALIGNED-RV32-V-NEXT:    ret
4584;
4585; CHECK-ALIGNED-RV64-V-LABEL: memcmp_eq_zero:
4586; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
4587; CHECK-ALIGNED-RV64-V-NEXT:    lbu a2, 1(a1)
4588; CHECK-ALIGNED-RV64-V-NEXT:    lbu a3, 2(a1)
4589; CHECK-ALIGNED-RV64-V-NEXT:    lb a4, 3(a1)
4590; CHECK-ALIGNED-RV64-V-NEXT:    lbu a1, 0(a1)
4591; CHECK-ALIGNED-RV64-V-NEXT:    slli a2, a2, 8
4592; CHECK-ALIGNED-RV64-V-NEXT:    slli a3, a3, 16
4593; CHECK-ALIGNED-RV64-V-NEXT:    slli a4, a4, 24
4594; CHECK-ALIGNED-RV64-V-NEXT:    or a1, a2, a1
4595; CHECK-ALIGNED-RV64-V-NEXT:    lbu a2, 0(a0)
4596; CHECK-ALIGNED-RV64-V-NEXT:    lbu a5, 1(a0)
4597; CHECK-ALIGNED-RV64-V-NEXT:    or a3, a4, a3
4598; CHECK-ALIGNED-RV64-V-NEXT:    lbu a4, 2(a0)
4599; CHECK-ALIGNED-RV64-V-NEXT:    lb a0, 3(a0)
4600; CHECK-ALIGNED-RV64-V-NEXT:    slli a5, a5, 8
4601; CHECK-ALIGNED-RV64-V-NEXT:    or a2, a5, a2
4602; CHECK-ALIGNED-RV64-V-NEXT:    slli a4, a4, 16
4603; CHECK-ALIGNED-RV64-V-NEXT:    slli a0, a0, 24
4604; CHECK-ALIGNED-RV64-V-NEXT:    or a0, a0, a4
4605; CHECK-ALIGNED-RV64-V-NEXT:    or a1, a3, a1
4606; CHECK-ALIGNED-RV64-V-NEXT:    or a0, a0, a2
4607; CHECK-ALIGNED-RV64-V-NEXT:    xor a0, a0, a1
4608; CHECK-ALIGNED-RV64-V-NEXT:    seqz a0, a0
4609; CHECK-ALIGNED-RV64-V-NEXT:    ret
4610;
4611; CHECK-UNALIGNED-LABEL: memcmp_eq_zero:
4612; CHECK-UNALIGNED:       # %bb.0: # %entry
4613; CHECK-UNALIGNED-NEXT:    lw a0, 0(a0)
4614; CHECK-UNALIGNED-NEXT:    lw a1, 0(a1)
4615; CHECK-UNALIGNED-NEXT:    xor a0, a0, a1
4616; CHECK-UNALIGNED-NEXT:    seqz a0, a0
4617; CHECK-UNALIGNED-NEXT:    ret
4618entry:
4619  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
4620  %ret = icmp eq i32 %memcmp, 0
4621  ret i1 %ret
4622}
4623
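; A signed "less than zero" test needs the full ordering, so the aligned and plain unaligned configurations still call memcmp and test the sign bit; the unaligned Zbb/Zbkb configurations instead load a word, byte-reverse it with rev8 and compare unsigned with sltu.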
4624define i1 @memcmp_lt_zero(ptr %s1, ptr %s2) nounwind optsize {
4625; CHECK-ALIGNED-RV32-LABEL: memcmp_lt_zero:
4626; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
4627; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
4628; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4629; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
4630; CHECK-ALIGNED-RV32-NEXT:    call memcmp
4631; CHECK-ALIGNED-RV32-NEXT:    srli a0, a0, 31
4632; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4633; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
4634; CHECK-ALIGNED-RV32-NEXT:    ret
4635;
4636; CHECK-ALIGNED-RV64-LABEL: memcmp_lt_zero:
4637; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
4638; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
4639; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4640; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
4641; CHECK-ALIGNED-RV64-NEXT:    call memcmp
4642; CHECK-ALIGNED-RV64-NEXT:    slti a0, a0, 0
4643; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4644; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
4645; CHECK-ALIGNED-RV64-NEXT:    ret
4646;
4647; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
4648; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
4649; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
4650; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4651; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
4652; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
4653; CHECK-ALIGNED-RV32-ZBB-NEXT:    srli a0, a0, 31
4654; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4655; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
4656; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
4657;
4658; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
4659; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
4660; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
4661; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4662; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
4663; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
4664; CHECK-ALIGNED-RV64-ZBB-NEXT:    slti a0, a0, 0
4665; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4666; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
4667; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
4668;
4669; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
4670; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
4671; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
4672; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4673; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
4674; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
4675; CHECK-ALIGNED-RV32-ZBKB-NEXT:    srli a0, a0, 31
4676; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4677; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
4678; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
4679;
4680; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
4681; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4682; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
4683; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4684; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
4685; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
4686; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slti a0, a0, 0
4687; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4688; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
4689; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
4690;
4691; CHECK-ALIGNED-RV32-V-LABEL: memcmp_lt_zero:
4692; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
4693; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
4694; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4695; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
4696; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
4697; CHECK-ALIGNED-RV32-V-NEXT:    srli a0, a0, 31
4698; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4699; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
4700; CHECK-ALIGNED-RV32-V-NEXT:    ret
4701;
4702; CHECK-ALIGNED-RV64-V-LABEL: memcmp_lt_zero:
4703; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
4704; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4705; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4706; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
4707; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
4708; CHECK-ALIGNED-RV64-V-NEXT:    slti a0, a0, 0
4709; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4710; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4711; CHECK-ALIGNED-RV64-V-NEXT:    ret
4712;
4713; CHECK-UNALIGNED-RV32-LABEL: memcmp_lt_zero:
4714; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
4715; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
4716; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4717; CHECK-UNALIGNED-RV32-NEXT:    li a2, 4
4718; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
4719; CHECK-UNALIGNED-RV32-NEXT:    srli a0, a0, 31
4720; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4721; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
4722; CHECK-UNALIGNED-RV32-NEXT:    ret
4723;
4724; CHECK-UNALIGNED-RV64-LABEL: memcmp_lt_zero:
4725; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
4726; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
4727; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4728; CHECK-UNALIGNED-RV64-NEXT:    li a2, 4
4729; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
4730; CHECK-UNALIGNED-RV64-NEXT:    slti a0, a0, 0
4731; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4732; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
4733; CHECK-UNALIGNED-RV64-NEXT:    ret
4734;
4735; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
4736; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
4737; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 0(a0)
4738; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 0(a1)
4739; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
4740; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
4741; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a0, a1
4742; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
4743;
4744; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
4745; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
4746; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 0(a0)
4747; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 0(a1)
4748; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
4749; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
4750; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
4751; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
4752; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a0, a1
4753; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4754;
4755; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
4756; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
4757; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 0(a0)
4758; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 0(a1)
4759; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
4760; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
4761; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a0, a1
4762; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
4763;
4764; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
4765; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4766; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 0(a0)
4767; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 0(a1)
4768; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
4769; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
4770; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
4771; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
4772; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a0, a1
4773; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4774;
4775; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_lt_zero:
4776; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
4777; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
4778; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4779; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 4
4780; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
4781; CHECK-UNALIGNED-RV32-V-NEXT:    srli a0, a0, 31
4782; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4783; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
4784; CHECK-UNALIGNED-RV32-V-NEXT:    ret
4785;
4786; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_lt_zero:
4787; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
4788; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4789; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4790; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 4
4791; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
4792; CHECK-UNALIGNED-RV64-V-NEXT:    slti a0, a0, 0
4793; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4794; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4795; CHECK-UNALIGNED-RV64-V-NEXT:    ret
4796entry:
4797  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
4798  %ret = icmp slt i32 %memcmp, 0
4799  ret i1 %ret
4800}
4801
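; memcmp(s1, s2, 4) > 0: as the CHECK lines below show, configurations with
; +unaligned-scalar-mem plus Zbb or Zbkb expand the 4-byte compare inline to
; byte-reversed (rev8) word loads and an unsigned sltu, while all other
; configurations emit a libcall to memcmp and test the sign of the result
; with sgtz.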
4802define i1 @memcmp_gt_zero(ptr %s1, ptr %s2) nounwind optsize {
4803; CHECK-ALIGNED-RV32-LABEL: memcmp_gt_zero:
4804; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
4805; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
4806; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4807; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
4808; CHECK-ALIGNED-RV32-NEXT:    call memcmp
4809; CHECK-ALIGNED-RV32-NEXT:    sgtz a0, a0
4810; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4811; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
4812; CHECK-ALIGNED-RV32-NEXT:    ret
4813;
4814; CHECK-ALIGNED-RV64-LABEL: memcmp_gt_zero:
4815; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
4816; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
4817; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4818; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
4819; CHECK-ALIGNED-RV64-NEXT:    call memcmp
4820; CHECK-ALIGNED-RV64-NEXT:    sgtz a0, a0
4821; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4822; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
4823; CHECK-ALIGNED-RV64-NEXT:    ret
4824;
4825; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
4826; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
4827; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
4828; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4829; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
4830; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
4831; CHECK-ALIGNED-RV32-ZBB-NEXT:    sgtz a0, a0
4832; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4833; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
4834; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
4835;
4836; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
4837; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
4838; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
4839; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4840; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
4841; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
4842; CHECK-ALIGNED-RV64-ZBB-NEXT:    sgtz a0, a0
4843; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4844; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
4845; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
4846;
4847; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
4848; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
4849; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
4850; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4851; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
4852; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
4853; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sgtz a0, a0
4854; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4855; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
4856; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
4857;
4858; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
4859; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4860; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
4861; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4862; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
4863; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
4864; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sgtz a0, a0
4865; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4866; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
4867; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
4868;
4869; CHECK-ALIGNED-RV32-V-LABEL: memcmp_gt_zero:
4870; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
4871; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
4872; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4873; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
4874; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
4875; CHECK-ALIGNED-RV32-V-NEXT:    sgtz a0, a0
4876; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4877; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
4878; CHECK-ALIGNED-RV32-V-NEXT:    ret
4879;
4880; CHECK-ALIGNED-RV64-V-LABEL: memcmp_gt_zero:
4881; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
4882; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4883; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4884; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
4885; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
4886; CHECK-ALIGNED-RV64-V-NEXT:    sgtz a0, a0
4887; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4888; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4889; CHECK-ALIGNED-RV64-V-NEXT:    ret
4890;
4891; CHECK-UNALIGNED-RV32-LABEL: memcmp_gt_zero:
4892; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
4893; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
4894; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4895; CHECK-UNALIGNED-RV32-NEXT:    li a2, 4
4896; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
4897; CHECK-UNALIGNED-RV32-NEXT:    sgtz a0, a0
4898; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4899; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
4900; CHECK-UNALIGNED-RV32-NEXT:    ret
4901;
4902; CHECK-UNALIGNED-RV64-LABEL: memcmp_gt_zero:
4903; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
4904; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
4905; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4906; CHECK-UNALIGNED-RV64-NEXT:    li a2, 4
4907; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
4908; CHECK-UNALIGNED-RV64-NEXT:    sgtz a0, a0
4909; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4910; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
4911; CHECK-UNALIGNED-RV64-NEXT:    ret
4912;
4913; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
4914; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
4915; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 0(a0)
4916; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 0(a1)
4917; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
4918; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
4919; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a1, a0
4920; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
4921;
4922; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
4923; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
4924; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 0(a0)
4925; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 0(a1)
4926; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
4927; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
4928; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
4929; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
4930; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
4931; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
4932;
4933; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
4934; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
4935; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 0(a0)
4936; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 0(a1)
4937; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
4938; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
4939; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a1, a0
4940; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
4941;
4942; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
4943; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
4944; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 0(a0)
4945; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 0(a1)
4946; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
4947; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
4948; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
4949; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
4950; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
4951; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
4952;
4953; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_gt_zero:
4954; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
4955; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
4956; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
4957; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 4
4958; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
4959; CHECK-UNALIGNED-RV32-V-NEXT:    sgtz a0, a0
4960; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
4961; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
4962; CHECK-UNALIGNED-RV32-V-NEXT:    ret
4963;
4964; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_gt_zero:
4965; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
4966; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
4967; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
4968; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 4
4969; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
4970; CHECK-UNALIGNED-RV64-V-NEXT:    sgtz a0, a0
4971; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
4972; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
4973; CHECK-UNALIGNED-RV64-V-NEXT:    ret
4974entry:
4975  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
4976  %ret = icmp sgt i32 %memcmp, 0
4977  ret i1 %ret
4978}
4979;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
4980; CHECK-ALIGNED: {{.*}}
4981