; Source: llvm/test/CodeGen/ARM/cmse-harden-entry-arguments.ll (revision 78ff617d3f573fb3a9b2fef180fa0fd43d5584ea)
; RUN: llc %s -mtriple=thumbv8m.main     -o - | FileCheck %s --check-prefixes V8M-COMMON,V8M-LE
; RUN: llc %s -mtriple=thumbebv8m.main   -o - | FileCheck %s --check-prefixes V8M-COMMON,V8M-BE
; RUN: llc %s -mtriple=thumbv8.1m.main   -o - | FileCheck %s --check-prefixes V81M-COMMON,V81M-LE
; RUN: llc %s -mtriple=thumbebv8.1m.main -o - | FileCheck %s --check-prefixes V81M-COMMON,V81M-BE
5
; 256-entry i32 table indexed by the (hardened) arguments of the CMSE entry
; functions below.
@arr = hidden local_unnamed_addr global [256 x i32] zeroinitializer, align 4
7
; CMSE entry: even though %idx is marked signext, the generated code
; re-sign-extends it (sxth) before indexing — Non-Secure callers are untrusted.
define i32 @access_i16(i16 signext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i16:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    sxth r0, r0
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i16:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    sxth r0, r0
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = sext i16 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
39
; CMSE entry: zeroext i16 argument is re-zero-extended (uxth) before use.
define i32 @access_u16(i16 zeroext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_u16:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    uxth r0, r0
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_u16:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    uxth r0, r0
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = zext i16 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
71
; CMSE entry: signext i8 argument is re-sign-extended (sxtb) before use.
define i32 @access_i8(i8 signext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i8:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    sxtb r0, r0
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i8:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    sxtb r0, r0
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = sext i8 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
103
; CMSE entry: zeroext i8 argument is re-zero-extended (uxtb) before use.
define i32 @access_u8(i8 zeroext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_u8:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    uxtb r0, r0
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_u8:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    uxtb r0, r0
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = zext i8 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
135
; CMSE entry: i1 argument is hardened by masking to a single bit
; (and #1 / rsbs / and #1) before it is used as an index.
define i32 @access_i1(i1 signext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i1:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    and r0, r0, #1
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    rsbs r0, r0, #0
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    and r0, r0, #1
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i1:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    and r0, r0, #1
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    rsbs r0, r0, #0
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    and r0, r0, #1
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = zext i1 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
171
; CMSE entry: non-byte-sized signed type (i5) is re-extended with sbfx.
define i32 @access_i5(i5 signext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i5:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    sbfx r0, r0, #0, #5
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i5:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    sbfx r0, r0, #0, #5
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = sext i5 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
203
; CMSE entry: non-byte-sized unsigned type (i5) is re-masked with and #31.
define i32 @access_u5(i5 zeroext %idx) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_u5:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    movw r1, :lower16:arr
; V8M-COMMON-NEXT:    and r0, r0, #31
; V8M-COMMON-NEXT:    movt r1, :upper16:arr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_u5:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    movw r1, :lower16:arr
; V81M-COMMON-NEXT:    and r0, r0, #31
; V81M-COMMON-NEXT:    movt r1, :upper16:arr
; V81M-COMMON-NEXT:    ldr.w r0, [r1, r0, lsl #2]
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %idxprom = zext i5 %idx to i32
  %arrayidx = getelementptr inbounds [256 x i32], ptr @arr, i32 0, i32 %idxprom
  %0 = load i32, ptr %arrayidx, align 4
  ret i32 %0
}
235
; CMSE entry: i33 split across r0/r1 (endian-dependent); the top bit is
; masked (and #1) and sign-materialized (rsbs) rather than trusted.
define i32 @access_i33(i33 %arg) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i33:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-LE-NEXT:        and r0, r1, #1
; V8M-BE-NEXT:        and r0, r0, #1
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    rsbs r0, r0, #0
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i33:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-LE-NEXT:        and r0, r1, #1
; V81M-BE-NEXT:        and r0, r0, #1
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    rsbs r0, r0, #0
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %shr = ashr i33 %arg, 32
  %conv = trunc nsw i33 %shr to i32
  ret i32 %conv
}
264
; CMSE entry: unsigned variant of access_i33 — the top bit is masked
; (and #1) but not sign-materialized.
define i32 @access_u33(i33 %arg) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_u33:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-LE-NEXT:        and r0, r1, #1
; V8M-BE-NEXT:        and r0, r0, #1
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_u33:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-LE-NEXT:        and r0, r1, #1
; V81M-BE-NEXT:        and r0, r0, #1
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %shr = lshr i33 %arg, 32
  %conv = trunc nuw nsw i33 %shr to i32
  ret i32 %conv
}
291
; CMSE entry: byval i65 — the register part (r0-r3) is spilled to a local
; stack slot, then the top bit is loaded and masked/negated (endian-dependent
; offset) instead of being used directly.
define i32 @access_i65(ptr byval(i65) %0) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_i65:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    sub sp, #16
; V8M-COMMON-NEXT:    stm.w sp, {r0, r1, r2, r3}
; V8M-LE-NEXT:        ldrb.w r0, [sp, #8]
; V8M-LE-NEXT:        and r0, r0, #1
; V8M-LE-NEXT:        rsbs r0, r0, #0
; V8M-BE-NEXT:        movs r1, #0
; V8M-BE-NEXT:        sub.w r0, r1, r0, lsr #24
; V8M-COMMON-NEXT:    add sp, #16
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_i65:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    sub sp, #16
; V81M-COMMON-NEXT:    add sp, #4
; V81M-COMMON-NEXT:    stm.w sp, {r0, r1, r2, r3}
; V81M-LE-NEXT:        ldrb.w r0, [sp, #8]
; V81M-LE-NEXT:        and r0, r0, #1
; V81M-LE-NEXT:        rsbs r0, r0, #0
; V81M-BE-NEXT:        movs r1, #0
; V81M-BE-NEXT:        sub.w r0, r1, r0, lsr #24
; V81M-COMMON-NEXT:    sub sp, #4
; V81M-COMMON-NEXT:    add sp, #16
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %arg = load i65, ptr %0, align 8
  %shr = ashr i65 %arg, 64
  %conv = trunc nsw i65 %shr to i32
  ret i32 %conv
}
333
; CMSE entry: unsigned variant of access_i65 — top bit is extracted
; (ldrb / lsrs, endian-dependent) without sign materialization.
define i32 @access_u65(ptr byval(i65) %0) "cmse_nonsecure_entry" {
; V8M-COMMON-LABEL: access_u65:
; V8M-COMMON:       @ %bb.0: @ %entry
; V8M-COMMON-NEXT:    sub sp, #16
; V8M-COMMON-NEXT:    stm.w sp, {r0, r1, r2, r3}
; V8M-LE-NEXT:        ldrb.w r0, [sp, #8]
; V8M-BE-NEXT:        lsrs r0, r0, #24
; V8M-COMMON-NEXT:    add sp, #16
; V8M-COMMON-NEXT:    mov r1, lr
; V8M-COMMON-NEXT:    mov r2, lr
; V8M-COMMON-NEXT:    mov r3, lr
; V8M-COMMON-NEXT:    mov r12, lr
; V8M-COMMON-NEXT:    msr apsr_nzcvq, lr
; V8M-COMMON-NEXT:    bxns lr
;
; V81M-COMMON-LABEL: access_u65:
; V81M-COMMON:       @ %bb.0: @ %entry
; V81M-COMMON-NEXT:    vstr fpcxtns, [sp, #-4]!
; V81M-COMMON-NEXT:    sub sp, #16
; V81M-COMMON-NEXT:    add sp, #4
; V81M-COMMON-NEXT:    stm.w sp, {r0, r1, r2, r3}
; V81M-LE-NEXT:        ldrb.w r0, [sp, #8]
; V81M-BE-NEXT:        lsrs r0, r0, #24
; V81M-COMMON-NEXT:    sub sp, #4
; V81M-COMMON-NEXT:    add sp, #16
; V81M-COMMON-NEXT:    vscclrm {s0, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, vpr}
; V81M-COMMON-NEXT:    vldr fpcxtns, [sp], #4
; V81M-COMMON-NEXT:    clrm {r1, r2, r3, r12, apsr}
; V81M-COMMON-NEXT:    bxns lr
entry:
  %arg = load i65, ptr %0, align 8
  %shr = lshr i65 %arg, 64
  %conv = trunc nuw nsw i65 %shr to i32
  ret i32 %conv
}
369