; xref: /llvm-project/llvm/test/CodeGen/AArch64/stack-tagging-unchecked-ld-st.ll (revision 5ddce70ef0e5a641d7fea95e31fc5e2439cb98cb)
; RUN: llc < %s -mtriple=aarch64 -mattr=+mte | FileCheck %s --check-prefixes=DEFAULT,COMMON
; RUN: llc < %s -mtriple=aarch64 -mattr=+mte -stack-tagging-unchecked-ld-st=never | FileCheck %s --check-prefixes=NEVER,COMMON
; RUN: llc < %s -mtriple=aarch64 -mattr=+mte -stack-tagging-unchecked-ld-st=always | FileCheck %s --check-prefixes=ALWAYS,COMMON

; External helpers taking a pointer to a stack slot of each access width.
; Passing the alloca to one of these makes it escape, so the stack-tagging
; pass must consider the allocation tagged when lowering later loads/stores.
declare void @use8(ptr)
declare void @use16(ptr)
declare void @use32(ptr)
declare void @use64(ptr)
declare void @use2x64(ptr)
declare void @llvm.lifetime.start.p0(i64, ptr nocapture)
declare void @llvm.lifetime.end.p0(i64, ptr nocapture)

; 64-bit reload of an escaped stack slot: DEFAULT and ALWAYS may emit an
; unchecked SP-based ldr; NEVER must keep the tagged (x-register) base.
define i64 @CallLd64() sanitize_memtag {
entry:
  %x = alloca i64, align 4
  call void @use64(ptr %x)
  %a = load i64, ptr %x
  ret i64 %a
}

; COMMON:  CallLd64:
; COMMON:  bl  use64

; ALWAYS:  ldr x0, [sp]
; DEFAULT: ldr x0, [sp]
; NEVER:   ldr x0, [x{{.*}}]

; COMMON:  ret


; Same pattern as CallLd64 but with a 32-bit load (ldr w0).
define i32 @CallLd32() sanitize_memtag {
entry:
  %x = alloca i32, align 4
  call void @use32(ptr %x)
  %a = load i32, ptr %x
  ret i32 %a
}

; COMMON:  CallLd32:
; COMMON:  bl  use32

; ALWAYS:  ldr w0, [sp]
; DEFAULT: ldr w0, [sp]
; NEVER:   ldr w0, [x{{.*}}]

; COMMON:  ret


; Same pattern with a 16-bit load (ldrh).
define i16 @CallLd16() sanitize_memtag {
entry:
  %x = alloca i16, align 4
  call void @use16(ptr %x)
  %a = load i16, ptr %x
  ret i16 %a
}

; COMMON:  CallLd16:
; COMMON:  bl  use16

; ALWAYS:  ldrh w0, [sp]
; DEFAULT: ldrh w0, [sp]
; NEVER:   ldrh w0, [x{{.*}}]

; COMMON:  ret


; Same pattern with an 8-bit load (ldrb).
define i8 @CallLd8() sanitize_memtag {
entry:
  %x = alloca i8, align 4
  call void @use8(ptr %x)
  %a = load i8, ptr %x
  ret i8 %a
}

; COMMON:  CallLd8:
; COMMON:  bl  use8

; ALWAYS:  ldrb w0, [sp]
; DEFAULT: ldrb w0, [sp]
; NEVER:   ldrb w0, [x{{.*}}]

; COMMON:  ret


; 64-bit store to an escaped slot between two calls: DEFAULT/ALWAYS may use
; an unchecked SP-based str; NEVER must go through the tagged base register.
define void @CallSt64Call() sanitize_memtag {
entry:
  %x = alloca i64, align 4
  call void @use64(ptr %x)
  store i64 42, ptr %x
  call void @use64(ptr %x)
  ret void
}

; COMMON:  CallSt64Call:
; COMMON:  bl  use64

; ALWAYS:  str x{{.*}}, [sp]
; DEFAULT: str x{{.*}}, [sp]
; NEVER:   str x{{.*}}, [x{{.*}}]

; COMMON:  bl  use64
; COMMON:  ret


; Same store pattern with a 32-bit store (str w).
define void @CallSt32Call() sanitize_memtag {
entry:
  %x = alloca i32, align 4
  call void @use32(ptr %x)
  store i32 42, ptr %x
  call void @use32(ptr %x)
  ret void
}

; COMMON:  CallSt32Call:
; COMMON:  bl  use32

; ALWAYS:  str w{{.*}}, [sp]
; DEFAULT: str w{{.*}}, [sp]
; NEVER:   str w{{.*}}, [x{{.*}}]

; COMMON:  bl  use32
; COMMON:  ret


; Same store pattern with a 16-bit store (strh).
define void @CallSt16Call() sanitize_memtag {
entry:
  %x = alloca i16, align 4
  call void @use16(ptr %x)
  store i16 42, ptr %x
  call void @use16(ptr %x)
  ret void
}


; COMMON:  CallSt16Call:
; COMMON:  bl  use16

; ALWAYS:  strh w{{.*}}, [sp]
; DEFAULT: strh w{{.*}}, [sp]
; NEVER:   strh w{{.*}}, [x{{.*}}]

; COMMON:  bl  use16
; COMMON:  ret


; Same store pattern with an 8-bit store (strb).
define void @CallSt8Call() sanitize_memtag {
entry:
  %x = alloca i8, align 4
  call void @use8(ptr %x)
  store i8 42, ptr %x
  call void @use8(ptr %x)
  ret void
}

; COMMON:  CallSt8Call:
; COMMON:  bl  use8

; ALWAYS:  strb w{{.*}}, [sp]
; DEFAULT: strb w{{.*}}, [sp]
; NEVER:   strb w{{.*}}, [x{{.*}}]

; COMMON:  bl  use8
; COMMON:  ret


; Two adjacent 64-bit stores that the backend merges into a store-pair (stp):
; the unchecked form must be SP-based under DEFAULT/ALWAYS, tagged under NEVER.
define void @CallStPair(i64 %z) sanitize_memtag {
entry:
  %x = alloca [2 x i64], align 8
  call void @use2x64(ptr %x)
  store i64 %z, ptr %x, align 8
  %x1 = getelementptr inbounds [2 x i64], ptr %x, i64 0, i64 1
  store i64 %z, ptr %x1, align 8
  call void @use2x64(ptr %x)
  ret void
}

; COMMON:  CallStPair:
; COMMON:  bl  use2x64

; ALWAYS:  stp {{.*}}, [sp]
; DEFAULT: stp {{.*}}, [sp]
; NEVER:   stp {{.*}}, [x{{.*}}]

; COMMON:  bl  use2x64
; COMMON:  ret

; One of the two allocas will end up out of range of ldrb [sp].
; Under ALWAYS, the out-of-range slot's tag is rematerialized with ldg so the
; access can still be made through an SP-derived (untagged) base.
define dso_local i8 @LargeFrame() sanitize_memtag {
entry:
  %x = alloca [4096 x i8], align 4
  %y = alloca [4096 x i8], align 4
  call void @use8(ptr %x)
  call void @use8(ptr %y)
  %0 = load i8, ptr %x, align 4
  %1 = load i8, ptr %y, align 4
  %add = add i8 %1, %0
  ret i8 %add
}

; COMMON: LargeFrame:
; COMMON: bl use8
; COMMON: bl use8

; NEVER:  ldrb [[A:w.*]], [x{{.*}}]
; NEVER:  ldrb [[B:w.*]], [x{{.*}}]

; DEFAULT:  ldrb [[A:w.*]], [x{{.*}}]
; DEFAULT:  ldrb [[B:w.*]], [x{{.*}}]

; ALWAYS-DAG: ldg [[PA:x.*]], [x{{.*}}]
; ALWAYS-DAG: ldrb [[B:w.*]], [sp]
; ALWAYS-DAG: ldrb [[A:w.*]], [[[PA]]]

; COMMON: ret

; One of these allocas is closer to FP than to SP, and within 256 bytes
; of the former (see hardcoded limit in resolveFrameOffsetReference).
; It could be lowered to an FP-relative load, but not when doing an
; unchecked access to tagged memory!
define i8 @FPOffset() "frame-pointer"="all" sanitize_memtag {
  %x = alloca [200 x i8], align 4
  %y = alloca [200 x i8], align 4
  %z = alloca [200 x i8], align 4
  call void @use8(ptr %x)
  call void @use8(ptr %y)
  call void @use8(ptr %z)
  %x1 = load i8, ptr %x, align 4
  %y1 = load i8, ptr %y, align 4
  %z1 = load i8, ptr %z, align 4
  %a = add i8 %x1, %y1
  %b = add i8 %a, %z1
  ret i8 %b
}

; COMMON: FPOffset:
; COMMON: bl use8
; COMMON: bl use8
; COMMON: bl use8

; All three loads are SP-based.
; ALWAYS-DAG: ldrb  w{{.*}}, [sp, #416]
; ALWAYS-DAG: ldrb  w{{.*}}, [sp, #208]
; ALWAYS-DAG: ldrb  w{{.*}}, [sp]

; DEFAULT-DAG: ldrb  w{{.*}}, [sp, #416]
; DEFAULT-DAG: ldrb  w{{.*}}, [sp, #208]
; DEFAULT-DAG: ldrb  w{{.*}}, [sp]

; NEVER-DAG: ldrb  w{{.*}}, [x{{.*}}]
; NEVER-DAG: ldrb  w{{.*}}, [x{{.*}}]
; NEVER-DAG: ldrb  w{{.*}}, [x{{.*}}]

; COMMON: ret