; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
; Test alloca instrumentation.
;
; RUN: opt < %s -passes=hwasan -hwasan-use-stack-safety=0 -S | FileCheck %s
; RUN: opt < %s -passes=hwasan -hwasan-use-stack-safety=0 -hwasan-instrument-with-calls=0 -S | FileCheck %s --check-prefixes=INLINE

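; The first RUN line exercises the default x86 configuration, where granule
; tagging and access checks are emitted as runtime calls (__hwasan_tag_memory,
; __hwasan_store1). The second RUN line passes -hwasan-instrument-with-calls=0,
; and the INLINE prefix checks the inlined form, where the shadow base is
; derived from the value loaded from @__hwasan_tls and shadow bytes are read
; and written directly.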
target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
target triple = "x86_64-unknown-linux-gnu"

declare void @use32(ptr)

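; test_alloca: a 4-byte alloca is padded to a full 16-byte shadow granule
; ({ i32, [12 x i8] }, align 16). A 6-bit stack base tag is derived from the
; frame address, inserted into bits 57-62 of the alloca address to form
; %x.hwasan, and the granule is tagged on entry and retagged with a
; use-after-return (UAR) tag before returning, as the checks below reflect.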
define void @test_alloca() sanitize_hwaddress {
; CHECK-LABEL: define void @test_alloca
; CHECK-SAME: () #[[ATTR0:[0-9]+]] personality ptr @__hwasan_personality_thunk {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr null)
; CHECK-NEXT:    [[TMP0:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
; CHECK-NEXT:    [[TMP1:%.*]] = ptrtoint ptr [[TMP0]] to i64
; CHECK-NEXT:    [[TMP2:%.*]] = lshr i64 [[TMP1]], 20
; CHECK-NEXT:    [[TMP3:%.*]] = xor i64 [[TMP1]], [[TMP2]]
; CHECK-NEXT:    [[HWASAN_STACK_BASE_TAG:%.*]] = and i64 [[TMP3]], 63
; CHECK-NEXT:    [[TMP4:%.*]] = lshr i64 [[TMP1]], 57
; CHECK-NEXT:    [[HWASAN_UAR_TAG:%.*]] = and i64 [[TMP4]], 63
; CHECK-NEXT:    [[X:%.*]] = alloca { i32, [12 x i8] }, align 16
; CHECK-NEXT:    [[TMP5:%.*]] = xor i64 [[HWASAN_STACK_BASE_TAG]], 0
; CHECK-NEXT:    [[TMP6:%.*]] = ptrtoint ptr [[X]] to i64
; CHECK-NEXT:    [[TMP7:%.*]] = and i64 [[TMP6]], -9079256848778919937
; CHECK-NEXT:    [[TMP8:%.*]] = shl i64 [[TMP5]], 57
; CHECK-NEXT:    [[TMP9:%.*]] = or i64 [[TMP7]], [[TMP8]]
; CHECK-NEXT:    [[X_HWASAN:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT:    [[TMP10:%.*]] = trunc i64 [[TMP5]] to i8
; CHECK-NEXT:    call void @__hwasan_tag_memory(ptr [[X]], i8 [[TMP10]], i64 16)
; CHECK-NEXT:    call void @use32(ptr nonnull [[X_HWASAN]])
; CHECK-NEXT:    [[TMP11:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
; CHECK-NEXT:    call void @__hwasan_tag_memory(ptr [[X]], i8 [[TMP11]], i64 16)
; CHECK-NEXT:    ret void
;
; INLINE-LABEL: define void @test_alloca
; INLINE-SAME: () #[[ATTR0:[0-9]+]] personality ptr @__hwasan_personality_thunk {
; INLINE-NEXT:  entry:
; INLINE-NEXT:    [[TMP0:%.*]] = load i64, ptr @__hwasan_tls, align 8
; INLINE-NEXT:    [[TMP1:%.*]] = and i64 [[TMP0]], -9079256848778919937
; INLINE-NEXT:    [[TMP2:%.*]] = ashr i64 [[TMP0]], 3
; INLINE-NEXT:    [[TMP3:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
; INLINE-NEXT:    [[TMP4:%.*]] = ptrtoint ptr [[TMP3]] to i64
; INLINE-NEXT:    [[TMP5:%.*]] = shl i64 [[TMP4]], 44
; INLINE-NEXT:    [[TMP6:%.*]] = or i64 ptrtoint (ptr @test_alloca to i64), [[TMP5]]
; INLINE-NEXT:    [[TMP7:%.*]] = inttoptr i64 [[TMP1]] to ptr
; INLINE-NEXT:    store i64 [[TMP6]], ptr [[TMP7]], align 8
; INLINE-NEXT:    [[TMP8:%.*]] = ashr i64 [[TMP0]], 56
; INLINE-NEXT:    [[TMP9:%.*]] = shl nuw nsw i64 [[TMP8]], 12
; INLINE-NEXT:    [[TMP10:%.*]] = xor i64 [[TMP9]], -1
; INLINE-NEXT:    [[TMP11:%.*]] = add i64 [[TMP0]], 8
; INLINE-NEXT:    [[TMP12:%.*]] = and i64 [[TMP11]], [[TMP10]]
; INLINE-NEXT:    store i64 [[TMP12]], ptr @__hwasan_tls, align 8
; INLINE-NEXT:    [[TMP13:%.*]] = or i64 [[TMP1]], 4294967295
; INLINE-NEXT:    [[HWASAN_SHADOW:%.*]] = add i64 [[TMP13]], 1
; INLINE-NEXT:    [[TMP14:%.*]] = inttoptr i64 [[HWASAN_SHADOW]] to ptr
; INLINE-NEXT:    [[TMP15:%.*]] = lshr i64 [[TMP4]], 57
; INLINE-NEXT:    [[HWASAN_UAR_TAG:%.*]] = and i64 [[TMP15]], 63
; INLINE-NEXT:    [[X:%.*]] = alloca { i32, [12 x i8] }, align 16
; INLINE-NEXT:    [[TMP16:%.*]] = xor i64 [[TMP2]], 0
; INLINE-NEXT:    [[TMP17:%.*]] = ptrtoint ptr [[X]] to i64
; INLINE-NEXT:    [[TMP18:%.*]] = and i64 [[TMP17]], -9079256848778919937
; INLINE-NEXT:    [[TMP19:%.*]] = shl i64 [[TMP16]], 57
; INLINE-NEXT:    [[TMP20:%.*]] = or i64 [[TMP18]], [[TMP19]]
; INLINE-NEXT:    [[X_HWASAN:%.*]] = inttoptr i64 [[TMP20]] to ptr
; INLINE-NEXT:    [[TMP21:%.*]] = trunc i64 [[TMP16]] to i8
; INLINE-NEXT:    [[TMP22:%.*]] = ptrtoint ptr [[X]] to i64
; INLINE-NEXT:    [[TMP23:%.*]] = and i64 [[TMP22]], -9079256848778919937
; INLINE-NEXT:    [[TMP24:%.*]] = lshr i64 [[TMP23]], 4
; INLINE-NEXT:    [[TMP25:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP24]]
; INLINE-NEXT:    [[TMP26:%.*]] = getelementptr i8, ptr [[TMP25]], i32 0
; INLINE-NEXT:    store i8 4, ptr [[TMP26]], align 1
; INLINE-NEXT:    [[TMP27:%.*]] = getelementptr i8, ptr [[X]], i32 15
; INLINE-NEXT:    store i8 [[TMP21]], ptr [[TMP27]], align 1
; INLINE-NEXT:    call void @use32(ptr nonnull [[X_HWASAN]])
; INLINE-NEXT:    [[TMP28:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
; INLINE-NEXT:    [[TMP29:%.*]] = ptrtoint ptr [[X]] to i64
; INLINE-NEXT:    [[TMP30:%.*]] = and i64 [[TMP29]], -9079256848778919937
; INLINE-NEXT:    [[TMP31:%.*]] = lshr i64 [[TMP30]], 4
; INLINE-NEXT:    [[TMP32:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP31]]
; INLINE-NEXT:    call void @llvm.memset.p0.i64(ptr align 1 [[TMP32]], i8 [[TMP28]], i64 1, i1 false)
; INLINE-NEXT:    ret void
;
entry:
  %x = alloca i32, align 4
  call void @use32(ptr nonnull %x)
  ret void
}

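; test_simple: a 1-byte alloca is likewise rounded up to a 16-byte granule.
; In the INLINE output the shadow byte holds the short-granule size (1) and
; the real tag is stored in the granule's last byte; the volatile store is
; then checked via __hwasan_store1 in the call-based output and via the
; inlined tag-mismatch branch sequence in the INLINE output.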
define i32 @test_simple(ptr %a) sanitize_hwaddress {
; CHECK-LABEL: define i32 @test_simple
; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] personality ptr @__hwasan_personality_thunk {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr null)
; CHECK-NEXT:    [[TMP0:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
; CHECK-NEXT:    [[TMP1:%.*]] = ptrtoint ptr [[TMP0]] to i64
; CHECK-NEXT:    [[TMP2:%.*]] = lshr i64 [[TMP1]], 20
; CHECK-NEXT:    [[TMP3:%.*]] = xor i64 [[TMP1]], [[TMP2]]
; CHECK-NEXT:    [[HWASAN_STACK_BASE_TAG:%.*]] = and i64 [[TMP3]], 63
; CHECK-NEXT:    [[TMP4:%.*]] = lshr i64 [[TMP1]], 57
; CHECK-NEXT:    [[HWASAN_UAR_TAG:%.*]] = and i64 [[TMP4]], 63
; CHECK-NEXT:    [[BUF_SROA_0:%.*]] = alloca { i8, [15 x i8] }, align 16
; CHECK-NEXT:    [[TMP5:%.*]] = xor i64 [[HWASAN_STACK_BASE_TAG]], 0
; CHECK-NEXT:    [[TMP6:%.*]] = ptrtoint ptr [[BUF_SROA_0]] to i64
; CHECK-NEXT:    [[TMP7:%.*]] = and i64 [[TMP6]], -9079256848778919937
; CHECK-NEXT:    [[TMP8:%.*]] = shl i64 [[TMP5]], 57
; CHECK-NEXT:    [[TMP9:%.*]] = or i64 [[TMP7]], [[TMP8]]
; CHECK-NEXT:    [[BUF_SROA_0_HWASAN:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT:    call void @llvm.lifetime.start.p0(i64 16, ptr nonnull [[BUF_SROA_0]])
; CHECK-NEXT:    [[TMP10:%.*]] = trunc i64 [[TMP5]] to i8
; CHECK-NEXT:    call void @__hwasan_tag_memory(ptr [[BUF_SROA_0]], i8 [[TMP10]], i64 16)
; CHECK-NEXT:    [[TMP11:%.*]] = ptrtoint ptr [[BUF_SROA_0_HWASAN]] to i64
; CHECK-NEXT:    call void @__hwasan_store1(i64 [[TMP11]])
; CHECK-NEXT:    store volatile i8 0, ptr [[BUF_SROA_0_HWASAN]], align 4
; CHECK-NEXT:    [[TMP12:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
; CHECK-NEXT:    call void @__hwasan_tag_memory(ptr [[BUF_SROA_0]], i8 [[TMP12]], i64 16)
; CHECK-NEXT:    call void @llvm.lifetime.end.p0(i64 16, ptr nonnull [[BUF_SROA_0]])
; CHECK-NEXT:    ret i32 0
;
; INLINE-LABEL: define i32 @test_simple
; INLINE-SAME: (ptr [[A:%.*]]) #[[ATTR0]] personality ptr @__hwasan_personality_thunk {
; INLINE-NEXT:  entry:
; INLINE-NEXT:    [[TMP0:%.*]] = load i64, ptr @__hwasan_tls, align 8
; INLINE-NEXT:    [[TMP1:%.*]] = and i64 [[TMP0]], -9079256848778919937
; INLINE-NEXT:    [[TMP2:%.*]] = ashr i64 [[TMP0]], 3
; INLINE-NEXT:    [[TMP3:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
; INLINE-NEXT:    [[TMP4:%.*]] = ptrtoint ptr [[TMP3]] to i64
; INLINE-NEXT:    [[TMP5:%.*]] = shl i64 [[TMP4]], 44
; INLINE-NEXT:    [[TMP6:%.*]] = or i64 ptrtoint (ptr @test_simple to i64), [[TMP5]]
; INLINE-NEXT:    [[TMP7:%.*]] = inttoptr i64 [[TMP1]] to ptr
; INLINE-NEXT:    store i64 [[TMP6]], ptr [[TMP7]], align 8
; INLINE-NEXT:    [[TMP8:%.*]] = ashr i64 [[TMP0]], 56
; INLINE-NEXT:    [[TMP9:%.*]] = shl nuw nsw i64 [[TMP8]], 12
; INLINE-NEXT:    [[TMP10:%.*]] = xor i64 [[TMP9]], -1
; INLINE-NEXT:    [[TMP11:%.*]] = add i64 [[TMP0]], 8
; INLINE-NEXT:    [[TMP12:%.*]] = and i64 [[TMP11]], [[TMP10]]
; INLINE-NEXT:    store i64 [[TMP12]], ptr @__hwasan_tls, align 8
; INLINE-NEXT:    [[TMP13:%.*]] = or i64 [[TMP1]], 4294967295
; INLINE-NEXT:    [[HWASAN_SHADOW:%.*]] = add i64 [[TMP13]], 1
; INLINE-NEXT:    [[TMP14:%.*]] = inttoptr i64 [[HWASAN_SHADOW]] to ptr
; INLINE-NEXT:    [[TMP15:%.*]] = lshr i64 [[TMP4]], 57
; INLINE-NEXT:    [[HWASAN_UAR_TAG:%.*]] = and i64 [[TMP15]], 63
; INLINE-NEXT:    [[BUF_SROA_0:%.*]] = alloca { i8, [15 x i8] }, align 16
; INLINE-NEXT:    [[TMP16:%.*]] = xor i64 [[TMP2]], 0
; INLINE-NEXT:    [[TMP17:%.*]] = ptrtoint ptr [[BUF_SROA_0]] to i64
; INLINE-NEXT:    [[TMP18:%.*]] = and i64 [[TMP17]], -9079256848778919937
; INLINE-NEXT:    [[TMP19:%.*]] = shl i64 [[TMP16]], 57
; INLINE-NEXT:    [[TMP20:%.*]] = or i64 [[TMP18]], [[TMP19]]
; INLINE-NEXT:    [[BUF_SROA_0_HWASAN:%.*]] = inttoptr i64 [[TMP20]] to ptr
; INLINE-NEXT:    call void @llvm.lifetime.start.p0(i64 16, ptr nonnull [[BUF_SROA_0]])
; INLINE-NEXT:    [[TMP21:%.*]] = trunc i64 [[TMP16]] to i8
; INLINE-NEXT:    [[TMP22:%.*]] = ptrtoint ptr [[BUF_SROA_0]] to i64
; INLINE-NEXT:    [[TMP23:%.*]] = and i64 [[TMP22]], -9079256848778919937
; INLINE-NEXT:    [[TMP24:%.*]] = lshr i64 [[TMP23]], 4
; INLINE-NEXT:    [[TMP25:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP24]]
; INLINE-NEXT:    [[TMP26:%.*]] = getelementptr i8, ptr [[TMP25]], i32 0
; INLINE-NEXT:    store i8 1, ptr [[TMP26]], align 1
; INLINE-NEXT:    [[TMP27:%.*]] = getelementptr i8, ptr [[BUF_SROA_0]], i32 15
; INLINE-NEXT:    store i8 [[TMP21]], ptr [[TMP27]], align 1
; INLINE-NEXT:    [[TMP28:%.*]] = ptrtoint ptr [[BUF_SROA_0_HWASAN]] to i64
; INLINE-NEXT:    [[TMP29:%.*]] = lshr i64 [[TMP28]], 57
; INLINE-NEXT:    [[TMP30:%.*]] = trunc i64 [[TMP29]] to i8
; INLINE-NEXT:    [[TMP31:%.*]] = and i64 [[TMP28]], -9079256848778919937
; INLINE-NEXT:    [[TMP32:%.*]] = lshr i64 [[TMP31]], 4
; INLINE-NEXT:    [[TMP33:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP32]]
; INLINE-NEXT:    [[TMP34:%.*]] = load i8, ptr [[TMP33]], align 1
; INLINE-NEXT:    [[TMP35:%.*]] = icmp ne i8 [[TMP30]], [[TMP34]]
; INLINE-NEXT:    br i1 [[TMP35]], label [[TMP36:%.*]], label [[TMP50:%.*]], !prof [[PROF1:![0-9]+]]
; INLINE:       36:
; INLINE-NEXT:    [[TMP37:%.*]] = icmp ugt i8 [[TMP34]], 15
; INLINE-NEXT:    br i1 [[TMP37]], label [[TMP38:%.*]], label [[TMP39:%.*]], !prof [[PROF1]]
; INLINE:       38:
; INLINE-NEXT:    call void asm sideeffect "int3\0Anopl 80([[RAX:%.*]])", "{rdi}"(i64 [[TMP28]])
; INLINE-NEXT:    unreachable
; INLINE:       39:
; INLINE-NEXT:    [[TMP40:%.*]] = and i64 [[TMP28]], 15
; INLINE-NEXT:    [[TMP41:%.*]] = trunc i64 [[TMP40]] to i8
; INLINE-NEXT:    [[TMP42:%.*]] = add i8 [[TMP41]], 0
; INLINE-NEXT:    [[TMP43:%.*]] = icmp uge i8 [[TMP42]], [[TMP34]]
; INLINE-NEXT:    br i1 [[TMP43]], label [[TMP38]], label [[TMP44:%.*]], !prof [[PROF1]]
; INLINE:       44:
; INLINE-NEXT:    [[TMP45:%.*]] = or i64 [[TMP31]], 15
; INLINE-NEXT:    [[TMP46:%.*]] = inttoptr i64 [[TMP45]] to ptr
; INLINE-NEXT:    [[TMP47:%.*]] = load i8, ptr [[TMP46]], align 1
; INLINE-NEXT:    [[TMP48:%.*]] = icmp ne i8 [[TMP30]], [[TMP47]]
; INLINE-NEXT:    br i1 [[TMP48]], label [[TMP38]], label [[TMP49:%.*]], !prof [[PROF1]]
; INLINE:       49:
; INLINE-NEXT:    br label [[TMP50]]
; INLINE:       50:
; INLINE-NEXT:    store volatile i8 0, ptr [[BUF_SROA_0_HWASAN]], align 4
; INLINE-NEXT:    [[TMP51:%.*]] = trunc i64 [[HWASAN_UAR_TAG]] to i8
; INLINE-NEXT:    [[TMP52:%.*]] = ptrtoint ptr [[BUF_SROA_0]] to i64
; INLINE-NEXT:    [[TMP53:%.*]] = and i64 [[TMP52]], -9079256848778919937
; INLINE-NEXT:    [[TMP54:%.*]] = lshr i64 [[TMP53]], 4
; INLINE-NEXT:    [[TMP55:%.*]] = getelementptr i8, ptr [[TMP14]], i64 [[TMP54]]
; INLINE-NEXT:    call void @llvm.memset.p0.i64(ptr align 1 [[TMP55]], i8 [[TMP51]], i64 1, i1 false)
; INLINE-NEXT:    call void @llvm.lifetime.end.p0(i64 16, ptr nonnull [[BUF_SROA_0]])
; INLINE-NEXT:    ret i32 0
;
entry:
  %buf.sroa.0 = alloca i8, align 4
  call void @llvm.lifetime.start.p0(i64 1, ptr nonnull %buf.sroa.0)
  store volatile i8 0, ptr %buf.sroa.0, align 4
  call void @llvm.lifetime.end.p0(i64 1, ptr nonnull %buf.sroa.0)
  ret i32 0
}

; Function Attrs: argmemonly mustprogress nofree nosync nounwind willreturn
declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture)

; Function Attrs: argmemonly mustprogress nofree nosync nounwind willreturn
declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture)