xref: /llvm-project/llvm/test/Instrumentation/MemorySanitizer/atomics.ll (revision dc3875e468360b59300b02fdd0f940f4f6429629)
1; RUN: opt < %s -msan-check-access-address=0 -S -passes=msan 2>&1 | FileCheck %s --check-prefixes=CHECK,NOORIGINS --implicit-check-not="call void @__msan_warning"
2; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=1 -S -passes=msan 2>&1 | FileCheck %s --check-prefixes=CHECK,ORIGINS --implicit-check-not="call void @__msan_warning"
3; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=2 -S -passes=msan 2>&1 | FileCheck %s --check-prefixes=CHECK,ORIGINS --implicit-check-not="call void @__msan_warning"
4; RUN: opt < %s -msan-check-access-address=0 -msan-track-origins=1 -S -passes=msan -mtriple=s390x-unknown-linux 2>&1 | FileCheck %s --check-prefix=EXT
5; REQUIRES: x86-registered-target, systemz-registered-target
6
7target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
8target triple = "x86_64-unknown-linux-gnu"
9
10; atomicrmw xchg: store clean shadow, return clean shadow
11
12define i32 @AtomicRmwXchg(ptr %p, i32 %x) sanitize_memory {
13entry:
  ; Per the CHECK lines below, MSan is expected to store a clean (zero) i32
  ; shadow to the shadow of *%p before the atomicrmw, keep the seq_cst
  ; ordering, and write a clean shadow for the return value to
  ; @__msan_retval_tls.
14  %0 = atomicrmw xchg ptr %p, i32 %x seq_cst
15  ret i32 %0
16}
17
18; CHECK-LABEL: @AtomicRmwXchg
19; CHECK: store i32 0,
20; CHECK: atomicrmw xchg {{.*}} seq_cst
21; CHECK: store i32 0, {{.*}} @__msan_retval_tls
22; CHECK: ret i32
23
24; atomicrmw xchg ptr: same as above, but with a pointer-sized (i64) shadow
25
26define ptr @AtomicRmwXchgPtr(ptr %p, ptr %x) sanitize_memory {
27entry:
  ; Same instrumentation pattern as the i32 xchg case, but the shadow is
  ; pointer-sized: the CHECK lines expect clean i64 shadow stores before the
  ; atomicrmw and into @__msan_retval_tls.
28  %0 = atomicrmw xchg ptr %p, ptr %x seq_cst
29  ret ptr %0
30}
31
32; CHECK-LABEL: @AtomicRmwXchgPtr
33; CHECK: store i64 0,
34; CHECK: atomicrmw xchg {{.*}} seq_cst
35; CHECK: store i64 0, {{.*}} @__msan_retval_tls
36; CHECK: ret ptr
37
38
39; atomicrmw max: exactly the same as above
40
41define i32 @AtomicRmwMax(ptr %p, i32 %x) sanitize_memory {
42entry:
  ; atomicrmw max gets the same treatment as xchg (per the CHECK lines below):
  ; clean i32 shadow stored before the operation, ordering preserved, clean
  ; shadow for the returned value.
43  %0 = atomicrmw max ptr %p, i32 %x seq_cst
44  ret i32 %0
45}
46
47; CHECK-LABEL: @AtomicRmwMax
48; CHECK: store i32 0,
49; CHECK: atomicrmw max {{.*}} seq_cst
50; CHECK: store i32 0, {{.*}} @__msan_retval_tls
51; CHECK: ret i32
52
53
54; cmpxchg: the same as above, but also check %a shadow
55
56define i32 @Cmpxchg(ptr %p, i32 %a, i32 %b) sanitize_memory {
57entry:
  ; For cmpxchg the CHECK lines additionally expect a check of the shadow of
  ; the comparison operand %a: an icmp + br leading to an
  ; @__msan_warning*_noreturn call (variant depends on origin tracking),
  ; before the cmpxchg itself (which keeps its seq_cst seq_cst ordering).
58  %pair = cmpxchg ptr %p, i32 %a, i32 %b seq_cst seq_cst
59  %0 = extractvalue { i32, i1 } %pair, 0
60  ret i32 %0
61}
62
63; CHECK-LABEL: @Cmpxchg
64; CHECK: store i32 0,
65; CHECK: icmp
66; CHECK: br
67; NOORIGINS: @__msan_warning_noreturn()
68; ORIGINS: @__msan_warning_with_origin_noreturn(
69; CHECK: cmpxchg {{.*}} seq_cst seq_cst
70; CHECK: store i32 0, {{.*}} @__msan_retval_tls
71; CHECK: ret i32
72
73
74; relaxed cmpxchg: bump up to "release monotonic"
75
76define i32 @CmpxchgMonotonic(ptr %p, i32 %a, i32 %b) sanitize_memory {
77entry:
  ; A relaxed (monotonic monotonic) cmpxchg is expected to be strengthened by
  ; the instrumentation to "release monotonic" (see the CHECK line below), in
  ; addition to the same %a shadow check as the seq_cst case.
78  %pair = cmpxchg ptr %p, i32 %a, i32 %b monotonic monotonic
79  %0 = extractvalue { i32, i1 } %pair, 0
80  ret i32 %0
81}
82
83; CHECK-LABEL: @CmpxchgMonotonic
84; CHECK: store i32 0,
85; CHECK: icmp
86; CHECK: br
87; NOORIGINS: @__msan_warning_noreturn()
88; ORIGINS: @__msan_warning_with_origin_noreturn(
89; CHECK: cmpxchg {{.*}} release monotonic
90; CHECK: store i32 0, {{.*}} @__msan_retval_tls
91; CHECK: ret i32
92
93
94; atomic load: preserve alignment, load shadow value after app value
95
96define i32 @AtomicLoad(ptr %p) sanitize_memory {
97entry:
  ; The CHECK lines expect the seq_cst atomic load to keep its align 16, with
  ; the shadow loaded AFTER the application value (also at align 16) and then
  ; stored to @__msan_retval_tls.
98  %0 = load atomic i32, ptr %p seq_cst, align 16
99  ret i32 %0
100}
101
102; CHECK-LABEL: @AtomicLoad
103; CHECK: load atomic i32, ptr {{.*}} seq_cst, align 16
104; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32, ptr {{.*}}, align 16
105; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
106; CHECK: ret i32
107
108
109; atomic load acquire: ordering already strong enough, kept as-is; preserve alignment, load shadow value after app value
110
111define i32 @AtomicLoadAcquire(ptr %p) sanitize_memory {
112entry:
  ; Same expectations as the seq_cst load: acquire ordering is already strong
  ; enough and is preserved; shadow load follows the app load, alignment kept.
113  %0 = load atomic i32, ptr %p acquire, align 16
114  ret i32 %0
115}
116
117; CHECK-LABEL: @AtomicLoadAcquire
118; CHECK: load atomic i32, ptr {{.*}} acquire, align 16
119; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32, ptr {{.*}}, align 16
120; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
121; CHECK: ret i32
122
123
124; atomic load monotonic: bump up to load acquire
125
126define i32 @AtomicLoadMonotonic(ptr %p) sanitize_memory {
127entry:
  ; A monotonic atomic load is expected to be upgraded to acquire by the
  ; instrumentation (the CHECK line matches "acquire", not "monotonic").
128  %0 = load atomic i32, ptr %p monotonic, align 16
129  ret i32 %0
130}
131
132; CHECK-LABEL: @AtomicLoadMonotonic
133; CHECK: load atomic i32, ptr {{.*}} acquire, align 16
134; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32, ptr {{.*}}, align 16
135; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
136; CHECK: ret i32
137
138
139; atomic load unordered: bump up to load acquire
140
141define i32 @AtomicLoadUnordered(ptr %p) sanitize_memory {
142entry:
  ; An unordered atomic load is likewise expected to be upgraded to acquire
  ; (the CHECK line matches "acquire", not "unordered").
143  %0 = load atomic i32, ptr %p unordered, align 16
144  ret i32 %0
145}
146
147; CHECK-LABEL: @AtomicLoadUnordered
148; CHECK: load atomic i32, ptr {{.*}} acquire, align 16
149; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32, ptr {{.*}}, align 16
150; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
151; CHECK: ret i32
152
153
154; atomic store: preserve alignment, store clean shadow value before app value
155
156define void @AtomicStore(ptr %p, i32 %x) sanitize_memory {
157entry:
  ; The CHECK lines expect a clean (zero) shadow stored BEFORE the application
  ; store, with alignment preserved, and no read of @__msan_param_tls
  ; (CHECK-NOT): the stored value's shadow is not checked here.
158  store atomic i32 %x, ptr %p seq_cst, align 16
159  ret void
160}
161
162; CHECK-LABEL: @AtomicStore
163; CHECK-NOT: @__msan_param_tls
164; CHECK: store i32 0, ptr {{.*}}, align 16
165; CHECK: store atomic i32 %x, ptr %p seq_cst, align 16
166; CHECK: ret void
167
168
169; atomic store: preserve alignment, store clean shadow value before app value
170
171define void @AtomicStoreRelease(ptr %p, i32 %x) sanitize_memory {
172entry:
  ; Same expectations as the seq_cst store: release ordering is already strong
  ; enough and is preserved; clean shadow store precedes the app store.
173  store atomic i32 %x, ptr %p release, align 16
174  ret void
175}
176
177; CHECK-LABEL: @AtomicStoreRelease
178; CHECK-NOT: @__msan_param_tls
179; CHECK: store i32 0, ptr {{.*}}, align 16
180; CHECK: store atomic i32 %x, ptr %p release, align 16
181; CHECK: ret void
182
183
184; atomic store monotonic: bumped up to store release
185
186define void @AtomicStoreMonotonic(ptr %p, i32 %x) sanitize_memory {
187entry:
  ; A monotonic atomic store is expected to be upgraded to release by the
  ; instrumentation (the CHECK line matches "release", not "monotonic").
188  store atomic i32 %x, ptr %p monotonic, align 16
189  ret void
190}
191
192; CHECK-LABEL: @AtomicStoreMonotonic
193; CHECK-NOT: @__msan_param_tls
194; CHECK: store i32 0, ptr {{.*}}, align 16
195; CHECK: store atomic i32 %x, ptr %p release, align 16
196; CHECK: ret void
197
198
199; atomic store unordered: bumped up to store release
200
201define void @AtomicStoreUnordered(ptr %p, i32 %x) sanitize_memory {
202entry:
  ; An unordered atomic store is likewise expected to be upgraded to release
  ; (the CHECK line matches "release", not "unordered").
203  store atomic i32 %x, ptr %p unordered, align 16
204  ret void
205}
206
207; CHECK-LABEL: @AtomicStoreUnordered
208; CHECK-NOT: @__msan_param_tls
209; CHECK: store i32 0, ptr {{.*}}, align 16
210; CHECK: store atomic i32 %x, ptr %p release, align 16
211; CHECK: ret void
212
213
214; ORIGINS: declare i32 @__msan_chain_origin(i32)
215; EXT:     declare zeroext i32 @__msan_chain_origin(i32 zeroext)
216; ORIGINS: declare void @__msan_set_origin(ptr, i64, i32)
217; EXT:     declare void @__msan_set_origin(ptr, i64, i32 zeroext)
218; ORIGINS: declare ptr @__msan_memset(ptr, i32, i64)
219; EXT:     declare ptr @__msan_memset(ptr, i32 signext, i64)
220; ORIGINS: declare void @__msan_warning_with_origin_noreturn(i32)
221; EXT:     declare void @__msan_warning_with_origin_noreturn(i32 zeroext)
222