// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 5
// RUN: %clang_cc1 %s -emit-llvm -o - -triple=aarch64-none-elf | FileCheck %s
// REQUIRES: aarch64-registered-target

#include <stdatomic.h>

// CHECK-LABEL: define dso_local void @clear_relaxed(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] monotonic, align 1
// CHECK-NEXT:    ret void
//
void clear_relaxed(char *ptr) {
  __atomic_clear(ptr, memory_order_relaxed);
}

// CHECK-LABEL: define dso_local void @clear_seq_cst(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] seq_cst, align 1
// CHECK-NEXT:    ret void
//
void clear_seq_cst(char *ptr) {
  __atomic_clear(ptr, memory_order_seq_cst);
}

// CHECK-LABEL: define dso_local void @clear_release(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] release, align 1
// CHECK-NEXT:    ret void
//
void clear_release(char *ptr) {
  __atomic_clear(ptr, memory_order_release);
}

// CHECK-LABEL: define dso_local void @clear_dynamic(
// CHECK-SAME: ptr noundef [[PTR:%.*]], i32 noundef [[ORDER:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ORDER_ADDR:%.*]] = alloca i32, align 4
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store i32 [[ORDER]], ptr [[ORDER_ADDR]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[ORDER_ADDR]], align 4
// CHECK-NEXT:    switch i32 [[TMP1]], label %[[MONOTONIC:.*]] [
// CHECK-NEXT:      i32 3, label %[[RELEASE:.*]]
// CHECK-NEXT:      i32 5, label %[[SEQCST:.*]]
// CHECK-NEXT:    ]
// CHECK:       [[MONOTONIC]]:
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] monotonic, align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE:.*]]
// CHECK:       [[RELEASE]]:
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] release, align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[SEQCST]]:
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] seq_cst, align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[ATOMIC_CONTINUE]]:
// CHECK-NEXT:    ret void
//
void clear_dynamic(char *ptr, int order) {
  __atomic_clear(ptr, order);
}

// CHECK-LABEL: define dso_local void @test_and_set_relaxed(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_relaxed(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_relaxed);
}

// CHECK-LABEL: define dso_local void @test_and_set_consume(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 acquire, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_consume(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_consume);
}

// CHECK-LABEL: define dso_local void @test_and_set_acquire(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 acquire, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_acquire(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_acquire);
}

// CHECK-LABEL: define dso_local void @test_and_set_release(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 release, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_release(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_release);
}

// CHECK-LABEL: define dso_local void @test_and_set_acq_rel(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 acq_rel, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_acq_rel(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_acq_rel);
}

// CHECK-LABEL: define dso_local void @test_and_set_seq_cst(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 seq_cst, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_seq_cst(char *ptr) {
  __atomic_test_and_set(ptr, memory_order_seq_cst);
}

// CHECK-LABEL: define dso_local void @test_and_set_dynamic(
// CHECK-SAME: ptr noundef [[PTR:%.*]], i32 noundef [[ORDER:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ORDER_ADDR:%.*]] = alloca i32, align 4
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store i32 [[ORDER]], ptr [[ORDER_ADDR]], align 4
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[ORDER_ADDR]], align 4
// CHECK-NEXT:    switch i32 [[TMP1]], label %[[MONOTONIC:.*]] [
// CHECK-NEXT:      i32 1, label %[[ACQUIRE:.*]]
// CHECK-NEXT:      i32 2, label %[[ACQUIRE]]
// CHECK-NEXT:      i32 3, label %[[RELEASE:.*]]
// CHECK-NEXT:      i32 4, label %[[ACQREL:.*]]
// CHECK-NEXT:      i32 5, label %[[SEQCST:.*]]
// CHECK-NEXT:    ]
// CHECK:       [[MONOTONIC]]:
// CHECK-NEXT:    [[TMP2:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP2]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE:.*]]
// CHECK:       [[ACQUIRE]]:
// CHECK-NEXT:    [[TMP3:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 acquire, align 1
// CHECK-NEXT:    [[TOBOOL1:%.*]] = icmp ne i8 [[TMP3]], 0
// CHECK-NEXT:    store i1 [[TOBOOL1]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[RELEASE]]:
// CHECK-NEXT:    [[TMP4:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 release, align 1
// CHECK-NEXT:    [[TOBOOL2:%.*]] = icmp ne i8 [[TMP4]], 0
// CHECK-NEXT:    store i1 [[TOBOOL2]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[ACQREL]]:
// CHECK-NEXT:    [[TMP5:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 acq_rel, align 1
// CHECK-NEXT:    [[TOBOOL3:%.*]] = icmp ne i8 [[TMP5]], 0
// CHECK-NEXT:    store i1 [[TOBOOL3]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[SEQCST]]:
// CHECK-NEXT:    [[TMP6:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 seq_cst, align 1
// CHECK-NEXT:    [[TOBOOL4:%.*]] = icmp ne i8 [[TMP6]], 0
// CHECK-NEXT:    store i1 [[TOBOOL4]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    br label %[[ATOMIC_CONTINUE]]
// CHECK:       [[ATOMIC_CONTINUE]]:
// CHECK-NEXT:    [[TMP7:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP7]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_dynamic(char *ptr, int order) {
  __atomic_test_and_set(ptr, order);
}

// CHECK-LABEL: define dso_local void @test_and_set_array(
// CHECK-SAME: ) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[X:%.*]] = alloca [10 x i32], align 4
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    [[ARRAYDECAY:%.*]] = getelementptr inbounds [10 x i32], ptr [[X]], i64 0, i64 0
// CHECK-NEXT:    [[TMP0:%.*]] = atomicrmw volatile xchg ptr [[ARRAYDECAY]], i8 1 seq_cst, align 4
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP0]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP1:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP1]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_array() {
  volatile int x[10];
  __atomic_test_and_set(x, memory_order_seq_cst);
}

// These intrinsics accept any pointer type, including void and incomplete
// structs, and always access the first byte regardless of the actual type
// size.
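
// Illustration only (no CHECK lines are generated for this helper, and its
// name is hypothetical): a minimal sketch, assuming this little-endian
// AArch64 target where the stored "set" value is 1, of the first-byte-only
// behaviour described above.
__attribute__((unused)) static int illustrate_first_byte_only(void) {
  int value = 0x01020300; // byte at the lowest address starts out clear (0)
  _Bool was_set = __atomic_test_and_set(&value, memory_order_relaxed);
  // Only the first byte changes: the flag reports "previously clear" and
  // the upper three bytes of the int are untouched.
  return !was_set && value == 0x01020301;
}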

struct incomplete;

// CHECK-LABEL: define dso_local void @clear_int(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] monotonic, align 4
// CHECK-NEXT:    ret void
//
void clear_int(int *ptr) {
  __atomic_clear(ptr, memory_order_relaxed);
}
// CHECK-LABEL: define dso_local void @clear_void(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] monotonic, align 1
// CHECK-NEXT:    ret void
//
void clear_void(void *ptr) {
  __atomic_clear(ptr, memory_order_relaxed);
}
// CHECK-LABEL: define dso_local void @clear_incomplete(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    store atomic i8 0, ptr [[TMP0]] monotonic, align 1
// CHECK-NEXT:    ret void
//
void clear_incomplete(struct incomplete *ptr) {
  __atomic_clear(ptr, memory_order_relaxed);
}

// CHECK-LABEL: define dso_local void @test_and_set_int(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 4
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_int(int *ptr) {
  __atomic_test_and_set(ptr, memory_order_relaxed);
}
// CHECK-LABEL: define dso_local void @test_and_set_void(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_void(void *ptr) {
  __atomic_test_and_set(ptr, memory_order_relaxed);
}
// CHECK-LABEL: define dso_local void @test_and_set_incomplete(
// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
// CHECK-NEXT:  [[ENTRY:.*:]]
// CHECK-NEXT:    [[PTR_ADDR:%.*]] = alloca ptr, align 8
// CHECK-NEXT:    [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
// CHECK-NEXT:    store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
// CHECK-NEXT:    [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
// CHECK-NEXT:    [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
// CHECK-NEXT:    store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
// CHECK-NEXT:    [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
// CHECK-NEXT:    ret void
//
void test_and_set_incomplete(struct incomplete *ptr) {
  __atomic_test_and_set(ptr, memory_order_relaxed);
}