// -*- C++ -*-
//===--------------------------- atomic -----------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// order and consistency

typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume,  // load-consume
    memory_order_acquire,  // load-acquire
    memory_order_release,  // store-release
    memory_order_acq_rel,  // store-release load-acquire
    memory_order_seq_cst   // store-release load-acquire
} memory_order;

template <class T> T kill_dependency(T y) noexcept;

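// Illustrative sketch, not part of the synopsis: the most common pairing is a
// release store publishing data to an acquire load. The names `ready` and
// `data` below are hypothetical.
//
//     atomic<bool> ready(false);
//     int data = 0;
//
//     // producer thread
//     data = 42;
//     ready.store(true, memory_order_release);
//
//     // consumer thread
//     while (!ready.load(memory_order_acquire))
//         ;                              // spin until the store is visible
//     // data is guaranteed to be 42 here
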
// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

// flag type and operations

typedef struct atomic_flag
{
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;
    atomic_flag()  noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
} atomic_flag;

bool
    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set(atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                      memory_order m) noexcept;

bool
    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear(volatile atomic_flag* obj) noexcept;

void
    atomic_flag_clear(atomic_flag* obj) noexcept;

void
    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;

#define ATOMIC_FLAG_INIT see below
#define ATOMIC_VAR_INIT(value) see below

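// Illustrative sketch, not part of the synopsis: atomic_flag is the minimal
// building block for a spinlock. The helper names below are hypothetical.
//
//     atomic_flag lock_flag = ATOMIC_FLAG_INIT;
//
//     void lock()   { while (lock_flag.test_and_set(memory_order_acquire)) {} }
//     void unlock() { lock_flag.clear(memory_order_release); }
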
template <class T>
struct atomic
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;
};

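// Illustrative sketch, not part of the synopsis: compare_exchange_weak is
// typically used in a retry loop; on failure it reloads `expc` with the value
// currently stored. `update` is a hypothetical pure function.
//
//     template <class T, class F>
//     void atomic_update(atomic<T>& a, F update)
//     {
//         T expected = a.load(memory_order_relaxed);
//         while (!a.compare_exchange_weak(expected, update(expected)))
//             ;   // expected now holds the current value; retry
//     }
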
template <>
struct atomic<integral>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral
        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;
};

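// Illustrative sketch, not part of the synopsis: the integral specialization
// adds fetch_* and the usual arithmetic operators. The counter name is
// hypothetical.
//
//     atomic<unsigned long> hits(0);
//
//     void record_hit() { hits.fetch_add(1, memory_order_relaxed); }
//     // equivalently ++hits; which is sequentially consistent by default
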
template <class T>
struct atomic<T*>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;
    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;
};

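// Illustrative sketch, not part of the synopsis: for atomic<T*> the ptrdiff_t
// argument counts elements, not bytes, so fetch_add advances by op * sizeof(T).
// The buffer and cursor names are hypothetical.
//
//     int buffer[16];
//     atomic<int*> cursor(buffer);
//
//     int* p = cursor.fetch_add(4);   // p == buffer, cursor now == buffer + 4
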

template <class T>
    bool
    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;

template <class T>
    bool
    atomic_is_lock_free(const atomic<T>* obj) noexcept;

template <class T>
    void
    atomic_init(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_init(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    void
    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_load(const volatile atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load(const atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange(atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
                                          T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
                                            T* expc, T desr,
                                            memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
                                            T desr,
                                            memory_order s, memory_order f) noexcept;

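// Illustrative sketch, not part of the synopsis: the free functions mirror the
// member functions and take the atomic object by pointer; the *_explicit forms
// add memory-order arguments. The flag name is hypothetical.
//
//     atomic<int> flag(0);
//
//     atomic_store_explicit(&flag, 1, memory_order_release);
//     int observed = atomic_load_explicit(&flag, memory_order_acquire);
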
template <class Integral>
    Integral
    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

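// Illustrative sketch, not part of the synopsis: fences order the surrounding
// relaxed operations, giving the same publication guarantee as the tagged
// store/load pairing shown earlier. `ready` and `data` are the same
// hypothetical variables.
//
//     // producer thread
//     data = 42;
//     atomic_thread_fence(memory_order_release);
//     ready.store(true, memory_order_relaxed);
//
//     // consumer thread
//     while (!ready.load(memory_order_relaxed))
//         ;
//     atomic_thread_fence(memory_order_acquire);
//     // data is guaranteed to be 42 here
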
}  // std

*/

#include <__config>
#include <cstddef>
#include <cstdint>
#include <type_traits>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_THREADS
#error <atomic> is not supported on this single-threaded system
#endif
#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
#error <atomic> is not implemented
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
namespace __gcc_atomic {
template <typename _Tp>
struct __gcc_atomic_t {
  __gcc_atomic_t() _NOEXCEPT {}
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

template <typename _Tp> _Tp __create();

template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};

static inline constexpr int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

static inline constexpr int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic

template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

template <typename _Tp>
static inline
typename enable_if<
    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  // the default operator= in an object is not volatile, a byte-by-byte copy
  // is required.
  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  volatile char* end = to + sizeof(_Tp);
  char* from = reinterpret_cast<char*>(&__val);
  while (to != end) {
    *to++ = *from++;
  }
}

template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  return __atomic_store(&__a->__a_value, &__val,
                        __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };

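// Illustrative note, not from the original source: __skip_amt lets the generic
// fetch_add/fetch_sub mappings below implement pointer arithmetic in whole
// elements. For a hypothetical atomic<int*> p, p.fetch_add(1) reaches these
// helpers with __delta == 1, and multiplying by __skip_amt<int*>::value
// (== sizeof(int)) converts it to the byte offset __atomic_fetch_add expects.
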
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}

// general atomic<T>

template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    mutable _Atomic(_Tp) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};

// atomic<T>

template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};

// atomic_is_lock_free

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

// atomic_fetch_add

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

1466// atomic_fetch_and
1467
1468template <class _Tp>
1469inline _LIBCPP_INLINE_VISIBILITY
1470typename enable_if
1471<
1472    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1473    _Tp
1474>::type
1475atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1476{
1477    return __o->fetch_and(__op);
1478}
1479
1480template <class _Tp>
1481inline _LIBCPP_INLINE_VISIBILITY
1482typename enable_if
1483<
1484    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1485    _Tp
1486>::type
1487atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1488{
1489    return __o->fetch_and(__op);
1490}
1491
// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

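// [Illustrative usage, not part of the header] atomic_fetch_and applies a
// bitwise AND and returns the previous value, which makes it handy for
// atomically clearing flag bits in a mask (hypothetical names):
//
//     atomic<unsigned> __flags(0xFFu);
//     unsigned __prev = atomic_fetch_and(&__flags, ~0x04u);  // clear bit 2
//     // __prev == 0xFF, __flags.load() == 0xFB
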
// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

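// [Illustrative usage, not part of the header] atomic_fetch_or sets bits in a
// mask and, via the returned prior value, reports whether they were already
// set (hypothetical names):
//
//     atomic<unsigned> __flags(0x01u);
//     unsigned __prev = atomic_fetch_or(&__flags, 0x02u);    // set bit 1
//     bool __was_set = (__prev & 0x02u) != 0;                // false here
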
// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

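// [Illustrative usage, not part of the header] atomic_fetch_xor toggles the
// selected bits; applying the same mask twice restores the original value
// (hypothetical names):
//
//     atomic<unsigned> __state(0x05u);
//     atomic_fetch_xor(&__state, 0x01u);   // __state.load() == 0x04
//     atomic_fetch_xor(&__state, 0x01u);   // __state.load() == 0x05 again
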
// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

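// [Illustrative usage, not part of the header] atomic_flag operations are
// always lock-free, so the type is the usual building block for a simple
// spinlock. The class below is a hypothetical sketch built on the free
// functions above, not something this header provides:
//
//     struct __spinlock {
//         atomic_flag __f_ = ATOMIC_FLAG_INIT;
//         void lock()   { while (atomic_flag_test_and_set_explicit(&__f_, memory_order_acquire)) {} }
//         void unlock() { atomic_flag_clear_explicit(&__f_, memory_order_release); }
//     };
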
// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}

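// [Illustrative usage, not part of the header] atomic_thread_fence imposes
// ordering between otherwise relaxed operations. A hypothetical sketch of a
// release fence in one thread paired with an acquire fence in another:
//
//     int __data = 0;               // plain shared variable (hypothetical)
//     atomic<bool> __ready(false);
//
//     // thread 1 (producer)
//     __data = 42;
//     atomic_thread_fence(memory_order_release);
//     __ready.store(true, memory_order_relaxed);
//
//     // thread 2 (consumer)
//     while (!__ready.load(memory_order_relaxed)) {}
//     atomic_thread_fence(memory_order_acquire);
//     int __r = __data;             // guaranteed to read 42
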
// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

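// [Illustrative usage, not part of the header] The typedefs above mirror the C
// <stdatomic.h> names; for example atomic_size_t is simply atomic<size_t>
// (hypothetical names below):
//
//     atomic_size_t __bytes_written(0);
//     __bytes_written.fetch_add(512u, memory_order_relaxed);
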
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

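// [Illustrative usage, not part of the header] The two macros above provide
// C11-compatible static initialization (hypothetical names):
//
//     atomic_flag __guard = ATOMIC_FLAG_INIT;     // starts cleared
//     atomic<int> __counter = ATOMIC_VAR_INIT(0); // starts at 0
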
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE

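// [Illustrative usage, not part of the header] The *_LOCK_FREE macros expand to
// 0 (never lock-free), 1 (sometimes), or 2 (always), and can gate a
// compile-time choice (hypothetical names):
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//         atomic<int> __fast_counter(0);          // guaranteed lock-free here
//     #else
//         /* fall back to a mutex-protected counter */
//     #endif
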
_LIBCPP_END_NAMESPACE_STD

#endif  // _LIBCPP_ATOMIC