1*57718be8SEnji Cooper /* $NetBSD: all_sync_ops_linkable.c,v 1.4 2014/02/21 10:26:25 martin Exp $ */
2*57718be8SEnji Cooper
3*57718be8SEnji Cooper /*-
4*57718be8SEnji Cooper * Copyright (c) 2014 The NetBSD Foundation, Inc.
5*57718be8SEnji Cooper * All rights reserved.
6*57718be8SEnji Cooper *
7*57718be8SEnji Cooper * This code is derived from software contributed to The NetBSD Foundation
8*57718be8SEnji Cooper * by Martin Husemann <martin@NetBSD.org>.
9*57718be8SEnji Cooper *
10*57718be8SEnji Cooper * Redistribution and use in source and binary forms, with or without
11*57718be8SEnji Cooper * modification, are permitted provided that the following conditions
12*57718be8SEnji Cooper * are met:
13*57718be8SEnji Cooper * 1. Redistributions of source code must retain the above copyright
14*57718be8SEnji Cooper * notice, this list of conditions and the following disclaimer.
15*57718be8SEnji Cooper * 2. Redistributions in binary form must reproduce the above copyright
16*57718be8SEnji Cooper * notice, this list of conditions and the following disclaimer in the
17*57718be8SEnji Cooper * documentation and/or other materials provided with the distribution.
18*57718be8SEnji Cooper *
19*57718be8SEnji Cooper * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20*57718be8SEnji Cooper * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21*57718be8SEnji Cooper * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22*57718be8SEnji Cooper * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23*57718be8SEnji Cooper * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24*57718be8SEnji Cooper * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25*57718be8SEnji Cooper * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26*57718be8SEnji Cooper * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27*57718be8SEnji Cooper * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28*57718be8SEnji Cooper * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29*57718be8SEnji Cooper * POSSIBILITY OF SUCH DAMAGE.
30*57718be8SEnji Cooper */
31*57718be8SEnji Cooper
32*57718be8SEnji Cooper /*
33*57718be8SEnji Cooper * This is a simple link-time test to verify all builtin atomic sync
34*57718be8SEnji Cooper * operations are available. Depending on the exact cpu/arch code generator
35*57718be8SEnji Cooper * options, some of these need support functions (which on NetBSD we
36*57718be8SEnji Cooper * typically provide in src/common/lib/libc/atomic).
37*57718be8SEnji Cooper *
38*57718be8SEnji Cooper * The list of operations has been extracted from sync-builtins.def file
39*57718be8SEnji Cooper * in the gcc distribution (as of gcc 4.8.2).
40*57718be8SEnji Cooper */
41*57718be8SEnji Cooper
42*57718be8SEnji Cooper #include <machine/types.h>
43*57718be8SEnji Cooper #include <sys/inttypes.h>
44*57718be8SEnji Cooper
/*
 * One operand cell per operand width the __sync_* builtins cover.
 * volatile keeps the compiler from optimizing the atomic accesses
 * away, so every builtin reference below survives into the object
 * file and must be resolved at link time.
 */
volatile uint8_t u8 = 0;
volatile uint16_t u16 = 0;
volatile uint32_t u32 = 0;

#ifdef __HAVE_ATOMIC64_OPS
/* 64-bit cell only on ports that provide 64-bit atomic ops. */
volatile uint64_t u64 = 0;
#endif
52*57718be8SEnji Cooper
int
main(int argc, char **argv)
{
	/*
	 * Reference every __sync_* builtin, both the type-generic form
	 * and each explicitly sized _1/_2/_4 (and _8 where available)
	 * variant.  Nothing is asserted at run time: the test passes if
	 * this program links, proving the toolchain plus libc provide
	 * every required support routine.  _8 variants are guarded by
	 * __HAVE_ATOMIC64_OPS since not all ports have 64-bit atomics.
	 */
	__sync_synchronize();
	/* Arithmetic: add, returning the new value. */
	__sync_add_and_fetch(&u8, 1);
	__sync_add_and_fetch_1(&u8, 1);
	__sync_add_and_fetch_2(&u16, 1);
	__sync_add_and_fetch_4(&u32, 1);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_add_and_fetch_8(&u64, 1);
#endif
	/* Compare-and-swap returning success/failure. */
	__sync_bool_compare_and_swap(&u8, 1, 2);
	__sync_bool_compare_and_swap_1(&u8, 1, 2);
	__sync_bool_compare_and_swap_2(&u16, 1, 2);
	__sync_bool_compare_and_swap_4(&u32, 1, 2);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_bool_compare_and_swap_8(&u64, 1, 2);
#endif
	/* Fetch-and-op family: return the old value. */
	__sync_fetch_and_add(&u8, 1);
	__sync_fetch_and_add_1(&u8, 1);
	__sync_fetch_and_add_2(&u16, 1);
	__sync_fetch_and_add_4(&u32, 1);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_add_8(&u64, 1);
#endif
	__sync_fetch_and_and(&u8, 0x80);
	__sync_fetch_and_and_1(&u8, 0x80);
	__sync_fetch_and_and_2(&u16, 0x80);
	__sync_fetch_and_and_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_and_8(&u64, 0x80);
#endif
#ifndef __clang__
	/* clang does not emit the nand variants, so skip them there. */
	__sync_fetch_and_nand(&u8, 0x80);
	__sync_fetch_and_nand_1(&u8, 0x80);
	__sync_fetch_and_nand_2(&u16, 0x80);
	__sync_fetch_and_nand_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_nand_8(&u64, 0x80);
#endif
#endif
	__sync_fetch_and_or(&u8, 0x80);
	__sync_fetch_and_or_1(&u8, 0x80);
	__sync_fetch_and_or_2(&u16, 0x80);
	__sync_fetch_and_or_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_or_8(&u64, 0x80);
#endif
	__sync_fetch_and_sub(&u8, 0x80);
	__sync_fetch_and_sub_1(&u8, 0x80);
	__sync_fetch_and_sub_2(&u16, 0x80);
	__sync_fetch_and_sub_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_sub_8(&u64, 0x80);
#endif
	__sync_fetch_and_xor(&u8, 0x80);
	__sync_fetch_and_xor_1(&u8, 0x80);
	__sync_fetch_and_xor_2(&u16, 0x80);
	__sync_fetch_and_xor_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_xor_8(&u64, 0x80);
#endif
	/* Spinlock primitives: release store and acquire test-and-set. */
	__sync_lock_release(&u8);
	__sync_lock_release_1(&u8);
	__sync_lock_release_2(&u16);
	__sync_lock_release_4(&u32);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_lock_release_8(&u64);
#endif
	__sync_lock_test_and_set(&u8, 5);
	__sync_lock_test_and_set_1(&u8, 5);
	__sync_lock_test_and_set_2(&u16, 5);
	__sync_lock_test_and_set_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_lock_test_and_set_8(&u64, 5);
#endif
#ifndef __clang__
	/* clang does not emit the nand variants, so skip them there. */
	__sync_nand_and_fetch(&u8, 5);
	__sync_nand_and_fetch_1(&u8, 5);
	__sync_nand_and_fetch_2(&u16, 5);
	__sync_nand_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_nand_and_fetch_8(&u64, 5);
#endif
#endif
	/* Op-and-fetch family: return the new value. */
	__sync_or_and_fetch(&u8, 5);
	__sync_or_and_fetch_1(&u8, 5);
	__sync_or_and_fetch_2(&u16, 5);
	__sync_or_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_or_and_fetch_8(&u64, 5);
#endif
	__sync_sub_and_fetch(&u8, 5);
	__sync_sub_and_fetch_1(&u8, 5);
	__sync_sub_and_fetch_2(&u16, 5);
	__sync_sub_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_sub_and_fetch_8(&u64, 5);
#endif
	/* Compare-and-swap returning the previous value. */
	__sync_val_compare_and_swap(&u8, 5, 9);
	__sync_val_compare_and_swap_1(&u8, 5, 9);
	__sync_val_compare_and_swap_2(&u16, 5, 9);
	__sync_val_compare_and_swap_4(&u32, 5, 9);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_val_compare_and_swap_8(&u64, 5, 9);
#endif
	__sync_xor_and_fetch(&u8, 5);
	__sync_xor_and_fetch_1(&u8, 5);
	__sync_xor_and_fetch_2(&u16, 5);
	__sync_xor_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_xor_and_fetch_8(&u64, 5);
#endif

	return 0;
}
169