xref: /openbsd-src/sys/arch/alpha/include/atomic.h (revision 2df76cc2a0aee2cb2166e3799b7bb5c3f883f83d)
1*2df76cc2Sguenther /*	$OpenBSD: atomic.h,v 1.12 2014/03/29 18:09:28 guenther Exp $	*/
27f724c1aSart /* $NetBSD: atomic.h,v 1.7 2001/12/17 23:34:57 thorpej Exp $ */
3aed035abSart 
4aed035abSart /*-
5aed035abSart  * Copyright (c) 1998, 1999 The NetBSD Foundation, Inc.
6aed035abSart  * All rights reserved.
7aed035abSart  *
8aed035abSart  * This code is derived from software contributed to The NetBSD Foundation
9aed035abSart  * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
10aed035abSart  * NASA Ames Research Center.
11aed035abSart  *
12aed035abSart  * Redistribution and use in source and binary forms, with or without
13aed035abSart  * modification, are permitted provided that the following conditions
14aed035abSart  * are met:
15aed035abSart  * 1. Redistributions of source code must retain the above copyright
16aed035abSart  *    notice, this list of conditions and the following disclaimer.
17aed035abSart  * 2. Redistributions in binary form must reproduce the above copyright
18aed035abSart  *    notice, this list of conditions and the following disclaimer in the
19aed035abSart  *    documentation and/or other materials provided with the distribution.
20aed035abSart  *
21aed035abSart  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
22aed035abSart  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
23aed035abSart  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
24aed035abSart  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
25aed035abSart  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26aed035abSart  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
27aed035abSart  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
28aed035abSart  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
29aed035abSart  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30aed035abSart  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31aed035abSart  * POSSIBILITY OF SUCH DAMAGE.
32aed035abSart  */
33aed035abSart 
34aed035abSart /*
35aed035abSart  * Misc. `atomic' operations.
36aed035abSart  */
37aed035abSart 
382fa72412Spirofti #ifndef _MACHINE_ATOMIC_H_
392fa72412Spirofti #define	_MACHINE_ATOMIC_H_
40aed035abSart 
418aa3ef09Sderaadt #if defined(_KERNEL)
428aa3ef09Sderaadt 
/*
 * atomic_setbits_ulong:
 *
 *	Atomically set bits in a `unsigned long': *ulp |= v.
 */
static __inline void
atomic_setbits_ulong(volatile unsigned long *ulp, unsigned long v)
{
	unsigned long t0;	/* scratch: loaded value, then stq_c status */

	/*
	 * Alpha load-locked/store-conditional loop.  stq_c writes 0
	 * into %0 when the store loses the lock reservation; beq then
	 * branches out of line (2f) and back to 1b to retry, keeping
	 * the common success path fall-through.  The `mb' orders the
	 * update before subsequent memory accesses on success.
	 */
	__asm volatile(
		"# BEGIN atomic_setbits_ulong\n"
		"1:	ldq_l	%0, %1		\n"
		"	or	%0, %2, %0	\n"
		"	stq_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_setbits_ulong"
		: "=&r" (t0), "=m" (*ulp)
		: "r" (v)
		: "memory");
}
68aed035abSart 
/*
 * atomic_clearbits_ulong:
 *
 *	Atomically clear bits in a `unsigned long': *ulp &= ~v.
 */
static __inline void
atomic_clearbits_ulong(volatile unsigned long *ulp, unsigned long v)
{
	unsigned long t0;	/* scratch: loaded value, then stq_c status */

	/*
	 * LL/SC retry loop as in atomic_setbits_ulong.  Note the input
	 * operand is ~v, so the `and' below clears exactly the bits
	 * set in v.
	 */
	__asm volatile(
		"# BEGIN atomic_clearbits_ulong\n"
		"1:	ldq_l	%0, %1		\n"
		"	and	%0, %2, %0	\n"
		"	stq_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_clearbits_ulong"
		: "=&r" (t0), "=m" (*ulp)
		: "r" (~v)
		: "memory");
}
94aed035abSart 
/*
 * atomic_add_ulong:
 *
 *	Atomically add a value to a `unsigned long': *ulp += v.
 */
static __inline void
atomic_add_ulong(volatile unsigned long *ulp, unsigned long v)
{
	unsigned long t0;	/* scratch: loaded value, then stq_c status */

	/*
	 * LL/SC retry loop: stq_c clears %0 on failure, beq retries
	 * via the out-of-line branch at 2f; `mb' orders the update on
	 * success.
	 */
	__asm volatile(
		"# BEGIN atomic_add_ulong\n"
		"1:	ldq_l	%0, %1		\n"
		"	addq	%0, %2, %0	\n"
		"	stq_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_add_ulong"
		: "=&r" (t0), "=m" (*ulp)
		: "r" (v)
		: "memory");
}
120aed035abSart 
/*
 * atomic_sub_ulong:
 *
 *	Atomically subtract a value from a `unsigned long': *ulp -= v.
 */
static __inline void
atomic_sub_ulong(volatile unsigned long *ulp, unsigned long v)
{
	unsigned long t0;	/* scratch: loaded value, then stq_c status */

	/*
	 * LL/SC retry loop: stq_c clears %0 on failure, beq retries
	 * via the out-of-line branch at 2f; `mb' orders the update on
	 * success.
	 */
	__asm volatile(
		"# BEGIN atomic_sub_ulong\n"
		"1:	ldq_l	%0, %1		\n"
		"	subq	%0, %2, %0	\n"
		"	stq_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_sub_ulong"
		: "=&r" (t0), "=m" (*ulp)
		: "r" (v)
		: "memory");
}
146aed035abSart 
/*
 * atomic_loadlatch_ulong:
 *
 *	Atomically load and latch a `unsigned long' value: an atomic
 *	swap.  Stores v into *ulp and returns the previous contents.
 */
static __inline unsigned long
atomic_loadlatch_ulong(volatile unsigned long *ulp, unsigned long v)
{
	unsigned long t0, v0;	/* t0: store value/stq_c status; v0: old value */

	/*
	 * t0 is reloaded from v (%3) at the top of every attempt
	 * because stq_c clobbers it with the success/failure status.
	 * ldq_l latches the old value into v0 before the conditional
	 * store of t0 replaces it.
	 */
	__asm volatile(
		"# BEGIN atomic_loadlatch_ulong\n"
		"1:	mov	%3, %0		\n"
		"	ldq_l	%1, %2		\n"
		"	stq_c	%0, %2		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_loadlatch_ulong"
		: "=&r" (t0), "=r" (v0), "=m" (*ulp)
		: "r" (v)
		: "memory");

	return (v0);
}
174aed035abSart 
/*
 * atomic_setbits_int:
 *
 *	Atomically set bits in a `unsigned int': *uip |= v.
 */
static __inline void
atomic_setbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int t0;	/* scratch: loaded value, then stl_c status */

	/*
	 * 32-bit LL/SC retry loop (ldl_l/stl_c): stl_c clears %0 on
	 * failure, beq retries via the out-of-line branch at 2f; `mb'
	 * orders the update on success.
	 */
	__asm volatile(
		"# BEGIN atomic_setbits_int\n"
		"1:	ldl_l	%0, %1		\n"
		"	or	%0, %2, %0	\n"
		"	stl_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_setbits_int"
		: "=&r" (t0), "=m" (*uip)
		: "r" (v)
		: "memory");
}
200f57756c9Sart 
/*
 * atomic_clearbits_int:
 *
 *	Atomically clear bits in a `unsigned int': *uip &= ~v.
 */
static __inline void
atomic_clearbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int t0;	/* scratch: loaded value, then stl_c status */

	/*
	 * 32-bit LL/SC retry loop (ldl_l/stl_c).  The input operand is
	 * ~v, so the `and' clears exactly the bits set in v.
	 */
	__asm volatile(
		"# BEGIN atomic_clearbits_int\n"
		"1:	ldl_l	%0, %1		\n"
		"	and	%0, %2, %0	\n"
		"	stl_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_clearbits_int"
		: "=&r" (t0), "=m" (*uip)
		: "r" (~v)
		: "memory");
}
226f57756c9Sart 
/*
 * atomic_add_int:
 *
 *	Atomically add a value to an `int': *uip += v.
 */
static __inline void
atomic_add_int(volatile int *uip, int v)
{
	unsigned int t0;	/* scratch: loaded value, then stl_c status */

	/*
	 * 32-bit LL/SC retry loop (ldl_l/stl_c): stl_c clears %0 on
	 * failure, beq retries via the out-of-line branch at 2f; `mb'
	 * orders the update on success.
	 */
	__asm volatile(
		"# BEGIN atomic_add_int\n"
		"1:	ldl_l	%0, %1		\n"
		"	addl	%0, %2, %0	\n"
		"	stl_c	%0, %1		\n"
		"	beq	%0, 2f		\n"
		"	mb			\n"
		"	br	3f		\n"
		"2:	br	1b		\n"
		"3:				\n"
		"	# END atomic_add_int"
		: "=&r" (t0), "=m" (*uip)
		: "r" (v)
		: "memory");
}
25221c23d01Smiod 
2538aa3ef09Sderaadt #endif /* defined(_KERNEL) */
2542fa72412Spirofti #endif /* _MACHINE_ATOMIC_H_ */
255