/*	$NetBSD: lock.h,v 1.11 2007/10/17 19:56:41 garbled Exp $	*/

/*-
 * Copyright (c) 2000, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by the NetBSD
 *	Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */

#ifndef _POWERPC_LOCK_H_
#define _POWERPC_LOCK_H_

/* Return true if the lock is held. */
static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_LOCKED;
}

/* Return true if the lock is free. */
static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

/* Mark the lock word unlocked; no memory barrier is implied. */
static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_UNLOCKED;
}

/* Mark the lock word locked; no memory barrier is implied. */
static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_LOCKED;
}

/* Initialize the lock to the unlocked state and make that store visible. */
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	*alp = __SIMPLELOCK_UNLOCKED;
	__asm volatile ("sync");
}

/*
 * Acquire the lock, spinning until it is available.  The lock word is
 * loaded with lwarx (load and reserve); if it is free, stwcx. attempts
 * to store __SIMPLELOCK_LOCKED and the sequence restarts if the
 * reservation was lost.  While another CPU holds the lock we spin on
 * plain lwzx loads so as not to keep taking the reservation.  The
 * trailing isync serves as an acquire barrier.
 */
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	int old;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	3f		\n\
2:	lwzx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	1b		\n\
	b	2b		\n\
3:	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
	: "memory");
}

/*
 * Try to acquire the lock without spinning.  If the lock word is
 * already held, the stwcx. to the local dummy word cancels the
 * outstanding reservation before returning.  Returns non-zero if the
 * lock was acquired.
 */
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	int old, dummy;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	bne	2f		\n\
	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
2:	stwcx.	%3,0,%4		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
	  "r"(&dummy)
	: "memory");

	return (old == __SIMPLELOCK_UNLOCKED);
}

/*
 * Release the lock.  The sync acts as a release barrier, ordering all
 * prior stores before the store that marks the lock free.
 */
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	__asm volatile ("sync");
	*alp = __SIMPLELOCK_UNLOCKED;
}

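/*
 * A minimal usage sketch (illustrative only, not part of this header):
 * how a caller might protect a critical section with these primitives.
 * The lock variable name and the critical-section body are hypothetical.
 *
 *	static __cpu_simple_lock_t example_lock;
 *
 *	__cpu_simple_lock_init(&example_lock);
 *	...
 *	__cpu_simple_lock(&example_lock);
 *	... update shared data ...
 *	__cpu_simple_unlock(&example_lock);
 *
 *	// Non-blocking variant:
 *	if (__cpu_simple_lock_try(&example_lock)) {
 *		... got the lock ...
 *		__cpu_simple_unlock(&example_lock);
 *	}
 */
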
/* Read memory barrier. */
static __inline void
mb_read(void)
{
	__asm volatile ("isync" ::: "memory");
}

/* Write memory barrier. */
static __inline void
mb_write(void)
{
	__asm volatile ("sync" ::: "memory");
}

/* Full memory barrier. */
static __inline void
mb_memory(void)
{
	__asm volatile ("sync" ::: "memory");
}

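/*
 * Sketch of typical barrier usage (illustrative only, not part of this
 * header): a producer publishes data before setting a flag, and a
 * consumer checks the flag before reading the data.  The variable names
 * are hypothetical.
 *
 *	// producer
 *	shared_data = value;
 *	mb_write();		// order the data store before the flag store
 *	shared_flag = 1;
 *
 *	// consumer
 *	while (shared_flag == 0)
 *		continue;
 *	mb_read();		// order the flag check before the data loads
 *	use(shared_data);
 */
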
#endif /* _POWERPC_LOCK_H_ */