/*	$NetBSD: lock.h,v 1.8 2005/12/28 19:09:29 perry Exp $	*/

/*-
 * Copyright (c) 2000 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations for PowerPC, built on the
 * lwarx/stwcx. (load-and-reserve / store-conditional) primitives.
 *
 * NOTE(review): this header assumes __cpu_simple_lock_t,
 * __SIMPLELOCK_UNLOCKED and __SIMPLELOCK_LOCKED are declared by a
 * machine-dependent types header included before this one; the word-sized
 * lwarx/stwcx. accesses imply the lock type is a 32-bit integer — confirm
 * against <machine/types.h>.
 */

#ifndef _POWERPC_LOCK_H_
#define _POWERPC_LOCK_H_

/*
 * Initialize a lock to the unlocked state.  The trailing "sync" makes the
 * store visible to other processors before the lock is first used.
 */
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	*alp = __SIMPLELOCK_UNLOCKED;
	__asm volatile ("sync");
}

/*
 * Acquire the lock, spinning until it is available.
 *
 * The loop at 1: takes a reservation on the lock word with lwarx and, if
 * the lock is free, tries to claim it with stwcx. at 3: (retrying from 1:
 * if the reservation was lost).  While the lock is held by someone else,
 * the inner loop at 2: spins on a plain lwzx — i.e. it watches the lock
 * word without holding a reservation — and only goes back to the
 * lwarx/stwcx. path once it observes __SIMPLELOCK_UNLOCKED.  The final
 * "isync" keeps subsequent (critical-section) instructions from executing
 * before the lock is actually held.
 */
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	int old;

	__asm volatile ("			\
						\n\
1:	lwarx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	beq+	3f			\n\
2:	lwzx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	beq+	1b			\n\
	b	2b			\n\
3:	stwcx.	%3,0,%1			\n\
	bne-	1b			\n\
	isync				\n\
\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
	: "memory");
}

/*
 * Try once to acquire the lock without blocking.
 *
 * A single lwarx/stwcx. attempt: if the lock word is not
 * __SIMPLELOCK_UNLOCKED we branch to 2:, where a stwcx. to a dummy stack
 * word is issued — presumably to cancel the outstanding reservation made
 * by lwarx (a standard PowerPC idiom; its success or failure is ignored).
 * On the success path the stwcx. at 1: may still fail if the reservation
 * is lost, in which case the attempt is retried from 1:.
 *
 * Returns nonzero if the lock was acquired, zero if it was already held
 * (i.e. whether the value observed by lwarx was __SIMPLELOCK_UNLOCKED).
 */
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	int old, dummy;

	__asm volatile ("			\
						\n\
1:	lwarx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	bne	2f			\n\
	stwcx.	%3,0,%1			\n\
	bne-	1b			\n\
2:	stwcx.	%3,0,%4			\n\
	isync				\n\
\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
	  "r"(&dummy)
	: "memory");

	return (old == __SIMPLELOCK_UNLOCKED);
}

/*
 * Release the lock.  The "sync" before the store ensures all memory
 * accesses made inside the critical section are performed before the lock
 * word is seen as unlocked by other processors.
 */
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	__asm volatile ("sync");
	*alp = __SIMPLELOCK_UNLOCKED;
}

#endif /* _POWERPC_LOCK_H_ */