/*	$NetBSD: asm_single.h,v 1.10 2022/05/18 13:56:32 andvar Exp $	*/

/*
 * Copyright (c) 1996 Leo Weppelman.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _M68K_ASM_SINGLE_H
#define _M68K_ASM_SINGLE_H
/*
 * Provide bit manipulation macros that resolve to a single instruction.
322eccad65Sleo * These can be considered atomic on single processor architectures when 33*d339aae0Sandvar * no page faults can occur when accessing <var>. 342eccad65Sleo * There primary use is to avoid race conditions when manipulating device 352eccad65Sleo * registers. 362eccad65Sleo */ 372eccad65Sleo 382eccad65Sleo #define single_inst_bset_b(var, bit) \ 395f1c88d7Sperry __asm volatile ("orb %1,%0" \ 40977da07fSmhitch : "+m" (var) \ 41977da07fSmhitch : "di" ((u_char)bit)) 42593454cdSthorpej 432eccad65Sleo #define single_inst_bclr_b(var, bit) \ 445f1c88d7Sperry __asm volatile ("andb %1,%0" \ 45977da07fSmhitch : "+m" (var) \ 46977da07fSmhitch : "di" ((u_char)~(bit))) 47593454cdSthorpej 482eccad65Sleo 492eccad65Sleo #define single_inst_bset_w(var, bit) \ 505f1c88d7Sperry __asm volatile ("orw %1,%0" \ 51977da07fSmhitch : "+m" (var) \ 52977da07fSmhitch : "di" ((u_short)bit)) 53593454cdSthorpej 542eccad65Sleo #define single_inst_bclr_w(var, bit) \ 555f1c88d7Sperry __asm volatile ("andw %1,%0" \ 56977da07fSmhitch : "+m" (var) \ 57977da07fSmhitch : "di" ((u_short)~(bit))) 58593454cdSthorpej 592eccad65Sleo 602eccad65Sleo #define single_inst_bset_l(var, bit) \ 615f1c88d7Sperry __asm volatile ("orl %1,%0" \ 62977da07fSmhitch : "+m" (var) \ 63977da07fSmhitch : "di" ((u_long)bit)) 64593454cdSthorpej 652eccad65Sleo #define single_inst_bclr_l(var, bit) \ 665f1c88d7Sperry __asm volatile ("andl %1,%0" \ 67977da07fSmhitch : "+m" (var) \ 68977da07fSmhitch : "di" ((u_long)~(bit))) 692eccad65Sleo 702eccad65Sleo #endif /* _M68K_ASM_SINGLE_H */ 71