1/* $NetBSD: atomic_swap.S,v 1.19 2021/07/28 07:32:20 skrll Exp $ */ 2 3/*- 4 * Copyright (c) 2007,2012 The NetBSD Foundation, Inc. 5 * All rights reserved. 6 * 7 * This code is derived from software contributed to The NetBSD Foundation 8 * by Jason R. Thorpe and Matt Thomas. 9 * 10 * Redistribution and use in source and binary forms, with or without 11 * modification, are permitted provided that the following conditions 12 * are met: 13 * 1. Redistributions of source code must retain the above copyright 14 * notice, this list of conditions and the following disclaimer. 15 * 2. Redistributions in binary form must reproduce the above copyright 16 * notice, this list of conditions and the following disclaimer in the 17 * documentation and/or other materials provided with the distribution. 18 * 19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 29 * POSSIBILITY OF SUCH DAMAGE. 30 */ 31 32#include "atomic_op_asm.h" 33 34/* 35 * While SWP{B} is sufficient on its own for pre-ARMv7 CPUs, on MP ARMv7 cores 36 * SWP{B} is disabled since it's no longer atomic among multiple CPUs. They 37 * will actually raise an UNDEFINED exception. 
 *
 * So if we use the LDREX/STREX template, but use a SWP instruction followed
 * by a MOV instruction (using a temporary register), that gives a handler
 * for the SWP UNDEFINED exception enough information to "patch" this
 * instance of SWP with the correct forms of LDREX/STREX.  (Note that this
 * would happen even on "read-only" pages.  If the page gets tossed, we will
 * get another exception and fix it yet again.)
 */

/*
 * uint32_t _atomic_swap_32(volatile uint32_t *ptr, uint32_t val)
 *
 * Atomically store val at *ptr and return the previous contents.
 * __sync_lock_test_and_set_4 is the same operation preceded by a memory
 * barrier (DMB is a macro from atomic_op_asm.h; presumably it expands to
 * the appropriate barrier for the target CPU -- confirm there).
 *
 * In:      r0 = ptr, r1 = new value
 * Out:     r0 = previous value
 * Clobbers: r3, ip, condition flags
 */
ENTRY_NP(__sync_lock_test_and_set_4)
	DMB				/* barrier required by __sync semantics */
	/* FALLTHROUGH */

ENTRY_NP(_atomic_swap_32)
	mov	ip, r0			/* free r0 to hold the return value */
1:
#if defined(_ARM_ARCH_6)
	ldrex	r0, [ip]		/* r0 = old value; open exclusive monitor */
	cmp	r0, r1			/* old == new? then no store is needed */
#ifdef __thumb__
	beq	99f			/* Thumb: no conditional strex/cmp; branch out */
	strex	r3, r1, [ip]		/* try the store; r3 = 0 iff it succeeded */
	cmp	r3, #0
#else
	strexne	r3, r1, [ip]		/* store only if different; r3 = 0 iff success */
	cmpne	r3, #0
#endif
#else
	/*
	 * Pre-v6: SWP is atomic on its own, so the retry branch below is
	 * never taken (movs #0 leaves Z set).  The extra mov/cmp exist to
	 * give the SWP UNDEFINED-exception handler on MP ARMv7 the template
	 * described in the comment above, so it can patch in LDREX/STREX.
	 */
	swp	r0, r1, [ip]
	cmp	r0, r1
	movsne	r3, #0
	cmpne	r3, #0
#endif
	bne	1b			/* exclusive store failed: retry */
99:
	RET
END(_atomic_swap_32)
END(__sync_lock_test_and_set_4)

ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32)
CRT_ALIAS(__atomic_exchange_4,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)

#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
/*
 * void __sync_lock_release_4(volatile uint32_t *ptr)
 *
 * Barrier, then store 0 to *ptr (the release half of a test-and-set lock).
 * In: r0 = ptr.  Clobbers: r1.
 */
ENTRY_NP(__sync_lock_release_4)
	mov	r1, #0
	DMB				/* order prior accesses before the releasing store */

	str	r1, [r0]
	RET
END(__sync_lock_release_4)
#endif

/*
 * uint8_t _atomic_swap_8(volatile uint8_t *ptr, uint8_t val)
 *
 * Byte-sized variant: atomically store val at *ptr, return the old byte.
 * __sync_lock_test_and_set_1 adds the leading barrier.
 *
 * In:      r0 = ptr, r1 = new value
 * Out:     r0 = previous value
 * Clobbers: r3, ip, condition flags
 */
ENTRY_NP(__sync_lock_test_and_set_1)
	DMB				/* barrier required by __sync semantics */
	/* FALLTHROUGH */

ENTRY_NP(_atomic_swap_8)
	mov	ip, r0			/* free r0 to hold the return value */
1:
#if defined(_ARM_ARCH_6)
	ldrexb	r0, [ip]		/* r0 = old byte; open exclusive monitor */
	strexb	r3, r1, [ip]		/* try the store; r3 = 0 iff it succeeded */
#else
	swpb	r0, r1, [ip]		/* pre-v6: atomic byte swap */
	mov	r3, #0			/* mov is the UNDEF-handler patch template (see above) */
#endif
	cmp	r3, #0
	bne	1b			/* exclusive store failed: retry */
	RET
END(_atomic_swap_8)
END(__sync_lock_test_and_set_1)

ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8)
ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8)
ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8)
CRT_ALIAS(__atomic_exchange_1,_atomic_swap_8)
STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8)
STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8)

#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
/*
 * void __sync_lock_release_1(volatile uint8_t *ptr)
 *
 * Barrier, then store a zero byte to *ptr (the release half of a
 * byte-sized test-and-set lock).
 * In: r0 = ptr.  Clobbers: r1.
 */
ENTRY_NP(__sync_lock_release_1)
	mov	r1, #0
	DMB				/* order prior accesses before the releasing store */

	strb	r1, [r0]
	RET
END(__sync_lock_release_1)
#endif