/*	$NetBSD: atomic_add.S,v 1.7 2020/08/06 10:00:21 skrll Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>
#include "atomic_op_asm.h"

RCSID("$NetBSD: atomic_add.S,v 1.7 2020/08/06 10:00:21 skrll Exp $")

	.text
	.set	noreorder
#ifdef _KERNEL_OPT
#include "opt_cputype.h"
#ifndef MIPS3_LOONGSON2F
	.set	noat
	.set	nomacro
#endif
#else /* _KERNEL_OPT */
	.set	noat
	.set	nomacro
#endif /* _KERNEL_OPT */

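/*
 * _atomic_add_32(ptr, delta): atomically add the 32-bit value in a1 to
 * the word at (a0).  Octeon+/Octeon2 CPUs do this with a single atomic
 * store-add (saa); other CPUs use an LL/SC retry loop.
 */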
LEAF(_atomic_add_32)
#if defined(_MIPS_ARCH_OCTEONP) || defined(_MIPS_ARCH_OCTEON2)
	saa		a1, (a0)
#else
	LLSCSYNC
1:	INT_LL		t0, 0(a0)
	 nop
	INT_ADDU	t0, a1
	INT_SC		t0, 0(a0)
	beq		t0, zero, 1b
	 nop
#endif
	j		ra
	 nop
END(_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_32, _atomic_add_32)

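/*
 * _atomic_add_32_nv(ptr, delta): as above, but also return the new
 * (post-add) value in v0.
 */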
LEAF(_atomic_add_32_nv)
	LLSCSYNC
1:	INT_LL		v0, 0(a0)
	 nop
	INT_ADDU	v0, a1
	move		t0, v0
	INT_SC		t0, 0(a0)
	beq		t0, zero, 1b
	 nop
	j		ra
	 nop
END(_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_32_nv, _atomic_add_32_nv)

#if !defined(__mips_o32)
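/*
 * _atomic_add_64(ptr, delta): 64-bit counterpart of _atomic_add_32,
 * using saad on Octeon+/Octeon2 or an LL/SC loop elsewhere; omitted on
 * the o32 ABI.
 */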
LEAF(_atomic_add_64)
#if defined(_MIPS_ARCH_OCTEONP) || defined(_MIPS_ARCH_OCTEON2)
	saad		a1, (a0)
#else
	LLSCSYNC
1:	REG_LL		t0, 0(a0)
	 nop
	REG_ADDU	t0, a1
	REG_SC		t0, 0(a0)
	beq		t0, zero, 1b
	 nop
#endif
	j		ra
	 nop
END(_atomic_add_64)
ATOMIC_OP_ALIAS(atomic_add_64, _atomic_add_64)

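/*
 * _atomic_add_64_nv(ptr, delta): as above, but also return the new
 * (post-add) value in v0.
 */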
LEAF(_atomic_add_64_nv)
	LLSCSYNC
1:	REG_LL		v0, 0(a0)
	 nop
	REG_ADDU	v0, a1
	move		t0, v0
	REG_SC		t0, 0(a0)
	beq		t0, zero, 1b
	 nop
	j		ra
	 nop
END(_atomic_add_64_nv)
ATOMIC_OP_ALIAS(atomic_add_64_nv, _atomic_add_64_nv)
#endif

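/*
 * On _LP64 ABIs the long and pointer flavours map to the 64-bit add;
 * otherwise they map to the 32-bit one.  The int flavours are always
 * 32-bit.
 */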
#ifdef _LP64
STRONG_ALIAS(_atomic_add_long,		_atomic_add_64)
STRONG_ALIAS(_atomic_add_long_nv,	_atomic_add_64_nv)
STRONG_ALIAS(_atomic_add_ptr,		_atomic_add_64)
STRONG_ALIAS(_atomic_add_ptr_nv,	_atomic_add_64_nv)
#else
STRONG_ALIAS(_atomic_add_long,		_atomic_add_32)
STRONG_ALIAS(_atomic_add_long_nv,	_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_ptr,		_atomic_add_32)
STRONG_ALIAS(_atomic_add_ptr_nv,	_atomic_add_32_nv)
#endif
STRONG_ALIAS(_atomic_add_int,		_atomic_add_32)
STRONG_ALIAS(_atomic_add_int_nv,	_atomic_add_32_nv)

ATOMIC_OP_ALIAS(atomic_add_int,		_atomic_add_int)
ATOMIC_OP_ALIAS(atomic_add_int_nv,	_atomic_add_int_nv)
ATOMIC_OP_ALIAS(atomic_add_ptr,		_atomic_add_ptr)
ATOMIC_OP_ALIAS(atomic_add_ptr_nv,	_atomic_add_ptr_nv)
ATOMIC_OP_ALIAS(atomic_add_long,	_atomic_add_long)
ATOMIC_OP_ALIAS(atomic_add_long_nv,	_atomic_add_long_nv)