/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.file	"atomic.s"

#include <sys/asm_linkage.h>

/*
 * ATOMIC_BO_ENABLE_SHIFT can be selectively defined by processors
 * to enable exponential backoff. If it is not defined, backoff is
 * not desired, i.e. backoff is disabled.
 * By default, the shift value is used to generate a power of 2
 * value for the backoff limit. In the kernel, processors scale this
 * shift value with the number of online cpus.
 */
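/*
 * For example (the shift value 4 here is purely illustrative): with
 * ATOMIC_BO_ENABLE_SHIFT defined as 4, the backoff limit below becomes
 * 1 << 4 for the generic libc build and ncpus_online << 4 in the kernel
 * (see ATOMIC_BACKOFF_CPU).
 */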

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)

#ifdef ATOMIC_BO_ENABLE_SHIFT

#if !defined(lint)
	.weak   cpu_atomic_delay
	.type   cpu_atomic_delay, #function
#endif  /* lint */

/*
 * For the kernel, invoke a processor-specific delay routine to perform
 * a low-impact spin delay. The value of ATOMIC_BO_ENABLE_SHIFT is tuned
 * with respect to the specific spin delay implementation.
 */
#define	DELAY_SPIN(label, tmp1, tmp2)					\
	/*								; \
	 * Define a pragma weak reference to a cpu specific		; \
	 * delay routine for atomic backoff. For CPUs that		; \
	 * have no such delay routine defined, the delay becomes	; \
	 * just a simple tight loop.					; \
	 *								; \
	 * tmp1 = holds CPU specific delay routine			; \
	 * tmp2 = holds atomic routine's callee return address		; \
	 */								; \
	sethi	%hi(cpu_atomic_delay), tmp1				; \
	or	tmp1, %lo(cpu_atomic_delay), tmp1			; \
label/**/0:								; \
	brz,pn	tmp1, label/**/1					; \
	mov	%o7, tmp2						; \
	jmpl	tmp1, %o7	/* call CPU specific delay routine */	; \
	  nop			/* delay slot : do nothing */		; \
	mov	tmp2, %o7	/* restore callee's return address */	; \
label/**/1:

/*
 * For the kernel, we take cas failures into consideration and
 * also scale the backoff limit w.r.t. the number of cpus.
 * For cas failures, we reset the backoff value to 1 if the number
 * of cas failures equals or exceeds the number of online cpus. This
 * enforces some degree of fairness and prevents starvation.
 * We also scale/normalize the processor-provided
 * ATOMIC_BO_ENABLE_SHIFT w.r.t. the number of online cpus to
 * obtain the actual final limit to use.
 */
#define ATOMIC_BACKOFF_CPU(val, limit, ncpu, cas_cnt, label)		\
	brnz,pt	ncpu, label/**/0					; \
	  inc	cas_cnt							; \
	sethi	%hi(ncpus_online), ncpu					; \
	ld	[ncpu + %lo(ncpus_online)], ncpu			; \
label/**/0:								; \
	cmp	cas_cnt, ncpu						; \
	blu,pt	%xcc, label/**/1					; \
	  sllx	ncpu, ATOMIC_BO_ENABLE_SHIFT, limit			; \
	mov	%g0, cas_cnt						; \
	mov	1, val							; \
label/**/1:
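/*
 * Rough C sketch of ATOMIC_BACKOFF_CPU above (illustrative only, not
 * compiled):
 *
 *	cas_cnt++;
 *	if (ncpu == 0)
 *		ncpu = ncpus_online;
 *	limit = ncpu << ATOMIC_BO_ENABLE_SHIFT;
 *	if (cas_cnt >= ncpu) {
 *		cas_cnt = 0;
 *		val = 1;
 *	}
 */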
#endif	/* ATOMIC_BO_ENABLE_SHIFT */

#else	/* _KERNEL */

/*
 * ATOMIC_BO_ENABLE_SHIFT may be enabled/defined here for generic
 * libc atomics. None for now.
 */
#ifdef ATOMIC_BO_ENABLE_SHIFT
#define	DELAY_SPIN(label, tmp1, tmp2)	\
label/**/0:

#define ATOMIC_BACKOFF_CPU(val, limit, ncpu, cas_cnt, label)  \
	set	1 << ATOMIC_BO_ENABLE_SHIFT, limit
#endif	/* ATOMIC_BO_ENABLE_SHIFT */
#endif	/* _KERNEL */

#ifdef ATOMIC_BO_ENABLE_SHIFT
/*
 * ATOMIC_BACKOFF_INIT macro for initialization.
 * The backoff value (val) is initialized to 1.
 * ncpu is initialized to 0.
 * cas_cnt counts cas instruction failures and is
 * initialized to 0.
 */
#define ATOMIC_BACKOFF_INIT(val, ncpu, cas_cnt)	\
	mov	1, val				; \
	mov	%g0, ncpu			; \
	mov	%g0, cas_cnt

#define ATOMIC_BACKOFF_BRANCH(cr, backoff, loop) \
	bne,a,pn cr, backoff

/*
 * Main ATOMIC_BACKOFF_BACKOFF macro for backoff.
 */
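/*
 * Roughly, the macro below does the following (an illustrative sketch
 * only; the assembly that follows is authoritative):
 *
 *	ATOMIC_BACKOFF_CPU(val, limit, ncpu, cas_cnt);
 *	limit = MIN(val, limit);
 *	val = limit;
 *	do {
 *		DELAY_SPIN();
 *	} while (--limit > 0);
 *	val = val * 2;			(grow backoff for the next failure)
 *	goto retlabel;			(re-enter the ld/cas retry loop)
 */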
#define ATOMIC_BACKOFF_BACKOFF(val, limit, ncpu, cas_cnt, label, retlabel) \
	ATOMIC_BACKOFF_CPU(val, limit, ncpu, cas_cnt, label/**/_0)	; \
	cmp	val, limit						; \
	blu,a,pt %xcc, label/**/_1					; \
	  mov	val, limit						; \
label/**/_1:								; \
	mov	limit, val						; \
	DELAY_SPIN(label/**/_2, %g2, %g3)				; \
	deccc	limit							; \
	bgu,pn	%xcc, label/**/_20 /* branch to middle of DELAY_SPIN */	; \
	  nop								; \
	ba	retlabel						; \
	sllx	val, 1, val

#else	/* ATOMIC_BO_ENABLE_SHIFT */
#define ATOMIC_BACKOFF_INIT(val, ncpu, cas_cnt)

#define ATOMIC_BACKOFF_BRANCH(cr, backoff, loop) \
	bne,a,pn cr, loop

#define ATOMIC_BACKOFF_BACKOFF(val, limit, ncpu, cas_cnt, label, retlabel)
#endif	/* ATOMIC_BO_ENABLE_SHIFT */

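	/*
	 * SPARC V9 provides no byte or halfword compare-and-swap, so the
	 * 8-bit and 16-bit routines below operate on the aligned 32-bit
	 * word containing the target byte/halfword: the operand is shifted
	 * and masked into position and a word-sized cas commits the change.
	 */
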
	/*
	 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_8_nv.
	 */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

	/*
	 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_8_nv.
	 */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

	/*
	 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_8_nv.
	 */
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)

	/*
	 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_16_nv.
	 */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

	/*
	 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_16_nv.
	 */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

	/*
	 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_16_nv.
	 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)

	/*
	 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_32_nv.
	 */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ba	add_32
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

	/*
	 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_32_nv.
	 */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ba	add_32
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

	/*
	 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_32_nv.
	 */
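	/*
	 * The 32-bit and 64-bit routines below all follow the same
	 * pattern: load the old value, compute the new value, try to
	 * commit it with cas/casx, and on failure either retry at once
	 * or, when ATOMIC_BO_ENABLE_SHIFT is defined, branch to the
	 * exponential backoff code before retrying.
	 */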
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
add_32:
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ld	[%o0], %o2
1:
	add	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%icc, 2f, 1b)
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, add32, 0b)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)

	/*
	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_inc_64_nv.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_64
	  add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

	/*
	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_dec_64_nv.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_64
	  sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

	/*
	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_add_64_nv.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_64:
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%xcc, 2f, 1b)
	  mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, add64, 0b)
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)

	/*
	 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_8_nv.
	 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

	/*
	 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_16_nv.
	 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

	/*
	 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_32_nv.
	 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%icc, 2f, 1b)
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, or32, 0b)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

	/*
	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_or_64_nv.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%xcc, 2f, 1b)
	  mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, or64, 0b)
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

	/*
	 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_8_nv.
	 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)

	/*
	 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_16_nv.
	 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

	/*
	 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_32_nv.
	 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%icc, 2f, 1b)
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, and32, 0b)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

	/*
	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
	 * separated, you need to also edit the libc sparcv9 platform
	 * specific mapfile and remove the NODYNSORT attribute
	 * from atomic_and_64_nv.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%xcc, 2f, 1b)
	  mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, and64, 0b)
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

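	/*
	 * atomic_cas_8 and atomic_cas_16 emulate a sub-word cas with a
	 * word-sized cas. If the word cas fails only because bits outside
	 * the target byte/halfword changed while the target bits still
	 * match the comparison value, the operation is retried rather than
	 * reported as a failure.
	 */
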
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	  and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	  mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	casx	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_64)

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	  mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ld	[%o0], %o2
1:
	mov	%o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%icc, 2f, 1b)
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, swap32, 0b)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)

	ENTRY(atomic_swap_64)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	ATOMIC_BACKOFF_INIT(%o4, %g4, %g5)
0:
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	ATOMIC_BACKOFF_BRANCH(%xcc, 2f, 1b)
	  mov	%o3, %o2
	retl
	mov	%o3, %o0
2:
	ATOMIC_BACKOFF_BACKOFF(%o4, %o5, %g4, %g5, swap64, 0b)
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_64)

	ENTRY(atomic_set_long_excl)
	ATOMIC_BACKOFF_INIT(%o5, %g4, %g5)
	mov	1, %o3
	slln	%o3, %o1, %o3
0:
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	ATOMIC_BACKOFF_BRANCH(%ncc, 5f, 1b)
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
5:
	ATOMIC_BACKOFF_BACKOFF(%o5, %g1, %g4, %g5, setlongexcl, 0b)
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	ATOMIC_BACKOFF_INIT(%o5, %g4, %g5)
	mov	1, %o3
	slln	%o3, %o1, %o3
0:
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	  mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	ATOMIC_BACKOFF_BRANCH(%ncc, 5f, 1b)
	  mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
5:
	ATOMIC_BACKOFF_BACKOFF(%o5, %g1, %g4, %g5, clrlongexcl, 0b)
	SET_SIZE(atomic_clear_long_excl)

#if !defined(_KERNEL)

	/*
	 * Spitfires and Blackbirds have a problem with membars in the
	 * delay slot (SF_ERRATA_51).  For safety's sake, we assume
	 * that the whole world needs the workaround.
	 */
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */