/* $NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $ */
/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#include <machine/asm.h>

RCSID("$NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $")

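/*
 * uint8_t atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
 *
 * Compare-and-swap on a single byte.  Plain ARMv6 only has word-sized
 * LDREX/STREX (the byte-sized LDREXB/STREXB arrive with ARMv6K), so the
 * byte CAS is emulated on the 32-bit word containing the byte: only the
 * target byte is compared and rewritten.  Returns the byte found at
 * *ptr; the store is performed only if that byte equals "old".  (The
 * prototype above is a sketch following the pattern of the other
 * atomic_cas_<size> routines, not copied from a header.)
 */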
ENTRY(atomic_cas_8)
	XPUSH	{r4-r6}			/* we need some more registers */
	and	r6, r0, #3		/* which byte do we replace? */
#if __ARMEB__
	eor	r6, r6, #3		/* bytes are reversed on BE */
#endif
	mov	r6, r6, lsl #3		/* multiply by 8 for a bit shift */
	mov	r1, r1, lsl r6		/* move old value to correct byte */
	eor	r2, r1, r2, lsl r6	/* r2 is now (old ^ new), in place */
	mov	r5, #0xff		/* load mask */
	mov	r5, r5, lsl r6		/* and move to correct byte */
	bic	r3, r0, #3		/* ldrex needs a word-aligned address */

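/*
 * Loop invariants, as set up above (a summary, not new logic):
 *	r1 = expected old byte, shifted into place
 *	r2 = (old ^ new), shifted into place
 *	r5 = 0xff mask, shifted into place
 *	r3 = word-aligned address of the word containing the byte
 *	r6 = bit offset of the target byte within that word
 */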
1:	ldrex	r4, [r3]		/* load the 32-bit word */
	and	r0, r4, r5		/* clear the other bytes */
	teq	r0, r1			/* equal to the old value? */
	bne	2f			/*   nope, bail. */
	eor	r4, r4, r2		/* new == old ^ (old ^ new) */
	strex	ip, r4, [r3]		/* attempt to store it */
	cmp	ip, #0			/*   succeed? */
	bne	1b			/* nope, try again. */

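/*
 * Both paths land here with r0 holding the byte actually observed,
 * still shifted into place: on a mismatch we bailed out of the loop,
 * and on success the observed byte equalled the expected one.
 */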
2:	mov	r0, r0, lsr r6		/* shift observed byte back to the lsb */
	XPOP	{r4-r6}			/* don't need these anymore */
	RET
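
/*
 * For reference, a C sketch of the algorithm above.  Illustrative only,
 * not part of the build; try_store_exclusive() is a hypothetical
 * stand-in for STREX, since C has no exclusive-monitor primitive.
 *
 *	uint8_t
 *	atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
 *	{
 *		volatile uint32_t *wp =
 *		    (volatile uint32_t *)((uintptr_t)ptr & ~(uintptr_t)3);
 *		unsigned shift = ((uintptr_t)ptr & 3) * 8; // BE: (3 - ...) * 8
 *		uint32_t mask = (uint32_t)0xff << shift;
 *		uint32_t exp = (uint32_t)old << shift;
 *		uint32_t xor = (uint32_t)(old ^ new) << shift;
 *		uint32_t word, seen;
 *
 *		do {
 *			word = *wp;			// ldrex
 *			seen = word & mask;		// isolate our byte
 *			if (seen != exp)
 *				break;			// mismatch: no store
 *		} while (!try_store_exclusive(wp, word ^ xor)); // strex
 *		return (uint8_t)(seen >> shift);
 *	}
 */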