xref: /minix3/common/lib/libc/arch/arm/string/strrchr_arm.S (revision 84d9c625bfea59e274550651111ae9edfdc40fbd)
1*84d9c625SLionel Sambuc/*-
2*84d9c625SLionel Sambuc * Copyright (c) 2013 The NetBSD Foundation, Inc.
3*84d9c625SLionel Sambuc * All rights reserved.
4*84d9c625SLionel Sambuc *
5*84d9c625SLionel Sambuc * This code is derived from software contributed to The NetBSD Foundation
6*84d9c625SLionel Sambuc * by Matt Thomas of 3am Software Foundry.
7*84d9c625SLionel Sambuc *
8*84d9c625SLionel Sambuc * Redistribution and use in source and binary forms, with or without
9*84d9c625SLionel Sambuc * modification, are permitted provided that the following conditions
10*84d9c625SLionel Sambuc * are met:
11*84d9c625SLionel Sambuc * 1. Redistributions of source code must retain the above copyright
12*84d9c625SLionel Sambuc *    notice, this list of conditions and the following disclaimer.
13*84d9c625SLionel Sambuc * 2. Redistributions in binary form must reproduce the above copyright
14*84d9c625SLionel Sambuc *    notice, this list of conditions and the following disclaimer in the
15*84d9c625SLionel Sambuc *    documentation and/or other materials provided with the distribution.
16*84d9c625SLionel Sambuc *
17*84d9c625SLionel Sambuc * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
18*84d9c625SLionel Sambuc * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
19*84d9c625SLionel Sambuc * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
20*84d9c625SLionel Sambuc * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
21*84d9c625SLionel Sambuc * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22*84d9c625SLionel Sambuc * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23*84d9c625SLionel Sambuc * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24*84d9c625SLionel Sambuc * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25*84d9c625SLionel Sambuc * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26*84d9c625SLionel Sambuc * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27*84d9c625SLionel Sambuc * POSSIBILITY OF SUCH DAMAGE.
28*84d9c625SLionel Sambuc */
29*84d9c625SLionel Sambuc
30*84d9c625SLionel Sambuc#include <machine/asm.h>
31*84d9c625SLionel Sambuc
32*84d9c625SLionel SambucRCSID("$NetBSD: strrchr_arm.S,v 1.6 2013/08/25 06:15:06 matt Exp $")
33*84d9c625SLionel Sambuc
/*
 * Byte-lane masks and shift direction, chosen per endianness so that
 * BYTE0 always selects the lowest-addressed byte of a loaded word
 * (i.e. the first character in string order) and "lshi"/"lshis"
 * shift a lane mask towards higher-addressed (later) byte lanes.
 */
34*84d9c625SLionel Sambuc#ifdef __ARMEL__
35*84d9c625SLionel Sambuc#define	BYTE0	0x000000ff
36*84d9c625SLionel Sambuc#define	BYTE1	0x0000ff00
37*84d9c625SLionel Sambuc#define	BYTE2	0x00ff0000
38*84d9c625SLionel Sambuc#define	BYTE3	0xff000000
39*84d9c625SLionel Sambuc#define	lshi	lsl
40*84d9c625SLionel Sambuc#define	lshis	lsls
41*84d9c625SLionel Sambuc#else
42*84d9c625SLionel Sambuc#define	BYTE0	0xff000000
43*84d9c625SLionel Sambuc#define	BYTE1	0x00ff0000
44*84d9c625SLionel Sambuc#define	BYTE2	0x0000ff00
45*84d9c625SLionel Sambuc#define	BYTE3	0x000000ff
46*84d9c625SLionel Sambuc#define	lshi	lsr
47*84d9c625SLionel Sambuc#define	lshis	lsrs
48*84d9c625SLionel Sambuc#endif
49*84d9c625SLionel Sambuc
/*
 * char *strrchr(const char *s, int c)
 *
 * Return a pointer to the LAST occurrence of the character (c & 0xff)
 * in the NUL-terminated string s, or NULL if it does not occur.
 * Searching for NUL itself returns a pointer to the terminator
 * (s + strlen(s)).
 *
 * In:   r0 = s, r1 = c
 * Out:  r0 = pointer to last match, or NULL
 * Clobbers: r1-r3, ip, flags (r4/r5 are saved/restored around the
 * word-at-a-time loop).  Leading bytes are handled one at a time
 * until r1 is word aligned, then whole words are scanned.
 */
50*84d9c625SLionel SambucENTRY(strrchr)
51*84d9c625SLionel Sambuc	ands	r2, r1, #0xff		/* is the byte value NUL? */
52*84d9c625SLionel Sambuc	bne	1f			/*   no, do it the hard way */
53*84d9c625SLionel Sambuc	push	{r0, lr}		/* save pointer and return addr */
54*84d9c625SLionel Sambuc	bl	PLT_SYM(strlen)		/* get length */
55*84d9c625SLionel Sambuc	pop	{r1, r2}		/* restore pointer / return addr */
56*84d9c625SLionel Sambuc	adds	r0, r0, r1		/* add pointer to length */
57*84d9c625SLionel Sambuc	RETr(r2)			/* return */
58*84d9c625SLionel Sambuc
59*84d9c625SLionel Sambuc1:	mov	r1, r0			/* we use r0 as the return value */
60*84d9c625SLionel Sambuc	movs	r0, #0			/* return NULL by default */
61*84d9c625SLionel Sambuc2:	tst	r1, #3			/* test for word alignment */
62*84d9c625SLionel Sambuc	beq	.Lpre_main_loop		/*   finally word aligned */
63*84d9c625SLionel Sambuc	ldrb	r3, [r1], #1		/* load a byte */
64*84d9c625SLionel Sambuc	cmp	r3, r2			/* did it match? */
65*84d9c625SLionel Sambuc#ifdef __thumb__
66*84d9c625SLionel Sambuc	it	eq
67*84d9c625SLionel Sambuc#endif
68*84d9c625SLionel Sambuc	subeq	r0, r1, #1		/*   yes, remember that it did */
69*84d9c625SLionel Sambuc	cmp	r3, #0			/* was it NUL? */
70*84d9c625SLionel Sambuc	bne	2b			/*   no, try next byte */
71*84d9c625SLionel Sambuc	RET				/* return */
72*84d9c625SLionel Sambuc.Lpre_main_loop:
73*84d9c625SLionel Sambuc	push	{r4, r5}		/* save some registers */
74*84d9c625SLionel Sambuc#if defined(_ARM_ARCH_7)
75*84d9c625SLionel Sambuc	movw	ip, #0xfefe		/* magic constant; 254 in each byte */
76*84d9c625SLionel Sambuc	movt	ip, #0xfefe		/* magic constant; 254 in each byte */
77*84d9c625SLionel Sambuc#elif defined(_ARM_ARCH_6)
78*84d9c625SLionel Sambuc	mov	ip, #0xfe		/* put 254 in low byte */
79*84d9c625SLionel Sambuc	orr	ip, ip, ip, lsl #8	/* move to next byte */
80*84d9c625SLionel Sambuc	orr	ip, ip, ip, lsl #16	/* move to next halfword */
81*84d9c625SLionel Sambuc#endif /* _ARM_ARCH_6 */
82*84d9c625SLionel Sambuc	orr	r2, r2, r2, lsl #8	/* move to next byte */
83*84d9c625SLionel Sambuc	orr	r2, r2, r2, lsl #16	/* move to next halfword */
84*84d9c625SLionel Sambuc.Lmain_loop:
85*84d9c625SLionel Sambuc	ldr	r3, [r1], #4		/* load next word */
86*84d9c625SLionel Sambuc#if defined(_ARM_ARCH_6)
87*84d9c625SLionel Sambuc	/*
88*84d9c625SLionel Sambuc	 * Add 254 to each byte using the UQADD8 (unsigned saturating add 8)
89*84d9c625SLionel Sambuc	 * instruction.  For every non-NUL byte, the result for that byte will
90*84d9c625SLionel Sambuc	 * become 255.  For NUL, it will be 254.  When we complement the
91*84d9c625SLionel Sambuc	 * result, if the result is non-0 then we must have encountered a NUL.
92*84d9c625SLionel Sambuc	 */
93*84d9c625SLionel Sambuc	uqadd8	r4, r3, ip		/* NUL detection happens here */
94*84d9c625SLionel Sambuc	usub8	r3, r3, r2		/* bias for char looked for? */
95*84d9c625SLionel Sambuc	uqadd8	r5, r3, ip		/* char detection happens here */
96*84d9c625SLionel Sambuc	ands	r3, r4, r5		/* merge results */
97*84d9c625SLionel Sambuc	mvns	r3, r3			/* is the complement non-0? */
98*84d9c625SLionel Sambuc	beq	.Lmain_loop		/*   no, then keep going */
99*84d9c625SLionel Sambuc
100*84d9c625SLionel Sambuc	mvns	r5, r5			/* did we find any matching bytes? */
101*84d9c625SLionel Sambuc	beq	.Ldone			/*   no, then we hit the end, return */
102*84d9c625SLionel Sambuc	mvns	r4, r4			/* did we encounter a NUL? */
103*84d9c625SLionel Sambuc	beq	.Lfind_match		/*   no, find matching byte */
104*84d9c625SLionel Sambuc	/*
105*84d9c625SLionel Sambuc	 * Copy the NUL bit to the following byte lanes.  Then clear any match
106*84d9c625SLionel Sambuc	 * bits in those byte lanes to prevent false positives in those bytes.
107*84d9c625SLionel Sambuc	 */
108*84d9c625SLionel Sambuc	bics	r5, r5, r4		/* clear any NUL match bits */
109*84d9c625SLionel Sambuc	beq	.Ldone			/*   no remaining matches, we're done */
110*84d9c625SLionel Sambuc	lshis	r3, r4, #8		/* shift up a byte */
111*84d9c625SLionel Sambuc#ifdef __thumb__
112*84d9c625SLionel Sambuc	itt	ne
113*84d9c625SLionel Sambuc#endif
114*84d9c625SLionel Sambuc	orrsne	r3, r3, r3, lshi #8	/* if non 0, copy up to next byte */
115*84d9c625SLionel Sambuc	orrsne	r3, r3, r3, lshi #8	/* if non 0, copy up to last byte */
116*84d9c625SLionel Sambuc	bics	r5, r5, r3		/* clear match bits */
117*84d9c625SLionel Sambuc	beq	.Ldone			/*   no remaining matches, we're done */
118*84d9c625SLionel Sambuc.Lfind_match:
119*84d9c625SLionel Sambuc#ifdef __ARMEL__
120*84d9c625SLionel Sambuc	rev	r5, r5			/* we want this in BE for the CLZ */
121*84d9c625SLionel Sambuc#endif
122*84d9c625SLionel Sambuc	/*
123*84d9c625SLionel Sambuc	 * If we have multiple matches, we want to select the "last" match
124*84d9c625SLionel Sambuc	 * in the word which will be the lowest bit set.
125*84d9c625SLionel Sambuc	 */
126*84d9c625SLionel Sambuc	subs	r3, r5, #1		/* subtract 1 */
127*84d9c625SLionel Sambuc	ands	r3, r3, r5		/* and with mask */
128*84d9c625SLionel Sambuc	eors	r5, r5, r3		/* only have the lowest bit set left */
129*84d9c625SLionel Sambuc	clz	r5, r5			/* count how many leading zeros */
130*84d9c625SLionel Sambuc	add	r0, r1, r5, lsr #3	/* divide that by 8 and add to pointer */
131*84d9c625SLionel Sambuc	subs	r0, r0, #4		/* compensate for the post-inc */
132*84d9c625SLionel Sambuc	cmp	r4, #0			/* did we read any NULs? */
133*84d9c625SLionel Sambuc	beq	.Lmain_loop		/*   no, get next word */
134*84d9c625SLionel Sambuc#else
135*84d9c625SLionel Sambuc	/*
136*84d9c625SLionel Sambuc	 * No fancy shortcuts so just test each byte lane for a NUL.
137*84d9c625SLionel Sambuc	 * (other tests for NULs in a word take more instructions/cycles).
138*84d9c625SLionel Sambuc	 */
139*84d9c625SLionel Sambuc	eor	r4, r3, r2		/* xor .. */
140*84d9c625SLionel Sambuc	tst	r3, #BYTE0		/* is byte 0 a NUL? */
141*84d9c625SLionel Sambuc	beq	.Ldone			/*   yes, then we're done */
142*84d9c625SLionel Sambuc	tst	r4, #BYTE0		/* is byte 0 a match? */
143*84d9c625SLionel Sambuc	subeq	r0, r1, #4		/*   yes, remember its location */
144*84d9c625SLionel Sambuc	tst	r3, #BYTE1		/* is byte 1 a NUL? */
145*84d9c625SLionel Sambuc	beq	.Ldone			/*   yes, then we're done */
146*84d9c625SLionel Sambuc	tst	r4, #BYTE1		/* is byte 1 a match? */
147*84d9c625SLionel Sambuc	subeq	r0, r1, #3		/*   yes, remember its location */
148*84d9c625SLionel Sambuc	tst	r3, #BYTE2		/* is byte 2 a NUL? */
149*84d9c625SLionel Sambuc	beq	.Ldone			/*   yes, then we're done */
150*84d9c625SLionel Sambuc	tst	r4, #BYTE2		/* is byte 2 a match? */
151*84d9c625SLionel Sambuc	subeq	r0, r1, #2		/*   yes, remember its location */
152*84d9c625SLionel Sambuc	tst	r3, #BYTE3		/* is byte 3 a NUL? */
153*84d9c625SLionel Sambuc	beq	.Ldone			/*   yes, then we're done */
154*84d9c625SLionel Sambuc	tst	r4, #BYTE3		/* is byte 3 a match? */
155*84d9c625SLionel Sambuc	subeq	r0, r1, #1		/*   yes, remember its location */
156*84d9c625SLionel Sambuc	b	.Lmain_loop
157*84d9c625SLionel Sambuc#endif /* _ARM_ARCH_6 */
158*84d9c625SLionel Sambuc.Ldone:
159*84d9c625SLionel Sambuc	pop	{r4, r5}
160*84d9c625SLionel Sambuc	RET
161*84d9c625SLionel SambucEND(strrchr)
162
162