/*-
 * Copyright (c) 2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

RCSID("$NetBSD: __aeabi_uldivmod.S,v 1.8 2013/12/12 18:01:14 matt Exp $")

/*
 * typedef struct { unsigned long long quo, rem; } ulldiv_t;
 * __value_in_regs ulldiv_t __aeabi_uldivmod(unsigned long long n,
 *	unsigned long long d);
 */
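/*
 * Illustrative sketch only (not part of this file's interface): on AAPCS
 * targets the compiler lowers 64-bit unsigned '/' and '%' to calls to
 * this routine.  A hypothetical C-level helper might look like:
 *
 *	unsigned long long
 *	divmod64(unsigned long long n, unsigned long long d,
 *	    unsigned long long *rem)
 *	{
 *		*rem = n % d;
 *		return n / d;
 *	}
 *
 * Both the '%' and the '/' above can be satisfied by a single call to
 * __aeabi_uldivmod: the quotient comes back in r0/r1, the remainder
 * in r2/r3.
 */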

ENTRY(__aeabi_uldivmod)
#ifdef __ARM_EABI__
	.fnstart
	.cfi_startproc
#endif
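	/*
	 * Userland only (neither _KERNEL nor _STANDALONE): if the divisor
	 * in r2/r3 is zero, take the divide-by-zero path below.
	 */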
#if !defined(_KERNEL) && !defined(_STANDALONE)
#if !defined(__thumb__)
	orrs	ip, r2, r3
	beq	.Ldivbyzero
#elif defined(_ARM_ARCH_T2)
	cbnz	r2, 1f
	cbz	r3, .Ldivbyzero
1:
#else
	cmp	r2, #0
	bne	1f
	cmp	r3, #0
	beq	.Ldivbyzero
1:
#endif
#endif

	push	{r4,lr}
#ifdef __ARM_EABI__
	.save	{r4,lr}
	.cfi_def_cfa_offset 8
	.cfi_offset 14, -4
	.cfi_offset 4, -8
#endif
	sub	sp, sp, #16
#ifdef __ARM_EABI__
	.cfi_def_cfa_offset 24
#endif
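	/*
	 * Set up the call to __qdivrem(n, d, &rem): n is already in
	 * r0/r1 and d in r2/r3; a pointer to the 8-byte remainder
	 * buffer at sp + 8 is passed as the third argument in the
	 * stack slot at sp + 0.
	 */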
#if !defined(__thumb__) || defined(_ARM_ARCH_T2)
	add	r4, sp, #8
#else
	mov	r4, sp
	adds	r4, r4, #8
#endif
	str	r4, [sp]
	bl	PLT_SYM(__qdivrem)
	add	sp, sp, #8
#ifdef __ARM_EABI__
	.cfi_def_cfa_offset 16
	.cfi_offset 3, -12
	.cfi_offset 2, -16
#endif
	/*
	 * The remainder is already on the stack just waiting to be popped
	 * into r2/r3.
	 */
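	/*
	 * Stack layout after the add above:
	 *	sp + 0 .. sp + 7	remainder written by __qdivrem
	 *	sp + 8			saved r4
	 *	sp + 12			saved lr
	 */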
	pop	{r2-r4,pc}

#if !defined(_KERNEL) && !defined(_STANDALONE)
.Ldivbyzero:
	push	{r0-r1,r4,lr}
#ifdef __ARM_EABI__
	.save	{r0-r1,r4,lr}
	.cfi_def_cfa_offset 16
	.cfi_offset 14, -4
	.cfi_offset 4, -8
#endif
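	/*
	 * Divide by zero: pass ~0ULL to __aeabi_ldiv0 as the would-be
	 * quotient; whatever it returns in r0/r1 becomes the quotient,
	 * and the dividend saved above is popped back into r2/r3 as
	 * the remainder.
	 */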
#ifdef __thumb__
	movs	r0, #0
	mvns	r0, r0
#else
	mvn	r0, #0
#endif
	movs	r1, r0
	bl	PLT_SYM(__aeabi_ldiv0)
	pop	{r2-r4,pc}
#endif
#ifdef __ARM_EABI__
	.cfi_endproc
	.fnend
#endif
END(__aeabi_uldivmod)