1*5d9d9091SRichard Lowe/* 2*5d9d9091SRichard Lowe * CDDL HEADER START 3*5d9d9091SRichard Lowe * 4*5d9d9091SRichard Lowe * The contents of this file are subject to the terms of the 5*5d9d9091SRichard Lowe * Common Development and Distribution License (the "License"). 6*5d9d9091SRichard Lowe * You may not use this file except in compliance with the License. 7*5d9d9091SRichard Lowe * 8*5d9d9091SRichard Lowe * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE 9*5d9d9091SRichard Lowe * or http://www.opensolaris.org/os/licensing. 10*5d9d9091SRichard Lowe * See the License for the specific language governing permissions 11*5d9d9091SRichard Lowe * and limitations under the License. 12*5d9d9091SRichard Lowe * 13*5d9d9091SRichard Lowe * When distributing Covered Code, include this CDDL HEADER in each 14*5d9d9091SRichard Lowe * file and include the License file at usr/src/OPENSOLARIS.LICENSE. 15*5d9d9091SRichard Lowe * If applicable, add the following below this CDDL HEADER, with the 16*5d9d9091SRichard Lowe * fields enclosed by brackets "[]" replaced with your own identifying 17*5d9d9091SRichard Lowe * information: Portions Copyright [yyyy] [name of copyright owner] 18*5d9d9091SRichard Lowe * 19*5d9d9091SRichard Lowe * CDDL HEADER END 20*5d9d9091SRichard Lowe */ 21*5d9d9091SRichard Lowe 22*5d9d9091SRichard Lowe/* 23*5d9d9091SRichard Lowe * Copyright 2005 Sun Microsystems, Inc. All rights reserved. 24*5d9d9091SRichard Lowe * Use is subject to license terms. 25*5d9d9091SRichard Lowe */ 26*5d9d9091SRichard Lowe 27*5d9d9091SRichard Lowe .file "_divdi3.s" 28*5d9d9091SRichard Lowe 29*5d9d9091SRichard Lowe#include <SYS.h> 30*5d9d9091SRichard Lowe 31*5d9d9091SRichard Lowe/* 32*5d9d9091SRichard Lowe * C support for 64-bit modulo and division. 33*5d9d9091SRichard Lowe * GNU routines callable from C (though generated by the compiler). 34*5d9d9091SRichard Lowe * Hand-customized compiler output - see comments for details. 
 */

#if defined(__lint)

/*ARGSUSED*/
uint64_t
__udivdi3(uint64_t a, uint64_t b)
{ return (0); }

/*ARGSUSED*/
uint64_t
__umoddi3(uint64_t a, uint64_t b)
{ return (0); }

/*ARGSUSED*/
int64_t
__divdi3(int64_t a, int64_t b)
{ return (0); }

/*ARGSUSED*/
int64_t
__moddi3(int64_t a, int64_t b)
{ return (0); }

#else

/*
 * __udivdi3
 *
 * Perform division of two unsigned 64-bit quantities, returning the
 * quotient in %edx:%eax.
 *
 * Calling convention (i386): x in 4(%esp)/8(%esp), y in 12(%esp)/16(%esp).
 * x is loaded into %edx:%eax and y is re-pushed so the internal UDiv
 * helper (defined elsewhere in this library) sees it as its argument.
 */
	ENTRY(__udivdi3)
	movl	4(%esp), %eax	/ x, x		(low word of x)
	movl	8(%esp), %edx	/ x, x		(high word of x)
	pushl	16(%esp)	/ y		(high word; y was at 12/16 before pushes)
	pushl	16(%esp)	/		(low word; prior push shifted offsets by 4)
	call	UDiv
	addl	$8, %esp	/ pop the two pushed words of y
	ret
	SET_SIZE(__udivdi3)

/*
 * __umoddi3
 *
 * Perform division of two unsigned 64-bit quantities, returning the
 * remainder in %edx:%eax.
 */
	ENTRY(__umoddi3)
	subl	$12, %esp		/ scratch: 8 bytes for rem + alignment pad
	movl	%esp, %ecx		/, tmp65	(&rem for UDivRem)
	movl	16(%esp), %eax		/ x, x		(low word of x)
	movl	20(%esp), %edx		/ x, x		(high word of x)
	pushl	%ecx			/ tmp65		(&rem, third argument)
	pushl	32(%esp)		/ y		(high word)
	pushl	32(%esp)		/		(low word; prior push shifted offsets)
	call	UDivRem			/ stores the remainder through &rem
	movl	12(%esp), %eax		/ rem, rem	(low word of remainder)
	movl	16(%esp), %edx		/ rem, rem	(high word of remainder)
	addl	$24, %esp		/ pop 3 pushed args + 12-byte scratch
	ret
	SET_SIZE(__umoddi3)

/*
 * __divdi3
 *
 * Perform division of two signed 64-bit quantities, returning the
 * quotient in %edx:%eax.
 *
 * Strategy (see C rendition below): take absolute values, do an
 * unsigned divide via UDiv, then negate the result if exactly one
 * operand was negative.
 */
/ int64_t
/ __divdi3(int64_t x, int64_t y)
/ {
/	int negative;
/	uint64_t xt, yt, r;
/
/	if (x < 0) {
/		xt = -(uint64_t) x;
/		negative = 1;
/	} else {
/		xt = x;
/		negative = 0;
/	}
/	if (y < 0) {
/		yt = -(uint64_t) y;
/		negative ^= 1;
/	} else {
/		yt = y;
/	}
/	r = UDiv(xt, yt);
/	return (negative ?
/	    (int64_t) - r : r);
/ }
	ENTRY(__divdi3)
	pushl	%ebp			/ %ebp holds the "negative" flag
	pushl	%edi
	pushl	%esi
	subl	$8, %esp		/ 8-byte local: xt
	movl	28(%esp), %edx	/ x, x	(high word of x)
	testl	%edx, %edx	/ x	(sign of x)
	movl	24(%esp), %eax	/ x, x	(low word of x)
	movl	32(%esp), %esi	/ y, y	(low word of y)
	movl	36(%esp), %edi	/ y, y	(high word of y)
	js	.LL55			/ x < 0: negate it first
	xorl	%ebp, %ebp	/ negative = 0
	testl	%edi, %edi	/ y	(sign of y)
	movl	%eax, (%esp)	/ x, xt
	movl	%edx, 4(%esp)	/ x, xt
	movl	%esi, %eax	/ y, yt
	movl	%edi, %edx	/ y, yt
	js	.LL56			/ y < 0: negate it and flip the flag
.LL53:
	/ common path: %edx:%eax = yt, (%esp) = xt, %ebp = negative
	pushl	%edx		/ yt	(high word)
	pushl	%eax		/ yt	(low word)
	movl	8(%esp), %eax	/ xt, xt
	movl	12(%esp), %edx	/ xt, xt
	call	UDiv			/ unsigned quotient in %edx:%eax
	popl	%ecx			/ discard pushed yt (2 words)
	testl	%ebp, %ebp	/ negative
	popl	%esi
	je	.LL54			/ signs matched: quotient is positive
	/ 64-bit two's-complement negate of %edx:%eax (negl/adcl/negl idiom)
	negl	%eax		/ r
	adcl	$0, %edx	/, r
	negl	%edx		/ r
.LL54:
	addl	$8, %esp		/ release xt local
	popl	%esi
	popl	%edi
	popl	%ebp
	ret
	.align	16
.LL55:
	/ x was negative: xt = -x (64-bit negate of %edx:%eax)
	negl	%eax		/ x
	adcl	$0, %edx	/, x
	negl	%edx		/ x
	testl	%edi, %edi	/ y	(sign of y)
	movl	%eax, (%esp)	/ x, xt
	movl	%edx, 4(%esp)	/ x, xt
	movl	$1, %ebp	/, negative	(x < 0, so flag starts at 1)
	movl	%esi, %eax	/ y, yt
	movl	%edi, %edx	/ y, yt
	jns	.LL53			/ y >= 0: go divide
	.align	16
.LL56:
	/ y was negative: yt = -y (64-bit negate), and flip the sign flag
	negl	%eax		/ yt
	adcl	$0, %edx	/, yt
	negl	%edx		/ yt
	xorl	$1, %ebp	/, negative
	jmp	.LL53
	SET_SIZE(__divdi3)

/*
 * __moddi3
 *
 * Perform division of two signed 64-bit quantities, returning the
 * remainder in %edx:%eax.
 *
 * Strategy (see C rendition below): take absolute values, obtain the
 * unsigned remainder via UDivRem, then negate it if x was negative
 * (the remainder takes the sign of the dividend).
 */
/ int64_t
/ __moddi3(int64_t x, int64_t y)
/ {
/	uint64_t xt, yt, rem;
/
/	if (x < 0) {
/		xt = -(uint64_t) x;
/	} else {
/		xt = x;
/	}
/	if (y < 0) {
/		yt = -(uint64_t) y;
/	} else {
/		yt = y;
/	}
/	(void) UDivRem(xt, yt, &rem);
/	return (x < 0 ?
/	    (int64_t) - rem : rem);
/ }
	ENTRY(__moddi3)
	pushl	%edi
	pushl	%esi
	subl	$20, %esp		/ locals: xt (8), rem (8), pad (4)
	movl	36(%esp), %ecx	/ x,	(high word of x, for sign test)
	movl	32(%esp), %esi	/ x,	(low word of x)
	movl	36(%esp), %edi	/ x,	(high word of x)
	testl	%ecx, %ecx		/ sign of x
	movl	40(%esp), %eax	/ y, y	(low word of y)
	movl	44(%esp), %edx	/ y, y	(high word of y)
	movl	%esi, (%esp)	/, xt
	movl	%edi, 4(%esp)	/, xt
	js	.LL63			/ x < 0: negate xt first
	testl	%edx, %edx	/ y	(sign of y)
	movl	%eax, %esi	/ y, yt
	movl	%edx, %edi	/ y, yt
	js	.LL64			/ y < 0: negate yt
.LL61:
	/ common path: %edi:%esi = yt, (%esp) = xt, 8(%esp) = rem slot
	leal	8(%esp), %eax	/, tmp66	(&rem)
	pushl	%eax		/ tmp66		(third argument of UDivRem)
	pushl	%edi		/ yt	(high word)
	pushl	%esi		/ yt	(low word)
	movl	12(%esp), %eax	/ xt, xt
	movl	16(%esp), %edx	/ xt, xt
	call	UDivRem			/ stores remainder through &rem
	addl	$12, %esp		/ pop the three pushed arguments
	movl	36(%esp), %edi	/ x,	(reload high word of x for its sign)
	testl	%edi, %edi
	movl	8(%esp), %eax	/ rem, rem	(low word of remainder)
	movl	12(%esp), %edx	/ rem, rem	(high word of remainder)
	js	.LL65			/ x < 0: remainder must be negated
	addl	$20, %esp		/ release locals
	popl	%esi
	popl	%edi
	ret
	.align	16
.LL63:
	/ x was negative: xt = -x (64-bit two's-complement negate)
	negl	%esi
	adcl	$0, %edi
	negl	%edi
	testl	%edx, %edx	/ y	(sign of y)
	movl	%esi, (%esp)	/, xt
	movl	%edi, 4(%esp)	/, xt
	movl	%eax, %esi	/ y, yt
	movl	%edx, %edi	/ y, yt
	jns	.LL61			/ y >= 0: go divide
	.align	16
.LL64:
	/ y was negative: yt = -y (64-bit two's-complement negate)
	negl	%esi		/ yt
	adcl	$0, %edi	/, yt
	negl	%edi		/ yt
	jmp	.LL61
	.align	16
.LL65:
	/ x was negative: negate the remainder in %edx:%eax before returning
	negl	%eax		/ rem
	adcl	$0, %edx	/, rem
	addl	$20, %esp		/ release locals (interleaved with the negate)
	popl	%esi
	negl	%edx		/ rem
	popl	%edi
	ret
	SET_SIZE(__moddi3)

#endif	/* __lint */