xref: /onnv-gate/usr/src/lib/libc/port/gen/malloc.c (revision 6812:febeba71273d)
10Sstevel@tonic-gate /*
20Sstevel@tonic-gate  * CDDL HEADER START
30Sstevel@tonic-gate  *
40Sstevel@tonic-gate  * The contents of this file are subject to the terms of the
52186Sraf  * Common Development and Distribution License (the "License").
62186Sraf  * You may not use this file except in compliance with the License.
70Sstevel@tonic-gate  *
80Sstevel@tonic-gate  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
90Sstevel@tonic-gate  * or http://www.opensolaris.org/os/licensing.
100Sstevel@tonic-gate  * See the License for the specific language governing permissions
110Sstevel@tonic-gate  * and limitations under the License.
120Sstevel@tonic-gate  *
130Sstevel@tonic-gate  * When distributing Covered Code, include this CDDL HEADER in each
140Sstevel@tonic-gate  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
150Sstevel@tonic-gate  * If applicable, add the following below this CDDL HEADER, with the
160Sstevel@tonic-gate  * fields enclosed by brackets "[]" replaced with your own identifying
170Sstevel@tonic-gate  * information: Portions Copyright [yyyy] [name of copyright owner]
180Sstevel@tonic-gate  *
190Sstevel@tonic-gate  * CDDL HEADER END
200Sstevel@tonic-gate  */
212186Sraf 
220Sstevel@tonic-gate /*
236515Sraf  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
240Sstevel@tonic-gate  * Use is subject to license terms.
250Sstevel@tonic-gate  */
260Sstevel@tonic-gate 
270Sstevel@tonic-gate /*	Copyright (c) 1988 AT&T	*/
280Sstevel@tonic-gate /*	  All Rights Reserved  	*/
290Sstevel@tonic-gate 
30*6812Sraf #pragma ident	"%Z%%M%	%I%	%E% SMI"
31*6812Sraf 
320Sstevel@tonic-gate /*
330Sstevel@tonic-gate  *	Memory management: malloc(), realloc(), free().
340Sstevel@tonic-gate  *
350Sstevel@tonic-gate  *	The following #-parameters may be redefined:
360Sstevel@tonic-gate  *	SEGMENTED: if defined, memory requests are assumed to be
370Sstevel@tonic-gate  *		non-contiguous across calls of GETCORE's.
380Sstevel@tonic-gate  *	GETCORE: a function to get more core memory. If not SEGMENTED,
390Sstevel@tonic-gate  *		GETCORE(0) is assumed to return the next available
400Sstevel@tonic-gate  *		address. Default is 'sbrk'.
410Sstevel@tonic-gate  *	ERRCORE: the error code as returned by GETCORE.
420Sstevel@tonic-gate  *		Default is (char *)(-1).
430Sstevel@tonic-gate  *	CORESIZE: a desired unit (measured in bytes) to be used
440Sstevel@tonic-gate  *		with GETCORE. Default is (1024*ALIGN).
450Sstevel@tonic-gate  *
460Sstevel@tonic-gate  *	This algorithm is based on a  best fit strategy with lists of
470Sstevel@tonic-gate  *	free elts maintained in a self-adjusting binary tree. Each list
480Sstevel@tonic-gate  *	contains all elts of the same size. The tree is ordered by size.
490Sstevel@tonic-gate  *	For results on self-adjusting trees, see the paper:
500Sstevel@tonic-gate  *		Self-Adjusting Binary Trees,
510Sstevel@tonic-gate  *		DD Sleator & RE Tarjan, JACM 1985.
520Sstevel@tonic-gate  *
530Sstevel@tonic-gate  *	The header of a block contains the size of the data part in bytes.
540Sstevel@tonic-gate  *	Since the size of a block is 0%4, the low two bits of the header
550Sstevel@tonic-gate  *	are free and used as follows:
560Sstevel@tonic-gate  *
570Sstevel@tonic-gate  *		BIT0:	1 for busy (block is in use), 0 for free.
580Sstevel@tonic-gate  *		BIT1:	if the block is busy, this bit is 1 if the
590Sstevel@tonic-gate  *			preceding block in contiguous memory is free.
600Sstevel@tonic-gate  *			Otherwise, it is always 0.
610Sstevel@tonic-gate  */
620Sstevel@tonic-gate 
63*6812Sraf #include "lint.h"
640Sstevel@tonic-gate #include "mallint.h"
650Sstevel@tonic-gate #include "mtlib.h"
660Sstevel@tonic-gate 
670Sstevel@tonic-gate /*
680Sstevel@tonic-gate  * Some abusers of the system (notably java1.2) acquire __malloc_lock
690Sstevel@tonic-gate  * in order to prevent threads from holding it while they are being
700Sstevel@tonic-gate  * suspended via thr_suspend() or thr_suspend_allmutators().
712186Sraf  * This never worked when alternate malloc() libraries were used
722186Sraf  * because they don't use __malloc_lock for their locking strategy.
732186Sraf  * We leave __malloc_lock as an external variable to satisfy these
742186Sraf  * old programs, but we define a new lock, private to libc, to do the
752186Sraf  * real locking: libc_malloc_lock.  This puts libc's malloc() package
762186Sraf  * on the same footing as all other malloc packages.
770Sstevel@tonic-gate  */
780Sstevel@tonic-gate mutex_t __malloc_lock = DEFAULTMUTEX;
790Sstevel@tonic-gate mutex_t libc_malloc_lock = DEFAULTMUTEX;
800Sstevel@tonic-gate 
810Sstevel@tonic-gate static TREE	*Root,		/* root of the free tree */
820Sstevel@tonic-gate 		*Bottom,	/* the last free chunk in the arena */
830Sstevel@tonic-gate 		*_morecore(size_t);	/* function to get more core */
840Sstevel@tonic-gate 
850Sstevel@tonic-gate static char	*Baddr;		/* current high address of the arena */
860Sstevel@tonic-gate static char	*Lfree;		/* last freed block with data intact */
870Sstevel@tonic-gate 
880Sstevel@tonic-gate static void	t_delete(TREE *);
890Sstevel@tonic-gate static void	t_splay(TREE *);
900Sstevel@tonic-gate static void	realfree(void *);
910Sstevel@tonic-gate static void	cleanfree(void *);
920Sstevel@tonic-gate static void	*_malloc_unlocked(size_t);
930Sstevel@tonic-gate 
/* size of the ring of deferred frees kept until the next malloc() */
#define	FREESIZE (1<<5) /* size for preserving free blocks until next malloc */
/*
 * Mask for indexing flist[].  Parenthesized so the macro is safe in any
 * expression context; the old unparenthesized form (FREESIZE-1) only
 * worked because '&' binds more loosely than '-'.
 */
#define	FREEMASK (FREESIZE - 1)

static void *flist[FREESIZE];	/* list of blocks to be freed on next malloc */
static int freeidx;		/* index of free blocks in flist % FREESIZE */
990Sstevel@tonic-gate 
1000Sstevel@tonic-gate /*
1012186Sraf  * Interfaces used only by atfork_init() functions.
1022186Sraf  */
1032186Sraf void
malloc_locks(void)1042186Sraf malloc_locks(void)
1052186Sraf {
1066515Sraf 	(void) mutex_lock(&libc_malloc_lock);
1072186Sraf }
1082186Sraf 
1092186Sraf void
malloc_unlocks(void)1102186Sraf malloc_unlocks(void)
1112186Sraf {
1126515Sraf 	(void) mutex_unlock(&libc_malloc_lock);
1132186Sraf }
1142186Sraf 
1152186Sraf /*
1160Sstevel@tonic-gate  *	Allocation of small blocks
1170Sstevel@tonic-gate  */
1180Sstevel@tonic-gate static TREE	*List[MINSIZE/WORDSIZE-1]; /* lists of small blocks */
1190Sstevel@tonic-gate 
/*
 * _smalloc(): allocate a block smaller than MINSIZE from the per-size
 * free lists in List[].  The caller must already hold libc_malloc_lock
 * and must pass a size that is a multiple of WORDSIZE (ASSERTed below).
 * When the list for this size is empty, a chunk large enough for NPS
 * blocks is carved out of the main arena via _malloc_unlocked() and
 * threaded into a singly linked list via AFTER().
 * Returns a pointer to the block's data area, or 0 on exhaustion.
 */
1200Sstevel@tonic-gate static void *
_smalloc(size_t size)1210Sstevel@tonic-gate _smalloc(size_t size)
1220Sstevel@tonic-gate {
1230Sstevel@tonic-gate 	TREE	*tp;
1240Sstevel@tonic-gate 	size_t	i;
1250Sstevel@tonic-gate 
1260Sstevel@tonic-gate 	ASSERT(size % WORDSIZE == 0);
1270Sstevel@tonic-gate 	/* want to return a unique pointer on malloc(0) */
1280Sstevel@tonic-gate 	if (size == 0)
1290Sstevel@tonic-gate 		size = WORDSIZE;
1300Sstevel@tonic-gate 
1310Sstevel@tonic-gate 	/* list to use */
1320Sstevel@tonic-gate 	i = size / WORDSIZE - 1;
1330Sstevel@tonic-gate 
1340Sstevel@tonic-gate 	if (List[i] == NULL) {
1350Sstevel@tonic-gate 		TREE *np;
1360Sstevel@tonic-gate 		int n;
1370Sstevel@tonic-gate 		/* number of blocks to get at one time */
1380Sstevel@tonic-gate #define	NPS (WORDSIZE*8)
1390Sstevel@tonic-gate 		ASSERT((size + WORDSIZE) * NPS >= MINSIZE);
1400Sstevel@tonic-gate 
1410Sstevel@tonic-gate 		/* get NPS of these block types */
1420Sstevel@tonic-gate 		if ((List[i] = _malloc_unlocked((size + WORDSIZE) * NPS)) == 0)
1430Sstevel@tonic-gate 			return (0);
1440Sstevel@tonic-gate 
1450Sstevel@tonic-gate 		/* thread the fresh blocks into a singly linked list */
1460Sstevel@tonic-gate 		for (n = 0, np = List[i]; n < NPS; ++n) {
1470Sstevel@tonic-gate 			tp = np;
1480Sstevel@tonic-gate 			SIZE(tp) = size;
1490Sstevel@tonic-gate 			np = NEXT(tp);
1500Sstevel@tonic-gate 			AFTER(tp) = np;
1510Sstevel@tonic-gate 		}
1520Sstevel@tonic-gate 		AFTER(tp) = NULL;
1530Sstevel@tonic-gate 	}
1540Sstevel@tonic-gate 
1550Sstevel@tonic-gate 	/* allocate from the head of the queue */
1560Sstevel@tonic-gate 	tp = List[i];
1570Sstevel@tonic-gate 	List[i] = AFTER(tp);
	/* BIT0 in the size word marks the block busy */
1580Sstevel@tonic-gate 	SETBIT0(SIZE(tp));
1590Sstevel@tonic-gate 	return (DATA(tp));
1600Sstevel@tonic-gate }
1610Sstevel@tonic-gate 
1620Sstevel@tonic-gate void *
malloc(size_t size)1630Sstevel@tonic-gate malloc(size_t size)
1640Sstevel@tonic-gate {
1650Sstevel@tonic-gate 	void *ret;
1660Sstevel@tonic-gate 
1670Sstevel@tonic-gate 	if (!primary_link_map) {
1680Sstevel@tonic-gate 		errno = ENOTSUP;
1690Sstevel@tonic-gate 		return (NULL);
1700Sstevel@tonic-gate 	}
1710Sstevel@tonic-gate 	assert_no_libc_locks_held();
1726515Sraf 	(void) mutex_lock(&libc_malloc_lock);
1730Sstevel@tonic-gate 	ret = _malloc_unlocked(size);
1746515Sraf 	(void) mutex_unlock(&libc_malloc_lock);
1750Sstevel@tonic-gate 	return (ret);
1760Sstevel@tonic-gate }
1770Sstevel@tonic-gate 
/*
 * _malloc_unlocked(): the real allocator; caller holds libc_malloc_lock.
 * Order of attempts, as coded below:
 *   1. reuse the most recently freed block (Lfree) when it matches
 *      exactly, or when it is larger and at least MINSIZE was asked;
 *   2. flush the deferred-free ring (cleanfree), then satisfy small
 *      requests (< MINSIZE) from the _smalloc() lists;
 *   3. best-fit search of the free tree; when nothing fits, splay the
 *      last node visited to the root (t_splay) to keep the tree balanced;
 *   4. fall back to the Bottom block or grow the arena via _morecore().
 * A leftover tail big enough for a free block (MINSIZE + WORDSIZE) is
 * split off and handed back to the tree through realfree().
 * Returns NULL with errno = ENOMEM for requests above MAX_MALLOC or
 * when no core can be obtained.
 */
1780Sstevel@tonic-gate static void *
_malloc_unlocked(size_t size)1790Sstevel@tonic-gate _malloc_unlocked(size_t size)
1800Sstevel@tonic-gate {
1810Sstevel@tonic-gate 	size_t	n;
1820Sstevel@tonic-gate 	TREE	*tp, *sp;
1830Sstevel@tonic-gate 	size_t	o_bit1;
1840Sstevel@tonic-gate 
1850Sstevel@tonic-gate 	COUNT(nmalloc);
1860Sstevel@tonic-gate 	ASSERT(WORDSIZE == ALIGN);
1870Sstevel@tonic-gate 
1880Sstevel@tonic-gate 	/* check for size that could overflow calculations */
1890Sstevel@tonic-gate 	if (size > MAX_MALLOC) {
1900Sstevel@tonic-gate 		errno = ENOMEM;
1910Sstevel@tonic-gate 		return (NULL);
1920Sstevel@tonic-gate 	}
1930Sstevel@tonic-gate 
1940Sstevel@tonic-gate 	/* make sure that size is 0 mod ALIGN */
1950Sstevel@tonic-gate 	ROUND(size);
1960Sstevel@tonic-gate 
1970Sstevel@tonic-gate 	/* see if the last free block can be used */
1980Sstevel@tonic-gate 	if (Lfree) {
1990Sstevel@tonic-gate 		sp = BLOCK(Lfree);
2000Sstevel@tonic-gate 		n = SIZE(sp);
2010Sstevel@tonic-gate 		CLRBITS01(n);
2020Sstevel@tonic-gate 		if (n == size) {
2030Sstevel@tonic-gate 			/*
2040Sstevel@tonic-gate 			 * exact match, use it as is
2050Sstevel@tonic-gate 			 */
2060Sstevel@tonic-gate 			freeidx = (freeidx + FREESIZE - 1) &
2076515Sraf 			    FREEMASK; /* 1 back */
2080Sstevel@tonic-gate 			flist[freeidx] = Lfree = NULL;
2090Sstevel@tonic-gate 			return (DATA(sp));
2100Sstevel@tonic-gate 		} else if (size >= MINSIZE && n > size) {
2110Sstevel@tonic-gate 			/*
2120Sstevel@tonic-gate 			 * got a big enough piece
2130Sstevel@tonic-gate 			 */
2140Sstevel@tonic-gate 			freeidx = (freeidx + FREESIZE - 1) &
2156515Sraf 			    FREEMASK; /* 1 back */
2160Sstevel@tonic-gate 			flist[freeidx] = Lfree = NULL;
			/* preserve the predecessor-is-free flag of the reused block */
2170Sstevel@tonic-gate 			o_bit1 = SIZE(sp) & BIT1;
2180Sstevel@tonic-gate 			SIZE(sp) = n;
2190Sstevel@tonic-gate 			goto leftover;
2200Sstevel@tonic-gate 		}
2210Sstevel@tonic-gate 	}
2220Sstevel@tonic-gate 	o_bit1 = 0;
2230Sstevel@tonic-gate 
2240Sstevel@tonic-gate 	/* perform free's of space since last malloc */
2250Sstevel@tonic-gate 	cleanfree(NULL);
2260Sstevel@tonic-gate 
2270Sstevel@tonic-gate 	/* small blocks */
2280Sstevel@tonic-gate 	if (size < MINSIZE)
2290Sstevel@tonic-gate 		return (_smalloc(size));
2300Sstevel@tonic-gate 
2310Sstevel@tonic-gate 	/* best-fit search: smallest elt whose size >= request */
2320Sstevel@tonic-gate 	sp = NULL;
2330Sstevel@tonic-gate 	n  = 0;
2340Sstevel@tonic-gate 	if (Root) {
2350Sstevel@tonic-gate 		tp = Root;
2360Sstevel@tonic-gate 		for (;;) {
2370Sstevel@tonic-gate 			/* branch left */
2380Sstevel@tonic-gate 			if (SIZE(tp) >= size) {
2390Sstevel@tonic-gate 				if (n == 0 || n >= SIZE(tp)) {
2400Sstevel@tonic-gate 					sp = tp;
2410Sstevel@tonic-gate 					n = SIZE(tp);
2420Sstevel@tonic-gate 				}
2430Sstevel@tonic-gate 				if (LEFT(tp))
2440Sstevel@tonic-gate 					tp = LEFT(tp);
2450Sstevel@tonic-gate 				else
2460Sstevel@tonic-gate 					break;
2470Sstevel@tonic-gate 			} else { /* branch right */
2480Sstevel@tonic-gate 				if (RIGHT(tp))
2490Sstevel@tonic-gate 					tp = RIGHT(tp);
2500Sstevel@tonic-gate 				else
2510Sstevel@tonic-gate 					break;
2520Sstevel@tonic-gate 			}
2530Sstevel@tonic-gate 		}
2540Sstevel@tonic-gate 
2550Sstevel@tonic-gate 		if (sp) {
2560Sstevel@tonic-gate 			t_delete(sp);
2570Sstevel@tonic-gate 		} else if (tp != Root) {
2580Sstevel@tonic-gate 			/* make the searched-to element the root */
2590Sstevel@tonic-gate 			t_splay(tp);
2600Sstevel@tonic-gate 			Root = tp;
2610Sstevel@tonic-gate 		}
2620Sstevel@tonic-gate 	}
2630Sstevel@tonic-gate 
2640Sstevel@tonic-gate 	/* if found none fitted in the tree */
2650Sstevel@tonic-gate 	if (!sp) {
2660Sstevel@tonic-gate 		if (Bottom && size <= SIZE(Bottom)) {
2670Sstevel@tonic-gate 			sp = Bottom;
2680Sstevel@tonic-gate 			CLRBITS01(SIZE(sp));
2690Sstevel@tonic-gate 		} else if ((sp = _morecore(size)) == NULL) /* no more memory */
2700Sstevel@tonic-gate 			return (NULL);
2710Sstevel@tonic-gate 	}
2720Sstevel@tonic-gate 
2730Sstevel@tonic-gate 	/* tell the forward neighbor that we're busy */
2740Sstevel@tonic-gate 	CLRBIT1(SIZE(NEXT(sp)));
2750Sstevel@tonic-gate 
2760Sstevel@tonic-gate 	ASSERT(ISBIT0(SIZE(NEXT(sp))));
2770Sstevel@tonic-gate 
2780Sstevel@tonic-gate leftover:
2790Sstevel@tonic-gate 	/* if the leftover is enough for a new free piece */
2800Sstevel@tonic-gate 	if ((n = (SIZE(sp) - size)) >= MINSIZE + WORDSIZE) {
2810Sstevel@tonic-gate 		n -= WORDSIZE;
2820Sstevel@tonic-gate 		SIZE(sp) = size;
2830Sstevel@tonic-gate 		tp = NEXT(sp);
2840Sstevel@tonic-gate 		SIZE(tp) = n|BIT0;
2850Sstevel@tonic-gate 		realfree(DATA(tp));
2860Sstevel@tonic-gate 	} else if (BOTTOM(sp))
2870Sstevel@tonic-gate 		Bottom = NULL;
2880Sstevel@tonic-gate 
2890Sstevel@tonic-gate 	/* return the allocated space */
2900Sstevel@tonic-gate 	SIZE(sp) |= BIT0 | o_bit1;
2910Sstevel@tonic-gate 	return (DATA(sp));
2920Sstevel@tonic-gate }
2930Sstevel@tonic-gate 
2940Sstevel@tonic-gate 
2950Sstevel@tonic-gate /*
2960Sstevel@tonic-gate  * realloc().
2970Sstevel@tonic-gate  *
2980Sstevel@tonic-gate  * If the block size is increasing, we try forward merging first.
2990Sstevel@tonic-gate  * This is not best-fit but it avoids some data recopying.
3000Sstevel@tonic-gate  */
/*
 * NOTE(review): as coded below, realloc(p, 0) frees p and returns NULL,
 * and realloc() of an already-freed block returns NULL without touching
 * it.  All real work happens under libc_malloc_lock; every return path
 * after the lock is taken must drop it.
 */
3010Sstevel@tonic-gate void *
realloc(void * old,size_t size)3020Sstevel@tonic-gate realloc(void *old, size_t size)
3030Sstevel@tonic-gate {
3040Sstevel@tonic-gate 	TREE	*tp, *np;
3050Sstevel@tonic-gate 	size_t	ts;
3060Sstevel@tonic-gate 	char	*new;
3070Sstevel@tonic-gate 
3080Sstevel@tonic-gate 	if (!primary_link_map) {
3090Sstevel@tonic-gate 		errno = ENOTSUP;
3100Sstevel@tonic-gate 		return (NULL);
3110Sstevel@tonic-gate 	}
3120Sstevel@tonic-gate 
3130Sstevel@tonic-gate 	assert_no_libc_locks_held();
3140Sstevel@tonic-gate 	COUNT(nrealloc);
3150Sstevel@tonic-gate 
3160Sstevel@tonic-gate 	/* check for size that could overflow calculations */
3170Sstevel@tonic-gate 	if (size > MAX_MALLOC) {
3180Sstevel@tonic-gate 		errno = ENOMEM;
3190Sstevel@tonic-gate 		return (NULL);
3200Sstevel@tonic-gate 	}
3210Sstevel@tonic-gate 
3220Sstevel@tonic-gate 	/* pointer to the block */
3236515Sraf 	(void) mutex_lock(&libc_malloc_lock);
3240Sstevel@tonic-gate 	if (old == NULL) {
3250Sstevel@tonic-gate 		new = _malloc_unlocked(size);
3266515Sraf 		(void) mutex_unlock(&libc_malloc_lock);
3270Sstevel@tonic-gate 		return (new);
3280Sstevel@tonic-gate 	}
3290Sstevel@tonic-gate 
3300Sstevel@tonic-gate 	/* perform free's of space since last malloc */
3310Sstevel@tonic-gate 	cleanfree(old);
3320Sstevel@tonic-gate 
3330Sstevel@tonic-gate 	/* make sure that size is 0 mod ALIGN */
3340Sstevel@tonic-gate 	ROUND(size);
3350Sstevel@tonic-gate 
	/* ts keeps the original size word, including the BIT0/BIT1 flags */
3360Sstevel@tonic-gate 	tp = BLOCK(old);
3370Sstevel@tonic-gate 	ts = SIZE(tp);
3380Sstevel@tonic-gate 
3390Sstevel@tonic-gate 	/* if the block was freed, data has been destroyed. */
3400Sstevel@tonic-gate 	if (!ISBIT0(ts)) {
3416515Sraf 		(void) mutex_unlock(&libc_malloc_lock);
3420Sstevel@tonic-gate 		return (NULL);
3430Sstevel@tonic-gate 	}
3440Sstevel@tonic-gate 
3450Sstevel@tonic-gate 	/* nothing to do */
3460Sstevel@tonic-gate 	CLRBITS01(SIZE(tp));
3470Sstevel@tonic-gate 	if (size == SIZE(tp)) {
3480Sstevel@tonic-gate 		SIZE(tp) = ts;
3496515Sraf 		(void) mutex_unlock(&libc_malloc_lock);
3500Sstevel@tonic-gate 		return (old);
3510Sstevel@tonic-gate 	}
3520Sstevel@tonic-gate 
3530Sstevel@tonic-gate 	/* special cases involving small blocks */
3540Sstevel@tonic-gate 	if (size < MINSIZE || SIZE(tp) < MINSIZE) {
3550Sstevel@tonic-gate 		/* free if size is zero */
3560Sstevel@tonic-gate 		if (size == 0) {
3570Sstevel@tonic-gate 			SETOLD01(SIZE(tp), ts);
3580Sstevel@tonic-gate 			_free_unlocked(old);
3596515Sraf 			(void) mutex_unlock(&libc_malloc_lock);
3600Sstevel@tonic-gate 			return (NULL);
3610Sstevel@tonic-gate 		} else {
3620Sstevel@tonic-gate 			goto call_malloc;
3630Sstevel@tonic-gate 		}
3640Sstevel@tonic-gate 	}
3650Sstevel@tonic-gate 
3660Sstevel@tonic-gate 	/* block is increasing in size, try merging the next block */
3670Sstevel@tonic-gate 	if (size > SIZE(tp)) {
3680Sstevel@tonic-gate 		np = NEXT(tp);
3690Sstevel@tonic-gate 		if (!ISBIT0(SIZE(np))) {
3700Sstevel@tonic-gate 			ASSERT(SIZE(np) >= MINSIZE);
3710Sstevel@tonic-gate 			ASSERT(!ISBIT1(SIZE(np)));
3720Sstevel@tonic-gate 			SIZE(tp) += SIZE(np) + WORDSIZE;
3730Sstevel@tonic-gate 			if (np != Bottom)
3740Sstevel@tonic-gate 				t_delete(np);
3750Sstevel@tonic-gate 			else
3760Sstevel@tonic-gate 				Bottom = NULL;
3770Sstevel@tonic-gate 			CLRBIT1(SIZE(NEXT(np)));
3780Sstevel@tonic-gate 		}
3790Sstevel@tonic-gate 
3800Sstevel@tonic-gate #ifndef SEGMENTED
3810Sstevel@tonic-gate 		/* not enough & at TRUE end of memory, try extending core */
3820Sstevel@tonic-gate 		if (size > SIZE(tp) && BOTTOM(tp) && GETCORE(0) == Baddr) {
3830Sstevel@tonic-gate 			Bottom = tp;
3840Sstevel@tonic-gate 			if ((tp = _morecore(size)) == NULL) {
3850Sstevel@tonic-gate 				tp = Bottom;
3860Sstevel@tonic-gate 				Bottom = NULL;
3870Sstevel@tonic-gate 				}
3880Sstevel@tonic-gate 		}
3890Sstevel@tonic-gate #endif
3900Sstevel@tonic-gate 	}
3910Sstevel@tonic-gate 
3920Sstevel@tonic-gate 	/* got enough space to use */
3930Sstevel@tonic-gate 	if (size <= SIZE(tp)) {
3940Sstevel@tonic-gate 		size_t n;
3950Sstevel@tonic-gate 
3960Sstevel@tonic-gate chop_big:
		/* split off a free tail when there is room for one */
3970Sstevel@tonic-gate 		if ((n = (SIZE(tp) - size)) >= MINSIZE + WORDSIZE) {
3980Sstevel@tonic-gate 			n -= WORDSIZE;
3990Sstevel@tonic-gate 			SIZE(tp) = size;
4000Sstevel@tonic-gate 			np = NEXT(tp);
4010Sstevel@tonic-gate 			SIZE(np) = n|BIT0;
4020Sstevel@tonic-gate 			realfree(DATA(np));
4030Sstevel@tonic-gate 		} else if (BOTTOM(tp))
4040Sstevel@tonic-gate 			Bottom = NULL;
4050Sstevel@tonic-gate 
4060Sstevel@tonic-gate 		/* the previous block may be free */
4070Sstevel@tonic-gate 		SETOLD01(SIZE(tp), ts);
4086515Sraf 		(void) mutex_unlock(&libc_malloc_lock);
4090Sstevel@tonic-gate 		return (old);
4100Sstevel@tonic-gate 	}
4110Sstevel@tonic-gate 
4120Sstevel@tonic-gate 	/* call malloc to get a new block */
4130Sstevel@tonic-gate call_malloc:
4140Sstevel@tonic-gate 	SETOLD01(SIZE(tp), ts);
4150Sstevel@tonic-gate 	if ((new = _malloc_unlocked(size)) != NULL) {
4160Sstevel@tonic-gate 		CLRBITS01(ts);
4170Sstevel@tonic-gate 		if (ts > size)
4180Sstevel@tonic-gate 			ts = size;
4190Sstevel@tonic-gate 		MEMCOPY(new, old, ts);
4200Sstevel@tonic-gate 		_free_unlocked(old);
4216515Sraf 		(void) mutex_unlock(&libc_malloc_lock);
4220Sstevel@tonic-gate 		return (new);
4230Sstevel@tonic-gate 	}
4240Sstevel@tonic-gate 
4250Sstevel@tonic-gate 	/*
4260Sstevel@tonic-gate 	 * Attempt special case recovery allocations since malloc() failed:
4270Sstevel@tonic-gate 	 *
4280Sstevel@tonic-gate 	 * 1. size <= SIZE(tp) < MINSIZE
4290Sstevel@tonic-gate 	 *	Simply return the existing block
4300Sstevel@tonic-gate 	 * 2. SIZE(tp) < size < MINSIZE
4310Sstevel@tonic-gate 	 *	malloc() may have failed to allocate the chunk of
4320Sstevel@tonic-gate 	 *	small blocks. Try asking for MINSIZE bytes.
4330Sstevel@tonic-gate 	 * 3. size < MINSIZE <= SIZE(tp)
4340Sstevel@tonic-gate 	 *	malloc() may have failed as with 2.  Change to
4350Sstevel@tonic-gate 	 *	MINSIZE allocation which is taken from the beginning
4360Sstevel@tonic-gate 	 *	of the current block.
4370Sstevel@tonic-gate 	 * 4. MINSIZE <= SIZE(tp) < size
4380Sstevel@tonic-gate 	 *	If the previous block is free and the combination of
4390Sstevel@tonic-gate 	 *	these two blocks has at least size bytes, then merge
4400Sstevel@tonic-gate 	 *	the two blocks copying the existing contents backwards.
4410Sstevel@tonic-gate 	 */
4420Sstevel@tonic-gate 	CLRBITS01(SIZE(tp));
4430Sstevel@tonic-gate 	if (SIZE(tp) < MINSIZE) {
4440Sstevel@tonic-gate 		if (size < SIZE(tp)) {			/* case 1. */
4450Sstevel@tonic-gate 			SETOLD01(SIZE(tp), ts);
4466515Sraf 			(void) mutex_unlock(&libc_malloc_lock);
4470Sstevel@tonic-gate 			return (old);
4480Sstevel@tonic-gate 		} else if (size < MINSIZE) {		/* case 2. */
4490Sstevel@tonic-gate 			size = MINSIZE;
4500Sstevel@tonic-gate 			goto call_malloc;
4510Sstevel@tonic-gate 		}
4520Sstevel@tonic-gate 	} else if (size < MINSIZE) {			/* case 3. */
4530Sstevel@tonic-gate 		size = MINSIZE;
4540Sstevel@tonic-gate 		goto chop_big;
4550Sstevel@tonic-gate 	} else if (ISBIT1(ts) &&
4560Sstevel@tonic-gate 	    (SIZE(np = LAST(tp)) + SIZE(tp) + WORDSIZE) >= size) {
4570Sstevel@tonic-gate 		ASSERT(!ISBIT0(SIZE(np)));
4580Sstevel@tonic-gate 		t_delete(np);
4590Sstevel@tonic-gate 		SIZE(np) += SIZE(tp) + WORDSIZE;
4600Sstevel@tonic-gate 		/*
4610Sstevel@tonic-gate 		 * Since the copy may overlap, use memmove() if available.
4620Sstevel@tonic-gate 		 * Otherwise, copy by hand.
4630Sstevel@tonic-gate 		 */
4640Sstevel@tonic-gate 		(void) memmove(DATA(np), old, SIZE(tp));
4650Sstevel@tonic-gate 		old = DATA(np);
4660Sstevel@tonic-gate 		tp = np;
4670Sstevel@tonic-gate 		CLRBIT1(ts);
4680Sstevel@tonic-gate 		goto chop_big;
4690Sstevel@tonic-gate 	}
4700Sstevel@tonic-gate 	SETOLD01(SIZE(tp), ts);
4716515Sraf 	(void) mutex_unlock(&libc_malloc_lock);
4720Sstevel@tonic-gate 	return (NULL);
4730Sstevel@tonic-gate }
4740Sstevel@tonic-gate 
4750Sstevel@tonic-gate 
4760Sstevel@tonic-gate /*
4770Sstevel@tonic-gate  * realfree().
4780Sstevel@tonic-gate  *
4790Sstevel@tonic-gate  * Coalescing of adjacent free blocks is done first.
4800Sstevel@tonic-gate  * Then, the new free block is leaf-inserted into the free tree
4810Sstevel@tonic-gate  * without splaying. This strategy does not guarantee the amortized
4820Sstevel@tonic-gate  * O(nlogn) behaviour for the insert/delete/find set of operations
4830Sstevel@tonic-gate  * on the tree. In practice, however, free is much more infrequent
4840Sstevel@tonic-gate  * than malloc/realloc and the tree searches performed by these
4850Sstevel@tonic-gate  * functions adequately keep the tree in balance.
4860Sstevel@tonic-gate  */
/*
 * realfree(): actually return a block to the allocator (caller holds
 * libc_malloc_lock).  Silently ignores blocks whose BIT0 (busy) flag
 * is already clear.  Small blocks go back to the List[] free lists;
 * larger ones are coalesced with free neighbors on both sides, then
 * either become the new Bottom block or are leaf-inserted into the
 * free tree (equal-size nodes are chained on a doubly linked list
 * headed at the tree node).
 */
4870Sstevel@tonic-gate static void
realfree(void * old)4880Sstevel@tonic-gate realfree(void *old)
4890Sstevel@tonic-gate {
4900Sstevel@tonic-gate 	TREE	*tp, *sp, *np;
4910Sstevel@tonic-gate 	size_t	ts, size;
4920Sstevel@tonic-gate 
4930Sstevel@tonic-gate 	COUNT(nfree);
4940Sstevel@tonic-gate 
4950Sstevel@tonic-gate 	/* pointer to the block */
4960Sstevel@tonic-gate 	tp = BLOCK(old);
4970Sstevel@tonic-gate 	ts = SIZE(tp);
	/* already free: nothing to do (guards against double free) */
4980Sstevel@tonic-gate 	if (!ISBIT0(ts))
4990Sstevel@tonic-gate 		return;
5000Sstevel@tonic-gate 	CLRBITS01(SIZE(tp));
5010Sstevel@tonic-gate 
5020Sstevel@tonic-gate 	/* small block, put it in the right linked list */
5030Sstevel@tonic-gate 	if (SIZE(tp) < MINSIZE) {
5040Sstevel@tonic-gate 		ASSERT(SIZE(tp) / WORDSIZE >= 1);
5050Sstevel@tonic-gate 		ts = SIZE(tp) / WORDSIZE - 1;
5060Sstevel@tonic-gate 		AFTER(tp) = List[ts];
5070Sstevel@tonic-gate 		List[ts] = tp;
5080Sstevel@tonic-gate 		return;
5090Sstevel@tonic-gate 	}
5100Sstevel@tonic-gate 
5110Sstevel@tonic-gate 	/* see if coalescing with next block is warranted */
5120Sstevel@tonic-gate 	np = NEXT(tp);
5130Sstevel@tonic-gate 	if (!ISBIT0(SIZE(np))) {
5140Sstevel@tonic-gate 		if (np != Bottom)
5150Sstevel@tonic-gate 			t_delete(np);
5160Sstevel@tonic-gate 		SIZE(tp) += SIZE(np) + WORDSIZE;
5170Sstevel@tonic-gate 	}
5180Sstevel@tonic-gate 
5190Sstevel@tonic-gate 	/* the same with the preceding block */
5200Sstevel@tonic-gate 	if (ISBIT1(ts)) {
5210Sstevel@tonic-gate 		np = LAST(tp);
5220Sstevel@tonic-gate 		ASSERT(!ISBIT0(SIZE(np)));
5230Sstevel@tonic-gate 		ASSERT(np != Bottom);
5240Sstevel@tonic-gate 		t_delete(np);
5250Sstevel@tonic-gate 		SIZE(np) += SIZE(tp) + WORDSIZE;
5260Sstevel@tonic-gate 		tp = np;
5270Sstevel@tonic-gate 	}
5280Sstevel@tonic-gate 
5290Sstevel@tonic-gate 	/* initialize tree info */
5300Sstevel@tonic-gate 	PARENT(tp) = LEFT(tp) = RIGHT(tp) = LINKFOR(tp) = NULL;
5310Sstevel@tonic-gate 
5320Sstevel@tonic-gate 	/* the last word of the block contains self's address */
5330Sstevel@tonic-gate 	*(SELFP(tp)) = tp;
5340Sstevel@tonic-gate 
5350Sstevel@tonic-gate 	/* set bottom block, or insert in the free tree */
5360Sstevel@tonic-gate 	if (BOTTOM(tp))
5370Sstevel@tonic-gate 		Bottom = tp;
5380Sstevel@tonic-gate 	else {
5390Sstevel@tonic-gate 		/* search for the place to insert */
5400Sstevel@tonic-gate 		if (Root) {
5410Sstevel@tonic-gate 			size = SIZE(tp);
5420Sstevel@tonic-gate 			np = Root;
5430Sstevel@tonic-gate 			for (;;) {
5440Sstevel@tonic-gate 				if (SIZE(np) > size) {
5450Sstevel@tonic-gate 					if (LEFT(np))
5460Sstevel@tonic-gate 						np = LEFT(np);
5470Sstevel@tonic-gate 					else {
5480Sstevel@tonic-gate 						LEFT(np) = tp;
5490Sstevel@tonic-gate 						PARENT(tp) = np;
5500Sstevel@tonic-gate 						break;
5510Sstevel@tonic-gate 					}
5520Sstevel@tonic-gate 				} else if (SIZE(np) < size) {
5530Sstevel@tonic-gate 					if (RIGHT(np))
5540Sstevel@tonic-gate 						np = RIGHT(np);
5550Sstevel@tonic-gate 					else {
5560Sstevel@tonic-gate 						RIGHT(np) = tp;
5570Sstevel@tonic-gate 						PARENT(tp) = np;
5580Sstevel@tonic-gate 						break;
5590Sstevel@tonic-gate 					}
5600Sstevel@tonic-gate 				} else {
					/* equal size: replace np in the tree, chain np behind tp */
5610Sstevel@tonic-gate 					if ((sp = PARENT(np)) != NULL) {
5620Sstevel@tonic-gate 						if (np == LEFT(sp))
5630Sstevel@tonic-gate 							LEFT(sp) = tp;
5640Sstevel@tonic-gate 						else
5650Sstevel@tonic-gate 							RIGHT(sp) = tp;
5660Sstevel@tonic-gate 						PARENT(tp) = sp;
5670Sstevel@tonic-gate 					} else
5680Sstevel@tonic-gate 						Root = tp;
5690Sstevel@tonic-gate 
5700Sstevel@tonic-gate 					/* insert to head of list */
5710Sstevel@tonic-gate 					if ((sp = LEFT(np)) != NULL)
5720Sstevel@tonic-gate 						PARENT(sp) = tp;
5730Sstevel@tonic-gate 					LEFT(tp) = sp;
5740Sstevel@tonic-gate 
5750Sstevel@tonic-gate 					if ((sp = RIGHT(np)) != NULL)
5760Sstevel@tonic-gate 						PARENT(sp) = tp;
5770Sstevel@tonic-gate 					RIGHT(tp) = sp;
5780Sstevel@tonic-gate 
5790Sstevel@tonic-gate 					/* doubly link list */
5800Sstevel@tonic-gate 					LINKFOR(tp) = np;
5810Sstevel@tonic-gate 					LINKBAK(np) = tp;
5820Sstevel@tonic-gate 					SETNOTREE(np);
5830Sstevel@tonic-gate 
5840Sstevel@tonic-gate 					break;
5850Sstevel@tonic-gate 				}
5860Sstevel@tonic-gate 			}
5870Sstevel@tonic-gate 		} else
5880Sstevel@tonic-gate 			Root = tp;
5890Sstevel@tonic-gate 	}
5900Sstevel@tonic-gate 
5910Sstevel@tonic-gate 	/* tell next block that this one is free */
5920Sstevel@tonic-gate 	SETBIT1(SIZE(NEXT(tp)));
5930Sstevel@tonic-gate 
5940Sstevel@tonic-gate 	ASSERT(ISBIT0(SIZE(NEXT(tp))));
5950Sstevel@tonic-gate }
5960Sstevel@tonic-gate 
5970Sstevel@tonic-gate /*
5980Sstevel@tonic-gate  * Get more core. Gaps in memory are noted as busy blocks.
5990Sstevel@tonic-gate  */
/*
 * _morecore(): grow the arena by at least `size' bytes, rounded up to
 * a multiple of CORESIZE, using GETCORE() (sbrk by default).  Returns
 * the new (or extended) bottom free block, or NULL on failure; sets
 * errno to ENOMEM when the padded request would overflow.  Per the
 * file header, gaps left by non-contiguous core are noted as busy
 * blocks so they are never coalesced or handed out.
 */
6000Sstevel@tonic-gate static TREE *
_morecore(size_t size)6010Sstevel@tonic-gate _morecore(size_t size)
6020Sstevel@tonic-gate {
6030Sstevel@tonic-gate 	TREE	*tp;
6040Sstevel@tonic-gate 	ssize_t	n, offset;
6050Sstevel@tonic-gate 	char	*addr;
6060Sstevel@tonic-gate 	ssize_t	nsize;
6070Sstevel@tonic-gate 
6080Sstevel@tonic-gate 	/* compute new amount of memory to get */
6090Sstevel@tonic-gate 	tp = Bottom;
6100Sstevel@tonic-gate 	n = (ssize_t)size + 2 * WORDSIZE;
6110Sstevel@tonic-gate 	addr = GETCORE(0);
6120Sstevel@tonic-gate 
6130Sstevel@tonic-gate 	if (addr == ERRCORE)
6140Sstevel@tonic-gate 		return (NULL);
6150Sstevel@tonic-gate 
6160Sstevel@tonic-gate 	/* need to pad size out so that addr is aligned */
6170Sstevel@tonic-gate 	if ((((uintptr_t)addr) % ALIGN) != 0)
6180Sstevel@tonic-gate 		offset = ALIGN - (uintptr_t)addr % ALIGN;
6190Sstevel@tonic-gate 	else
6200Sstevel@tonic-gate 		offset = 0;
6210Sstevel@tonic-gate 
6220Sstevel@tonic-gate #ifndef SEGMENTED
6230Sstevel@tonic-gate 	/* if not segmented memory, what we need may be smaller */
6240Sstevel@tonic-gate 	if (addr == Baddr) {
6250Sstevel@tonic-gate 		n -= WORDSIZE;
6260Sstevel@tonic-gate 		if (tp != NULL)
6270Sstevel@tonic-gate 			n -= SIZE(tp);
6280Sstevel@tonic-gate 	}
6290Sstevel@tonic-gate #endif
6300Sstevel@tonic-gate 
6310Sstevel@tonic-gate 	/* get a multiple of CORESIZE */
6320Sstevel@tonic-gate 	n = ((n - 1) / CORESIZE + 1) * CORESIZE;
6330Sstevel@tonic-gate 	nsize = n + offset;
6340Sstevel@tonic-gate 
6350Sstevel@tonic-gate 	/* check if nsize request could overflow in GETCORE */
6360Sstevel@tonic-gate 	if ((size_t)nsize > MAX_MALLOC - (uintptr_t)addr) {
6370Sstevel@tonic-gate 		errno = ENOMEM;
6380Sstevel@tonic-gate 		return (NULL);
6390Sstevel@tonic-gate 	}
6400Sstevel@tonic-gate 
6410Sstevel@tonic-gate 	if ((size_t)nsize <= MAX_GETCORE) {
6420Sstevel@tonic-gate 		if (GETCORE(nsize) == ERRCORE)
6430Sstevel@tonic-gate 			return (NULL);
6440Sstevel@tonic-gate 	} else {
6450Sstevel@tonic-gate 		intptr_t	delta;
6460Sstevel@tonic-gate 		/*
6470Sstevel@tonic-gate 		 * the value required is too big for GETCORE() to deal with
6480Sstevel@tonic-gate 		 * in one go, so use GETCORE() at most 2 times instead.
6490Sstevel@tonic-gate 		 * Argument to GETCORE() must be multiple of ALIGN.
6500Sstevel@tonic-gate 		 * If not, GETCORE(-MAX_GETCORE) will not return brk point
6510Sstevel@tonic-gate 		 * to previous value, but will be ALIGN more.
6520Sstevel@tonic-gate 		 * This would leave a small hole.
6530Sstevel@tonic-gate 		 */
6540Sstevel@tonic-gate 		delta = MAX_GETCORE;
6550Sstevel@tonic-gate 		while (delta > 0) {
6560Sstevel@tonic-gate 			if (GETCORE(delta) == ERRCORE) {
				/* undo the first grab if anything was obtained */
6570Sstevel@tonic-gate 				if (addr != GETCORE(0))
6580Sstevel@tonic-gate 					(void) GETCORE(-MAX_GETCORE);
6590Sstevel@tonic-gate 				return (NULL);
6600Sstevel@tonic-gate 			}
6610Sstevel@tonic-gate 			nsize -= MAX_GETCORE;
6620Sstevel@tonic-gate 			delta = nsize;
6630Sstevel@tonic-gate 		}
6640Sstevel@tonic-gate 	}
6650Sstevel@tonic-gate 
6660Sstevel@tonic-gate 	/* contiguous memory */
6670Sstevel@tonic-gate 	if (addr == Baddr) {
6680Sstevel@tonic-gate 		ASSERT(offset == 0);
6690Sstevel@tonic-gate 		if (tp) {
6700Sstevel@tonic-gate 			addr = (char *)tp;
6710Sstevel@tonic-gate 			n += SIZE(tp) + 2 * WORDSIZE;
6720Sstevel@tonic-gate 		} else {
6730Sstevel@tonic-gate 			addr = Baddr - WORDSIZE;
6740Sstevel@tonic-gate 			n += WORDSIZE;
6750Sstevel@tonic-gate 		}
6760Sstevel@tonic-gate 	} else
6770Sstevel@tonic-gate 		addr += offset;
6780Sstevel@tonic-gate 
6790Sstevel@tonic-gate 	/* new bottom address */
6800Sstevel@tonic-gate 	Baddr = addr + n;
6810Sstevel@tonic-gate 
6820Sstevel@tonic-gate 	/* new bottom block */
6830Sstevel@tonic-gate 	tp = (TREE *)(uintptr_t)addr;
6840Sstevel@tonic-gate 	SIZE(tp) = n - 2 * WORDSIZE;
6850Sstevel@tonic-gate 	ASSERT((SIZE(tp) % ALIGN) == 0);
6860Sstevel@tonic-gate 
6870Sstevel@tonic-gate 	/* reserved the last word to head any noncontiguous memory */
6880Sstevel@tonic-gate 	SETBIT0(SIZE(NEXT(tp)));
6890Sstevel@tonic-gate 
6900Sstevel@tonic-gate 	/* non-contiguous memory, free old bottom block */
6910Sstevel@tonic-gate 	if (Bottom && Bottom != tp) {
6920Sstevel@tonic-gate 		SETBIT0(SIZE(Bottom));
6930Sstevel@tonic-gate 		realfree(DATA(Bottom));
6940Sstevel@tonic-gate 	}
6950Sstevel@tonic-gate 
6960Sstevel@tonic-gate 	return (tp);
6970Sstevel@tonic-gate }
6980Sstevel@tonic-gate 
6990Sstevel@tonic-gate 
7000Sstevel@tonic-gate /*
7010Sstevel@tonic-gate  * Tree rotation functions (BU: bottom-up, TD: top-down)
7020Sstevel@tonic-gate  */
7030Sstevel@tonic-gate 
7040Sstevel@tonic-gate #define	LEFT1(x, y)		\
7050Sstevel@tonic-gate 			if ((RIGHT(x) = LEFT(y)) != NULL) PARENT(RIGHT(x)) = x;\
7060Sstevel@tonic-gate 			if ((PARENT(y) = PARENT(x)) != NULL)\
7070Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(y)) = y;\
7080Sstevel@tonic-gate 				else RIGHT(PARENT(y)) = y;\
7090Sstevel@tonic-gate 			LEFT(y) = x; PARENT(x) = y
7100Sstevel@tonic-gate 
7110Sstevel@tonic-gate #define	RIGHT1(x, y)		\
7120Sstevel@tonic-gate 			if ((LEFT(x) = RIGHT(y)) != NULL) PARENT(LEFT(x)) = x;\
7130Sstevel@tonic-gate 			if ((PARENT(y) = PARENT(x)) != NULL)\
7140Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(y)) = y;\
7150Sstevel@tonic-gate 				else RIGHT(PARENT(y)) = y;\
7160Sstevel@tonic-gate 			RIGHT(y) = x; PARENT(x) = y
7170Sstevel@tonic-gate 
7180Sstevel@tonic-gate #define	BULEFT2(x, y, z)	\
7190Sstevel@tonic-gate 			if ((RIGHT(x) = LEFT(y)) != NULL) PARENT(RIGHT(x)) = x;\
7200Sstevel@tonic-gate 			if ((RIGHT(y) = LEFT(z)) != NULL) PARENT(RIGHT(y)) = y;\
7210Sstevel@tonic-gate 			if ((PARENT(z) = PARENT(x)) != NULL)\
7220Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(z)) = z;\
7230Sstevel@tonic-gate 				else RIGHT(PARENT(z)) = z;\
7240Sstevel@tonic-gate 			LEFT(z) = y; PARENT(y) = z; LEFT(y) = x; PARENT(x) = y
7250Sstevel@tonic-gate 
7260Sstevel@tonic-gate #define	BURIGHT2(x, y, z)	\
7270Sstevel@tonic-gate 			if ((LEFT(x) = RIGHT(y)) != NULL) PARENT(LEFT(x)) = x;\
7280Sstevel@tonic-gate 			if ((LEFT(y) = RIGHT(z)) != NULL) PARENT(LEFT(y)) = y;\
7290Sstevel@tonic-gate 			if ((PARENT(z) = PARENT(x)) != NULL)\
7300Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(z)) = z;\
7310Sstevel@tonic-gate 				else RIGHT(PARENT(z)) = z;\
7320Sstevel@tonic-gate 			RIGHT(z) = y; PARENT(y) = z; RIGHT(y) = x; PARENT(x) = y
7330Sstevel@tonic-gate 
7340Sstevel@tonic-gate #define	TDLEFT2(x, y, z)	\
7350Sstevel@tonic-gate 			if ((RIGHT(y) = LEFT(z)) != NULL) PARENT(RIGHT(y)) = y;\
7360Sstevel@tonic-gate 			if ((PARENT(z) = PARENT(x)) != NULL)\
7370Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(z)) = z;\
7380Sstevel@tonic-gate 				else RIGHT(PARENT(z)) = z;\
7390Sstevel@tonic-gate 			PARENT(x) = z; LEFT(z) = x;
7400Sstevel@tonic-gate 
7410Sstevel@tonic-gate #define	TDRIGHT2(x, y, z)	\
7420Sstevel@tonic-gate 			if ((LEFT(y) = RIGHT(z)) != NULL) PARENT(LEFT(y)) = y;\
7430Sstevel@tonic-gate 			if ((PARENT(z) = PARENT(x)) != NULL)\
7440Sstevel@tonic-gate 				if (LEFT(PARENT(x)) == x) LEFT(PARENT(z)) = z;\
7450Sstevel@tonic-gate 				else RIGHT(PARENT(z)) = z;\
7460Sstevel@tonic-gate 			PARENT(x) = z; RIGHT(z) = x;
7470Sstevel@tonic-gate 
7480Sstevel@tonic-gate /*
7490Sstevel@tonic-gate  * Delete a tree element
7500Sstevel@tonic-gate  */
static void
t_delete(TREE *op)
{
	TREE	*tp, *sp, *gp;

	/* if this is a non-tree node */
	if (ISNOTREE(op)) {
		/*
		 * op is an interior member of the doubly-linked list of
		 * same-sized blocks hanging off a tree node; simply
		 * unlink it from the list and leave the tree untouched.
		 */
		tp = LINKBAK(op);
		if ((sp = LINKFOR(op)) != NULL)
			LINKBAK(sp) = tp;
		LINKFOR(tp) = sp;
		return;
	}

	/* make op the root of the tree */
	if (PARENT(op))
		t_splay(op);

	/* if this is the start of a list */
	if ((tp = LINKFOR(op)) != NULL) {
		/*
		 * Promote the next list member into op's position:
		 * it inherits op's left and right subtrees and becomes
		 * the new root, so no restructuring is needed.
		 */
		PARENT(tp) = NULL;
		if ((sp = LEFT(op)) != NULL)
			PARENT(sp) = tp;
		LEFT(tp) = sp;

		if ((sp = RIGHT(op)) != NULL)
			PARENT(sp) = tp;
		RIGHT(tp) = sp;

		Root = tp;
		return;
	}

	/* if op has a non-null left subtree */
	if ((tp = LEFT(op)) != NULL) {
		PARENT(tp) = NULL;

		if (RIGHT(op)) {
			/* make the right-end of the left subtree its root */
			while ((sp = RIGHT(tp)) != NULL) {
				if ((gp = RIGHT(sp)) != NULL) {
					/* descend two levels per rotation */
					TDLEFT2(tp, sp, gp);
					tp = gp;
				} else {
					LEFT1(tp, sp);
					tp = sp;
				}
			}

			/* hook the right subtree of op to the above elt */
			RIGHT(tp) = RIGHT(op);
			PARENT(RIGHT(tp)) = tp;
		}
	} else if ((tp = RIGHT(op)) != NULL)	/* no left subtree */
		PARENT(tp) = NULL;

	/* tp (possibly NULL for an empty tree) replaces op as root */
	Root = tp;
}
8090Sstevel@tonic-gate 
8100Sstevel@tonic-gate /*
8110Sstevel@tonic-gate  * Bottom up splaying (simple version).
8120Sstevel@tonic-gate  * The basic idea is to roughly cut in half the
8130Sstevel@tonic-gate  * path from Root to tp and make tp the new root.
8140Sstevel@tonic-gate  */
8150Sstevel@tonic-gate static void
t_splay(TREE * tp)8160Sstevel@tonic-gate t_splay(TREE *tp)
8170Sstevel@tonic-gate {
8180Sstevel@tonic-gate 	TREE	*pp, *gp;
8190Sstevel@tonic-gate 
8200Sstevel@tonic-gate 	/* iterate until tp is the root */
8210Sstevel@tonic-gate 	while ((pp = PARENT(tp)) != NULL) {
8220Sstevel@tonic-gate 		/* grandparent of tp */
8230Sstevel@tonic-gate 		gp = PARENT(pp);
8240Sstevel@tonic-gate 
8250Sstevel@tonic-gate 		/* x is a left child */
8260Sstevel@tonic-gate 		if (LEFT(pp) == tp) {
8270Sstevel@tonic-gate 			if (gp && LEFT(gp) == pp) {
8280Sstevel@tonic-gate 				BURIGHT2(gp, pp, tp);
8290Sstevel@tonic-gate 			} else {
8300Sstevel@tonic-gate 				RIGHT1(pp, tp);
8310Sstevel@tonic-gate 			}
8320Sstevel@tonic-gate 		} else {
8330Sstevel@tonic-gate 			ASSERT(RIGHT(pp) == tp);
8340Sstevel@tonic-gate 			if (gp && RIGHT(gp) == pp) {
8350Sstevel@tonic-gate 				BULEFT2(gp, pp, tp);
8360Sstevel@tonic-gate 			} else {
8370Sstevel@tonic-gate 				LEFT1(pp, tp);
8380Sstevel@tonic-gate 			}
8390Sstevel@tonic-gate 		}
8400Sstevel@tonic-gate 	}
8410Sstevel@tonic-gate }
8420Sstevel@tonic-gate 
8430Sstevel@tonic-gate 
8440Sstevel@tonic-gate /*
8450Sstevel@tonic-gate  *	free().
8460Sstevel@tonic-gate  *	Performs a delayed free of the block pointed to
8470Sstevel@tonic-gate  *	by old. The pointer to old is saved on a list, flist,
8480Sstevel@tonic-gate  *	until the next malloc or realloc. At that time, all the
8490Sstevel@tonic-gate  *	blocks pointed to in flist are actually freed via
8500Sstevel@tonic-gate  *	realfree(). This allows the contents of free blocks to
8510Sstevel@tonic-gate  *	remain undisturbed until the next malloc or realloc.
8520Sstevel@tonic-gate  */
8530Sstevel@tonic-gate void
free(void * old)8540Sstevel@tonic-gate free(void *old)
8550Sstevel@tonic-gate {
8566515Sraf 	if (!primary_link_map) {
8576515Sraf 		errno = ENOTSUP;
8586515Sraf 		return;
8596515Sraf 	}
8600Sstevel@tonic-gate 	assert_no_libc_locks_held();
8616515Sraf 	(void) mutex_lock(&libc_malloc_lock);
8620Sstevel@tonic-gate 	_free_unlocked(old);
8636515Sraf 	(void) mutex_unlock(&libc_malloc_lock);
8640Sstevel@tonic-gate }
8650Sstevel@tonic-gate 
8660Sstevel@tonic-gate 
8670Sstevel@tonic-gate void
_free_unlocked(void * old)8680Sstevel@tonic-gate _free_unlocked(void *old)
8690Sstevel@tonic-gate {
8700Sstevel@tonic-gate 	int	i;
8710Sstevel@tonic-gate 
8726515Sraf 	if (old == NULL)
8730Sstevel@tonic-gate 		return;
8740Sstevel@tonic-gate 
8750Sstevel@tonic-gate 	/*
8760Sstevel@tonic-gate 	 * Make sure the same data block is not freed twice.
8770Sstevel@tonic-gate 	 * 3 cases are checked.  It returns immediately if either
8780Sstevel@tonic-gate 	 * one of the conditions is true.
8790Sstevel@tonic-gate 	 *	1. Last freed.
8800Sstevel@tonic-gate 	 *	2. Not in use or freed already.
8810Sstevel@tonic-gate 	 *	3. In the free list.
8820Sstevel@tonic-gate 	 */
8830Sstevel@tonic-gate 	if (old == Lfree)
8840Sstevel@tonic-gate 		return;
8850Sstevel@tonic-gate 	if (!ISBIT0(SIZE(BLOCK(old))))
8860Sstevel@tonic-gate 		return;
8870Sstevel@tonic-gate 	for (i = 0; i < freeidx; i++)
8880Sstevel@tonic-gate 		if (old == flist[i])
8890Sstevel@tonic-gate 			return;
8900Sstevel@tonic-gate 
8910Sstevel@tonic-gate 	if (flist[freeidx] != NULL)
8920Sstevel@tonic-gate 		realfree(flist[freeidx]);
8930Sstevel@tonic-gate 	flist[freeidx] = Lfree = old;
8940Sstevel@tonic-gate 	freeidx = (freeidx + 1) & FREEMASK; /* one forward */
8950Sstevel@tonic-gate }
8960Sstevel@tonic-gate 
8970Sstevel@tonic-gate /*
 * cleanfree() frees all the blocks pointed to by flist.
8990Sstevel@tonic-gate  *
9000Sstevel@tonic-gate  * realloc() should work if it is called with a pointer
9010Sstevel@tonic-gate  * to a block that was freed since the last call to malloc() or
9020Sstevel@tonic-gate  * realloc(). If cleanfree() is called from realloc(), ptr
9030Sstevel@tonic-gate  * is set to the old block and that block should not be
9040Sstevel@tonic-gate  * freed since it is actually being reallocated.
9050Sstevel@tonic-gate  */
9060Sstevel@tonic-gate static void
cleanfree(void * ptr)9070Sstevel@tonic-gate cleanfree(void *ptr)
9080Sstevel@tonic-gate {
9090Sstevel@tonic-gate 	char	**flp;
9100Sstevel@tonic-gate 
9110Sstevel@tonic-gate 	flp = (char **)&(flist[freeidx]);
9120Sstevel@tonic-gate 	for (;;) {
9130Sstevel@tonic-gate 		if (flp == (char **)&(flist[0]))
9140Sstevel@tonic-gate 			flp = (char **)&(flist[FREESIZE]);
9150Sstevel@tonic-gate 		if (*--flp == NULL)
9160Sstevel@tonic-gate 			break;
9170Sstevel@tonic-gate 		if (*flp != ptr)
9180Sstevel@tonic-gate 			realfree(*flp);
9190Sstevel@tonic-gate 		*flp = NULL;
9200Sstevel@tonic-gate 	}
9210Sstevel@tonic-gate 	freeidx = 0;
9220Sstevel@tonic-gate 	Lfree = NULL;
9230Sstevel@tonic-gate }
924