/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#include "libuutil_common.h"

#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/avl.h>

static uu_avl_pool_t	uu_null_apool = { &uu_null_apool, &uu_null_apool };
static pthread_mutex_t	uu_apool_list_lock = PTHREAD_MUTEX_INITIALIZER;

/*
 * The index mark changes on every insert and delete, to catch stale
 * references.
 *
 * We leave the low bit alone, since the avl code uses it.
 */
#define	INDEX_MAX		(sizeof (uintptr_t) - 2)
#define	INDEX_NEXT(m)		(((m) == INDEX_MAX)? 2 : ((m) + 2) & INDEX_MAX)

#define	INDEX_DECODE(i)		((i) & ~INDEX_MAX)
#define	INDEX_ENCODE(p, n)	(((n) & ~INDEX_MAX) | (p)->ua_index)
#define	INDEX_VALID(p, i)	(((i) & INDEX_MAX) == (p)->ua_index)
#define	INDEX_CHECK(i)		(((i) & INDEX_MAX) != 0)
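
/*
 * INDEX_ENCODE() stores the tree's current mark (ua_index) in the bits
 * selected by INDEX_MAX; INDEX_DECODE() strips it back off before the
 * value is handed to avl_insert() or avl_nearest().  In debug mode,
 * every insert and remove advances ua_index, so a uu_avl_index_t
 * obtained from an earlier uu_avl_find() fails INDEX_VALID() and
 * uu_avl_insert() panics rather than inserting at a stale position.
 */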

/*
 * When an element is inactive (not in a tree), we keep a marked pointer to
 * its containing pool in its first word, and a NULL pointer in its second.
 *
 * On insert, we use these to verify that it comes from the correct pool.
 */
#define	NODE_ARRAY(p, n)	((uintptr_t *)((uintptr_t)(n) + \
				    (p)->uap_nodeoffset))

#define	POOL_TO_MARKER(pp)	(((uintptr_t)(pp) | 1))

#define	DEAD_MARKER		0xc4

uu_avl_pool_t *
uu_avl_pool_create(const char *name, size_t objsize, size_t nodeoffset,
    uu_compare_fn_t *compare_func, uint32_t flags)
{
	uu_avl_pool_t *pp, *next, *prev;

	if (name == NULL ||
	    uu_check_name(name, UU_NAME_DOMAIN) == -1 ||
	    nodeoffset + sizeof (uu_avl_node_t) > objsize ||
	    compare_func == NULL) {
		uu_set_error(UU_ERROR_INVALID_ARGUMENT);
		return (NULL);
	}

	if (flags & ~UU_AVL_POOL_DEBUG) {
		uu_set_error(UU_ERROR_UNKNOWN_FLAG);
		return (NULL);
	}

	pp = uu_zalloc(sizeof (uu_avl_pool_t));
	if (pp == NULL) {
		uu_set_error(UU_ERROR_NO_MEMORY);
		return (NULL);
	}

	(void) strlcpy(pp->uap_name, name, sizeof (pp->uap_name));
	pp->uap_nodeoffset = nodeoffset;
	pp->uap_objsize = objsize;
	pp->uap_cmp = compare_func;
	if (flags & UU_AVL_POOL_DEBUG)
		pp->uap_debug = 1;
	pp->uap_last_index = 0;

	(void) pthread_mutex_init(&pp->uap_lock, NULL);

	pp->uap_null_avl.ua_next_enc = UU_PTR_ENCODE(&pp->uap_null_avl);
	pp->uap_null_avl.ua_prev_enc = UU_PTR_ENCODE(&pp->uap_null_avl);

	(void) pthread_mutex_lock(&uu_apool_list_lock);
	pp->uap_next = next = &uu_null_apool;
	pp->uap_prev = prev = next->uap_prev;
	next->uap_prev = pp;
	prev->uap_next = pp;
	(void) pthread_mutex_unlock(&uu_apool_list_lock);

	return (pp);
}
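
/*
 * Example usage (illustrative sketch; "my_obj", "my_compare", and "pool"
 * are hypothetical caller-side names, not part of libuutil):
 *
 *	struct my_obj {
 *		int		mo_key;
 *		uu_avl_node_t	mo_node;
 *	};
 *
 *	static int
 *	my_compare(const void *l, const void *r, void *private)
 *	{
 *		const struct my_obj *lo = l;
 *		const struct my_obj *ro = r;
 *
 *		if (lo->mo_key < ro->mo_key)
 *			return (-1);
 *		if (lo->mo_key > ro->mo_key)
 *			return (1);
 *		return (0);
 *	}
 *
 *	uu_avl_pool_t *pool = uu_avl_pool_create("my_pool",
 *	    sizeof (struct my_obj), offsetof(struct my_obj, mo_node),
 *	    my_compare, UU_AVL_POOL_DEBUG);
 *
 * Each object must then have uu_avl_node_init(obj, &obj->mo_node, pool)
 * called on it before it is added to any uu_avl_t created from the pool.
 */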

void
uu_avl_pool_destroy(uu_avl_pool_t *pp)
{
	if (pp->uap_debug) {
		if (pp->uap_null_avl.ua_next_enc !=
		    UU_PTR_ENCODE(&pp->uap_null_avl) ||
		    pp->uap_null_avl.ua_prev_enc !=
		    UU_PTR_ENCODE(&pp->uap_null_avl)) {
			uu_panic("uu_avl_pool_destroy: Pool \"%.*s\" (%p) has "
			    "outstanding avls, or is corrupt.\n",
			    sizeof (pp->uap_name), pp->uap_name, pp);
		}
	}
	(void) pthread_mutex_lock(&uu_apool_list_lock);
	pp->uap_next->uap_prev = pp->uap_prev;
	pp->uap_prev->uap_next = pp->uap_next;
	(void) pthread_mutex_unlock(&uu_apool_list_lock);
	pp->uap_prev = NULL;
	pp->uap_next = NULL;
	uu_free(pp);
}

void
uu_avl_node_init(void *base, uu_avl_node_t *np, uu_avl_pool_t *pp)
{
	uintptr_t *na = (uintptr_t *)np;

	if (pp->uap_debug) {
		uintptr_t offset = (uintptr_t)np - (uintptr_t)base;
		if (offset + sizeof (*np) > pp->uap_objsize) {
			uu_panic("uu_avl_node_init(%p, %p, %p (\"%s\")): "
			    "offset %ld doesn't fit in object (size %ld)\n",
			    base, np, pp, pp->uap_name, offset,
			    pp->uap_objsize);
		}
		if (offset != pp->uap_nodeoffset) {
			uu_panic("uu_avl_node_init(%p, %p, %p (\"%s\")): "
			    "offset %ld doesn't match pool's offset (%ld)\n",
			    base, np, pp, pp->uap_name, offset,
			    pp->uap_nodeoffset);
		}
	}

	na[0] = POOL_TO_MARKER(pp);
	na[1] = NULL;
}

void
uu_avl_node_fini(void *base, uu_avl_node_t *np, uu_avl_pool_t *pp)
{
	uintptr_t *na = (uintptr_t *)np;

	if (pp->uap_debug) {
		if (na[0] == DEAD_MARKER && na[1] == DEAD_MARKER) {
			uu_panic("uu_avl_node_fini(%p, %p, %p (\"%s\")): "
			    "node already finied\n",
			    base, np, pp, pp->uap_name);
		}
		if (na[0] != POOL_TO_MARKER(pp) || na[1] != NULL) {
			uu_panic("uu_avl_node_fini(%p, %p, %p (\"%s\")): "
			    "node corrupt, in tree, or in different pool\n",
			    base, np, pp, pp->uap_name);
		}
	}

	na[0] = DEAD_MARKER;
	na[1] = DEAD_MARKER;
	na[2] = DEAD_MARKER;
}

struct uu_avl_node_compare_info {
	uu_compare_fn_t	*ac_compare;
	void		*ac_private;
	void		*ac_right;
	void		*ac_found;
};

static int
uu_avl_node_compare(const void *l, const void *r)
{
	struct uu_avl_node_compare_info *info =
	    (struct uu_avl_node_compare_info *)l;

	int res = info->ac_compare(r, info->ac_right, info->ac_private);

	if (res == 0) {
		if (info->ac_found == NULL)
			info->ac_found = (void *)r;
		return (-1);
	}
	if (res < 0)
		return (1);
	return (-1);
}

uu_avl_t *
uu_avl_create(uu_avl_pool_t *pp, void *parent, uint32_t flags)
{
	uu_avl_t *ap, *next, *prev;

	if (flags & ~UU_AVL_DEBUG) {
		uu_set_error(UU_ERROR_UNKNOWN_FLAG);
		return (NULL);
	}

	ap = uu_zalloc(sizeof (*ap));
	if (ap == NULL) {
		uu_set_error(UU_ERROR_NO_MEMORY);
		return (NULL);
	}

	ap->ua_pool = pp;
	ap->ua_parent_enc = UU_PTR_ENCODE(parent);
	ap->ua_debug = pp->uap_debug || (flags & UU_AVL_DEBUG);
	ap->ua_index = (pp->uap_last_index = INDEX_NEXT(pp->uap_last_index));

	avl_create(&ap->ua_tree, &uu_avl_node_compare, pp->uap_objsize,
	    pp->uap_nodeoffset);

	ap->ua_null_walk.uaw_next = &ap->ua_null_walk;
	ap->ua_null_walk.uaw_prev = &ap->ua_null_walk;

	(void) pthread_mutex_lock(&pp->uap_lock);
	next = &pp->uap_null_avl;
	prev = UU_PTR_DECODE(next->ua_prev_enc);
	ap->ua_next_enc = UU_PTR_ENCODE(next);
	ap->ua_prev_enc = UU_PTR_ENCODE(prev);
	next->ua_prev_enc = UU_PTR_ENCODE(ap);
	prev->ua_next_enc = UU_PTR_ENCODE(ap);
	(void) pthread_mutex_unlock(&pp->uap_lock);

	return (ap);
}
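
/*
 * Example usage (illustrative sketch, continuing the hypothetical "pool"
 * from the uu_avl_pool_create() example above):
 *
 *	uu_avl_t *tree = uu_avl_create(pool, NULL, 0);
 *	if (tree == NULL)
 *		... handle uu_error() ...
 *
 * The parent argument is only recorded (encoded) on the tree; it is not
 * otherwise interpreted by this file, and NULL is a common choice.
 */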

void
uu_avl_destroy(uu_avl_t *ap)
{
	uu_avl_pool_t *pp = ap->ua_pool;

	if (ap->ua_debug) {
		if (avl_numnodes(&ap->ua_tree) != 0) {
			uu_panic("uu_avl_destroy(%p): tree not empty\n", ap);
		}
		if (ap->ua_null_walk.uaw_next != &ap->ua_null_walk ||
		    ap->ua_null_walk.uaw_prev != &ap->ua_null_walk) {
			uu_panic("uu_avl_destroy(%p):  outstanding walkers\n",
			    ap);
		}
	}
	(void) pthread_mutex_lock(&pp->uap_lock);
	UU_AVL_PTR(ap->ua_next_enc)->ua_prev_enc = ap->ua_prev_enc;
	UU_AVL_PTR(ap->ua_prev_enc)->ua_next_enc = ap->ua_next_enc;
	(void) pthread_mutex_unlock(&pp->uap_lock);
	ap->ua_prev_enc = UU_PTR_ENCODE(NULL);
	ap->ua_next_enc = UU_PTR_ENCODE(NULL);

	ap->ua_pool = NULL;
	avl_destroy(&ap->ua_tree);

	uu_free(ap);
}

size_t
uu_avl_numnodes(uu_avl_t *ap)
{
	return (avl_numnodes(&ap->ua_tree));
}

void *
uu_avl_first(uu_avl_t *ap)
{
	return (avl_first(&ap->ua_tree));
}

void *
uu_avl_last(uu_avl_t *ap)
{
	return (avl_last(&ap->ua_tree));
}

void *
uu_avl_next(uu_avl_t *ap, void *node)
{
	return (AVL_NEXT(&ap->ua_tree, node));
}

void *
uu_avl_prev(uu_avl_t *ap, void *node)
{
	return (AVL_PREV(&ap->ua_tree, node));
}

static void
_avl_walk_init(uu_avl_walk_t *wp, uu_avl_t *ap, uint32_t flags)
{
	uu_avl_walk_t *next, *prev;

	int robust = (flags & UU_WALK_ROBUST);
	int direction = (flags & UU_WALK_REVERSE)? -1 : 1;

	(void) memset(wp, 0, sizeof (*wp));
	wp->uaw_avl = ap;
	wp->uaw_robust = robust;
	wp->uaw_dir = direction;

	if (direction > 0)
		wp->uaw_next_result = avl_first(&ap->ua_tree);
	else
		wp->uaw_next_result = avl_last(&ap->ua_tree);

	if (ap->ua_debug || robust) {
		wp->uaw_next = next = &ap->ua_null_walk;
		wp->uaw_prev = prev = next->uaw_prev;
		next->uaw_prev = wp;
		prev->uaw_next = wp;
	}
}

static void *
_avl_walk_advance(uu_avl_walk_t *wp, uu_avl_t *ap)
{
	void *np = wp->uaw_next_result;

	avl_tree_t *t = &ap->ua_tree;

	if (np == NULL)
		return (NULL);

	wp->uaw_next_result = (wp->uaw_dir > 0)? AVL_NEXT(t, np) :
	    AVL_PREV(t, np);

	return (np);
}

static void
_avl_walk_fini(uu_avl_walk_t *wp)
{
	if (wp->uaw_next != NULL) {
		wp->uaw_next->uaw_prev = wp->uaw_prev;
		wp->uaw_prev->uaw_next = wp->uaw_next;
		wp->uaw_next = NULL;
		wp->uaw_prev = NULL;
	}
	wp->uaw_avl = NULL;
	wp->uaw_next_result = NULL;
}

uu_avl_walk_t *
uu_avl_walk_start(uu_avl_t *ap, uint32_t flags)
{
	uu_avl_walk_t *wp;

	if (flags & ~(UU_WALK_ROBUST | UU_WALK_REVERSE)) {
		uu_set_error(UU_ERROR_UNKNOWN_FLAG);
		return (NULL);
	}

	wp = uu_zalloc(sizeof (*wp));
	if (wp == NULL) {
		uu_set_error(UU_ERROR_NO_MEMORY);
		return (NULL);
	}

	_avl_walk_init(wp, ap, flags);
	return (wp);
}

void *
uu_avl_walk_next(uu_avl_walk_t *wp)
{
	return (_avl_walk_advance(wp, wp->uaw_avl));
}

void
uu_avl_walk_end(uu_avl_walk_t *wp)
{
	_avl_walk_fini(wp);
	uu_free(wp);
}

int
uu_avl_walk(uu_avl_t *ap, uu_walk_fn_t *func, void *private, uint32_t flags)
{
	void *e;
	uu_avl_walk_t my_walk;

	int status = UU_WALK_NEXT;

	if (flags & ~(UU_WALK_ROBUST | UU_WALK_REVERSE)) {
		uu_set_error(UU_ERROR_UNKNOWN_FLAG);
		return (-1);
	}

	_avl_walk_init(&my_walk, ap, flags);
	while (status == UU_WALK_NEXT &&
	    (e = _avl_walk_advance(&my_walk, ap)) != NULL)
		status = (*func)(e, private);
	_avl_walk_fini(&my_walk);

	if (status >= 0)
		return (0);
	uu_set_error(UU_ERROR_CALLBACK_FAILED);
	return (-1);
}
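
/*
 * Example callback (illustrative sketch; "my_obj" and "my_print" are
 * hypothetical caller-side names from the example above):
 *
 *	static int
 *	my_print(void *elem, void *private)
 *	{
 *		struct my_obj *obj = elem;
 *
 *		(void) printf("%d\n", obj->mo_key);
 *		return (UU_WALK_NEXT);
 *	}
 *
 *	if (uu_avl_walk(tree, my_print, NULL, 0) == -1)
 *		... a callback returned a negative value ...
 *
 * Returning UU_WALK_DONE from the callback stops the walk early without
 * signalling an error; any negative return makes uu_avl_walk() fail with
 * UU_ERROR_CALLBACK_FAILED.
 */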

void
uu_avl_remove(uu_avl_t *ap, void *elem)
{
	uu_avl_walk_t *wp;
	uu_avl_pool_t *pp = ap->ua_pool;
	uintptr_t *na = NODE_ARRAY(pp, elem);

	if (ap->ua_debug) {
		/*
		 * invalidate outstanding uu_avl_index_ts.
		 */
		ap->ua_index = INDEX_NEXT(ap->ua_index);
	}

	/*
	 * Robust walkers must be advanced if we are removing the node
	 * they are currently using.  In debug mode, non-robust walkers
	 * are also on the walker list.
	 */
	for (wp = ap->ua_null_walk.uaw_next; wp != &ap->ua_null_walk;
	    wp = wp->uaw_next) {
		if (wp->uaw_robust) {
			if (elem == wp->uaw_next_result)
				(void) _avl_walk_advance(wp, ap);
		} else if (wp->uaw_next_result != NULL) {
			uu_panic("uu_avl_remove(%p, %p): active non-robust "
			    "walker\n", ap, elem);
		}
	}

	avl_remove(&ap->ua_tree, elem);

	na[0] = POOL_TO_MARKER(pp);
	na[1] = NULL;
}

void *
uu_avl_teardown(uu_avl_t *ap, void **cookie)
{
	void *elem = avl_destroy_nodes(&ap->ua_tree, cookie);

	if (elem != NULL) {
		uu_avl_pool_t *pp = ap->ua_pool;
		uintptr_t *na = NODE_ARRAY(pp, elem);

		na[0] = POOL_TO_MARKER(pp);
		na[1] = NULL;
	}
	return (elem);
}
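
/*
 * Typical teardown loop (illustrative sketch, using the hypothetical
 * names from the examples above):
 *
 *	void *cookie = NULL;
 *	struct my_obj *obj;
 *
 *	while ((obj = uu_avl_teardown(tree, &cookie)) != NULL) {
 *		uu_avl_node_fini(obj, &obj->mo_node, pool);
 *		free(obj);
 *	}
 *	uu_avl_destroy(tree);
 *
 * Each element handed back has already had its node marked inactive, so
 * it may be passed to uu_avl_node_fini() and freed immediately.
 */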

void *
uu_avl_find(uu_avl_t *ap, void *elem, void *private, uu_avl_index_t *out)
{
	struct uu_avl_node_compare_info info;
	void *result;

	info.ac_compare = ap->ua_pool->uap_cmp;
	info.ac_private = private;
	info.ac_right = elem;
	info.ac_found = NULL;

	result = avl_find(&ap->ua_tree, &info, out);
	if (out != NULL)
		*out = INDEX_ENCODE(ap, *out);

	if (ap->ua_debug && result != NULL)
		uu_panic("uu_avl_find: internal error: avl_find succeeded\n");

	return (info.ac_found);
}

void
uu_avl_insert(uu_avl_t *ap, void *elem, uu_avl_index_t idx)
{
	if (ap->ua_debug) {
		uu_avl_pool_t *pp = ap->ua_pool;
		uintptr_t *na = NODE_ARRAY(pp, elem);

		if (na[1] != NULL)
			uu_panic("uu_avl_insert(%p, %p, %p): node already "
			    "in tree, or corrupt\n",
			    ap, elem, idx);
		if (na[0] == NULL)
			uu_panic("uu_avl_insert(%p, %p, %p): node not "
			    "initialized\n",
			    ap, elem, idx);
		if (na[0] != POOL_TO_MARKER(pp))
			uu_panic("uu_avl_insert(%p, %p, %p): node from "
			    "other pool, or corrupt\n",
			    ap, elem, idx);

		if (!INDEX_VALID(ap, idx))
			uu_panic("uu_avl_insert(%p, %p, %p): %s\n",
			    ap, elem, idx,
			    INDEX_CHECK(idx)? "outdated index" :
			    "invalid index");

		/*
		 * invalidate outstanding uu_avl_index_ts.
		 */
		ap->ua_index = INDEX_NEXT(ap->ua_index);
	}
	avl_insert(&ap->ua_tree, elem, INDEX_DECODE(idx));
}
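
/*
 * Typical find-or-insert idiom (illustrative sketch):
 *
 *	uu_avl_index_t idx;
 *
 *	if (uu_avl_find(tree, obj, NULL, &idx) == NULL)
 *		uu_avl_insert(tree, obj, idx);
 *
 * The returned index becomes stale after any insert or remove on the
 * tree; in debug mode, reusing a stale index triggers uu_panic().
 */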

void *
uu_avl_nearest_next(uu_avl_t *ap, uu_avl_index_t idx)
{
	if (ap->ua_debug && !INDEX_VALID(ap, idx))
		uu_panic("uu_avl_nearest_next(%p, %p): %s\n",
		    ap, idx, INDEX_CHECK(idx)? "outdated index" :
		    "invalid index");
	return (avl_nearest(&ap->ua_tree, INDEX_DECODE(idx), AVL_AFTER));
}

void *
uu_avl_nearest_prev(uu_avl_t *ap, uu_avl_index_t idx)
{
	if (ap->ua_debug && !INDEX_VALID(ap, idx))
		uu_panic("uu_avl_nearest_prev(%p, %p): %s\n",
		    ap, idx, INDEX_CHECK(idx)? "outdated index" :
		    "invalid index");
	return (avl_nearest(&ap->ua_tree, INDEX_DECODE(idx), AVL_BEFORE));
}

/*
 * called from uu_lockup() and uu_release(), as part of our fork1()-safety.
 */
void
uu_avl_lockup(void)
{
	uu_avl_pool_t *pp;

	(void) pthread_mutex_lock(&uu_apool_list_lock);
	for (pp = uu_null_apool.uap_next; pp != &uu_null_apool;
	    pp = pp->uap_next)
		(void) pthread_mutex_lock(&pp->uap_lock);
}

void
uu_avl_release(void)
{
	uu_avl_pool_t *pp;

	for (pp = uu_null_apool.uap_next; pp != &uu_null_apool;
	    pp = pp->uap_next)
		(void) pthread_mutex_unlock(&pp->uap_lock);
	(void) pthread_mutex_unlock(&uu_apool_list_lock);
}