/* Abstract AVL Tree Generic C Package.
** Implementation generation header file.
**
** This code is in the public domain. See cavl_tree.html for interface
** documentation.
**
** Version: 1.5 Author: Walt Karas
*/
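
/* The following sketch is NOT part of the package documentation; it is
** only a rough illustration, assuming a plain in-memory node addressed by
** pointer, of the kind of macros an instantiation defines before including
** this file.  The node layout, the "my_" prefix and the value of
** AVL_MAX_DEPTH are hypothetical; see cavl_tree.html for the authoritative
** interface description (the matching interface header also declares the
** tree and iterator types).
**
**   struct my_node { struct my_node *less, *greater; int bf; int key; };
**
**   #define AVL_UNIQUE(BASE) my_ ## BASE
**   #define AVL_HANDLE struct my_node *
**   #define AVL_NULL ((struct my_node *) 0)
**   #define AVL_KEY int
**   #define AVL_MAX_DEPTH 40
**   #define AVL_SET_ROOT(T, H) (T)->root = (H)
**   // The second argument of the GET_ macros is ignored here; some SET_
**   // calls below are written without a trailing semicolon, so those
**   // macros must supply their own.
**   #define AVL_GET_LESS(H, A) ((H)->less)
**   #define AVL_GET_GREATER(H, A) ((H)->greater)
**   #define AVL_SET_LESS(H, V) (H)->less = (V);
**   #define AVL_SET_GREATER(H, V) (H)->greater = (V);
**   #define AVL_GET_BALANCE_FACTOR(H) ((H)->bf)
**   #define AVL_SET_BALANCE_FACTOR(H, V) (H)->bf = (V);
**   #define AVL_COMPARE_NODE_NODE(A, B) \
**     ((A)->key < (B)->key ? -1 : (A)->key > (B)->key ? 1 : 0)
**   #define AVL_COMPARE_KEY_NODE(K, B) \
**     ((K) < (B)->key ? -1 : (K) > (B)->key ? 1 : 0)
**
**   #include "cavl_impl.h"  // or whatever name this file is given
*/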

#include <limits.h>  /* for LONG_MAX and CHAR_BIT */
#include <string.h>

#undef L__
#undef L__EST_LONG_BIT
#undef L__SIZE
#undef L__tree
#undef L__MASK_HIGH_BIT
#undef L__LONG_BIT
#undef L__BIT_ARR_DEFN
#undef L__BIT_ARR_CLEAR
#undef L__BIT_ARR_VAL
#undef L__BIT_ARR_0
#undef L__BIT_ARR_1
#undef L__BIT_ARR_ALL
#undef L__BIT_ARR_LONGS
#undef L__IMPL_MASK
#undef L__CHECK_READ_ERROR
#undef L__CHECK_READ_ERROR_INV_DEPTH
#undef L__SC
#undef L__BALANCE_PARAM_CALL_PREFIX
#undef L__BALANCE_PARAM_DECL_PREFIX

#ifdef AVL_UNIQUE

#define L__ AVL_UNIQUE

#else

#define L__(X) X

#endif
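
/* With a definition such as the hypothetical
**   #define AVL_UNIQUE(BASE) my_ ## BASE
** every occurrence of L__(insert), L__(avl), etc. below expands to
** my_insert, my_avl, etc., so each instantiation gets its own set of
** names; without AVL_UNIQUE the unprefixed names are used directly.
*/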

/* Determine correct storage class for functions */
#ifdef AVL_PRIVATE

#define L__SC static

#else

#define L__SC

#endif

#ifdef AVL_SIZE

#define L__SIZE AVL_SIZE

#else

#define L__SIZE unsigned long

#endif

#define L__MASK_HIGH_BIT ((int) ~ ((~ (unsigned) 0) >> 1))
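
/* L__MASK_HIGH_BIT has only the sign bit of an int set.  The search
** routines below use it to test whether two comparison results have the
** same sign: (cmp ^ target_cmp) & L__MASK_HIGH_BIT is zero exactly when
** cmp and target_cmp are both non-negative or both negative.
*/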

/* ANSI C/ISO C++ require that a long have at least 32 bits. Set
** L__EST_LONG_BIT to be the greatest multiple of 8 in the range
** 32 - 64 (inclusive) that is less than or equal to the number of
** bits in a long.
*/

#if (((LONG_MAX >> 31) >> 7) == 0)

#define L__EST_LONG_BIT 32

#elif (((LONG_MAX >> 31) >> 15) == 0)

#define L__EST_LONG_BIT 40

#elif (((LONG_MAX >> 31) >> 23) == 0)

#define L__EST_LONG_BIT 48

#elif (((LONG_MAX >> 31) >> 31) == 0)

#define L__EST_LONG_BIT 56

#else

#define L__EST_LONG_BIT 64

#endif

#define L__LONG_BIT (sizeof(long) * CHAR_BIT)

#if ((AVL_MAX_DEPTH) > L__EST_LONG_BIT)

/* The maximum depth may be greater than the number of bits in a long,
** so multiple longs are needed to hold a bit array indexed by node
** depth. */

#define L__BIT_ARR_LONGS (((AVL_MAX_DEPTH) + L__LONG_BIT - 1) / L__LONG_BIT)

#define L__BIT_ARR_DEFN(NAME) unsigned long NAME[L__BIT_ARR_LONGS];

#define L__BIT_ARR_CLEAR(NAME) memset(NAME, 0, sizeof(NAME));

#define L__BIT_ARR_VAL(BIT_ARR, BIT_NUM) \
  ((BIT_ARR)[(BIT_NUM) / L__LONG_BIT] & (1L << ((BIT_NUM) % L__LONG_BIT)))

#define L__BIT_ARR_0(BIT_ARR, BIT_NUM) \
  (BIT_ARR)[(BIT_NUM) / L__LONG_BIT] &= ~(1L << ((BIT_NUM) % L__LONG_BIT));

#define L__BIT_ARR_1(BIT_ARR, BIT_NUM) \
  (BIT_ARR)[(BIT_NUM) / L__LONG_BIT] |= 1L << ((BIT_NUM) % L__LONG_BIT);

#define L__BIT_ARR_ALL(BIT_ARR, BIT_VAL) \
  { int i = L__BIT_ARR_LONGS; do (BIT_ARR)[--i] = 0L - (BIT_VAL); while(i); }

#else /* The bit array can definitely fit in one long */

#define L__BIT_ARR_DEFN(NAME) unsigned long NAME;

#define L__BIT_ARR_CLEAR(NAME) NAME = 0;

#define L__BIT_ARR_VAL(BIT_ARR, BIT_NUM) ((BIT_ARR) & (1L << (BIT_NUM)))

#define L__BIT_ARR_0(BIT_ARR, BIT_NUM) (BIT_ARR) &= ~(1L << (BIT_NUM));

#define L__BIT_ARR_1(BIT_ARR, BIT_NUM) (BIT_ARR) |= 1L << (BIT_NUM);

#define L__BIT_ARR_ALL(BIT_ARR, BIT_VAL) (BIT_ARR) = 0L - (BIT_VAL);

#endif
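
/* Illustration (not from the original source): a node reached from the
** root by taking the greater branch, then the less branch, then the
** greater branch again is recorded in a path bit array with bit 0 set,
** bit 1 clear and bit 2 set, i.e. the value 5 when the array fits in a
** single long.
*/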

#ifdef AVL_READ_ERRORS_HAPPEN

#define L__CHECK_READ_ERROR(ERROR_RETURN) \
  { if (AVL_READ_ERROR) return(ERROR_RETURN); }

#else

#define L__CHECK_READ_ERROR(ERROR_RETURN)

#endif

/* An instantiation presumably places additional fields inside the AVL
** tree structure because its SET_ and GET_ macros need them.  The
** "balance" function does not itself use any fields of the AVL tree
** structure, so a pointer to the structure is passed to "balance" only
** when the structure has instantiation-specific fields that the
** SET_/GET_ calls within "balance" may need.
*/
#ifdef AVL_INSIDE_STRUCT

#define L__BALANCE_PARAM_CALL_PREFIX L__tree,
#define L__BALANCE_PARAM_DECL_PREFIX L__(avl) *L__tree,

#else

#define L__BALANCE_PARAM_CALL_PREFIX
#define L__BALANCE_PARAM_DECL_PREFIX

#endif
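
/* With AVL_INSIDE_STRUCT defined, the private "balance" function is
** declared as balance(L__(avl) *L__tree, AVL_HANDLE bal_h) and called as
** balance(L__tree, bal_h); otherwise the tree parameter disappears from
** both the declaration and the calls.
*/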

#ifdef AVL_IMPL_MASK

#define L__IMPL_MASK (AVL_IMPL_MASK)

#else

/* Define all functions. */
#define L__IMPL_MASK AVL_IMPL_ALL

#endif

#if (L__IMPL_MASK & AVL_IMPL_INIT)

L__SC void L__(init)(L__(avl) *L__tree) { AVL_SET_ROOT(L__tree, AVL_NULL); }

#endif

#if (L__IMPL_MASK & AVL_IMPL_IS_EMPTY)

L__SC int L__(is_empty)(L__(avl) *L__tree)
{ return(L__tree->root == AVL_NULL); }

#endif

/* Put the private balance function in the same compilation module as
** the insert function. */
#if (L__IMPL_MASK & AVL_IMPL_INSERT)

/* Balances subtree, returns handle of root node of subtree after balancing.
*/
L__SC AVL_HANDLE L__(balance)(L__BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h)
{
  AVL_HANDLE deep_h;

  /* Either the "greater than" or the "less than" subtree of
  ** this node has to be 2 levels deeper (or else it wouldn't
  ** need balancing).
  */
  if (AVL_GET_BALANCE_FACTOR(bal_h) > 0)
  {
    /* "Greater than" subtree is deeper. */

    deep_h = AVL_GET_GREATER(bal_h, 1);

    L__CHECK_READ_ERROR(AVL_NULL)

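    /* The deeper subtree leans back toward bal_h, so a double rotation is
    ** needed; the less child of deep_h ends up as the new subtree root. */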
    if (AVL_GET_BALANCE_FACTOR(deep_h) < 0)
    {
      int bf;

      AVL_HANDLE old_h = bal_h;
      bal_h = AVL_GET_LESS(deep_h, 1);
      L__CHECK_READ_ERROR(AVL_NULL)
      AVL_SET_GREATER(old_h, AVL_GET_LESS(bal_h, 1))
      AVL_SET_LESS(deep_h, AVL_GET_GREATER(bal_h, 1))
      AVL_SET_LESS(bal_h, old_h)
      AVL_SET_GREATER(bal_h, deep_h)

      bf = AVL_GET_BALANCE_FACTOR(bal_h);
      if (bf != 0)
      {
        if (bf > 0)
        {
          AVL_SET_BALANCE_FACTOR(old_h, -1)
          AVL_SET_BALANCE_FACTOR(deep_h, 0)
        }
        else
        {
          AVL_SET_BALANCE_FACTOR(deep_h, 1)
          AVL_SET_BALANCE_FACTOR(old_h, 0)
        }
        AVL_SET_BALANCE_FACTOR(bal_h, 0)
      }
      else
      {
        AVL_SET_BALANCE_FACTOR(old_h, 0)
        AVL_SET_BALANCE_FACTOR(deep_h, 0)
      }
    }
    else
    {
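      /* The deeper subtree leans away from bal_h, so a single rotation
      ** suffices; deep_h becomes the new subtree root. */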
      AVL_SET_GREATER(bal_h, AVL_GET_LESS(deep_h, 0))
      AVL_SET_LESS(deep_h, bal_h)
      if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
      {
        AVL_SET_BALANCE_FACTOR(deep_h, -1)
        AVL_SET_BALANCE_FACTOR(bal_h, 1)
      }
      else
      {
        AVL_SET_BALANCE_FACTOR(deep_h, 0)
        AVL_SET_BALANCE_FACTOR(bal_h, 0)
      }
      bal_h = deep_h;
    }
  }
  else
  {
    /* "Less than" subtree is deeper. */

    deep_h = AVL_GET_LESS(bal_h, 1);
    L__CHECK_READ_ERROR(AVL_NULL)

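    /* Mirror image of the two cases above. */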
    if (AVL_GET_BALANCE_FACTOR(deep_h) > 0)
    {
      int bf;
      AVL_HANDLE old_h = bal_h;
      bal_h = AVL_GET_GREATER(deep_h, 1);
      L__CHECK_READ_ERROR(AVL_NULL)
      AVL_SET_LESS(old_h, AVL_GET_GREATER(bal_h, 0))
      AVL_SET_GREATER(deep_h, AVL_GET_LESS(bal_h, 0))
      AVL_SET_GREATER(bal_h, old_h)
      AVL_SET_LESS(bal_h, deep_h)

      bf = AVL_GET_BALANCE_FACTOR(bal_h);
      if (bf != 0)
      {
        if (bf < 0)
        {
          AVL_SET_BALANCE_FACTOR(old_h, 1)
          AVL_SET_BALANCE_FACTOR(deep_h, 0)
        }
        else
        {
          AVL_SET_BALANCE_FACTOR(deep_h, -1)
          AVL_SET_BALANCE_FACTOR(old_h, 0)
        }
        AVL_SET_BALANCE_FACTOR(bal_h, 0)
      }
      else
      {
        AVL_SET_BALANCE_FACTOR(old_h, 0)
        AVL_SET_BALANCE_FACTOR(deep_h, 0)
      }
    }
    else
    {
      AVL_SET_LESS(bal_h, AVL_GET_GREATER(deep_h, 0))
      AVL_SET_GREATER(deep_h, bal_h)
      if (AVL_GET_BALANCE_FACTOR(deep_h) == 0)
      {
        AVL_SET_BALANCE_FACTOR(deep_h, 1)
        AVL_SET_BALANCE_FACTOR(bal_h, -1)
      }
      else
      {
        AVL_SET_BALANCE_FACTOR(deep_h, 0)
        AVL_SET_BALANCE_FACTOR(bal_h, 0)
      }
      bal_h = deep_h;
    }
  }

  return(bal_h);
}

L__SC AVL_HANDLE L__(insert)(L__(avl) *L__tree, AVL_HANDLE h)
{
  AVL_SET_LESS(h, AVL_NULL)
  AVL_SET_GREATER(h, AVL_NULL)
  AVL_SET_BALANCE_FACTOR(h, 0)

  if (L__tree->root == AVL_NULL)
  {
    AVL_SET_ROOT(L__tree, h);
  }
  else
  {
    /* Last unbalanced node encountered in search for insertion point. */
    AVL_HANDLE unbal = AVL_NULL;
    /* Parent of last unbalanced node. */
    AVL_HANDLE parent_unbal = AVL_NULL;
    /* Balance factor of last unbalanced node. */
    int unbal_bf;

    /* Zero-based depth in tree. */
    unsigned depth = 0, unbal_depth = 0;

    /* Records a path into the tree. If bit n is true, indicates
    ** take greater branch from the nth node in the path, otherwise
    ** take the less branch. bit 0 gives branch from root, and
    ** so on. */
    L__BIT_ARR_DEFN(branch)

    AVL_HANDLE hh = L__tree->root;
    AVL_HANDLE parent = AVL_NULL;
    int cmp;

    L__BIT_ARR_CLEAR(branch)

    do
    {
      if (AVL_GET_BALANCE_FACTOR(hh) != 0)
      {
        unbal = hh;
        parent_unbal = parent;
        unbal_depth = depth;
      }
      cmp = AVL_COMPARE_NODE_NODE(h, hh);
      if (cmp == 0)
        /* Duplicate key. */
        return(hh);
      parent = hh;
      if (cmp > 0)
      {
        hh = AVL_GET_GREATER(hh, 1);
        L__BIT_ARR_1(branch, depth)
      }
      else
      {
        hh = AVL_GET_LESS(hh, 1);
        L__BIT_ARR_0(branch, depth)
      }
      L__CHECK_READ_ERROR(AVL_NULL)
      depth++;
    }
    while (hh != AVL_NULL);

    /* Add node to insert as leaf of tree. */
    if (cmp < 0)
      AVL_SET_LESS(parent, h)
    else
      AVL_SET_GREATER(parent, h)

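    /* Retrace the recorded path starting at the deepest node that had a
    ** non-zero balance factor (or at the root if every node on the path
    ** was balanced), adjusting balance factors; at most one rebalancing
    ** operation is then needed, at that node. */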
    depth = unbal_depth;

    if (unbal == AVL_NULL)
      hh = L__tree->root;
    else
    {
      cmp = L__BIT_ARR_VAL(branch, depth) ? 1 : -1;
      depth++;
      unbal_bf = AVL_GET_BALANCE_FACTOR(unbal);
      if (cmp < 0)
        unbal_bf--;
      else /* cmp > 0 */
        unbal_bf++;
      hh = cmp < 0 ? AVL_GET_LESS(unbal, 1) : AVL_GET_GREATER(unbal, 1);
      L__CHECK_READ_ERROR(AVL_NULL)
      if ((unbal_bf != -2) && (unbal_bf != 2))
      {
        /* No rebalancing of tree is necessary. */
        AVL_SET_BALANCE_FACTOR(unbal, unbal_bf)
        unbal = AVL_NULL;
      }
    }

    if (hh != AVL_NULL)
      while (h != hh)
      {
        cmp = L__BIT_ARR_VAL(branch, depth) ? 1 : -1;
        depth++;
        if (cmp < 0)
        {
          AVL_SET_BALANCE_FACTOR(hh, -1)
          hh = AVL_GET_LESS(hh, 1);
        }
        else /* cmp > 0 */
        {
          AVL_SET_BALANCE_FACTOR(hh, 1)
          hh = AVL_GET_GREATER(hh, 1);
        }
        L__CHECK_READ_ERROR(AVL_NULL)
      }

    if (unbal != AVL_NULL)
    {
      unbal = L__(balance)(L__BALANCE_PARAM_CALL_PREFIX unbal);
      L__CHECK_READ_ERROR(AVL_NULL)
      if (parent_unbal == AVL_NULL)
      {
        AVL_SET_ROOT(L__tree, unbal);
      }
      else
      {
        depth = unbal_depth - 1;
        cmp = L__BIT_ARR_VAL(branch, depth) ? 1 : -1;
        if (cmp < 0)
          AVL_SET_LESS(parent_unbal, unbal)
        else /* cmp > 0 */
          AVL_SET_GREATER(parent_unbal, unbal)
      }
    }

  }

  return(h);
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_SEARCH)

L__SC AVL_HANDLE L__(search)(L__(avl) *L__tree, AVL_KEY k, avl_search_type st)
{
  int cmp, target_cmp;
  AVL_HANDLE match_h = AVL_NULL;
  AVL_HANDLE h = L__tree->root;

  if (st & AVL_LESS)
    target_cmp = 1;
  else if (st & AVL_GREATER)
    target_cmp = -1;
  else
    target_cmp = 0;

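  /* match_h tracks the best match found so far: for an AVL_LESS
  ** (AVL_GREATER) search, the closest node whose key is less (greater)
  ** than k, or a node whose key equals k if AVL_EQUAL is also set. */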
  while (h != AVL_NULL)
  {
    cmp = AVL_COMPARE_KEY_NODE(k, h);
    if (cmp == 0)
    {
      if (st & AVL_EQUAL)
      {
        match_h = h;
        break;
      }
      cmp = -target_cmp;
    }
    else if (target_cmp != 0)
      if (!((cmp ^ target_cmp) & L__MASK_HIGH_BIT))
        /* cmp and target_cmp are both positive or both negative. */
        match_h = h;
    h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
    L__CHECK_READ_ERROR(AVL_NULL)
  }

  return(match_h);
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_SEARCH_LEAST)

L__SC AVL_HANDLE L__(search_least)(L__(avl) *L__tree)
{
  AVL_HANDLE h = L__tree->root;
  AVL_HANDLE parent = AVL_NULL;

  while (h != AVL_NULL)
  {
    parent = h;
    h = AVL_GET_LESS(h, 1);
    L__CHECK_READ_ERROR(AVL_NULL)
  }

  return(parent);
}

#endif

L__SC AVL_HANDLE L__(search_root)(L__(avl) *L__tree)
{
  return L__tree->root;
}


#if (L__IMPL_MASK & AVL_IMPL_SEARCH_GREATEST)

L__SC AVL_HANDLE L__(search_greatest)(L__(avl) *L__tree)
{
  AVL_HANDLE h = L__tree->root;
  AVL_HANDLE parent = AVL_NULL;

  while (h != AVL_NULL)
  {
    parent = h;
    h = AVL_GET_GREATER(h, 1);
    L__CHECK_READ_ERROR(AVL_NULL)
  }

  return(parent);
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_REMOVE)

/* Prototype of balance function (called by remove) in case not in
** same compilation unit.
*/
L__SC AVL_HANDLE L__(balance)(L__BALANCE_PARAM_DECL_PREFIX AVL_HANDLE bal_h);

L__SC AVL_HANDLE L__(remove)(L__(avl) *L__tree, AVL_KEY k)
{
  /* Zero-based depth in tree. */
  unsigned depth = 0, rm_depth;

  /* Records a path into the tree. If bit n is true, indicates
  ** take greater branch from the nth node in the path, otherwise
  ** take the less branch. bit 0 gives branch from root, and
  ** so on. */
  L__BIT_ARR_DEFN(branch)

  AVL_HANDLE h = L__tree->root;
  AVL_HANDLE parent = AVL_NULL;
  AVL_HANDLE child;
  AVL_HANDLE path;
  int cmp, cmp_shortened_sub_with_path = 0;
  int reduced_depth;
  int bf;
  AVL_HANDLE rm;
  AVL_HANDLE parent_rm;

  L__BIT_ARR_CLEAR(branch)

  for ( ; ; )
  {
    if (h == AVL_NULL)
      /* No node in tree with given key. */
      return(AVL_NULL);
    cmp = AVL_COMPARE_KEY_NODE(k, h);
    if (cmp == 0)
      /* Found node to remove. */
      break;
    parent = h;
    if (cmp > 0)
    {
      h = AVL_GET_GREATER(h, 1);
      L__BIT_ARR_1(branch, depth)
    }
    else
    {
      h = AVL_GET_LESS(h, 1);
      L__BIT_ARR_0(branch, depth)
    }
    L__CHECK_READ_ERROR(AVL_NULL)
    depth++;
    cmp_shortened_sub_with_path = cmp;
  }
  rm = h;
  parent_rm = parent;
  rm_depth = depth;

  /* If the node to remove is not a leaf node, we need to get a
  ** leaf node, or a node with a single leaf as its child, to put
  ** in the place of the node to remove. We will get the greatest
  ** node in the less subtree (of the node to remove), or the least
  ** node in the greater subtree. We take the leaf node from the
  ** deeper subtree, if there is one. */

  if (AVL_GET_BALANCE_FACTOR(h) < 0)
  {
    child = AVL_GET_LESS(h, 1);
    L__BIT_ARR_0(branch, depth)
    cmp = -1;
  }
  else
  {
    child = AVL_GET_GREATER(h, 1);
    L__BIT_ARR_1(branch, depth)
    cmp = 1;
  }
  L__CHECK_READ_ERROR(AVL_NULL)
  depth++;

  if (child != AVL_NULL)
  {
    cmp = -cmp;
    do
    {
      parent = h;
      h = child;
      if (cmp < 0)
      {
        child = AVL_GET_LESS(h, 1);
        L__BIT_ARR_0(branch, depth)
      }
      else
      {
        child = AVL_GET_GREATER(h, 1);
        L__BIT_ARR_1(branch, depth)
      }
      L__CHECK_READ_ERROR(AVL_NULL)
      depth++;
    }
    while (child != AVL_NULL);

    if (parent == rm)
      /* Only went through do loop once. Deleted node will be replaced
      ** in the tree structure by one of its immediate children. */
      cmp_shortened_sub_with_path = -cmp;
    else
      cmp_shortened_sub_with_path = cmp;

    /* Get the handle of the opposite child, which may be non-null. */
    child = cmp > 0 ? AVL_GET_LESS(h, 0) : AVL_GET_GREATER(h, 0);
  }

  if (parent == AVL_NULL)
  {
    /* There were only 1 or 2 nodes in this tree. */
    AVL_SET_ROOT(L__tree, child);
  }
  else if (cmp_shortened_sub_with_path < 0)
    AVL_SET_LESS(parent, child)
  else
    AVL_SET_GREATER(parent, child)

  /* "path" is the parent of the subtree being eliminated or reduced
  ** from a depth of 2 to 1. If "path" is the node to be removed, we
  ** set path to the node we're about to poke into the position of the
  ** node to be removed. */
  path = parent == rm ? h : parent;

  if (h != rm)
  {
    /* Poke in the replacement for the node to be removed. */
    AVL_SET_LESS(h, AVL_GET_LESS(rm, 0))
    AVL_SET_GREATER(h, AVL_GET_GREATER(rm, 0))
    AVL_SET_BALANCE_FACTOR(h, AVL_GET_BALANCE_FACTOR(rm))
    if (parent_rm == AVL_NULL)
    {
      AVL_SET_ROOT(L__tree, h);
    }
    else
    {
      depth = rm_depth - 1;
      if (L__BIT_ARR_VAL(branch, depth))
        AVL_SET_GREATER(parent_rm, h)
      else
        AVL_SET_LESS(parent_rm, h)
    }
  }

  if (path != AVL_NULL)
  {
    /* Create a temporary linked list from the parent of the path node
    ** to the root node. */
    h = L__tree->root;
    parent = AVL_NULL;
    depth = 0;
    while (h != path)
    {
      if (L__BIT_ARR_VAL(branch, depth))
      {
        child = AVL_GET_GREATER(h, 1);
        AVL_SET_GREATER(h, parent)
      }
      else
      {
        child = AVL_GET_LESS(h, 1);
        AVL_SET_LESS(h, parent)
      }
      L__CHECK_READ_ERROR(AVL_NULL)
      depth++;
      parent = h;
      h = child;
    }

    /* Climb from the path node to the root node using the linked
    ** list, restoring the tree structure and rebalancing as necessary.
    */
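    /* reduced_depth is true while the subtree just processed has become
    ** one level shorter, in which case the parent's balance factor must
    ** still be adjusted (and the parent possibly rebalanced). */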
    reduced_depth = 1;
    cmp = cmp_shortened_sub_with_path;
    for ( ; ; )
    {
      if (reduced_depth)
      {
        bf = AVL_GET_BALANCE_FACTOR(h);
        if (cmp < 0)
          bf++;
        else /* cmp > 0 */
          bf--;
        if ((bf == -2) || (bf == 2))
        {
          h = L__(balance)(L__BALANCE_PARAM_CALL_PREFIX h);
          L__CHECK_READ_ERROR(AVL_NULL)
          bf = AVL_GET_BALANCE_FACTOR(h);
        }
        else
          AVL_SET_BALANCE_FACTOR(h, bf)
        reduced_depth = (bf == 0);
      }
      if (parent == AVL_NULL)
        break;
      child = h;
      h = parent;
      depth--;
      cmp = L__BIT_ARR_VAL(branch, depth) ? 1 : -1;
      if (cmp < 0)
      {
        parent = AVL_GET_LESS(h, 1);
        AVL_SET_LESS(h, child)
      }
      else
      {
        parent = AVL_GET_GREATER(h, 1);
        AVL_SET_GREATER(h, child)
      }
      L__CHECK_READ_ERROR(AVL_NULL)
    }
    AVL_SET_ROOT(L__tree, h);
  }

  return(rm);
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_SUBST)

L__SC AVL_HANDLE L__(subst)(L__(avl) *L__tree, AVL_HANDLE new_node)
{
  AVL_HANDLE h = L__tree->root;
  AVL_HANDLE parent = AVL_NULL;
  int cmp, last_cmp = 0;

  /* Search for node already in tree with same key. */
  for ( ; ; )
  {
    if (h == AVL_NULL)
      /* No node in tree with same key as new node. */
      return(AVL_NULL);
    cmp = AVL_COMPARE_NODE_NODE(new_node, h);
    if (cmp == 0)
      /* Found the node to substitute new one for. */
      break;
    last_cmp = cmp;
    parent = h;
    h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
    L__CHECK_READ_ERROR(AVL_NULL)
  }

  /* Copy tree housekeeping fields from node in tree to new node. */
  AVL_SET_LESS(new_node, AVL_GET_LESS(h, 0))
  AVL_SET_GREATER(new_node, AVL_GET_GREATER(h, 0))
  AVL_SET_BALANCE_FACTOR(new_node, AVL_GET_BALANCE_FACTOR(h))

  if (parent == AVL_NULL)
  {
    /* New node is also new root. */
    AVL_SET_ROOT(L__tree, new_node);
  }
  else
  {
    /* Make parent point to new node. */
    if (last_cmp < 0)
      AVL_SET_LESS(parent, new_node)
    else
      AVL_SET_GREATER(parent, new_node)
  }

  return(h);
}

#endif

#ifdef AVL_BUILD_ITER_TYPE

#if (L__IMPL_MASK & AVL_IMPL_BUILD)

L__SC int L__(build)(
  L__(avl) *L__tree, AVL_BUILD_ITER_TYPE p, L__SIZE num_nodes)
{
  /* Gives path to subtree being built. If bit n is false, branch
  ** less from the node at depth n, if true branch greater. */
  L__BIT_ARR_DEFN(branch)

  /* If bit n is true, then for the current subtree at depth n, its
  ** greater subtree has one more node than its less subtree. */
  L__BIT_ARR_DEFN(rem)

  /* Depth of root node of current subtree. */
  unsigned depth = 0;

  /* Number of nodes in current subtree. */
  L__SIZE num_sub = num_nodes;

  /* The algorithm relies on a stack of nodes whose less subtree has
  ** been built, but whose greater subtree has not yet been built.
  ** The stack is implemented as a linked list. The nodes are linked
  ** together by having the "greater" handle of a node set to the
  ** next node in the list. "less_parent" is the handle of the first
  ** node in the list. */
  AVL_HANDLE less_parent = AVL_NULL;

  /* h is root of current subtree, child is one of its children. */
  AVL_HANDLE h;
  AVL_HANDLE child;

  if (num_nodes == 0)
  {
    AVL_SET_ROOT(L__tree, AVL_NULL);
    return(1);
  }

  L__BIT_ARR_CLEAR(branch)
  L__BIT_ARR_CLEAR(rem)

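  /* Each pass of the outer loop builds one complete subtree: descend
  ** while halving num_sub and recording the remainders in "rem", emit a
  ** one- or two-node subtree from the iterator, then climb back up,
  ** attaching each completed greater subtree to the parent popped off
  ** the "less_parent" stack. */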
  for ( ; ; )
  {
    while (num_sub > 2)
    {
      /* Subtract one for root of subtree. */
      num_sub--;
      if (num_sub & 1)
        L__BIT_ARR_1(rem, depth)
      else
        L__BIT_ARR_0(rem, depth)
      L__BIT_ARR_0(branch, depth)
      depth++;
      num_sub >>= 1;
    }

    if (num_sub == 2)
    {
      /* Build a subtree with two nodes, slanting to greater.
      ** I arbitrarily chose to always have the extra node in the
      ** greater subtree when there is an odd number of nodes to
      ** split between the two subtrees. */

      h = AVL_BUILD_ITER_VAL(p);
      L__CHECK_READ_ERROR(0)
      AVL_BUILD_ITER_INCR(p)
      child = AVL_BUILD_ITER_VAL(p);
      L__CHECK_READ_ERROR(0)
      AVL_BUILD_ITER_INCR(p)
      AVL_SET_LESS(child, AVL_NULL)
      AVL_SET_GREATER(child, AVL_NULL)
      AVL_SET_BALANCE_FACTOR(child, 0)
      AVL_SET_GREATER(h, child)
      AVL_SET_LESS(h, AVL_NULL)
      AVL_SET_BALANCE_FACTOR(h, 1)
    }
    else /* num_sub == 1 */
    {
      /* Build a subtree with one node. */

      h = AVL_BUILD_ITER_VAL(p);
      L__CHECK_READ_ERROR(0)
      AVL_BUILD_ITER_INCR(p)
      AVL_SET_LESS(h, AVL_NULL)
      AVL_SET_GREATER(h, AVL_NULL)
      AVL_SET_BALANCE_FACTOR(h, 0)
    }

    while (depth)
    {
      depth--;
      if (!L__BIT_ARR_VAL(branch, depth))
        /* We've completed a less subtree. */
        break;

      /* We've completed a greater subtree, so attach it to
      ** its parent (that is less than it). We pop the parent
      ** off the stack of less parents. */
      child = h;
      h = less_parent;
      less_parent = AVL_GET_GREATER(h, 1);
      L__CHECK_READ_ERROR(0)
      AVL_SET_GREATER(h, child)
      /* num_sub = 2 * (num_sub - rem[depth]) + rem[depth] + 1 */
      num_sub <<= 1;
      num_sub += L__BIT_ARR_VAL(rem, depth) ? 0 : 1;
      if (num_sub & (num_sub - 1))
        /* num_sub is not a power of 2. */
        AVL_SET_BALANCE_FACTOR(h, 0)
      else
        /* num_sub is a power of 2. */
        AVL_SET_BALANCE_FACTOR(h, 1)
    }

    if (num_sub == num_nodes)
      /* We've completed the full tree. */
      break;

    /* The subtree we've completed is the less subtree of the
    ** next node in the sequence. */

    child = h;
    h = AVL_BUILD_ITER_VAL(p);
    L__CHECK_READ_ERROR(0)
    AVL_BUILD_ITER_INCR(p)
    AVL_SET_LESS(h, child)

    /* Put h into stack of less parents. */
    AVL_SET_GREATER(h, less_parent)
    less_parent = h;

    /* Proceed to creating greater than subtree of h. */
    L__BIT_ARR_1(branch, depth)
    num_sub += L__BIT_ARR_VAL(rem, depth) ? 1 : 0;
    depth++;

  } /* end for ( ; ; ) */

  AVL_SET_ROOT(L__tree, h);

  return(1);
}

#endif

#endif

#if (L__IMPL_MASK & AVL_IMPL_INIT_ITER)

/* Initialize depth to invalid value, to indicate iterator is
** invalid. (Depth is zero-based.) It's not necessary to initialize
** iterators prior to passing them to the "start" function.
*/
L__SC void L__(init_iter)(L__(iter) *iter) { iter->depth = ~0; }

#endif

#ifdef AVL_READ_ERRORS_HAPPEN

#define L__CHECK_READ_ERROR_INV_DEPTH \
  { if (AVL_READ_ERROR) { iter->depth = ~0; return; } }

#else

#define L__CHECK_READ_ERROR_INV_DEPTH

#endif

#if (L__IMPL_MASK & AVL_IMPL_START_ITER)

L__SC void L__(start_iter)(
  L__(avl) *L__tree, L__(iter) *iter, AVL_KEY k, avl_search_type st)
{
  AVL_HANDLE h = L__tree->root;
  unsigned d = 0;
  int cmp, target_cmp;

  /* Save the tree that we're going to iterate through in a
  ** member variable. */
  iter->tree_ = L__tree;

  iter->depth = ~0;

  if (h == AVL_NULL)
    /* Tree is empty. */
    return;

  if (st & AVL_LESS)
    /* Key can be greater than key of starting node. */
    target_cmp = 1;
  else if (st & AVL_GREATER)
    /* Key can be less than key of starting node. */
    target_cmp = -1;
  else
    /* Key must be same as key of starting node. */
    target_cmp = 0;

  for ( ; ; )
  {
    cmp = AVL_COMPARE_KEY_NODE(k, h);
    if (cmp == 0)
    {
      if (st & AVL_EQUAL)
      {
        /* Equal node was sought and found as starting node. */
        iter->depth = d;
        break;
      }
      cmp = -target_cmp;
    }
    else if (target_cmp != 0)
      if (!((cmp ^ target_cmp) & L__MASK_HIGH_BIT))
        /* cmp and target_cmp are both negative or both positive. */
        iter->depth = d;
    h = cmp < 0 ? AVL_GET_LESS(h, 1) : AVL_GET_GREATER(h, 1);
    L__CHECK_READ_ERROR_INV_DEPTH
    if (h == AVL_NULL)
      break;
    if (cmp > 0)
      L__BIT_ARR_1(iter->branch, d)
    else
      L__BIT_ARR_0(iter->branch, d)
    iter->path_h[d++] = h;
  }
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_START_ITER_LEAST)

L__SC void L__(start_iter_least)(L__(avl) *L__tree, L__(iter) *iter)
{
  AVL_HANDLE h = L__tree->root;

  iter->tree_ = L__tree;

  iter->depth = ~0;

  L__BIT_ARR_ALL(iter->branch, 0)

  while (h != AVL_NULL)
  {
    if (iter->depth != ~0)
      iter->path_h[iter->depth] = h;
    iter->depth++;
    h = AVL_GET_LESS(h, 1);
    L__CHECK_READ_ERROR_INV_DEPTH
  }
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_START_ITER_GREATEST)

L__SC void L__(start_iter_greatest)(L__(avl) *L__tree, L__(iter) *iter)
{
  AVL_HANDLE h = L__tree->root;

  iter->tree_ = L__tree;

  iter->depth = ~0;

  L__BIT_ARR_ALL(iter->branch, 1)

  while (h != AVL_NULL)
  {
    if (iter->depth != ~0)
      iter->path_h[iter->depth] = h;
    iter->depth++;
    h = AVL_GET_GREATER(h, 1);
    L__CHECK_READ_ERROR_INV_DEPTH
  }
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_GET_ITER)

L__SC AVL_HANDLE L__(get_iter)(L__(iter) *iter)
{
  if (iter->depth == ~0)
    return(AVL_NULL);

  return(iter->depth == 0 ?
         iter->tree_->root : iter->path_h[iter->depth - 1]);
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_INCR_ITER)

L__SC void L__(incr_iter)(L__(iter) *iter)
{
#define L__tree (iter->tree_)

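  /* Advance to the in-order successor: if the current node has a greater
  ** subtree, descend to its least node; otherwise climb the recorded path
  ** until a node is reached from its less branch, or invalidate the
  ** iterator if there is none. */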
  if (iter->depth != ~0)
  {
    AVL_HANDLE h =
      AVL_GET_GREATER((iter->depth == 0 ?
        iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
    L__CHECK_READ_ERROR_INV_DEPTH

    if (h == AVL_NULL)
      do
      {
        if (iter->depth == 0)
        {
          iter->depth = ~0;
          break;
        }
        iter->depth--;
      }
      while (L__BIT_ARR_VAL(iter->branch, iter->depth));
    else
    {
      L__BIT_ARR_1(iter->branch, iter->depth)
      iter->path_h[iter->depth++] = h;
      for ( ; ; )
      {
        h = AVL_GET_LESS(h, 1);
        L__CHECK_READ_ERROR_INV_DEPTH
        if (h == AVL_NULL)
          break;
        L__BIT_ARR_0(iter->branch, iter->depth)
        iter->path_h[iter->depth++] = h;
      }
    }
  }

#undef L__tree
}

#endif

#if (L__IMPL_MASK & AVL_IMPL_DECR_ITER)

L__SC void L__(decr_iter)(L__(iter) *iter)
{
#define L__tree (iter->tree_)

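  /* Mirror image of incr_iter: step to the in-order predecessor. */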
  if (iter->depth != ~0)
  {
    AVL_HANDLE h =
      AVL_GET_LESS((iter->depth == 0 ?
        iter->tree_->root : iter->path_h[iter->depth - 1]), 1);
    L__CHECK_READ_ERROR_INV_DEPTH

    if (h == AVL_NULL)
      do
      {
        if (iter->depth == 0)
        {
          iter->depth = ~0;
          break;
        }
        iter->depth--;
      }
      while (!L__BIT_ARR_VAL(iter->branch, iter->depth));
    else
    {
      L__BIT_ARR_0(iter->branch, iter->depth)
      iter->path_h[iter->depth++] = h;
      for ( ; ; )
      {
        h = AVL_GET_GREATER(h, 1);
        L__CHECK_READ_ERROR_INV_DEPTH
        if (h == AVL_NULL)
          break;
        L__BIT_ARR_1(iter->branch, iter->depth)
        iter->path_h[iter->depth++] = h;
      }
    }
  }

#undef L__tree
}

#endif

/* Tidy up the preprocessor symbol name space. */
#undef L__
#undef L__EST_LONG_BIT
#undef L__SIZE
#undef L__MASK_HIGH_BIT
#undef L__LONG_BIT
#undef L__BIT_ARR_DEFN
#undef L__BIT_ARR_CLEAR
#undef L__BIT_ARR_VAL
#undef L__BIT_ARR_0
#undef L__BIT_ARR_1
#undef L__BIT_ARR_ALL
#undef L__CHECK_READ_ERROR
#undef L__CHECK_READ_ERROR_INV_DEPTH
#undef L__BIT_ARR_LONGS
#undef L__IMPL_MASK
#undef L__SC
#undef L__BALANCE_PARAM_CALL_PREFIX
#undef L__BALANCE_PARAM_DECL_PREFIX
