/*-
 * Copyright (c) 1992 The Regents of the University of California.
 * All rights reserved.
 *
 * This code is derived from software contributed to Berkeley by
 * Peter McIlroy.
 *
 * %sccs.include.redist.c%
 */

#if defined(LIBC_SCCS) && !defined(lint)
static char sccsid[] = "@(#)merge.c	5.2 (Berkeley) 12/02/92";
#endif /* LIBC_SCCS and not lint */

/*
 * Hybrid exponential search/linear search merge sort with hybrid
 * natural/pairwise first pass.  Requires about .3% more comparisons
 * for random data than LSMS with pairwise first pass alone.
 * It works for objects as small as two bytes.
 */

#define NATURAL
#define THRESHOLD 16	/* Best choice for natural merge cut-off. */

/* #define NATURAL to get hybrid natural merge.
 * (The default is pairwise merging.)
 */

#include <sys/types.h>

#include <errno.h>
#include <stdlib.h>
#include <string.h>

static void setup __P((u_char *, u_char *, size_t, size_t, int (*)()));
static void insertionsort __P((u_char *, size_t, size_t, int (*)()));

#define ISIZE sizeof(int)
#define PSIZE sizeof(u_char *)
#define ICOPY_LIST(src, dst, last)				\
	do							\
	*(int*)dst = *(int*)src, src += ISIZE, dst += ISIZE;	\
	while (src < last)
#define ICOPY_ELT(src, dst, i)					\
	do							\
	*(int*) dst = *(int*) src, src += ISIZE, dst += ISIZE;	\
	while (i -= ISIZE)

#define CCOPY_LIST(src, dst, last)		\
	do					\
		*dst++ = *src++;		\
	while (src < last)
#define CCOPY_ELT(src, dst, i)			\
	do					\
		*dst++ = *src++;		\
	while (i -= 1)

/*
 * Find the next possible pointer head.  (Trickery for forcing an array
 * to do double duty as a linked list when objects do not align with word
 * boundaries.)
 */
/* Assumption: PSIZE is a power of 2. */
#define EVAL(p) (u_char **)						\
	((u_char *)0 +							\
	    (((u_char *)p + PSIZE - 1 - (u_char *) 0) & ~(PSIZE - 1)))
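
/*
 * Illustrative note (added; not part of the original source): EVAL(p)
 * rounds the byte address p up to the next PSIZE boundary, yielding an
 * aligned slot in which a link pointer can be stored.  Assuming
 * PSIZE == 8, EVAL(0x1000) is 0x1000, while EVAL(0x1001) through
 * EVAL(0x1008) all yield 0x1008.  This is what lets list2 double as a
 * linked list of run heads even when the element size is not a
 * multiple of the pointer size.
 */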

/*
 * Arguments are as for qsort.
 */
int
mergesort(base, nmemb, size, cmp)
	void *base;
	size_t nmemb;
	register size_t size;
	int (*cmp) __P((const void *, const void *));
{
	register int i, sense;
	int big, iflag;
	register u_char *f1, *f2, *t, *b, *tp2, *q, *l1, *l2;
	u_char *list2, *list1, *p2, *p, *last, **p1;

	if (size < PSIZE / 2) {		/* Pointers must fit into 2 * size. */
		errno = EINVAL;
		return (-1);
	}

	/*
	 * XXX
	 * Stupid subtraction for the Cray.
	 */
	iflag = 0;
	if (!(size % ISIZE) && !(((char *)base - (char *)0) % ISIZE))
		iflag = 1;

	if ((list2 = malloc(nmemb * size + PSIZE)) == NULL)
		return (-1);

	list1 = base;
	setup(list1, list2, nmemb, size, cmp);
	last = list2 + nmemb * size;
	i = big = 0;
	while (*EVAL(list2) != last) {
		l2 = list1;
		p1 = EVAL(list1);
		for (tp2 = p2 = list2; p2 != last; p1 = EVAL(l2)) {
			p2 = *EVAL(p2);
			f1 = l2;
			f2 = l1 = list1 + (p2 - list2);
			if (p2 != last)
				p2 = *EVAL(p2);
			l2 = list1 + (p2 - list2);
			while (f1 < l1 && f2 < l2) {
				if ((*cmp)(f1, f2) <= 0) {
					q = f2;
					b = f1, t = l1;
					sense = -1;
				} else {
					q = f1;
					b = f2, t = l2;
					sense = 0;
				}
				if (!big) {	/* here i = 0 */
LINEAR:					while ((b += size) < t &&
					    cmp(q, b) > sense)
						if (++i == 6) {
							big = 1;
							goto EXPONENTIAL;
						}
				} else {
EXPONENTIAL:				for (i = size; ; i <<= 1)
						if ((p = (b + i)) >= t) {
							if ((p = t - size) > b &&
							    (*cmp)(q, p) <= sense)
								t = p;
							else
								b = p;
							break;
						} else if ((*cmp)(q, p) <= sense) {
							t = p;
							if (i == size)
								big = 0;
							goto FASTCASE;
						} else
							b = p;
SLOWCASE:				while (t > b + size) {
						i = (((t - b) / size) >> 1) * size;
						if ((*cmp)(q, p = b + i) <= sense)
							t = p;
						else
							b = p;
					}
					goto COPY;
FASTCASE:				while (i > size)
						if ((*cmp)(q,
						    p = b + (i >>= 1)) <= sense)
							t = p;
						else
							b = p;
COPY:					b = t;
				}
				i = size;
				if (q == f1) {
					if (iflag) {
						ICOPY_LIST(f2, tp2, b);
						ICOPY_ELT(f1, tp2, i);
					} else {
						CCOPY_LIST(f2, tp2, b);
						CCOPY_ELT(f1, tp2, i);
					}
				} else {
					if (iflag) {
						ICOPY_LIST(f1, tp2, b);
						ICOPY_ELT(f2, tp2, i);
					} else {
						CCOPY_LIST(f1, tp2, b);
						CCOPY_ELT(f2, tp2, i);
					}
				}
			}
			if (f2 < l2) {
				if (iflag)
					ICOPY_LIST(f2, tp2, l2);
				else
					CCOPY_LIST(f2, tp2, l2);
			} else if (f1 < l1) {
				if (iflag)
					ICOPY_LIST(f1, tp2, l1);
				else
					CCOPY_LIST(f1, tp2, l1);
			}
			*p1 = l2;
		}
		tp2 = list1;	/* swap list1, list2 */
		list1 = list2;
		list2 = tp2;
		last = list2 + nmemb * size;
	}
	if (base == list2) {
		memmove(list2, list1, nmemb * size);
		list2 = list1;
	}
	free(list2);
	return (0);
}
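
/*
 * Explanatory note (added; not part of the original source): each
 * iteration of the outer while loop above makes one merge pass, merging
 * adjacent runs from one buffer into the other and then swapping the
 * roles of list1 and list2, so a pass roughly halves the number of runs.
 * The loop ends once the first run slot (read through EVAL()) points at
 * `last', i.e. a single run covers the whole array.  Within a pass, the
 * search for the merge point starts with a short linear scan and falls
 * back to exponential (galloping) search plus binary refinement when one
 * input keeps winning, which is the hybrid described in the header
 * comment.  The final memmove() copies the result back into `base' when
 * the last pass left the sorted data in the temporary buffer.
 */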

#define swap(a, b) {					\
	s = b;						\
	i = size;					\
	do {						\
		tmp = *a; *a++ = *s; *s++ = tmp;	\
	} while (--i);					\
	a -= size;					\
}
#define reverse(bot, top) {				\
	s = top;					\
	do {						\
		i = size;				\
		do {					\
			tmp = *bot; *bot++ = *s; *s++ = tmp; \
		} while (--i);				\
		s -= size2;				\
	} while (bot < s);				\
}
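
/*
 * Note on the first pass (added; not part of the original source):
 * setup() below records the initial runs as a linked list threaded
 * through list2: the slot at each run's starting offset (located with
 * EVAL()) holds the list2 address of the next run's start, and the
 * final run's slot holds list2 + nmemb * size.  The last 4 or 5
 * elements are insertion sorted separately so the pair-at-a-time scan
 * never reads past the end of the array; for n <= 5 the whole array
 * becomes a single run and the merge loop in mergesort() is skipped
 * entirely.
 */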

/*
 * Optional hybrid natural/pairwise first pass.  Eats up list1 in runs of
 * increasing order, list2 in a corresponding linked list.  Checks for runs
 * when THRESHOLD/2 pairs compare with same sense.  (Only used when NATURAL
 * is defined.  Otherwise simple pairwise merging is used.)
 */
static void
setup(list1, list2, n, size, cmp)
	size_t n, size;
	int (*cmp) __P((const void *, const void *));
	u_char *list1, *list2;
{
	int i, length, size2, tmp, sense;
	u_char *f1, *f2, *s, *l2, *last, *p2;

	size2 = size * 2;
	if (n <= 5) {
		insertionsort(list1, n, size, cmp);
		*EVAL(list2) = (u_char *) list2 + n * size;
		return;
	}
	/*
	 * Avoid running pointers out of bounds; limit n to evens
	 * for simplicity.
	 */
	i = 4 + (n & 1);
	insertionsort(list1 + (n - i) * size, i, size, cmp);
	last = list1 + size * (n - i);
	*EVAL(list2 + (last - list1)) = list2 + n * size;

#ifdef NATURAL
	p2 = list2;
	f1 = list1;
	sense = (cmp(f1, f1 + size) > 0);
	for (; f1 < last; sense = !sense) {
		length = 2;
		/* Find pairs with same sense. */
		for (f2 = f1 + size2; f2 < last; f2 += size2) {
			if ((cmp(f2, f2 + size) > 0) != sense)
				break;
			length += 2;
		}
		if (length < THRESHOLD) {		/* Pairwise merge */
			do {
				p2 = *EVAL(p2) = f1 + size2 - list1 + list2;
				if (sense > 0)
					swap(f1, f1 + size);
			} while ((f1 += size2) < f2);
		} else {				/* Natural merge */
			l2 = f2;
			for (f2 = f1 + size2; f2 < l2; f2 += size2) {
				if ((cmp(f2 - size, f2) > 0) != sense) {
					p2 = *EVAL(p2) = f2 - list1 + list2;
					if (sense > 0)
						reverse(f1, f2 - size);
					f1 = f2;
				}
			}
			if (sense > 0)
				reverse(f1, f2 - size);
			f1 = f2;
			if (f2 < last || cmp(f2, f2 + size) > 0)
				p2 = *EVAL(p2) = f2 - list1 + list2;
			else
				p2 = *EVAL(p2) = list2 + n * size;
		}
	}
#else /* pairwise merge only. */
	for (f1 = list1, p2 = list2; f1 < last; f1 += size2) {
		p2 = *EVAL(p2) = p2 + size2;
		if (cmp(f1, f1 + size) > 0)
			swap(f1, f1 + size);
	}
#endif /* NATURAL */
}

/*
 * This is to avoid out-of-bounds addresses in sorting the
 * last 4 elements.
 */
static void
insertionsort(a, n, size, cmp)
	u_char *a;
	size_t n, size;
	int (*cmp) __P((const void *, const void *));
{
	u_char *ai, *s, *t, *u, tmp;
	int i;

	for (ai = a + size; n-- > 1; ai += size)
		for (t = ai; t > a; t -= size) {
			u = t - size;
			if (cmp(u, t) <= 0)
				break;
			swap(u, t);
		}
}
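
/*
 * Illustrative test driver (added; not part of the original source).
 * A minimal sketch of how mergesort() might be exercised, assuming a
 * hosted environment with <stdio.h>; the comparison function icmp and
 * the sample data are hypothetical.  It is kept inside #if 0 so it
 * never affects the library build.
 */
#if 0
#include <stdio.h>

/* Compare two ints through void pointers, qsort-style. */
static int
icmp(const void *a, const void *b)
{
	return (*(const int *)a < *(const int *)b ? -1 :
	    *(const int *)a > *(const int *)b);
}

int
main()
{
	int v[] = { 9, 4, 7, 1, 8, 2, 6, 3, 5, 0 };
	int i, n = sizeof(v) / sizeof(v[0]);

	/* mergesort() takes qsort-like arguments and reports failure
	 * (e.g. EINVAL for too-small elements) by returning -1. */
	if (mergesort(v, (size_t)n, sizeof(v[0]), icmp) == -1) {
		perror("mergesort");
		return (1);
	}
	for (i = 0; i < n; i++)
		printf("%d%c", v[i], i == n - 1 ? '\n' : ' ');
	return (0);
}
#endif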