1 /* Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999 Aladdin Enterprises. All rights reserved. 2 3 This file is part of AFPL Ghostscript. 4 5 AFPL Ghostscript is distributed with NO WARRANTY OF ANY KIND. No author or 6 distributor accepts any responsibility for the consequences of using it, or 7 for whether it serves any particular purpose or works at all, unless he or 8 she says so in writing. Refer to the Aladdin Free Public License (the 9 "License") for full details. 10 11 Every copy of AFPL Ghostscript must include a copy of the License, normally 12 in a plain ASCII text file named PUBLIC. The License grants you the right 13 to copy, modify and redistribute AFPL Ghostscript, but only under certain 14 conditions described in the License. Among other things, the License 15 requires that the copyright notice and this notice be preserved on all 16 copies. 17 */ 18 19 /*$Id: igcref.c,v 1.3 2001/03/12 03:50:02 ghostgum Exp $ */ 20 /* ref garbage collector for Ghostscript */ 21 #include "memory_.h" 22 #include "ghost.h" 23 #include "gsexit.h" 24 #include "gsstruct.h" /* for gxalloc.h included by iastate.h */ 25 #include "iname.h" 26 #include "iastate.h" 27 #include "idebug.h" 28 #include "igc.h" 29 #include "ipacked.h" 30 #include "store.h" /* for ref_assign_inline */ 31 32 /* Define whether to trace every step of relocating ref pointers. */ 33 #if 0 34 # define rputc(c) dputc(c) 35 #else 36 # define rputc(c) DO_NOTHING 37 #endif 38 39 /* Forward references */ 40 ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed); 41 refs_proc_reloc(igc_reloc_refs); 42 43 /* 44 * Define the 'structure' type descriptor for refs. 45 * This is special because it has different shared procs. 46 */ 47 private gc_proc_clear_reloc(refs_clear_reloc); 48 private gc_proc_set_reloc(refs_set_reloc); 49 private gc_proc_compact(refs_compact); 50 private const struct_shared_procs_t refs_shared_procs = 51 {refs_clear_reloc, refs_set_reloc, refs_compact}; 52 private struct_proc_clear_marks(refs_clear_marks); 53 private struct_proc_reloc_ptrs(refs_do_reloc); 54 const gs_memory_struct_type_t st_refs = 55 {sizeof(ref), "refs", &refs_shared_procs, refs_clear_marks, 0, refs_do_reloc}; 56 57 /* 58 * Define the GC procedures for structs that actually contain refs. 59 * These are special because the shared refs_* procedures 60 * are never called. Instead, we unmark the individual refs in clear_marks, 61 * disregard refs_*_reloc (because we will never relocate a ptr_ref_type 62 * pointer pointing into the structure), disregard refs_compact (because 63 * compaction is never required), and remove the marks in reloc_ptrs. 64 * See also the comment about ptr_ref_type in imemory.h. 65 */ 66 CLEAR_MARKS_PROC(ref_struct_clear_marks) 67 { 68 ref *pref = (ref *) vptr; 69 ref *end = (ref *) ((char *)vptr + size); 70 71 for (; pref < end; pref++) 72 r_clear_attrs(pref, l_mark); 73 } 74 ENUM_PTRS_BEGIN_PROC(ref_struct_enum_ptrs) 75 { 76 if (index >= size / sizeof(ref)) 77 return 0; 78 pep->ptr = (const ref *)vptr + index; 79 return ptr_ref_type; 80 ENUM_PTRS_END_PROC 81 } 82 RELOC_PTRS_BEGIN(ref_struct_reloc_ptrs) 83 { 84 ref *beg = vptr; 85 ref *end = (ref *) ((char *)vptr + size); 86 87 igc_reloc_refs((ref_packed *) beg, (ref_packed *) end, gcst); 88 ref_struct_clear_marks(vptr, size, pstype); 89 } RELOC_PTRS_END 90 91 /* ------ Unmarking phase ------ */ 92 93 /* Unmark a single ref. */ 94 void 95 ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored) 96 { 97 ref_packed *rpp = (ref_packed *)pep->ptr; 98 99 if (r_is_packed(rpp)) 100 r_clear_pmark(rpp); 101 else 102 r_clear_attrs((ref *)rpp, l_mark); 103 } 104 105 /* Unmarking routine for ref objects. */ 106 private void 107 refs_clear_marks(void /*obj_header_t */ *vptr, uint size, 108 const gs_memory_struct_type_t * pstype) 109 { 110 ref_packed *rp = (ref_packed *) vptr; 111 ref_packed *end = (ref_packed *) ((byte *) vptr + size); 112 113 /* Since the last ref is full-size, we only need to check for */ 114 /* the end of the block when we see one of those. */ 115 for (;;) { 116 if (r_is_packed(rp)) { 117 #ifdef DEBUG 118 if (gs_debug_c('8')) { 119 dlprintf1(" [8]unmark packed 0x%lx ", (ulong) rp); 120 debug_print_ref((const ref *)rp); 121 dputs("\n"); 122 } 123 #endif 124 r_clear_pmark(rp); 125 rp++; 126 } else { /* full-size ref */ 127 ref *const pref = (ref *)rp; 128 129 #ifdef DEBUG 130 if (gs_debug_c('8')) { 131 dlprintf1(" [8]unmark ref 0x%lx ", (ulong) rp); 132 debug_print_ref(pref); 133 dputs("\n"); 134 } 135 #endif 136 r_clear_attrs(pref, l_mark); 137 rp += packed_per_ref; 138 if (rp >= (ref_packed *) end) 139 break; 140 } 141 } 142 } 143 144 /* ------ Marking phase ------ */ 145 146 /* Mark a ref. Return true if new mark. */ 147 bool 148 ptr_ref_mark(enum_ptr_t *pep, gc_state_t * ignored) 149 { 150 ref_packed *rpp = (void *)pep->ptr; 151 152 if (r_is_packed(rpp)) { 153 if (r_has_pmark(rpp)) 154 return false; 155 r_set_pmark(rpp); 156 } else { 157 ref *const pref = (ref *)rpp; 158 159 if (r_has_attr(pref, l_mark)) 160 return false; 161 r_set_attrs(pref, l_mark); 162 } 163 return true; 164 } 165 166 /* ------ Relocation planning phase ------ */ 167 168 /* 169 * We store relocation in the size field of refs that don't use it, 170 * so that we don't have to scan all the way to an unmarked object. 171 * We must avoid nulls, which sometimes have useful information 172 * in their size fields, and the types above t_next_index, which are 173 * actually operators in disguise and also use the size field. 174 */ 175 176 /* Clear the relocation for a ref object. */ 177 private void 178 refs_clear_reloc(obj_header_t * hdr, uint size) 179 { 180 ref_packed *rp = (ref_packed *) (hdr + 1); 181 ref_packed *end = (ref_packed *) ((byte *) rp + size); 182 183 while (rp < end) { 184 if (r_is_packed(rp)) 185 rp++; 186 else { 187 /* Full-size ref. Store the relocation here if possible. */ 188 ref *const pref = (ref *)rp; 189 190 if (!ref_type_uses_size_or_null(r_type(pref))) { 191 if_debug1('8', " [8]clearing reloc at 0x%lx\n", (ulong) rp); 192 r_set_size(pref, 0); 193 } 194 rp += packed_per_ref; 195 } 196 } 197 } 198 199 /* Set the relocation for a ref object. */ 200 private bool 201 refs_set_reloc(obj_header_t * hdr, uint reloc, uint size) 202 { 203 ref_packed *rp = (ref_packed *) (hdr + 1); 204 ref_packed *end = (ref_packed *) ((byte *) rp + size); 205 uint freed = 0; 206 207 /* 208 * We have to be careful to keep refs aligned properly. 209 * For the moment, we do this by either keeping or discarding 210 * an entire (aligned) block of align_packed_per_ref packed elements 211 * as a unit. We know that align_packed_per_ref <= packed_per_ref, 212 * and we also know that packed refs are always allocated in blocks 213 * of align_packed_per_ref, so this makes things relatively easy. 214 */ 215 while (rp < end) { 216 if (r_is_packed(rp)) { 217 #if align_packed_per_ref == 1 218 if (r_has_pmark(rp)) { 219 if_debug1('8', 220 " [8]packed ref 0x%lx is marked\n", 221 (ulong) rp); 222 rp++; 223 } else { 224 #else 225 int i; 226 227 /* 228 * Note: align_packed_per_ref is typically 229 * 2 or 4 for 32-bit processors. 230 */ 231 #define all_marked (align_packed_per_ref * lp_mark) 232 # if align_packed_per_ref == 2 233 # if arch_sizeof_int == arch_sizeof_short * 2 234 # undef all_marked 235 # define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark ) 236 # define marked (*(int *)rp & all_marked) 237 # else 238 # define marked ((*rp & lp_mark) + (rp[1] & lp_mark)) 239 # endif 240 # else 241 # if align_packed_per_ref == 4 242 # define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\ 243 (rp[2] & lp_mark) + (rp[3] & lp_mark)) 244 # else 245 /* 246 * The value of marked is logically a uint, not an int: 247 * we declare it as int only to avoid a compiler warning 248 * message about using a non-int value in a switch statement. 249 */ 250 int marked = *rp & lp_mark; 251 252 for (i = 1; i < align_packed_per_ref; i++) 253 marked += rp[i] & lp_mark; 254 # endif 255 # endif 256 /* 257 * Now marked is lp_mark * the number of marked 258 * packed refs in the aligned block, except for 259 * a couple of special cases above. 260 */ 261 switch (marked) { 262 case all_marked: 263 if_debug2('8', 264 " [8]packed refs 0x%lx..0x%lx are marked\n", 265 (ulong) rp, 266 (ulong) (rp + (align_packed_per_ref - 1))); 267 rp += align_packed_per_ref; 268 break; 269 default: 270 /* At least one packed ref in the block */ 271 /* is marked: Keep the whole block. */ 272 for (i = align_packed_per_ref; i--; rp++) { 273 r_set_pmark(rp); 274 if_debug1('8', 275 " [8]packed ref 0x%lx is marked\n", 276 (ulong) rp); 277 } 278 break; 279 case 0: 280 #endif 281 if_debug2('8', " [8]%d packed ref(s) at 0x%lx are unmarked\n", 282 align_packed_per_ref, (ulong) rp); 283 { 284 uint rel = reloc + freed; 285 286 /* Change this to an integer so we can */ 287 /* store the relocation here. */ 288 *rp = pt_tag(pt_integer) + 289 min(rel, packed_max_value); 290 } 291 rp += align_packed_per_ref; 292 freed += sizeof(ref_packed) * align_packed_per_ref; 293 } 294 } else { /* full-size ref */ 295 uint rel = reloc + freed; 296 297 /* The following assignment is logically */ 298 /* unnecessary; we do it only for convenience */ 299 /* in debugging. */ 300 ref *pref = (ref *) rp; 301 302 if (!r_has_attr(pref, l_mark)) { 303 if_debug1('8', " [8]ref 0x%lx is unmarked\n", 304 (ulong) pref); 305 /* Change this to a mark so we can */ 306 /* store the relocation. */ 307 r_set_type(pref, t_mark); 308 r_set_size(pref, rel); 309 freed += sizeof(ref); 310 } else { 311 if_debug1('8', " [8]ref 0x%lx is marked\n", 312 (ulong) pref); 313 /* Store the relocation here if possible. */ 314 if (!ref_type_uses_size_or_null(r_type(pref))) { 315 if_debug2('8', " [8]storing reloc %u at 0x%lx\n", 316 rel, (ulong) pref); 317 r_set_size(pref, rel); 318 } 319 } 320 rp += packed_per_ref; 321 } 322 } 323 if_debug3('7', " [7]at end of refs 0x%lx, size = %u, freed = %u\n", 324 (ulong) (hdr + 1), size, freed); 325 if (freed == size) 326 return false; 327 #if arch_sizeof_int > arch_sizeof_short 328 /* 329 * If the final relocation can't fit in the r_size field 330 * (which can't happen if the object shares a chunk with 331 * any other objects, so we know reloc = 0 in this case), 332 * we have to keep the entire object unless there are no 333 * references to any ref in it. 334 */ 335 if (freed <= max_ushort) 336 return true; 337 /* 338 * We have to mark all surviving refs, but we also must 339 * overwrite any non-surviving refs with something that 340 * doesn't contain any pointers. 341 */ 342 rp = (ref_packed *) (hdr + 1); 343 while (rp < end) { 344 if (r_is_packed(rp)) { 345 if (!r_has_pmark(rp)) 346 *rp = pt_tag(pt_integer) | lp_mark; 347 ++rp; 348 } else { /* The following assignment is logically */ 349 /* unnecessary; we do it only for convenience */ 350 /* in debugging. */ 351 ref *pref = (ref *) rp; 352 353 if (!r_has_attr(pref, l_mark)) { 354 r_set_type_attrs(pref, t_mark, l_mark); 355 r_set_size(pref, reloc); 356 } else { 357 if (!ref_type_uses_size_or_null(r_type(pref))) 358 r_set_size(pref, reloc); 359 } 360 rp += packed_per_ref; 361 } 362 } 363 /* The last ref has to remain unmarked. */ 364 r_clear_attrs((ref *) rp - 1, l_mark); 365 #endif 366 return true; 367 } 368 369 /* ------ Relocation phase ------ */ 370 371 /* Relocate all the pointers in a block of refs. */ 372 private void 373 refs_do_reloc(void /*obj_header_t */ *vptr, uint size, 374 const gs_memory_struct_type_t * pstype, gc_state_t * gcst) 375 { 376 igc_reloc_refs((ref_packed *) vptr, 377 (ref_packed *) ((char *)vptr + size), 378 gcst); 379 } 380 /* Relocate the contents of a block of refs. */ 381 /* If gcst->relocating_untraced is true, we are relocating pointers from an */ 382 /* untraced space, so relocate all refs, not just marked ones. */ 383 void 384 igc_reloc_refs(ref_packed * from, ref_packed * to, gc_state_t * gcst) 385 { 386 int min_trace = gcst->min_collect; 387 ref_packed *rp = from; 388 bool do_all = gcst->relocating_untraced; 389 390 while (rp < to) { 391 ref *pref; 392 #ifdef DEBUG 393 const void *before = 0; 394 const void *after = 0; 395 # define DO_RELOC(var, stat)\ 396 BEGIN before = (var); stat; after = (var); END 397 # define SET_RELOC(var, expr)\ 398 BEGIN before = (var); after = (var) = (expr); END 399 #else 400 # define DO_RELOC(var, stat) stat 401 # define SET_RELOC(var, expr) var = expr 402 #endif 403 404 if (r_is_packed(rp)) { 405 rp++; 406 continue; 407 } 408 /* The following assignment is logically unnecessary; */ 409 /* we do it only for convenience in debugging. */ 410 pref = (ref *) rp; 411 if_debug3('8', " [8]relocating %s %d ref at 0x%lx", 412 (r_has_attr(pref, l_mark) ? "marked" : "unmarked"), 413 r_btype(pref), (ulong) pref); 414 if ((r_has_attr(pref, l_mark) || do_all) && 415 r_space(pref) >= min_trace 416 ) { 417 switch (r_type(pref)) { 418 /* Struct cases */ 419 case t_file: 420 DO_RELOC(pref->value.pfile, RELOC_VAR(pref->value.pfile)); 421 break; 422 case t_device: 423 DO_RELOC(pref->value.pdevice, 424 RELOC_VAR(pref->value.pdevice)); 425 break; 426 case t_fontID: 427 case t_struct: 428 case t_astruct: 429 DO_RELOC(pref->value.pstruct, 430 RELOC_VAR(pref->value.pstruct)); 431 break; 432 /* Non-trivial non-struct cases */ 433 case t_dictionary: 434 rputc('d'); 435 SET_RELOC(pref->value.pdict, 436 (dict *)igc_reloc_ref_ptr((ref_packed *)pref->value.pdict, gcst)); 437 break; 438 case t_array: 439 { 440 uint size = r_size(pref); 441 442 if (size != 0) { /* value.refs might be NULL */ 443 444 /* 445 * If the array is large, we allocated it in its 446 * own object (at least originally -- this might 447 * be a pointer to a subarray.) In this case, 448 * we know it is the only object in its 449 * containing st_refs object, so we know that 450 * the mark containing the relocation appears 451 * just after it. 452 */ 453 if (size < max_size_st_refs / sizeof(ref)) { 454 rputc('a'); 455 SET_RELOC(pref->value.refs, 456 (ref *) igc_reloc_ref_ptr( 457 (ref_packed *) pref->value.refs, gcst)); 458 } else { 459 rputc('A'); 460 /* 461 * See the t_shortarray case below for why we 462 * decrement size. 463 */ 464 --size; 465 SET_RELOC(pref->value.refs, 466 (ref *) igc_reloc_ref_ptr( 467 (ref_packed *) (pref->value.refs + size), 468 gcst) - size); 469 } 470 } 471 } 472 break; 473 case t_mixedarray: 474 if (r_size(pref) != 0) { /* value.refs might be NULL */ 475 rputc('m'); 476 SET_RELOC(pref->value.packed, 477 igc_reloc_ref_ptr(pref->value.packed, gcst)); 478 } 479 break; 480 case t_shortarray: 481 { 482 uint size = r_size(pref); 483 484 /* 485 * Since we know that igc_reloc_ref_ptr works by 486 * scanning forward, and we know that all the 487 * elements of this array itself are marked, we can 488 * save some scanning time by relocating the pointer 489 * to the end of the array rather than the 490 * beginning. 491 */ 492 if (size != 0) { /* value.refs might be NULL */ 493 rputc('s'); 494 /* 495 * igc_reloc_ref_ptr has to be able to determine 496 * whether the pointer points into a space that 497 * isn't being collected. It does this by 498 * checking whether the referent of the pointer 499 * is marked. For this reason, we have to pass 500 * a pointer to the last real element of the 501 * array, rather than just beyond it. 502 */ 503 --size; 504 SET_RELOC(pref->value.packed, 505 igc_reloc_ref_ptr(pref->value.packed + size, 506 gcst) - size); 507 } 508 } 509 break; 510 case t_name: 511 { 512 void *psub = name_ref_sub_table(pref); 513 void *rsub = RELOC_OBJ(psub); /* gcst implicit */ 514 515 SET_RELOC(pref->value.pname, 516 (name *) 517 ((char *)rsub + ((char *)pref->value.pname - 518 (char *)psub))); 519 } break; 520 case t_string: 521 { 522 gs_string str; 523 524 str.data = pref->value.bytes; 525 str.size = r_size(pref); 526 527 DO_RELOC(str.data, RELOC_STRING_VAR(str)); 528 pref->value.bytes = str.data; 529 } 530 break; 531 case t_oparray: 532 rputc('o'); 533 SET_RELOC(pref->value.const_refs, 534 (const ref *)igc_reloc_ref_ptr((const ref_packed *)pref->value.const_refs, gcst)); 535 break; 536 default: 537 goto no_reloc; /* don't print trace message */ 538 } 539 if_debug2('8', ", 0x%lx => 0x%lx", (ulong)before, (ulong)after); 540 } 541 no_reloc: 542 if_debug0('8', "\n"); 543 rp += packed_per_ref; 544 } 545 } 546 547 /* Relocate a pointer to a ref. */ 548 /* See gsmemory.h for why the argument is const and the result is not. */ 549 ref_packed * 550 igc_reloc_ref_ptr(const ref_packed * prp, gc_state_t * ignored) 551 { 552 /* 553 * Search forward for relocation. This algorithm is intrinsically very 554 * inefficient; we hope eventually to replace it with a better one. 555 */ 556 const ref_packed *rp = prp; 557 uint dec = 0; 558 #ifdef ALIGNMENT_ALIASING_BUG 559 const ref *rpref; 560 # define RP_REF(rp) (rpref = (const ref *)rp, rpref) 561 #else 562 # define RP_REF(rp) ((const ref *)rp) 563 #endif 564 565 /* 566 * Iff this pointer points into a space that wasn't traced, 567 * the referent won't be marked. In this case, we shouldn't 568 * do any relocation. Check for this first. 569 */ 570 if (r_is_packed(rp)) { 571 if (!r_has_pmark(rp)) 572 goto ret_rp; 573 } else { 574 if (!r_has_attr(RP_REF(rp), l_mark)) 575 goto ret_rp; 576 } 577 for (;;) { 578 579 if (r_is_packed(rp)) { 580 /* 581 * Normally, an unmarked packed ref will be an 582 * integer whose value is the amount of relocation. 583 * However, the relocation value might have been 584 * too large to fit. If this is the case, for 585 * each such unmarked packed ref we pass over, 586 * we have to decrement the final relocation. 587 */ 588 rputc((*rp & lp_mark ? '1' : '0')); 589 if (!(*rp & lp_mark)) { 590 if (*rp != pt_tag(pt_integer) + packed_max_value) { 591 /* This is a stored relocation value. */ 592 rputc('\n'); 593 rp = print_reloc(prp, "ref", 594 (const ref_packed *) 595 ((const char *)prp - 596 (*rp & packed_value_mask) + dec)); 597 break; 598 } 599 /* 600 * We know this is the first of an aligned block 601 * of packed refs. Skip over the entire block, 602 * decrementing the final relocation. 603 */ 604 dec += sizeof(ref_packed) * align_packed_per_ref; 605 rp += align_packed_per_ref; 606 } else 607 rp++; 608 continue; 609 } 610 if (!ref_type_uses_size_or_null(r_type(RP_REF(rp)))) { 611 /* reloc is in r_size */ 612 rputc('\n'); 613 rp = print_reloc(prp, "ref", 614 (const ref_packed *) 615 (r_size(RP_REF(rp)) == 0 ? prp : 616 (const ref_packed *)((const char *)prp - 617 r_size(RP_REF(rp)) + dec))); 618 break; 619 } 620 rputc('u'); 621 rp += packed_per_ref; 622 } 623 ret_rp: 624 /* Use a severely deprecated pun to remove the const property. */ 625 { 626 union { const ref_packed *r; ref_packed *w; } u; 627 628 u.r = rp; 629 return u.w; 630 } 631 } 632 633 /* ------ Compaction phase ------ */ 634 635 /* Compact a ref object. */ 636 /* Remove the marks at the same time. */ 637 private void 638 refs_compact(obj_header_t * pre, obj_header_t * dpre, uint size) 639 { 640 ref_packed *dest; 641 ref_packed *src; 642 ref_packed *end; 643 uint new_size; 644 645 src = (ref_packed *) (pre + 1); 646 end = (ref_packed *) ((byte *) src + size); 647 /* 648 * We know that a block of refs always ends with an unmarked 649 * full-size ref, so we only need to check for reaching the end 650 * of the block when we see one of those. 651 */ 652 if (dpre == pre) /* Loop while we don't need to copy. */ 653 for (;;) { 654 if (r_is_packed(src)) { 655 if (!r_has_pmark(src)) 656 break; 657 if_debug1('8', " [8]packed ref 0x%lx \"copied\"\n", 658 (ulong) src); 659 *src &= ~lp_mark; 660 src++; 661 } else { /* full-size ref */ 662 ref *const pref = (ref *)src; 663 664 if (!r_has_attr(pref, l_mark)) 665 break; 666 if_debug1('8', " [8]ref 0x%lx \"copied\"\n", (ulong) src); 667 r_clear_attrs(pref, l_mark); 668 src += packed_per_ref; 669 } 670 } else 671 *dpre = *pre; 672 dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre)); 673 for (;;) { 674 if (r_is_packed(src)) { 675 if (r_has_pmark(src)) { 676 if_debug2('8', " [8]packed ref 0x%lx copied to 0x%lx\n", 677 (ulong) src, (ulong) dest); 678 *dest++ = *src & ~lp_mark; 679 } 680 src++; 681 } else { /* full-size ref */ 682 if (r_has_attr((ref *) src, l_mark)) { 683 ref rtemp; 684 685 if_debug2('8', " [8]ref 0x%lx copied to 0x%lx\n", 686 (ulong) src, (ulong) dest); 687 /* We can't just use ref_assign_inline, */ 688 /* because the source and destination */ 689 /* might overlap! */ 690 ref_assign_inline(&rtemp, (ref *) src); 691 r_clear_attrs(&rtemp, l_mark); 692 ref_assign_inline((ref *) dest, &rtemp); 693 dest += packed_per_ref; 694 src += packed_per_ref; 695 } else { /* check for end of block */ 696 src += packed_per_ref; 697 if (src >= end) 698 break; 699 } 700 } 701 } 702 new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref); 703 #ifdef DEBUG 704 /* Check that the relocation came out OK. */ 705 /* NOTE: this check only works within a single chunk. */ 706 if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) { 707 lprintf3("Reloc error for refs 0x%lx: reloc = %lu, stored = %u\n", 708 (ulong) dpre, (ulong) ((byte *) src - (byte *) dest), 709 (uint) r_size((ref *) src - 1)); 710 gs_abort(); 711 } 712 #endif 713 /* Pad to a multiple of sizeof(ref). */ 714 while (new_size & (sizeof(ref) - 1)) 715 *dest++ = pt_tag(pt_integer), 716 new_size += sizeof(ref_packed); 717 /* We want to make the newly freed space into a free block, */ 718 /* but we can only do this if we have enough room. */ 719 if (size - new_size < sizeof(obj_header_t)) { /* Not enough room. Pad to original size. */ 720 while (new_size < size) 721 *dest++ = pt_tag(pt_integer), 722 new_size += sizeof(ref_packed); 723 } else { 724 obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1); 725 726 pfree->o_alone = 0; 727 pfree->o_size = size - new_size - sizeof(obj_header_t); 728 pfree->o_type = &st_bytes; 729 } 730 /* Re-create the final ref. */ 731 r_set_type((ref *) dest, t_integer); 732 dpre->o_size = new_size; 733 } 734