/* Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999 Aladdin Enterprises. All rights reserved.

   This software is provided AS-IS with no warranty, either express or
   implied.

   This software is distributed under license and may not be copied,
   modified or distributed except as expressly authorized under the terms
   of the license contained in the file LICENSE in this distribution.

   For more information about licensing, please refer to
   http://www.ghostscript.com/licensing/. For information on
   commercial licensing, go to http://www.artifex.com/licensing/ or
   contact Artifex Software, Inc., 101 Lucas Valley Road #110,
   San Rafael, CA 94903, U.S.A., +1(415)492-9861.
*/

/* $Id: igcref.c,v 1.6 2004/08/04 19:36:12 stefan Exp $ */
/* ref garbage collector for Ghostscript */
#include "memory_.h"
#include "ghost.h"
#include "gsexit.h"
#include "gsstruct.h"           /* for gxalloc.h included by iastate.h */
#include "iname.h"
#include "iastate.h"
#include "idebug.h"
#include "igc.h"
#include "ipacked.h"
#include "store.h"              /* for ref_assign_inline */

/* Define whether to trace every step of relocating ref pointers. */
#if 0
# define rputc(c) dputc(c)
#else
# define rputc(c) DO_NOTHING
#endif
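
/*
 * When the tracing branch above is enabled, the relocation code below
 * emits one character per step: 'd' (dictionary), 'a'/'A' (small/large
 * array), 'm' (mixed array), 's' (short array), 'o' (operator array),
 * '0'/'1' (unmarked/marked packed ref scanned over), 'u' (full-size ref
 * scanned over), and a newline once a stored relocation has been found.
 */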

/* Forward references */
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);
refs_proc_reloc(igc_reloc_refs);

/*
 * Define the 'structure' type descriptor for refs.
 * This is special because it has different shared procs.
 */
private gc_proc_clear_reloc(refs_clear_reloc);
private gc_proc_set_reloc(refs_set_reloc);
private gc_proc_compact(refs_compact);
private const struct_shared_procs_t refs_shared_procs =
{refs_clear_reloc, refs_set_reloc, refs_compact};
private struct_proc_clear_marks(refs_clear_marks);
private struct_proc_reloc_ptrs(refs_do_reloc);
const gs_memory_struct_type_t st_refs =
{sizeof(ref), "refs", &refs_shared_procs, refs_clear_marks, 0, refs_do_reloc};

/*
 * Define the GC procedures for structs that actually contain refs.
 * These are special because the shared refs_* procedures
 * are never called. Instead, we unmark the individual refs in clear_marks,
 * disregard refs_*_reloc (because we will never relocate a ptr_ref_type
 * pointer pointing into the structure), disregard refs_compact (because
 * compaction is never required), and remove the marks in reloc_ptrs.
 * See also the comment about ptr_ref_type in imemory.h.
 */
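/*
 * Because a ptr_ref_type pointer into such a struct is never relocated
 * (see above), only the refs embedded in the struct body need work here:
 * clear_marks unmarks them, enum_ptrs reports them one at a time, and
 * reloc_ptrs relocates their contents and then removes the marks.
 */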
CLEAR_MARKS_PROC(ref_struct_clear_marks)
{
    ref *pref = (ref *) vptr;
    ref *end = (ref *) ((char *)vptr + size);

    for (; pref < end; pref++)
        r_clear_attrs(pref, l_mark);
}
ENUM_PTRS_BEGIN_PROC(ref_struct_enum_ptrs)
{
    if (index >= size / sizeof(ref))
        return 0;
    pep->ptr = (const ref *)vptr + index;
    return ptr_ref_type;
    ENUM_PTRS_END_PROC
}
RELOC_PTRS_BEGIN(ref_struct_reloc_ptrs)
{
    vm_spaces spaces = gcst->spaces;
    const gs_memory_t *cmem = space_system->stable_memory;

    ref *beg = vptr;
    ref *end = (ref *) ((char *)vptr + size);

    igc_reloc_refs((ref_packed *) beg, (ref_packed *) end, gcst);
    ref_struct_clear_marks(cmem, vptr, size, pstype);
} RELOC_PTRS_END

/* ------ Unmarking phase ------ */

/* Unmark a single ref. */
void
ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored)
{
    ref_packed *rpp = (ref_packed *)pep->ptr;

    if (r_is_packed(rpp))
        r_clear_pmark(rpp);
    else
        r_clear_attrs((ref *)rpp, l_mark);
}

/* Unmarking routine for ref objects. */
private void
refs_clear_marks(const gs_memory_t *cmem,
                 void /*obj_header_t */ *vptr, uint size,
                 const gs_memory_struct_type_t * pstype)
{
    ref_packed *rp = (ref_packed *) vptr;
    ref_packed *end = (ref_packed *) ((byte *) vptr + size);

    /* Since the last ref is full-size, we only need to check for */
    /* the end of the block when we see one of those. */
    for (;;) {
        if (r_is_packed(rp)) {
#ifdef DEBUG
            if (gs_debug_c('8')) {
                dlprintf1(" [8]unmark packed 0x%lx ", (ulong) rp);
                debug_print_ref(cmem, (const ref *)rp);
                dputs("\n");
            }
#endif
            r_clear_pmark(rp);
            rp++;
        } else {                /* full-size ref */
            ref *const pref = (ref *)rp;

#ifdef DEBUG
            if (gs_debug_c('8')) {
                dlprintf1(" [8]unmark ref 0x%lx ", (ulong) rp);
                debug_print_ref(cmem, pref);
                dputs("\n");
            }
#endif
            r_clear_attrs(pref, l_mark);
            rp += packed_per_ref;
            if (rp >= (ref_packed *) end)
                break;
        }
    }
}

/* ------ Marking phase ------ */

/* Mark a ref. Return true if new mark. */
bool
ptr_ref_mark(enum_ptr_t *pep, gc_state_t * ignored)
{
    ref_packed *rpp = (void *)pep->ptr;

    if (r_is_packed(rpp)) {
        if (r_has_pmark(rpp))
            return false;
        r_set_pmark(rpp);
    } else {
        ref *const pref = (ref *)rpp;

        if (r_has_attr(pref, l_mark))
            return false;
        r_set_attrs(pref, l_mark);
    }
    return true;
}

/* ------ Relocation planning phase ------ */

/*
 * We store relocation in the size field of refs that don't use it,
 * so that we don't have to scan all the way to an unmarked object.
 * We must avoid nulls, which sometimes have useful information
 * in their size fields, and the types above t_next_index, which are
 * actually operators in disguise and also use the size field.
 */
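/*
 * For example, the size field of a marked integer or boolean ref carries
 * no information of its own, so it is free to hold the relocation; array
 * and string refs need r_size for their length, so they are skipped, as
 * are nulls and the operator-like types, via ref_type_uses_size_or_null.
 */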

/* Clear the relocation for a ref object. */
private void
refs_clear_reloc(obj_header_t *hdr, uint size)
{
    ref_packed *rp = (ref_packed *) (hdr + 1);
    ref_packed *end = (ref_packed *) ((byte *) rp + size);

    while (rp < end) {
        if (r_is_packed(rp))
            rp++;
        else {
            /* Full-size ref. Store the relocation here if possible. */
            ref *const pref = (ref *)rp;

            if (!ref_type_uses_size_or_null(r_type(pref))) {
                if_debug1('8', " [8]clearing reloc at 0x%lx\n", (ulong) rp);
                r_set_size(pref, 0);
            }
            rp += packed_per_ref;
        }
    }
}

/* Set the relocation for a ref object. */
private bool
refs_set_reloc(obj_header_t * hdr, uint reloc, uint size)
{
    ref_packed *rp = (ref_packed *) (hdr + 1);
    ref_packed *end = (ref_packed *) ((byte *) rp + size);
    uint freed = 0;

    /*
     * We have to be careful to keep refs aligned properly.
     * For the moment, we do this by either keeping or discarding
     * an entire (aligned) block of align_packed_per_ref packed elements
     * as a unit. We know that align_packed_per_ref <= packed_per_ref,
     * and we also know that packed refs are always allocated in blocks
     * of align_packed_per_ref, so this makes things relatively easy.
     */
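    /*
     * Example: with align_packed_per_ref == 2, packed refs are examined in
     * aligned pairs. A pair with at least one marked member is kept whole
     * (and both members end up marked below), so the alignment of whatever
     * follows it is preserved; only fully unmarked pairs count as freed.
     */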
    while (rp < end) {
        if (r_is_packed(rp)) {
#if align_packed_per_ref == 1
            if (r_has_pmark(rp)) {
                if_debug1('8',
                          " [8]packed ref 0x%lx is marked\n",
                          (ulong) rp);
                rp++;
            } else {
#else
            int i;

            /*
             * Note: align_packed_per_ref is typically
             * 2 or 4 for 32-bit processors.
             */
#define all_marked (align_packed_per_ref * lp_mark)
# if align_packed_per_ref == 2
#  if arch_sizeof_int == arch_sizeof_short * 2
#   undef all_marked
#   define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark )
#   define marked (*(int *)rp & all_marked)
#  else
#   define marked ((*rp & lp_mark) + (rp[1] & lp_mark))
#  endif
# else
#  if align_packed_per_ref == 4
#   define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\
                   (rp[2] & lp_mark) + (rp[3] & lp_mark))
#  else
            /*
             * The value of marked is logically a uint, not an int:
             * we declare it as int only to avoid a compiler warning
             * message about using a non-int value in a switch statement.
             */
            int marked = *rp & lp_mark;

            for (i = 1; i < align_packed_per_ref; i++)
                marked += rp[i] & lp_mark;
#  endif
# endif
            /*
             * Now marked is lp_mark * the number of marked
             * packed refs in the aligned block, except for
             * a couple of special cases above.
             */
            switch (marked) {
                case all_marked:
                    if_debug2('8',
                              " [8]packed refs 0x%lx..0x%lx are marked\n",
                              (ulong) rp,
                              (ulong) (rp + (align_packed_per_ref - 1)));
                    rp += align_packed_per_ref;
                    break;
                default:
                    /* At least one packed ref in the block */
                    /* is marked: Keep the whole block. */
                    for (i = align_packed_per_ref; i--; rp++) {
                        r_set_pmark(rp);
                        if_debug1('8',
                                  " [8]packed ref 0x%lx is marked\n",
                                  (ulong) rp);
                    }
                    break;
                case 0:
#endif
                    if_debug2('8', " [8]%d packed ref(s) at 0x%lx are unmarked\n",
                              align_packed_per_ref, (ulong) rp);
                    {
                        uint rel = reloc + freed;

                        /* Change this to an integer so we can */
                        /* store the relocation here. */
                        *rp = pt_tag(pt_integer) +
                            min(rel, packed_max_value);
                    }
                    rp += align_packed_per_ref;
                    freed += sizeof(ref_packed) * align_packed_per_ref;
            }
        } else {                /* full-size ref */
            uint rel = reloc + freed;

            /* The following assignment is logically */
            /* unnecessary; we do it only for convenience */
            /* in debugging. */
            ref *pref = (ref *) rp;

            if (!r_has_attr(pref, l_mark)) {
                if_debug1('8', " [8]ref 0x%lx is unmarked\n",
                          (ulong) pref);
                /* Change this to a mark so we can */
                /* store the relocation. */
                r_set_type(pref, t_mark);
                r_set_size(pref, rel);
                freed += sizeof(ref);
            } else {
                if_debug1('8', " [8]ref 0x%lx is marked\n",
                          (ulong) pref);
                /* Store the relocation here if possible. */
                if (!ref_type_uses_size_or_null(r_type(pref))) {
                    if_debug2('8', " [8]storing reloc %u at 0x%lx\n",
                              rel, (ulong) pref);
                    r_set_size(pref, rel);
                }
            }
            rp += packed_per_ref;
        }
    }
    if_debug3('7', " [7]at end of refs 0x%lx, size = %u, freed = %u\n",
              (ulong) (hdr + 1), size, freed);
    if (freed == size)
        return false;
#if arch_sizeof_int > arch_sizeof_short
    /*
     * If the final relocation can't fit in the r_size field
     * (which can't happen if the object shares a chunk with
     * any other objects, so we know reloc = 0 in this case),
     * we have to keep the entire object unless there are no
     * references to any ref in it.
     */
    if (freed <= max_ushort)
        return true;
    /*
     * We have to mark all surviving refs, but we also must
     * overwrite any non-surviving refs with something that
     * doesn't contain any pointers.
     */
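    /*
     * Turning each dead full-size ref into a marked t_mark and each dead
     * packed ref into a marked packed integer keeps the whole object
     * together through compaction while guaranteeing that none of the dead
     * slots will ever be followed as a pointer.
     */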
    rp = (ref_packed *) (hdr + 1);
    while (rp < end) {
        if (r_is_packed(rp)) {
            if (!r_has_pmark(rp))
                *rp = pt_tag(pt_integer) | lp_mark;
            ++rp;
        } else {                /* The following assignment is logically */
                                /* unnecessary; we do it only for convenience */
                                /* in debugging. */
            ref *pref = (ref *) rp;

            if (!r_has_attr(pref, l_mark)) {
                r_set_type_attrs(pref, t_mark, l_mark);
                r_set_size(pref, reloc);
            } else {
                if (!ref_type_uses_size_or_null(r_type(pref)))
                    r_set_size(pref, reloc);
            }
            rp += packed_per_ref;
        }
    }
    /* The last ref has to remain unmarked. */
    r_clear_attrs((ref *) rp - 1, l_mark);
#endif
    return true;
}

/* ------ Relocation phase ------ */

/* Relocate all the pointers in a block of refs. */
private void
refs_do_reloc(void /*obj_header_t */ *vptr, uint size,
              const gs_memory_struct_type_t * pstype, gc_state_t * gcst)
{
    igc_reloc_refs((ref_packed *) vptr,
                   (ref_packed *) ((char *)vptr + size),
                   gcst);
}
/* Relocate the contents of a block of refs. */
/* If gcst->relocating_untraced is true, we are relocating pointers from an */
/* untraced space, so relocate all refs, not just marked ones. */
void
igc_reloc_refs(ref_packed * from, ref_packed * to, gc_state_t * gcst)
{
    int min_trace = gcst->min_collect;
    ref_packed *rp = from;
    bool do_all = gcst->relocating_untraced;

    vm_spaces spaces = gcst->spaces;
    const gs_memory_t *cmem = space_system->stable_memory;

    while (rp < to) {
        ref *pref;
#ifdef DEBUG
        const void *before = 0;
        const void *after = 0;
# define DO_RELOC(var, stat)\
    BEGIN before = (var); stat; after = (var); END
# define SET_RELOC(var, expr)\
    BEGIN before = (var); after = (var) = (expr); END
#else
# define DO_RELOC(var, stat) stat
# define SET_RELOC(var, expr) var = expr
#endif

        if (r_is_packed(rp)) {
            rp++;
            continue;
        }
        /* The following assignment is logically unnecessary; */
        /* we do it only for convenience in debugging. */
        pref = (ref *) rp;
        if_debug3('8', " [8]relocating %s %d ref at 0x%lx",
                  (r_has_attr(pref, l_mark) ? "marked" : "unmarked"),
                  r_btype(pref), (ulong) pref);
        if ((r_has_attr(pref, l_mark) || do_all) &&
            r_space(pref) >= min_trace
            ) {
            switch (r_type(pref)) {
                    /* Struct cases */
                case t_file:
                    DO_RELOC(pref->value.pfile, RELOC_VAR(pref->value.pfile));
                    break;
                case t_device:
                    DO_RELOC(pref->value.pdevice,
                             RELOC_VAR(pref->value.pdevice));
                    break;
                case t_fontID:
                case t_struct:
                case t_astruct:
                    DO_RELOC(pref->value.pstruct,
                             RELOC_VAR(pref->value.pstruct));
                    break;
                    /* Non-trivial non-struct cases */
                case t_dictionary:
                    rputc('d');
                    SET_RELOC(pref->value.pdict,
                              (dict *)igc_reloc_ref_ptr((ref_packed *)pref->value.pdict, gcst));
                    break;
                case t_array:
                    {
                        uint size = r_size(pref);

                        if (size != 0) {        /* value.refs might be NULL */

                            /*
                             * If the array is large, we allocated it in its
                             * own object (at least originally -- this might
                             * be a pointer to a subarray.) In this case,
                             * we know it is the only object in its
                             * containing st_refs object, so we know that
                             * the mark containing the relocation appears
                             * just after it.
                             */
                            if (size < max_size_st_refs / sizeof(ref)) {
                                rputc('a');
                                SET_RELOC(pref->value.refs,
                                          (ref *) igc_reloc_ref_ptr(
                                           (ref_packed *) pref->value.refs, gcst));
                            } else {
                                rputc('A');
                                /*
                                 * See the t_shortarray case below for why we
                                 * decrement size.
                                 */
                                --size;
                                SET_RELOC(pref->value.refs,
                                          (ref *) igc_reloc_ref_ptr(
                                           (ref_packed *) (pref->value.refs + size),
                                           gcst) - size);
                            }
                        }
                    }
                    break;
                case t_mixedarray:
                    if (r_size(pref) != 0) {    /* value.refs might be NULL */
                        rputc('m');
                        SET_RELOC(pref->value.packed,
                                  igc_reloc_ref_ptr(pref->value.packed, gcst));
                    }
                    break;
                case t_shortarray:
                    {
                        uint size = r_size(pref);

                        /*
                         * Since we know that igc_reloc_ref_ptr works by
                         * scanning forward, and we know that all the
                         * elements of this array itself are marked, we can
                         * save some scanning time by relocating the pointer
                         * to the end of the array rather than the
                         * beginning.
                         */
                        if (size != 0) {        /* value.refs might be NULL */
                            rputc('s');
                            /*
                             * igc_reloc_ref_ptr has to be able to determine
                             * whether the pointer points into a space that
                             * isn't being collected. It does this by
                             * checking whether the referent of the pointer
                             * is marked. For this reason, we have to pass
                             * a pointer to the last real element of the
                             * array, rather than just beyond it.
                             */
                            --size;
                            SET_RELOC(pref->value.packed,
                                      igc_reloc_ref_ptr(pref->value.packed + size,
                                                        gcst) - size);
                        }
                    }
                    break;
                case t_name:
                    {
                        void *psub = name_ref_sub_table(cmem, pref);
                        void *rsub = RELOC_OBJ(psub); /* gcst implicit */

                        SET_RELOC(pref->value.pname,
                                  (name *)
                                  ((char *)rsub + ((char *)pref->value.pname -
                                                   (char *)psub)));
                    } break;
                case t_string:
                    {
                        gs_string str;

                        str.data = pref->value.bytes;
                        str.size = r_size(pref);

                        DO_RELOC(str.data, RELOC_STRING_VAR(str));
                        pref->value.bytes = str.data;
                    }
                    break;
                case t_oparray:
                    rputc('o');
                    SET_RELOC(pref->value.const_refs,
                              (const ref *)igc_reloc_ref_ptr((const ref_packed *)pref->value.const_refs, gcst));
                    break;
                default:
                    goto no_reloc; /* don't print trace message */
            }
            if_debug2('8', ", 0x%lx => 0x%lx", (ulong)before, (ulong)after);
        }
      no_reloc:
        if_debug0('8', "\n");
        rp += packed_per_ref;
    }
}

/* Relocate a pointer to a ref. */
/* See gsmemory.h for why the argument is const and the result is not. */
ref_packed *
igc_reloc_ref_ptr(const ref_packed * prp, gc_state_t *gcst)
{
    /*
     * Search forward for relocation. This algorithm is intrinsically very
     * inefficient; we hope eventually to replace it with a better one.
     */
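    /*
     * The scan starts at the (marked) referent and walks forward, one
     * packed or full-size ref at a time, until it reaches a ref that
     * carries a stored relocation; prp is then displaced backward by that
     * amount, adjusted by 'dec' (see below).
     */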
    const ref_packed *rp = prp;
    uint dec = 0;
#ifdef ALIGNMENT_ALIASING_BUG
    const ref *rpref;
# define RP_REF(rp) (rpref = (const ref *)rp, rpref)
#else
# define RP_REF(rp) ((const ref *)rp)
#endif
    /*
     * Iff this pointer points into a space that wasn't traced,
     * the referent won't be marked. In this case, we shouldn't
     * do any relocation. Check for this first.
     */
    if (r_is_packed(rp)) {
        if (!r_has_pmark(rp))
            goto ret_rp;
    } else {
        if (!r_has_attr(RP_REF(rp), l_mark))
            goto ret_rp;
    }
    for (;;) {

        if (r_is_packed(rp)) {
            /*
             * Normally, an unmarked packed ref will be an
             * integer whose value is the amount of relocation.
             * However, the relocation value might have been
             * too large to fit. If this is the case, for
             * each such unmarked packed ref we pass over,
             * we have to decrement the final relocation.
             */
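            /*
             * 'dec' accumulates the size of those overflowed (and therefore
             * freed) packed blocks: they lie between prp and the ref that
             * finally supplies the relocation, so their contribution must
             * be subtracted before the relocation is applied to prp.
             */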
            rputc((*rp & lp_mark ? '1' : '0'));
            if (!(*rp & lp_mark)) {
                if (*rp != pt_tag(pt_integer) + packed_max_value) {
                    /* This is a stored relocation value. */
                    rputc('\n');
                    rp = print_reloc(prp, "ref",
                                     (const ref_packed *)
                                     ((const char *)prp -
                                      (*rp & packed_value_mask) + dec));
                    break;
                }
                /*
                 * We know this is the first of an aligned block
                 * of packed refs. Skip over the entire block,
                 * decrementing the final relocation.
                 */
                dec += sizeof(ref_packed) * align_packed_per_ref;
                rp += align_packed_per_ref;
            } else
                rp++;
            continue;
        }
        if (!ref_type_uses_size_or_null(r_type(RP_REF(rp)))) {
            /* reloc is in r_size */
            rputc('\n');
            rp = print_reloc(prp, "ref",
                             (const ref_packed *)
                             (r_size(RP_REF(rp)) == 0 ? prp :
                              (const ref_packed *)((const char *)prp -
                                                   r_size(RP_REF(rp)) + dec)));
            break;
        }
        rputc('u');
        rp += packed_per_ref;
    }
  ret_rp:
    /* Use a severely deprecated pun to remove the const property. */
    {
        union { const ref_packed *r; ref_packed *w; } u;

        u.r = rp;
        return u.w;
    }
}

/* ------ Compaction phase ------ */

/* Compact a ref object. */
/* Remove the marks at the same time. */
private void
refs_compact(const gs_memory_t *mem, obj_header_t * pre, obj_header_t * dpre, uint size)
{
    ref_packed *dest;
    ref_packed *src;
    ref_packed *end;
    uint new_size;

    src = (ref_packed *) (pre + 1);
    end = (ref_packed *) ((byte *) src + size);
    /*
     * We know that a block of refs always ends with an unmarked
     * full-size ref, so we only need to check for reaching the end
     * of the block when we see one of those.
     */
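    /*
     * Two passes: when compacting in place (dpre == pre), first skip the
     * leading run of refs that are not going to move, only clearing their
     * marks; then fall into the copying loop below for the remainder.
     */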
    if (dpre == pre)            /* Loop while we don't need to copy. */
        for (;;) {
            if (r_is_packed(src)) {
                if (!r_has_pmark(src))
                    break;
                if_debug1('8', " [8]packed ref 0x%lx \"copied\"\n",
                          (ulong) src);
                *src &= ~lp_mark;
                src++;
            } else {            /* full-size ref */
                ref *const pref = (ref *)src;

                if (!r_has_attr(pref, l_mark))
                    break;
                if_debug1('8', " [8]ref 0x%lx \"copied\"\n", (ulong) src);
                r_clear_attrs(pref, l_mark);
                src += packed_per_ref;
            }
        }
    else
        *dpre = *pre;
    dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre));
    for (;;) {
        if (r_is_packed(src)) {
            if (r_has_pmark(src)) {
                if_debug2('8', " [8]packed ref 0x%lx copied to 0x%lx\n",
                          (ulong) src, (ulong) dest);
                *dest++ = *src & ~lp_mark;
            }
            src++;
        } else {                /* full-size ref */
            if (r_has_attr((ref *) src, l_mark)) {
                ref rtemp;

                if_debug2('8', " [8]ref 0x%lx copied to 0x%lx\n",
                          (ulong) src, (ulong) dest);
                /* We can't just use ref_assign_inline, */
                /* because the source and destination */
                /* might overlap! */
                ref_assign_inline(&rtemp, (ref *) src);
                r_clear_attrs(&rtemp, l_mark);
                ref_assign_inline((ref *) dest, &rtemp);
                dest += packed_per_ref;
                src += packed_per_ref;
            } else {            /* check for end of block */
                src += packed_per_ref;
                if (src >= end)
                    break;
            }
        }
    }
    new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref);
#ifdef DEBUG
    /* Check that the relocation came out OK. */
    /* NOTE: this check only works within a single chunk. */
    if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) {
        lprintf3("Reloc error for refs 0x%lx: reloc = %lu, stored = %u\n",
                 (ulong) dpre, (ulong) ((byte *) src - (byte *) dest),
                 (uint) r_size((ref *) src - 1));
        gs_abort(mem);
    }
#endif
    /* Pad to a multiple of sizeof(ref). */
    while (new_size & (sizeof(ref) - 1))
        *dest++ = pt_tag(pt_integer),
            new_size += sizeof(ref_packed);
    /* We want to make the newly freed space into a free block, */
    /* but we can only do this if we have enough room. */
    if (size - new_size < sizeof(obj_header_t)) {       /* Not enough room. Pad to original size. */
        while (new_size < size)
            *dest++ = pt_tag(pt_integer),
                new_size += sizeof(ref_packed);
    } else {
        obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1);

        pfree->o_alone = 0;
        pfree->o_size = size - new_size - sizeof(obj_header_t);
        pfree->o_type = &st_bytes;
    }
    /* Re-create the final ref. */
    r_set_type((ref *) dest, t_integer);
    dpre->o_size = new_size;
}