6 #include "gc-internal.h"
7 #include "genesis/closure.h"
8 #include "genesis/cons.h"
9 #include "genesis/constants.h"
10 #include "genesis/gc-tables.h"
11 #include "genesis/layout.h"
12 #include "genesis/package.h"
13 #include "genesis/vector.h"
14 #include "pseudo-atomic.h" // for get_alloc_pointer()
19 #ifndef LISP_FEATURE_WIN32
20 #define HAVE_GETRUSAGE 1
23 #include <sys/resource.h> // for getrusage()
// Verbosity level for the heap tracer's diagnostic output; 0 = quiet.
int heap_trace_verbose = 0;

// Highest generation that GC will collect; defined elsewhere in the GC runtime.
extern generation_index_t gencgc_oldest_gen_to_gc;
30 /// Each "layer" is a set of objects reachable by tracing one reverse pointer
31 /// from any object in the previously built layer.
32 /// An object will not appear in more than one layer.
34 struct __attribute((packed
)) node
{
35 lispobj object
; // With lowtag
36 // Which 0-relative word in this object points to any object
37 // in the next layer (closer to the intended target object).
/// memory in which to build the object lists comprising the
/// values in 'inverted_heap', the hashtable which maps each object
/// to a list of objects pointing to it.
/// 'free' is the bump-allocation cursor; cells are carved off between
/// 'base' and 'end'.
struct scratchpad {
    char* base, *free, *end;
};
57 // A hashtable mapping each object to a list of objects pointing to it
58 struct hopscotch_table inverted_heap
;
59 struct scratchpad scratchpad
;
62 static int gen_of(lispobj obj
) {
63 #ifdef LISP_FEATURE_IMMOBILE_SPACE
64 if (immobile_space_p(obj
))
65 return immobile_obj_gen_bits(native_pointer(obj
));
67 int page
= find_page_index((void*)obj
);
68 if (page
>= 0) return page_table
[page
].gen
;
72 const char* classify_obj(lispobj ptr
)
74 extern lispobj
* instance_classoid_name(lispobj
*);
75 extern char *widetag_names
[];
77 lispobj
* name
; // a Lisp string
78 switch(lowtag_of(ptr
)) {
79 case INSTANCE_POINTER_LOWTAG
:
80 name
= instance_classoid_name(native_pointer(ptr
));
81 if (widetag_of(*name
) == SIMPLE_BASE_STRING_WIDETAG
)
82 return (char*)(name
+ 2);
83 case LIST_POINTER_LOWTAG
:
85 case FUN_POINTER_LOWTAG
:
86 case OTHER_POINTER_LOWTAG
:
87 return widetag_names
[widetag_of(*native_pointer(ptr
))>>2];
90 sprintf(buf
, "#x%x", widetag_of(*native_pointer(ptr
)));
94 static void add_to_layer(lispobj
* obj
, int wordindex
,
95 struct layer
* layer
, int* capacity
)
97 // Resurrect the containing object's lowtag
98 lispobj ptr
= compute_lispobj(obj
);
99 int staticp
= ptr
<= STATIC_SPACE_END
;
100 int gen
= staticp
? -1 : gen_of(ptr
);
101 if (heap_trace_verbose
>2)
102 // Show the containing object, its type and generation, and pointee
104 " add_to_layer(%p,%d) = %s,g%c -> %p\n",
105 (void*)ptr
, wordindex
, classify_obj(ptr
), (staticp
? 'S' : '0'+gen
),
106 (void*)obj
[wordindex
]);
107 int count
= layer
->count
;
108 if (count
>= *capacity
) {
109 *capacity
= *capacity
? 2 * *capacity
: 4;
110 layer
->nodes
= realloc(layer
->nodes
, *capacity
* sizeof (struct node
));
112 layer
->nodes
[count
].object
= ptr
;
113 layer
->nodes
[count
].wordindex
= wordindex
;
117 /// If 'obj' is a simple-fun, return its code component,
118 /// otherwise return obj directly.
119 static lispobj
canonical_obj(lispobj obj
)
121 if (lowtag_of(obj
) == FUN_POINTER_LOWTAG
&&
122 widetag_of(*native_pointer(obj
)) == SIMPLE_FUN_WIDETAG
)
123 return make_lispobj(fun_code_header(obj
-FUN_POINTER_LOWTAG
),
124 OTHER_POINTER_LOWTAG
);
128 /* Return the word index of the pointer in 'source' which references 'target'.
129 * Return -1 on failure. (This is an error if it happens)
131 #define check_ptr(index,ptr) if(canonical_obj(ptr)==target) return index;
132 static int find_ref(lispobj
* source
, lispobj target
)
134 lispobj layout
, bitmap
;
135 int scan_limit
, i
, j
;
137 lispobj header
= *source
;
138 if (is_cons_half(header
)) {
139 check_ptr(0, header
);
140 check_ptr(1, source
[1]);
143 int widetag
= widetag_of(header
);
144 scan_limit
= sizetab
[widetag
](source
);
146 case INSTANCE_WIDETAG
:
147 #ifdef LISP_FEATURE_COMPACT_INSTANCE_HEADER
148 case FUNCALLABLE_INSTANCE_WIDETAG
:
150 // mixed boxed/unboxed objects
151 // Unlike in scav_instance where the slot loop is unswitched for
152 // speed into three cases (no raw slots, fixnum bitmap, bignum bitmap),
153 // here we just go for clarity by abstracting out logbitp.
154 layout
= instance_layout(source
);
155 check_ptr(0, layout
);
156 bitmap
= layout
? LAYOUT(layout
)->bitmap
: make_fixnum(-1);
157 for(i
=1; i
<scan_limit
; ++i
)
158 if (layout_bitmap_logbitp(i
-1, bitmap
)) check_ptr(i
, source
[i
]);
160 case CLOSURE_WIDETAG
:
161 check_ptr(1, ((struct closure
*)source
)->fun
- FUN_RAW_ADDR_OFFSET
);
163 case CODE_HEADER_WIDETAG
:
164 for_each_simple_fun(i
, function_ptr
, (struct code
*)source
, 0, {
165 int wordindex
= &function_ptr
->name
- source
;
166 for (j
=0; j
<4; ++j
) check_ptr(wordindex
+j
, source
[wordindex
+j
]);
168 scan_limit
= code_header_words(header
);
170 #ifdef LISP_FEATURE_IMMOBILE_CODE
172 check_ptr(3, fdefn_raw_referent((struct fdefn
*)source
));
177 for(i
=1; i
<scan_limit
; ++i
) check_ptr(i
, source
[i
]);
// Where a root reference to a traced object was found: in heap memory, on a
// thread's control (C) stack, on a binding stack, or in thread-local storage.
enum ref_kind { HEAP, CONTROL_STACK, BINDING_STACK, TLS };
// Printable names, indexed by enum ref_kind.
char *ref_kind_name[4] = {"heap","C stack","bindings","TLS"};
185 /// This unfortunately entails a heap scan,
186 /// but it's quite fast if the symbol is found in immobile space.
187 static lispobj
* find_sym_by_tls_index(unsigned int tls_index
)
191 #ifdef LISP_FEATURE_IMMOBILE_SPACE
192 where
= (lispobj
*)IMMOBILE_SPACE_START
;
193 end
= (lispobj
*)SYMBOL(IMMOBILE_FIXEDOBJ_FREE_POINTER
)->value
;
196 while (where
< end
) {
197 lispobj header
= *where
;
198 int widetag
= widetag_of(header
);
199 if (widetag
== SYMBOL_WIDETAG
&&
200 tls_index_of(((struct symbol
*)where
)) == tls_index
)
202 where
+= OBJECT_SIZE(header
, where
);
204 if (where
>= (lispobj
*)DYNAMIC_SPACE_START
)
206 where
= (lispobj
*)DYNAMIC_SPACE_START
;
207 end
= (lispobj
*)get_alloc_pointer();
212 static inline int interestingp(lispobj ptr
, struct hopscotch_table
* targets
)
214 return is_lisp_pointer(ptr
) && hopscotch_containsp(targets
, ptr
);
217 /* Try to find the call frame that contains 'addr', which is the address
218 * in which a conservative root was seen.
219 * Return the program counter associated with that frame. */
220 static char* deduce_thread_pc(struct thread
* th
, void** addr
)
222 uword_t
* fp
= __builtin_frame_address(0);
225 if (th
!= arch_os_get_current_thread()) {
226 int i
= fixnum_value(SymbolValue(FREE_INTERRUPT_CONTEXT_INDEX
,th
));
227 os_context_t
*c
= th
->interrupt_contexts
[i
-1];
228 #ifdef LISP_FEATURE_64_BIT
229 fp
= (uword_t
*)*os_context_register_addr(c
,reg_RBP
);
231 fp
= (uword_t
*)*os_context_register_addr(c
,reg_EBP
);
235 if ((uword_t
*)addr
< fp
)
237 uword_t prev_fp
= fp
[0];
238 if (prev_fp
== 0 || (uword_t
*)prev_fp
< fp
|| (lispobj
*)prev_fp
>= th
->control_stack_end
)
240 return_pc
= (void*)fp
[1];
241 fp
= (uword_t
*)prev_fp
;
245 static struct { void* pointer
; boolean found
; } pin_seek_state
;
246 static void compare_pointer(void *addr
) {
247 if (addr
== pin_seek_state
.pointer
)
248 pin_seek_state
.found
= 1;
251 /* Figure out which thread's control stack contains 'pointer'
252 * and the PC within the active function in the referencing frame */
253 static struct thread
* deduce_thread(void (*context_scanner
)(),
254 uword_t pointer
, char** pc
)
259 pin_seek_state
.found
= 0;
260 for_each_thread(th
) {
261 void **esp
=(void **)-1;
263 if (th
== arch_os_get_current_thread())
264 esp
= (void **)((void *)&pointer
);
267 free
= fixnum_value(SymbolValue(FREE_INTERRUPT_CONTEXT_INDEX
,th
));
268 for(i
=free
-1;i
>=0;i
--) {
269 os_context_t
*c
=th
->interrupt_contexts
[i
];
270 esp1
= (void **) *os_context_register_addr(c
,reg_SP
);
271 if (esp1
>=(void **)th
->control_stack_start
&& esp1
<(void **)th
->control_stack_end
) {
272 if(esp1
<esp
) esp
=esp1
;
273 pin_seek_state
.pointer
= (void*)pointer
;
274 context_scanner(compare_pointer
, c
);
275 pin_seek_state
.pointer
= 0;
276 if (pin_seek_state
.found
) return th
;
280 if (!esp
|| esp
== (void*) -1)
281 lose("deduce_thread: no SP known for thread %x (OS %x)", th
, th
->os_thread
);
283 for (where
= ((void **)th
->control_stack_end
)-1; where
>= esp
; where
--)
284 if ((uword_t
)*where
== pointer
) {
285 *pc
= deduce_thread_pc(th
, where
);
292 static lispobj
examine_stacks(struct hopscotch_table
* targets
,
293 void (*context_scanner
)(),
294 int n_pins
, lispobj
* pins
,
295 enum ref_kind
*root_kind
,
296 struct thread
** root_thread
,
298 unsigned int *tls_index
)
300 boolean world_stopped
= context_scanner
!= 0;
303 for_each_thread(th
) {
304 lispobj
*where
, *end
;
305 #ifdef LISP_FEATURE_SB_THREAD
306 // Examine thread-local storage
308 where
= (lispobj
*)(th
+1);
309 end
= (lispobj
*)((char*)th
+ SymbolValue(FREE_TLS_INDEX
,0));
310 for( ; where
< end
; ++where
)
311 if (interestingp(*where
, targets
)) {
313 *tls_index
= (char*)where
- (char*)th
;
317 // Examine the binding stack
318 *root_kind
= BINDING_STACK
;
319 where
= (lispobj
*)th
->binding_stack_start
;
320 end
= (lispobj
*)get_binding_stack_pointer(th
);
321 for( ; where
< end
; where
+= 2)
322 if (interestingp(*where
, targets
)) {
324 *tls_index
= where
[1];
328 // Look in the control stacks
329 *root_kind
= CONTROL_STACK
;
332 for (i
=n_pins
-1; i
>=0; --i
)
333 // Bypass interestingp() to avoid one test - pins are known pointers.
334 if (hopscotch_containsp(targets
, pin
= pins
[i
])) {
336 *root_thread
= deduce_thread(context_scanner
, pin
, thread_pc
);
340 // Scan just the current thread's stack
341 // (We don't know where the other stack pointers are)
342 th
= arch_os_get_current_thread();
343 void **esp
= __builtin_frame_address(0);
345 for (where
= ((void **)th
->control_stack_end
)-1; where
>= esp
; --where
)
346 if (*where
== (void*)pin
) {
348 *thread_pc
= deduce_thread_pc(th
, where
);
358 void free_graph(struct layer
* layer
)
362 struct layer
* next
= layer
->next
;
368 struct node
* find_node(struct layer
* layer
, lispobj ptr
)
371 for(i
=layer
->count
-1; i
>=0; --i
)
372 if (layer
->nodes
[i
].object
== ptr
)
373 return &layer
->nodes
[i
];
377 /// "Compressed" pointers are a huge win - they halve the amount
378 /// of space required to invert the heap.
379 static inline uint32_t encode_pointer(lispobj pointer
)
382 if (pointer
>= DYNAMIC_SPACE_START
) {
383 // A dynamic space pointer is stored as a count in doublewords
384 // from the heap base address. A 32GB range is representable.
385 encoding
= (pointer
- DYNAMIC_SPACE_START
) / (2*N_WORD_BYTES
);
386 gc_assert(encoding
<= 0x7FFFFFFF);
387 return (encoding
<<1) | 1; // Low bit signifies compressed ptr.
389 // Non-dynamic-space pointers are stored as-is.
390 gc_assert(pointer
<= 0xFFFFFFFF && !(pointer
& 1));
391 return pointer
; // Low bit 0 signifies literal pointer
395 static inline lispobj
decode_pointer(uint32_t encoding
)
397 if (encoding
& 1) // Compressed ptr
398 return (encoding
>>1)*(2*N_WORD_BYTES
) + DYNAMIC_SPACE_START
;
400 return encoding
; // Literal pointer
403 struct simple_fun
* simple_fun_from_pc(char* pc
)
405 struct code
* code
= (struct code
*)component_ptr_from_pc((lispobj
*)pc
);
407 struct simple_fun
* prev_fun
= (struct simple_fun
*)
408 ((char*)code
+ (code_header_words(code
->header
)<<WORD_SHIFT
)
409 + FIRST_SIMPLE_FUN_OFFSET(code
));
410 for_each_simple_fun(i
, fun
, code
, 1, {
411 if (pc
< (char*)fun
) break;
417 static void maybe_show_object_name(lispobj obj
, FILE* stream
)
419 extern void safely_show_lstring(lispobj
* string
, int quotes
, FILE *s
);
420 lispobj package
, package_name
;
421 if (lowtag_of(obj
)==OTHER_POINTER_LOWTAG
)
422 switch(widetag_of(*native_pointer(obj
))) {
424 package
= SYMBOL(obj
)->package
;
425 package_name
= ((struct package
*)native_pointer(package
))->_name
;
427 safely_show_lstring(native_pointer(package_name
), 0, stream
);
429 safely_show_lstring(native_pointer(SYMBOL(obj
)->name
), 0, stream
);
434 static boolean
root_p(lispobj ptr
, int criterion
)
436 if (ptr
<= STATIC_SPACE_END
) return 1; // always a root
437 // 0 to 2 are in order of weakest to strongest condition for stopping,
438 // i.e. criterion 0 implies that that largest number of objects
439 // are considered roots.
441 && (gen_of(ptr
) > (criterion
? HIGHEST_NORMAL_GENERATION
442 : gencgc_oldest_gen_to_gc
));
445 /// Find any shortest path to 'object' starting at a tenured object or a thread stack.
446 static void trace1(lispobj object
,
447 struct hopscotch_table
* targets
,
448 struct hopscotch_table
* visited
,
449 struct hopscotch_table
* inverted_heap
,
450 struct scratchpad
* scratchpad
,
451 int n_pins
, lispobj
* pins
, void (*context_scanner
)(),
454 struct node
* anchor
= 0;
456 enum ref_kind root_kind
;
457 struct thread
* root_thread
;
459 unsigned int tls_index
;
463 struct layer
* top_layer
= 0;
464 int layer_capacity
= 0;
466 hopscotch_put(targets
, object
, 1);
467 while ((thread_ref
= examine_stacks(targets
, context_scanner
, n_pins
, pins
,
468 &root_kind
, &root_thread
, &thread_pc
,
470 // TODO: preallocate layers to avoid possibility of malloc deadlock
471 struct layer
* layer
= (struct layer
*)malloc(sizeof (struct layer
));
474 layer
->next
= top_layer
;
477 if (heap_trace_verbose
)
478 printf("Next layer: Looking for %d object(s)\n", targets
->count
);
479 for_each_hopscotch_key(i
, target
, (*targets
)) {
480 uint32_t list
= hopscotch_get(inverted_heap
, target
, 0);
481 if (heap_trace_verbose
>1) {
482 uint32_t list1
= list
;
483 fprintf(stderr
, "target=%p srcs=", (void*)target
);
485 uint32_t* cell
= (uint32_t*)(scratchpad
->base
+ list1
);
486 lispobj
* ptr
= (lispobj
*)decode_pointer(cell
[0]);
487 if (hopscotch_containsp(visited
, (lispobj
)ptr
))
488 fprintf(stderr
, "%p ", ptr
);
491 int nwords
= OBJECT_SIZE(word
, ptr
);
492 fprintf(stderr
, "%p+%d ", ptr
, nwords
);
498 while (list
&& !anchor
) {
499 uint32_t* cell
= (uint32_t*)(scratchpad
->base
+ list
);
500 lispobj ptr
= decode_pointer(cell
[0]);
502 if (hopscotch_containsp(visited
, ptr
))
504 int wordindex
= find_ref((lispobj
*)ptr
, target
);
505 if (wordindex
== -1) {
506 fprintf(stderr
, "Strange: no ref from %p to %p\n",
507 (void*)ptr
, (void*)target
);
510 hopscotch_insert(visited
, ptr
, 1);
511 add_to_layer((lispobj
*)ptr
, wordindex
,
512 top_layer
, &layer_capacity
);
513 // Stop if the object at 'ptr' is tenured.
514 if (root_p(ptr
, criterion
)) {
515 fprintf(stderr
, "Stopping at %p: tenured\n", (void*)ptr
);
516 anchor
= &top_layer
->nodes
[top_layer
->count
-1];
520 if (!top_layer
->count
) {
521 fprintf(stderr
, "Failure tracing from %p. Current targets:\n", (void*)object
);
522 for_each_hopscotch_key(i
, target
, (*targets
))
523 fprintf(stderr
, "%p ", (void*)target
);
525 free_graph(top_layer
);
528 if (heap_trace_verbose
>1)
529 printf("Found %d object(s)\n", top_layer
->count
);
530 // The top layer's last object if static or tenured
531 // stops the scan. (And no more objects go in the top layer)
534 // Transfer the top layer objects into 'targets'
535 hopscotch_reset(targets
);
536 struct node
* nodes
= top_layer
->nodes
;
537 for (i
=top_layer
->count
-1 ; i
>=0 ; --i
) {
538 lispobj ptr
= nodes
[i
].object
;
539 hopscotch_put(targets
, ptr
, 1);
545 struct vector
* lisp_thread_name(os_thread_t os_thread
);
546 extern void show_lstring(struct vector
*, int, FILE*);
547 struct vector
* thread_name
;
550 "%s pointed to by %s: %p\n",
551 top_layer
? "Indirectly" : "Directly",
552 ref_kind_name
[root_kind
],
556 // The thread indirectly points to a target.
557 // The root object is whatever the thread pointed to,
558 // which must be an object in the top layer. Find that object.
559 anchor
= find_node(top_layer
, thread_ref
);
564 fprintf(file
, "(unknown-thread)");
565 else if ((thread_name
= lisp_thread_name(root_thread
->os_thread
)) != 0)
566 show_lstring(thread_name
, 1, file
);
568 fprintf(file
, "thread=%p", root_thread
);
569 fprintf(file
, ":%s:", ref_kind_name
[root_kind
]);
570 if (root_kind
==BINDING_STACK
|| root_kind
==TLS
) {
571 lispobj
* symbol
= find_sym_by_tls_index(tls_index
);
573 show_lstring(symbol_name(symbol
), 0, file
);
575 fprintf(file
, "%x", tls_index
);
577 struct simple_fun
* fun
= simple_fun_from_pc(thread_pc
);
579 fprintf(file
, "fun=%p", (void*)make_lispobj(fun
, FUN_POINTER_LOWTAG
));
580 if (is_lisp_pointer(fun
->name
) &&
581 widetag_of(*native_pointer(fun
->name
)) == SYMBOL_WIDETAG
) {
583 show_lstring(VECTOR(SYMBOL(fun
->name
)->name
), 0, file
);
585 } else if (thread_pc
)
586 fprintf(file
, "pc=%p", thread_pc
);
588 fprintf(file
, "}->");
589 } else { // Stopped at (pseudo)static object
590 fprintf(stderr
, "Anchor object is @ %p. word[%d]\n",
591 native_pointer(anchor
->object
), anchor
->wordindex
);
596 struct node next
= *anchor
;
597 lispobj ptr
= next
.object
;
598 if (ptr
<= STATIC_SPACE_END
)
599 fprintf(file
, "(static,");
601 fprintf(file
, "(g%d,", gen_of(ptr
));
602 fputs(classify_obj(ptr
), file
);
603 maybe_show_object_name(ptr
, file
);
604 fprintf(file
, ")%p[%d]->", (void*)ptr
, next
.wordindex
);
605 target
= native_pointer(ptr
)[next
.wordindex
];
606 // Special-case a few combinations of <type,wordindex>
607 switch (next
.wordindex
) {
609 if (lowtag_of(ptr
) == INSTANCE_POINTER_LOWTAG
||
610 lowtag_of(ptr
) == FUN_POINTER_LOWTAG
)
611 target
= instance_layout(native_pointer(ptr
));
614 if (lowtag_of(ptr
) == FUN_POINTER_LOWTAG
&&
615 widetag_of(*native_pointer(ptr
)) == CLOSURE_WIDETAG
)
616 target
-= FUN_RAW_ADDR_OFFSET
;
618 #ifdef LISP_FEATURE_IMMOBILE_CODE
620 if (lowtag_of(ptr
) == OTHER_POINTER_LOWTAG
&&
621 widetag_of(FDEFN(ptr
)->header
) == FDEFN_WIDETAG
)
622 target
= fdefn_raw_referent((struct fdefn
*)native_pointer(ptr
));
626 target
= canonical_obj(target
);
627 struct layer
* next_layer
= top_layer
->next
;
628 free(top_layer
->nodes
);
630 top_layer
= next_layer
;
632 anchor
= find_node(top_layer
, target
);
635 gc_assert(object
== target
);
638 fprintf(file
, "%p.\n", (void*)target
);
641 static void record_ptr(lispobj
* source
, lispobj target
,
642 struct scan_state
* ss
)
644 // Add 'source' to the list of objects keyed by 'target' in the inverted heap.
645 // Note that 'source' has no lowtag, and 'target' does.
646 // Pointer compression occurs here as well: the linked list of source objects
647 // is built using offsets into the scratchpad rather than absolute addresses.
648 target
= canonical_obj(target
);
649 uint32_t* new_cell
= (uint32_t*)ss
->scratchpad
.free
;
650 uint32_t* next
= new_cell
+ 2;
651 gc_assert((char*)next
<= ss
->scratchpad
.end
);
652 ss
->scratchpad
.free
= (char*)next
;
653 new_cell
[0] = encode_pointer((lispobj
)source
);
654 new_cell
[1] = hopscotch_get(&ss
->inverted_heap
, target
, 0);
655 hopscotch_put(&ss
->inverted_heap
, target
,
656 (sword_t
)((char*)new_cell
- ss
->scratchpad
.base
));
// Is 'x' an address on any GC-managed page (dynamic or immobile space)?
#ifdef LISP_FEATURE_IMMOBILE_SPACE
#define relevant_ptr_p(x) find_page_index(x)>=0||find_immobile_page_index(x)>=0
#else
#define relevant_ptr_p(x) find_page_index(x)>=0
#endif
665 #define check_ptr(ptr) { \
667 if (!is_lisp_pointer(ptr)) ++n_immediates; \
668 else if (relevant_ptr_p((void*)ptr)) { \
670 if (record_ptrs) record_ptr(where,ptr,ss); \
673 static uword_t
build_refs(lispobj
* where
, lispobj
* end
,
674 struct scan_state
* ss
)
676 lispobj layout
, bitmap
, fun
;
677 sword_t nwords
, scan_limit
, i
, j
;
678 uword_t n_objects
= 0, n_scanned_words
= 0,
679 n_immediates
= 0, n_pointers
= 0;
681 boolean record_ptrs
= ss
->record_ptrs
;
682 for ( ; where
< end
; where
+= nwords
) {
684 lispobj header
= *where
;
685 if (is_cons_half(header
)) {
691 int widetag
= widetag_of(header
);
692 nwords
= scan_limit
= sizetab
[widetag
](where
);
694 case INSTANCE_WIDETAG
:
695 #ifdef LISP_FEATURE_COMPACT_INSTANCE_HEADER
696 case FUNCALLABLE_INSTANCE_WIDETAG
:
698 // mixed boxed/unboxed objects
699 layout
= instance_layout(where
);
701 // Partially initialized instance can't have nonzero words yet
702 bitmap
= layout
? LAYOUT(layout
)->bitmap
: make_fixnum(-1);
703 // If no raw slots, just scan without use of the bitmap.
704 if (bitmap
== make_fixnum(-1)) break;
705 for(i
=1; i
<scan_limit
; ++i
)
706 if (layout_bitmap_logbitp(i
-1, bitmap
)) check_ptr(where
[i
]);
708 case CLOSURE_WIDETAG
:
709 fun
= ((struct closure
*)where
)->fun
- FUN_RAW_ADDR_OFFSET
;
712 case CODE_HEADER_WIDETAG
:
713 for_each_simple_fun(i
, function_ptr
, (struct code
*)where
, 0, {
714 int wordindex
= &function_ptr
->name
- where
;
715 for (j
=0; j
<4; ++j
) check_ptr(where
[wordindex
+j
]);
717 scan_limit
= code_header_words(header
);
719 #ifdef LISP_FEATURE_IMMOBILE_CODE
721 check_ptr(fdefn_raw_referent((struct fdefn
*)where
));
726 if (!(other_immediate_lowtag_p(widetag
) && lowtag_for_widetag
[widetag
>>2]))
727 lose("Unknown widetag %x\n", widetag
);
728 // Skip irrelevant objects.
729 if (unboxed_obj_widetag_p(widetag
) ||
730 (widetag
== WEAK_POINTER_WIDETAG
) || /* do not follow! */
731 // These numeric types contain pointers, but are uninteresting.
732 (widetag
== COMPLEX_WIDETAG
) ||
733 (widetag
== RATIO_WIDETAG
))
736 for(i
=1; i
<scan_limit
; ++i
) check_ptr(where
[i
]);
738 if (!record_ptrs
) { // just count them
739 ss
->n_objects
+= n_objects
;
740 ss
->n_scanned_words
+= n_scanned_words
;
741 ss
->n_immediates
+= n_immediates
;
742 ss
->n_pointers
+= n_pointers
;
748 static void scan_spaces(struct scan_state
* ss
)
750 build_refs((lispobj
*)STATIC_SPACE_START
,
751 (lispobj
*)SYMBOL(STATIC_SPACE_FREE_POINTER
)->value
,
753 #ifdef LISP_FEATURE_IMMOBILE_SPACE
754 build_refs((lispobj
*)IMMOBILE_SPACE_START
,
755 (lispobj
*)SYMBOL(IMMOBILE_FIXEDOBJ_FREE_POINTER
)->value
,
757 build_refs((lispobj
*)IMMOBILE_VARYOBJ_SUBSPACE_START
,
758 (lispobj
*)SYMBOL(IMMOBILE_SPACE_FREE_POINTER
)->value
,
761 walk_generation((uword_t(*)(lispobj
*,lispobj
*,uword_t
))build_refs
,
765 #define HASH_FUNCTION HOPSCOTCH_HASH_FUN_MIX
767 static void compute_heap_inverse(struct hopscotch_table
* inverted_heap
,
768 struct scratchpad
* scratchpad
)
770 struct scan_state ss
;
771 memset(&ss
, 0, sizeof ss
);
772 fprintf(stderr
, "Pass 1: Counting heap objects... ");
774 fprintf(stderr
, "%ld objs, %ld ptrs, %ld immediates\n",
775 ss
.n_objects
, ss
.n_pointers
,
776 ss
.n_scanned_words
- ss
.n_pointers
);
777 // Guess at the initial size of ~ .5 million objects.
778 int size
= 1<<19; // flsl(tot_n_objects); this would work if you have it
779 while (ss
.n_objects
> size
) size
<<= 1;
780 fprintf(stderr
, "Pass 2: Inverting heap. Initial size=%d objects\n", size
);
781 hopscotch_create(&ss
.inverted_heap
, HASH_FUNCTION
,
782 4, // XXX: half the word size if 64-bit
783 size
/* initial size */, 0 /* default hop range */);
784 // Add one pointer due to inability to use the first
785 // two words of the scratchpad.
786 uword_t scratchpad_min_size
= (1 + ss
.n_pointers
) * 2 * sizeof (uint32_t);
787 int pagesize
= getpagesize();
788 uword_t scratchpad_size
= CEILING(scratchpad_min_size
, pagesize
);
789 ss
.scratchpad
.base
= os_allocate(scratchpad_size
);
790 gc_assert(ss
.scratchpad
.base
);
791 ss
.scratchpad
.free
= ss
.scratchpad
.base
+ 2 * sizeof(uint32_t);
792 ss
.scratchpad
.end
= ss
.scratchpad
.base
+ scratchpad_size
;
793 fprintf(stderr
, "Scratchpad: %lu bytes\n", (long unsigned)scratchpad_size
);
795 struct rusage before
, after
;
796 getrusage(RUSAGE_SELF
, &before
);
800 *inverted_heap
= ss
.inverted_heap
;
801 *scratchpad
= ss
.scratchpad
;
803 getrusage(RUSAGE_SELF
, &after
);
804 // We're done building the necessary structure. Show some memory stats.
805 #define timediff(b,a,field) \
806 ((a.field.tv_sec-b.field.tv_sec)*1000000+(a.field.tv_usec-b.field.tv_usec))
808 "Inverted heap: ct=%d, cap=%d, LF=%f ET=%ld+%ld sys+usr\n",
809 inverted_heap
->count
,
810 1+hopscotch_max_key_index(*inverted_heap
),
811 100*(float)inverted_heap
->count
/ (1+hopscotch_max_key_index(*inverted_heap
)),
812 timediff(before
, after
, ru_stime
),
813 timediff(before
, after
, ru_utime
));
817 /* Find any shortest path from a thread or tenured object
818 * to each of the specified objects.
820 static void trace_paths(void (*context_scanner
)(),
821 lispobj weak_pointers
, int n_pins
, lispobj
* pins
,
825 struct hopscotch_table inverted_heap
;
826 struct scratchpad scratchpad
;
827 // A hashset of all objects in the reverse reachability graph so far
828 struct hopscotch_table visited
; // *Without* lowtag
829 // A hashset of objects in the current graph layer
830 struct hopscotch_table targets
; // With lowtag
832 if (heap_trace_verbose
) {
833 fprintf(stderr
, "%d pins:\n", n_pins
);
834 for(i
=0;i
<n_pins
;++i
)
835 fprintf(stderr
, " %p%s", (void*)pins
[i
],
836 ((i
%8)==7||i
==n_pins
-1)?"\n":"");
838 compute_heap_inverse(&inverted_heap
, &scratchpad
);
839 hopscotch_create(&visited
, HASH_FUNCTION
, 0, 32, 0);
840 hopscotch_create(&targets
, HASH_FUNCTION
, 0, 32, 0);
842 lispobj car
= CONS(weak_pointers
)->car
;
843 lispobj value
= ((struct weak_pointer
*)native_pointer(car
))->value
;
844 weak_pointers
= CONS(weak_pointers
)->cdr
;
845 if (value
!= UNBOUND_MARKER_WIDETAG
) {
846 if (heap_trace_verbose
)
847 fprintf(stderr
, "Target=%p (%s)\n", (void*)value
, classify_obj(value
));
848 hopscotch_reset(&visited
);
849 hopscotch_reset(&targets
);
850 trace1(value
, &targets
, &visited
,
851 &inverted_heap
, &scratchpad
,
852 n_pins
, pins
, context_scanner
, criterion
);
854 } while (weak_pointers
!= NIL
);
855 os_invalidate(scratchpad
.base
, scratchpad
.end
-scratchpad
.base
);
856 hopscotch_destroy(&inverted_heap
);
857 hopscotch_destroy(&visited
);
858 hopscotch_destroy(&targets
);
861 void gc_prove_liveness(void(*context_scanner
)(),
863 int n_pins
, uword_t
* pins
,
866 int n_watched
= 0, n_live
= 0, n_bad
= 0;
868 for ( list
= objects
;
869 list
!= NIL
&& lowtag_of(list
) == LIST_POINTER_LOWTAG
;
870 list
= CONS(list
)->cdr
) {
872 lispobj car
= CONS(list
)->car
;
873 if ((lowtag_of(car
) != OTHER_POINTER_LOWTAG
||
874 widetag_of(*native_pointer(car
)) != WEAK_POINTER_WIDETAG
))
877 n_live
+= ((struct weak_pointer
*)native_pointer(car
))->value
878 != UNBOUND_MARKER_WIDETAG
;
880 if (lowtag_of(list
) != LIST_POINTER_LOWTAG
|| n_bad
) {
881 fprintf(stderr
, "; Bad value in liveness tracker\n");
884 fprintf(stderr
, "; Liveness tracking: %d/%d live watched objects\n",
888 // Put back lowtags on pinned objects, since wipe_nonpinned_words() removed
889 // them. But first test whether lowtags were already repaired
890 // in case prove_liveness() is called after gc_prove_liveness().
891 if (n_pins
>0 && !is_lisp_pointer(pins
[0])) {
893 for(i
=n_pins
-1; i
>=0; --i
) {
894 pins
[i
] = compute_lispobj((lispobj
*)pins
[i
]);
897 trace_paths(context_scanner
, objects
, n_pins
, (lispobj
*)pins
, criterion
);
900 /* This should be called inside WITHOUT-GCING so that the set
901 * of pins does not change out from underneath.
903 void prove_liveness(lispobj objects
, int criterion
)
905 extern struct hopscotch_table pinned_objects
;
906 extern int gc_n_stack_pins
;
907 gc_prove_liveness(0, objects
, gc_n_stack_pins
, pinned_objects
.keys
, criterion
);
910 #include "genesis/package.h"
911 #include "genesis/instance.h"
913 static boolean
__attribute__((unused
)) sym_stringeq(lispobj sym
, const char *string
, int len
)
915 struct vector
* name
= VECTOR(SYMBOL(sym
)->name
);
916 return widetag_of(name
->header
) == SIMPLE_BASE_STRING_WIDETAG
917 && fixnum_value(name
->length
) == len
918 && !strcmp((char*)name
->data
, string
);
921 /* Return the value of SB-THREAD::*ALL-THREADS*
922 * This does not need to be particularly efficient.
924 static const char __attribute__((unused
)) all_threads_sym
[] = "*ALL-THREADS*";
925 static lispobj
all_lisp_threads()
927 #ifdef ENTER_FOREIGN_CALLBACK
928 // Starting with a known static symbol in SB-THREAD::, get the SB-THREAD package
929 // and find *ALL-THREADS* (which isn't static). Fewer static symbols is better.
930 struct symbol
* sym
= SYMBOL(ENTER_FOREIGN_CALLBACK
);
931 struct package
* pkg
= (struct package
*)native_pointer(sym
->package
);
932 struct instance
* internals
= (struct instance
*)native_pointer(pkg
->internal_symbols
);
933 struct vector
* cells
= (struct vector
*)
934 native_pointer(internals
->slots
[INSTANCE_DATA_START
]);
935 int cells_length
= fixnum_value(cells
->length
);
936 static int index
= 0;
937 int initial_index
= index
;
939 lispobj thing
= cells
->data
[index
];
940 if (lowtag_of(thing
) == OTHER_POINTER_LOWTAG
941 && widetag_of(SYMBOL(thing
)->header
) == SYMBOL_WIDETAG
942 && sym_stringeq(thing
, all_threads_sym
, strlen(all_threads_sym
)))
943 return SYMBOL(thing
)->value
;
944 index
= (index
+ 1) % cells_length
;
945 } while (index
!= initial_index
);
946 lose("Can't find *ALL-THREADS*");
951 // These are slot offsets in (DEFSTRUCT THREAD),
952 // not the C structure defined in genesis/thread.h
953 #define LISP_THREAD_NAME_SLOT INSTANCE_DATA_START+0
954 #define LISP_THREAD_OS_THREAD_SLOT INSTANCE_DATA_START+3
956 struct vector
* lisp_thread_name(os_thread_t os_thread
)
958 lispobj list
= all_lisp_threads();
959 while (list
!= NIL
) {
960 struct instance
* lisp_thread
= (struct instance
*)native_pointer(CONS(list
)->car
);
961 list
= CONS(list
)->cdr
;
962 if ((os_thread_t
)lisp_thread
->slots
[LISP_THREAD_OS_THREAD_SLOT
]
964 return VECTOR(lisp_thread
->slots
[LISP_THREAD_NAME_SLOT
]);