/* traceroot: find and print a shortest path from a GC root (tenured object,
 * thread stack, binding stack, or TLS) to a given heap object.
 * Part of SBCL: src/runtime/traceroot.c */
2 #include "sbcl.h"
3 #include "arch.h"
4 #include "runtime.h"
5 #include "lispregs.h"
6 #include "gc-internal.h"
7 #include "genesis/closure.h"
8 #include "genesis/cons.h"
9 #include "genesis/constants.h"
10 #include "genesis/gc-tables.h"
11 #include "genesis/layout.h"
12 #include "genesis/package.h"
13 #include "pseudo-atomic.h" // for get_alloc_pointer()
15 #include <stdlib.h>
16 #include <stdio.h>
17 #include <sys/time.h>
18 #ifndef LISP_FEATURE_WIN32
19 #define HAVE_GETRUSAGE 1
20 #endif
21 #if HAVE_GETRUSAGE
22 #include <sys/resource.h> // for getrusage()
23 #endif
// Diagnostic verbosity for the tracer: 0 = quiet, higher values print
// progressively more detail about layers and candidate referers.
int heap_trace_verbose = 0;

// Highest generation number that GC will collect (defined in gencgc).
extern generation_index_t gencgc_oldest_gen_to_gc;
/// Each "layer" is a set of objects reachable by tracing one reverse pointer
/// from any object in the previously built layer.
/// An object will not appear in more than one layer.
struct layer {
    struct __attribute((packed)) node {
        lispobj object; // With lowtag
        // Which 0-relative word in this object points to any object
        // in the next layer (closer to the intended target object).
        int wordindex;
    } *nodes;
    struct layer* next; // layer built before this one (nearer the target)
    int count;          // number of valid entries in 'nodes'
};
/// memory in which to build the object lists comprising the
/// values in 'inverted_heap', the hashtable which maps each object
/// to a list of objects pointing to it.
struct scratchpad {
    // Bump-allocated region [base..end); 'free' is the next unused byte.
    char* base, *free, *end;
};
/// State accumulated while scanning the heap. The first pass only counts
/// (record_ptrs == 0); the second pass records reverse pointers.
struct scan_state {
    long n_objects;       // total heap objects visited
    long n_scanned_words; // total words examined for pointers
    long n_immediates;    // scanned words that were not lisp pointers
    long n_pointers;      // scanned words that pointed into scanned spaces
    int record_ptrs;      // 0 = counting pass, 1 = recording pass
    // A hashtable mapping each object to a list of objects pointing to it
    struct hopscotch_table inverted_heap;
    struct scratchpad scratchpad;
};
61 static int gen_of(lispobj obj) {
62 #ifdef LISP_FEATURE_IMMOBILE_SPACE
63 if (immobile_space_p(obj))
64 return immobile_obj_gen_bits(native_pointer(obj));
65 #endif
66 int page = find_page_index((void*)obj);
67 if (page >= 0) return page_table[page].gen;
68 return -1;
/// Return a short human-readable type name for the object 'ptr'
/// (a tagged pointer). Falls back to a "#x.." widetag rendering.
const char* classify_obj(lispobj ptr)
{
    extern lispobj* instance_classoid_name(lispobj*);
    extern char *widetag_names[];

    lispobj* name; // a Lisp string
    switch(lowtag_of(ptr)) {
    case INSTANCE_POINTER_LOWTAG:
        name = instance_classoid_name(native_pointer(ptr));
        if (widetag_of(*name) == SIMPLE_BASE_STRING_WIDETAG)
            return (char*)(name + 2);
        // NOTE(review): when the classoid name is not a simple-base-string,
        // control falls through and returns "cons" -- looks unintended; confirm.
    case LIST_POINTER_LOWTAG:
        return "cons";
    case FUN_POINTER_LOWTAG:
    case OTHER_POINTER_LOWTAG:
        return widetag_names[widetag_of(*native_pointer(ptr))>>2];
    }
    // Unrecognized: render the raw widetag. Static buffer, so the result
    // is only valid until the next call.
    static char buf[8];
    sprintf(buf, "#x%x", widetag_of(*native_pointer(ptr)));
    return buf;
}
93 static void add_to_layer(lispobj* obj, int wordindex,
94 struct layer* layer, int* capacity)
96 // Resurrect the containing object's lowtag
97 lispobj ptr = compute_lispobj(obj);
98 int staticp = ptr <= STATIC_SPACE_END;
99 int gen = staticp ? -1 : gen_of(ptr);
100 if (heap_trace_verbose>2)
101 // Show the containing object, its type and generation, and pointee
102 fprintf(stderr,
103 " add_to_layer(%p,%d) = %s,g%c -> %p\n",
104 (void*)ptr, wordindex, classify_obj(ptr), (staticp ? 'S' : '0'+gen),
105 (void*)obj[wordindex]);
106 int count = layer->count;
107 if (count >= *capacity) {
108 *capacity = *capacity ? 2 * *capacity : 4;
109 layer->nodes = realloc(layer->nodes, *capacity * sizeof (struct node));
111 layer->nodes[count].object = ptr;
112 layer->nodes[count].wordindex = wordindex;
113 ++layer->count;
/// If 'obj' is a simple-fun, return its code component,
/// otherwise return obj directly.
/// (Reverse pointers are recorded against the code object, not each fun.)
static lispobj canonical_obj(lispobj obj)
{
    if (lowtag_of(obj) == FUN_POINTER_LOWTAG &&
        widetag_of(*native_pointer(obj)) == SIMPLE_FUN_WIDETAG)
        return make_lispobj(fun_code_header(obj-FUN_POINTER_LOWTAG),
                            OTHER_POINTER_LOWTAG);
    return obj;
}
/* Return the word index of the pointer in 'source' which references 'target'.
 * Return -1 on failure. (This is an error if it happens)
 * The per-widetag cases mirror build_refs below: the same slots must be
 * considered here as were considered when recording reverse pointers. */
#define check_ptr(index,ptr) if(canonical_obj(ptr)==target) return index;
static int find_ref(lispobj* source, lispobj target)
{
    lispobj layout, bitmap;
    int scan_limit, i, j;

    lispobj header = *source;
    if (is_cons_half(header)) {
        check_ptr(0, header);
        check_ptr(1, source[1]);
        return -1;
    }
    int widetag = widetag_of(header);
    scan_limit = sizetab[widetag](source);
    switch (widetag) {
    case INSTANCE_WIDETAG:
#ifdef LISP_FEATURE_COMPACT_INSTANCE_HEADER
    case FUNCALLABLE_INSTANCE_WIDETAG:
#endif
        // mixed boxed/unboxed objects
        // Unlike in scav_instance where the slot loop is unswitched for
        // speed into three cases (no raw slots, fixnum bitmap, bignum bitmap),
        // here we just go for clarity by abstracting out logbitp.
        layout = instance_layout(source);
        check_ptr(0, layout);
        bitmap = ((struct layout*)native_pointer(layout))->bitmap;
        for(i=1; i<scan_limit; ++i)
            if (layout_bitmap_logbitp(i-1, bitmap)) check_ptr(i, source[i]);
        return -1;
    case CLOSURE_WIDETAG:
        // The 'fun' slot holds a raw code address; undo the offset to
        // compare it as a tagged pointer.
        check_ptr(1, ((struct closure*)source)->fun - FUN_RAW_ADDR_OFFSET);
        break;
    case CODE_HEADER_WIDETAG:
        // Check the 4 boxed words of each embedded simple-fun, then fall
        // through to scan only the code header's boxed words.
        for_each_simple_fun(i, function_ptr, (struct code*)source, 0, {
            int wordindex = &function_ptr->name - source;
            for (j=0; j<4; ++j) check_ptr(wordindex+j, source[wordindex+j]);
        })
        scan_limit = code_header_words(header);
        break;
#ifdef LISP_FEATURE_IMMOBILE_CODE
    case FDEFN_WIDETAG:
        // Word 3 is the raw entry address; decode it to the referent.
        check_ptr(3, fdefn_raw_referent((struct fdefn*)source));
        scan_limit = 3;
        break;
#endif
    }
    for(i=1; i<scan_limit; ++i) check_ptr(i, source[i]);
    return -1;
}
#undef check_ptr
// Where a root reference to a target was found. Indexes ref_kind_name.
enum ref_kind { HEAP, CONTROL_STACK, BINDING_STACK, TLS };
char *ref_kind_name[4] = {"heap","C stack","bindings","TLS"};
/// This unfortunately entails a heap scan,
/// but it's quite fast if the symbol is found in immobile space.
/// Returns the (untagged) address of the symbol with the given TLS index,
/// or 0 if not found.
static lispobj* find_sym_by_tls_index(unsigned int tls_index)
{
    lispobj* where = 0;
    lispobj* end = 0;
#ifdef LISP_FEATURE_IMMOBILE_SPACE
    // First scan immobile fixed-size-object space, where most symbols live.
    where = (lispobj*)IMMOBILE_SPACE_START;
    end = (lispobj*)SYMBOL(IMMOBILE_FIXEDOBJ_FREE_POINTER)->value;
#endif
    while (1) {
        while (where < end) {
            lispobj header = *where;
            int widetag = widetag_of(header);
            if (widetag == SYMBOL_WIDETAG &&
                tls_index_of(((struct symbol*)where)) == tls_index)
                return where;
            where += OBJECT_SIZE(header, where);
        }
        // After the first space is exhausted, scan dynamic space once.
        if (where >= (lispobj*)DYNAMIC_SPACE_START)
            break;
        where = (lispobj*)DYNAMIC_SPACE_START;
        end = (lispobj*)get_alloc_pointer();
    }
    return 0;
}
211 static inline int interestingp(lispobj ptr, struct hopscotch_table* targets)
213 return is_lisp_pointer(ptr) && hopscotch_containsp(targets, ptr);
/* Try to find the call frame that contains 'addr', which is the address
 * in which a conservative root was seen.
 * Return the program counter associated with that frame. */
static char* deduce_thread_pc(struct thread* th, void** addr)
{
    uword_t* fp = __builtin_frame_address(0);
    char* return_pc = 0;

    if (th != arch_os_get_current_thread()) {
        // For another thread, start walking from the frame pointer saved
        // in its most recent interrupt context.
        int i = fixnum_value(SymbolValue(FREE_INTERRUPT_CONTEXT_INDEX,th));
        os_context_t *c = th->interrupt_contexts[i-1];
#ifdef LISP_FEATURE_64_BIT
        fp = (uword_t*)*os_context_register_addr(c,reg_RBP);
#else
        fp = (uword_t*)*os_context_register_addr(c,reg_EBP);
#endif
    }
    // Walk the frame-pointer chain toward older frames until passing 'addr';
    // the return PC captured from the previous frame is the answer.
    while (1) {
        if ((uword_t*)addr < fp)
            return return_pc;
        uword_t prev_fp = fp[0];
        // Sanity-check the chain: must strictly ascend and stay on the stack.
        if (prev_fp == 0 || (uword_t*)prev_fp < fp || (lispobj*)prev_fp >= th->control_stack_end)
            return 0;
        return_pc = (void*)fp[1];
        fp = (uword_t*)prev_fp;
    }
}
// Scratch state shared with the context scanner callback: 'pointer' is the
// value being sought; 'found' is set if the scanner reports it.
static struct { void* pointer; boolean found; } pin_seek_state;
// Callback handed to the context scanner: note whether 'addr' matches.
static void compare_pointer(void *addr) {
    if (addr == pin_seek_state.pointer)
        pin_seek_state.found = 1;
}
/* Figure out which thread's control stack contains 'pointer'
 * and the PC within the active function in the referencing frame */
static struct thread* deduce_thread(void (*context_scanner)(),
                                    uword_t pointer, char** pc)
{
    struct thread *th;

    *pc = 0;
    pin_seek_state.found = 0;
    for_each_thread(th) {
        void **esp=(void **)-1;
        sword_t i,free;
        if (th == arch_os_get_current_thread())
            esp = (void **)((void *)&pointer);
        else {
            // For other threads, find the lowest known stack pointer among
            // their interrupt contexts, and also ask the context scanner
            // whether the pointer is in a saved register.
            void **esp1;
            free = fixnum_value(SymbolValue(FREE_INTERRUPT_CONTEXT_INDEX,th));
            for(i=free-1;i>=0;i--) {
                os_context_t *c=th->interrupt_contexts[i];
                esp1 = (void **) *os_context_register_addr(c,reg_SP);
                if (esp1>=(void **)th->control_stack_start && esp1<(void **)th->control_stack_end) {
                    if(esp1<esp) esp=esp1;
                    pin_seek_state.pointer = (void*)pointer;
                    context_scanner(compare_pointer, c);
                    pin_seek_state.pointer = 0;
                    if (pin_seek_state.found) return th;
                }
            }
        }
        if (!esp || esp == (void*) -1)
            lose("deduce_thread: no SP known for thread %x (OS %x)", th, th->os_thread);
        // Scan the live portion of the control stack for the raw word.
        void** where;
        for (where = ((void **)th->control_stack_end)-1; where >= esp; where--)
            if ((uword_t)*where == pointer) {
                *pc = deduce_thread_pc(th, where);
                return th;
            }
    }
    return 0;
}
/// Search every thread's TLS, binding stack, and (via the pin list) control
/// stack for a reference to any object in 'targets'. On success, returns the
/// referenced object and fills in the root_* outputs describing where it was
/// found. Returns 0 when no thread directly references a target (caller then
/// builds another layer of the reverse-reachability graph).
static lispobj examine_stacks(struct hopscotch_table* targets,
                              void (*context_scanner)(),
                              int n_pins, lispobj* pins,
                              enum ref_kind *root_kind,
                              struct thread** root_thread,
                              char** thread_pc,
                              unsigned int *tls_index)
{
    // A context scanner is only supplied when the world is stopped,
    // which permits walking other threads' stacks.
    boolean world_stopped = context_scanner != 0;
    struct thread *th;

    for_each_thread(th) {
        lispobj *where, *end;
#ifdef LISP_FEATURE_SB_THREAD
        // Examine thread-local storage
        *root_kind = TLS;
        where = (lispobj*)(th+1);
        end = (lispobj*)((char*)th + SymbolValue(FREE_TLS_INDEX,0));
        for( ; where < end ; ++where)
            if (interestingp(*where, targets)) {
                *root_thread = th;
                *tls_index = (char*)where - (char*)th;
                return *where;
            }
#endif
        // Examine the binding stack
        *root_kind = BINDING_STACK;
        where = (lispobj*)th->binding_stack_start;
        end = (lispobj*)get_binding_stack_pointer(th);
        // Binding stack entries are <value, tls-index> pairs.
        for( ; where < end ; where += 2)
            if (interestingp(*where, targets)) {
                *root_thread = th;
                *tls_index = where[1];
                return *where;
            }
    }
    // Look in the control stacks
    *root_kind = CONTROL_STACK;
    uword_t pin;
    int i;
    for (i=n_pins-1; i>=0; --i)
        // Bypass interestingp() to avoid one test - pins are known pointers.
        if (hopscotch_containsp(targets, pin = pins[i])) {
            if (world_stopped) {
                *root_thread = deduce_thread(context_scanner, pin, thread_pc);
            } else {
                *root_thread = 0;
                *thread_pc = 0;
                // Scan just the current thread's stack
                // (We don't know where the other stack pointers are)
                th = arch_os_get_current_thread();
                void **esp = __builtin_frame_address(0);
                void **where;
                for (where = ((void **)th->control_stack_end)-1; where >= esp; --where)
                    if (*where == (void*)pin) {
                        *root_thread = th;
                        *thread_pc = deduce_thread_pc(th, where);
                        break;
                    }
            }
            return pin;
        }
    *root_kind = HEAP;
    return 0;
}
357 void free_graph(struct layer* layer)
359 while (layer) {
360 free(layer->nodes);
361 struct layer* next = layer->next;
362 free(layer);
363 layer = next;
367 struct node* find_node(struct layer* layer, lispobj ptr)
369 int i;
370 for(i=layer->count-1; i>=0; --i)
371 if (layer->nodes[i].object == ptr)
372 return &layer->nodes[i];
373 return 0;
/// "Compressed" pointers are a huge win - they halve the amount
/// of space required to invert the heap.
/// Encoding: low bit 1 = doubleword offset into dynamic space;
/// low bit 0 = literal 32-bit address (static/immobile spaces).
static inline uint32_t encode_pointer(lispobj pointer)
{
    uword_t encoding;
    if (pointer >= DYNAMIC_SPACE_START) {
        // A dynamic space pointer is stored as a count in doublewords
        // from the heap base address. A 32GB range is representable.
        encoding = (pointer - DYNAMIC_SPACE_START) / (2*N_WORD_BYTES);
        gc_assert(encoding <= 0x7FFFFFFF);
        return (encoding<<1) | 1; // Low bit signifies compressed ptr.
    } else {
        // Non-dynamic-space pointers are stored as-is.
        gc_assert(pointer <= 0xFFFFFFFF && !(pointer & 1));
        return pointer; // Low bit 0 signifies literal pointer
    }
}
394 static inline lispobj decode_pointer(uint32_t encoding)
396 if (encoding & 1) // Compressed ptr
397 return (encoding>>1)*(2*N_WORD_BYTES) + DYNAMIC_SPACE_START;
398 else
399 return encoding; // Literal pointer
/// Return the simple-fun whose code contains 'pc',
/// or NULL if 'pc' lies in no code component.
struct simple_fun* simple_fun_from_pc(char* pc)
{
    struct code* code = (struct code*)component_ptr_from_pc((lispobj*)pc);
    if (!code) return 0;
    // Walk the simple-funs in address order; the containing one is the
    // last fun starting at or below 'pc'.
    struct simple_fun* prev_fun = (struct simple_fun*)
        ((char*)code + (code_header_words(code->header)<<WORD_SHIFT)
         + FIRST_SIMPLE_FUN_OFFSET(code));
    for_each_simple_fun(i, fun, code, 1, {
        if (pc < (char*)fun) break;
        prev_fun = fun;
    })
    return prev_fun;
}
/// If 'obj' is a symbol, print ",package::name" to 'stream'.
/// All other object types print nothing.
static void maybe_show_object_name(lispobj obj, FILE* stream)
{
    extern void safely_show_lstring(lispobj* string, int quotes, FILE *s);
    lispobj package, package_name;
    if (lowtag_of(obj)==OTHER_POINTER_LOWTAG)
        switch(widetag_of(*native_pointer(obj))) {
        case SYMBOL_WIDETAG:
            package = SYMBOL(obj)->package;
            package_name = ((struct package*)native_pointer(package))->_name;
            putc(',', stream);
            safely_show_lstring(native_pointer(package_name), 0, stream);
            fputs("::", stream);
            safely_show_lstring(native_pointer(SYMBOL(obj)->name), 0, stream);
            break;
        }
}
/// Decide whether 'ptr' should terminate a trace, per 'criterion':
/// 0 = stop at anything older than the oldest generation GC collects,
/// 1 = stop only at pseudo-static objects, 2 = never stop at heap objects
/// (only at threads/static space).
static boolean root_p(lispobj ptr, int criterion)
{
    if (ptr <= STATIC_SPACE_END) return 1; // always a root
    // 0 to 2 are in order of weakest to strongest condition for stopping,
    // i.e. criterion 0 implies that that largest number of objects
    // are considered roots.
    return criterion < 2
        && (gen_of(ptr) > (criterion ? HIGHEST_NORMAL_GENERATION
                           : gencgc_oldest_gen_to_gc));
}
/// Find any shortest path to 'object' starting at a tenured object or a thread stack.
/// Works breadth-first: repeatedly asks examine_stacks() whether any thread
/// references a current target; if not, builds a new layer of all objects
/// that point to any target (via the precomputed inverted heap), then makes
/// that layer the new target set. When a root is found, walks the layers
/// back down, printing the path.
static void trace1(lispobj object,
                   struct hopscotch_table* targets,
                   struct hopscotch_table* visited,
                   struct hopscotch_table* inverted_heap,
                   struct scratchpad* scratchpad,
                   int n_pins, lispobj* pins, void (*context_scanner)(),
                   int criterion)
{
    struct node* anchor = 0; // the node at which the trace stopped (a root)
    lispobj thread_ref;
    enum ref_kind root_kind;
    struct thread* root_thread;
    char* thread_pc = 0;
    unsigned int tls_index;
    lispobj target;
    int i;

    struct layer* top_layer = 0;
    int layer_capacity = 0;

    hopscotch_put(targets, object, 1);
    while ((thread_ref = examine_stacks(targets, context_scanner, n_pins, pins,
                                        &root_kind, &root_thread, &thread_pc,
                                        &tls_index)) == 0) {
        // TODO: preallocate layers to avoid possibility of malloc deadlock
        struct layer* layer = (struct layer*)malloc(sizeof (struct layer));
        layer->nodes = 0;
        layer->count = 0;
        layer->next = top_layer;
        top_layer = layer;
        layer_capacity = 0;
        if (heap_trace_verbose)
            printf("Next layer: Looking for %d object(s)\n", targets->count);
        for_each_hopscotch_key(i, target, (*targets)) {
            // 'list' is a scratchpad offset chaining the recorded referers.
            uint32_t list = hopscotch_get(inverted_heap, target, 0);
            if (heap_trace_verbose>1) {
                uint32_t list1 = list;
                fprintf(stderr, "target=%p srcs=", (void*)target);
                while (list1) {
                    uint32_t* cell = (uint32_t*)(scratchpad->base + list1);
                    lispobj* ptr = (lispobj*)decode_pointer(cell[0]);
                    if (hopscotch_containsp(visited, (lispobj)ptr))
                        fprintf(stderr, "%p ", ptr);
                    else {
                        lispobj word = *ptr;
                        int nwords = OBJECT_SIZE(word, ptr);
                        fprintf(stderr, "%p+%d ", ptr, nwords);
                    }
                    list1 = cell[1];
                }
                putc('\n',stderr);
            }
            // Add each not-yet-visited referer of 'target' to the new layer.
            while (list && !anchor) {
                uint32_t* cell = (uint32_t*)(scratchpad->base + list);
                lispobj ptr = decode_pointer(cell[0]);
                list = cell[1];
                if (hopscotch_containsp(visited, ptr))
                    continue;
                int wordindex = find_ref((lispobj*)ptr, target);
                if (wordindex == -1) {
                    fprintf(stderr, "Strange: no ref from %p to %p\n",
                            (void*)ptr, (void*)target);
                    continue;
                }
                hopscotch_insert(visited, ptr, 1);
                add_to_layer((lispobj*)ptr, wordindex,
                             top_layer, &layer_capacity);
                // Stop if the object at 'ptr' is tenured.
                if (root_p(ptr, criterion)) {
                    fprintf(stderr, "Stopping at %p: tenured\n", (void*)ptr);
                    anchor = &top_layer->nodes[top_layer->count-1];
                }
            }
        }
        if (!top_layer->count) {
            // Nothing points to any target: the trace dead-ends.
            fprintf(stderr, "Failure tracing from %p. Current targets:\n", (void*)object);
            for_each_hopscotch_key(i, target, (*targets))
                fprintf(stderr, "%p ", (void*)target);
            putc('\n', stderr);
            free_graph(top_layer);
            return;
        }
        if (heap_trace_verbose>1)
            printf("Found %d object(s)\n", top_layer->count);
        // The top layer's last object if static or tenured
        // stops the scan. (And no more objects go in the top layer)
        if (anchor)
            break;
        // Transfer the top layer objects into 'targets'
        hopscotch_reset(targets);
        struct node* nodes = top_layer->nodes;
        for (i=top_layer->count-1 ; i>=0 ; --i) {
            lispobj ptr = nodes[i].object;
            hopscotch_put(targets, ptr, 1);
        }
    }

    FILE *file = stdout;
    if (thread_ref) {
        // Root was found on a thread: print "{thread:kind:detail}->".
        struct vector* lisp_thread_name(os_thread_t os_thread);
        extern void show_lstring(struct vector*, int, FILE*);
        struct vector* thread_name;
#if 0
        fprintf(stderr,
                "%s pointed to by %s: %p\n",
                top_layer ? "Indirectly" : "Directly",
                ref_kind_name[root_kind],
                (void*)thread_ref);
#endif
        if (top_layer) {
            // The thread indirectly points to a target.
            // The root object is whatever the thread pointed to,
            // which must be an object in the top layer. Find that object.
            anchor = find_node(top_layer, thread_ref);
            gc_assert(anchor);
        }
        putc('{', file);
        if (!root_thread)
            fprintf(file, "(unknown-thread)");
        else if ((thread_name = lisp_thread_name(root_thread->os_thread)) != 0)
            show_lstring(thread_name, 1, file);
        else
            fprintf(file, "thread=%p", root_thread);
        fprintf(file, ":%s:", ref_kind_name[root_kind]);
        if (root_kind==BINDING_STACK || root_kind==TLS) {
            lispobj* symbol = find_sym_by_tls_index(tls_index);
            if (symbol)
                show_lstring(symbol_name(symbol), 0, file);
            else
                fprintf(file, "%x", tls_index);
        } else {
            struct simple_fun* fun = simple_fun_from_pc(thread_pc);
            if (fun) {
                fprintf(file, "fun=%p", (void*)make_lispobj(fun, FUN_POINTER_LOWTAG));
                if (is_lisp_pointer(fun->name) &&
                    widetag_of(*native_pointer(fun->name)) == SYMBOL_WIDETAG) {
                    fprintf(file, "=");
                    show_lstring(VECTOR(SYMBOL(fun->name)->name), 0, file);
                }
            } else if (thread_pc)
                fprintf(file, "pc=%p", thread_pc);
        }
        fprintf(file, "}->");
    } else { // Stopped at (pseudo)static object
        fprintf(stderr, "Anchor object is @ %p. word[%d]\n",
                native_pointer(anchor->object), anchor->wordindex);
    }

    // Walk from the anchor back down through the layers, printing each hop
    // and freeing layers as they are consumed.
    target = 0;
    while (top_layer) {
        struct node next = *anchor;
        lispobj ptr = next.object;
        if (ptr <= STATIC_SPACE_END)
            fprintf(file, "(static,");
        else
            fprintf(file, "(g%d,", gen_of(ptr));
        fputs(classify_obj(ptr), file);
        maybe_show_object_name(ptr, file);
        fprintf(file, ")%p[%d]->", (void*)ptr, next.wordindex);
        target = native_pointer(ptr)[next.wordindex];
        // Special-case a few combinations of <type,wordindex>
        switch (next.wordindex) {
        case 0:
            if (lowtag_of(ptr) == INSTANCE_POINTER_LOWTAG ||
                lowtag_of(ptr) == FUN_POINTER_LOWTAG)
                target = instance_layout(native_pointer(ptr));
            break;
        case 1:
            if (lowtag_of(ptr) == FUN_POINTER_LOWTAG &&
                widetag_of(*native_pointer(ptr)) == CLOSURE_WIDETAG)
                target -= FUN_RAW_ADDR_OFFSET;
            break;
#ifdef LISP_FEATURE_IMMOBILE_CODE
        case 3:
            if (lowtag_of(ptr) == OTHER_POINTER_LOWTAG &&
                widetag_of(FDEFN(ptr)->header) == FDEFN_WIDETAG)
                target = fdefn_raw_referent((struct fdefn*)native_pointer(ptr));
            break;
#endif
        }
        target = canonical_obj(target);
        struct layer* next_layer = top_layer->next;
        free(top_layer->nodes);
        free(top_layer);
        top_layer = next_layer;
        if (top_layer) {
            anchor = find_node(top_layer, target);
            gc_assert(anchor);
        } else {
            gc_assert(object == target);
        }
    }
    fprintf(file, "%p.\n", (void*)target);
}
static void record_ptr(lispobj* source, lispobj target,
                       struct scan_state* ss)
{
    // Add 'source' to the list of objects keyed by 'target' in the inverted heap.
    // Note that 'source' has no lowtag, and 'target' does.
    // Pointer compression occurs here as well: the linked list of source objects
    // is built using offsets into the scratchpad rather than absolute addresses.
    target = canonical_obj(target);
    // Bump-allocate a 2-word cell: [0] = encoded source, [1] = next link.
    uint32_t* new_cell = (uint32_t*)ss->scratchpad.free;
    uint32_t* next = new_cell + 2;
    gc_assert((char*)next <= ss->scratchpad.end);
    ss->scratchpad.free = (char*)next;
    new_cell[0] = encode_pointer((lispobj)source);
    // Prepend: link to the previous head of target's chain (0 = end of list).
    new_cell[1] = hopscotch_get(&ss->inverted_heap, target, 0);
    hopscotch_put(&ss->inverted_heap, target,
                  (sword_t)((char*)new_cell - ss->scratchpad.base));
}
// True when 'x' points into a space whose objects we scan (dynamic space,
// plus immobile space when the feature is enabled).
#ifdef LISP_FEATURE_IMMOBILE_SPACE
#define relevant_ptr_p(x) find_page_index(x)>=0||find_immobile_page_index(x)>=0
#else
#define relevant_ptr_p(x) find_page_index(x)>=0
#endif

// Tally one candidate word, and in the recording pass also add it to the
// inverted heap. Relies on build_refs' locals: n_scanned_words,
// n_immediates, n_pointers, record_ptrs, where, ss.
#define check_ptr(ptr) { \
    ++n_scanned_words; \
    if (!is_lisp_pointer(ptr)) ++n_immediates; \
    else if (relevant_ptr_p((void*)ptr)) { \
      ++n_pointers; \
      if (record_ptrs) record_ptr(where,ptr,ss); \
    } \
  }
/// Scan the objects in [where, end), applying check_ptr to each potentially
/// pointer-bearing word. In the counting pass only statistics are updated;
/// in the recording pass each pointer is added to the inverted heap.
/// Signature matches the walk_generation callback protocol; always returns 0.
static uword_t build_refs(lispobj* where, lispobj* end,
                          struct scan_state* ss)
{
    lispobj layout, bitmap, fun;
    sword_t nwords, scan_limit, i, j;
    uword_t n_objects = 0, n_scanned_words = 0,
            n_immediates = 0, n_pointers = 0;

    boolean record_ptrs = ss->record_ptrs;
    for ( ; where < end ; where += nwords ) {
        ++n_objects;
        lispobj header = *where;
        if (is_cons_half(header)) {
            nwords = 2;
            check_ptr(header);
            check_ptr(where[1]);
            continue;
        }
        int widetag = widetag_of(header);
        nwords = scan_limit = sizetab[widetag](where);
        switch (widetag) {
        case INSTANCE_WIDETAG:
#ifdef LISP_FEATURE_COMPACT_INSTANCE_HEADER
        case FUNCALLABLE_INSTANCE_WIDETAG:
#endif
            // mixed boxed/unboxed objects
            layout = instance_layout(where);
            check_ptr(layout);
            bitmap = ((struct layout*)native_pointer(layout))->bitmap;
            // If no raw slots, just scan without use of the bitmap.
            if (bitmap == make_fixnum(-1)) break;
            for(i=1; i<scan_limit; ++i)
                if (layout_bitmap_logbitp(i-1, bitmap)) check_ptr(where[i]);
            continue;
        case CLOSURE_WIDETAG:
            // Decode the raw entry address back to a tagged pointer.
            fun = ((struct closure*)where)->fun - FUN_RAW_ADDR_OFFSET;
            check_ptr(fun);
            break;
        case CODE_HEADER_WIDETAG:
            // Check each simple-fun's 4 boxed words, then limit the general
            // scan below to the code header's boxed words.
            for_each_simple_fun(i, function_ptr, (struct code*)where, 0, {
                int wordindex = &function_ptr->name - where;
                for (j=0; j<4; ++j) check_ptr(where[wordindex+j]);
            })
            scan_limit = code_header_words(header);
            break;
#ifdef LISP_FEATURE_IMMOBILE_CODE
        case FDEFN_WIDETAG:
            check_ptr(fdefn_raw_referent((struct fdefn*)where));
            scan_limit = 3;
            break;
#endif
        default:
            if (!(other_immediate_lowtag_p(widetag) && lowtag_for_widetag[widetag>>2]))
                lose("Unknown widetag %x\n", widetag);
            // Skip irrelevant objects.
            if (unboxed_obj_widetag_p(widetag) ||
                (widetag == WEAK_POINTER_WIDETAG) || /* do not follow! */
                // These numeric types contain pointers, but are uninteresting.
                (widetag == COMPLEX_WIDETAG) ||
                (widetag == RATIO_WIDETAG))
                continue;
        }
        for(i=1; i<scan_limit; ++i) check_ptr(where[i]);
    }
    if (!record_ptrs) { // just count them
        ss->n_objects += n_objects;
        ss->n_scanned_words += n_scanned_words;
        ss->n_immediates += n_immediates;
        ss->n_pointers += n_pointers;
    }
    return 0;
}
#undef check_ptr
/// Run build_refs over every pointer-bearing space: static space,
/// both immobile subspaces (when enabled), and all of dynamic space.
static void scan_spaces(struct scan_state* ss)
{
    build_refs((lispobj*)STATIC_SPACE_START,
               (lispobj*)SYMBOL(STATIC_SPACE_FREE_POINTER)->value,
               ss);
#ifdef LISP_FEATURE_IMMOBILE_SPACE
    build_refs((lispobj*)IMMOBILE_SPACE_START,
               (lispobj*)SYMBOL(IMMOBILE_FIXEDOBJ_FREE_POINTER)->value,
               ss);
    build_refs((lispobj*)IMMOBILE_VARYOBJ_SUBSPACE_START,
               (lispobj*)SYMBOL(IMMOBILE_SPACE_FREE_POINTER)->value,
               ss);
#endif
    // -1 = all generations of dynamic space.
    walk_generation((uword_t(*)(lispobj*,lispobj*,uword_t))build_refs,
                    -1, (uword_t)ss);
}
#define HASH_FUNCTION HOPSCOTCH_HASH_FUN_MIX

/// Build the inverted heap: a hashtable mapping each object to the chain of
/// objects that point to it, with chain cells bump-allocated from a
/// scratchpad sized by a counting pre-pass. On return, ownership of both
/// structures passes to the caller (freed in trace_paths).
static void compute_heap_inverse(struct hopscotch_table* inverted_heap,
                                 struct scratchpad* scratchpad)
{
    struct scan_state ss;
    memset(&ss, 0, sizeof ss);
    fprintf(stderr, "Pass 1: Counting heap objects... ");
    scan_spaces(&ss);
    fprintf(stderr, "%ld objs, %ld ptrs, %ld immediates\n",
            ss.n_objects, ss.n_pointers,
            ss.n_scanned_words - ss.n_pointers);
    // Guess at the initial size of ~ .5 million objects.
    int size = 1<<19; // flsl(tot_n_objects); this would work if you have it
    while (ss.n_objects > size) size <<= 1;
    fprintf(stderr, "Pass 2: Inverting heap. Initial size=%d objects\n", size);
    hopscotch_create(&ss.inverted_heap, HASH_FUNCTION,
                     4, // XXX: half the word size if 64-bit
                     size /* initial size */, 0 /* default hop range */);
    // Add one pointer due to inability to use the first
    // two words of the scratchpad.
    uword_t scratchpad_min_size = (1 + ss.n_pointers) * 2 * sizeof (uint32_t);
    int pagesize = getpagesize();
    uword_t scratchpad_size = CEILING(scratchpad_min_size, pagesize);
    ss.scratchpad.base = os_allocate(scratchpad_size);
    gc_assert(ss.scratchpad.base);
    // Offset 0 means "end of chain", so skip the first cell.
    ss.scratchpad.free = ss.scratchpad.base + 2 * sizeof(uint32_t);
    ss.scratchpad.end = ss.scratchpad.base + scratchpad_size;
    fprintf(stderr, "Scratchpad: %lu bytes\n", (long unsigned)scratchpad_size);
#if HAVE_GETRUSAGE
    struct rusage before, after;
    getrusage(RUSAGE_SELF, &before);
#endif
    ss.record_ptrs = 1;
    scan_spaces(&ss);
    *inverted_heap = ss.inverted_heap;
    *scratchpad = ss.scratchpad;
#if HAVE_GETRUSAGE
    getrusage(RUSAGE_SELF, &after);
    // We're done building the necessary structure. Show some memory stats.
#define timediff(b,a,field) \
    ((a.field.tv_sec-b.field.tv_sec)*1000000+(a.field.tv_usec-b.field.tv_usec))
    fprintf(stderr,
            "Inverted heap: ct=%d, cap=%d, LF=%f ET=%ld+%ld sys+usr\n",
            inverted_heap->count,
            1+hopscotch_max_key_index(*inverted_heap),
            100*(float)inverted_heap->count / (1+hopscotch_max_key_index(*inverted_heap)),
            timediff(before, after, ru_stime),
            timediff(before, after, ru_utime));
#endif
}
/* Find any shortest path from a thread or tenured object
 * to each of the specified objects.
 * 'weak_pointers' is a non-empty list of weak pointers whose referents are
 * the trace targets. */
static void trace_paths(void (*context_scanner)(),
                        lispobj weak_pointers, int n_pins, lispobj* pins,
                        int criterion)
{
    int i;
    struct hopscotch_table inverted_heap;
    struct scratchpad scratchpad;
    // A hashset of all objects in the reverse reachability graph so far
    struct hopscotch_table visited;  // *Without* lowtag
    // A hashset of objects in the current graph layer
    struct hopscotch_table targets;  // With lowtag

    if (heap_trace_verbose) {
        fprintf(stderr, "%d pins:\n", n_pins);
        for(i=0;i<n_pins;++i)
            fprintf(stderr, " %p%s", (void*)pins[i],
                    ((i%8)==7||i==n_pins-1)?"\n":"");
    }
    compute_heap_inverse(&inverted_heap, &scratchpad);
    hopscotch_create(&visited, HASH_FUNCTION, 0, 32, 0);
    hopscotch_create(&targets, HASH_FUNCTION, 0, 32, 0);
    do {
        lispobj car = CONS(weak_pointers)->car;
        lispobj value = ((struct weak_pointer*)native_pointer(car))->value;
        weak_pointers = CONS(weak_pointers)->cdr;
        // A broken weak pointer (unbound marker) means the target died.
        if (value != UNBOUND_MARKER_WIDETAG) {
            if (heap_trace_verbose)
                fprintf(stderr, "Target=%p (%s)\n", (void*)value, classify_obj(value));
            hopscotch_reset(&visited);
            hopscotch_reset(&targets);
            trace1(value, &targets, &visited,
                   &inverted_heap, &scratchpad,
                   n_pins, pins, context_scanner, criterion);
        }
    } while (weak_pointers != NIL);
    os_invalidate(scratchpad.base, scratchpad.end-scratchpad.base);
    hopscotch_destroy(&inverted_heap);
    hopscotch_destroy(&visited);
    hopscotch_destroy(&targets);
}
/// Validate 'objects' (a proper list of weak pointers), report how many of
/// their referents are still live, and trace a path to each live one.
/// 'pins' may arrive without lowtags (as left by wipe_nonpinned_words);
/// lowtags are restored in place before tracing.
void gc_prove_liveness(void(*context_scanner)(),
                       lispobj objects,
                       int n_pins, uword_t* pins,
                       int criterion)
{
    int n_watched = 0, n_live = 0, n_bad = 0;
    lispobj list;
    for ( list = objects ;
          list != NIL && lowtag_of(list) == LIST_POINTER_LOWTAG ;
          list = CONS(list)->cdr ) {
        ++n_watched;
        lispobj car = CONS(list)->car;
        if ((lowtag_of(car) != OTHER_POINTER_LOWTAG ||
             widetag_of(*native_pointer(car)) != WEAK_POINTER_WIDETAG))
            ++n_bad;
        else
            // A non-broken weak pointer means the watched object is live.
            n_live += ((struct weak_pointer*)native_pointer(car))->value
                != UNBOUND_MARKER_WIDETAG;
    }
    if (lowtag_of(list) != LIST_POINTER_LOWTAG || n_bad) {
        fprintf(stderr, "; Bad value in liveness tracker\n");
        return;
    }
    fprintf(stderr, "; Liveness tracking: %d/%d live watched objects\n",
            n_live, n_watched);
    if (!n_live)
        return;
    // Put back lowtags on pinned objects, since wipe_nonpinned_words() removed
    // them. But first test whether lowtags were already repaired
    // in case prove_liveness() is called after gc_prove_liveness().
    if (n_pins>0 && !is_lisp_pointer(pins[0])) {
        int i;
        for(i=n_pins-1; i>=0; --i) {
            pins[i] = compute_lispobj((lispobj*)pins[i]);
        }
    }
    trace_paths(context_scanner, objects, n_pins, (lispobj*)pins, criterion);
}
/* This should be called inside WITHOUT-GCING so that the set
 * of pins does not change out from underneath.
 */
void prove_liveness(lispobj objects, int criterion)
{
    extern struct hopscotch_table pinned_objects;
    extern int gc_n_stack_pins;
    // No context scanner (first argument 0): the world is not stopped here,
    // so only the current thread's stack can be walked.
    gc_prove_liveness(0, objects, gc_n_stack_pins, pinned_objects.keys, criterion);
}
908 #include "genesis/package.h"
909 #include "genesis/instance.h"
910 #include "genesis/vector.h"
/// Return true if the print-name of symbol 'sym' is a simple-base-string
/// of length 'len' equal to 'string'.
static boolean __attribute__((unused)) sym_stringeq(lispobj sym, const char *string, int len)
{
    struct vector* name = (struct vector*)native_pointer(SYMBOL(sym)->name);
    return widetag_of(name->header) == SIMPLE_BASE_STRING_WIDETAG
        && fixnum_value(name->length) == len
        && !strcmp((char*)name->data, string);
}
/* Return the value of SB-THREAD::*ALL-THREADS*
 * This does not need to be particularly efficient.
 */
static const char __attribute__((unused)) all_threads_sym[] = "*ALL-THREADS*";
static lispobj all_lisp_threads()
{
#ifdef ENTER_FOREIGN_CALLBACK
    // Starting with a known static symbol in SB-THREAD::, get the SB-THREAD package
    // and find *ALL-THREADS* (which isn't static). Fewer static symbols is better.
    struct symbol* sym = SYMBOL(ENTER_FOREIGN_CALLBACK);
    struct package* pkg = (struct package*)native_pointer(sym->package);
    struct instance* internals = (struct instance*)native_pointer(pkg->internal_symbols);
    struct vector* cells = (struct vector*)
        native_pointer(internals->slots[INSTANCE_DATA_START]);
    int cells_length = fixnum_value(cells->length);
    // Cache the cell index of the last hit so repeated calls start there.
    static int index = 0;
    int initial_index = index;
    do {
        lispobj thing = cells->data[index];
        if (lowtag_of(thing) == OTHER_POINTER_LOWTAG
            && widetag_of(SYMBOL(thing)->header) == SYMBOL_WIDETAG
            && sym_stringeq(thing, all_threads_sym, strlen(all_threads_sym)))
            return SYMBOL(thing)->value;
        index = (index + 1) % cells_length;
    } while (index != initial_index);
    lose("Can't find *ALL-THREADS*");
#endif
    return NIL;
}
// These are slot offsets in (DEFSTRUCT THREAD),
// not the C structure defined in genesis/thread.h
#define LISP_THREAD_NAME_SLOT INSTANCE_DATA_START+0
#define LISP_THREAD_OS_THREAD_SLOT INSTANCE_DATA_START+3

/// Return the name (a Lisp string vector) of the Lisp thread whose OS
/// thread handle is 'os_thread', or NULL when no such thread exists.
struct vector* lisp_thread_name(os_thread_t os_thread)
{
    lispobj list = all_lisp_threads();
    while (list != NIL) {
        struct instance* lisp_thread = (struct instance*)native_pointer(CONS(list)->car);
        list = CONS(list)->cdr;
        if ((os_thread_t)lisp_thread->slots[LISP_THREAD_OS_THREAD_SLOT]
            == os_thread)
            return VECTOR(lisp_thread->slots[LISP_THREAD_NAME_SLOT]);
    }
    return 0;
}