1 /*
2 * mini-gc.c: GC interface for the mono JIT
4 * Author:
5 * Zoltan Varga (vargaz@gmail.com)
7 * Copyright 2009 Novell, Inc (http://www.novell.com)
8 */
10 #include "config.h"
11 #include "mini-gc.h"
12 #include <mono/metadata/gc-internal.h>
14 //#if 0
15 #if defined(MONO_ARCH_GC_MAPS_SUPPORTED)
17 #include <mono/metadata/gc-internal.h>
18 #include <mono/utils/mono-counters.h>
20 #if SIZEOF_VOID_P == 4
21 typedef guint32 mword;
22 #else
23 typedef guint64 mword;
24 #endif
26 #define GC_BITS_PER_WORD (sizeof (mword) * 8)
28 /* Contains state needed by the GC Map construction code */
29 typedef struct {
31 * This contains information about stack slots initialized in the prolog, encoded using
32 * (slot_index << 16) | slot_type. The slot_index is relative to the CFA, i.e. 0
33 * means cfa+0, 1 means cfa-4/8, etc.
35 GSList *stack_slots_from_cfa;
36 /* Same for stack slots relative to the frame pointer */
37 GSList *stack_slots_from_fp;
39 /* Number of slots in the map */
40 int nslots;
41 /* The number of registers in the map */
42 int nregs;
43 /* Min and Max offsets of the stack frame relative to fp */
44 int min_offset, max_offset;
45 /* Same for the locals area */
46 int locals_min_offset, locals_max_offset;
48 /* The call sites where this frame can be stopped during GC */
49 GCCallSite **callsites;
50 /* The number of call sites */
51 int ncallsites;
54 * The width of the stack bitmaps in bytes. This is not equal to the bitmap width at
55 * runtime, since it includes columns which are 0.
57 int stack_bitmap_width;
58 /*
59 * A bitmap whose width equals nslots, and whose height equals ncallsites.
60 * The bitmap contains a 1 if the corresponding stack slot has type SLOT_REF at the
61 * given callsite.
63 guint8 *stack_ref_bitmap;
64 /* Same for SLOT_PIN */
65 guint8 *stack_pin_bitmap;
68 * Similar bitmaps for registers. These have width MONO_MAX_IREGS in bits.
70 int reg_bitmap_width;
71 guint8 *reg_ref_bitmap;
72 guint8 *reg_pin_bitmap;
73 } MonoCompileGC;
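/*
 * Illustrative example of the prolog slot encoding described above: on a
 * 64 bit target the slot at cfa-16 has slot_index 2, so registering it as a
 * reference stores (2 << 16) | SLOT_REF in stack_slots_from_cfa.
 */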
75 #define ALIGN_TO(val,align) ((((mgreg_t)val) + ((align) - 1)) & ~((align) - 1))
77 #undef DEBUG
79 #if 0
80 /* We don't support debug levels, it's all-or-nothing */
81 #define DEBUG(s) do { s; fflush (logfile); } while (0)
82 #define DEBUG_ENABLED 1
83 #else
84 #define DEBUG(s)
85 #endif
87 #ifdef DEBUG_ENABLED
88 //#if 1
89 #define DEBUG_PRECISE(s) do { s; } while (0)
90 #define DEBUG_PRECISE_ENABLED
91 #else
92 #define DEBUG_PRECISE(s)
93 #endif
96 * Contains information collected during the conservative stack marking pass,
97 * used during the precise pass. This helps to avoid doing a stack walk twice, which
98 * is expensive.
100 typedef struct {
101 guint8 *bitmap;
102 int nslots;
103 int frame_start_offset;
104 int nreg_locations;
105 /* Relative to stack_start */
106 int reg_locations [MONO_MAX_IREGS];
107 #ifdef DEBUG_PRECISE_ENABLED
108 MonoJitInfo *ji;
109 gpointer fp;
110 int regs [MONO_MAX_IREGS];
111 #endif
112 } FrameInfo;
114 /* Max number of frames stored in the TLS data */
115 #define MAX_FRAMES 50
118 * Per-thread data kept by this module. This is stored in the GC and passed to us as
119 * parameters, instead of being stored in a TLS variable, since during a collection,
120 * only the collection thread is active.
122 typedef struct {
123 MonoLMF *lmf;
124 MonoContext ctx;
125 gboolean has_context;
126 MonoJitTlsData *jit_tls;
127 /* For debugging */
128 mgreg_t tid;
129 gpointer ref_to_track;
130 /* Number of frames collected during the !precise pass */
131 int nframes;
132 FrameInfo frames [MAX_FRAMES];
133 } TlsData;
135 /* These are constant so don't store them in the GC Maps */
136 /* Number of registers stored in gc maps */
137 #define NREGS MONO_MAX_IREGS
140 * The GC Map itself.
141 * Contains information needed to mark a stack frame.
142 * This is a transient structure, created from a compressed representation on-demand.
144 typedef struct {
146 * The offsets of the GC tracked area inside the stack frame relative to the frame pointer.
147 * This includes memory which is NOREF and thus doesn't need GC maps.
149 int start_offset;
150 int end_offset;
152 * The offset relative to start_offset where the memory described by the GC maps
153 * begins.
155 int map_offset;
156 /* The number of stack slots in the map */
157 int nslots;
158 /* The frame pointer register */
159 guint8 frame_reg;
160 /* The size of each callsite table entry */
161 guint8 callsite_entry_size;
162 guint has_pin_slots : 1;
163 guint has_ref_slots : 1;
164 guint has_ref_regs : 1;
165 guint has_pin_regs : 1;
167 /* The offsets below are into an external bitmaps array */
170 * A bitmap whose width is equal to nslots, and whose height is equal to ncallsites.
171 * The bitmap contains a 1 if the corresponding stack slot has type SLOT_REF at the
172 * given callsite.
174 guint32 stack_ref_bitmap_offset;
176 * Same for SLOT_PIN. It is possible that the same bit is set in both bitmaps at
177 * different callsites, if the slot starts out as PIN, and later changes to REF.
179 guint32 stack_pin_bitmap_offset;
182 * Corresponding bitmaps for registers
183 * These have width equal to the number of bits set in reg_ref_mask/reg_pin_mask.
184 * FIXME: Merge these with the normal bitmaps, i.e. reserve the first x slots for them ?
186 guint32 reg_pin_bitmap_offset;
187 guint32 reg_ref_bitmap_offset;
189 guint32 used_int_regs, reg_ref_mask, reg_pin_mask;
191 /* The number of bits set in the two masks above */
192 guint8 nref_regs, npin_regs;
195 * A bit array marking slots which contain refs.
196 * This is used only for debugging.
198 //guint8 *ref_slots;
200 /* Callsite offsets */
201 /* These can take up a lot of space, so encode them compactly */
202 union {
203 guint8 *offsets8;
204 guint16 *offsets16;
205 guint32 *offsets32;
206 } callsites;
207 int ncallsites;
208 } GCMap;
211 * A compressed version of GCMap. This is what gets stored in MonoJitInfo.
213 typedef struct {
214 //guint8 *ref_slots;
215 //guint8 encoded_size;
218 * The arrays below are embedded after the struct.
219 * Their address needs to be computed.
222 /* The fixed fields of the GCMap encoded using LEB128 */
223 guint8 encoded [MONO_ZERO_LEN_ARRAY];
225 /* An array of ncallsites entries, each entry is callsite_entry_size bytes long */
226 guint8 callsites [MONO_ZERO_LEN_ARRAY];
228 /* The GC bitmaps */
229 guint8 bitmaps [MONO_ZERO_LEN_ARRAY];
230 } GCEncodedMap;
232 static int precise_frame_count [2], precise_frame_limit = -1;
233 static gboolean precise_frame_limit_inited;
235 /* Stats */
236 typedef struct {
237 int scanned_stacks;
238 int scanned;
239 int scanned_precisely;
240 int scanned_conservatively;
241 int scanned_registers;
242 int scanned_native;
243 int scanned_other;
245 int all_slots;
246 int noref_slots;
247 int ref_slots;
248 int pin_slots;
250 int gc_maps_size;
251 int gc_callsites_size;
252 int gc_callsites8_size;
253 int gc_callsites16_size;
254 int gc_callsites32_size;
255 int gc_bitmaps_size;
256 int gc_map_struct_size;
257 int tlsdata_size;
258 } JITGCStats;
260 static JITGCStats stats;
262 static FILE *logfile;
264 // FIXME: Move these to a shared place
266 static inline void
267 encode_uleb128 (guint32 value, guint8 *buf, guint8 **endbuf)
269 guint8 *p = buf;
271 do {
272 guint8 b = value & 0x7f;
273 value >>= 7;
274 if (value != 0) /* more bytes to come */
275 b |= 0x80;
276 *p ++ = b;
277 } while (value);
279 *endbuf = p;
282 static G_GNUC_UNUSED void
283 encode_sleb128 (gint32 value, guint8 *buf, guint8 **endbuf)
285 gboolean more = 1;
286 gboolean negative = (value < 0);
287 guint32 size = 32;
288 guint8 byte;
289 guint8 *p = buf;
291 while (more) {
292 byte = value & 0x7f;
293 value >>= 7;
294 /* the following is unnecessary if the
295 * implementation of >>= uses an arithmetic rather
296 * than logical shift for a signed left operand
298 if (negative)
299 /* sign extend */
300 value |= - (1 <<(size - 7));
301 /* sign bit of byte is second high order bit (0x40) */
302 if ((value == 0 && !(byte & 0x40)) ||
303 (value == -1 && (byte & 0x40)))
304 more = 0;
305 else
306 byte |= 0x80;
307 *p ++= byte;
310 *endbuf = p;
313 static inline guint32
314 decode_uleb128 (guint8 *buf, guint8 **endbuf)
316 guint8 *p = buf;
317 guint32 res = 0;
318 int shift = 0;
320 while (TRUE) {
321 guint8 b = *p;
322 p ++;
324 res = res | (((int)(b & 0x7f)) << shift);
325 if (!(b & 0x80))
326 break;
327 shift += 7;
330 *endbuf = p;
332 return res;
335 static inline gint32
336 decode_sleb128 (guint8 *buf, guint8 **endbuf)
338 guint8 *p = buf;
339 gint32 res = 0;
340 int shift = 0;
342 while (TRUE) {
343 guint8 b = *p;
344 p ++;
346 res = res | (((int)(b & 0x7f)) << shift);
347 shift += 7;
348 if (!(b & 0x80)) {
349 if (shift < 32 && (b & 0x40))
350 res |= - (1 << shift);
351 break;
355 *endbuf = p;
357 return res;
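/*
 * Illustrative round-trip sketch for the LEB128 helpers above (the function
 * name below is made up for this example; the block is kept disabled):
 * 300 == 0x12c encodes as the two bytes 0xac 0x02.
 */
#if 0
static void
example_uleb128_roundtrip (void)
{
	guint8 buf [8], *p = buf, *endbuf;
	guint32 val;

	encode_uleb128 (300, buf, &p);
	/* buf [0] == 0xac, buf [1] == 0x02, p == buf + 2 */
	val = decode_uleb128 (buf, &endbuf);
	g_assert (val == 300 && endbuf == buf + 2);
}
#endif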
360 static int
361 encode_frame_reg (int frame_reg)
363 #ifdef TARGET_AMD64
364 if (frame_reg == AMD64_RSP)
365 return 0;
366 else if (frame_reg == AMD64_RBP)
367 return 1;
368 #elif defined(TARGET_X86)
369 if (frame_reg == X86_EBP)
370 return 0;
371 else if (frame_reg == X86_ESP)
372 return 1;
373 #else
374 NOT_IMPLEMENTED;
375 #endif
376 g_assert_not_reached ();
377 return -1;
380 static int
381 decode_frame_reg (int encoded)
383 #ifdef TARGET_AMD64
384 if (encoded == 0)
385 return AMD64_RSP;
386 else if (encoded == 1)
387 return AMD64_RBP;
388 #elif defined(TARGET_X86)
389 if (encoded == 0)
390 return X86_EBP;
391 else if (encoded == 1)
392 return X86_ESP;
393 #else
394 NOT_IMPLEMENTED;
395 #endif
396 g_assert_not_reached ();
397 return -1;
400 #ifdef TARGET_AMD64
401 #ifdef HOST_WIN32
402 static int callee_saved_regs [] = { AMD64_RBP, AMD64_RBX, AMD64_R12, AMD64_R13, AMD64_R14, AMD64_R15, AMD64_RDI, AMD64_RSI };
403 #else
404 static int callee_saved_regs [] = { AMD64_RBP, AMD64_RBX, AMD64_R12, AMD64_R13, AMD64_R14, AMD64_R15 };
405 #endif
406 #elif defined(TARGET_X86)
407 static int callee_saved_regs [] = { X86_EBX, X86_ESI, X86_EDI };
408 #endif
410 static guint32
411 encode_regmask (guint32 regmask)
413 int i;
414 guint32 res;
416 res = 0;
417 for (i = 0; i < sizeof (callee_saved_regs) / sizeof (int); ++i) {
418 if (regmask & (1 << callee_saved_regs [i])) {
419 res |= (1 << i);
420 regmask -= (1 << callee_saved_regs [i]);
423 g_assert (regmask == 0);
424 return res;
427 static guint32
428 decode_regmask (guint32 regmask)
430 int i;
431 guint32 res;
433 res = 0;
434 for (i = 0; i < sizeof (callee_saved_regs) / sizeof (int); ++i)
435 if (regmask & (1 << i))
436 res |= (1 << callee_saved_regs [i]);
437 return res;
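/*
 * Illustrative example, assuming the non-Windows amd64 table above: a
 * used_int_regs mask of (1 << AMD64_RBP) | (1 << AMD64_R12) encodes to 0x5
 * (bits 0 and 2, the indexes of RBP and R12 in callee_saved_regs []), and
 * decode_regmask (0x5) yields the original mask again.
 */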
441 * encode_gc_map:
443 * Encode the fixed fields of MAP into a buffer pointed to by BUF.
445 static void
446 encode_gc_map (GCMap *map, guint8 *buf, guint8 **endbuf)
448 guint32 flags, freg;
450 encode_sleb128 (map->start_offset / sizeof (mgreg_t), buf, &buf);
451 encode_sleb128 (map->end_offset / sizeof (mgreg_t), buf, &buf);
452 encode_sleb128 (map->map_offset / sizeof (mgreg_t), buf, &buf);
453 encode_uleb128 (map->nslots, buf, &buf);
454 g_assert (map->callsite_entry_size <= 4);
455 freg = encode_frame_reg (map->frame_reg);
456 g_assert (freg < 2);
457 flags = (map->has_ref_slots ? 1 : 0) | (map->has_pin_slots ? 2 : 0) | (map->has_ref_regs ? 4 : 0) | (map->has_pin_regs ? 8 : 0) | ((map->callsite_entry_size - 1) << 4) | (freg << 6);
458 encode_uleb128 (flags, buf, &buf);
459 encode_uleb128 (encode_regmask (map->used_int_regs), buf, &buf);
460 if (map->has_ref_regs)
461 encode_uleb128 (encode_regmask (map->reg_ref_mask), buf, &buf);
462 if (map->has_pin_regs)
463 encode_uleb128 (encode_regmask (map->reg_pin_mask), buf, &buf);
464 encode_uleb128 (map->ncallsites, buf, &buf);
466 *endbuf = buf;
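/*
 * Illustrative example of the flags byte built above: a map with ref slots
 * only, 2 byte callsite entries and rbp as the frame register on amd64
 * (freg == 1) encodes flags == 1 | ((2 - 1) << 4) | (1 << 6) == 0x51.
 */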
470 * decode_gc_map:
472 * Decode the encoded GC map representation in BUF and store the result into MAP.
474 static void
475 decode_gc_map (guint8 *buf, GCMap *map, guint8 **endbuf)
477 guint32 flags;
478 int stack_bitmap_size, reg_ref_bitmap_size, reg_pin_bitmap_size, offset, freg;
479 int i, n;
481 map->start_offset = decode_sleb128 (buf, &buf) * sizeof (mgreg_t);
482 map->end_offset = decode_sleb128 (buf, &buf) * sizeof (mgreg_t);
483 map->map_offset = decode_sleb128 (buf, &buf) * sizeof (mgreg_t);
484 map->nslots = decode_uleb128 (buf, &buf);
485 flags = decode_uleb128 (buf, &buf);
486 map->has_ref_slots = (flags & 1) ? 1 : 0;
487 map->has_pin_slots = (flags & 2) ? 1 : 0;
488 map->has_ref_regs = (flags & 4) ? 1 : 0;
489 map->has_pin_regs = (flags & 8) ? 1 : 0;
490 map->callsite_entry_size = ((flags >> 4) & 0x3) + 1;
491 freg = flags >> 6;
492 map->frame_reg = decode_frame_reg (freg);
493 map->used_int_regs = decode_regmask (decode_uleb128 (buf, &buf));
494 if (map->has_ref_regs) {
495 map->reg_ref_mask = decode_regmask (decode_uleb128 (buf, &buf));
496 n = 0;
497 for (i = 0; i < NREGS; ++i)
498 if (map->reg_ref_mask & (1 << i))
499 n ++;
500 map->nref_regs = n;
502 if (map->has_pin_regs) {
503 map->reg_pin_mask = decode_regmask (decode_uleb128 (buf, &buf));
504 n = 0;
505 for (i = 0; i < NREGS; ++i)
506 if (map->reg_pin_mask & (1 << i))
507 n ++;
508 map->npin_regs = n;
510 map->ncallsites = decode_uleb128 (buf, &buf);
512 stack_bitmap_size = (ALIGN_TO (map->nslots, 8) / 8) * map->ncallsites;
513 reg_ref_bitmap_size = (ALIGN_TO (map->nref_regs, 8) / 8) * map->ncallsites;
514 reg_pin_bitmap_size = (ALIGN_TO (map->npin_regs, 8) / 8) * map->ncallsites;
515 offset = 0;
516 map->stack_ref_bitmap_offset = offset;
517 if (map->has_ref_slots)
518 offset += stack_bitmap_size;
519 map->stack_pin_bitmap_offset = offset;
520 if (map->has_pin_slots)
521 offset += stack_bitmap_size;
522 map->reg_ref_bitmap_offset = offset;
523 if (map->has_ref_regs)
524 offset += reg_ref_bitmap_size;
525 map->reg_pin_bitmap_offset = offset;
526 if (map->has_pin_regs)
527 offset += reg_pin_bitmap_size;
529 *endbuf = buf;
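/*
 * Illustrative example of the offset computation above: with nslots == 20,
 * ncallsites == 3, only ref slots and only pin regs present, stack_bitmap_size
 * is (24 / 8) * 3 == 9 bytes, so stack_ref_bitmap_offset == 0 while
 * stack_pin_bitmap_offset, reg_ref_bitmap_offset and reg_pin_bitmap_offset all
 * equal 9, since the missing bitmaps take up no space.
 */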
532 static gpointer
533 thread_attach_func (void)
535 TlsData *tls;
537 tls = g_new0 (TlsData, 1);
538 tls->tid = GetCurrentThreadId ();
539 stats.tlsdata_size += sizeof (TlsData);
541 return tls;
544 static void
545 thread_detach_func (gpointer user_data)
547 TlsData *tls = user_data;
549 g_free (tls);
552 static void
553 thread_suspend_func (gpointer user_data, void *sigctx)
555 TlsData *tls = user_data;
557 if (!tls)
558 /* Happens during startup */
559 return;
561 tls->lmf = mono_get_lmf ();
562 if (sigctx) {
563 mono_arch_sigctx_to_monoctx (sigctx, &tls->ctx);
564 tls->has_context = TRUE;
565 } else {
566 tls->has_context = FALSE;
568 tls->jit_tls = TlsGetValue (mono_jit_tls_id);
571 #define DEAD_REF ((gpointer)(gssize)0x2a2a2a2a2a2a2a2aULL)
573 static inline void
574 set_bit (guint8 *bitmap, int width, int y, int x)
576 bitmap [(width * y) + (x / 8)] |= (1 << (x % 8));
579 static inline void
580 clear_bit (guint8 *bitmap, int width, int y, int x)
582 bitmap [(width * y) + (x / 8)] &= ~(1 << (x % 8));
585 static inline int
586 get_bit (guint8 *bitmap, int width, int y, int x)
588 return bitmap [(width * y) + (x / 8)] & (1 << (x % 8));
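/*
 * Illustrative note: the bitmaps are stored row-major, one row of 'width'
 * bytes per callsite, so set_bit (bitmap, width, cindex, slot) marks 'slot' at
 * callsite 'cindex'. E.g. with width == 2, y == 3 and x == 10 the byte touched
 * is bitmap [3 * 2 + 1] and the bit is (1 << 2).
 */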
591 static const char*
592 slot_type_to_string (GCSlotType type)
594 switch (type) {
595 case SLOT_REF:
596 return "ref";
597 case SLOT_NOREF:
598 return "noref";
599 case SLOT_PIN:
600 return "pin";
601 default:
602 g_assert_not_reached ();
603 return NULL;
607 static inline mgreg_t
608 get_frame_pointer (MonoContext *ctx, int frame_reg)
610 #if defined(TARGET_AMD64)
611 if (frame_reg == AMD64_RSP)
612 return ctx->rsp;
613 else if (frame_reg == AMD64_RBP)
614 return ctx->rbp;
615 #elif defined(TARGET_X86)
616 if (frame_reg == X86_ESP)
617 return ctx->esp;
618 else if (frame_reg == X86_EBP)
619 return ctx->ebp;
620 #endif
621 g_assert_not_reached ();
622 return 0;
626 * conservative_pass:
628 * Mark a thread stack conservatively and collect information needed by the precise pass.
630 static void
631 conservative_pass (TlsData *tls, guint8 *stack_start, guint8 *stack_end)
633 MonoJitInfo *ji;
634 MonoContext ctx, new_ctx;
635 MonoLMF *lmf;
636 guint8 *stack_limit;
637 gboolean last = TRUE;
638 GCMap *map;
639 GCMap map_tmp;
640 GCEncodedMap *emap;
641 guint8* fp, *p, *real_frame_start, *frame_start, *frame_end;
642 int i, pc_offset, cindex, bitmap_width;
643 int scanned = 0, scanned_precisely, scanned_conservatively, scanned_registers;
644 gboolean res;
645 StackFrameInfo frame;
646 mgreg_t *reg_locations [MONO_MAX_IREGS];
647 mgreg_t *new_reg_locations [MONO_MAX_IREGS];
648 guint8 *bitmaps;
649 FrameInfo *fi;
650 guint32 precise_regmask;
652 if (tls) {
653 tls->nframes = 0;
654 tls->ref_to_track = NULL;
657 /* tls == NULL can happen during startup */
658 if (mono_thread_internal_current () == NULL || !tls) {
659 mono_gc_conservatively_scan_area (stack_start, stack_end);
660 stats.scanned_stacks += stack_end - stack_start;
661 return;
664 lmf = tls->lmf;
665 frame.domain = NULL;
667 /* Number of bytes scanned based on GC map data */
668 scanned = 0;
669 /* Number of bytes scanned precisely based on GC map data */
670 scanned_precisely = 0;
671 /* Number of bytes scanned conservatively based on GC map data */
672 scanned_conservatively = 0;
673 /* Number of bytes scanned conservatively in register save areas */
674 scanned_registers = 0;
676 /* This is one past the last address which we have scanned */
677 stack_limit = stack_start;
679 if (!tls->has_context)
680 memset (&new_ctx, 0, sizeof (ctx));
681 else
682 memcpy (&new_ctx, &tls->ctx, sizeof (MonoContext));
684 memset (reg_locations, 0, sizeof (reg_locations));
685 memset (new_reg_locations, 0, sizeof (new_reg_locations));
687 while (TRUE) {
688 memcpy (&ctx, &new_ctx, sizeof (ctx));
690 for (i = 0; i < MONO_MAX_IREGS; ++i) {
691 if (new_reg_locations [i]) {
693 * If the current frame saves the register, it means it might modify its
694 * value, thus the old location might not contain the same value, so
695 * we have to mark it conservatively.
697 if (reg_locations [i]) {
698 DEBUG (fprintf (logfile, "\tscan saved reg %s location %p.\n", mono_arch_regname (i), reg_locations [i]));
699 mono_gc_conservatively_scan_area (reg_locations [i], reg_locations [i] + sizeof (mgreg_t));
700 scanned_registers += sizeof (mgreg_t);
703 reg_locations [i] = new_reg_locations [i];
705 DEBUG (fprintf (logfile, "\treg %s is now at location %p.\n", mono_arch_regname (i), reg_locations [i]));
709 g_assert ((mgreg_t)stack_limit % sizeof (mgreg_t) == 0);
711 res = mono_find_jit_info_ext (frame.domain ? frame.domain : mono_domain_get (), tls->jit_tls, NULL, &ctx, &new_ctx, NULL, &lmf, new_reg_locations, &frame);
712 if (!res)
713 break;
715 ji = frame.ji;
717 if (frame.type == FRAME_TYPE_MANAGED_TO_NATIVE) {
719 * These frames are problematic for several reasons:
720 * - they are unwound through an LMF, and we have no precise register tracking for those.
721 * - the LMF might not contain a precise ip, so we can't compute the call site.
722 * - the LMF only unwinds to the wrapper frame, so we get these methods twice.
724 DEBUG (fprintf (logfile, "Mark(0): <Managed-to-native transition>\n"));
725 for (i = 0; i < MONO_MAX_IREGS; ++i) {
726 if (reg_locations [i]) {
727 DEBUG (fprintf (logfile, "\tscan saved reg %s location %p.\n", mono_arch_regname (i), reg_locations [i]));
728 mono_gc_conservatively_scan_area (reg_locations [i], reg_locations [i] + sizeof (mgreg_t));
729 scanned_registers += sizeof (mgreg_t);
731 reg_locations [i] = NULL;
732 new_reg_locations [i] = NULL;
734 ctx = new_ctx;
735 continue;
738 /* The last frame can be in any state so mark conservatively */
739 if (last) {
740 if (ji) {
741 DEBUG (char *fname = mono_method_full_name (ji->method, TRUE); fprintf (logfile, "Mark(0): %s+0x%x (%p)\n", fname, pc_offset, (gpointer)MONO_CONTEXT_GET_IP (&ctx)); g_free (fname));
743 DEBUG (fprintf (logfile, "\t <Last frame>\n"));
744 last = FALSE;
745 continue;
748 pc_offset = (guint8*)MONO_CONTEXT_GET_IP (&ctx) - (guint8*)ji->code_start;
750 /* These frames are very problematic */
751 if (ji->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE) {
752 DEBUG (char *fname = mono_method_full_name (ji->method, TRUE); fprintf (logfile, "Mark(0): %s+0x%x (%p)\n", fname, pc_offset, (gpointer)MONO_CONTEXT_GET_IP (&ctx)); g_free (fname));
753 DEBUG (fprintf (logfile, "\tSkip.\n"));
754 continue;
757 /* All the other frames are at a call site */
759 if (tls->nframes == MAX_FRAMES) {
761 * Can't save information since the array is full. So scan the rest of the
762 * stack conservatively.
764 DEBUG (fprintf (logfile, "Mark (0): Frame stack full.\n"));
765 break;
768 /* Scan the frame of this method */
771 * A frame contains the following:
772 * - saved registers
773 * - saved args
774 * - locals
775 * - spill area
776 * - localloc-ed memory
778 g_assert (pc_offset >= 0);
780 emap = ji->gc_info;
782 if (!emap) {
783 DEBUG (char *fname = mono_method_full_name (ji->method, TRUE); fprintf (logfile, "Mark(0): %s+0x%x (%p)\n", fname, pc_offset, (gpointer)MONO_CONTEXT_GET_IP (&ctx)); g_free (fname));
784 DEBUG (fprintf (logfile, "\tNo GC Map.\n"));
785 continue;
788 /* The embedded callsite table requires this */
789 g_assert (((mgreg_t)emap % 4) == 0);
792 * Debugging aid to control the number of frames scanned precisely
794 if (!precise_frame_limit_inited) {
795 if (getenv ("MONO_PRECISE_COUNT"))
796 precise_frame_limit = atoi (getenv ("MONO_PRECISE_COUNT"));
797 precise_frame_limit_inited = TRUE;
800 if (precise_frame_limit != -1) {
801 if (precise_frame_count [FALSE] == precise_frame_limit)
802 printf ("LAST PRECISE FRAME: %s\n", mono_method_full_name (ji->method, TRUE));
803 if (precise_frame_count [FALSE] > precise_frame_limit)
804 continue;
806 precise_frame_count [FALSE] ++;
808 /* Decode the encoded GC map */
809 map = &map_tmp;
810 memset (map, 0, sizeof (GCMap));
811 decode_gc_map (&emap->encoded [0], map, &p);
812 p = (guint8*)ALIGN_TO (p, map->callsite_entry_size);
813 map->callsites.offsets8 = p;
814 p += map->callsite_entry_size * map->ncallsites;
815 bitmaps = p;
817 fp = (guint8*)get_frame_pointer (&ctx, map->frame_reg);
819 real_frame_start = fp + map->start_offset;
820 frame_start = fp + map->start_offset + map->map_offset;
821 frame_end = fp + map->end_offset;
823 DEBUG (char *fname = mono_method_full_name (ji->method, TRUE); fprintf (logfile, "Mark(0): %s+0x%x (%p) limit=%p fp=%p frame=%p-%p (%d)\n", fname, pc_offset, (gpointer)MONO_CONTEXT_GET_IP (&ctx), stack_limit, fp, frame_start, frame_end, (int)(frame_end - frame_start)); g_free (fname));
825 /* Find the callsite index */
826 if (map->callsite_entry_size == 1) {
827 for (i = 0; i < map->ncallsites; ++i)
828 /* ip points inside the call instruction */
829 if (map->callsites.offsets8 [i] == pc_offset + 1)
830 break;
831 } else if (map->callsite_entry_size == 2) {
832 // FIXME: Use a binary search
833 for (i = 0; i < map->ncallsites; ++i)
834 /* ip points inside the call instruction */
835 if (map->callsites.offsets16 [i] == pc_offset + 1)
836 break;
837 } else {
838 // FIXME: Use a binary search
839 for (i = 0; i < map->ncallsites; ++i)
840 /* ip points inside the call instruction */
841 if (map->callsites.offsets32 [i] == pc_offset + 1)
842 break;
844 if (i == map->ncallsites) {
845 printf ("Unable to find ip offset 0x%x in callsite list of %s.\n", pc_offset + 1, mono_method_full_name (ji->method, TRUE));
846 g_assert_not_reached ();
848 cindex = i;
850 g_assert (real_frame_start >= stack_limit);
852 if (real_frame_start > stack_limit) {
853 /* This scans the previously skipped frames as well */
854 DEBUG (fprintf (logfile, "\tscan area %p-%p (%d).\n", stack_limit, real_frame_start, (int)(real_frame_start - stack_limit)));
855 mono_gc_conservatively_scan_area (stack_limit, real_frame_start);
856 stats.scanned_other += real_frame_start - stack_limit;
859 /* Mark stack slots */
860 if (map->has_pin_slots) {
861 int bitmap_width = ALIGN_TO (map->nslots, 8) / 8;
862 guint8 *pin_bitmap = &bitmaps [map->stack_pin_bitmap_offset + (bitmap_width * cindex)];
863 guint8 *p;
864 gboolean pinned;
866 p = frame_start;
867 for (i = 0; i < map->nslots; ++i) {
868 pinned = pin_bitmap [i / 8] & (1 << (i % 8));
869 if (pinned) {
870 DEBUG (fprintf (logfile, "\tscan slot %s0x%x(fp)=%p.\n", (guint8*)p > (guint8*)fp ? "" : "-", ABS ((int)((gssize)p - (gssize)fp)), p));
871 mono_gc_conservatively_scan_area (p, p + sizeof (mgreg_t));
872 scanned_conservatively += sizeof (mgreg_t);
873 } else {
874 scanned_precisely += sizeof (mgreg_t);
876 p += sizeof (mgreg_t);
878 } else {
879 scanned_precisely += (map->nslots * sizeof (mgreg_t));
882 /* The area outside of start-end is NOREF */
883 scanned_precisely += (map->end_offset - map->start_offset) - (map->nslots * sizeof (mgreg_t));
885 /* Mark registers */
886 precise_regmask = map->used_int_regs | (1 << map->frame_reg);
887 if (map->has_pin_regs) {
888 int bitmap_width = ALIGN_TO (map->npin_regs, 8) / 8;
889 guint8 *pin_bitmap = &bitmaps [map->reg_pin_bitmap_offset + (bitmap_width * cindex)];
890 int bindex = 0;
891 for (i = 0; i < NREGS; ++i) {
892 if (!(map->used_int_regs & (1 << i)))
893 continue;
895 if (!(map->reg_pin_mask & (1 << i)))
896 continue;
898 if (pin_bitmap [bindex / 8] & (1 << (bindex % 8))) {
899 DEBUG (fprintf (logfile, "\treg %s saved at 0x%p is pinning.\n", mono_arch_regname (i), reg_locations [i]));
900 precise_regmask &= ~(1 << i);
902 bindex ++;
906 scanned += map->end_offset - map->start_offset;
908 g_assert (scanned == scanned_precisely + scanned_conservatively);
910 stack_limit = frame_end;
912 /* Save information for the precise pass */
913 fi = &tls->frames [tls->nframes];
914 fi->nslots = map->nslots;
915 bitmap_width = ALIGN_TO (map->nslots, 8) / 8;
916 if (map->has_ref_slots)
917 fi->bitmap = &bitmaps [map->stack_ref_bitmap_offset + (bitmap_width * cindex)];
918 else
919 fi->bitmap = NULL;
920 fi->frame_start_offset = frame_start - stack_start;
921 fi->nreg_locations = 0;
922 DEBUG_PRECISE (fi->ji = ji);
923 DEBUG_PRECISE (fi->fp = fp);
925 if (map->has_ref_regs) {
926 int bitmap_width = ALIGN_TO (map->nref_regs, 8) / 8;
927 guint8 *ref_bitmap = &bitmaps [map->reg_ref_bitmap_offset + (bitmap_width * cindex)];
928 int bindex = 0;
929 for (i = 0; i < NREGS; ++i) {
930 if (!(map->reg_ref_mask & (1 << i)))
931 continue;
933 if (reg_locations [i] && (ref_bitmap [bindex / 8] & (1 << (bindex % 8)))) {
934 DEBUG_PRECISE (fi->regs [fi->nreg_locations] = i);
935 DEBUG (fprintf (logfile, "\treg %s saved at 0x%p is ref.\n", mono_arch_regname (i), reg_locations [i]));
936 fi->reg_locations [fi->nreg_locations] = (guint8*)reg_locations [i] - stack_start;
937 fi->nreg_locations ++;
939 bindex ++;
944 * Clear locations of precisely tracked registers.
946 if (precise_regmask) {
947 for (i = 0; i < NREGS; ++i) {
948 if (precise_regmask & (1 << i)) {
950 * The method uses this register, and we have precise info for it.
951 * This means the location will be scanned precisely.
952 * Tell the code at the beginning of the loop that this location is
953 * processed.
955 if (reg_locations [i])
956 DEBUG (fprintf (logfile, "\treg %s at location %p (==%p) is precise.\n", mono_arch_regname (i), reg_locations [i], (gpointer)*reg_locations [i]));
957 reg_locations [i] = NULL;
962 tls->nframes ++;
965 /* Scan the remaining register save locations */
966 for (i = 0; i < MONO_MAX_IREGS; ++i) {
967 if (reg_locations [i]) {
968 DEBUG (fprintf (logfile, "\tscan saved reg location %p.\n", reg_locations [i]));
969 mono_gc_conservatively_scan_area (reg_locations [i], reg_locations [i] + sizeof (mgreg_t));
970 scanned_registers += sizeof (mgreg_t);
972 if (new_reg_locations [i]) {
973 DEBUG (fprintf (logfile, "\tscan saved reg location %p.\n", new_reg_locations [i]));
974 mono_gc_conservatively_scan_area (new_reg_locations [i], new_reg_locations [i] + sizeof (mgreg_t));
975 scanned_registers += sizeof (mgreg_t);
979 if (stack_limit < stack_end) {
980 DEBUG (fprintf (logfile, "\tscan remaining stack %p-%p (%d).\n", stack_limit, stack_end, (int)(stack_end - stack_limit)));
981 mono_gc_conservatively_scan_area (stack_limit, stack_end);
982 stats.scanned_native += stack_end - stack_limit;
985 DEBUG (fprintf (logfile, "Marked %d bytes, p=%d,c=%d out of %d.\n", scanned, scanned_precisely, scanned_conservatively, (int)(stack_end - stack_start)));
987 stats.scanned_stacks += stack_end - stack_start;
988 stats.scanned += scanned;
989 stats.scanned_precisely += scanned_precisely;
990 stats.scanned_conservatively += scanned_conservatively;
991 stats.scanned_registers += scanned_registers;
993 //mono_gc_conservatively_scan_area (stack_start, stack_end);
997 * precise_pass:
999 * Mark a thread stack precisely based on information saved during the conservative
1000 * pass.
1002 static void
1003 precise_pass (TlsData *tls, guint8 *stack_start, guint8 *stack_end)
1005 int findex, i;
1006 FrameInfo *fi;
1007 guint8 *frame_start;
1009 if (!tls)
1010 return;
1012 for (findex = 0; findex < tls->nframes; findex ++) {
1013 /* Load information saved by the !precise pass */
1014 fi = &tls->frames [findex];
1015 frame_start = stack_start + fi->frame_start_offset;
1017 DEBUG (char *fname = mono_method_full_name (fi->ji->method, TRUE); fprintf (logfile, "Mark(1): %s\n", fname); g_free (fname));
1020 * FIXME: Add a function to mark using a bitmap, to avoid doing a
1021 * call for each object.
1024 /* Mark stack slots */
1025 if (fi->bitmap) {
1026 guint8 *ref_bitmap = fi->bitmap;
1027 gboolean live;
1029 for (i = 0; i < fi->nslots; ++i) {
1030 MonoObject **ptr = (MonoObject**)(frame_start + (i * sizeof (mgreg_t)));
1032 live = ref_bitmap [i / 8] & (1 << (i % 8));
1034 if (live) {
1035 MonoObject *obj = *ptr;
1036 if (obj) {
1037 DEBUG (fprintf (logfile, "\tref %s0x%x(fp)=%p: %p ->", (guint8*)ptr >= (guint8*)fi->fp ? "" : "-", ABS ((int)((gssize)ptr - (gssize)fi->fp)), ptr, obj));
1038 *ptr = mono_gc_scan_object (obj);
1039 DEBUG (fprintf (logfile, " %p.\n", *ptr));
1040 } else {
1041 DEBUG (fprintf (logfile, "\tref %s0x%x(fp)=%p: %p.\n", (guint8*)ptr >= (guint8*)fi->fp ? "" : "-", ABS ((int)((gssize)ptr - (gssize)fi->fp)), ptr, obj));
1043 } else {
1044 #if 0
1046 * This is disabled because the pointer takes up a lot of space.
1047 * Stack slots might be shared between ref and non-ref variables ?
1049 if (map->ref_slots [i / 8] & (1 << (i % 8))) {
1050 DEBUG (fprintf (logfile, "\tref %s0x%x(fp)=%p: dead (%p)\n", (guint8*)ptr >= (guint8*)fi->fp ? "" : "-", ABS ((int)((gssize)ptr - (gssize)fi->fp)), ptr, *ptr));
1052 * Fail fast if the live range is incorrect, and
1053 * the JITted code tries to access this object
1055 *ptr = DEAD_REF;
1057 #endif
1062 /* Mark registers */
1065 * Registers are different from stack slots, they have no address where they
1066 * are stored. Instead, some frame below this frame in the stack saves them
1067 * in its prolog to the stack. We can mark this location precisely.
1069 for (i = 0; i < fi->nreg_locations; ++i) {
1071 * reg_locations [i] contains the address of the stack slot where
1072 * a reg was last saved, so mark that slot.
1074 MonoObject **ptr = (MonoObject**)((guint8*)stack_start + fi->reg_locations [i]);
1075 MonoObject *obj = *ptr;
1077 if (obj) {
1078 DEBUG (fprintf (logfile, "\treg %s saved at %p: %p ->", mono_arch_regname (fi->regs [i]), ptr, obj));
1079 *ptr = mono_gc_scan_object (obj);
1080 DEBUG (fprintf (logfile, " %p.\n", *ptr));
1081 } else {
1082 DEBUG (fprintf (logfile, "\treg %s saved at %p: %p\n", mono_arch_regname (fi->regs [i]), ptr, obj));
1088 * Debugging aid to check for missed refs.
1090 if (tls->ref_to_track) {
1091 mgreg_t *p;
1093 for (p = (mgreg_t*)stack_start; p < (mgreg_t*)stack_end; ++p)
1094 if (*p == (mgreg_t)tls->ref_to_track)
1095 printf ("REF AT %p.\n", p);
1100 * thread_mark_func:
1102 * This is called by the GC twice to mark a thread stack. PRECISE is FALSE at the first
1103 * call, and TRUE at the second. USER_DATA points to a TlsData
1104 * structure filled in by thread_suspend_func.
1106 static void
1107 thread_mark_func (gpointer user_data, guint8 *stack_start, guint8 *stack_end, gboolean precise)
1109 TlsData *tls = user_data;
1111 DEBUG (fprintf (logfile, "****************************************\n"));
1112 DEBUG (fprintf (logfile, "*** %s stack marking for thread %p (%p-%p) ***\n", precise ? "Precise" : "Conservative", tls ? GUINT_TO_POINTER (tls->tid) : NULL, stack_start, stack_end));
1113 DEBUG (fprintf (logfile, "****************************************\n"));
1115 if (!precise)
1116 conservative_pass (tls, stack_start, stack_end);
1117 else
1118 precise_pass (tls, stack_start, stack_end);
1121 static void
1122 mini_gc_init_gc_map (MonoCompile *cfg)
1124 if (COMPILE_LLVM (cfg))
1125 return;
1127 if (!mono_gc_is_moving ())
1128 return;
1130 if (!cfg->compile_aot && !mono_gc_precise_stack_mark_enabled ())
1131 return;
1133 #if 1
1134 /* Debugging support */
1136 static int precise_count;
1138 precise_count ++;
1139 if (getenv ("MONO_GCMAP_COUNT")) {
1140 if (precise_count == atoi (getenv ("MONO_GCMAP_COUNT")))
1141 printf ("LAST: %s\n", mono_method_full_name (cfg->method, TRUE));
1142 if (precise_count > atoi (getenv ("MONO_GCMAP_COUNT")))
1143 return;
1146 #endif
1148 cfg->compute_gc_maps = TRUE;
1150 cfg->gc_info = mono_mempool_alloc0 (cfg->mempool, sizeof (MonoCompileGC));
1154 * mini_gc_set_slot_type_from_fp:
1156 * Set the GC slot type of the stack slot identified by SLOT_OFFSET, which should be
1157 * relative to the frame pointer. By default, all stack slots are type PIN, so there is no
1158 * need to call this function for those slots.
1160 void
1161 mini_gc_set_slot_type_from_fp (MonoCompile *cfg, int slot_offset, GCSlotType type)
1163 MonoCompileGC *gcfg = (MonoCompileGC*)cfg->gc_info;
1165 if (!cfg->compute_gc_maps)
1166 return;
1168 g_assert (slot_offset % sizeof (mgreg_t) == 0);
1170 gcfg->stack_slots_from_fp = g_slist_prepend_mempool (cfg->mempool, gcfg->stack_slots_from_fp, GINT_TO_POINTER (((slot_offset) << 16) | type));
1174 * mini_gc_set_slot_type_from_cfa:
1176 * Set the GC slot type of the stack slot identified by SLOT_OFFSET, which should be
1177 * relative to the DWARF CFA value. This should be called from mono_arch_emit_prolog ().
1178 * If TYPE is SLOT_REF, the slot is assumed to be live from the end of the prolog until
1179 * the end of the method. By default, all stack slots are type PIN, so there is no need to
1180 * call this function for those slots.
1182 void
1183 mini_gc_set_slot_type_from_cfa (MonoCompile *cfg, int slot_offset, GCSlotType type)
1185 MonoCompileGC *gcfg = (MonoCompileGC*)cfg->gc_info;
1186 int slot = - (slot_offset / sizeof (mgreg_t));
1188 if (!cfg->compute_gc_maps)
1189 return;
1191 g_assert (slot_offset <= 0);
1192 g_assert (slot_offset % sizeof (mgreg_t) == 0);
1194 gcfg->stack_slots_from_cfa = g_slist_prepend_mempool (cfg->mempool, gcfg->stack_slots_from_cfa, GUINT_TO_POINTER (((slot) << 16) | type));
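/*
 * Illustrative usage sketch (the offset is hypothetical): a backend prolog
 * which stores a GC ref at cfa-16 right after setting up the frame would call
 * mini_gc_set_slot_type_from_cfa (cfg, -16, SLOT_REF) from
 * mono_arch_emit_prolog (), which records slot 2 here on a 64 bit target.
 */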
1197 static inline int
1198 fp_offset_to_slot (MonoCompile *cfg, int offset)
1200 MonoCompileGC *gcfg = cfg->gc_info;
1202 return (offset - gcfg->min_offset) / sizeof (mgreg_t);
1205 static inline int
1206 slot_to_fp_offset (MonoCompile *cfg, int slot)
1208 MonoCompileGC *gcfg = cfg->gc_info;
1210 return (slot * sizeof (mgreg_t)) + gcfg->min_offset;
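/*
 * Illustrative example, assuming gcfg->min_offset == -0x80 on a 64 bit target:
 * fp_offset_to_slot () maps the slot at fp-0x20 to (-0x20 - -0x80) / 8 == 12,
 * and slot_to_fp_offset (cfg, 12) maps it back to -0x20.
 */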
1213 static inline void
1214 set_slot (MonoCompileGC *gcfg, int slot, int callsite_index, GCSlotType type)
1216 g_assert (slot >= 0 && slot < gcfg->nslots);
1218 if (type == SLOT_PIN) {
1219 clear_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1220 set_bit (gcfg->stack_pin_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1221 } else if (type == SLOT_REF) {
1222 set_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1223 clear_bit (gcfg->stack_pin_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1224 } else if (type == SLOT_NOREF) {
1225 clear_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1226 clear_bit (gcfg->stack_pin_bitmap, gcfg->stack_bitmap_width, callsite_index, slot);
1230 static inline void
1231 set_slot_everywhere (MonoCompileGC *gcfg, int slot, GCSlotType type)
1233 int cindex;
1235 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex)
1236 set_slot (gcfg, slot, cindex, type);
1239 static inline void
1240 set_slot_in_range (MonoCompileGC *gcfg, int slot, int from, int to, GCSlotType type)
1242 int cindex;
1244 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex) {
1245 int callsite_offset = gcfg->callsites [cindex]->pc_offset;
1246 if (callsite_offset >= from && callsite_offset < to)
1247 set_slot (gcfg, slot, cindex, type);
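/*
 * Illustrative example (hypothetical offsets): a slot defined at pc offset
 * 0x10 in a bblock whose native code ends at 0x40 is registered with
 * set_slot_in_range (gcfg, slot, 0x10, 0x40, SLOT_REF), so only callsites
 * whose pc_offset falls in [0x10, 0x40) treat it as a ref.
 */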
1251 static inline void
1252 set_reg_slot (MonoCompileGC *gcfg, int slot, int callsite_index, GCSlotType type)
1254 g_assert (slot >= 0 && slot < gcfg->nregs);
1256 if (type == SLOT_PIN) {
1257 clear_bit (gcfg->reg_ref_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1258 set_bit (gcfg->reg_pin_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1259 } else if (type == SLOT_REF) {
1260 set_bit (gcfg->reg_ref_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1261 clear_bit (gcfg->reg_pin_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1262 } else if (type == SLOT_NOREF) {
1263 clear_bit (gcfg->reg_ref_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1264 clear_bit (gcfg->reg_pin_bitmap, gcfg->reg_bitmap_width, callsite_index, slot);
1268 static inline void
1269 set_reg_slot_everywhere (MonoCompileGC *gcfg, int slot, GCSlotType type)
1271 int cindex;
1273 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex)
1274 set_reg_slot (gcfg, slot, cindex, type);
1277 static inline void
1278 set_reg_slot_in_range (MonoCompileGC *gcfg, int slot, int from, int to, GCSlotType type)
1280 int cindex;
1282 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex) {
1283 int callsite_offset = gcfg->callsites [cindex]->pc_offset;
1284 if (callsite_offset >= from && callsite_offset < to)
1285 set_reg_slot (gcfg, slot, cindex, type);
1289 static void
1290 process_spill_slots (MonoCompile *cfg)
1292 MonoCompileGC *gcfg = cfg->gc_info;
1293 MonoBasicBlock *bb;
1294 GSList *l;
1295 int i;
1297 /* Mark all ref/pin spill slots as NOREF by default outside of their live range */
1298 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
1299 for (l = bb->spill_slot_defs; l; l = l->next) {
1300 MonoInst *def = l->data;
1301 int spill_slot = def->inst_c0;
1302 int bank = def->inst_c1;
1303 int offset = cfg->spill_info [bank][spill_slot].offset;
1304 int slot = fp_offset_to_slot (cfg, offset);
1306 if (bank == MONO_REG_INT_MP || bank == MONO_REG_INT_REF)
1307 set_slot_everywhere (gcfg, slot, SLOT_NOREF);
1311 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
1312 for (l = bb->spill_slot_defs; l; l = l->next) {
1313 MonoInst *def = l->data;
1314 int spill_slot = def->inst_c0;
1315 int bank = def->inst_c1;
1316 int offset = cfg->spill_info [bank][spill_slot].offset;
1317 int slot = fp_offset_to_slot (cfg, offset);
1318 GCSlotType type;
1320 if (bank == MONO_REG_INT_MP)
1321 type = SLOT_PIN;
1322 else
1323 type = SLOT_REF;
1326 * Extend the live interval for the GC tracked spill slots
1327 * defined in this bblock.
1328 * FIXME: This is not needed.
1330 set_slot_in_range (gcfg, slot, def->backend.pc_offset, bb->native_offset + bb->native_length, type);
1332 if (cfg->verbose_level > 1)
1333 printf ("\t%s spill slot at %s0x%x(fp) (slot = %d)\n", slot_type_to_string (type), offset >= 0 ? "" : "-", ABS (offset), slot);
1337 /* Set fp spill slots to NOREF */
1338 for (i = 0; i < cfg->spill_info_len [MONO_REG_DOUBLE]; ++i) {
1339 int offset = cfg->spill_info [MONO_REG_DOUBLE][i].offset;
1340 int slot;
1342 if (offset == -1)
1343 continue;
1345 slot = fp_offset_to_slot (cfg, offset);
1347 set_slot_everywhere (gcfg, slot, SLOT_NOREF);
1348 /* FIXME: 32 bit */
1349 if (cfg->verbose_level > 1)
1350 printf ("\tfp spill slot at %s0x%x(fp) (slot = %d)\n", offset >= 0 ? "" : "-", ABS (offset), slot);
1353 /* Set int spill slots to NOREF */
1354 for (i = 0; i < cfg->spill_info_len [MONO_REG_INT]; ++i) {
1355 int offset = cfg->spill_info [MONO_REG_INT][i].offset;
1356 int slot;
1358 if (offset == -1)
1359 continue;
1361 slot = fp_offset_to_slot (cfg, offset);
1363 set_slot_everywhere (gcfg, slot, SLOT_NOREF);
1364 if (cfg->verbose_level > 1)
1365 printf ("\tint spill slot at %s0x%x(fp) (slot = %d)\n", offset >= 0 ? "" : "-", ABS (offset), slot);
1370 * process_other_slots:
1372 * Process stack slots registered using mini_gc_set_slot_type_... ().
1374 static void
1375 process_other_slots (MonoCompile *cfg)
1377 MonoCompileGC *gcfg = cfg->gc_info;
1378 GSList *l;
1380 /* Relative to the CFA */
1381 for (l = gcfg->stack_slots_from_cfa; l; l = l->next) {
1382 guint data = GPOINTER_TO_UINT (l->data);
1383 int cfa_slot = data >> 16;
1384 GCSlotType type = data & 0xff;
1385 int slot;
1388 * Map the cfa relative slot to an fp relative slot.
1389 * slot_addr == cfa - <cfa_slot>*4/8
1390 * fp + cfa_offset == cfa
1391 * -> slot_addr == fp + (cfa_offset - <cfa_slot>*4/8)
1393 slot = (cfg->cfa_offset / sizeof (mgreg_t)) - cfa_slot - (gcfg->min_offset / sizeof (mgreg_t));
1395 set_slot_everywhere (gcfg, slot, type);
1397 if (cfg->verbose_level > 1) {
1398 int fp_offset = slot_to_fp_offset (cfg, slot);
1399 if (type == SLOT_NOREF)
1400 printf ("\tnoref slot at %s0x%x(fp) (slot = %d) (cfa - 0x%x)\n", fp_offset >= 0 ? "" : "-", ABS (fp_offset), slot, (int)(cfa_slot * sizeof (mgreg_t)));
1404 /* Relative to the FP */
1405 for (l = gcfg->stack_slots_from_fp; l; l = l->next) {
1406 gint data = GPOINTER_TO_INT (l->data);
1407 int offset = data >> 16;
1408 GCSlotType type = data & 0xff;
1409 int slot;
1411 slot = fp_offset_to_slot (cfg, offset);
1413 set_slot_everywhere (gcfg, slot, type);
1415 /* Liveness for these slots is handled by process_spill_slots () */
1417 if (cfg->verbose_level > 1) {
1418 if (type == SLOT_REF)
1419 printf ("\tref slot at fp+0x%x (slot = %d)\n", offset, slot);
1424 static void
1425 process_variables (MonoCompile *cfg)
1427 MonoCompileGC *gcfg = cfg->gc_info;
1428 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1429 int i, locals_min_slot, locals_max_slot, cindex;
1430 MonoBasicBlock *bb;
1431 MonoInst *tmp;
1432 int *pc_offsets;
1433 int locals_min_offset = gcfg->locals_min_offset;
1434 int locals_max_offset = gcfg->locals_max_offset;
1436 /* Slots for locals are NOREF by default */
1437 locals_min_slot = (locals_min_offset - gcfg->min_offset) / sizeof (mgreg_t);
1438 locals_max_slot = (locals_max_offset - gcfg->min_offset) / sizeof (mgreg_t);
1439 for (i = locals_min_slot; i < locals_max_slot; ++i) {
1440 set_slot_everywhere (gcfg, i, SLOT_NOREF);
1444 * Compute the offset where variables are initialized in the first bblock, if any.
1446 pc_offsets = g_new0 (int, cfg->next_vreg);
1448 bb = cfg->bb_entry->next_bb;
1449 MONO_BB_FOR_EACH_INS (bb, tmp) {
1450 if (tmp->opcode == OP_GC_LIVENESS_DEF) {
1451 int vreg = tmp->inst_c1;
1452 if (pc_offsets [vreg] == 0) {
1453 g_assert (tmp->backend.pc_offset > 0);
1454 pc_offsets [vreg] = tmp->backend.pc_offset;
1460 * Stack slots holding arguments are initialized in the prolog.
1461 * This means we can treat them as live for the whole method.
1463 for (i = 0; i < cfg->num_varinfo; i++) {
1464 MonoInst *ins = cfg->varinfo [i];
1465 MonoType *t = ins->inst_vtype;
1466 MonoMethodVar *vmv;
1467 guint32 pos;
1468 gboolean byref, is_this = FALSE;
1469 gboolean is_arg = i < cfg->locals_start;
1471 if (ins == cfg->ret)
1472 continue;
1474 vmv = MONO_VARINFO (cfg, i);
1476 /* For some reason, 'this' is byref */
1477 if (sig->hasthis && ins == cfg->args [0] && !cfg->method->klass->valuetype) {
1478 t = &cfg->method->klass->byval_arg;
1479 is_this = TRUE;
1482 byref = t->byref;
1484 if (ins->opcode == OP_REGVAR) {
1485 int hreg;
1486 GCSlotType slot_type;
1488 t = mini_type_get_underlying_type (NULL, t);
1490 hreg = ins->dreg;
1491 g_assert (hreg < MONO_MAX_IREGS);
1493 if (byref)
1494 slot_type = SLOT_PIN;
1495 else
1496 slot_type = mini_type_is_reference (cfg, t) ? SLOT_REF : SLOT_NOREF;
1498 if (slot_type == SLOT_PIN) {
1499 /* These have no live interval, be conservative */
1500 set_reg_slot_everywhere (gcfg, hreg, slot_type);
1501 } else {
1503 * Unlike variables allocated to the stack, we generate liveness info
1504 * for noref vars in registers in mono_spill_global_vars (), because
1505 * knowing that a register doesn't contain a ref allows us to mark its save
1506 * locations precisely.
1508 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex)
1509 if (gcfg->callsites [cindex]->liveness [i / 8] & (1 << (i % 8)))
1510 set_reg_slot (gcfg, hreg, cindex, slot_type);
1513 if (cfg->verbose_level > 1) {
1514 printf ("\t%s %sreg %s(R%d)\n", slot_type_to_string (slot_type), is_arg ? "arg " : "", mono_arch_regname (hreg), vmv->vreg);
1517 continue;
1520 if (ins->opcode != OP_REGOFFSET)
1521 continue;
1523 if (ins->inst_offset % sizeof (mgreg_t) != 0)
1524 continue;
1526 if (is_arg && ins->inst_offset >= gcfg->max_offset)
1527 /* In parent frame */
1528 continue;
1530 pos = fp_offset_to_slot (cfg, ins->inst_offset);
1532 if (is_arg && ins->flags & MONO_INST_IS_DEAD) {
1533 /* These do not get stored in the prolog */
1534 set_slot_everywhere (gcfg, pos, SLOT_NOREF);
1536 if (cfg->verbose_level > 1) {
1537 printf ("\tdead arg at fp%s0x%x (slot = %d): %s\n", ins->inst_offset < 0 ? "-" : "+", (ins->inst_offset < 0) ? -(int)ins->inst_offset : (int)ins->inst_offset, pos, mono_type_full_name (ins->inst_vtype));
1539 continue;
1542 if (MONO_TYPE_ISSTRUCT (t)) {
1543 int numbits = 0, j;
1544 gsize *bitmap = NULL;
1545 gboolean pin = FALSE;
1546 int size;
1547 int size_in_slots;
1549 if (ins->backend.is_pinvoke)
1550 size = mono_class_native_size (ins->klass, NULL);
1551 else
1552 size = mono_class_value_size (ins->klass, NULL);
1553 size_in_slots = ALIGN_TO (size, sizeof (mgreg_t)) / sizeof (mgreg_t);
1555 if (!ins->klass->has_references) {
1556 if (is_arg) {
1557 for (j = 0; j < size_in_slots; ++j)
1558 set_slot_everywhere (gcfg, pos + j, SLOT_NOREF);
1560 continue;
1563 if (ins->klass->generic_container || mono_class_is_open_constructed_type (t)) {
1564 /* FIXME: Generic sharing */
1565 pin = TRUE;
1566 } else {
1567 mono_class_compute_gc_descriptor (ins->klass);
1569 bitmap = mono_gc_get_bitmap_for_descr (ins->klass->gc_descr, &numbits);
1570 if (!bitmap)
1571 pin = TRUE;
1574 * Most vtypes are marked volatile because of the LDADDR instructions,
1575 * and they have no liveness information since they are decomposed
1576 * before the liveness pass. We emit OP_GC_LIVENESS_DEF instructions for
1577 * them during VZERO decomposition.
1579 if (!pc_offsets [vmv->vreg])
1580 pin = TRUE;
1583 if (ins->backend.is_pinvoke)
1584 pin = TRUE;
1586 if (cfg->verbose_level > 1)
1587 printf ("\tvtype R%d at fp+0x%x-0x%x: %s\n", vmv->vreg, (int)ins->inst_offset, (int)(ins->inst_offset + (size / sizeof (mgreg_t))), mono_type_full_name (ins->inst_vtype));
1589 if (bitmap) {
1590 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex) {
1591 if (gcfg->callsites [cindex]->pc_offset > pc_offsets [vmv->vreg]) {
1592 for (j = 0; j < numbits; ++j) {
1593 if (bitmap [j / GC_BITS_PER_WORD] & ((gsize)1 << (j % GC_BITS_PER_WORD))) {
1594 /* The descriptor is for the boxed object */
1595 set_slot (gcfg, (pos + j - (sizeof (MonoObject) / sizeof (mgreg_t))), cindex, pin ? SLOT_PIN : SLOT_REF);
1601 if (cfg->verbose_level > 1) {
1602 for (j = 0; j < numbits; ++j) {
1603 if (bitmap [j / GC_BITS_PER_WORD] & ((gsize)1 << (j % GC_BITS_PER_WORD)))
1604 printf ("\t\t%s slot at 0x%x(fp) (slot = %d)\n", pin ? "pin" : "ref", (int)(ins->inst_offset + (j * sizeof (mgreg_t))), (int)(pos + j - (sizeof (MonoObject) / sizeof (mgreg_t))));
1607 } else {
1608 if (cfg->verbose_level > 1)
1609 printf ("\t\tpinned\n");
1610 for (j = 0; j < size_in_slots; ++j) {
1611 set_slot_everywhere (gcfg, pos + j, SLOT_PIN);
1615 g_free (bitmap);
1617 continue;
1620 if (!is_arg && (ins->inst_offset < gcfg->min_offset || ins->inst_offset >= gcfg->max_offset))
1621 /* Vret addr etc. */
1622 continue;
1624 if (t->byref) {
1625 if (is_arg) {
1626 set_slot_everywhere (gcfg, pos, SLOT_PIN);
1627 } else {
1628 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex)
1629 if (gcfg->callsites [cindex]->liveness [i / 8] & (1 << (i % 8)))
1630 set_slot (gcfg, pos, cindex, SLOT_PIN);
1632 if (cfg->verbose_level > 1)
1633 printf ("\tbyref at %s0x%x(fp) (R%d, slot = %d): %s\n", ins->inst_offset < 0 ? "-" : "", (ins->inst_offset < 0) ? -(int)ins->inst_offset : (int)ins->inst_offset, vmv->vreg, pos, mono_type_full_name (ins->inst_vtype));
1634 continue;
1638 * This is currently disabled, but could be enabled to debug crashes.
1640 #if 0
1641 if (t->type == MONO_TYPE_I) {
1643 * Variables created in mono_handle_global_vregs have type I, but they
1644 * could hold GC refs since the vregs they were created from might not have been
1645 * marked as holding a GC ref. So be conservative.
1647 set_slot_everywhere (gcfg, pos, SLOT_PIN);
1648 continue;
1650 #endif
1652 t = mini_type_get_underlying_type (NULL, t);
1654 if (!mini_type_is_reference (cfg, t)) {
1655 set_slot_everywhere (gcfg, pos, SLOT_NOREF);
1656 if (cfg->verbose_level > 1)
1657 printf ("\tnoref at %s0x%x(fp) (R%d, slot = %d): %s\n", ins->inst_offset < 0 ? "-" : "", (ins->inst_offset < 0) ? -(int)ins->inst_offset : (int)ins->inst_offset, vmv->vreg, pos, mono_type_full_name (ins->inst_vtype));
1658 continue;
1661 /* 'this' is marked INDIRECT for gshared methods */
1662 if (ins->flags & (MONO_INST_VOLATILE | MONO_INST_INDIRECT) && !is_this) {
1664 * For volatile variables, treat them as live from the point they are
1665 * initialized in the first bblock until the end of the method.
1667 if (is_arg) {
1668 set_slot_everywhere (gcfg, pos, SLOT_REF);
1669 } else if (pc_offsets [vmv->vreg]) {
1670 set_slot_in_range (gcfg, pos, 0, pc_offsets [vmv->vreg], SLOT_PIN);
1671 set_slot_in_range (gcfg, pos, pc_offsets [vmv->vreg], cfg->code_size, SLOT_REF);
1672 } else {
1673 set_slot_everywhere (gcfg, pos, SLOT_PIN);
1675 if (cfg->verbose_level > 1)
1676 printf ("\tvolatile ref at %s0x%x(fp) (R%d, slot = %d): %s\n", ins->inst_offset < 0 ? "-" : "", (ins->inst_offset < 0) ? -(int)ins->inst_offset : (int)ins->inst_offset, vmv->vreg, pos, mono_type_full_name (ins->inst_vtype));
1677 continue;
1680 if (is_arg) {
1681 /* Live for the whole method */
1682 set_slot_everywhere (gcfg, pos, SLOT_REF);
1683 } else {
1684 for (cindex = 0; cindex < gcfg->ncallsites; ++cindex)
1685 if (gcfg->callsites [cindex]->liveness [i / 8] & (1 << (i % 8)))
1686 set_slot (gcfg, pos, cindex, SLOT_REF);
1689 if (cfg->verbose_level > 1) {
1690 printf ("\tref at %s0x%x(fp) (R%d, slot = %d): %s\n", ins->inst_offset < 0 ? "-" : "", (ins->inst_offset < 0) ? -(int)ins->inst_offset : (int)ins->inst_offset, vmv->vreg, pos, mono_type_full_name (ins->inst_vtype));
1694 g_free (pc_offsets);
1697 static int
1698 sp_offset_to_fp_offset (MonoCompile *cfg, int sp_offset)
1701 * Convert an sp relative offset to an fp relative offset. This is
1702 * platform specific.
1704 #ifdef TARGET_AMD64
1705 /* fp = sp + offset */
1706 g_assert (cfg->frame_reg == AMD64_RBP);
1707 return (- cfg->arch.sp_fp_offset + sp_offset);
1708 #else
1709 NOT_IMPLEMENTED;
1710 return -1;
1711 #endif
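/*
 * Illustrative example, assuming cfg->arch.sp_fp_offset == 0x30 on amd64:
 * since fp == sp + sp_fp_offset, the param area slot at sp+0x8 maps to
 * fp-0x28.
 */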
1714 static GCSlotType
1715 type_to_gc_slot_type (MonoCompile *cfg, MonoType *t)
1717 if (t->byref)
1718 return SLOT_PIN;
1719 t = mini_type_get_underlying_type (NULL, t);
1720 if (mini_type_is_reference (cfg, t))
1721 return SLOT_REF;
1722 else {
1723 if (MONO_TYPE_ISSTRUCT (t)) {
1724 MonoClass *klass = mono_class_from_mono_type (t);
1725 if (!klass->has_references) {
1726 return SLOT_NOREF;
1727 } else {
1728 // FIXME:
1729 return SLOT_PIN;
1732 return SLOT_NOREF;
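/*
 * Illustrative mapping: a MonoObject* local yields SLOT_REF, a byref (managed
 * pointer) yields SLOT_PIN, an int or a vtype without reference fields yields
 * SLOT_NOREF, and vtypes which do contain references currently fall back to
 * SLOT_PIN.
 */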
1736 static void
1737 process_param_area_slots (MonoCompile *cfg)
1739 MonoCompileGC *gcfg = cfg->gc_info;
1740 int i;
1741 gboolean *is_param;
1744 * These slots are used for passing parameters during calls. They are sp relative, not
1745 * fp relative, so they are harder to handle.
1747 if (cfg->flags & MONO_CFG_HAS_ALLOCA)
1748 /* The distance between fp and sp is not constant */
1749 return;
1751 is_param = mono_mempool_alloc0 (cfg->mempool, gcfg->nslots * sizeof (gboolean));
1753 for (i = 0; i < gcfg->ncallsites; ++i) {
1754 GCCallSite *callsite = gcfg->callsites [i];
1755 GSList *l;
1757 for (l = callsite->param_slots; l; l = l->next) {
1758 MonoInst *def = l->data;
1759 int sp_offset = def->inst_offset;
1760 int fp_offset = sp_offset_to_fp_offset (cfg, sp_offset);
1761 int slot = fp_offset_to_slot (cfg, fp_offset);
1763 g_assert (slot >= 0 && slot < gcfg->nslots);
1764 is_param [slot] = TRUE;
1768 /* All param area slots are noref by default */
1769 for (i = 0; i < gcfg->nslots; ++i) {
1770 if (is_param [i])
1771 set_slot_everywhere (gcfg, i, SLOT_NOREF);
1774 for (i = 0; i < gcfg->ncallsites; ++i) {
1775 GCCallSite *callsite = gcfg->callsites [i];
1776 GSList *l;
1778 for (l = callsite->param_slots; l; l = l->next) {
1779 MonoInst *def = l->data;
1780 MonoType *t = def->inst_vtype;
1781 int sp_offset = def->inst_offset;
1782 int fp_offset = sp_offset_to_fp_offset (cfg, sp_offset);
1783 int slot = fp_offset_to_slot (cfg, fp_offset);
1784 GCSlotType type = type_to_gc_slot_type (cfg, t);
1786 /* The slot is live between the def instruction and the call */
1787 set_slot_in_range (gcfg, slot, def->backend.pc_offset, callsite->pc_offset + 1, type);
1788 if (cfg->verbose_level > 1)
1789 printf ("\t%s param area slot at %s0x%x(fp)=0x%x(sp) (slot = %d) [0x%x-0x%x]\n", slot_type_to_string (type), fp_offset >= 0 ? "+" : "-", ABS (fp_offset), sp_offset, slot, def->backend.pc_offset, callsite->pc_offset + 1);
1794 static void
1795 process_finally_clauses (MonoCompile *cfg)
1797 MonoCompileGC *gcfg = cfg->gc_info;
1798 GCCallSite **callsites;
1799 int ncallsites;
1800 gboolean has_finally;
1801 int i, j, nslots, nregs;
1803 ncallsites = gcfg->ncallsites;
1804 nslots = gcfg->nslots;
1805 nregs = gcfg->nregs;
1806 callsites = gcfg->callsites;
1809 * The calls to the finally clauses don't show up in the cfg. See
1810 * test_0_liveness_8 ().
1811 * Variables accessed inside the finally clause are already marked VOLATILE by
1812 * mono_liveness_handle_exception_clauses (). Variables not accessed inside the finally clause have
1813 * correct liveness outside the finally clause. So mark them PIN inside the finally clauses.
1815 has_finally = FALSE;
1816 for (i = 0; i < cfg->header->num_clauses; ++i) {
1817 MonoExceptionClause *clause = &cfg->header->clauses [i];
1819 if (clause->flags == MONO_EXCEPTION_CLAUSE_FINALLY) {
1820 has_finally = TRUE;
1823 if (has_finally) {
1824 if (cfg->verbose_level > 1)
1825 printf ("\tMethod has finally clauses, pessimizing live ranges.\n");
1826 for (j = 0; j < ncallsites; ++j) {
1827 MonoBasicBlock *bb = callsites [j]->bb;
1828 MonoExceptionClause *clause;
1829 gboolean is_in_finally = FALSE;
1831 for (i = 0; i < cfg->header->num_clauses; ++i) {
1832 clause = &cfg->header->clauses [i];
1834 if (MONO_OFFSET_IN_HANDLER (clause, bb->real_offset)) {
1835 if (clause->flags == MONO_EXCEPTION_CLAUSE_FINALLY) {
1836 is_in_finally = TRUE;
1837 break;
1842 if (is_in_finally) {
1843 for (i = 0; i < nslots; ++i)
1844 set_slot (gcfg, i, j, SLOT_PIN);
1845 for (i = 0; i < nregs; ++i)
1846 set_reg_slot (gcfg, i, j, SLOT_PIN);
1852 static void
1853 compute_frame_size (MonoCompile *cfg)
1855 int i, locals_min_offset, locals_max_offset, cfa_min_offset, cfa_max_offset;
1856 int min_offset, max_offset;
1857 MonoCompileGC *gcfg = cfg->gc_info;
1858 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1859 GSList *l;
1861 /* Compute min/max offsets from the fp */
1863 /* Locals */
1864 #if defined(TARGET_AMD64) || defined(TARGET_X86)
1865 locals_min_offset = ALIGN_TO (cfg->locals_min_stack_offset, sizeof (mgreg_t));
1866 locals_max_offset = cfg->locals_max_stack_offset;
1867 #else
1868 /* min/max stack offset needs to be computed in mono_arch_allocate_vars () */
1869 NOT_IMPLEMENTED;
1870 #endif
1872 locals_min_offset = ALIGN_TO (locals_min_offset, sizeof (mgreg_t));
1873 locals_max_offset = ALIGN_TO (locals_max_offset, sizeof (mgreg_t));
1875 min_offset = locals_min_offset;
1876 max_offset = locals_max_offset;
1878 /* Arguments */
1879 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
1880 MonoInst *ins = cfg->args [i];
1882 if (ins->opcode == OP_REGOFFSET)
1883 min_offset = MIN (min_offset, ins->inst_offset);
1886 /* Cfa slots */
1887 g_assert (cfg->frame_reg == cfg->cfa_reg);
1888 g_assert (cfg->cfa_offset > 0);
1889 cfa_min_offset = 0;
1890 cfa_max_offset = cfg->cfa_offset;
1892 min_offset = MIN (min_offset, cfa_min_offset);
1893 max_offset = MAX (max_offset, cfa_max_offset);
1895 /* Fp relative slots */
1896 for (l = gcfg->stack_slots_from_fp; l; l = l->next) {
1897 gint data = GPOINTER_TO_INT (l->data);
1898 int offset = data >> 16;
1900 min_offset = MIN (min_offset, offset);
1903 /* Spill slots */
1904 if (!(cfg->flags & MONO_CFG_HAS_SPILLUP)) {
1905 int stack_offset = ALIGN_TO (cfg->stack_offset, sizeof (mgreg_t));
1906 min_offset = MIN (min_offset, (-stack_offset));
1909 /* Param area slots */
1910 #ifdef TARGET_AMD64
1911 min_offset = MIN (min_offset, -cfg->arch.sp_fp_offset);
1912 #endif
1914 gcfg->min_offset = min_offset;
1915 gcfg->max_offset = max_offset;
1916 gcfg->locals_min_offset = locals_min_offset;
1917 gcfg->locals_max_offset = locals_max_offset;
1920 static void
1921 init_gcfg (MonoCompile *cfg)
1923 int i, nregs, nslots;
1924 MonoCompileGC *gcfg = cfg->gc_info;
1925 GCCallSite **callsites;
1926 int ncallsites;
1927 MonoBasicBlock *bb;
1928 GSList *l;
1930 /*
1931 * Collect callsites
1932 */
1933 ncallsites = 0;
1934 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
1935 ncallsites += g_slist_length (bb->gc_callsites);
1937 callsites = mono_mempool_alloc0 (cfg->mempool, ncallsites * sizeof (GCCallSite*));
1938 i = 0;
1939 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
1940 for (l = bb->gc_callsites; l; l = l->next)
1941 callsites [i++] = l->data;
1944 /* The callsites should already be ordered by pc offset */
1945 for (i = 1; i < ncallsites; ++i)
1946 g_assert (callsites [i - 1]->pc_offset < callsites [i]->pc_offset);
1948 /*
1949 * The stack frame looks like this:
1950 *
1951 * <fp + max_offset> == cfa -> <end of previous frame>
1952 * <other stack slots>
1953 * <locals>
1954 * <other stack slots>
1955 * fp + min_offset ->
1956 * ...
1957 * fp ->
1958 */
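/*
 * Worked example with made-up numbers: for a frame with min_offset = -0x28 and
 * max_offset = 0x10 on a 64-bit target, nslots below is (0x10 - (-0x28)) / 8 = 7,
 * and slot i describes the word at fp + min_offset + i * sizeof (mgreg_t).
 */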
1960 if (cfg->verbose_level > 1)
1961 printf ("GC Map for %s: 0x%x-0x%x\n", mono_method_full_name (cfg->method, TRUE), gcfg->min_offset, gcfg->max_offset);
1963 nslots = (gcfg->max_offset - gcfg->min_offset) / sizeof (mgreg_t);
1964 nregs = NREGS;
1966 gcfg->nslots = nslots;
1967 gcfg->nregs = nregs;
1968 gcfg->callsites = callsites;
1969 gcfg->ncallsites = ncallsites;
1970 gcfg->stack_bitmap_width = ALIGN_TO (nslots, 8) / 8;
1971 gcfg->reg_bitmap_width = ALIGN_TO (nregs, 8) / 8;
1972 gcfg->stack_ref_bitmap = mono_mempool_alloc0 (cfg->mempool, gcfg->stack_bitmap_width * ncallsites);
1973 gcfg->stack_pin_bitmap = mono_mempool_alloc0 (cfg->mempool, gcfg->stack_bitmap_width * ncallsites);
1974 gcfg->reg_ref_bitmap = mono_mempool_alloc0 (cfg->mempool, gcfg->reg_bitmap_width * ncallsites);
1975 gcfg->reg_pin_bitmap = mono_mempool_alloc0 (cfg->mempool, gcfg->reg_bitmap_width * ncallsites);
1977 /* All slots start out as PIN */
1978 memset (gcfg->stack_pin_bitmap, 0xff, gcfg->stack_bitmap_width * ncallsites);
1979 for (i = 0; i < nregs; ++i) {
1980 /*
1981 * By default, registers are NOREF.
1982 * A callee might save them before they are defined in this method, but the saved
1983 * value is dead too, so it doesn't need to be marked.
1984 */
1985 if ((cfg->used_int_regs & (1 << i)))
1986 set_reg_slot_everywhere (gcfg, i, SLOT_NOREF);
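/*
 * Minimal sketch (illustration only, kept under #if 0): how the bitmaps allocated above
 * are addressed.  This assumes the get_bit () helper used below in create_map () treats
 * its arguments as (bitmap, bytes-per-row, row, column), with one row per call site.
 */
#if 0
static gboolean
example_stack_slot_is_ref (MonoCompileGC *gcfg, int callsite_index, int slot)
{
	/* Row = call site, column = stack slot index inside the frame */
	return get_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, callsite_index, slot) != 0;
}
#endif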
1990 static void
1991 create_map (MonoCompile *cfg)
1993 GCMap *map;
1994 int i, j, nregs, nslots, nref_regs, npin_regs, alloc_size, bitmaps_size, bitmaps_offset;
1995 int ntypes [16];
1996 int stack_bitmap_width, stack_bitmap_size, reg_ref_bitmap_width, reg_ref_bitmap_size;
1997 int reg_pin_bitmap_width, reg_pin_bitmap_size, bindex;
1998 int start, end;
1999 gboolean has_ref_slots, has_pin_slots, has_ref_regs, has_pin_regs;
2000 MonoCompileGC *gcfg = cfg->gc_info;
2001 GCCallSite **callsites;
2002 int ncallsites;
2003 guint8 *bitmap, *bitmaps;
2004 guint32 reg_ref_mask, reg_pin_mask;
2006 ncallsites = gcfg->ncallsites;
2007 nslots = gcfg->nslots;
2008 nregs = gcfg->nregs;
2009 callsites = gcfg->callsites;
2011 /*
2012 * Compute the real size of the bitmap, i.e. ignore NOREF columns at the beginning and at
2013 * the end. Also, compute whether the map needs ref/pin bitmaps, and collect stats.
2014 */
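/*
 * Worked example with made-up numbers: if, across all call sites, only slots 3..5 are
 * ever REF or PIN, the loop below produces start = 3 and end = 6, so the emitted map
 * keeps just those three columns and map->map_offset becomes 3 * sizeof (mgreg_t).
 */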
2015 has_ref_slots = FALSE;
2016 has_pin_slots = FALSE;
2017 start = -1;
2018 end = -1;
2019 memset (ntypes, 0, sizeof (ntypes));
2020 for (i = 0; i < nslots; ++i) {
2021 gboolean has_ref = FALSE;
2022 gboolean has_pin = FALSE;
2024 for (j = 0; j < ncallsites; ++j) {
2025 if (get_bit (gcfg->stack_pin_bitmap, gcfg->stack_bitmap_width, j, i))
2026 has_pin = TRUE;
2027 if (get_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, j, i))
2028 has_ref = TRUE;
2031 if (has_ref)
2032 has_ref_slots = TRUE;
2033 if (has_pin)
2034 has_pin_slots = TRUE;
2036 if (has_ref)
2037 ntypes [SLOT_REF] ++;
2038 else if (has_pin)
2039 ntypes [SLOT_PIN] ++;
2040 else
2041 ntypes [SLOT_NOREF] ++;
2043 if (has_ref || has_pin) {
2044 if (start == -1)
2045 start = i;
2046 end = i + 1;
2049 if (start == -1) {
2050 start = end = nslots;
2051 } else {
2052 g_assert (start != -1);
2053 g_assert (start < end);
2056 has_ref_regs = FALSE;
2057 has_pin_regs = FALSE;
2058 reg_ref_mask = 0;
2059 reg_pin_mask = 0;
2060 nref_regs = 0;
2061 npin_regs = 0;
2062 for (i = 0; i < nregs; ++i) {
2063 gboolean has_ref = FALSE;
2064 gboolean has_pin = FALSE;
2066 if (!(cfg->used_int_regs & (1 << i)))
2067 continue;
2069 for (j = 0; j < ncallsites; ++j) {
2070 if (get_bit (gcfg->reg_ref_bitmap, gcfg->reg_bitmap_width, j, i)) {
2071 has_ref = TRUE;
2072 break;
2075 for (j = 0; j < ncallsites; ++j) {
2076 if (get_bit (gcfg->reg_pin_bitmap, gcfg->reg_bitmap_width, j, i)) {
2077 has_pin = TRUE;
2078 break;
2082 if (has_ref) {
2083 reg_ref_mask |= (1 << i);
2084 has_ref_regs = TRUE;
2085 nref_regs ++;
2087 if (has_pin) {
2088 reg_pin_mask |= (1 << i);
2089 has_pin_regs = TRUE;
2090 npin_regs ++;
2094 if (cfg->verbose_level > 1)
2095 printf ("Slots: %d Start: %d End: %d Refs: %d NoRefs: %d Pin: %d Callsites: %d\n", nslots, start, end, ntypes [SLOT_REF], ntypes [SLOT_NOREF], ntypes [SLOT_PIN], ncallsites);
2097 /* Create the GC Map */
2099 stack_bitmap_width = ALIGN_TO (end - start, 8) / 8;
2100 stack_bitmap_size = stack_bitmap_width * ncallsites;
2101 reg_ref_bitmap_width = ALIGN_TO (nref_regs, 8) / 8;
2102 reg_ref_bitmap_size = reg_ref_bitmap_width * ncallsites;
2103 reg_pin_bitmap_width = ALIGN_TO (npin_regs, 8) / 8;
2104 reg_pin_bitmap_size = reg_pin_bitmap_width * ncallsites;
2105 bitmaps_size = (has_ref_slots ? stack_bitmap_size : 0) + (has_pin_slots ? stack_bitmap_size : 0) + (has_ref_regs ? reg_ref_bitmap_size : 0) + (has_pin_regs ? reg_pin_bitmap_size : 0);
2107 map = mono_mempool_alloc0 (cfg->mempool, sizeof (GCMap));
2109 map->frame_reg = cfg->frame_reg;
2110 map->start_offset = gcfg->min_offset;
2111 map->end_offset = gcfg->min_offset + (nslots * sizeof (mgreg_t));
2112 map->map_offset = start * sizeof (mgreg_t);
2113 map->nslots = end - start;
2114 map->has_ref_slots = has_ref_slots;
2115 map->has_pin_slots = has_pin_slots;
2116 map->has_ref_regs = has_ref_regs;
2117 map->has_pin_regs = has_pin_regs;
2118 g_assert (nregs < 32);
2119 map->used_int_regs = cfg->used_int_regs;
2120 map->reg_ref_mask = reg_ref_mask;
2121 map->reg_pin_mask = reg_pin_mask;
2122 map->nref_regs = nref_regs;
2123 map->npin_regs = npin_regs;
2125 bitmaps = mono_mempool_alloc0 (cfg->mempool, bitmaps_size);
2127 bitmaps_offset = 0;
2128 if (has_ref_slots) {
2129 map->stack_ref_bitmap_offset = bitmaps_offset;
2130 bitmaps_offset += stack_bitmap_size;
2132 bitmap = &bitmaps [map->stack_ref_bitmap_offset];
2133 for (i = 0; i < nslots; ++i) {
2134 for (j = 0; j < ncallsites; ++j) {
2135 if (get_bit (gcfg->stack_ref_bitmap, gcfg->stack_bitmap_width, j, i))
2136 set_bit (bitmap, stack_bitmap_width, j, i - start);
2140 if (has_pin_slots) {
2141 map->stack_pin_bitmap_offset = bitmaps_offset;
2142 bitmaps_offset += stack_bitmap_size;
2144 bitmap = &bitmaps [map->stack_pin_bitmap_offset];
2145 for (i = 0; i < nslots; ++i) {
2146 for (j = 0; j < ncallsites; ++j) {
2147 if (get_bit (gcfg->stack_pin_bitmap, gcfg->stack_bitmap_width, j, i))
2148 set_bit (bitmap, stack_bitmap_width, j, i - start);
2152 if (has_ref_regs) {
2153 map->reg_ref_bitmap_offset = bitmaps_offset;
2154 bitmaps_offset += reg_ref_bitmap_size;
2156 bitmap = &bitmaps [map->reg_ref_bitmap_offset];
2157 bindex = 0;
2158 for (i = 0; i < nregs; ++i) {
2159 if (reg_ref_mask & (1 << i)) {
2160 for (j = 0; j < ncallsites; ++j) {
2161 if (get_bit (gcfg->reg_ref_bitmap, gcfg->reg_bitmap_width, j, i))
2162 set_bit (bitmap, reg_ref_bitmap_width, j, bindex);
2164 bindex ++;
2168 if (has_pin_regs) {
2169 map->reg_pin_bitmap_offset = bitmaps_offset;
2170 bitmaps_offset += reg_pin_bitmap_size;
2172 bitmap = &bitmaps [map->reg_pin_bitmap_offset];
2173 bindex = 0;
2174 for (i = 0; i < nregs; ++i) {
2175 if (reg_pin_mask & (1 << i)) {
2176 for (j = 0; j < ncallsites; ++j) {
2177 if (get_bit (gcfg->reg_pin_bitmap, gcfg->reg_bitmap_width, j, i))
2178 set_bit (bitmap, reg_pin_bitmap_width, j, bindex);
2180 bindex ++;
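/*
 * The two blocks above compact the register bitmaps: only registers whose bit is set in
 * reg_ref_mask/reg_pin_mask get a column, indexed by bindex in mask order.  For example,
 * with a hypothetical reg_ref_mask of bits 3 and 12, register 3 maps to column 0 and
 * register 12 to column 1 of the encoded reg ref bitmap.
 */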
2185 /* Call sites */
2186 map->ncallsites = ncallsites;
2187 if (cfg->code_len < 256)
2188 map->callsite_entry_size = 1;
2189 else if (cfg->code_len < 65536)
2190 map->callsite_entry_size = 2;
2191 else
2192 map->callsite_entry_size = 4;
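/*
 * Example with a made-up method: code_len = 0x1200 falls in the 256..65535 range, so each
 * call site pc offset is stored as a guint16 and the table costs ncallsites * 2 bytes;
 * methods under 256 bytes of code get 1-byte entries, larger ones 4-byte entries.
 */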
2194 /* Encode the GC Map */
2196 guint8 buf [256];
2197 guint8 *endbuf;
2198 GCEncodedMap *emap;
2199 int encoded_size;
2200 guint8 *p;
2202 encode_gc_map (map, buf, &endbuf);
2203 g_assert (endbuf - buf < 256);
2205 encoded_size = endbuf - buf;
2206 alloc_size = sizeof (GCEncodedMap) + ALIGN_TO (encoded_size, map->callsite_entry_size) + (map->callsite_entry_size * map->ncallsites) + bitmaps_size;
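/*
 * The blob allocated below is laid out to match the alloc_size computation above:
 *
 * GCEncodedMap header
 * encoded fixed fields (encoded_size bytes)
 * padding up to a callsite_entry_size boundary
 * call site pc offsets (ncallsites * callsite_entry_size bytes)
 * bitmaps (bitmaps_size bytes)
 */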
2208 emap = mono_domain_alloc0 (cfg->domain, alloc_size);
2209 //emap->ref_slots = map->ref_slots;
2211 /* Encoded fixed fields */
2212 p = &emap->encoded [0];
2213 //emap->encoded_size = encoded_size;
2214 memcpy (p, buf, encoded_size);
2215 p += encoded_size;
2217 /* Callsite table */
2218 p = (guint8*)ALIGN_TO ((mgreg_t)p, map->callsite_entry_size);
2219 if (map->callsite_entry_size == 1) {
2220 guint8 *offsets = p;
2221 for (i = 0; i < ncallsites; ++i)
2222 offsets [i] = callsites [i]->pc_offset;
2223 stats.gc_callsites8_size += ncallsites * sizeof (guint8);
2224 } else if (map->callsite_entry_size == 2) {
2225 guint16 *offsets = (guint16*)p;
2226 for (i = 0; i < ncallsites; ++i)
2227 offsets [i] = callsites [i]->pc_offset;
2228 stats.gc_callsites16_size += ncallsites * sizeof (guint16);
2229 } else {
2230 guint32 *offsets = (guint32*)p;
2231 for (i = 0; i < ncallsites; ++i)
2232 offsets [i] = callsites [i]->pc_offset;
2233 stats.gc_callsites32_size += ncallsites * sizeof (guint32);
2235 p += ncallsites * map->callsite_entry_size;
2237 /* Bitmaps */
2238 memcpy (p, bitmaps, bitmaps_size);
2239 p += bitmaps_size;
2241 g_assert ((guint8*)p - (guint8*)emap <= alloc_size);
2243 stats.gc_maps_size += alloc_size;
2244 stats.gc_callsites_size += ncallsites * map->callsite_entry_size;
2245 stats.gc_bitmaps_size += bitmaps_size;
2246 stats.gc_map_struct_size += sizeof (GCEncodedMap) + encoded_size;
2248 cfg->jit_info->gc_info = emap;
2250 cfg->gc_map = (guint8*)emap;
2251 cfg->gc_map_size = alloc_size;
2254 stats.all_slots += nslots;
2255 stats.ref_slots += ntypes [SLOT_REF];
2256 stats.noref_slots += ntypes [SLOT_NOREF];
2257 stats.pin_slots += ntypes [SLOT_PIN];
2260 void
2261 mini_gc_create_gc_map (MonoCompile *cfg)
2263 if (!cfg->compute_gc_maps)
2264 return;
2266 /*
2267 * During marking, all frames except the top frame are at a call site, and we mark the
2268 * top frame conservatively. This means that we only need to compute and record
2269 * GC maps for call sites.
2270 */
2272 if (!(cfg->comp_done & MONO_COMP_LIVENESS))
2273 /* Without liveness info, the live ranges are not precise enough */
2274 return;
2276 mono_analyze_liveness_gc (cfg);
2278 compute_frame_size (cfg);
2280 init_gcfg (cfg);
2282 process_spill_slots (cfg);
2283 process_other_slots (cfg);
2284 process_param_area_slots (cfg);
2285 process_variables (cfg);
2286 process_finally_clauses (cfg);
2288 create_map (cfg);
2291 static void
2292 parse_debug_options (void)
2294 char **opts, **ptr;
2295 char *env;
2297 env = getenv ("MONO_GCMAP_DEBUG");
2298 if (!env)
2299 return;
2301 opts = g_strsplit (env, ",", -1);
2302 for (ptr = opts; ptr && *ptr; ptr ++) {
2303 /* No options yet */
2304 fprintf (stderr, "Invalid format for the MONO_GCMAP_DEBUG env variable: '%s'\n", env);
2305 exit (1);
2307 g_strfreev (opts);
2310 void
2311 mini_gc_init (void)
2313 MonoGCCallbacks cb;
2315 memset (&cb, 0, sizeof (cb));
2316 cb.thread_attach_func = thread_attach_func;
2317 cb.thread_detach_func = thread_detach_func;
2318 cb.thread_suspend_func = thread_suspend_func;
2319 /* Comment this out to disable precise stack marking */
2320 cb.thread_mark_func = thread_mark_func;
2321 mono_gc_set_gc_callbacks (&cb);
2323 logfile = mono_gc_get_logfile ();
2325 parse_debug_options ();
2327 mono_counters_register ("GC Maps size",
2328 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_maps_size);
2329 mono_counters_register ("GC Call Sites size",
2330 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_callsites_size);
2331 mono_counters_register ("GC Bitmaps size",
2332 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_bitmaps_size);
2333 mono_counters_register ("GC Map struct size",
2334 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_map_struct_size);
2335 mono_counters_register ("GC Call Sites encoded using 8 bits",
2336 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_callsites8_size);
2337 mono_counters_register ("GC Call Sites encoded using 16 bits",
2338 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_callsites16_size);
2339 mono_counters_register ("GC Call Sites encoded using 32 bits",
2340 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.gc_callsites32_size);
2342 mono_counters_register ("GC Map slots (all)",
2343 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.all_slots);
2344 mono_counters_register ("GC Map slots (ref)",
2345 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.ref_slots);
2346 mono_counters_register ("GC Map slots (noref)",
2347 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.noref_slots);
2348 mono_counters_register ("GC Map slots (pin)",
2349 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.pin_slots);
2351 mono_counters_register ("GC TLS Data size",
2352 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.tlsdata_size);
2354 mono_counters_register ("Stack space scanned (all)",
2355 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_stacks);
2356 mono_counters_register ("Stack space scanned (native)",
2357 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_native);
2358 mono_counters_register ("Stack space scanned (other)",
2359 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_other);
2360 mono_counters_register ("Stack space scanned (using GC Maps)",
2361 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned);
2362 mono_counters_register ("Stack space scanned (precise)",
2363 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_precisely);
2364 mono_counters_register ("Stack space scanned (pin)",
2365 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_conservatively);
2366 mono_counters_register ("Stack space scanned (pin registers)",
2367 MONO_COUNTER_GC | MONO_COUNTER_INT, &stats.scanned_registers);
2370 #else
2372 void
2373 mini_gc_init (void)
2377 static void
2378 mini_gc_init_gc_map (MonoCompile *cfg)
2382 void
2383 mini_gc_create_gc_map (MonoCompile *cfg)
2387 void
2388 mini_gc_set_slot_type_from_fp (MonoCompile *cfg, int slot_offset, GCSlotType type)
2392 void
2393 mini_gc_set_slot_type_from_cfa (MonoCompile *cfg, int slot_offset, GCSlotType type)
2397 #endif
2399 /*
2400 * mini_gc_init_cfg:
2401 *
2402 * Set GC specific options in CFG.
2403 */
2404 void
2405 mini_gc_init_cfg (MonoCompile *cfg)
2407 if (mono_gc_is_moving ()) {
2408 cfg->disable_ref_noref_stack_slot_share = TRUE;
2409 cfg->gen_write_barriers = TRUE;
2412 mini_gc_init_gc_map (cfg);
2413 }
2415 /*
2416 * Problems with the current code:
2417 * - the stack walk is slow
2418 * - vtypes/refs used in EH regions are treated conservatively
2419 * - once this code is finished, less pinning will be done, which causes problems because
2420 * we promote all surviving objects to old-gen.
2421 * - the unwind code can't handle a method stopped inside a finally region: it assumes the caller is
2422 * another method, but in reality it is either the exception handling code or the CALL_HANDLER opcode.
2423 * This manifests as "Unable to find ip offset x in callsite list" assertions.
2424 * - the unwind code also can't handle frames which are in the epilog, since the unwind info is not
2425 * precise there.
2426 */
2428 /*
2429 * Ideas for creating smaller GC maps:
2430 * - remove empty columns from the bitmaps. This requires adding a mask bit array for
2431 * each bitmap (see the sketch after this comment).
2432 * - merge reg and stack slot bitmaps, so the unused bits at the end of the reg bitmap are
2433 * not wasted.
2434 * - if the bitmap width is not a multiple of 8, the remaining bits are wasted.
2435 * - group ref and non-ref stack slots together in mono_allocate_stack_slots ().
2436 * - add an index for the callsite table so that each entry can be encoded as a 1 byte difference
2437 * from an index entry.
2438 */
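/*
 * Hedged sketch of the first idea above (removing empty bitmap columns with a mask bit
 * array); this is not part of the current implementation and only illustrates the lookup
 * it would require.  The mask has one bit per slot; a column is stored in the compact
 * bitmap only if its mask bit is set, and its compact index is the number of mask bits
 * set below it.
 */
#if 0
static gboolean
example_slot_is_ref_compact (guint8 *mask, guint8 *compact_bitmap, int compact_width, int callsite_index, int slot)
{
	int i, col = 0;

	if (!(mask [slot / 8] & (1 << (slot % 8))))
		/* Column was all-NOREF and is not stored */
		return FALSE;
	for (i = 0; i < slot; ++i)
		if (mask [i / 8] & (1 << (i % 8)))
			col ++;
	return get_bit (compact_bitmap, compact_width, callsite_index, col) != 0;
}
#endif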