// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <gc_config.h>

// Set GC_DEBUG before including gc.h!
#ifdef LIBGCJ_GC_DEBUG
# define GC_DEBUG
#endif

#include <gc_mark.h>
#include <gc_gcj.h>
#include <javaxfc.h>  // GC_finalize_all declaration.

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

  // From boehm's misc.c
  void GC_enable ();
  void GC_disable ();
}

#define MAYBE_MARK(Obj, Top, Limit, Source)  \
  Top=GC_MARK_AND_PUSH((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)
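
// GC_MARK_AND_PUSH pushes Obj onto the mark stack only if it appears to
// point into the collected heap, and returns the (possibly advanced) mark
// stack pointer; Source records where the pointer was found and is used by
// the collector for debugging output.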

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static void **array_free_list;

// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
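// The collector hands us the current mark stack pointer and limit; we must
// return the (possibly advanced) stack pointer, so each MAYBE_MARK below
// feeds its result back into mark_stack_ptr.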
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *) msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *) msl;

  if (env == (void *) 1) /* Object allocated with debug allocator.  */
    addr = (GC_PTR) GC_USR_PTR_FROM_BASE (addr);
  jobject obj = (jobject) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt->get_finalizer ()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  GC_PTR p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
# endif
  // Mark the object's class.
  p = (GC_PTR) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class objects
      // as pointerfree memory, and then mark it more intelligently here.
      // We ensure that the ClassClass mark descriptor forces invocation of
      // this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.  For the
      // incremental collector, we need to make sure that the class object is
      // written whenever any of the subobjects are altered and may need
      // rescanning.  This may be tricky during construction, and this may not
      // be the right way to do this with incremental collection.
      // If we overflow the mark stack, we will rescan the class object, so we
      // should be OK.  The same applies if we redo the mark phase because
      // win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (GC_PTR) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      for (int i = 0; i < c->constants.size; ++i)
        {
          /* FIXME: We could make this more precise by using the tags -KKT */
          p = (GC_PTR) c->constants.data[i].p;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
        }

      if (_Jv_IsInterpretedClass (c))
        {
          p = (GC_PTR) c->constants.tags;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
          p = (GC_PTR) c->constants.data;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
        }

      // The vtable might be allocated even for compiled code.
      p = (GC_PTR) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (GC_PTR) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
        return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
        {
          // Scan each method in the cases where `methods' really
          // points to a methods structure.
          for (int i = 0; i < c->method_count; ++i)
            {
              p = (GC_PTR) c->methods[i].name;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
              p = (GC_PTR) c->methods[i].signature;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

              // Note that we don't have to mark each individual throw
              // separately, as these are stored in the constant pool.
              p = (GC_PTR) c->methods[i].throws;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
            }
        }

      // Mark all the fields.
      p = (GC_PTR) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      for (int i = 0; i < c->field_count; ++i)
        {
          _Jv_Field *field = &c->fields[i];

          p = (GC_PTR) field->name;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
          p = (GC_PTR) field->type;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

          // For the interpreter, we also need to mark the memory
          // containing static members.
          if ((field->flags & java::lang::reflect::Modifier::STATIC))
            {
              p = (GC_PTR) field->u.addr;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

              // Also, if the static member is a reference, mark the
              // value pointed to.  We check for isResolved since
              // marking can happen before memory is allocated for
              // static members.
              if (JvFieldIsRef (field) && field->isResolved())
                {
                  jobject val = *(jobject *) field->u.addr;
                  p = (GC_PTR) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
                }
            }
        }

      p = (GC_PTR) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      for (int i = 0; i < c->interface_count; ++i)
        {
          p = (GC_PTR) c->interfaces[i];
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
        }
      p = (GC_PTR) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The dispatch tables can be allocated at runtime.
      p = (GC_PTR) c->ancestors;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      if (c->idt)
        {
          p = (GC_PTR) c->idt;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

          if (c->isInterface())
            {
              p = (GC_PTR) c->idt->iface.ioffsets;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c->idt);
            }
          else if (! c->isPrimitive())
            {
              // This field is only valid for ordinary classes.
              p = (GC_PTR) c->idt->cls.itable;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c->idt);
            }
        }

      p = (GC_PTR) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->hack_signers;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->aux_info;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      if (_Jv_IsInterpretedClass (c) && c->aux_info)
        {
          _Jv_InterpClass *ic = (_Jv_InterpClass *) c->aux_info;

          p = (GC_PTR) ic->interpreted_methods;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

          for (int i = 0; i < c->method_count; i++)
            {
              // The interpreter installs a heap-allocated trampoline
              // here, so we'll mark it.
              p = (GC_PTR) c->methods[i].ncode;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

              using namespace java::lang::reflect;

              // Mark the direct-threaded code.  Note a subtlety here:
              // when we add Miranda methods to a class, we don't
              // resize its interpreted_methods array.  If we try to
              // reference one of these methods, we may crash.
              // However, we know these are all abstract, and we know
              // that abstract methods have nothing useful in this
              // array.  So, we skip all abstract methods to avoid the
              // problem.  FIXME: this is pretty obscure; it may be
              // better to add a method to the execution engine and
              // resize the arrays.
              if ((c->methods[i].accflags & Modifier::ABSTRACT) != 0)
                continue;

              p = (GC_PTR) ic->interpreted_methods[i];
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

              if ((c->methods[i].accflags & Modifier::NATIVE) != 0)
                {
                  _Jv_JNIMethod *jm
                    = (_Jv_JNIMethod *) ic->interpreted_methods[i];
                  if (jm)
                    {
                      p = (GC_PTR) jm->jni_arg_types;
                      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, p);
                    }
                }
              else
                {
                  _Jv_InterpMethod *im
                    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
                  if (im)
                    {
                      p = (GC_PTR) im->prepared;
                      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);
                    }
                }
            }

          p = (GC_PTR) ic->field_initializers;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);
        }
    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (GC_PTR) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass();
        }
    }

  return mark_stack_ptr;
}

// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *) msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *) msl;

  if (env == (void *) 1) /* Object allocated with debug allocator.  */
    addr = (void *) GC_USR_PTR_FROM_BASE (addr);
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt->get_finalizer ()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  GC_PTR p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
# endif
  // Mark the object's class.
  p = (GC_PTR) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt->clas));

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (GC_PTR) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
    }

  return mark_stack_ptr;
}

// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious assumption,
// since another one could be registered first.  But the compiler also
// knows this, so in that case everything else will break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
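
// GC_MAKE_PROC encodes a mark-procedure descriptor: instead of scanning a
// bitmap, the collector calls the mark procedure registered at the given
// index (here the index reserved for gcj) to trace the object.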

void *
_Jv_BuildGCDescr(jclass self)
{
  jlong desc = 0;
  jint bits_per_word = CHAR_BIT * sizeof (void *);

  // Note: for now we only consider a bitmap mark descriptor.  We
  // could also handle the case where the first N fields of a type are
  // references.  However, this is not very likely to be used by many
  // classes, and it is easier to compute things this way.

  // The vtable pointer.
  desc |= 1ULL << (bits_per_word - 1);
#ifndef JV_HASH_SYNCHRONIZATION
  // The sync_info field.
  desc |= 1ULL << (bits_per_word - 2);
#endif

  for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
    {
      jfieldID field = JvGetFirstInstanceField(klass);
      int count = JvNumInstanceFields(klass);

      for (int i = 0; i < count; ++i)
        {
          if (field->isRef())
            {
              unsigned int off = field->getOffset();
              // If we run into a weird situation, we bail.
              if (off % sizeof (void *) != 0)
                return (void *) (GCJ_DEFAULT_DESCR);
              off /= sizeof (void *);
              // If we find a field outside the range of our bitmap,
              // fall back to the procedure marker.  The bottom 2 bits
              // are reserved.
              if (off >= (unsigned) bits_per_word - 2)
                return (void *) (GCJ_DEFAULT_DESCR);
              desc |= 1ULL << (bits_per_word - off - 1);
            }

          field = field->getNextField();
        }
    }

  // For bitmap mark type, bottom bits are 01.
  desc |= 1;
  // Bogus warning avoidance (on many platforms).
  return (void *) (unsigned long) desc;
}

// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}

#ifdef LIBGCJ_GC_DEBUG

void *
_Jv_AllocObj (jsize size, jclass klass)
{
  return GC_GCJ_MALLOC (size, klass->vtable);
}

void *
_Jv_AllocPtrFreeObj (jsize size, jclass klass)
{
#ifdef JV_HASH_SYNCHRONIZATION
  void *obj = GC_MALLOC_ATOMIC(size);
  *((_Jv_VTable **) obj) = klass->vtable;
#else
  void *obj = GC_GCJ_MALLOC(size, klass->vtable);
#endif
  return obj;
}

#endif /* LIBGCJ_GC_DEBUG */
// In the non-debug case, the above two functions are defined
// as inline functions in boehm-gc.h.  In the debug case we
// really want to take advantage of the definitions in gc_gcj.h.

// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;

#ifdef LIBGCJ_GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}

/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects.  */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}

static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
                                  NULL, NULL);
}

void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

static void *
handle_out_of_memory (size_t)
{
  _Jv_ThrowNoMemory ();
}

static void
gcj_describe_type_fn (void *obj, char *out_buf)
{
  _Jv_VTable *dt = *(_Jv_VTable **) obj;

  if (! dt /* Shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jclass klass = dt->clas;
  if (! klass /* shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jstring name = klass->getName();
  size_t len = name->length();
  if (len >= GC_TYPE_DESCR_LEN)
    len = GC_TYPE_DESCR_LEN - 1;
  JvGetStringUTFRegion (name, 0, len, out_buf);
  out_buf[len] = '\0';
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  // We always use mark proc descriptor 0, since the compiler knows
  // about it.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);
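  // GC_init_gcj_malloc registers _Jv_MarkObj as the reserved mark procedure
  // and sets up the gcj object kinds (GC_gcj_kind and its debug variant),
  // whose objects carry a vtable pointer holding the mark descriptor.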

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation and
  // marking.
  array_free_list = GC_new_free_list();
  proc = GC_new_proc((GC_mark_proc)_Jv_MarkArray);
  array_kind_x = GC_new_kind(array_free_list, GC_MAKE_PROC (proc, 0), 0, 1);
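  // The trailing GC_new_kind arguments are the kind's default descriptor
  // (here a mark-proc descriptor built with GC_MAKE_PROC), whether to add
  // the object size to that descriptor (no), and whether to clear newly
  // allocated objects (yes).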

  // Arrange to have the GC print Java class names in backtraces, etc.
  GC_register_describe_type_fn(GC_gcj_kind, gcj_describe_type_fn);
  GC_register_describe_type_fn(GC_gcj_debug_kind, gcj_describe_type_fn);
}

#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
    0,                          // class pointer
    (void *)(2 * sizeof(void *)),
                                // descriptor; scan 2 words incl. vtable ptr.
                                // Least significant bits must be zero to
                                // identify this as a length descriptor
    {0}                         // first method
};
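
// A descriptor whose low two bits are zero is a simple length descriptor:
// the collector just scans that many bytes at the start of the object for
// pointers, with no bitmap or mark procedure involved.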

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}

// Ditto for two words: only the first two fields (beyond the fake vtable
// pointer) are traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0,                            // class pointer
  (void *)(3 * sizeof(void *)),
                                // descriptor; scan 3 words incl. vtable ptr.
  {0}                           // first method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */

void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  // This test helps to ensure that we meet a precondition of
  // GC_general_register_disappearing_link, viz. "Obj must be a
  // pointer to the first word of an object we allocated."
  if (GC_base(*objp) == (void *)*objp)
    GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}