1 // boehm.cc - interface between libjava and Boehm GC.
3 /* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation
6 This file is part of libgcj.
8 This software is copyrighted work licensed under the terms of the
9 Libgcj License. Please consult the file "LIBGCJ_LICENSE" for details.
20 #include <java/lang/Class.h>
21 #include <java/lang/reflect/Modifier.h>
22 #include <java-interp.h>
24 // More nastiness: the GC wants to define TRUE and FALSE. We don't
25 // need the Java definitions (themselves a hack), so we undefine them.
31 #include <gc_config.h>
33 // Set GC_DEBUG before including gc.h!
34 #ifdef LIBGCJ_GC_DEBUG
40 #include <javaxfc.h> // GC_finalize_all declaration.
42 #ifdef THREAD_LOCAL_ALLOC
43 # define GC_REDIRECT_TO_LOCAL
44 # include <gc_local_alloc.h>
47 // From boehm's misc.c
52 #define MAYBE_MARK(Obj, Top, Limit, Source) \
53 Top=GC_MARK_AND_PUSH((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)
// GC object `kind' registered in _Jv_InitGC; arrays allocated with this
// kind are scanned by _Jv_MarkArray rather than the default mark routine.
55 // `kind' index used when allocating Java arrays.
56 static int array_kind_x
;
// Per-kind free list handed to GC_new_kind when array_kind_x is created.
58 // Freelist used for Java arrays.
59 static void **array_free_list
;
63 // This is called by the GC during the mark phase. It marks a Java
64 // object. We use `void *' arguments and return, and not what the
65 // Boehm GC wants, to avoid pollution in our headers.
// GC mark procedure for ordinary Java objects.  Pushes every GC-visible
// reference held by the object at `addr' onto the collector's mark stack
// (bounded by msp/msl) and returns the updated mark-stack pointer.
// `env' == 1 means the object came from the debug allocator, so the user
// data starts past a debug header.
// NOTE(review): this extract is missing several physical source lines
// (e.g. the `void *' return type, braces, and the `GC_PTR p;' scratch
// declaration); comments below describe intent only — verify against the
// complete file.
67 _Jv_MarkObj (void *addr
, void *msp
, void *msl
, void *env
)
69 struct GC_ms_entry
*mark_stack_ptr
= (struct GC_ms_entry
*)msp
;
70 struct GC_ms_entry
*mark_stack_limit
= (struct GC_ms_entry
*)msl
;
// Debug-allocated objects: step past the debug header to the user data.
72 if (env
== (void *)1) /* Object allocated with debug allocator. */
73 addr
= (GC_PTR
)GC_USR_PTR_FROM_BASE(addr
);
74 jobject obj
= (jobject
) addr
;
// Load the vtable pointer stored in the first word of the object.
76 _Jv_VTable
*dt
= *(_Jv_VTable
**) addr
;
77 // The object might not yet have its vtable set, or it might
78 // really be an object on the freelist. In either case, the vtable slot
79 // will either be 0, or it will point to a cleared object.
80 // This assumes Java objects have size at least 3 words,
81 // including the header. But this should remain true, since this
82 // should only be used with debugging allocation or with large objects.
83 if (__builtin_expect (! dt
|| !(dt
-> get_finalizer()), false))
84 return mark_stack_ptr
;
85 jclass klass
= dt
->clas
;
88 # ifndef JV_HASH_SYNCHRONIZATION
89 // Every object has a sync_info pointer.
90 p
= (GC_PTR
) obj
->sync_info
;
91 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
93 // Mark the object's class.
// NOTE(review): the assignment loading the class pointer into `p'
// (original line 94) is missing from this extract.
95 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
// Special case: instances of java.lang.Class reference many GC-allocated
// sub-structures (name, constant pool, methods, fields, dispatch tables)
// that must be marked by hand here.
97 if (__builtin_expect (klass
== &java::lang::Class::class$
, false))
99 // Currently we allocate some of the memory referenced from class objects
100 // as pointerfree memory, and then mark it more intelligently here.
101 // We ensure that the ClassClass mark descriptor forces invocation of
103 // Correctness of this is subtle, but it looks OK to me for now. For the incremental
104 // collector, we need to make sure that the class object is written whenever
105 // any of the subobjects are altered and may need rescanning. This may be tricky
106 // during construction, and this may not be the right way to do this with
107 // incremental collection.
108 // If we overflow the mark stack, we will rescan the class object, so we should
109 // be OK. The same applies if we redo the mark phase because win32 unmapped part
110 // of our root set. - HB
111 jclass c
= (jclass
) addr
;
// Mark the class's name and superclass references.
113 p
= (GC_PTR
) c
->name
;
114 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
115 p
= (GC_PTR
) c
->superclass
;
116 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Mark every entry of the constant pool.
117 for (int i
= 0; i
< c
->constants
.size
; ++i
)
119 /* FIXME: We could make this more precise by using the tags -KKT */
120 p
= (GC_PTR
) c
->constants
.data
[i
].p
;
121 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// For interpreted classes the constant-pool tag and data arrays are
// themselves heap-allocated, so mark those arrays too.
125 if (_Jv_IsInterpretedClass (c
))
127 p
= (GC_PTR
) c
->constants
.tags
;
128 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
129 p
= (GC_PTR
) c
->constants
.data
;
130 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
134 // The vtable might be allocated even for compiled code.
135 p
= (GC_PTR
) c
->vtable
;
136 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
138 // If the class is an array, then the methods field holds a
139 // pointer to the element class. If the class is primitive,
140 // then the methods field holds a pointer to the array class.
141 p
= (GC_PTR
) c
->methods
;
142 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
144 // The vtable might have been set, but the rest of the class
145 // could still be uninitialized. If this is the case, then
146 // c.isArray will SEGV. We check for this, and if it is the
147 // case we just return.
148 if (__builtin_expect (c
->name
== NULL
, false))
149 return mark_stack_ptr
;
// Only non-array, non-primitive classes have a real method table.
151 if (! c
->isArray() && ! c
->isPrimitive())
153 // Scan each method in the cases where `methods' really
154 // points to a methods structure.
155 for (int i
= 0; i
< c
->method_count
; ++i
)
157 p
= (GC_PTR
) c
->methods
[i
].name
;
158 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
159 p
= (GC_PTR
) c
->methods
[i
].signature
;
160 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
162 // Note that we don't have to mark each individual throw
163 // separately, as these are stored in the constant pool.
164 p
= (GC_PTR
) c
->methods
[i
].throws
;
165 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
169 // Mark all the fields.
170 p
= (GC_PTR
) c
->fields
;
171 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
172 for (int i
= 0; i
< c
->field_count
; ++i
)
174 _Jv_Field
* field
= &c
->fields
[i
];
176 p
= (GC_PTR
) field
->name
;
177 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
178 p
= (GC_PTR
) field
->type
;
179 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
181 // For the interpreter, we also need to mark the memory
182 // containing static members
183 if ((field
->flags
& java::lang::reflect::Modifier::STATIC
))
185 p
= (GC_PTR
) field
->u
.addr
;
186 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
188 // also, if the static member is a reference,
189 // mark also the value pointed to. We check for isResolved
190 // since marking can happen before memory is allocated for
192 // Note that field->u.addr may be null if the class c is
193 // JV_STATE_LOADED but not JV_STATE_PREPARED (initialized).
194 // Note also that field->type could be NULL in some
195 // situations, for instance if the class has state
197 if (field
->type
&& JvFieldIsRef (field
)
198 && p
&& field
->isResolved())
200 jobject val
= *(jobject
*) p
;
// NOTE(review): the assignment of `val' into `p' (original line 201)
// is missing from this extract.
202 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Mark the remaining class sub-structures: vtable, interface table,
// class loader, and runtime-allocated dispatch tables.
207 p
= (GC_PTR
) c
->vtable
;
208 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
209 p
= (GC_PTR
) c
->interfaces
;
210 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
211 for (int i
= 0; i
< c
->interface_count
; ++i
)
213 p
= (GC_PTR
) c
->interfaces
[i
];
214 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
216 p
= (GC_PTR
) c
->loader
;
217 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
219 // The dispatch tables can be allocated at runtime.
220 p
= (GC_PTR
) c
->ancestors
;
221 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// NOTE(review): the statement loading c->idt into `p' (original line
// ~224) is missing from this extract.
225 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// The idt union is interpreted differently for interfaces vs. classes.
227 if (c
->isInterface())
229 p
= (GC_PTR
) c
->idt
->iface
.ioffsets
;
230 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
->idt
);
232 else if (! c
->isPrimitive())
234 // This field is only valid for ordinary classes.
235 p
= (GC_PTR
) c
->idt
->cls
.itable
;
236 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
->idt
);
240 p
= (GC_PTR
) c
->arrayclass
;
241 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
242 p
= (GC_PTR
) c
->protectionDomain
;
243 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
244 p
= (GC_PTR
) c
->hack_signers
;
245 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
246 p
= (GC_PTR
) c
->aux_info
;
247 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Interpreted classes keep extra heap-allocated state in aux_info.
250 if (_Jv_IsInterpretedClass (c
) && c
->aux_info
)
252 _Jv_InterpClass
* ic
= (_Jv_InterpClass
*) c
->aux_info
;
254 p
= (GC_PTR
) ic
->interpreted_methods
;
255 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
257 p
= (GC_PTR
) ic
->source_file_name
;
258 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
260 for (int i
= 0; i
< c
->method_count
; i
++)
262 // The interpreter installs a heap-allocated trampoline
263 // here, so we'll mark it.
264 p
= (GC_PTR
) c
->methods
[i
].ncode
;
265 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
267 using namespace java::lang::reflect
;
269 // Mark the direct-threaded code. Note a subtlety here:
270 // when we add Miranda methods to a class, we don't
271 // resize its interpreted_methods array. If we try to
272 // reference one of these methods, we may crash.
273 // However, we know these are all abstract, and we know
274 // that abstract methods have nothing useful in this
275 // array. So, we skip all abstract methods to avoid the
276 // problem. FIXME: this is pretty obscure, it may be
277 // better to add a method to the execution engine and
279 if ((c
->methods
[i
].accflags
& Modifier::ABSTRACT
) != 0)
282 p
= (GC_PTR
) ic
->interpreted_methods
[i
];
283 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
// Native (JNI) methods: mark the heap-allocated argument-type array.
// NOTE(review): the declaration of `jm' (original lines ~286-287) is
// missing from this extract.
285 if ((c
->methods
[i
].accflags
& Modifier::NATIVE
) != 0)
288 = (_Jv_JNIMethod
*) ic
->interpreted_methods
[i
];
291 p
= (GC_PTR
) jm
->jni_arg_types
;
292 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, p
);
// Bytecode methods: mark the line table and prepared (direct-threaded)
// code.  NOTE(review): the declaration of `im' (original lines
// ~296-297) is missing from this extract.
298 = (_Jv_InterpMethod
*) ic
->interpreted_methods
[i
];
301 p
= (GC_PTR
) im
->line_table
;
302 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
303 p
= (GC_PTR
) im
->prepared
;
304 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
309 p
= (GC_PTR
) ic
->field_initializers
;
310 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
318 // NOTE: each class only holds information about the class
319 // itself. So we must do the marking for the entire inheritance
320 // tree in order to mark all fields. FIXME: what about
321 // interfaces? We skip Object here, because Object only has a
322 // sync_info, and we handled that earlier.
323 // Note: occasionally `klass' can be null. For instance, this
324 // can happen if a GC occurs between the point where an object
325 // is allocated and where the vtbl slot is set.
326 while (klass
&& klass
!= &java::lang::Object::class$
)
328 jfieldID field
= JvGetFirstInstanceField (klass
);
329 jint max
= JvNumInstanceFields (klass
);
// Mark each reference-typed instance field of this class level.
331 for (int i
= 0; i
< max
; ++i
)
333 if (JvFieldIsRef (field
))
335 jobject val
= JvGetObjectField (obj
, field
);
// NOTE(review): the assignment of `val' into `p' (original line ~336)
// is missing from this extract.
337 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
339 field
= field
->getNextField ();
341 klass
= klass
->getSuperclass();
345 return mark_stack_ptr
;
348 // This is called by the GC during the mark phase. It marks a Java
349 // array (of objects). We use `void *' arguments and return, and not
350 // what the Boehm GC wants, to avoid pollution in our headers.
// GC mark procedure for Java object arrays (registered via array_kind_x
// in _Jv_InitGC).  Pushes the array's class and every element reference
// onto the mark stack and returns the updated mark-stack pointer.
// NOTE(review): as with _Jv_MarkObj, several physical lines (return
// type, braces, `GC_PTR p;' declaration) are missing from this extract.
352 _Jv_MarkArray (void *addr
, void *msp
, void *msl
, void *env
)
354 struct GC_ms_entry
*mark_stack_ptr
= (struct GC_ms_entry
*)msp
;
355 struct GC_ms_entry
*mark_stack_limit
= (struct GC_ms_entry
*)msl
;
// Debug-allocated arrays: step past the debug header.
357 if (env
== (void *)1) /* Object allocated with debug allocator. */
358 addr
= (void *)GC_USR_PTR_FROM_BASE(addr
);
359 jobjectArray array
= (jobjectArray
) addr
;
361 _Jv_VTable
*dt
= *(_Jv_VTable
**) addr
;
362 // Assumes size >= 3 words. That's currently true since arrays have
363 // a vtable, sync pointer, and size. If the sync pointer goes away,
364 // we may need to round up the size.
365 if (__builtin_expect (! dt
|| !(dt
-> get_finalizer()), false))
366 return mark_stack_ptr
;
367 jclass klass
= dt
->clas
;
370 # ifndef JV_HASH_SYNCHRONIZATION
371 // Every object has a sync_info pointer.
372 p
= (GC_PTR
) array
->sync_info
;
373 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, array
);
375 // Mark the object's class.
// NOTE(review): the assignment loading `klass' into `p' (original line
// ~376) is missing from this extract.
377 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, &(dt
-> clas
));
// Mark every element reference in the array.
379 for (int i
= 0; i
< JvGetArrayLength (array
); ++i
)
381 jobject obj
= elements (array
)[i
];
// NOTE(review): the assignment of `obj' into `p' (original line ~382)
// is missing from this extract.
383 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, array
);
386 return mark_stack_ptr
;
389 // Generate a GC marking descriptor for a class.
391 // We assume that the gcj mark proc has index 0. This is a dubious assumption,
392 // since another one could be registered first. But the compiler also
393 // knows this, so in that case everything else will break, too.
394 #define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
// Build a GC bitmap mark descriptor for class `self': one bit per
// pointer-sized word of the instance, set for each reference field
// (plus the vtable and, without hash synchronization, sync_info slots).
// Falls back to GCJ_DEFAULT_DESCR (the mark-procedure descriptor) when a
// field is misaligned or lies beyond the bitmap's range.
// NOTE(review): the declaration/initialization of `desc' and the final
// tagging of the low bits are among the lines missing from this extract.
397 _Jv_BuildGCDescr(jclass self
)
400 jint bits_per_word
= CHAR_BIT
* sizeof (void *);
402 // Note: for now we only consider a bitmap mark descriptor. We
403 // could also handle the case where the first N fields of a type are
404 // references. However, this is not very likely to be used by many
405 // classes, and it is easier to compute things this way.
407 // The vtable pointer.
408 desc
|= 1ULL << (bits_per_word
- 1);
409 #ifndef JV_HASH_SYNCHRONIZATION
410 // The sync_info field.
411 desc
|= 1ULL << (bits_per_word
- 2);
// Walk the whole inheritance chain: each class only describes its own
// fields.
414 for (jclass klass
= self
; klass
!= NULL
; klass
= klass
->getSuperclass())
416 jfieldID field
= JvGetFirstInstanceField(klass
);
417 int count
= JvNumInstanceFields(klass
);
419 for (int i
= 0; i
< count
; ++i
)
423 unsigned int off
= field
->getOffset();
424 // If we run into a weird situation, we bail.
425 if (off
% sizeof (void *) != 0)
426 return (void *) (GCJ_DEFAULT_DESCR
);
427 off
/= sizeof (void *);
428 // If we find a field outside the range of our bitmap,
429 // fall back to procedure marker. The bottom 2 bits are
431 if (off
>= (unsigned) bits_per_word
- 2)
432 return (void *) (GCJ_DEFAULT_DESCR
);
433 desc
|= 1ULL << (bits_per_word
- off
- 1);
436 field
= field
->getNextField();
440 // For bitmap mark type, bottom bits are 01.
442 // Bogus warning avoidance (on many platforms).
443 return (void *) (unsigned long) desc
;
446 // Allocate some space that is known to be pointer-free.
// Allocate `size' bytes of pointer-free (atomic) memory that the GC will
// not scan.  NOTE(review): the explicit zeroing of `r' and the return
// statement described by the comment below are among the lines missing
// from this extract.
448 _Jv_AllocBytes (jsize size
)
450 void *r
= GC_MALLOC_ATOMIC (size
);
451 // We have to explicitly zero memory here, as the GC doesn't
452 // guarantee that PTRFREE allocations are zeroed. Note that we
453 // don't have to do this for other allocation types because we set
454 // the `ok_init' flag in the type descriptor.
459 #ifdef LIBGCJ_GC_DEBUG
// Debug-build allocator for ordinary Java objects: allocate with the
// gcj kind and install the class vtable pointer in the first word.
462 _Jv_AllocObj (jsize size
, jclass klass
)
464 return GC_GCJ_MALLOC (size
, klass
->vtable
);
// Debug-build allocator for objects containing no references.  With hash
// synchronization the body is atomic (unscanned) and the vtable pointer
// is stored by hand; otherwise sync_info must still be scanned, so the
// regular gcj allocation is used.
468 _Jv_AllocPtrFreeObj (jsize size
, jclass klass
)
470 #ifdef JV_HASH_SYNCHRONIZATION
471 void * obj
= GC_MALLOC_ATOMIC(size
);
472 *((_Jv_VTable
**) obj
) = klass
->vtable
;
474 void * obj
= GC_GCJ_MALLOC(size
, klass
->vtable
);
479 #endif /* LIBGCJ_GC_DEBUG */
480 // In the non-debug case, the above two functions are defined
481 // as inline functions in boehm-gc.h. In the debug case we
482 // really want to take advantage of the definitions in gc_gcj.h.
484 // Allocate space for a new Java array.
485 // Used only for arrays of objects.
// Allocate space for a Java array of objects.  Small arrays are scanned
// fully conservatively; larger ones use array_kind_x so _Jv_MarkArray
// can skip the header.  The class vtable pointer is stored in the first
// word.  NOTE(review): the declaration of `obj' and the return statement
// are among the lines missing from this extract.
487 _Jv_AllocArray (jsize size
, jclass klass
)
491 #ifdef LIBGCJ_GC_DEBUG
492 // There isn't much to lose by scanning this conservatively.
493 // If we didn't, the mark proc would have to understand that
494 // it needed to skip the header.
495 obj
= GC_MALLOC(size
);
497 const jsize min_heap_addr
= 16*1024;
498 // A heuristic. If size is less than this value, the size
499 // stored in the array can't possibly be misinterpreted as
500 // a pointer. Thus we lose nothing by scanning the object
501 // completely conservatively, since no misidentification can
504 if (size
< min_heap_addr
)
505 obj
= GC_MALLOC(size
);
// Large arrays: allocate with the custom array kind so the length word
// is never misread as a pointer.
507 obj
= GC_generic_malloc (size
, array_kind_x
);
509 *((_Jv_VTable
**) obj
) = klass
->vtable
;
513 /* Allocate space for a new non-Java object, which does not have the usual
514 Java object header but may contain pointers to other GC'ed objects. */
// Allocate a non-Java object: no Java object header, but the memory is
// scanned by the GC since it may contain pointers to other GC'ed objects.
516 _Jv_AllocRawObj (jsize size
)
518 return (void *) GC_MALLOC (size
);
// Trampoline invoked by the Boehm GC when an object becomes finalizable:
// recovers the libgcj finalizer function from `client_data' and the
// object pointer from `obj'.  NOTE(review): the line that actually calls
// (*fn)(jobj) is missing from this extract.
522 call_finalizer (GC_PTR obj
, GC_PTR client_data
)
524 _Jv_FinalizerFunc
*fn
= (_Jv_FinalizerFunc
*) client_data
;
525 jobject jobj
= (jobject
) obj
;
// Register `meth' to be run (via call_finalizer) when `object' becomes
// unreachable.  Unordered registration matches Java finalization
// semantics.  NOTE(review): the trailing arguments of the call (old
// finalizer out-parameters) are on lines missing from this extract.
531 _Jv_RegisterFinalizer (void *object
, _Jv_FinalizerFunc
*meth
)
533 GC_REGISTER_FINALIZER_NO_ORDER (object
, call_finalizer
, (GC_PTR
) meth
,
// Run any finalizers that the collector has already queued.
538 _Jv_RunFinalizers (void)
540 GC_invoke_finalizers ();
544 _Jv_RunAllFinalizers (void)
// Return the current total heap size, in bytes, as reported by the GC.
556 _Jv_GCTotalMemory (void)
558 return GC_get_heap_size ();
// Return the number of free bytes currently available in the GC heap.
562 _Jv_GCFreeMemory (void)
564 return GC_get_free_bytes ();
// Grow the heap to at least `size' bytes by expanding it by the
// difference from the current heap size.  NOTE(review): the guard that
// presumably skips expansion when size <= current is on a line missing
// from this extract — confirm against the complete file.
568 _Jv_GCSetInitialHeapSize (size_t size
)
570 size_t current
= GC_get_heap_size ();
572 GC_expand_hp (size
- current
);
// Cap the GC heap at `size' bytes.
576 _Jv_GCSetMaximumHeapSize (size_t size
)
578 GC_set_max_heap_size ((GC_word
) size
);
593 static void * handle_out_of_memory(size_t)
// Describe-type callback registered with the GC so that heap dumps and
// backtraces show Java class names.  Writes a NUL-terminated description
// of the object at `obj' into `out_buf' (capacity GC_TYPE_DESCR_LEN).
// Falls back to "GCJ (bad)" when the vtable or class pointer is bogus.
599 gcj_describe_type_fn(void *obj
, char *out_buf
)
601 _Jv_VTable
*dt
= *(_Jv_VTable
**) obj
;
603 if (! dt
/* Shouldn't happen */)
605 strcpy(out_buf
, "GCJ (bad)");
608 jclass klass
= dt
->clas
;
609 if (!klass
/* shouldn't happen */)
611 strcpy(out_buf
, "GCJ (bad)");
// Copy the class name, truncated to fit the GC's description buffer.
614 jstring name
= klass
-> getName();
615 size_t len
= name
-> length();
616 if (len
>= GC_TYPE_DESCR_LEN
) len
= GC_TYPE_DESCR_LEN
- 1;
617 JvGetStringUTFRegion (name
, 0, len
, out_buf
);
// NOTE(review): the enclosing function signature (presumably _Jv_InitGC)
// and the declaration of `proc' are on lines missing from this extract.
// This body configures the Boehm collector for libgcj at startup.
626 // Ignore pointers that do not point to the start of an object.
627 GC_all_interior_pointers
= 0;
629 // Configure the collector to use the bitmap marking descriptors that we
630 // stash in the class vtable.
631 // We always use mark proc descriptor 0, since the compiler knows
633 GC_init_gcj_malloc (0, (void *) _Jv_MarkObj
);
635 // Cause an out of memory error to be thrown from the allocators,
636 // instead of returning 0. This is cheaper than checking on allocation.
637 GC_oom_fn
= handle_out_of_memory
;
// Use Java-style (unordered, resurrecting) finalization semantics.
639 GC_java_finalization
= 1;
641 // We use a different mark procedure for object arrays. This code
642 // configures a different object `kind' for object array allocation and
644 array_free_list
= GC_new_free_list();
645 proc
= GC_new_proc((GC_mark_proc
)_Jv_MarkArray
);
646 array_kind_x
= GC_new_kind(array_free_list
, GC_MAKE_PROC (proc
, 0), 0, 1);
648 // Arrange to have the GC print Java class names in backtraces, etc.
649 GC_register_describe_type_fn(GC_gcj_kind
, gcj_describe_type_fn
);
650 GC_register_describe_type_fn(GC_gcj_debug_kind
, gcj_describe_type_fn
);
653 #ifdef JV_HASH_SYNCHRONIZATION
654 // Allocate an object with a fake vtable pointer, which causes only
655 // the first field (beyond the fake vtable pointer) to be traced.
656 // Eventually this should probably be generalized.
// Fake vtable whose GC descriptor is a length descriptor covering two
// words (vtable slot + one field), so only the first field is traced.
658 static _Jv_VTable trace_one_vtable
= {
660 (void *)(2 * sizeof(void *)),
661 // descriptor; scan 2 words incl. vtable ptr.
662 // Least significant bits must be zero to
663 // identify this as a length descriptor
// Allocate an object in which only the word after the (fake) vtable slot
// is traced by the collector.
668 _Jv_AllocTraceOne (jsize size
/* includes vtable slot */)
670 return GC_GCJ_MALLOC (size
, &trace_one_vtable
);
673 // Ditto for two words.
674 // the first field (beyond the fake vtable pointer) to be traced.
675 // Eventually this should probably be generalized.
// Fake vtable with a three-word length descriptor: vtable slot plus two
// traced fields.
677 static _Jv_VTable trace_two_vtable
=
680 (void *)(3 * sizeof(void *)),
681 // descriptor; scan 3 words incl. vtable ptr.
// Allocate an object in which only the two words after the (fake) vtable
// slot are traced by the collector.
686 _Jv_AllocTraceTwo (jsize size
/* includes vtable slot */)
688 return GC_GCJ_MALLOC (size
, &trace_two_vtable
);
691 #endif /* JV_HASH_SYNCHRONIZATION */
// Switch the GC to on-demand finalization and install `notifier' to be
// called when finalizers become runnable (so a Java finalizer thread can
// be woken instead of running finalizers inside the allocator).
694 _Jv_GCInitializeFinalizers (void (*notifier
) (void))
696 GC_finalize_on_demand
= 1;
697 GC_finalizer_notifier
= notifier
;
// Register *objp as a weak reference: the GC clears *objp when the
// referenced object becomes unreachable.  NOTE(review): the guard that
// presumably checks *objp is a valid heap pointer (per the comment) is
// on a line missing from this extract.
701 _Jv_GCRegisterDisappearingLink (jobject
*objp
)
703 // This test helps to ensure that we meet a precondition of
704 // GC_general_register_disappearing_link, viz. "Obj must be a
705 // pointer to the first word of an object we allocated."
707 GC_general_register_disappearing_link ((GC_PTR
*) objp
, (GC_PTR
) *objp
);
711 _Jv_GCCanReclaimSoftReference (jobject
)
713 // For now, always reclaim soft references. FIXME.