1 // boehm.cc - interface between libjava and Boehm GC.
3 /* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
4 Free Software Foundation
6 This file is part of libgcj.
8 This software is copyrighted work licensed under the terms of the
9 Libgcj License. Please consult the file "LIBGCJ_LICENSE" for
20 #include <java/lang/Class.h>
21 #include <java/lang/reflect/Modifier.h>
22 #include <java-interp.h>
24 // More nastiness: the GC wants to define TRUE and FALSE. We don't
25 // need the Java definitions (themselves a hack), so we undefine them.
31 #include <gc_config.h>
33 // Set GC_DEBUG before including gc.h!
34 #ifdef LIBGCJ_GC_DEBUG
40 #include <javaxfc.h> // GC_finalize_all declaration.
42 #ifdef THREAD_LOCAL_ALLOC
43 # define GC_REDIRECT_TO_LOCAL
44 # include <gc_local_alloc.h>
47 // From boehm's misc.c
// Push Obj onto the GC mark stack (bounded by Top/Limit) and update Top.
// `Source' is the object containing the reference; the collector uses it
// for debugging/diagnostics.  Thin wrapper over the collector's
// GC_MARK_AND_PUSH from gc_mark.h.
52 #define MAYBE_MARK(Obj, Top, Limit, Source) \
53 Top=GC_MARK_AND_PUSH((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)
55 // `kind' index used when allocating Java arrays.
// Object `kind' obtained from GC_new_kind during GC initialization below;
// object arrays allocated with this kind are marked by _Jv_MarkArray.
56 static int array_kind_x
;
58 // Freelist used for Java arrays.
// Free list handed to GC_new_kind for the array kind above.
59 static void **array_free_list
;
63 // This is called by the GC during the mark phase. It marks a Java
64 // object. We use `void *' arguments and return, and not what the
65 // Boehm GC wants, to avoid pollution in our headers.
// Mark procedure for ordinary Java objects, invoked by the Boehm GC
// during its mark phase.  `addr' is the object being marked, msp/msl are
// the current mark stack pointer and limit, and `env' is the environment
// word registered with the mark proc.  Returns the (possibly advanced)
// mark stack pointer.  Plain `void *' parameters/return are used so GC
// internals do not leak into libjava's public headers.
// NOTE(review): this extraction is incomplete -- the return type, the
// function braces, and the declaration of the scratch pointer `p'
// (a GC_PTR) are not visible here; confirm against the full boehm.cc.
67 _Jv_MarkObj (void *addr
, void *msp
, void *msl
, void *env
)
// Recover typed mark-stack arguments from the raw pointers.
69 struct GC_ms_entry
*mark_stack_ptr
= (struct GC_ms_entry
*)msp
;
70 struct GC_ms_entry
*mark_stack_limit
= (struct GC_ms_entry
*)msl
;
// env == 1 flags an object that came from the GC's debug allocator;
// translate its base pointer to the user-visible pointer.
72 if (env
== (void *)1) /* Object allocated with debug allocator. */
73 addr
= (GC_PTR
)GC_USR_PTR_FROM_BASE(addr
);
74 jobject obj
= (jobject
) addr
;
// The first word of every Java object is its vtable pointer.
76 _Jv_VTable
*dt
= *(_Jv_VTable
**) addr
;
77 // The object might not yet have its vtable set, or it might
78 // really be an object on the freelist. In either case, the vtable slot
79 // will either be 0, or it will point to a cleared object.
80 // This assumes Java objects have size at least 3 words,
81 // including the header. But this should remain true, since this
82 // should only be used with debugging allocation or with large objects.
83 if (__builtin_expect (! dt
|| !(dt
-> get_finalizer()), false))
84 return mark_stack_ptr
;
85 jclass klass
= dt
->clas
;
88 # ifndef JV_HASH_SYNCHRONIZATION
89 // Every object has a sync_info pointer.
90 p
= (GC_PTR
) obj
->sync_info
;
91 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
93 // Mark the object's class.
// NOTE(review): the assignment `p = (GC_PTR) klass;' that should precede
// this MAYBE_MARK appears to have been elided from this extraction.
95 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
// Class objects get special field-by-field treatment: much of the memory
// they reference is allocated pointer-free and must be marked explicitly.
97 if (__builtin_expect (klass
== &java::lang::Class::class$
, false))
99 // Currently we allocate some of the memory referenced from class objects
100 // as pointerfree memory, and then mark it more intelligently here.
101 // We ensure that the ClassClass mark descriptor forces invocation of
103 // Correctness of this is subtle, but it looks OK to me for now. For the incremental
104 // collector, we need to make sure that the class object is written whenever
105 // any of the subobjects are altered and may need rescanning. This may be tricky
106 // during construction, and this may not be the right way to do this with
107 // incremental collection.
108 // If we overflow the mark stack, we will rescan the class object, so we should
109 // be OK. The same applies if we redo the mark phase because win32 unmapped part
110 // of our root set. - HB
111 jclass c
= (jclass
) addr
;
// Mark the class's name and superclass references.
113 p
= (GC_PTR
) c
->name
;
114 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
115 p
= (GC_PTR
) c
->superclass
;
116 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Mark every entry of the constant pool.
117 for (int i
= 0; i
< c
->constants
.size
; ++i
)
119 /* FIXME: We could make this more precise by using the tags -KKT */
120 p
= (GC_PTR
) c
->constants
.data
[i
].p
;
121 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// For interpreted classes the tag and data arrays themselves are
// heap-allocated and must be marked too.
125 if (_Jv_IsInterpretedClass (c
))
127 p
= (GC_PTR
) c
->constants
.tags
;
128 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
129 p
= (GC_PTR
) c
->constants
.data
;
130 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
134 // The vtable might be allocated even for compiled code.
135 p
= (GC_PTR
) c
->vtable
;
136 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
138 // If the class is an array, then the methods field holds a
139 // pointer to the element class. If the class is primitive,
140 // then the methods field holds a pointer to the array class.
141 p
= (GC_PTR
) c
->methods
;
142 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
144 // The vtable might have been set, but the rest of the class
145 // could still be uninitialized. If this is the case, then
146 // c.isArray will SEGV. We check for this, and if it is the
147 // case we just return.
148 if (__builtin_expect (c
->name
== NULL
, false))
149 return mark_stack_ptr
;
// Ordinary (non-array, non-primitive) classes: walk the method table and
// the field table.
151 if (! c
->isArray() && ! c
->isPrimitive())
153 // Scan each method in the cases where `methods' really
154 // points to a methods structure.
155 for (int i
= 0; i
< c
->method_count
; ++i
)
157 p
= (GC_PTR
) c
->methods
[i
].name
;
158 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
159 p
= (GC_PTR
) c
->methods
[i
].signature
;
160 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
162 // Note that we don't have to mark each individual throw
163 // separately, as these are stored in the constant pool.
164 p
= (GC_PTR
) c
->methods
[i
].throws
;
165 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
169 // Mark all the fields.
170 p
= (GC_PTR
) c
->fields
;
171 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
172 for (int i
= 0; i
< c
->field_count
; ++i
)
174 _Jv_Field
* field
= &c
->fields
[i
];
// Each field's name and type descriptors are heap references.
176 p
= (GC_PTR
) field
->name
;
177 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
178 p
= (GC_PTR
) field
->type
;
179 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
181 // For the interpreter, we also need to mark the memory
182 // containing static members
183 if ((field
->flags
& java::lang::reflect::Modifier::STATIC
))
185 p
= (GC_PTR
) field
->u
.addr
;
186 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
188 // also, if the static member is a reference,
189 // mark also the value pointed to. We check for isResolved
190 // since marking can happen before memory is allocated for
192 // Note that field->u.addr may be null if the class c is
193 // JV_STATE_LOADED but not JV_STATE_PREPARED (initialized).
194 if (JvFieldIsRef (field
) && p
&& field
->isResolved())
196 jobject val
= *(jobject
*) p
;
// NOTE(review): the assignment `p = (GC_PTR) val;' that should precede
// this MAYBE_MARK appears elided in this extraction.
198 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Remaining per-class references: vtable, interface list, loader,
// dispatch tables, and auxiliary data.
203 p
= (GC_PTR
) c
->vtable
;
204 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
205 p
= (GC_PTR
) c
->interfaces
;
206 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
207 for (int i
= 0; i
< c
->interface_count
; ++i
)
209 p
= (GC_PTR
) c
->interfaces
[i
];
210 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
212 p
= (GC_PTR
) c
->loader
;
213 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
215 // The dispatch tables can be allocated at runtime.
216 p
= (GC_PTR
) c
->ancestors
;
217 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// NOTE(review): the assignment `p = (GC_PTR) c->idt;' that should
// precede this MAYBE_MARK appears elided in this extraction.
221 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// The idt union is interpreted differently for interfaces vs. ordinary
// classes, hence the two branches below.
223 if (c
->isInterface())
225 p
= (GC_PTR
) c
->idt
->iface
.ioffsets
;
226 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
->idt
);
228 else if (! c
->isPrimitive())
230 // This field is only valid for ordinary classes.
231 p
= (GC_PTR
) c
->idt
->cls
.itable
;
232 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
->idt
);
236 p
= (GC_PTR
) c
->arrayclass
;
237 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
238 p
= (GC_PTR
) c
->protectionDomain
;
239 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
240 p
= (GC_PTR
) c
->hack_signers
;
241 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
242 p
= (GC_PTR
) c
->aux_info
;
243 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
// Interpreter-specific class data hangs off aux_info.
246 if (_Jv_IsInterpretedClass (c
) && c
->aux_info
)
248 _Jv_InterpClass
* ic
= (_Jv_InterpClass
*) c
->aux_info
;
250 p
= (GC_PTR
) ic
->interpreted_methods
;
251 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
253 p
= (GC_PTR
) ic
->source_file_name
;
254 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
256 for (int i
= 0; i
< c
->method_count
; i
++)
258 // The interpreter installs a heap-allocated trampoline
259 // here, so we'll mark it.
260 p
= (GC_PTR
) c
->methods
[i
].ncode
;
261 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, c
);
263 using namespace java::lang::reflect
;
265 // Mark the direct-threaded code. Note a subtlety here:
266 // when we add Miranda methods to a class, we don't
267 // resize its interpreted_methods array. If we try to
268 // reference one of these methods, we may crash.
269 // However, we know these are all abstract, and we know
270 // that abstract methods have nothing useful in this
271 // array. So, we skip all abstract methods to avoid the
272 // problem. FIXME: this is pretty obscure, it may be
273 // better to add a methods to the execution engine and
// NOTE(review): the body of this abstract-method check (presumably a
// `continue;') is not visible in this extraction.
275 if ((c
->methods
[i
].accflags
& Modifier::ABSTRACT
) != 0)
278 p
= (GC_PTR
) ic
->interpreted_methods
[i
];
279 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
// Native (JNI) methods carry a heap-allocated argument-type blob.
281 if ((c
->methods
[i
].accflags
& Modifier::NATIVE
) != 0)
// NOTE(review): the left-hand side of this assignment (presumably the
// declaration `_Jv_JNIMethod *jm') is elided in this extraction.
284 = (_Jv_JNIMethod
*) ic
->interpreted_methods
[i
];
287 p
= (GC_PTR
) jm
->jni_arg_types
;
288 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, p
);
// NOTE(review): the left-hand side of this assignment (presumably the
// declaration `_Jv_InterpMethod *im') is elided in this extraction.
294 = (_Jv_InterpMethod
*) ic
->interpreted_methods
[i
];
297 p
= (GC_PTR
) im
->line_table
;
298 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
299 p
= (GC_PTR
) im
->prepared
;
300 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
305 p
= (GC_PTR
) ic
->field_initializers
;
306 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, ic
);
314 // NOTE: each class only holds information about the class
315 // itself. So we must do the marking for the entire inheritance
316 // tree in order to mark all fields. FIXME: what about
317 // interfaces? We skip Object here, because Object only has a
318 // sync_info, and we handled that earlier.
319 // Note: occasionally `klass' can be null. For instance, this
320 // can happen if a GC occurs between the point where an object
321 // is allocated and where the vtbl slot is set.
322 while (klass
&& klass
!= &java::lang::Object::class$
)
324 jfieldID field
= JvGetFirstInstanceField (klass
);
325 jint max
= JvNumInstanceFields (klass
);
327 for (int i
= 0; i
< max
; ++i
)
329 if (JvFieldIsRef (field
))
331 jobject val
= JvGetObjectField (obj
, field
);
// NOTE(review): the assignment `p = (GC_PTR) val;' that should precede
// this MAYBE_MARK appears elided in this extraction.
333 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, obj
);
335 field
= field
->getNextField ();
337 klass
= klass
->getSuperclass();
341 return mark_stack_ptr
;
344 // This is called by the GC during the mark phase. It marks a Java
345 // array (of objects). We use `void *' arguments and return, and not
346 // what the Boehm GC wants, to avoid pollution in our headers.
// Mark procedure for Java object arrays, invoked by the Boehm GC during
// its mark phase.  Marks the array's sync_info (non hash-sync builds),
// its class, and every element.  Returns the updated mark stack pointer.
// NOTE(review): the return type, braces, and the declaration of the
// scratch pointer `p' are not visible in this extraction.
348 _Jv_MarkArray (void *addr
, void *msp
, void *msl
, void *env
)
// Recover typed mark-stack arguments from the raw pointers.
350 struct GC_ms_entry
*mark_stack_ptr
= (struct GC_ms_entry
*)msp
;
351 struct GC_ms_entry
*mark_stack_limit
= (struct GC_ms_entry
*)msl
;
// env == 1 flags an object from the debug allocator; translate to the
// user-visible pointer.
353 if (env
== (void *)1) /* Object allocated with debug allocator. */
354 addr
= (void *)GC_USR_PTR_FROM_BASE(addr
);
355 jobjectArray array
= (jobjectArray
) addr
;
// First word is the vtable pointer; null / cleared means freelist entry.
357 _Jv_VTable
*dt
= *(_Jv_VTable
**) addr
;
358 // Assumes size >= 3 words. That's currently true since arrays have
359 // a vtable, sync pointer, and size. If the sync pointer goes away,
360 // we may need to round up the size.
361 if (__builtin_expect (! dt
|| !(dt
-> get_finalizer()), false))
362 return mark_stack_ptr
;
363 jclass klass
= dt
->clas
;
366 # ifndef JV_HASH_SYNCHRONIZATION
367 // Every object has a sync_info pointer.
368 p
= (GC_PTR
) array
->sync_info
;
369 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, array
);
371 // Mark the object's class.
// NOTE(review): the assignment `p = (GC_PTR) klass;' that should precede
// this MAYBE_MARK appears elided in this extraction.
373 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, &(dt
-> clas
));
// Mark each element of the array.
375 for (int i
= 0; i
< JvGetArrayLength (array
); ++i
)
377 jobject obj
= elements (array
)[i
];
// NOTE(review): the assignment `p = (GC_PTR) obj;' that should precede
// this MAYBE_MARK appears elided in this extraction.
379 MAYBE_MARK (p
, mark_stack_ptr
, mark_stack_limit
, array
);
382 return mark_stack_ptr
;
385 // Generate a GC marking descriptor for a class.
387 // We assume that the gcj mark proc has index 0. This is a dubious assumption,
388 // since another one could be registered first. But the compiler also
389 // knows this, so in that case everything else will break, too.
390 #define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
// Build a GC marking descriptor for class `self': a bitmap with one bit
// per pointer-sized word (most significant bit = first word), where a set
// bit tells the collector that word may hold a reference.  Falls back to
// the procedure-based descriptor (GCJ_DEFAULT_DESCR) whenever a field
// does not fit the bitmap.  Returned as `void *' for storage in the
// vtable.
// NOTE(review): the return type, braces, and the declaration /
// zero-initialization of the accumulator `desc' are not visible in this
// extraction.
393 _Jv_BuildGCDescr(jclass self
)
396 jint bits_per_word
= CHAR_BIT
* sizeof (void *);
398 // Note: for now we only consider a bitmap mark descriptor. We
399 // could also handle the case where the first N fields of a type are
400 // references. However, this is not very likely to be used by many
401 // classes, and it is easier to compute things this way.
403 // The vtable pointer.
404 desc
|= 1ULL << (bits_per_word
- 1);
405 #ifndef JV_HASH_SYNCHRONIZATION
406 // The sync_info field.
407 desc
|= 1ULL << (bits_per_word
- 2);
// Walk the whole inheritance chain: each class describes only its own
// instance fields.
410 for (jclass klass
= self
; klass
!= NULL
; klass
= klass
->getSuperclass())
412 jfieldID field
= JvGetFirstInstanceField(klass
);
413 int count
= JvNumInstanceFields(klass
);
415 for (int i
= 0; i
< count
; ++i
)
// NOTE(review): a reference-field filter (e.g. `if (field->isRef())')
// appears to have been elided before this point in the extraction.
419 unsigned int off
= field
->getOffset();
420 // If we run into a weird situation, we bail.
421 if (off
% sizeof (void *) != 0)
422 return (void *) (GCJ_DEFAULT_DESCR
);
423 off
/= sizeof (void *);
424 // If we find a field outside the range of our bitmap,
425 // fall back to procedure marker. The bottom 2 bits are
427 if (off
>= (unsigned) bits_per_word
- 2)
428 return (void *) (GCJ_DEFAULT_DESCR
);
429 desc
|= 1ULL << (bits_per_word
- off
- 1);
432 field
= field
->getNextField();
436 // For bitmap mark type, bottom bits are 01.
// NOTE(review): the corresponding `desc |= 1;' statement appears elided
// in this extraction.
438 // Bogus warning avoidance (on many platforms).
439 return (void *) (unsigned long) desc
;
442 // Allocate some space that is known to be pointer-free.
// Allocate `size' bytes of pointer-free (atomic) memory from the GC heap.
// The collector will not scan this memory for references.
// NOTE(review): the return type, braces, and the explicit zeroing /
// `return r;' described by the comment below are not visible in this
// extraction.
444 _Jv_AllocBytes (jsize size
)
446 void *r
= GC_MALLOC_ATOMIC (size
);
447 // We have to explicitly zero memory here, as the GC doesn't
448 // guarantee that PTRFREE allocations are zeroed. Note that we
449 // don't have to do this for other allocation types because we set
450 // the `ok_init' flag in the type descriptor.
455 #ifdef LIBGCJ_GC_DEBUG
// Debug-build allocator for an ordinary Java object: uses the gcj-kind
// allocation so the descriptor stashed in the vtable drives marking.
// (The non-debug variant lives inline in boehm-gc.h, per the comment
// following the #endif below.)
// NOTE(review): return type and braces are not visible in this
// extraction.
458 _Jv_AllocObj (jsize size
, jclass klass
)
460 return GC_GCJ_MALLOC (size
, klass
->vtable
);
// Debug-build allocator for a Java object with no reference fields.
// With hash synchronization the body really is pointer-free, so atomic
// allocation is used and the vtable pointer is stored by hand; otherwise
// it falls back to gcj-kind allocation (the sync_info slot must be
// scanned).
// NOTE(review): the return type, braces, the `#else', the `return obj;'
// and the closing `#endif' are not visible in this extraction.
464 _Jv_AllocPtrFreeObj (jsize size
, jclass klass
)
466 #ifdef JV_HASH_SYNCHRONIZATION
467 void * obj
= GC_MALLOC_ATOMIC(size
);
468 *((_Jv_VTable
**) obj
) = klass
->vtable
;
470 void * obj
= GC_GCJ_MALLOC(size
, klass
->vtable
;
475 #endif /* LIBGCJ_GC_DEBUG */
476 // In the non-debug case, the above two functions are defined
477 // as inline functions in boehm-gc.h. In the debug case we
478 // really want to take advantage of the definitions in gc_gcj.h.
480 // Allocate space for a new Java array.
481 // Used only for arrays of objects.
// Allocate space for a new Java array of objects.  In debug builds the
// whole array is scanned conservatively; otherwise small arrays are
// scanned conservatively (their length word cannot be mistaken for a
// pointer) and large ones use the custom array kind so _Jv_MarkArray
// skips the header.
// NOTE(review): the return type, braces, the declaration of `obj', the
// `else', the `#else'/`#endif' pairing, and the final `return obj;' are
// not visible in this extraction.
483 _Jv_AllocArray (jsize size
, jclass klass
)
487 #ifdef LIBGCJ_GC_DEBUG
488 // There isn't much to lose by scanning this conservatively.
489 // If we didn't, the mark proc would have to understand that
490 // it needed to skip the header.
491 obj
= GC_MALLOC(size
);
493 const jsize min_heap_addr
= 16*1024;
494 // A heuristic. If size is less than this value, the size
495 // stored in the array can't possibly be misinterpreted as
496 // a pointer. Thus we lose nothing by scanning the object
497 // completely conservatively, since no misidentification can
500 if (size
< min_heap_addr
)
501 obj
= GC_MALLOC(size
);
503 obj
= GC_generic_malloc (size
, array_kind_x
);
// Install the vtable pointer in the first word of the new array.
505 *((_Jv_VTable
**) obj
) = klass
->vtable
;
509 /* Allocate space for a new non-Java object, which does not have the usual
510 Java object header but may contain pointers to other GC'ed objects. */
// Allocate `size' bytes for a non-Java object: no Java object header,
// but the memory is scanned by the collector, so it may hold references
// to other GC'd objects.
// NOTE(review): return type and braces are not visible in this
// extraction.
512 _Jv_AllocRawObj (jsize size
)
514 return (void *) GC_MALLOC (size
);
// Trampoline registered with the collector as the finalization callback:
// unpacks the libgcj finalizer function from `client_data' and the
// object from `obj'.
// NOTE(review): the return type, braces, and the actual invocation
// (presumably `(*fn) (jobj);') are not visible in this extraction.
518 call_finalizer (GC_PTR obj
, GC_PTR client_data
)
520 _Jv_FinalizerFunc
*fn
= (_Jv_FinalizerFunc
*) client_data
;
521 jobject jobj
= (jobject
) obj
;
// Register `meth' to be run (via the call_finalizer trampoline) when
// `object' becomes unreachable.  Uses the no-order variant, so finalizers
// on cyclically-referencing objects can still run.
// NOTE(review): the return type, braces, and the trailing arguments of
// this call (the line continues past the final comma) are not visible in
// this extraction.
527 _Jv_RegisterFinalizer (void *object
, _Jv_FinalizerFunc
*meth
)
529 GC_REGISTER_FINALIZER_NO_ORDER (object
, call_finalizer
, (GC_PTR
) meth
,
// Run any finalizers the collector has queued as ready to execute.
// NOTE(review): the return type and braces are not visible in this
// extraction.
534 _Jv_RunFinalizers (void)
536 GC_invoke_finalizers ();
540 _Jv_RunAllFinalizers (void)
// Return the current total size of the GC heap, in bytes.
// NOTE(review): the return type and braces are not visible in this
// extraction.
552 _Jv_GCTotalMemory (void)
554 return GC_get_heap_size ();
// Return the number of free bytes currently in the GC heap.
// NOTE(review): the return type and braces are not visible in this
// extraction.
558 _Jv_GCFreeMemory (void)
560 return GC_get_free_bytes ();
// Grow the GC heap up to `size' bytes by expanding it by the difference
// from the current heap size.
// NOTE(review): the return type, braces, and the guard that presumably
// precedes GC_expand_hp (e.g. `if (size > current)') are not visible in
// this extraction; as written, `size - current' could underflow if the
// heap is already larger than `size' -- confirm against the full source.
564 _Jv_GCSetInitialHeapSize (size_t size
)
566 size_t current
= GC_get_heap_size ();
568 GC_expand_hp (size
- current
);
// Cap the GC heap at `size' bytes.
// NOTE(review): the return type and braces are not visible in this
// extraction.
572 _Jv_GCSetMaximumHeapSize (size_t size
)
574 GC_set_max_heap_size ((GC_word
) size
);
589 static void * handle_out_of_memory(size_t)
// Describe-type callback registered with the collector (see the
// GC_register_describe_type_fn calls below): writes a human-readable
// class name for `obj' into `out_buf' so GC diagnostics/backtraces can
// show Java class names.  Writes "GCJ (bad)" when the vtable or class
// pointer is missing.
// NOTE(review): the return type, braces, the `return's after the error
// strcpy calls, and the final NUL-termination of out_buf are not visible
// in this extraction.
595 gcj_describe_type_fn(void *obj
, char *out_buf
)
597 _Jv_VTable
*dt
= *(_Jv_VTable
**) obj
;
599 if (! dt
/* Shouldn't happen */)
601 strcpy(out_buf
, "GCJ (bad)");
604 jclass klass
= dt
->clas
;
605 if (!klass
/* shouldn't happen */)
607 strcpy(out_buf
, "GCJ (bad)");
// Copy the class name, truncated to the collector's description buffer.
610 jstring name
= klass
-> getName();
611 size_t len
= name
-> length();
612 if (len
>= GC_TYPE_DESCR_LEN
) len
= GC_TYPE_DESCR_LEN
- 1;
613 JvGetStringUTFRegion (name
, 0, len
, out_buf
);
// --- GC initialization statements ---
// NOTE(review): the enclosing function header (presumably _Jv_InitGC)
// and the declaration of `proc' are not visible in this extraction.
622 // Ignore pointers that do not point to the start of an object.
623 GC_all_interior_pointers
= 0;
625 // Configure the collector to use the bitmap marking descriptors that we
626 // stash in the class vtable.
627 // We always use mark proc descriptor 0, since the compiler knows
629 GC_init_gcj_malloc (0, (void *) _Jv_MarkObj
);
631 // Cause an out of memory error to be thrown from the allocators,
632 // instead of returning 0. This is cheaper than checking on allocation.
633 GC_oom_fn
= handle_out_of_memory
;
// Enable Java-style (no topological order) finalization semantics.
635 GC_java_finalization
= 1;
637 // We use a different mark procedure for object arrays. This code
638 // configures a different object `kind' for object array allocation and
640 array_free_list
= GC_new_free_list();
641 proc
= GC_new_proc((GC_mark_proc
)_Jv_MarkArray
);
642 array_kind_x
= GC_new_kind(array_free_list
, GC_MAKE_PROC (proc
, 0), 0, 1);
644 // Arrange to have the GC print Java class names in backtraces, etc.
645 GC_register_describe_type_fn(GC_gcj_kind
, gcj_describe_type_fn
);
646 GC_register_describe_type_fn(GC_gcj_debug_kind
, gcj_describe_type_fn
);
649 #ifdef JV_HASH_SYNCHRONIZATION
650 // Allocate an object with a fake vtable pointer, which causes only
651 // the first field (beyond the fake vtable pointer) to be traced.
652 // Eventually this should probably be generalized.
// Fake vtable whose mark descriptor is a simple length (2 words),
// causing the collector to trace only the vtable slot plus one field.
// NOTE(review): the initializer's other members (e.g. the null class
// pointer) and the closing brace are not visible in this extraction.
654 static _Jv_VTable trace_one_vtable
= {
656 (void *)(2 * sizeof(void *)),
657 // descriptor; scan 2 words incl. vtable ptr.
658 // Least significant bits must be zero to
659 // identify this as a length descriptor
// Allocate an object traced via the fake vtable above: only its first
// field (beyond the vtable slot) is scanned for pointers.
// NOTE(review): return type and braces are not visible in this
// extraction.
664 _Jv_AllocTraceOne (jsize size
/* includes vtable slot */)
666 return GC_GCJ_MALLOC (size
, &trace_one_vtable
);
669 // Ditto for two words.
670 // the first field (beyond the fake vtable pointer) to be traced.
671 // Eventually this should probably be generalized.
// Fake vtable with a 3-word length descriptor: the collector traces the
// vtable slot plus the first two fields.
// NOTE(review): the initializer's other members and closing brace are
// not visible in this extraction.
673 static _Jv_VTable trace_two_vtable
=
676 (void *)(3 * sizeof(void *)),
677 // descriptor; scan 3 words incl. vtable ptr.
// Allocate an object whose first two fields (beyond the vtable slot) are
// traced, via the fake vtable above.
// NOTE(review): return type and braces are not visible in this
// extraction.
682 _Jv_AllocTraceTwo (jsize size
/* includes vtable slot */)
684 return GC_GCJ_MALLOC (size
, &trace_two_vtable
);
687 #endif /* JV_HASH_SYNCHRONIZATION */
// Switch the collector to on-demand finalization and install `notifier'
// to be called when finalizers become ready, so a dedicated Java
// finalizer thread can run them instead of the allocating thread.
// NOTE(review): the return type and braces are not visible in this
// extraction.
690 _Jv_GCInitializeFinalizers (void (*notifier
) (void))
692 GC_finalize_on_demand
= 1;
693 GC_finalizer_notifier
= notifier
;
// Register *objp as a weak (disappearing) link: the collector clears it
// when the referenced object is collected.
// NOTE(review): the return type, braces, and the precondition check the
// comment below refers to (presumably comparing GC_base (*objp) against
// *objp) are not visible in this extraction.
697 _Jv_GCRegisterDisappearingLink (jobject
*objp
)
699 // This test helps to ensure that we meet a precondition of
700 // GC_general_register_disappearing_link, viz. "Obj must be a
701 // pointer to the first word of an object we allocated."
703 GC_general_register_disappearing_link ((GC_PTR
*) objp
, (GC_PTR
) *objp
);
// Policy hook: whether the referent of a soft reference may be reclaimed.
// NOTE(review): the return type, braces, and the return statement
// (per the comment, presumably `return true;') are not visible in this
// extraction.
707 _Jv_GCCanReclaimSoftReference (jobject
)
709 // For now, always reclaim soft references. FIXME.