// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003  Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */

#include <config.h>

#include <stdio.h>
#include <limits.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <private/gc_pmark.h>
#include <gc_gcj.h>

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

// These aren't declared in any Boehm GC header.
void GC_finalize_all (void);
ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
}

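// MAYBE_MARK pushes Obj onto the collector's mark stack (bounded by
// Limit) via GC_MARK_AND_PUSH when it looks like a pointer into the GC
// heap, recording Source as the referencing location.  The Exit label
// argument is not used by this expansion.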
#define MAYBE_MARK(Obj, Top, Limit, Source, Exit)  \
  Top = GC_MARK_AND_PUSH ((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static ptr_t *array_free_list;

// Lock used to protect access to Boehm's GC_enable/GC_disable functions.
static _Jv_Mutex_t disable_gc_mutex;


// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobject obj = (jobject) addr;

  // FIXME: if env is 1, this object was allocated through the debug
  // interface, and addr points to the beginning of the debug header.
  // In that case, we should really add the size of the header to addr.

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class
      // objects as pointerfree memory, and then mark it more
      // intelligently here.  We ensure that the ClassClass mark
      // descriptor forces invocation of this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.
      // For the incremental collector, we need to make sure that the
      // class object is written whenever any of the subobjects are
      // altered and may need rescanning.  This may be tricky during
      // construction, and this may not be the right way to do this with
      // incremental collection.
      // If we overflow the mark stack, we will rescan the class object,
      // so we should be OK.  The same applies if we redo the mark phase
      // because win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (ptr_t) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
      p = (ptr_t) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
      for (int i = 0; i < c->constants.size; ++i)
        {
          /* FIXME: We could make this more precise by using the tags -KKT */
          p = (ptr_t) c->constants.data[i].p;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
        }

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          p = (ptr_t) c->constants.tags;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
          p = (ptr_t) c->constants.data;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
          p = (ptr_t) c->vtable;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
        }
#endif

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (ptr_t) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
        return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
        {
          // Scan each method in the cases where `methods' really
          // points to a methods structure.
          for (int i = 0; i < c->method_count; ++i)
            {
              p = (ptr_t) c->methods[i].name;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm1label);
              p = (ptr_t) c->methods[i].signature;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm2label);
            }
        }

      // Mark all the fields.
      p = (ptr_t) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
      for (int i = 0; i < c->field_count; ++i)
        {
          _Jv_Field *field = &c->fields[i];

#ifndef COMPACT_FIELDS
          p = (ptr_t) field->name;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
#endif
          p = (ptr_t) field->type;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);

          // For the interpreter, we also need to mark the memory
          // containing static members.
          if ((field->flags & java::lang::reflect::Modifier::STATIC))
            {
              p = (ptr_t) field->u.addr;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);

              // Also, if the static member is a reference, mark the
              // value pointed to.  We check for isResolved since
              // marking can happen before memory is allocated for
              // static members.
              if (JvFieldIsRef (field) && field->isResolved())
                {
                  jobject val = *(jobject *) field->u.addr;
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              c, c8elabel);
                }
            }
        }

      p = (ptr_t) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
      p = (ptr_t) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
      for (int i = 0; i < c->interface_count; ++i)
        {
          p = (ptr_t) c->interfaces[i];
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
        }
      p = (ptr_t) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
      p = (ptr_t) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);
      p = (ptr_t) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cPlabel);

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          _Jv_InterpClass *ic = (_Jv_InterpClass *) c;

          p = (ptr_t) ic->interpreted_methods;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);

          for (int i = 0; i < c->method_count; i++)
            {
              p = (ptr_t) ic->interpreted_methods[i];
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                          cFlabel);

              // Mark the direct-threaded code.
              if ((c->methods[i].accflags
                   & java::lang::reflect::Modifier::NATIVE) == 0)
                {
                  _Jv_InterpMethod *im
                    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
                  if (im)
                    {
                      p = (ptr_t) im->prepared;
                      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                                  cFlabel);
                    }
                }

              // The interpreter installs a heap-allocated trampoline
              // here, so we'll mark it.
              p = (ptr_t) c->methods[i].ncode;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm3label);
            }

          p = (ptr_t) ic->field_initializers;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);
        }
#endif
    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              obj, elabel);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass ();
        }
    }

  return mark_stack_ptr;
}

// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (ptr_t) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
    }

  return mark_stack_ptr;
}

// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious
// assumption, since another one could be registered first.  But the
// compiler also knows this, so in that case everything else will
// break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
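// (GC_MAKE_PROC builds a descriptor that tells the collector to mark the
// object by calling the mark procedure registered at the given index,
// rather than by interpreting a bitmap or length descriptor.)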

void *
_Jv_BuildGCDescr (jclass self)
{
  jlong desc = 0;

  // Note: for now we only consider a bitmap mark descriptor.  We
  // could also handle the case where the first N fields of a type are
  // references.  However, this is not very likely to be used by many
  // classes, and it is easier to compute things this way.

  for (jclass klass = self; klass != NULL; klass = klass->getSuperclass ())
    {
      jfieldID field = JvGetFirstInstanceField (klass);
      int count = JvNumInstanceFields (klass);

      for (int i = 0; i < count; ++i)
        {
          if (field->isRef ())
            {
              unsigned int off = field->getOffset ();
              // If we run into a weird situation, we bail.
              if (off % sizeof (void *) != 0)
                return (void *) (GCJ_DEFAULT_DESCR);
              off /= sizeof (void *);
              // Bottom 2 bits are reserved.
              off += 2;
              // If we find a field outside the range of our bitmap,
              // fall back to the procedure marker.
              if (off >= CHAR_BIT * sizeof (void *))
                return (void *) (GCJ_DEFAULT_DESCR);
              desc |= 1ULL << off;
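              // For instance, on a 32-bit target a reference field at
              // byte offset 8 yields off = 8/4 + 2 = 4, so bit 4 of the
              // descriptor is set.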
            }

          field = field->getNextField ();
        }
    }

  // For bitmap mark type, bottom bits are 01.
  desc |= 1;
  // Bogus warning avoidance (on many platforms).
  return (void *) (unsigned long) desc;
}

// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}

// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

#ifdef GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC (size);
#else
  if (size < min_heap_addr)
    obj = GC_MALLOC (size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}

/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects.  */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}

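// Adapter with the signature the collector expects from a finalization
// callback: client_data carries the libgcj finalizer function registered
// below, which we simply invoke on the object being finalized.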
static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
                                  NULL, NULL);
}

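// Thin wrappers exposing the collector's finalization, collection, and
// heap-statistics entry points to the rest of the runtime.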
void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

void
_Jv_RunGC (void)
{
  GC_gcollect ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

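// GC_expand_hp takes the number of additional bytes to acquire, so we
// only grow the heap by the difference when the requested initial size
// exceeds what the collector has already obtained.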
void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

// From Boehm's misc.c.
extern "C" void GC_enable ();
extern "C" void GC_disable ();

void
_Jv_DisableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_disable ();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

void
_Jv_EnableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_enable ();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

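// Out-of-memory handler installed as GC_oom_fn in _Jv_InitGC below: it
// throws OutOfMemoryError instead of letting the allocator return NULL,
// so it never actually returns.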
static void *
handle_out_of_memory (size_t)
{
  _Jv_ThrowNoMemory ();
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that
  // we stash in the class vtable.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation
  // and marking.  FIXME: see above.
  array_free_list
    = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1) * sizeof (ptr_t),
                                         PTRFREE);
  memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));

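  // Register _Jv_MarkArray as a new mark procedure and create a new
  // object `kind' that uses it.  Objects of this kind are allocated from
  // array_free_list, marked by calling the registered procedure (the
  // GC_MAKE_PROC descriptor), and cleared on allocation (ok_init).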
  proc = GC_n_mark_procs++;
  GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;

  array_kind_x = GC_n_kinds++;
  GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
  GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
  GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
  GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
  GC_obj_kinds[array_kind_x].ok_init = TRUE;

  _Jv_MutexInit (&disable_gc_mutex);
}

#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
  0,                            // class pointer
  (void *) (2 * sizeof (void *)),
                                // descriptor; scan 2 words incl. vtable ptr.
                                // Least significant bits must be zero to
                                // identify this as a length descriptor.
  {0}                           // First method
};

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}

// Ditto, but the first two fields (beyond the fake vtable pointer)
// are traced.  Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0,                            // class pointer
  (void *) (3 * sizeof (void *)),
                                // descriptor; scan 3 words incl. vtable ptr.
  {0}                           // First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */

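// Switch the collector to on-demand finalization: finalizers run only
// when _Jv_RunFinalizers asks for them, and `notifier' is invoked when
// newly finalizable objects are ready.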
void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

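// Ask the collector to clear *objp once the object it currently points
// to is found to be unreachable.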
void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}