// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001  Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */

#include <config.h>

#include <stdio.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <private/gc_pmark.h>
#include <gc_gcj.h>

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

  // These aren't declared in any Boehm GC header.
  void GC_finalize_all (void);
  ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
}

// We must check for plausibility ourselves.
#define MAYBE_MARK(Obj, Top, Limit, Source, Exit)  \
  Top = GC_MARK_AND_PUSH ((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)
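// Note that the Exit label argument is unused by this definition of the
// macro; the per-call labels passed below (o1label, c3label, and so on)
// are effectively placeholders.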
// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static ptr_t *array_free_list;

// Lock used to protect access to Boehm's GC_enable/GC_disable functions.
static _Jv_Mutex_t disable_gc_mutex;
// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobject obj = (jobject) addr;

  // FIXME: if env is 1, this object was allocated through the debug
  // interface, and addr points to the beginning of the debug header.
  // In that case, we should really add the size of the header to addr.

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class
      // objects as pointer-free memory, and then mark it more
      // intelligently here.  We ensure that the ClassClass mark
      // descriptor forces invocation of this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.
      // For the incremental collector, we need to make sure that the
      // class object is written whenever any of the subobjects are
      // altered and may need rescanning.  This may be tricky during
      // construction, and this may not be the right way to do it with
      // incremental collection.
      // If we overflow the mark stack, we will rescan the class object,
      // so we should be OK.  The same applies if we redo the mark phase
      // because win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (ptr_t) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
      p = (ptr_t) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
      for (int i = 0; i < c->constants.size; ++i)
        {
          /* FIXME: We could make this more precise by using the tags -KKT */
          p = (ptr_t) c->constants.data[i].p;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
        }

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          p = (ptr_t) c->constants.tags;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
          p = (ptr_t) c->constants.data;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
          p = (ptr_t) c->vtable;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
        }
#endif

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (ptr_t) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);

      if (! c->isArray() && ! c->isPrimitive())
        {
          // Scan each method in the cases where `methods' really
          // points to a methods structure.
          for (int i = 0; i < c->method_count; ++i)
            {
              p = (ptr_t) c->methods[i].name;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm1label);
              p = (ptr_t) c->methods[i].signature;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm2label);

              // FIXME: `ncode' entry?

#ifdef INTERPRETER
              // The interpreter installs a heap-allocated
              // trampoline here, so we'll mark it.
              if (_Jv_IsInterpretedClass (c))
                {
                  p = (ptr_t) c->methods[i].ncode;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                              cm3label);
                }
#endif
            }
        }

      // Mark all the fields.
      p = (ptr_t) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
      for (int i = 0; i < c->field_count; ++i)
        {
          _Jv_Field *field = &c->fields[i];

#ifndef COMPACT_FIELDS
          p = (ptr_t) field->name;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
#endif
          p = (ptr_t) field->type;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);

          // For the interpreter, we also need to mark the memory
          // containing static members.
          if ((field->flags & java::lang::reflect::Modifier::STATIC))
            {
              p = (ptr_t) field->u.addr;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);

              // Also, if the static member is a reference, mark the
              // value it points to.  We check for isResolved since
              // marking can happen before memory is allocated for
              // static members.
              if (JvFieldIsRef (field) && field->isResolved())
                {
                  jobject val = *(jobject *) field->u.addr;
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              c, c8elabel);
                }
            }
        }

      p = (ptr_t) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
      p = (ptr_t) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
      for (int i = 0; i < c->interface_count; ++i)
        {
          p = (ptr_t) c->interfaces[i];
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
        }
      p = (ptr_t) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
      p = (ptr_t) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          _Jv_InterpClass *ic = (_Jv_InterpClass *) c;

          p = (ptr_t) ic->interpreted_methods;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);

          for (int i = 0; i < c->method_count; i++)
            {
              p = (ptr_t) ic->interpreted_methods[i];
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                          cFlabel);
            }

          p = (ptr_t) ic->field_initializers;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);
        }
#endif
    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              obj, elabel);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass();
        }
    }

  return mark_stack_ptr;
}
// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (ptr_t) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
    }

  return mark_stack_ptr;
}
// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious
// assumption, since another one could be registered first.  But the
// compiler also knows this, so in that case everything else will
// break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX, 0)

void *
_Jv_BuildGCDescr (jclass)
{
  /* FIXME: We should really look at the class and build the descriptor. */
  return (void *) (GCJ_DEFAULT_DESCR);
}
// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}
// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;
  const jsize min_heap_addr = 16 * 1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

#ifdef GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC (size);
#else
  if (size < min_heap_addr)
    obj = GC_MALLOC (size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}
/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects.  */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}
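// Trampoline registered with the collector for each finalizable object:
// the GC hands back the object and the client data we supplied, and we
// recover the libgcj finalizer function from the client data and invoke
// it on the object.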
static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
				  NULL, NULL);
}
void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}
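// Unlike _Jv_RunFinalizers, which only invokes finalizers that the
// collector has already queued, this is intended to run the finalizers
// of every registered object; GC_finalize_all is the routine declared
// by hand near the top of this file, since no Boehm header exports it.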
void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

void
_Jv_RunGC (void)
{
  GC_gcollect ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

// From Boehm's misc.c.
extern "C" void GC_enable ();
extern "C" void GC_disable ();

void
_Jv_DisableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_disable ();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

void
_Jv_EnableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_enable ();
  _Jv_MutexUnlock (&disable_gc_mutex);
}
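// Installed below as GC_oom_fn: when an allocation fails, throw the
// Java out-of-memory error instead of returning a null pointer.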
static void *
handle_out_of_memory (size_t)
{
  _Jv_ThrowNoMemory ();
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation and
  // marking.  FIXME: see above.
  array_free_list = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1)
						       * sizeof (ptr_t),
						       PTRFREE);
  memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));

  proc = GC_n_mark_procs++;
  GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;

  array_kind_x = GC_n_kinds++;
  GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
  GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
  GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
  GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
  GC_obj_kinds[array_kind_x].ok_init = TRUE;

  _Jv_MutexInit (&disable_gc_mutex);
}
#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
  0,				// class pointer
  (void *) (2 * sizeof (void *)),
				// descriptor; scan 2 words incl. vtable ptr.
				// Least significant bits must be zero to
				// identify this as a length descriptor.
  {0}				// First method
};

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}

// Ditto, but with a descriptor that causes the first two fields
// (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.
static _Jv_VTable trace_two_vtable =
{
  0,				// class pointer
  (void *) (3 * sizeof (void *)),
				// descriptor; scan 3 words incl. vtable ptr.
  {0}				// First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */
void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}
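// Register *objp as a weak pointer: the collector will clear it once
// the object it points to is found to be unreachable.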
void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}
jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}