[official-gcc.git] / boehm-gc / gcj_mlc.c
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1999 by Hewlett-Packard Company. All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, July 31, 1995 5:02 pm PDT */
/*
 * This is an allocator interface tuned for gcj (the GNU static
 * java compiler).
 *
 * Each allocated object has a pointer in its first word to a vtable,
 * which for our purposes is simply a structure describing the type of
 * the object.
 * This descriptor structure contains a GC marking descriptor at offset
 * MARK_DESCR_OFFSET.
 *
 * It is hoped that this interface may also be useful for other systems,
 * possibly with some tuning of the constants.  But the immediate goal
 * is to get better gcj performance.
 *
 * We assume:
 *  1) We have an ANSI conforming C compiler.
 *  2) Counting on explicit initialization of this interface is OK.
 *  3) FASTLOCK is not a significant win.
 */
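/*
 * Illustrative usage sketch (editorial addition, not part of the original
 * source): how a client such as gcj might drive this interface.  The names
 * fake_vtable, my_mark_proc and the descriptor value are assumptions for
 * illustration only; a real client supplies its own vtable layout and mark
 * procedure.
 *
 *   struct fake_vtable {
 *     void * class_ptr;      // whatever the client keeps in the first slot
 *     GC_word mark_descr;    // read by the GC at offset MARK_DESCR_OFFSET
 *   };
 *   struct fake_vtable vt = { 0, 2 * sizeof(GC_word) };  // simple length descriptor
 *
 *   GC_init_gcj_malloc(0, (void *)my_mark_proc);      // register mark proc 0 once
 *   void **obj = GC_gcj_malloc(2 * sizeof(GC_word), &vt);
 *   // obj[0] now points to &vt; the remaining words are cleared.
 */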
#include "private/gc_pmark.h"
#include "gc_gcj.h"
#include "private/dbg_mlc.h"

#ifdef GC_GCJ_SUPPORT

GC_bool GC_gcj_malloc_initialized = FALSE;

int GC_gcj_kind;        /* Object kind for objects with descriptors    */
                        /* in "vtable".                                 */
int GC_gcj_debug_kind;  /* The kind of objects that is always marked   */
                        /* with a mark proc call.                       */

ptr_t * GC_gcjobjfreelist;
ptr_t * GC_gcjdebugobjfreelist;
/* Caller does not hold allocation lock. */
void GC_init_gcj_malloc(int mp_index, void * /* really GC_mark_proc */mp)
{
    register int i;
    GC_bool ignore_gcj_info;
    DCL_LOCK_STATE;

    GC_init();  /* In case it's not already done. */
    DISABLE_SIGNALS();
    LOCK();
    if (GC_gcj_malloc_initialized) {
      UNLOCK();
      ENABLE_SIGNALS();
      return;
    }
    GC_gcj_malloc_initialized = TRUE;
    ignore_gcj_info = (0 != GETENV("GC_IGNORE_GCJ_INFO"));
#   ifdef CONDPRINT
      if (GC_print_stats && ignore_gcj_info) {
        GC_printf0("Gcj-style type information is disabled!\n");
      }
#   endif
    GC_ASSERT(GC_mark_procs[mp_index] == (GC_mark_proc)0); /* unused */
    GC_mark_procs[mp_index] = (GC_mark_proc)mp;
    if (mp_index >= GC_n_mark_procs) ABORT("GC_init_gcj_malloc: bad index");
    /* Set up object kind gcj-style indirect descriptor. */
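    /* Editorial note (not in the original source): the descriptor      */
    /* built below for GC_gcj_kind is negative and tagged               */
    /* GC_DS_PER_OBJECT, which tells the marker to locate the real      */
    /* mark descriptor indirectly: follow the vtable pointer stored in  */
    /* the object's first word, then read the descriptor found at       */
    /* MARK_DESCR_OFFSET within that vtable.                            */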
      GC_gcjobjfreelist = (ptr_t *)GC_new_free_list_inner();
      if (ignore_gcj_info) {
          /* Use a simple length-based descriptor, thus forcing a fully */
          /* conservative scan.                                         */
          GC_gcj_kind = GC_new_kind_inner((void **)GC_gcjobjfreelist,
                                          (0 | GC_DS_LENGTH),
                                          TRUE, TRUE);
      } else {
          GC_gcj_kind = GC_new_kind_inner(
                          (void **)GC_gcjobjfreelist,
                          (((word)(-MARK_DESCR_OFFSET - GC_INDIR_PER_OBJ_BIAS))
                           | GC_DS_PER_OBJECT),
                          FALSE, TRUE);
      }
    /* Set up object kind for objects that require mark proc call. */
      if (ignore_gcj_info) {
          GC_gcj_debug_kind = GC_gcj_kind;
          GC_gcjdebugobjfreelist = GC_gcjobjfreelist;
      } else {
          GC_gcjdebugobjfreelist = (ptr_t *)GC_new_free_list_inner();
          GC_gcj_debug_kind = GC_new_kind_inner(
                                  (void **)GC_gcjdebugobjfreelist,
                                  GC_MAKE_PROC(mp_index,
                                               1 /* allocated with debug info */),
                                  FALSE, TRUE);
      }
    UNLOCK();
    ENABLE_SIGNALS();
}
ptr_t GC_clear_stack();

#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc_inner((word)lb, k))

#define GENERAL_MALLOC_IOP(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc_inner_ignore_off_page(lb, k))
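/* Editorial note (not in the original source): both macros pass the   */
/* result through GC_clear_stack(), which clears part of the recently  */
/* used stack so that stale pointer values left there are less likely  */
/* to keep dead objects reachable under conservative scanning.         */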
/* We need a mechanism to release the lock and invoke finalizers.      */
/* We don't really have an opportunity to do this on a rarely executed */
/* path on which the lock is not held.  Thus we check at a             */
/* rarely executed point at which it is safe to release the lock.      */
/* We do this even where we could just call GC_INVOKE_FINALIZERS,      */
/* since it's probably cheaper and certainly more uniform.             */
/* FIXME - Consider doing the same elsewhere?                           */
static void maybe_finalize()
{
   static int last_finalized_no = 0;

   if (GC_gc_no == last_finalized_no) return;
   if (!GC_is_initialized) return;
   UNLOCK();
   GC_INVOKE_FINALIZERS();
   last_finalized_no = GC_gc_no;
   LOCK();
}
/* Allocate an object, clear it, and store the pointer to the          */
/* type structure (vtable in gcj).                                      */
/* This adds a byte at the end of the object if GC_malloc would.        */
void * GC_gcj_malloc(size_t lb, void * ptr_to_struct_containing_descr)
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;

    if( EXPECT(SMALL_OBJ(lb), 1) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_gcjobjfreelist[lw]);
        LOCK();
        op = *opp;
        if(EXPECT(op == 0, 0)) {
            maybe_finalize();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_gcj_kind);
            if (0 == op) {
                UNLOCK();
                return(GC_oom_fn(lb));
            }
#           ifdef MERGE_SIZES
              lw = GC_size_map[lb];     /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            GC_words_allocd += lw;
        }
        *(void **)op = ptr_to_struct_containing_descr;
        GC_ASSERT(((void **)op)[1] == 0);
        UNLOCK();
    } else {
        LOCK();
        maybe_finalize();
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_gcj_kind);
        if (0 == op) {
            UNLOCK();
            return(GC_oom_fn(lb));
        }
        *(void **)op = ptr_to_struct_containing_descr;
        UNLOCK();
    }
    return((GC_PTR) op);
}
/* Similar to GC_gcj_malloc, but add debug info.  This is allocated    */
/* with GC_gcj_debug_kind.                                              */
GC_PTR GC_debug_gcj_malloc(size_t lb, void * ptr_to_struct_containing_descr,
                           GC_EXTRA_PARAMS)
{
    GC_PTR result;

    /* We're careful to avoid extra calls, which could          */
    /* confuse the backtrace.                                   */
    LOCK();
    maybe_finalize();
    result = GC_generic_malloc_inner(lb + DEBUG_BYTES, GC_gcj_debug_kind);
    if (result == 0) {
        UNLOCK();
        GC_err_printf2("GC_debug_gcj_malloc(%ld, 0x%lx) returning NIL (",
                       (unsigned long) lb,
                       (unsigned long) ptr_to_struct_containing_descr);
        GC_err_puts(s);
        GC_err_printf1(":%ld)\n", (unsigned long)i);
        return(GC_oom_fn(lb));
    }
    *((void **)((ptr_t)result + sizeof(oh))) = ptr_to_struct_containing_descr;
    UNLOCK();
    if (!GC_debugging_started) {
        GC_start_debugging();
    }
    ADD_CALL_CHAIN(result, ra);
    return (GC_store_debug_info(result, (word)lb, s, (word)i));
}
/* Similar to GC_gcj_malloc, but the size is in words, and we don't    */
/* adjust it.  The size is assumed to be such that it can be            */
/* allocated as a small object.                                          */
void * GC_gcj_fast_malloc(size_t lw, void * ptr_to_struct_containing_descr)
{
ptr_t op;
ptr_t * opp;
DCL_LOCK_STATE;

    opp = &(GC_gcjobjfreelist[lw]);
    LOCK();
    op = *opp;
    if( EXPECT(op == 0, 0) ) {
        maybe_finalize();
        op = (ptr_t)GC_clear_stack(
                GC_generic_malloc_words_small_inner(lw, GC_gcj_kind));
        if (0 == op) {
            UNLOCK();
            return GC_oom_fn(WORDS_TO_BYTES(lw));
        }
    } else {
        *opp = obj_link(op);
        GC_words_allocd += lw;
    }
    *(void **)op = ptr_to_struct_containing_descr;
    UNLOCK();
    return((GC_PTR) op);
}
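/*
 * Illustrative call (editorial addition, not part of the original source):
 * the first argument is a word count, not a byte count, and must already
 * correspond to a small object; struct my_obj and vt are hypothetical:
 *
 *   void *p = GC_gcj_fast_malloc(ALIGNED_WORDS(sizeof(struct my_obj)), &vt);
 */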
/* And a debugging version of the above:        */
void * GC_debug_gcj_fast_malloc(size_t lw,
                                void * ptr_to_struct_containing_descr,
                                GC_EXTRA_PARAMS)
{
    GC_PTR result;
    size_t lb = WORDS_TO_BYTES(lw);

    /* We clone the code from GC_debug_gcj_malloc, so that we   */
    /* don't end up with extra frames on the stack, which could */
    /* confuse the backtrace.                                   */
    LOCK();
    maybe_finalize();
    result = GC_generic_malloc_inner(lb + DEBUG_BYTES, GC_gcj_debug_kind);
    if (result == 0) {
        UNLOCK();
        GC_err_printf2("GC_debug_gcj_fast_malloc(%ld, 0x%lx) returning NIL (",
                       (unsigned long) lw,
                       (unsigned long) ptr_to_struct_containing_descr);
        GC_err_puts(s);
        GC_err_printf1(":%ld)\n", (unsigned long)i);
        return GC_oom_fn(WORDS_TO_BYTES(lw));
    }
    *((void **)((ptr_t)result + sizeof(oh))) = ptr_to_struct_containing_descr;
    UNLOCK();
    if (!GC_debugging_started) {
        GC_start_debugging();
    }
    ADD_CALL_CHAIN(result, ra);
    return (GC_store_debug_info(result, (word)lb, s, (word)i));
}
void * GC_gcj_malloc_ignore_off_page(size_t lb,
                                     void * ptr_to_struct_containing_descr)
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;

    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_gcjobjfreelist[lw]);
        LOCK();
        if( (op = *opp) == 0 ) {
            maybe_finalize();
            op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_gcj_kind);
            if (0 == op) {
                UNLOCK();
                return(GC_oom_fn(lb));
            }
#           ifdef MERGE_SIZES
              lw = GC_size_map[lb];     /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            GC_words_allocd += lw;
        }
        *(void **)op = ptr_to_struct_containing_descr;
        UNLOCK();
    } else {
        LOCK();
        maybe_finalize();
        op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_gcj_kind);
        if (0 != op) {
            *(void **)op = ptr_to_struct_containing_descr;
        }
        UNLOCK();
    }
    return((GC_PTR) op);
}
#else

char GC_no_gcj_support;

#endif /* GC_GCJ_SUPPORT */