/* Handle exceptions for GNU compiler for the Java(TM) language.
   Copyright (C) 1997, 1998, 1999, 2000, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "function.h"
#include "except.h"
#include "java-except.h"
#include "toplev.h"

static void expand_start_java_handler (struct eh_range *);
static struct eh_range *find_handler_in_range (int, struct eh_range *,
                                                struct eh_range *);
static void link_handler (struct eh_range *, struct eh_range *);
static void check_start_handlers (struct eh_range *, int);
static void free_eh_ranges (struct eh_range *range);

struct eh_range *current_method_handlers;

struct eh_range *current_try_block = NULL;

struct eh_range *eh_range_freelist = NULL;

/* These variables are used to speed up find_handler.  */

static int cache_range_start, cache_range_end;
static struct eh_range *cache_range;
static struct eh_range *cache_next_child;

/* A dummy range that represents the entire method.  */

struct eh_range whole_range;

#if defined(DEBUG_JAVA_BINDING_LEVELS)
extern int binding_depth;
extern int is_class_level;
extern int current_pc;
extern void indent (void);
#endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */

/* Search for the most specific eh_range containing PC.
   Assume PC is within RANGE.
   CHILD is a list of children of RANGE such that any
   previous children have end_pc values that are too low.  */

static struct eh_range *
find_handler_in_range (int pc, struct eh_range *range, struct eh_range *child)
{
  for (; child != NULL; child = child->next_sibling)
    {
      if (pc < child->start_pc)
        break;
      if (pc < child->end_pc)
        return find_handler_in_range (pc, child, child->first_child);
    }
  cache_range = range;
  cache_range_start = pc;
  cache_next_child = child;
  cache_range_end = child == NULL ? range->end_pc : child->start_pc;
  return range;
}

/* Find the inner-most handler that contains PC.  */

struct eh_range *
find_handler (int pc)
{
  struct eh_range *h;

  if (pc >= cache_range_start)
    {
      h = cache_range;
      if (pc < cache_range_end)
        return h;
      while (pc >= h->end_pc)
        {
          cache_next_child = h->next_sibling;
          h = h->outer;
        }
    }
  else
    {
      h = &whole_range;
      cache_next_child = h->first_child;
    }
  return find_handler_in_range (pc, h, cache_next_child);
}
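
/* Worked example (illustrative only): in a method with 120 bytes of
   bytecode, an outer try covering pcs [0, 100) and an inner try
   covering [20, 40), the range tree is

       whole_range [0, 121)
         outer [0, 100)
           inner [20, 40)

   find_handler (25) returns the inner range, find_handler (50) the
   outer one, and find_handler (110) falls back to whole_range.  */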

/* Recursive helper routine for handle_nested_ranges.  */

static void
link_handler (struct eh_range *range, struct eh_range *outer)
{
  struct eh_range **ptr;

  if (range->start_pc == outer->start_pc && range->end_pc == outer->end_pc)
    {
      outer->handlers = chainon (outer->handlers, range->handlers);
      return;
    }

  /* If the new range completely encloses the `outer' range, then insert it
     between the outer range and its parent.  */
  if (range->start_pc <= outer->start_pc && range->end_pc >= outer->end_pc)
    {
      range->outer = outer->outer;
      range->next_sibling = NULL;
      range->first_child = outer;
      {
        struct eh_range **pr = &(outer->outer->first_child);
        while (*pr != outer)
          pr = &(*pr)->next_sibling;
        *pr = range;
      }
      outer->outer = range;
      return;
    }

  /* Handle overlapping ranges by splitting the new range.  */
  if (range->start_pc < outer->start_pc || range->end_pc > outer->end_pc)
    {
      struct eh_range *h = xmalloc (sizeof (struct eh_range));
      if (range->start_pc < outer->start_pc)
        {
          h->start_pc = range->start_pc;
          h->end_pc = outer->start_pc;
          range->start_pc = outer->start_pc;
        }
      else
        {
          h->start_pc = outer->end_pc;
          h->end_pc = range->end_pc;
          range->end_pc = outer->end_pc;
        }
      h->first_child = NULL;
      h->outer = NULL;
      h->handlers = build_tree_list (TREE_PURPOSE (range->handlers),
                                     TREE_VALUE (range->handlers));
      h->next_sibling = NULL;
      h->expanded = 0;

      /* Restart both from the top to avoid having to make this
         function smart about reentrancy.  */
      link_handler (h, &whole_range);
      link_handler (range, &whole_range);
      return;
    }

  ptr = &outer->first_child;
  for (;; ptr = &(*ptr)->next_sibling)
    {
      if (*ptr == NULL || range->end_pc <= (*ptr)->start_pc)
        {
          range->next_sibling = *ptr;
          range->first_child = NULL;
          range->outer = outer;
          *ptr = range;
          return;
        }
      else if (range->start_pc < (*ptr)->end_pc)
        {
          link_handler (range, *ptr);
          return;
        }
      /* end_pc > (*ptr)->start_pc && start_pc >= (*ptr)->end_pc.  */
    }
}
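
/* Worked example (illustrative only): linking a new range [0, 30)
   against an existing child [20, 50) takes the "overlapping ranges"
   branch above: the new range is trimmed to [20, 30), a fresh
   eh_range covering [0, 20) is allocated with a copy of the same
   handler, and both pieces are re-linked from whole_range so that
   the final tree is properly nested.  */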

/* The first pass of exception range processing (calling add_handler)
   constructs a linked list of exception ranges.  We turn this into
   the data structure expected by the rest of the code, and also
   ensure that exception ranges are properly nested.  */

void
handle_nested_ranges (void)
{
  struct eh_range *ptr, *next;

  ptr = whole_range.first_child;
  whole_range.first_child = NULL;
  for (; ptr; ptr = next)
    {
      next = ptr->next_sibling;
      ptr->next_sibling = NULL;
      link_handler (ptr, &whole_range);
    }
}

/* Free RANGE as well as its children and siblings.  */

static void
free_eh_ranges (struct eh_range *range)
{
  while (range)
    {
      struct eh_range *next = range->next_sibling;
      free_eh_ranges (range->first_child);
      if (range != &whole_range)
        free (range);
      range = next;
    }
}

/* Called to re-initialize the exception machinery for a new method.  */

void
method_init_exceptions (void)
{
  free_eh_ranges (&whole_range);
  whole_range.start_pc = 0;
  whole_range.end_pc = DECL_CODE_LENGTH (current_function_decl) + 1;
  whole_range.outer = NULL;
  whole_range.first_child = NULL;
  whole_range.next_sibling = NULL;
  /* No valid bytecode pc can reach 0xFFFFFF, so this marks the
     find_handler cache as empty.  */
  cache_range_start = 0xFFFFFF;
}

/* Add an exception range.  If we already have an exception range
   which has the same handler and label, and the new range overlaps
   that one, then we simply extend the existing range.  Some bytecode
   obfuscators generate seemingly nonoverlapping exception ranges
   which, when coalesced, do in fact nest correctly.

   This constructs an ordinary linked list which handle_nested_ranges()
   later turns into the data structure we actually want.

   We expect the input to come in order of increasing START_PC.  This
   function doesn't attempt to detect the case where two previously
   added disjoint ranges could be coalesced by a new range; that is
   what the sorting counteracts.  */

void
add_handler (int start_pc, int end_pc, tree handler, tree type)
{
  struct eh_range *ptr, *prev = NULL, *h;

  for (ptr = whole_range.first_child; ptr; ptr = ptr->next_sibling)
    {
      if (start_pc >= ptr->start_pc
          && start_pc <= ptr->end_pc
          && TREE_PURPOSE (ptr->handlers) == type
          && TREE_VALUE (ptr->handlers) == handler)
        {
          /* Already found an overlapping range, so coalesce.  */
          ptr->end_pc = MAX (ptr->end_pc, end_pc);
          return;
        }
      prev = ptr;
    }

  h = xmalloc (sizeof (struct eh_range));
  h->start_pc = start_pc;
  h->end_pc = end_pc;
  h->first_child = NULL;
  h->outer = NULL;
  h->handlers = build_tree_list (type, handler);
  h->next_sibling = NULL;
  h->expanded = 0;

  if (prev == NULL)
    whole_range.first_child = h;
  else
    prev->next_sibling = h;
}
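
/* Coalescing example (illustrative only): given the same handler H
   for type T, the calls

       add_handler (0, 10, H, T);
       add_handler (10, 20, H, T);

   leave a single range [0, 20) in the list, because the second call
   finds the first entry (start_pc 10 lies within [0, 10] under the
   inclusive test above) and simply raises its end_pc rather than
   appending a new range.  */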

/* If there are any handlers for this range, issue the start of the region.  */

static void
expand_start_java_handler (struct eh_range *range)
{
#if defined(DEBUG_JAVA_BINDING_LEVELS)
  indent ();
  fprintf (stderr, "expand start handler pc %d --> %d\n",
           current_pc, range->end_pc);
#endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
  range->expanded = 1;
  register_exception_range (range, range->start_pc, range->end_pc);
}

tree
prepare_eh_table_type (tree type)
{
  tree exp;
  tree *slot;
  const char *name;
  char *buf;
  tree decl;
  tree utf8_ref;

  /* The "type" (match_info) in a (Java) exception table is a pointer to:
   * a) NULL - meaning match any type in a try-finally.
   * b) a pointer to a pointer to a class.
   * c) a pointer to a pointer to a utf8_ref.  The pointer is
   *    rewritten to point to the appropriate class.  */

  if (type == NULL_TREE)
    return NULL_TREE;

  if (TYPE_TO_RUNTIME_MAP (output_class) == NULL)
    TYPE_TO_RUNTIME_MAP (output_class) = java_treetreehash_create (10, 1);

  slot = java_treetreehash_new (TYPE_TO_RUNTIME_MAP (output_class), type);
  if (*slot != NULL)
    return TREE_VALUE (*slot);

  if (is_compiled_class (type) && !flag_indirect_dispatch)
    {
      name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
      buf = alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (VAR_DECL, get_identifier (buf), ptr_type_node);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      DECL_INITIAL (decl) = build_class_ref (type);
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl);
    }
  else
    {
      utf8_ref = build_utf8_ref (DECL_NAME (TYPE_NAME (type)));
      name = IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (utf8_ref, 0)));
      buf = alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (VAR_DECL, get_identifier (buf), utf8const_ptr_type);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (utf8const_ptr_type), decl);
      TYPE_CATCH_CLASSES (output_class) =
        tree_cons (NULL, make_catch_class_record (exp, utf8_ref),
                   TYPE_CATCH_CLASSES (output_class));
    }

  exp = convert (ptr_type_node, exp);

  *slot = tree_cons (type, exp, NULL_TREE);

  return exp;
}
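
/* Illustrative note: for a class compiled in this unit (and no
   -findirect-dispatch), the match_info ends up as the address of a
   static `<classname>_ref' pointer variable whose initializer is the
   class object (case (b) in the comment above).  For any other class
   it is the address of a `<utf8>_ref' variable holding a utf8const
   pointer, which the runtime rewrites to the real class on first use
   (case (c)); such entries are also recorded in TYPE_CATCH_CLASSES so
   the class can be found at match time.  */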

/* Helper for java_expand_catch_classes: emit the *_ref variable
   recorded in one TYPE_TO_RUNTIME_MAP entry.  */

static int
expand_catch_class (void **entry, void *x ATTRIBUTE_UNUSED)
{
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
  tree addr = TREE_VALUE ((tree) ite->value);
  tree decl;
  STRIP_NOPS (addr);
  decl = TREE_OPERAND (addr, 0);
  rest_of_decl_compilation (decl, global_bindings_p (), 0);
  return true;
}

/* For every class in the TYPE_TO_RUNTIME_MAP, expand the
   corresponding object that is used by the runtime type matcher.  */

void
java_expand_catch_classes (tree this_class)
{
  if (TYPE_TO_RUNTIME_MAP (this_class))
    htab_traverse (TYPE_TO_RUNTIME_MAP (this_class),
                   expand_catch_class, NULL);
}

/* Build a reference to the jthrowable object being carried in the
   exception header.  */

tree
build_exception_object_ref (tree type)
{
  tree obj;

  /* Java only passes objects by pointer and doesn't require adjusting.
     The java object is immediately before the generic exception header.  */
  obj = build0 (EXC_PTR_EXPR, build_pointer_type (type));
  obj = build2 (MINUS_EXPR, TREE_TYPE (obj), obj,
                TYPE_SIZE_UNIT (TREE_TYPE (obj)));
  obj = build1 (INDIRECT_REF, type, obj);
  return obj;
}
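
/* Layout sketch (illustrative only; libgcj's exception-handling code
   defines the real structure):

       struct java_exception_header
       {
         ...
         jthrowable value;               -- the thrown object
         _Unwind_Exception unwindHeader; -- EXC_PTR_EXPR points here
       };

   EXC_PTR_EXPR yields the address of the generic unwind header, so
   stepping back one pointer-sized slot and dereferencing recovers
   the jthrowable reference.  */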

/* If there are any handlers for this range, issue the end of the range
   and then all handler blocks.  */

void
expand_end_java_handler (struct eh_range *range)
{
  tree handler = range->handlers;

  for ( ; handler != NULL_TREE; handler = TREE_CHAIN (handler))
    {
      /* For bytecode we treat exceptions a little unusually.  A
         `finally' clause looks like an ordinary exception handler for
         Throwable.  The reason for this is that the bytecode has
         already expanded the finally logic, and we would have to do
         extra (and difficult) work to get this to look like a
         gcc-style finally clause.  */
      tree type = TREE_PURPOSE (handler);
      if (type == NULL)
        type = throwable_type_node;
      type = prepare_eh_table_type (type);

      {
        tree catch_expr = build2 (CATCH_EXPR, void_type_node, type,
                                  build1 (GOTO_EXPR, void_type_node,
                                          TREE_VALUE (handler)));
        tree try_catch_expr = build2 (TRY_CATCH_EXPR, void_type_node,
                                      *get_stmts (), catch_expr);
        *get_stmts () = try_catch_expr;
      }
    }
#if defined(DEBUG_JAVA_BINDING_LEVELS)
  indent ();
  fprintf (stderr, "expand end handler pc %d <-- %d\n",
           current_pc, range->start_pc);
#endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
}
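
/* Shape of the emitted tree (illustrative only): for a handler at
   label L catching type T, the loop above rewrites the current
   statement list S into

       TRY_CATCH_EXPR
         S
         CATCH_EXPR <match_info for T>
           GOTO_EXPR L

   A bytecode `finally' appears here as an ordinary handler whose
   match type is Throwable, since the bytecode has already expanded
   the finally logic inline.  */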

/* Recursive helper routine for maybe_start_try.  */

static void
check_start_handlers (struct eh_range *range, int pc)
{
  if (range != NULL_EH_RANGE && range->start_pc == pc)
    {
      check_start_handlers (range->outer, pc);
      if (!range->expanded)
        expand_start_java_handler (range);
    }
}

static struct eh_range *current_range;

/* Emit any start-of-try-range starting at start_pc and ending after
   end_pc.  */

void
maybe_start_try (int start_pc, int end_pc)
{
  struct eh_range *range;

  if (! doing_eh (1))
    return;

  range = find_handler (start_pc);
  while (range != NULL_EH_RANGE && range->start_pc == start_pc
         && range->end_pc < end_pc)
    range = range->outer;

  current_range = range;
  check_start_handlers (range, start_pc);
}