/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

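/* Hash such a structure via its leading pointer.  The low four bits
   are shifted away, presumably because allocated nodes are aligned
   enough that those bits are identical across keys and so add nothing
   to the hash.  */
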
static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}

/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
         we need to record the call expression, not just the outer
         modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
          && (t = get_call_expr_in (t)))
        remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = htab_find (get_eh_throw_stmt_table (ifun), &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}

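/* An illustrative use of the lookup API (not part of the original
   file): a pass that cares whether a statement can throw might write

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)
       ... STMT is in the table; RN names its EH region ...
     else
       ... STMT is not recorded and cannot throw internally ...  */
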
186 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
187 nodes and LABEL_DECL nodes. We will use this during the second phase to
188 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
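/* For example (an illustration, not from the original sources), given

     try { L1: body; goto L2; } finally { cleanup; }
     L2:;

   L1 (and the TRY_FINALLY_EXPR itself) is recorded with the
   TRY_FINALLY_EXPR as its parent, while L2 is recorded with the outer
   region.  outside_finally_tree can then see that "goto L2" escapes
   the try_finally, whereas a "goto L1" would not.  */
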
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

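/* The essence of the transformation (an illustrative sketch): a goto
   that escapes a try_finally, as in

     try { goto L; } finally { F; }
     L:;

   must be rewritten so the finally code runs on that edge, roughly

     goto finally_lab;
     finally_lab: F; goto L;
     L:;

   and similarly for returns and for the fallthru and exception edges.
   The lower_try_finally_* functions below implement several variants
   of this rewrite.  */
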
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            tf->dest_array = VEC_alloc (tree, heap, 10);
            VEC_quick_push (tree, tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VEC_length (tree, tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VEC_index (tree, tf->dest_array, index) == lab)
                break;
            if (index == n)
              VEC_safe_push (tree, heap, tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

             int x;
             int foo (void)
             {
               x = 0;
               try {
                 return x;
               } finally {
                 x++;
               }
             }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   into
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

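/* For instance (an illustration, not from the original sources), a
   finally block that cannot complete,

     try { ... } finally { for (;;) wait_forever (); }

   has no outgoing edges at all, so every incoming edge can simply be
   redirected to a single label at the head of the finally code.  */
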
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

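/* Sketch of the result (illustrative, not from the original sources):
   with a fallthru edge and one escaping "goto L", the finally code F
   is emitted once per destination,

     F; goto fallthru_lab;
     L': F; goto L;

   trading code size for straight-line control flow on every path.  */
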
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = xcalloc (sizeof (*labels), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          x = build1 (LABEL_EXPR, void_type_node, lab);
          append_to_statement_list (x, &new_stmt);

          x = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, &x);
          append_to_statement_list (x, &new_stmt);

          append_to_statement_list (q->cont_stmt, &new_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

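/* Sketch of the generated shape (illustrative, not from the original
   sources): each entry path first stores a small dispatch constant,

     finally_tmp = <index>; goto finally_lab;
     ...
     finally_lab: F;
     switch (finally_tmp)
       {
       case fallthru_index: goto fallthru_lab;
       case eh_index: <resume exception handling>;
       default: goto L0;
       }

   so only one copy of the finally code F is emitted.  */
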
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, fallthru_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, eh_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        TREE_VEC_ELT (case_label_vec, case_index)
          = build (CASE_LABEL_EXPR, void_type_node,
                   build_int_cst (NULL_TREE, switch_id), NULL,
                   /* We store the cont_stmt in the
                      CASE_LABEL, so that we can recover it
                      in the loop below.  We don't create
                      the new label while walking the
                      goto_queue because pointers don't
                      offer a stable order.  */
                   q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

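/* A worked instance of the heuristic below (the numbers are purely
   illustrative): a finally block estimated at 10 insns with 3
   destinations gives f_estimate = (10 + 1) * 3 = 33 versus
   sw_estimate = 10 + 2 * 3 = 16, so at -Os the switch is used,
   while at -O2 the copies win via the "f_estimate < 100" escape
   hatch.  */
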
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};

/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}

static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i is missing its EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? might not be mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}

/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i cannot throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!tree_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}

/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

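/* Examples (illustrative, not from the original sources): "*p" traps
   if P might be null or point to unmapped memory; "x / y" traps if Y
   might be zero (or, with -ftrapv on signed types, on overflow);
   floating point arithmetic traps only under -ftrapping-math.  */
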
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
         the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

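/* For example (illustrative): with -fnon-call-exceptions, a load such
   as "x = *p" could throw because the dereference may trap; without
   it, only CALL_EXPRs lacking ECF_NOTHROW are considered to throw.  */
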
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}