/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}
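/* E.g. with 16-byte allocation granularity the low four bits of a
   heap pointer carry no information, so the hash above simply shifts
   them away: a pointer such as 0x2af3c40 hashes to 0x2af3c4.  */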
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
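/* A usage sketch for the lookup above; the return value folds three
   answers into one integer:

	int rn = lookup_stmt_eh_region (stmt);

   RN is -2 if no EH table exists yet, -1 if STMT is not recorded as
   possibly throwing, and the EH region number otherwise.  */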
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
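/* For example, given

	try { L1: ...; goto L9; } finally { ... }
	L9: ...

   collect_finally_tree records L1 under the TRY_FINALLY_EXPR but
   never sees L9, so outside_finally_tree (L9, <the try>) misses on
   the very first lookup and returns true: the goto escapes.  */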
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      abort ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
	    VARRAY_PUSH_TREE (tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VARRAY_TREE (tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VARRAY_PUSH_TREE (tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      abort ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
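/* Thus two different gotos to the same escaping label share one
   dest_array slot (and later one destination out of the finally
   block), while every RETURN_EXPR is queued with index -1 and is
   treated as a single extra destination.  */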
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      if (outside_finally_tree (lab, tf->try_finally_expr))
	abort ();
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

	    int x;
	    int foo (void)
	    {
	      x = 0;
	      try {
		return x;
	      } finally {
		x++;
	      }
	    }

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
	{
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else if (*return_value_p != ret_expr)
	    abort ();
	  q->cont_stmt = q->stmt;
	}
      else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
	{
	  tree result = TREE_OPERAND (ret_expr, 0);
	  tree new, old = TREE_OPERAND (ret_expr, 1);

	  if (!*return_value_p)
	    {
	      if (aggregate_value_p (TREE_TYPE (result),
				     TREE_TYPE (current_function_decl)))
		/* If this function returns in memory, copy the argument
		   into the return slot now.  Otherwise, we might need to
		   worry about magic return semantics, so we need to use a
		   temporary to hold the value until we're actually ready
		   to return.  */
		new = result;
	      else
		new = create_tmp_var (TREE_TYPE (old), "rettmp");
	      *return_value_p = new;
	    }
	  else
	    new = *return_value_p;

	  x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
	  append_to_statement_list (x, &q->repl_stmt);

	  if (new == result)
	    x = result;
	  else
	    x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
	  q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	}
      else
	abort ();
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
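/* Concretely, for the foo example above the redirection produces
   roughly this shape (a sketch with illustrative label names):

	rettmp = x; goto finlab;	(q->repl_stmt, replacing the return)
	...
	finlab:				(the finally code runs here)
	x++;
	<retval> = rettmp; return;	(q->cont_stmt, emitted afterwards)

   so foo still returns 0: the value was captured before the finally
   block executed.  */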
/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region), 0));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
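/* The fallthru-capable case above brackets the duplicated finally
   block roughly like so (a sketch of the emitted sequence):

	save_eptr = <exception pointer>;
	save_filt = <filter value>;
	... original finally code ...
	<exception pointer> = save_eptr;
	<filter value> = save_filt;
	resx;

   so that a nested throw handled inside the finally block cannot
   clobber the EH runtime data of the exception currently being
   propagated.  */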
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
	return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
	return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  int index = q->index < 0 ? return_index : q->index;
	  tree lab = labels[index];
	  bool build_p = false;

	  if (!lab)
	    {
	      labels[index] = lab = create_artificial_label ();
	      build_p = true;
	    }

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  if (build_p)
	    {
	      x = build1 (LABEL_EXPR, void_type_node, lab);
	      append_to_statement_list (x, &new_stmt);

	      x = lower_try_finally_dup_block (finally, state);
	      lower_eh_constructs_1 (state, &x);
	      append_to_statement_list (x, &new_stmt);

	      append_to_statement_list (q->cont_stmt, &new_stmt);
	      maybe_record_in_goto_queue (state, q->cont_stmt);
	    }
	}
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
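/* For example, a try block with a fallthru edge, one escaping
   "goto L", and a possible throw ends up with roughly three copies
   of FINALLY (a sketch):

	<try body>
	FINALLY; goto <fallthru label>;		(fallthru copy)
	<eh label>: FINALLY; resx;		(exception copy)
	<L.tmp>: FINALLY; goto L;		(one copy per queued dest)

   trading code size for straight-line control flow; the tradeoff is
   weighed by decide_copy_try_finally below.  */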
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
		       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, fallthru_index, 0));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, fallthru_index, 0), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, eh_index, 0));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, eh_index, 0), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, return_index, 0));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, q->index, 0));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	{
	  last_case = build (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, switch_id, 0), NULL,
			     create_artificial_label ());
	  TREE_VEC_ELT (case_label_vec, case_index) = last_case;

	  x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
	  append_to_statement_list (x, &switch_body);
	  append_to_statement_list (q->cont_stmt, &switch_body);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
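/* The result has one switch case per destination instead of one copy
   of FINALLY per destination, roughly (a sketch; index values and the
   choice of default case depend on which edges exist):

	<try body>
	finally_tmp = <fallthru index>; goto <finally label>;
	<eh label>: finally_tmp = <eh index>;
	<finally label>:
	FINALLY;
	switch (finally_tmp)
	  case <eh index>: resx;
	  case <goto index>: goto L;
	  default: goto <fallthru label>;  */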
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
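/* A worked example: a FINALLY block estimated at 8 insns with 3
   destinations gives f_estimate = (8 + 1) * 3 = 27 against
   sw_estimate = 10 + 2 * 3 = 16.  Under -Os we keep the switch
   (27 >= 16); at -O2 we duplicate, since 27 < 100.  */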
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  tree op;

	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* ??? For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  op = get_call_expr_in (t);
	  if (op)
	    record_stmt_eh_region (state->cur_region, op);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
				      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, idx;

  if (TREE_CODE_CLASS (code) == '<'
      || TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2')
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
	 the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
	return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      idx = TREE_OPERAND (expr, 1);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation && flag_trapping_math)
	return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
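/* A few consequences of the predicate above: "p->f" can trap when
   "p" might be invalid (the COMPONENT_REF strips down to an
   INDIRECT_REF without TREE_THIS_NOTRAP); "x / y" can trap whenever
   "y" is not a nonzero constant; and a floating "a < b" can trap for
   any NaN under -ftrapping-math, while "a == b" traps only when
   signaling NaNs are honored.  */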
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}
bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"