Merge from the pain train
[official-gcc.git] / gcc / tree-eh.c
blobf19c851fb74a163e596c00cecdc9bffa37e54d02
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Record that this compilation will use EH to run cleanup actions.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

/* Equality callback: A and B point at structures whose first member
   is a canonical pointer; compare those leading pointers.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void *const *pa = a;
  const void *const *pb = b;

  return *pa == *pb;
}
64 static hashval_t
65 struct_ptr_hash (const void *a)
67 const void * const * x = a;
68 return (size_t)*x >> 4;
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
84 struct throw_stmt_node GTY(())
86 tree stmt;
87 int region_nr;
90 static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
92 static void
93 record_stmt_eh_region (struct eh_region *region, tree t)
95 struct throw_stmt_node *n;
96 void **slot;
98 if (!region)
99 return;
101 n = ggc_alloc (sizeof (*n));
102 n->stmt = t;
103 n->region_nr = get_eh_region_number (region);
105 slot = htab_find_slot (throw_stmt_table, n, INSERT);
106 gcc_assert (!*slot);
107 *slot = n;
110 void
111 add_stmt_to_eh_region (tree t, int num)
113 struct throw_stmt_node *n;
114 void **slot;
116 gcc_assert (num >= 0);
118 n = ggc_alloc (sizeof (*n));
119 n->stmt = t;
120 n->region_nr = num;
122 slot = htab_find_slot (throw_stmt_table, n, INSERT);
123 gcc_assert (!*slot);
124 *slot = n;
127 bool
128 remove_stmt_from_eh_region (tree t)
130 struct throw_stmt_node dummy;
131 void **slot;
133 if (!throw_stmt_table)
134 return false;
136 dummy.stmt = t;
137 slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
138 if (slot)
140 htab_clear_slot (throw_stmt_table, slot);
141 return true;
143 else
144 return false;
148 lookup_stmt_eh_region (tree t)
150 struct throw_stmt_node *p, n;
152 if (!throw_stmt_table)
153 return -2;
155 n.stmt = t;
156 p = htab_find (throw_stmt_table, &n);
158 return (p ? p->region_nr : -1);
163 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
164 nodes and LABEL_DECL nodes. We will use this during the second phase to
165 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
167 struct finally_tree_node
169 tree child, parent;
172 /* Note that this table is *not* marked GTY. It is short-lived. */
173 static htab_t finally_tree;
175 static void
176 record_in_finally_tree (tree child, tree parent)
178 struct finally_tree_node *n;
179 void **slot;
181 n = xmalloc (sizeof (*n));
182 n->child = child;
183 n->parent = parent;
185 slot = htab_find_slot (finally_tree, n, INSERT);
186 gcc_assert (!*slot);
187 *slot = n;
190 static void
191 collect_finally_tree (tree t, tree region)
193 tailrecurse:
194 switch (TREE_CODE (t))
196 case LABEL_EXPR:
197 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
198 break;
200 case TRY_FINALLY_EXPR:
201 record_in_finally_tree (t, region);
202 collect_finally_tree (TREE_OPERAND (t, 0), t);
203 t = TREE_OPERAND (t, 1);
204 goto tailrecurse;
206 case TRY_CATCH_EXPR:
207 collect_finally_tree (TREE_OPERAND (t, 0), region);
208 t = TREE_OPERAND (t, 1);
209 goto tailrecurse;
211 case CATCH_EXPR:
212 t = CATCH_BODY (t);
213 goto tailrecurse;
215 case EH_FILTER_EXPR:
216 t = EH_FILTER_FAILURE (t);
217 goto tailrecurse;
219 case STATEMENT_LIST:
221 tree_stmt_iterator i;
222 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
223 collect_finally_tree (tsi_stmt (i), region);
225 break;
227 default:
228 /* A type, a decl, or some kind of statement that we're not
229 interested in. Don't walk them. */
230 break;
234 /* Use the finally tree to determine if a jump from START to TARGET
235 would leave the try_finally node that START lives in. */
237 static bool
238 outside_finally_tree (tree start, tree target)
240 struct finally_tree_node n, *p;
244 n.child = start;
245 p = htab_find (finally_tree, &n);
246 if (!p)
247 return true;
248 start = p->parent;
250 while (start != target);
252 return false;
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
276 struct leh_tf_state
278 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
279 is the original TRY_FINALLY_EXPR. We need to retain this so that
280 outside_finally_tree can reliably reference the tree used in the
281 collect_finally_tree data structures. */
282 tree try_finally_expr;
283 tree *top_p;
285 /* The state outside this try_finally node. */
286 struct leh_state *outer;
288 /* The exception region created for it. */
289 struct eh_region *region;
291 /* The GOTO_QUEUE is is an array of GOTO_EXPR and RETURN_EXPR statements
292 that are seen to escape this TRY_FINALLY_EXPR node. */
293 struct goto_queue_node {
294 tree stmt;
295 tree repl_stmt;
296 tree cont_stmt;
297 int index;
298 } *goto_queue;
299 size_t goto_queue_size;
300 size_t goto_queue_active;
302 /* The set of unique labels seen as entries in the goto queue. */
303 varray_type dest_array;
305 /* A label to be added at the end of the completed transformed
306 sequence. It will be set if may_fallthru was true *at one time*,
307 though subsequent transformations may have cleared that flag. */
308 tree fallthru_label;
310 /* A label that has been registered with except.c to be the
311 landing pad for this try block. */
312 tree eh_label;
314 /* True if it is possible to fall out the bottom of the try block.
315 Cleared if the fallthru is converted to a goto. */
316 bool may_fallthru;
318 /* True if any entry in goto_queue is a RETURN_EXPR. */
319 bool may_return;
321 /* True if the finally block can receive an exception edge.
322 Cleared if the exception case is handled by code duplication. */
323 bool may_throw;
326 static void lower_eh_filter (struct leh_state *, tree *);
327 static void lower_eh_constructs_1 (struct leh_state *, tree *);
329 /* Comparison function for qsort/bsearch. We're interested in
330 searching goto queue elements for source statements. */
332 static int
333 goto_queue_cmp (const void *x, const void *y)
335 tree a = ((const struct goto_queue_node *)x)->stmt;
336 tree b = ((const struct goto_queue_node *)y)->stmt;
337 return (a == b ? 0 : a < b ? -1 : 1);
340 /* Search for STMT in the goto queue. Return the replacement,
341 or null if the statement isn't in the queue. */
343 static tree
344 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
346 struct goto_queue_node tmp, *ret;
347 tmp.stmt = stmt;
348 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
349 sizeof (struct goto_queue_node), goto_queue_cmp);
350 return (ret ? ret->repl_stmt : NULL);
353 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
354 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
355 then we can just splat it in, otherwise we add the new stmts immediately
356 after the COND_EXPR and redirect. */
358 static void
359 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
360 tree_stmt_iterator *tsi)
362 tree new, one, label;
364 new = find_goto_replacement (tf, *tp);
365 if (!new)
366 return;
368 one = expr_only (new);
369 if (one && TREE_CODE (one) == GOTO_EXPR)
371 *tp = one;
372 return;
375 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
376 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
378 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
379 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
382 /* The real work of replace_goto_queue. Returns with TSI updated to
383 point to the next statement. */
385 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
387 static void
388 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
390 switch (TREE_CODE (t))
392 case GOTO_EXPR:
393 case RETURN_EXPR:
394 t = find_goto_replacement (tf, t);
395 if (t)
397 tsi_link_before (tsi, t, TSI_SAME_STMT);
398 tsi_delink (tsi);
399 return;
401 break;
403 case COND_EXPR:
404 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
405 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
406 break;
408 case TRY_FINALLY_EXPR:
409 case TRY_CATCH_EXPR:
410 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
411 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
412 break;
413 case CATCH_EXPR:
414 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
415 break;
416 case EH_FILTER_EXPR:
417 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
418 break;
420 case STATEMENT_LIST:
421 gcc_unreachable ();
423 default:
424 /* These won't have gotos in them. */
425 break;
428 tsi_next (tsi);
431 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
433 static void
434 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
436 tree_stmt_iterator i = tsi_start (t);
437 while (!tsi_end_p (i))
438 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
441 /* Replace all goto queue members. */
443 static void
444 replace_goto_queue (struct leh_tf_state *tf)
446 if (tf->goto_queue_active == 0)
447 return;
448 replace_goto_queue_stmt_list (*tf->top_p, tf);
451 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
452 node, and if so record that fact in the goto queue associated with that
453 try_finally node. */
455 static void
456 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
458 struct leh_tf_state *tf = state->tf;
459 struct goto_queue_node *q;
460 size_t active, size;
461 int index;
463 if (!tf)
464 return;
466 switch (TREE_CODE (stmt))
468 case GOTO_EXPR:
470 tree lab = GOTO_DESTINATION (stmt);
472 /* Computed and non-local gotos do not get processed. Given
473 their nature we can neither tell whether we've escaped the
474 finally block nor redirect them if we knew. */
475 if (TREE_CODE (lab) != LABEL_DECL)
476 return;
478 /* No need to record gotos that don't leave the try block. */
479 if (! outside_finally_tree (lab, tf->try_finally_expr))
480 return;
482 if (! tf->dest_array)
484 VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
485 VARRAY_PUSH_TREE (tf->dest_array, lab);
486 index = 0;
488 else
490 int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
491 for (index = 0; index < n; ++index)
492 if (VARRAY_TREE (tf->dest_array, index) == lab)
493 break;
494 if (index == n)
495 VARRAY_PUSH_TREE (tf->dest_array, lab);
498 break;
500 case RETURN_EXPR:
501 tf->may_return = true;
502 index = -1;
503 break;
505 default:
506 gcc_unreachable ();
509 active = tf->goto_queue_active;
510 size = tf->goto_queue_size;
511 if (active >= size)
513 size = (size ? size * 2 : 32);
514 tf->goto_queue_size = size;
515 tf->goto_queue
516 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
519 q = &tf->goto_queue[active];
520 tf->goto_queue_active = active + 1;
522 memset (q, 0, sizeof (*q));
523 q->stmt = stmt;
524 q->index = index;
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
555 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
556 whatever is needed to finish the return. If MOD is non-null, insert it
557 before the new branch. RETURN_VALUE_P is a cache containing a temporary
558 variable to be used in manipulating the value returned from the function. */
560 static void
561 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
562 tree *return_value_p)
564 tree ret_expr = TREE_OPERAND (q->stmt, 0);
565 tree x;
567 if (ret_expr)
569 /* The nasty part about redirecting the return value is that the
570 return value itself is to be computed before the FINALLY block
571 is executed. e.g.
573 int x;
574 int foo (void)
576 x = 0;
577 try {
578 return x;
579 } finally {
580 x++;
584 should return 0, not 1. Arrange for this to happen by copying
585 computed the return value into a local temporary. This also
586 allows us to redirect multiple return statements through the
587 same destination block; whether this is a net win or not really
588 depends, I guess, but it does make generation of the switch in
589 lower_try_finally_switch easier. */
591 switch (TREE_CODE (ret_expr))
593 case RESULT_DECL:
594 if (!*return_value_p)
595 *return_value_p = ret_expr;
596 else
597 gcc_assert (*return_value_p == ret_expr);
598 q->cont_stmt = q->stmt;
599 break;
601 case MODIFY_EXPR:
603 tree result = TREE_OPERAND (ret_expr, 0);
604 tree new, old = TREE_OPERAND (ret_expr, 1);
606 if (!*return_value_p)
608 if (aggregate_value_p (TREE_TYPE (result),
609 TREE_TYPE (current_function_decl)))
610 /* If this function returns in memory, copy the argument
611 into the return slot now. Otherwise, we might need to
612 worry about magic return semantics, so we need to use a
613 temporary to hold the value until we're actually ready
614 to return. */
615 new = result;
616 else
617 new = create_tmp_var (TREE_TYPE (old), "rettmp");
618 *return_value_p = new;
620 else
621 new = *return_value_p;
623 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
624 append_to_statement_list (x, &q->repl_stmt);
626 if (new == result)
627 x = result;
628 else
629 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
630 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
633 default:
634 gcc_unreachable ();
637 else
639 /* If we don't return a value, all return statements are the same. */
640 q->cont_stmt = q->stmt;
643 if (mod)
644 append_to_statement_list (mod, &q->repl_stmt);
646 x = build1 (GOTO_EXPR, void_type_node, finlab);
647 append_to_statement_list (x, &q->repl_stmt);
650 /* Similar, but easier, for GOTO_EXPR. */
652 static void
653 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
655 tree x;
657 q->cont_stmt = q->stmt;
658 if (mod)
659 append_to_statement_list (mod, &q->repl_stmt);
661 x = build1 (GOTO_EXPR, void_type_node, finlab);
662 append_to_statement_list (x, &q->repl_stmt);
665 /* We want to transform
666 try { body; } catch { stuff; }
668 body; goto over; lab: stuff; over:
670 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
671 should be placed before the second operand, or NULL. OVER is
672 an existing label that should be put at the exit, or NULL. */
674 static void
675 frob_into_branch_around (tree *tp, tree lab, tree over)
677 tree x, op1;
679 op1 = TREE_OPERAND (*tp, 1);
680 *tp = TREE_OPERAND (*tp, 0);
682 if (block_may_fallthru (*tp))
684 if (!over)
685 over = create_artificial_label ();
686 x = build1 (GOTO_EXPR, void_type_node, over);
687 append_to_statement_list (x, tp);
690 if (lab)
692 x = build1 (LABEL_EXPR, void_type_node, lab);
693 append_to_statement_list (x, tp);
696 append_to_statement_list (op1, tp);
698 if (over)
700 x = build1 (LABEL_EXPR, void_type_node, over);
701 append_to_statement_list (x, tp);
705 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
706 Make sure to record all new labels found. */
708 static tree
709 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
711 tree region = NULL;
713 t = unsave_expr_now (t);
715 if (outer_state->tf)
716 region = outer_state->tf->try_finally_expr;
717 collect_finally_tree (t, region);
719 return t;
722 /* A subroutine of lower_try_finally. Create a fallthru label for
723 the given try_finally state. The only tricky bit here is that
724 we have to make sure to record the label in our outer context. */
726 static tree
727 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
729 tree label = tf->fallthru_label;
730 if (!label)
732 label = create_artificial_label ();
733 tf->fallthru_label = label;
734 if (tf->outer->tf)
735 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
737 return label;
740 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
741 returns non-null, then the language requires that the exception path out
742 of a try_finally be treated specially. To wit: the code within the
743 finally block may not itself throw an exception. We have two choices here.
744 First we can duplicate the finally block and wrap it in a must_not_throw
745 region. Second, we can generate code like
747 try {
748 finally_block;
749 } catch {
750 if (fintmp == eh_edge)
751 protect_cleanup_actions;
754 where "fintmp" is the temporary used in the switch statement generation
755 alternative considered below. For the nonce, we always choose the first
756 option.
758 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
760 static void
761 honor_protect_cleanup_actions (struct leh_state *outer_state,
762 struct leh_state *this_state,
763 struct leh_tf_state *tf)
765 tree protect_cleanup_actions, finally, x;
766 tree_stmt_iterator i;
767 bool finally_may_fallthru;
769 /* First check for nothing to do. */
770 if (lang_protect_cleanup_actions)
771 protect_cleanup_actions = lang_protect_cleanup_actions ();
772 else
773 protect_cleanup_actions = NULL;
775 finally = TREE_OPERAND (*tf->top_p, 1);
777 /* If the EH case of the finally block can fall through, this may be a
778 structure of the form
779 try {
780 try {
781 throw ...;
782 } cleanup {
783 try {
784 throw ...;
785 } catch (...) {
788 } catch (...) {
789 yyy;
791 E.g. with an inline destructor with an embedded try block. In this
792 case we must save the runtime EH data around the nested exception.
794 This complication means that any time the previous runtime data might
795 be used (via fallthru from the finally) we handle the eh case here,
796 whether or not protect_cleanup_actions is active. */
798 finally_may_fallthru = block_may_fallthru (finally);
799 if (!finally_may_fallthru && !protect_cleanup_actions)
800 return;
802 /* Duplicate the FINALLY block. Only need to do this for try-finally,
803 and not for cleanups. */
804 if (this_state)
805 finally = lower_try_finally_dup_block (finally, outer_state);
807 /* Resume execution after the exception. Adding this now lets
808 lower_eh_filter not add unnecessary gotos, as it is clear that
809 we never fallthru from this copy of the finally block. */
810 if (finally_may_fallthru)
812 tree save_eptr, save_filt;
814 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
815 save_filt = create_tmp_var (integer_type_node, "save_filt");
817 i = tsi_start (finally);
818 x = build (EXC_PTR_EXPR, ptr_type_node);
819 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
820 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
822 x = build (FILTER_EXPR, integer_type_node);
823 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
824 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
826 i = tsi_last (finally);
827 x = build (EXC_PTR_EXPR, ptr_type_node);
828 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
829 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
831 x = build (FILTER_EXPR, integer_type_node);
832 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
833 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
835 x = build1 (RESX_EXPR, void_type_node,
836 build_int_cst (NULL_TREE,
837 get_eh_region_number (tf->region)));
838 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
841 /* Wrap the block with protect_cleanup_actions as the action. */
842 if (protect_cleanup_actions)
844 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
845 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
846 EH_FILTER_MUST_NOT_THROW (x) = 1;
847 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
848 lower_eh_filter (outer_state, &finally);
850 else
851 lower_eh_constructs_1 (outer_state, &finally);
853 /* Hook this up to the end of the existing try block. If we
854 previously fell through the end, we'll have to branch around.
855 This means adding a new goto, and adding it to the queue. */
857 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
859 if (tf->may_fallthru)
861 x = lower_try_finally_fallthru_label (tf);
862 x = build1 (GOTO_EXPR, void_type_node, x);
863 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
865 if (this_state)
866 maybe_record_in_goto_queue (this_state, x);
868 tf->may_fallthru = false;
871 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
872 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
873 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
875 /* Having now been handled, EH isn't to be considered with
876 the rest of the outgoing edges. */
877 tf->may_throw = false;
880 /* A subroutine of lower_try_finally. We have determined that there is
881 no fallthru edge out of the finally block. This means that there is
882 no outgoing edge corresponding to any incoming edge. Restructure the
883 try_finally node for this special case. */
885 static void
886 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
888 tree x, finally, lab, return_val;
889 struct goto_queue_node *q, *qe;
891 if (tf->may_throw)
892 lab = tf->eh_label;
893 else
894 lab = create_artificial_label ();
896 finally = TREE_OPERAND (*tf->top_p, 1);
897 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
899 x = build1 (LABEL_EXPR, void_type_node, lab);
900 append_to_statement_list (x, tf->top_p);
902 return_val = NULL;
903 q = tf->goto_queue;
904 qe = q + tf->goto_queue_active;
905 for (; q < qe; ++q)
906 if (q->index < 0)
907 do_return_redirection (q, lab, NULL, &return_val);
908 else
909 do_goto_redirection (q, lab, NULL);
911 replace_goto_queue (tf);
913 lower_eh_constructs_1 (state, &finally);
914 append_to_statement_list (finally, tf->top_p);
917 /* A subroutine of lower_try_finally. We have determined that there is
918 exactly one destination of the finally block. Restructure the
919 try_finally node for this special case. */
921 static void
922 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
924 struct goto_queue_node *q, *qe;
925 tree x, finally, finally_label;
927 finally = TREE_OPERAND (*tf->top_p, 1);
928 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
930 lower_eh_constructs_1 (state, &finally);
932 if (tf->may_throw)
934 /* Only reachable via the exception edge. Add the given label to
935 the head of the FINALLY block. Append a RESX at the end. */
937 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
938 append_to_statement_list (x, tf->top_p);
940 append_to_statement_list (finally, tf->top_p);
942 x = build1 (RESX_EXPR, void_type_node,
943 build_int_cst (NULL_TREE,
944 get_eh_region_number (tf->region)));
945 append_to_statement_list (x, tf->top_p);
947 return;
950 if (tf->may_fallthru)
952 /* Only reachable via the fallthru edge. Do nothing but let
953 the two blocks run together; we'll fall out the bottom. */
954 append_to_statement_list (finally, tf->top_p);
955 return;
958 finally_label = create_artificial_label ();
959 x = build1 (LABEL_EXPR, void_type_node, finally_label);
960 append_to_statement_list (x, tf->top_p);
962 append_to_statement_list (finally, tf->top_p);
964 q = tf->goto_queue;
965 qe = q + tf->goto_queue_active;
967 if (tf->may_return)
969 /* Reachable by return expressions only. Redirect them. */
970 tree return_val = NULL;
971 for (; q < qe; ++q)
972 do_return_redirection (q, finally_label, NULL, &return_val);
973 replace_goto_queue (tf);
975 else
977 /* Reachable by goto expressions only. Redirect them. */
978 for (; q < qe; ++q)
979 do_goto_redirection (q, finally_label, NULL);
980 replace_goto_queue (tf);
982 if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
984 /* Reachable by goto to fallthru label only. Redirect it
985 to the new label (already created, sadly), and do not
986 emit the final branch out, or the fallthru label. */
987 tf->fallthru_label = NULL;
988 return;
992 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
993 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
996 /* A subroutine of lower_try_finally. There are multiple edges incoming
997 and outgoing from the finally block. Implement this by duplicating the
998 finally block for every destination. */
1000 static void
1001 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1003 tree finally, new_stmt;
1004 tree x;
1006 finally = TREE_OPERAND (*tf->top_p, 1);
1007 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1009 new_stmt = NULL_TREE;
1011 if (tf->may_fallthru)
1013 x = lower_try_finally_dup_block (finally, state);
1014 lower_eh_constructs_1 (state, &x);
1015 append_to_statement_list (x, &new_stmt);
1017 x = lower_try_finally_fallthru_label (tf);
1018 x = build1 (GOTO_EXPR, void_type_node, x);
1019 append_to_statement_list (x, &new_stmt);
1022 if (tf->may_throw)
1024 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1025 append_to_statement_list (x, &new_stmt);
1027 x = lower_try_finally_dup_block (finally, state);
1028 lower_eh_constructs_1 (state, &x);
1029 append_to_statement_list (x, &new_stmt);
1031 x = build1 (RESX_EXPR, void_type_node,
1032 build_int_cst (NULL_TREE,
1033 get_eh_region_number (tf->region)));
1034 append_to_statement_list (x, &new_stmt);
1037 if (tf->goto_queue)
1039 struct goto_queue_node *q, *qe;
1040 tree return_val = NULL;
1041 int return_index;
1042 tree *labels;
1044 if (tf->dest_array)
1045 return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
1046 else
1047 return_index = 0;
1048 labels = xcalloc (sizeof (tree), return_index + 1);
1050 q = tf->goto_queue;
1051 qe = q + tf->goto_queue_active;
1052 for (; q < qe; q++)
1054 int index = q->index < 0 ? return_index : q->index;
1055 tree lab = labels[index];
1056 bool build_p = false;
1058 if (!lab)
1060 labels[index] = lab = create_artificial_label ();
1061 build_p = true;
1064 if (index == return_index)
1065 do_return_redirection (q, lab, NULL, &return_val);
1066 else
1067 do_goto_redirection (q, lab, NULL);
1069 if (build_p)
1071 x = build1 (LABEL_EXPR, void_type_node, lab);
1072 append_to_statement_list (x, &new_stmt);
1074 x = lower_try_finally_dup_block (finally, state);
1075 lower_eh_constructs_1 (state, &x);
1076 append_to_statement_list (x, &new_stmt);
1078 append_to_statement_list (q->cont_stmt, &new_stmt);
1079 maybe_record_in_goto_queue (state, q->cont_stmt);
1082 replace_goto_queue (tf);
1083 free (labels);
1086 /* Need to link new stmts after running replace_goto_queue due
1087 to not wanting to process the same goto stmts twice. */
1088 append_to_statement_list (new_stmt, tf->top_p);
1091 /* A subroutine of lower_try_finally. There are multiple edges incoming
1092 and outgoing from the finally block. Implement this by instrumenting
1093 each incoming edge and creating a switch statement at the end of the
1094 finally block that branches to the appropriate destination. */
1096 static void
1097 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1099 struct goto_queue_node *q, *qe;
1100 tree return_val = NULL;
1101 tree finally, finally_tmp, finally_label;
1102 int return_index, eh_index, fallthru_index;
1103 int nlabels, ndests, j, last_case_index;
1104 tree case_label_vec, switch_stmt, last_case, switch_body;
1105 tree x;
1107 /* Mash the TRY block to the head of the chain. */
1108 finally = TREE_OPERAND (*tf->top_p, 1);
1109 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1111 /* Lower the finally block itself. */
1112 lower_eh_constructs_1 (state, &finally);
1114 /* Prepare for switch statement generation. */
1115 if (tf->dest_array)
1116 nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
1117 else
1118 nlabels = 0;
1119 return_index = nlabels;
1120 eh_index = return_index + tf->may_return;
1121 fallthru_index = eh_index + tf->may_throw;
1122 ndests = fallthru_index + tf->may_fallthru;
1124 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1125 finally_label = create_artificial_label ();
1127 case_label_vec = make_tree_vec (ndests);
1128 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1129 NULL_TREE, case_label_vec);
1130 switch_body = NULL;
1131 last_case = NULL;
1132 last_case_index = 0;
1134 /* Begin inserting code for getting to the finally block. Things
1135 are done in this order to correspond to the sequence the code is
1136 layed out. */
1138 if (tf->may_fallthru)
1140 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1141 build_int_cst (NULL_TREE, fallthru_index));
1142 append_to_statement_list (x, tf->top_p);
1144 if (tf->may_throw)
1146 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1147 append_to_statement_list (x, tf->top_p);
1151 last_case = build (CASE_LABEL_EXPR, void_type_node,
1152 build_int_cst (NULL_TREE, fallthru_index), NULL,
1153 create_artificial_label ());
1154 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1155 last_case_index++;
1157 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1158 append_to_statement_list (x, &switch_body);
1160 x = lower_try_finally_fallthru_label (tf);
1161 x = build1 (GOTO_EXPR, void_type_node, x);
1162 append_to_statement_list (x, &switch_body);
1165 if (tf->may_throw)
1167 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1168 append_to_statement_list (x, tf->top_p);
1170 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1171 build_int_cst (NULL_TREE, eh_index));
1172 append_to_statement_list (x, tf->top_p);
1174 last_case = build (CASE_LABEL_EXPR, void_type_node,
1175 build_int_cst (NULL_TREE, eh_index), NULL,
1176 create_artificial_label ());
1177 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1178 last_case_index++;
1180 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1181 append_to_statement_list (x, &switch_body);
1182 x = build1 (RESX_EXPR, void_type_node,
1183 build_int_cst (NULL_TREE,
1184 get_eh_region_number (tf->region)));
1185 append_to_statement_list (x, &switch_body);
1188 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1189 append_to_statement_list (x, tf->top_p);
1191 append_to_statement_list (finally, tf->top_p);
1193 /* Redirect each incoming goto edge. */
1194 q = tf->goto_queue;
1195 qe = q + tf->goto_queue_active;
1196 j = last_case_index + tf->may_return;
1197 last_case_index += nlabels;
1198 for (; q < qe; ++q)
1200 tree mod;
1201 int switch_id, case_index;
1203 if (q->index < 0)
1205 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1206 build_int_cst (NULL_TREE, return_index));
1207 do_return_redirection (q, finally_label, mod, &return_val);
1208 switch_id = return_index;
1210 else
1212 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1213 build_int_cst (NULL_TREE, q->index));
1214 do_goto_redirection (q, finally_label, mod);
1215 switch_id = q->index;
1218 case_index = j + q->index;
1219 if (!TREE_VEC_ELT (case_label_vec, case_index))
1221 last_case = build (CASE_LABEL_EXPR, void_type_node,
1222 build_int_cst (NULL_TREE, switch_id), NULL,
1223 create_artificial_label ());
1224 TREE_VEC_ELT (case_label_vec, case_index) = last_case;
1226 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1227 append_to_statement_list (x, &switch_body);
1228 append_to_statement_list (q->cont_stmt, &switch_body);
1229 maybe_record_in_goto_queue (state, q->cont_stmt);
1232 replace_goto_queue (tf);
1233 last_case_index += nlabels;
1235 /* Make sure that the last case is the default label, as one is required.
1236 Then sort the labels, which is also required in GIMPLE. */
1237 CASE_LOW (last_case) = NULL;
1238 sort_case_labels (case_label_vec);
1240 /* Need to link switch_stmt after running replace_goto_queue due
1241 to not wanting to process the same goto stmts twice. */
1242 append_to_statement_list (switch_stmt, tf->top_p);
1243 append_to_statement_list (switch_body, tf->top_p);
1246 /* Decide whether or not we are going to duplicate the finally block.
1247 There are several considerations.
1249 First, if this is Java, then the finally block contains code
1250 written by the user. It has line numbers associated with it,
1251 so duplicating the block means it's difficult to set a breakpoint.
1252 Since controlling code generation via -g is verboten, we simply
1253 never duplicate code without optimization.
1255 Second, we'd like to prevent egregious code growth. One way to
1256 do this is to estimate the size of the finally block, multiply
1257 that by the number of copies we'd need to make, and compare against
1258 the estimate of the size of the switch machinery we'd have to add. */
1260 static bool
1261 decide_copy_try_finally (int ndests, tree finally)
1263 int f_estimate, sw_estimate;
1265 if (!optimize)
1266 return false;
1268 /* Finally estimate N times, plus N gotos. */
1269 f_estimate = estimate_num_insns (finally);
1270 f_estimate = (f_estimate + 1) * ndests;
1272 /* Switch statement (cost 10), N variable assignments, N gotos. */
1273 sw_estimate = 10 + 2 * ndests;
1275 /* Optimize for size clearly wants our best guess. */
1276 if (optimize_size)
1277 return f_estimate < sw_estimate;
1279 /* ??? These numbers are completely made up so far. */
1280 if (optimize > 1)
1281 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1282 else
1283 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1286 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1287 to a sequence of labels and blocks, plus the exception region trees
1288 that record all the magic. This is complicated by the need to
1289 arrange for the FINALLY block to be executed on all exits. */
1291 static void
1292 lower_try_finally (struct leh_state *state, tree *tp)
1294 struct leh_tf_state this_tf;
1295 struct leh_state this_state;
1296 int ndests;
1298 /* Process the try block. */
1300 memset (&this_tf, 0, sizeof (this_tf));
1301 this_tf.try_finally_expr = *tp;
1302 this_tf.top_p = tp;
1303 this_tf.outer = state;
1304 if (using_eh_for_cleanups_p)
1305 this_tf.region
1306 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1307 else
1308 this_tf.region = NULL;
1310 this_state.cur_region = this_tf.region;
1311 this_state.prev_try = state->prev_try;
1312 this_state.tf = &this_tf;
1314 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1316 /* Determine if the try block is escaped through the bottom. */
1317 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1319 /* Determine if any exceptions are possible within the try block. */
1320 if (using_eh_for_cleanups_p)
1321 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1322 if (this_tf.may_throw)
1324 this_tf.eh_label = create_artificial_label ();
1325 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1326 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1329 /* Sort the goto queue for efficient searching later. */
1330 if (this_tf.goto_queue_active > 1)
1331 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1332 sizeof (struct goto_queue_node), goto_queue_cmp);
1334 /* Determine how many edges (still) reach the finally block. Or rather,
1335 how many destinations are reached by the finally block. Use this to
1336 determine how we process the finally block itself. */
1338 if (this_tf.dest_array)
1339 ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
1340 else
1341 ndests = 0;
1342 ndests += this_tf.may_fallthru;
1343 ndests += this_tf.may_return;
1344 ndests += this_tf.may_throw;
1346 /* If the FINALLY block is not reachable, dike it out. */
1347 if (ndests == 0)
1348 *tp = TREE_OPERAND (*tp, 0);
1350 /* If the finally block doesn't fall through, then any destination
1351 we might try to impose there isn't reached either. There may be
1352 some minor amount of cleanup and redirection still needed. */
1353 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1354 lower_try_finally_nofallthru (state, &this_tf);
1356 /* We can easily special-case redirection to a single destination. */
1357 else if (ndests == 1)
1358 lower_try_finally_onedest (state, &this_tf);
1360 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1361 lower_try_finally_copy (state, &this_tf);
1362 else
1363 lower_try_finally_switch (state, &this_tf);
1365 /* If someone requested we add a label at the end of the transformed
1366 block, do so. */
1367 if (this_tf.fallthru_label)
1369 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1370 append_to_statement_list (x, tp);
1373 if (this_tf.goto_queue)
1374 free (this_tf.goto_queue);
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  /* Wrap the try body in a fresh TRY region; that region also serves as
     prev_try for constructs nested inside the body.  */
  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing in the try body can throw, the whole construct reduces
     to the body itself.  */
  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Walk the handlers: give each its own catch region and label, lower
     its body, and splice the body into the statement list in place of
     the CATCH_EXPR node.  */
  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      /* Handler bodies are lowered with prev_try restored to the outer
	 value, not the new try region.  */
      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      /* If the handler can fall off its end, route it to a label past
	 the whole construct, created lazily on first use.  */
      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      /* Replace the CATCH_EXPR with its lowered body; tsi_delink also
	 advances the iterator to the next handler.  */
      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
1437 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1438 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1439 region trees that record all the magic. */
1441 static void
1442 lower_eh_filter (struct leh_state *state, tree *tp)
1444 struct leh_state this_state;
1445 struct eh_region *this_region;
1446 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1447 tree eh_label;
1449 if (EH_FILTER_MUST_NOT_THROW (inner))
1450 this_region = gen_eh_region_must_not_throw (state->cur_region);
1451 else
1452 this_region = gen_eh_region_allowed (state->cur_region,
1453 EH_FILTER_TYPES (inner));
1454 this_state = *state;
1455 this_state.cur_region = this_region;
1457 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1459 if (!get_eh_region_may_contain_throw (this_region))
1461 *tp = TREE_OPERAND (*tp, 0);
1462 return;
1465 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1466 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1468 eh_label = create_artificial_label ();
1469 set_eh_region_tree_label (this_region, eh_label);
1471 frob_into_branch_around (tp, eh_label, NULL);
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If the protected body cannot throw, the cleanup is unreachable and
     the construct reduces to the body itself.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  /* honor_protect_cleanup_actions signals via may_throw whether it
     consumed the cleanup; it also may have set fallthru_label.  */
  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */
      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main loop for lowering eh constructs.  Dispatches on the code of the
   statement at *TP, recursing into compound statements and rewriting
   the EH constructs in place.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  tree op;

	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* ??? For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  op = get_call_expr_in (t);
	  if (op)
	    record_stmt_eh_region (state->cur_region, op);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Gotos and returns leaving a try-finally must run the finally
	 block; queue them for later redirection.  */
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      /* The first statement of the handler operand tells which flavor
	 of TRY_CATCH this is.  */
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  /* Lowering may have replaced the statement with a nested
	     STATEMENT_LIST; flatten it into the current list so later
	     passes see a single level.  */
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
1631 static void
1632 lower_eh_constructs (void)
1634 struct leh_state null_state;
1635 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1637 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1638 throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
1639 ggc_free);
1641 collect_finally_tree (*tp, NULL);
1643 memset (&null_state, 0, sizeof (null_state));
1644 lower_eh_constructs_1 (&null_state, tp);
1646 htab_delete (finally_tree);
1648 collect_eh_region_array ();
/* Pass descriptor for EH lowering; requires lowered control flow
   (PROP_gimple_lcf) and provides lowered EH (PROP_gimple_leh).  */

struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
1669 /* Construct EH edges for STMT. */
1671 static void
1672 make_eh_edge (struct eh_region *region, void *data)
1674 tree stmt, lab;
1675 basic_block src, dst;
1677 stmt = data;
1678 lab = get_eh_region_tree_label (region);
1680 src = bb_for_stmt (stmt);
1681 dst = label_to_block (lab);
1683 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1686 void
1687 make_eh_edges (tree stmt)
1689 int region_nr;
1690 bool is_resx;
1692 if (TREE_CODE (stmt) == RESX_EXPR)
1694 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1695 is_resx = true;
1697 else
1699 region_nr = lookup_stmt_eh_region (stmt);
1700 if (region_nr < 0)
1701 return;
1702 is_resx = false;
1705 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1710 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1711 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1712 This routine expects only GIMPLE lhs or rhs input. */
1714 bool
1715 tree_could_trap_p (tree expr)
1717 enum tree_code code = TREE_CODE (expr);
1718 bool honor_nans = false;
1719 bool honor_snans = false;
1720 bool fp_operation = false;
1721 bool honor_trapv = false;
1722 tree t, base, idx;
1724 if (TREE_CODE_CLASS (code) == tcc_comparison
1725 || TREE_CODE_CLASS (code) == tcc_unary
1726 || TREE_CODE_CLASS (code) == tcc_binary)
1728 t = TREE_TYPE (expr);
1729 fp_operation = FLOAT_TYPE_P (t);
1730 if (fp_operation)
1732 honor_nans = flag_trapping_math && !flag_finite_math_only;
1733 honor_snans = flag_signaling_nans != 0;
1735 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
1736 honor_trapv = true;
1739 restart:
1740 switch (code)
1742 case COMPONENT_REF:
1743 case REALPART_EXPR:
1744 case IMAGPART_EXPR:
1745 case BIT_FIELD_REF:
1746 case WITH_SIZE_EXPR:
1747 expr = TREE_OPERAND (expr, 0);
1748 code = TREE_CODE (expr);
1749 goto restart;
1751 case ARRAY_RANGE_REF:
1752 /* Let us be conservative here for now. We might be checking bounds of
1753 the access similarly to the case below. */
1754 if (!TREE_THIS_NOTRAP (expr))
1755 return true;
1757 base = TREE_OPERAND (expr, 0);
1758 return tree_could_trap_p (base);
1760 case ARRAY_REF:
1761 base = TREE_OPERAND (expr, 0);
1762 idx = TREE_OPERAND (expr, 1);
1763 if (tree_could_trap_p (base))
1764 return true;
1766 if (TREE_THIS_NOTRAP (expr))
1767 return false;
1769 return !in_array_bounds_p (expr);
1771 case INDIRECT_REF:
1772 case ALIGN_INDIRECT_REF:
1773 case MISALIGNED_INDIRECT_REF:
1774 return !TREE_THIS_NOTRAP (expr);
1776 case ASM_EXPR:
1777 return TREE_THIS_VOLATILE (expr);
1779 case TRUNC_DIV_EXPR:
1780 case CEIL_DIV_EXPR:
1781 case FLOOR_DIV_EXPR:
1782 case ROUND_DIV_EXPR:
1783 case EXACT_DIV_EXPR:
1784 case CEIL_MOD_EXPR:
1785 case FLOOR_MOD_EXPR:
1786 case ROUND_MOD_EXPR:
1787 case TRUNC_MOD_EXPR:
1788 case RDIV_EXPR:
1789 if (honor_snans || honor_trapv)
1790 return true;
1791 if (fp_operation && flag_trapping_math)
1792 return true;
1793 t = TREE_OPERAND (expr, 1);
1794 if (!TREE_CONSTANT (t) || integer_zerop (t))
1795 return true;
1796 return false;
1798 case LT_EXPR:
1799 case LE_EXPR:
1800 case GT_EXPR:
1801 case GE_EXPR:
1802 case LTGT_EXPR:
1803 /* Some floating point comparisons may trap. */
1804 return honor_nans;
1806 case EQ_EXPR:
1807 case NE_EXPR:
1808 case UNORDERED_EXPR:
1809 case ORDERED_EXPR:
1810 case UNLT_EXPR:
1811 case UNLE_EXPR:
1812 case UNGT_EXPR:
1813 case UNGE_EXPR:
1814 case UNEQ_EXPR:
1815 return honor_snans;
1817 case CONVERT_EXPR:
1818 case FIX_TRUNC_EXPR:
1819 case FIX_CEIL_EXPR:
1820 case FIX_FLOOR_EXPR:
1821 case FIX_ROUND_EXPR:
1822 /* Conversion of floating point might trap. */
1823 return honor_nans;
1825 case NEGATE_EXPR:
1826 case ABS_EXPR:
1827 case CONJ_EXPR:
1828 /* These operations don't trap with floating point. */
1829 if (honor_trapv)
1830 return true;
1831 return false;
1833 case PLUS_EXPR:
1834 case MINUS_EXPR:
1835 case MULT_EXPR:
1836 /* Any floating arithmetic may trap. */
1837 if (fp_operation && flag_trapping_math)
1838 return true;
1839 if (honor_trapv)
1840 return true;
1841 return false;
1843 case CALL_EXPR:
1844 t = get_callee_fndecl (expr);
1845 /* Assume that calls to weak functions may trap. */
1846 if (!t || !DECL_P (t) || DECL_WEAK (t))
1847 return true;
1848 return false;
1850 default:
1851 /* Any floating arithmetic may trap. */
1852 if (fp_operation && flag_trapping_math)
1853 return true;
1854 return false;
1858 bool
1859 tree_could_throw_p (tree t)
1861 if (!flag_exceptions)
1862 return false;
1863 if (TREE_CODE (t) == MODIFY_EXPR)
1865 if (flag_non_call_exceptions
1866 && tree_could_trap_p (TREE_OPERAND (t, 0)))
1867 return true;
1868 t = TREE_OPERAND (t, 1);
1871 if (TREE_CODE (t) == WITH_SIZE_EXPR)
1872 t = TREE_OPERAND (t, 0);
1873 if (TREE_CODE (t) == CALL_EXPR)
1874 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
1875 if (flag_non_call_exceptions)
1876 return tree_could_trap_p (t);
1877 return false;
1880 bool
1881 tree_can_throw_internal (tree stmt)
1883 int region_nr = lookup_stmt_eh_region (stmt);
1884 if (region_nr < 0)
1885 return false;
1886 return can_throw_internal_1 (region_nr);
1889 bool
1890 tree_can_throw_external (tree stmt)
1892 int region_nr = lookup_stmt_eh_region (stmt);
1893 if (region_nr < 0)
1894 return false;
1895 return can_throw_external_1 (region_nr);
1898 bool
1899 maybe_clean_eh_stmt (tree stmt)
1901 if (!tree_could_throw_p (stmt))
1902 if (remove_stmt_from_eh_region (stmt))
1903 return true;
1904 return false;
1907 #include "gt-tree-eh.h"