Update my e-mail address for new employer.
[official-gcc.git] / gcc / tree-eh.c
blob37fce85eb29df39421c7160802d2543261ea1890
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39 #include "toplev.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Tell the EH lowering machinery that cleanups (e.g. destructors)
   should be run via the exception-handling mechanism.  Called once by
   the front end; the flag is only ever set, never cleared.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
51 /* Misc functions used in this file. */
53 /* Compare and hash for any structure which begins with a canonical
54 pointer. Assumes all pointers are interchangeable, which is sort
55 of already assumed by gcc elsewhere IIRC. */
/* Hash-table equality callback for entries that begin with a
   canonical pointer: two entries are equal exactly when their leading
   pointers are identical.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void *const *pa = (const void *const *) a;
  const void *const *pb = (const void *const *) b;

  return *pa == *pb;
}
/* Hash-table hash callback companion to struct_ptr_eq: hash on the
   leading canonical pointer.  The low bits are shifted away because
   heap pointers are aligned and would otherwise cluster buckets.  */

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Associate statement T with the EH region number of REGION in the
   throw-statement table.  A null REGION means T is not inside any
   EH region, so no entry is needed.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
/* Record in function IFUN's throw-statement table that statement T
   lies in EH region number NUM.  T must not already have an entry.  */

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  /* RESX_EXPRs carry their region number in their operand and are
     never entered in the table.  */
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  /* Create the table lazily; many functions never throw.  */
  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
/* Convenience wrapper: record statement T in EH region NUM of the
   current function (cfun).  */

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
/* Remove statement T from IFUN's throw-statement table, if present.
   Return true if an entry was found and removed, false otherwise
   (including when IFUN has no table at all).  */

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  /* Entries hash on their leading statement pointer, so a stack
     dummy with only STMT filled in is enough for the lookup.  */
  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
/* Convenience wrapper: remove statement T from the current function's
   (cfun's) throw-statement table.  Returns true if removed.  */

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
/* Look up the EH region number recorded for statement T in IFUN.
   Returns -2 when IFUN has no throw-statement table, -1 when T has
   no entry, otherwise the region number.  */

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
					    &n);

  return (p ? p->region_nr : -1);
}
/* Look up the EH region number for statement T in the current
   function; see lookup_stmt_eh_region_fn for the return values.  */

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* CHILD is a LABEL_DECL or nested TRY_FINALLY_EXPR; PARENT is the
     innermost TRY_FINALLY_EXPR enclosing it, or NULL at top level.  */
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
/* Enter the CHILD -> PARENT mapping into FINALLY_TREE.  CHILD must
   not already be present.  */

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
/* Walk statement T recording, for every label and TRY_FINALLY_EXPR
   found, the innermost enclosing TRY_FINALLY_EXPR (REGION, possibly
   NULL).  Tail calls are turned into gotos to bound stack use on deep
   statement chains.  */

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      /* The finally body belongs to the *outer* region, but is walked
	 via the tail-call loop below.  */
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  Returns true
   when walking up the parent chain from START escapes the tree before
   reaching TARGET.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
268 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
269 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
270 The eh region creation is straight-forward, but frobbing all the gotos
271 and such into shape isn't. */
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;		/* The escaping GOTO_EXPR or RETURN_EXPR.  */
    tree repl_stmt;	/* Statement list that replaces STMT.  */
    tree cont_stmt;	/* Statement continuing to the real target.  */
    int index;		/* Index into dest_array, or -1 for returns.  */
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
339 static void lower_eh_filter (struct leh_state *, tree *);
340 static void lower_eh_constructs_1 (struct leh_state *, tree *);
342 /* Comparison function for qsort/bsearch. We're interested in
343 searching goto queue elements for source statements. */
345 static int
346 goto_queue_cmp (const void *x, const void *y)
348 tree a = ((const struct goto_queue_node *)x)->stmt;
349 tree b = ((const struct goto_queue_node *)y)->stmt;
350 return (a == b ? 0 : a < b ? -1 : 1);
/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  The queue must
   already be sorted by goto_queue_cmp for bsearch to be valid.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
     bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
	      sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  /* A single-statement replacement that is itself a goto can be
     substituted directly into the COND_EXPR arm.  */
  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  /* Otherwise jump to a fresh label placed after the COND_EXPR, and
     link the replacement statements in behind it.  */
  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

/* Replace statement T with its goto-queue substitution, if any,
   recursing into compound statements.  TSI iterates the list that
   contains T and is always advanced past the handled statement.  */

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  /* Splice the replacement in and delete the original; TSI is
	     left on the statement after the deleted one.  */
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      /* Nested statement lists are flattened by the iterators; one
	 should never appear as an element here.  */
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  /* replace_goto_queue_1 advances I itself, so no tsi_next here.  */
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	/* INDEX is the position of LAB in dest_array; reuse an
	   existing slot so each unique label is recorded once.  */
	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      /* Returns are flagged with a negative index.  */
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  /* Grow the queue (doubling) if full, then append the new entry.  */
  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect the RETURN_EXPR in Q->stmt to FINLAB.  Place in Q->cont_stmt
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case GIMPLE_MODIFY_STMT:
	  {
	    tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
	    tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				      TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		     into the return slot now.  Otherwise, we might need to
		     worry about magic return semantics, so we need to use a
		     temporary to hold the value until we're actually ready
		     to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    /* Compute the value into the temporary before the branch...  */
	    x = build_gimple_modify_stmt (new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    /* ... and reload it into the result at the continuation.  */
	    if (new == result)
	      x = result;
	    else
	      x = build_gimple_modify_stmt (result, new);
	    q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Similar, but easier, for GOTO_EXPR: no return value to save, so the
   continuation is simply the original goto.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  /* If the body can fall off its end, branch around the handler.  */
  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  /* The copy's labels belong to the enclosing try_finally region,
     if any, so that later escape analysis stays correct.  */
  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
	record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      /* Save the live EH pointer and filter value on entry...  */
      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      /* ... restore them on exit, and resume the outer exception.  */
      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build_gimple_modify_stmt (x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build_gimple_modify_stmt (x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  /* Reuse the EH label when it exists so the exception edge and the
     redirected gotos/returns share one entry to the finally block.  */
  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  /* Redirect every queued goto/return to the single label.  */
  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Emit the single continuation statement after the finally body.  */
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      /* Fallthru copy: run the finally, then jump to the shared
	 fallthru label.  */
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      /* EH copy: labeled landing pad, finally body, then resume the
	 exception.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      /* One copy per unique destination; LABELS maps each destination
	 index (returns share slot RETURN_INDEX) to its first queue
	 entry and the label heading its copy.  */
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* Emit a labeled finally copy for each destination that has at
	 least one queue entry, redirecting that first entry to it.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Redirect all remaining entries to the copies made above.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
1122 /* A subroutine of lower_try_finally. There are multiple edges incoming
1123 and outgoing from the finally block. Implement this by instrumenting
1124 each incoming edge and creating a switch statement at the end of the
1125 finally block that branches to the appropriate destination. */
1127 static void
1128 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1130 struct goto_queue_node *q, *qe;
1131 tree return_val = NULL;
1132 tree finally, finally_tmp, finally_label;
1133 int return_index, eh_index, fallthru_index;
1134 int nlabels, ndests, j, last_case_index;
1135 tree case_label_vec, switch_stmt, last_case, switch_body;
1136 tree x;
1138 /* Mash the TRY block to the head of the chain. */
1139 finally = TREE_OPERAND (*tf->top_p, 1);
1140 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1142 /* Lower the finally block itself. */
1143 lower_eh_constructs_1 (state, &finally);
/* Assign one switch index per destination: the goto destinations come
   first (their dest_array slots), then an optional return slot, then an
   optional EH slot, then an optional fallthru slot. */
1145 /* Prepare for switch statement generation. */
1146 nlabels = VEC_length (tree, tf->dest_array);
1147 return_index = nlabels;
1148 eh_index = return_index + tf->may_return;
1149 fallthru_index = eh_index + tf->may_throw;
1150 ndests = fallthru_index + tf->may_fallthru;
/* FINALLY_TMP holds the destination selector set on each incoming edge;
   FINALLY_LABEL marks the start of the (single) finally copy. */
1152 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1153 finally_label = create_artificial_label ();
1155 case_label_vec = make_tree_vec (ndests);
1156 switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
1157 NULL_TREE, case_label_vec);
1158 switch_body = NULL;
1159 last_case = NULL;
1160 last_case_index = 0;
1162 /* Begin inserting code for getting to the finally block. Things
1163 are done in this order to correspond to the sequence the code is
1164 laid out. */
1166 if (tf->may_fallthru)
1168 x = build_gimple_modify_stmt (finally_tmp,
1169 build_int_cst (integer_type_node,
1170 fallthru_index));
1171 append_to_statement_list (x, tf->top_p);
1173 if (tf->may_throw)
1175 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1176 append_to_statement_list (x, tf->top_p);
/* Case for the fallthru edge: jump to the block's fallthru label.  */
1180 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1181 build_int_cst (NULL_TREE, fallthru_index), NULL,
1182 create_artificial_label ());
1183 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1184 last_case_index++;
1186 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1187 append_to_statement_list (x, &switch_body);
1189 x = lower_try_finally_fallthru_label (tf);
1190 x = build1 (GOTO_EXPR, void_type_node, x);
1191 append_to_statement_list (x, &switch_body);
/* Case for the EH edge: fall into the finally, and resume EH
   propagation (RESX) after it.  */
1194 if (tf->may_throw)
1196 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1197 append_to_statement_list (x, tf->top_p);
1199 x = build_gimple_modify_stmt (finally_tmp,
1200 build_int_cst (integer_type_node,
1201 eh_index));
1202 append_to_statement_list (x, tf->top_p);
1204 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1205 build_int_cst (NULL_TREE, eh_index), NULL,
1206 create_artificial_label ());
1207 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1208 last_case_index++;
1210 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1211 append_to_statement_list (x, &switch_body);
1212 x = build_resx (get_eh_region_number (tf->region));
1213 append_to_statement_list (x, &switch_body);
/* Emit the single copy of the finally block itself.  */
1216 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1217 append_to_statement_list (x, tf->top_p);
1219 append_to_statement_list (finally, tf->top_p);
1221 /* Redirect each incoming goto edge. */
1222 q = tf->goto_queue;
1223 qe = q + tf->goto_queue_active;
1224 j = last_case_index + tf->may_return;
/* NOTE(review): q->index < 0 marks a return edge; non-negative indices
   are goto destinations indexed into dest_array.  */
1225 for (; q < qe; ++q)
1227 tree mod;
1228 int switch_id, case_index;
1230 if (q->index < 0)
1232 mod = build_gimple_modify_stmt (finally_tmp,
1233 build_int_cst (integer_type_node,
1234 return_index));
1235 do_return_redirection (q, finally_label, mod, &return_val);
1236 switch_id = return_index;
1238 else
1240 mod = build_gimple_modify_stmt (finally_tmp,
1241 build_int_cst (integer_type_node,
1242 q->index));
1243 do_goto_redirection (q, finally_label, mod);
1244 switch_id = q->index;
1247 case_index = j + q->index;
1248 if (!TREE_VEC_ELT (case_label_vec, case_index))
1249 TREE_VEC_ELT (case_label_vec, case_index)
1250 = build3 (CASE_LABEL_EXPR, void_type_node,
1251 build_int_cst (NULL_TREE, switch_id), NULL,
1252 /* We store the cont_stmt in the
1253 CASE_LABEL, so that we can recover it
1254 in the loop below. We don't create
1255 the new label while walking the
1256 goto_queue because pointers don't
1257 offer a stable order. */
1258 q->cont_stmt);
/* Now create the real labels for the deferred cases and emit each
   continuation statement into the switch body.  */
1260 for (j = last_case_index; j < last_case_index + nlabels; j++)
1262 tree label;
1263 tree cont_stmt;
1265 last_case = TREE_VEC_ELT (case_label_vec, j);
1267 gcc_assert (last_case);
1269 cont_stmt = CASE_LABEL (last_case);
1271 label = create_artificial_label ();
1272 CASE_LABEL (last_case) = label;
1274 x = build1 (LABEL_EXPR, void_type_node, label);
1275 append_to_statement_list (x, &switch_body);
1276 append_to_statement_list (cont_stmt, &switch_body);
1277 maybe_record_in_goto_queue (state, cont_stmt);
1279 replace_goto_queue (tf);
1281 /* Make sure that the last case is the default label, as one is required.
1282 Then sort the labels, which is also required in GIMPLE. */
1283 CASE_LOW (last_case) = NULL;
1284 sort_case_labels (case_label_vec);
1286 /* Need to link switch_stmt after running replace_goto_queue due
1287 to not wanting to process the same goto stmts twice. */
1288 append_to_statement_list (switch_stmt, tf->top_p);
1289 append_to_statement_list (switch_body, tf->top_p);
1292 /* Decide whether or not we are going to duplicate the finally block.
1293 There are several considerations.
1295 First, if this is Java, then the finally block contains code
1296 written by the user. It has line numbers associated with it,
1297 so duplicating the block means it's difficult to set a breakpoint.
1298 Since controlling code generation via -g is verboten, we simply
1299 never duplicate code without optimization.
1301 Second, we'd like to prevent egregious code growth. One way to
1302 do this is to estimate the size of the finally block, multiply
1303 that by the number of copies we'd need to make, and compare against
1304 the estimate of the size of the switch machinery we'd have to add. */
1306 static bool
1307 decide_copy_try_finally (int ndests, tree finally)
1309 int f_estimate, sw_estimate;
1311 if (!optimize)
1312 return false;
1314 /* Finally estimate N times, plus N gotos. */
1315 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1316 f_estimate = (f_estimate + 1) * ndests;
1318 /* Switch statement (cost 10), N variable assignments, N gotos. */
1319 sw_estimate = 10 + 2 * ndests;
1321 /* Optimize for size clearly wants our best guess. */
1322 if (optimize_size)
1323 return f_estimate < sw_estimate;
1325 /* ??? These numbers are completely made up so far. */
1326 if (optimize > 1)
1327 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1328 else
1329 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1332 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1333 to a sequence of labels and blocks, plus the exception region trees
1334 that record all the magic. This is complicated by the need to
1335 arrange for the FINALLY block to be executed on all exits. */
/* Lower one TRY_FINALLY_EXPR at *TP: lower the try body, decide how
   many destinations the finally block must reach, and dispatch to the
   appropriate lowering strategy (delete / nofallthru / onedest / copy /
   switch).  */
1337 static void
1338 lower_try_finally (struct leh_state *state, tree *tp)
1340 struct leh_tf_state this_tf;
1341 struct leh_state this_state;
1342 int ndests;
1344 /* Process the try block. */
1346 memset (&this_tf, 0, sizeof (this_tf));
1347 this_tf.try_finally_expr = *tp;
1348 this_tf.top_p = tp;
1349 this_tf.outer = state;
/* Only create an EH cleanup region when EH is used for cleanups;
   otherwise the finally is reached by normal control flow only.  */
1350 if (using_eh_for_cleanups_p)
1351 this_tf.region
1352 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1353 else
1354 this_tf.region = NULL;
1356 this_state.cur_region = this_tf.region;
1357 this_state.prev_try = state->prev_try;
1358 this_state.tf = &this_tf;
1360 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1362 /* Determine if the try block is escaped through the bottom. */
1363 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1365 /* Determine if any exceptions are possible within the try block. */
1366 if (using_eh_for_cleanups_p)
1367 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1368 if (this_tf.may_throw)
1370 this_tf.eh_label = create_artificial_label ();
1371 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1372 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1375 /* Sort the goto queue for efficient searching later. */
1376 if (this_tf.goto_queue_active > 1)
1377 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1378 sizeof (struct goto_queue_node), goto_queue_cmp);
1380 /* Determine how many edges (still) reach the finally block. Or rather,
1381 how many destinations are reached by the finally block. Use this to
1382 determine how we process the finally block itself. */
1384 ndests = VEC_length (tree, this_tf.dest_array);
1385 ndests += this_tf.may_fallthru;
1386 ndests += this_tf.may_return;
1387 ndests += this_tf.may_throw;
1389 /* If the FINALLY block is not reachable, dike it out. */
1390 if (ndests == 0)
1391 *tp = TREE_OPERAND (*tp, 0);
1393 /* If the finally block doesn't fall through, then any destination
1394 we might try to impose there isn't reached either. There may be
1395 some minor amount of cleanup and redirection still needed. */
1396 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1397 lower_try_finally_nofallthru (state, &this_tf);
1399 /* We can easily special-case redirection to a single destination. */
1400 else if (ndests == 1)
1401 lower_try_finally_onedest (state, &this_tf);
1403 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1404 lower_try_finally_copy (state, &this_tf);
1405 else
1406 lower_try_finally_switch (state, &this_tf);
1408 /* If someone requested we add a label at the end of the transformed
1409 block, do so. */
1410 if (this_tf.fallthru_label)
1412 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1413 append_to_statement_list (x, tp);
/* Release the per-try-finally scratch state.  */
1416 VEC_free (tree, heap, this_tf.dest_array);
1417 if (this_tf.goto_queue)
1418 free (this_tf.goto_queue);
1421 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1422 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1423 exception region trees that record all the magic. */
/* Lower a TRY_CATCH_EXPR at *TP whose handler is a list of CATCH_EXPRs:
   create a try region, one catch region per handler, splice each
   handler body (prefixed by its EH label) in place of the CATCH_EXPR,
   and branch around the handlers on the normal path.  */
1425 static void
1426 lower_catch (struct leh_state *state, tree *tp)
1428 struct eh_region *try_region;
1429 struct leh_state this_state;
1430 tree_stmt_iterator i;
1431 tree out_label;
1433 try_region = gen_eh_region_try (state->cur_region);
1434 this_state.cur_region = try_region;
1435 this_state.prev_try = try_region;
1436 this_state.tf = state->tf;
1438 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If nothing in the try body can throw, the whole construct reduces
   to the body alone.  */
1440 if (!get_eh_region_may_contain_throw (try_region))
1442 *tp = TREE_OPERAND (*tp, 0);
1443 return;
/* OUT_LABEL is created lazily, only if some handler falls through and
   needs a place to jump past the remaining handlers.  */
1446 out_label = NULL;
1447 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1449 struct eh_region *catch_region;
1450 tree catch, x, eh_label;
1452 catch = tsi_stmt (i);
1453 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1455 this_state.cur_region = catch_region;
1456 this_state.prev_try = state->prev_try;
1457 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1459 eh_label = create_artificial_label ();
1460 set_eh_region_tree_label (catch_region, eh_label);
1462 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1463 tsi_link_before (&i, x, TSI_SAME_STMT);
1465 if (block_may_fallthru (CATCH_BODY (catch)))
1467 if (!out_label)
1468 out_label = create_artificial_label ();
1470 x = build1 (GOTO_EXPR, void_type_node, out_label);
1471 append_to_statement_list (x, &CATCH_BODY (catch));
/* Replace the CATCH_EXPR node by its (now lowered) body.  */
1474 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1475 tsi_delink (&i);
1478 frob_into_branch_around (tp, NULL, out_label);
1481 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1482 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1483 region trees that record all the magic. */
/* Lower a TRY_CATCH_EXPR at *TP whose handler is an EH_FILTER_EXPR:
   create a must-not-throw or type-allowed region and branch around the
   filter's failure actions on the normal path.  */
1485 static void
1486 lower_eh_filter (struct leh_state *state, tree *tp)
1488 struct leh_state this_state;
1489 struct eh_region *this_region;
1490 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1491 tree eh_label;
1493 if (EH_FILTER_MUST_NOT_THROW (inner))
1494 this_region = gen_eh_region_must_not_throw (state->cur_region);
1495 else
1496 this_region = gen_eh_region_allowed (state->cur_region,
1497 EH_FILTER_TYPES (inner));
1498 this_state = *state;
1499 this_state.cur_region = this_region;
1500 /* For must not throw regions any cleanup regions inside it
1501 can't reach outer catch regions. */
1502 if (EH_FILTER_MUST_NOT_THROW (inner))
1503 this_state.prev_try = NULL;
1505 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the protected body cannot throw, drop the filter entirely.  */
1507 if (!get_eh_region_may_contain_throw (this_region))
1509 *tp = TREE_OPERAND (*tp, 0);
1510 return;
/* Lower the failure actions in the OUTER state and make them the
   handler operand directly.  */
1513 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1514 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1516 eh_label = create_artificial_label ();
1517 set_eh_region_tree_label (this_region, eh_label);
1519 frob_into_branch_around (tp, eh_label, NULL);
1522 /* Implement a cleanup expression. This is similar to try-finally,
1523 except that we only execute the cleanup block for exception edges. */
/* Lower a TRY_CATCH_EXPR at *TP used as an exception-only cleanup:
   the cleanup runs only on EH edges, unlike try-finally.  */
1525 static void
1526 lower_cleanup (struct leh_state *state, tree *tp)
1528 struct leh_state this_state;
1529 struct eh_region *this_region;
1530 struct leh_tf_state fake_tf;
1532 /* If not using eh, then exception-only cleanups are no-ops. */
1533 if (!flag_exceptions)
1535 *tp = TREE_OPERAND (*tp, 0);
1536 lower_eh_constructs_1 (state, tp);
1537 return;
1540 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1541 this_state = *state;
1542 this_state.cur_region = this_region;
1544 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the protected body cannot throw, the cleanup is unreachable.  */
1546 if (!get_eh_region_may_contain_throw (this_region))
1548 *tp = TREE_OPERAND (*tp, 0);
1549 return;
1552 /* Build enough of a try-finally state so that we can reuse
1553 honor_protect_cleanup_actions. */
1554 memset (&fake_tf, 0, sizeof (fake_tf));
1555 fake_tf.top_p = tp;
1556 fake_tf.outer = state;
1557 fake_tf.region = this_region;
1558 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1559 fake_tf.may_throw = true;
1561 fake_tf.eh_label = create_artificial_label ();
1562 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1564 honor_protect_cleanup_actions (state, NULL, &fake_tf);
/* honor_protect_cleanup_actions clears may_throw when it has fully
   handled the cleanup itself.  */
1566 if (fake_tf.may_throw)
1568 /* In this case honor_protect_cleanup_actions had nothing to do,
1569 and we should process this normally. */
1570 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1571 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1573 else
1575 /* In this case honor_protect_cleanup_actions did nearly all of
1576 the work. All we have left is to append the fallthru_label. */
1578 *tp = TREE_OPERAND (*tp, 0);
1579 if (fake_tf.fallthru_label)
1581 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1582 append_to_statement_list (x, tp);
1587 /* Main loop for lowering eh constructs. */
/* Recursively lower all EH constructs reachable from *TP, dispatching
   on the tree code of the statement at *TP.  */
1589 static void
1590 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1592 tree_stmt_iterator i;
1593 tree t = *tp;
1595 switch (TREE_CODE (t))
1597 case COND_EXPR:
1598 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1599 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1600 break;
1602 case CALL_EXPR:
1603 /* Look for things that can throw exceptions, and record them. */
1604 if (state->cur_region && tree_could_throw_p (t))
1606 record_stmt_eh_region (state->cur_region, t);
1607 note_eh_region_may_contain_throw (state->cur_region);
1609 break;
1611 case GIMPLE_MODIFY_STMT:
1612 /* Look for things that can throw exceptions, and record them. */
1613 if (state->cur_region && tree_could_throw_p (t))
1615 record_stmt_eh_region (state->cur_region, t);
1616 note_eh_region_may_contain_throw (state->cur_region);
1618 break;
1620 case GOTO_EXPR:
1621 case RETURN_EXPR:
1622 maybe_record_in_goto_queue (state, t);
1623 break;
1624 case SWITCH_EXPR:
1625 verify_norecord_switch_expr (state, t);
1626 break;
1628 case TRY_FINALLY_EXPR:
1629 lower_try_finally (state, tp);
1630 break;
/* Distinguish catch handlers, EH filters, and plain cleanups by the
   first statement of the handler operand.  */
1632 case TRY_CATCH_EXPR:
1633 i = tsi_start (TREE_OPERAND (t, 1));
1634 switch (TREE_CODE (tsi_stmt (i)))
1636 case CATCH_EXPR:
1637 lower_catch (state, tp);
1638 break;
1639 case EH_FILTER_EXPR:
1640 lower_eh_filter (state, tp);
1641 break;
1642 default:
1643 lower_cleanup (state, tp);
1644 break;
1646 break;
1648 case STATEMENT_LIST:
1649 for (i = tsi_start (t); !tsi_end_p (i); )
1651 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1652 t = tsi_stmt (i);
/* Lowering may have replaced the statement by a statement list;
   splice its members inline rather than leaving a nested list.  */
1653 if (TREE_CODE (t) == STATEMENT_LIST)
1655 tsi_link_before (&i, t, TSI_SAME_STMT);
1656 tsi_delink (&i);
1658 else
1659 tsi_next (&i);
1661 break;
1663 default:
1664 /* A type, a decl, or some kind of statement that we're not
1665 interested in. Don't walk them. */
1666 break;
1670 static unsigned int
1671 lower_eh_constructs (void)
1673 struct leh_state null_state;
1674 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1676 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1678 collect_finally_tree (*tp, NULL);
1680 memset (&null_state, 0, sizeof (null_state));
1681 lower_eh_constructs_1 (&null_state, tp);
1683 htab_delete (finally_tree);
1685 collect_eh_region_array ();
1686 return 0;
/* Pass descriptor for EH lowering; requires lowered control flow
   (PROP_gimple_lcf) and provides lowered EH (PROP_gimple_leh).  */
1689 struct tree_opt_pass pass_lower_eh =
1691 "eh", /* name */
1692 NULL, /* gate */
1693 lower_eh_constructs, /* execute */
1694 NULL, /* sub */
1695 NULL, /* next */
1696 0, /* static_pass_number */
1697 TV_TREE_EH, /* tv_id */
1698 PROP_gimple_lcf, /* properties_required */
1699 PROP_gimple_leh, /* properties_provided */
1700 0, /* properties_destroyed */
1701 0, /* todo_flags_start */
1702 TODO_dump_func, /* todo_flags_finish */
1703 0 /* letter */
1707 /* Construct EH edges for STMT. */
1709 static void
1710 make_eh_edge (struct eh_region *region, void *data)
1712 tree stmt, lab;
1713 basic_block src, dst;
1715 stmt = (tree) data;
1716 lab = get_eh_region_tree_label (region);
1718 src = bb_for_stmt (stmt);
1719 dst = label_to_block (lab);
1721 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1724 void
1725 make_eh_edges (tree stmt)
1727 int region_nr;
1728 bool is_resx;
1730 if (TREE_CODE (stmt) == RESX_EXPR)
1732 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1733 is_resx = true;
1735 else
1737 region_nr = lookup_stmt_eh_region (stmt);
1738 if (region_nr < 0)
1739 return;
1740 is_resx = false;
1743 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1746 static bool mark_eh_edge_found_error;
1748 /* Mark edge make_eh_edge would create for given region by setting it aux
1749 field, output error if something goes wrong. */
/* Callback for foreach_reachable_handler used by verify_eh_edges:
   check that the edge make_eh_edge would create already exists and is
   flagged EDGE_EH, marking it via e->aux; report and record any
   discrepancy in mark_eh_edge_found_error.  */
1750 static void
1751 mark_eh_edge (struct eh_region *region, void *data)
1753 tree stmt, lab;
1754 basic_block src, dst;
1755 edge e;
1757 stmt = (tree) data;
1758 lab = get_eh_region_tree_label (region);
1760 src = bb_for_stmt (stmt);
1761 dst = label_to_block (lab);
1763 e = find_edge (src, dst);
1764 if (!e)
1766 error ("EH edge %i->%i is missing", src->index, dst->index);
1767 mark_eh_edge_found_error = true;
1769 else if (!(e->flags & EDGE_EH))
1771 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1772 mark_eh_edge_found_error = true;
1774 else if (e->aux)
1776 /* ??? might not be mistake. */
1777 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1778 mark_eh_edge_found_error = true;
1780 else
/* Mark the edge as accounted for; verify_eh_edges clears aux later.  */
1781 e->aux = (void *)1;
1784 /* Verify that BB containing stmt as last stmt has precisely the edges
1785 make_eh_edges would create. */
/* Verify that the basic block ending in STMT has exactly the EH edges
   make_eh_edges would create.  Returns true if an error was found.  */
1786 bool
1787 verify_eh_edges (tree stmt)
1789 int region_nr;
1790 bool is_resx;
1791 basic_block bb = bb_for_stmt (stmt);
1792 edge_iterator ei;
1793 edge e;
/* mark_eh_edge uses e->aux as its "seen" flag; it must start clear.  */
1795 FOR_EACH_EDGE (e, ei, bb->succs)
1796 gcc_assert (!e->aux);
1797 mark_eh_edge_found_error = false;
1798 if (TREE_CODE (stmt) == RESX_EXPR)
1800 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1801 is_resx = true;
1803 else
1805 region_nr = lookup_stmt_eh_region (stmt);
/* Not in the EH table: the block must have no EH successors.  */
1806 if (region_nr < 0)
1808 FOR_EACH_EDGE (e, ei, bb->succs)
1809 if (e->flags & EDGE_EH)
1811 error ("BB %i can not throw but has EH edges", bb->index);
1812 return true;
1814 return false;
/* In the table but cannot actually throw: stale region entry.  */
1816 if (!tree_could_throw_p (stmt))
1818 error ("BB %i last statement has incorrectly set region", bb->index);
1819 return true;
1821 is_resx = false;
/* Mark the expected edges, then complain about any EH edge that was
   not marked; clear the aux marks as we go.  */
1824 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1825 FOR_EACH_EDGE (e, ei, bb->succs)
1827 if ((e->flags & EDGE_EH) && !e->aux)
1829 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1830 mark_eh_edge_found_error = true;
1831 return true;
1833 e->aux = NULL;
1835 return mark_eh_edge_found_error;
1839 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1840 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1841 This routine expects only GIMPLE lhs or rhs input. */
/* Return true if EXPR could trap at runtime (invalid memory access,
   division by zero, trapping FP or -ftrapv arithmetic, etc.).  */
1843 bool
1844 tree_could_trap_p (tree expr)
1846 enum tree_code code = TREE_CODE (expr);
1847 bool honor_nans = false;
1848 bool honor_snans = false;
1849 bool fp_operation = false;
1850 bool honor_trapv = false;
1851 tree t, base;
/* For arithmetic and comparisons, derive the trap-relevant properties
   from the expression's type and the active FP/overflow flags.  */
1853 if (TREE_CODE_CLASS (code) == tcc_comparison
1854 || TREE_CODE_CLASS (code) == tcc_unary
1855 || TREE_CODE_CLASS (code) == tcc_binary)
1857 t = TREE_TYPE (expr);
1858 fp_operation = FLOAT_TYPE_P (t);
1859 if (fp_operation)
1861 honor_nans = flag_trapping_math && !flag_finite_math_only;
1862 honor_snans = flag_signaling_nans != 0;
1864 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
1865 honor_trapv = true;
1868 restart:
1869 switch (code)
1871 case TARGET_MEM_REF:
1872 /* For TARGET_MEM_REFs use the information based on the original
1873 reference. */
1874 expr = TMR_ORIGINAL (expr);
1875 code = TREE_CODE (expr);
1876 goto restart;
/* These wrappers trap iff the reference they wrap traps.  */
1878 case COMPONENT_REF:
1879 case REALPART_EXPR:
1880 case IMAGPART_EXPR:
1881 case BIT_FIELD_REF:
1882 case VIEW_CONVERT_EXPR:
1883 case WITH_SIZE_EXPR:
1884 expr = TREE_OPERAND (expr, 0);
1885 code = TREE_CODE (expr);
1886 goto restart;
1888 case ARRAY_RANGE_REF:
1889 base = TREE_OPERAND (expr, 0);
1890 if (tree_could_trap_p (base))
1891 return true;
1893 if (TREE_THIS_NOTRAP (expr))
1894 return false;
1896 return !range_in_array_bounds_p (expr);
1898 case ARRAY_REF:
1899 base = TREE_OPERAND (expr, 0);
1900 if (tree_could_trap_p (base))
1901 return true;
1903 if (TREE_THIS_NOTRAP (expr))
1904 return false;
1906 return !in_array_bounds_p (expr);
1908 case INDIRECT_REF:
1909 case ALIGN_INDIRECT_REF:
1910 case MISALIGNED_INDIRECT_REF:
1911 return !TREE_THIS_NOTRAP (expr);
1913 case ASM_EXPR:
1914 return TREE_THIS_VOLATILE (expr);
1916 case TRUNC_DIV_EXPR:
1917 case CEIL_DIV_EXPR:
1918 case FLOOR_DIV_EXPR:
1919 case ROUND_DIV_EXPR:
1920 case EXACT_DIV_EXPR:
1921 case CEIL_MOD_EXPR:
1922 case FLOOR_MOD_EXPR:
1923 case ROUND_MOD_EXPR:
1924 case TRUNC_MOD_EXPR:
1925 case RDIV_EXPR:
1926 if (honor_snans || honor_trapv)
1927 return true;
1928 if (fp_operation)
1929 return flag_trapping_math;
/* Integer division traps unless the divisor is a known nonzero
   constant.  */
1930 t = TREE_OPERAND (expr, 1);
1931 if (!TREE_CONSTANT (t) || integer_zerop (t))
1932 return true;
1933 return false;
1935 case LT_EXPR:
1936 case LE_EXPR:
1937 case GT_EXPR:
1938 case GE_EXPR:
1939 case LTGT_EXPR:
1940 /* Some floating point comparisons may trap. */
1941 return honor_nans;
1943 case EQ_EXPR:
1944 case NE_EXPR:
1945 case UNORDERED_EXPR:
1946 case ORDERED_EXPR:
1947 case UNLT_EXPR:
1948 case UNLE_EXPR:
1949 case UNGT_EXPR:
1950 case UNGE_EXPR:
1951 case UNEQ_EXPR:
1952 return honor_snans;
1954 case CONVERT_EXPR:
1955 case FIX_TRUNC_EXPR:
1956 /* Conversion of floating point might trap. */
1957 return honor_nans;
1959 case NEGATE_EXPR:
1960 case ABS_EXPR:
1961 case CONJ_EXPR:
1962 /* These operations don't trap with floating point. */
1963 if (honor_trapv)
1964 return true;
1965 return false;
1967 case PLUS_EXPR:
1968 case MINUS_EXPR:
1969 case MULT_EXPR:
1970 /* Any floating arithmetic may trap. */
1971 if (fp_operation && flag_trapping_math)
1972 return true;
1973 if (honor_trapv)
1974 return true;
1975 return false;
1977 case CALL_EXPR:
1978 t = get_callee_fndecl (expr);
1979 /* Assume that calls to weak functions may trap. */
1980 if (!t || !DECL_P (t) || DECL_WEAK (t))
1981 return true;
1982 return false;
1984 default:
1985 /* Any floating arithmetic may trap. */
1986 if (fp_operation && flag_trapping_math)
1987 return true;
1988 return false;
1992 bool
1993 tree_could_throw_p (tree t)
1995 if (!flag_exceptions)
1996 return false;
1997 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1999 if (flag_non_call_exceptions
2000 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
2001 return true;
2002 t = GIMPLE_STMT_OPERAND (t, 1);
2005 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2006 t = TREE_OPERAND (t, 0);
2007 if (TREE_CODE (t) == CALL_EXPR)
2008 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2009 if (flag_non_call_exceptions)
2010 return tree_could_trap_p (t);
2011 return false;
2014 bool
2015 tree_can_throw_internal (tree stmt)
2017 int region_nr;
2018 bool is_resx = false;
2020 if (TREE_CODE (stmt) == RESX_EXPR)
2021 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2022 else
2023 region_nr = lookup_stmt_eh_region (stmt);
2024 if (region_nr < 0)
2025 return false;
2026 return can_throw_internal_1 (region_nr, is_resx);
2029 bool
2030 tree_can_throw_external (tree stmt)
2032 int region_nr;
2033 bool is_resx = false;
2035 if (TREE_CODE (stmt) == RESX_EXPR)
2036 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2037 else
2038 region_nr = lookup_stmt_eh_region (stmt);
2039 if (region_nr < 0)
2040 return tree_could_throw_p (stmt);
2041 else
2042 return can_throw_external_1 (region_nr, is_resx);
2045 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2046 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2047 in the table if it should be in there. Return TRUE if a replacement was
2048 done that may require an EH edge purge. */
2050 bool
2051 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2053 int region_nr = lookup_stmt_eh_region (old_stmt);
2055 if (region_nr >= 0)
2057 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2059 if (new_stmt == old_stmt && new_stmt_could_throw)
2060 return false;
2062 remove_stmt_from_eh_region (old_stmt);
2063 if (new_stmt_could_throw)
2065 add_stmt_to_eh_region (new_stmt, region_nr);
2066 return false;
2068 else
2069 return true;
2072 return false;